repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
googleapis/python-irm | docs/conf.py | Python | apache-2.0 | 11,253 | 0.000622 | # -*- coding: utf-8 -*-
#
# google-cloud-irm documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.6.3"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"google-cloud-irm"
copyright = u"2019, Google"
author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"description": "Google Cloud Client Libraries for google-cloud-irm",
"github_user": "googleapis",
"github_repo": "python-irm",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
"code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are cop | ied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional | templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-irm-doc"
# -- Options for warnings ------------------------------------------------------
suppress_warnings = [
# Temporarily suppress this to avoid "more than one target found for
# cross-reference" warning, which are intractable for us to avoid while in
# a mono-repo.
# See https://github.com/sphinx-doc/sphinx/blob
# /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
"ref.python"
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper'). |
ombt/ombt | pythonsrc/quick_python_book/chap6/string_format.py | Python | mit | 346 | 0.017341 | #!/usr/bin/python3.2
#
p1 = "mr"
p2 | = "duck"
p3 = "i'm"
p4 = "done"
p5 ="."
#
print("{0}. {1}, {2} {3}{4}".format(p1,p2,p3,p4,p5))
#
p1 = "mr. duck"
p2 = "i'm done"
print("{mrduck}. {imdone}".format(mrduck=p1,imdone=p2))
#
print("{0:{1}} is the food of the gods ...".format("Ambrosia", 15))
#
print("%s, %s." % ("mr. | duck", "i'm done"))
#
quit()
|
ax003d/sichu_web | sichu/apiserver/tests.py | Python | mit | 2,756 | 0.00254 | import json
from django.test import TestCase
from cabinet.factories import UserFactory, BookOwnFactory
from factories import ClientFactory, AccessTokenFactory
class BookOwnTest(TestCase):
fixtures = ['users.json', 'client.json', 'books.json',
'bookownerships.json']
def setUp(self):
self.bob = UserFactory(username='bob')
client = ClientFactory()
self.token = AccessTokenFactory(client=client, user=self.bob)
self.book = BookOwnFactory(owner=self.bob)
def test_get(self):
response = self.client.get(
'/v1/bookown/?format=json',
HTTP_AUTHORIZATION="Bearer %s" % self.token.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'meta'))
response = self.client.get(
'/v1/bookown/?format=json&id__exact=1',
| HTTP_AUTHORIZATION="Bearer %s" % self.toke | n.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'meta'))
# get friends books
response = self.client.get(
'/v1/bookown/?format=json&uid=2&trim_owner=1',
HTTP_AUTHORIZATION="Bearer %s" % self.token.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'meta'))
def test_add(self):
response = self.client.post(
'/v1/bookown/add/?format=json',
{'isbn': self.book.book.isbn,
'status': '1'},
HTTP_AUTHORIZATION="Bearer %s" % self.token.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'id'))
def test_edit(self):
response = self.client.post(
'/v1/bookown/{}/'.format(self.book.id),
{'status': '5',
'remark': 'test'},
HTTP_AUTHORIZATION="Bearer %s" % self.token.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'error_code'))
response = self.client.post(
'/v1/bookown/{}/'.format(self.book.id),
{'status': '4',
'remark': 'test'},
HTTP_AUTHORIZATION="Bearer %s" % self.token.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'status'))
def test_delete(self):
response = self.client.post(
'/v1/bookown/delete/{}/'.format(self.book.id),
HTTP_AUTHORIZATION="Bearer %s" % self.token.token)
# print response.content
ret = json.loads(response.content)
self.assertTrue(ret.has_key(u'status'))
|
thomasyu888/Genie | genie/validate.py | Python | mit | 10,061 | 0.000398 | #!/usr/bin/env python3
import importlib
import inspect
import logging
import sys
import synapseclient
try:
from synapseclient.core.exceptions import SynapseHTTPError
except ModuleNotFoundError:
from synapseclient.exceptions import SynapseHTTPError
from . import config
from . import example_filetype_format
from . import process_functions
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class ValidationHelper(object):
# Used for the kwargs in validate_single_file
# Overload this per class
_validate_kwargs = []
def __init__(self, syn, project_id, center, entitylist,
format_registry=config.PROCESS_FILES,
file_type=None):
"""A validator helper class for a center's files.
Args:
syn: a synapseclient.Synapse object
project_id: Synapse Project ID where files are stored and configured.
center: The participating center name.
filepathlist: a list of file paths.
format_registry: A dictionary mapping file format name to the
format class.
file_type: Specify file type to skip filename validation
"""
self._synapse_client = syn
self._project = syn.get(project_id)
self.entitylist = entitylist
self.center = center
self._format_registry = format_registry
self.file_type = (self.determine_filetype()
if file_type is None else file_type)
def determine_filetype(self):
"""Gets the file type of the file by validating its filename
Args:
syn: Synapse object
filepathlist: list of filepaths to center files
Returns:
str: File type of input files. None if no filetype found
"""
filetype = None
# Loop through file formats
for file_format in self._format_registry:
validator = self._format_registry[file_format](self._synapse_client, self.center)
try:
filenames = [entity.name for entity in self.entitylist]
filetype = validator.validateFilename(filenames)
except AssertionError:
continue
# If valid filename, return file type.
if filetype is not None:
break
return filetype
def validate_single_file(self, **kwargs):
"""Validate a submitted file unit.
Returns:
message: errors and warnings
valid: Boolean value of validation status
"""
if self.file_type not in self._format_registry:
valid = False
errors = "Your filename is incorrect! Please change your filename before you run the validator or specify --filetype if you are running the validator locally"
warnings = ""
else:
mykwargs = {}
for required_parameter in self._validate_kwargs:
assert required_parameter in kwargs.keys(), \
"%s not in parameter list" % required_parameter
mykwargs[required_parameter] = kwargs[required_parameter]
validator_cls = self._format_registry[self.file_type]
validator = validator_cls(self._synapse_client, self.center)
filepathlist = [entity.path for entity in self.entitylist]
valid, errors, warnings = validator.validate(filePathList=filepathlist,
**mykwargs)
# Complete error message
message = collect_errors_and_warnings(errors, warnings)
return (valid, message)
class GenieValidationHelper(ValidationHelper):
"""A validator helper class for AACR Project Genie.
"""
_validate_kwargs = ['oncotree_link', 'nosymbol_check']
def collect_errors_and_warnings(errors, warnings):
'''Aggregates error and warnings into a string.
Args:
errors: string of file errors, separated by new lines.
warnings: string of file warnings, separated by new lines.
Returns:
message - errors + warnings
'''
# Complete error message
message = "----------------ERRORS----------------\n"
if errors == "":
message = "YOUR FILE IS VALIDATED!\n"
logger.info(message)
else:
for error in errors.split("\n"):
if error != '':
logger.error(error)
message += errors
if warnings != "":
for warning in warnings.split("\n"):
if warning != '':
logger.warning(warning)
message += "-------------WARNINGS-------------\n" + warnings
return message
def get_config(syn, synid):
"""Gets Synapse database to Table mapping in dict
Args:
syn: Synapse connection
synid: Synapse id of database mapping table
Returns:
dict: {'databasename': 'synid'}
"""
config = syn.tableQuery('SELECT * FROM {}'.format(synid))
configdf = config.asDataFrame()
configdf.index = configdf['Database']
config_dict = configdf.to_dict()
return config_dict['Id']
def _check_parentid_permission_container(syn, parentid):
"""Checks permission / container
# TODO: Currently only checks if a user has READ permissions
"""
if parentid is not None:
try:
syn_ent = syn.get(parentid, downloadFile=False)
# If not container, throw an assertion
assert synapseclient.entity.is_container(syn_ent)
except (SynapseHTTPError, AssertionError):
raise ValueError(
"Provided Synapse id must be your input folder Synapse id "
"or a Synapse Id of a folder inside your input directory")
def _check_center_input(center, center_list):
"""Checks center input
Args:
center: Center name
center_list: List of allowed centers
Raises:
ValueError: If specify a center not part of the center list
"""
if center not in center_list:
raise ValueError("Must specify one of these "
f"centers: {', '.join(center_list)}")
def _get_oncotreelink(syn, databasetosynid_mappingdf, oncotree_link=None):
"""
Gets oncotree link unless a link is specified by the user
Args:
syn: Synapse object
databasetosynid_mappingdf: database to synid mapping
oncotree_link: link to oncotree. Default is None
|
Retu | rns:
oncotree link
"""
if oncotree_link is None:
oncolink = databasetosynid_mappingdf.query(
'Database == "oncotreeLink"').Id
oncolink_ent = syn.get(oncolink.iloc[0])
oncotree_link = oncolink_ent.externalURL
return oncotree_link
def _upload_to_synapse(syn, filepaths, valid, parentid=None):
"""
Upload to synapse if parentid is specified and valid
Args:
syn: Synapse object
filepaths: List of file paths
valid: Boolean value for validity of file
parentid: Synapse id of container. Default is None
"""
if parentid is not None and valid:
logger.info("Uploading file to {}".format(parentid))
for path in filepaths:
file_ent = synapseclient.File(path, parent=parentid)
ent = syn.store(file_ent)
logger.info("Stored to {}".format(ent.id))
def collect_format_types(package_names):
"""Finds subclasses of the example_filetype_format.FileTypeFormat
from a list of package names.
Args:
package_names: A list of Python package names as strings.
Returns:
A list of classes that are in the named packages and subclasses
of example_filetype_format.FileTypeFormat
"""
file_format_list = []
for package_name in package_names:
importlib.import_module(package_name)
for cls in config.get_subclasses(example_filetype_format.FileTypeFormat):
logger.debug("checking {}.".format(cls))
cls_module_name = cls.__module__
cls_pkg = cls_module_name.split('.')[0]
if cls_pkg in package_names:
file_format_list.append(cls)
f |
tensorflow/lucid | lucid/scratch/parameter_editor.py | Python | apache-2.0 | 1,750 | 0.009143 | import numpy as np
import tensorflow as tf
class ParameterEditor():
"""Conveniently edit the parameters of a lucid model.
Example usage:
model = models.InceptionV1()
param = ParameterEditor(model.graph_def)
# Flip weights of first channel of conv2d0
param["conv2d0 | _w", :, :, :, 0] *= -1
"""
def __init__(self, graph_def):
self.nodes = {}
for node in graph_def.node:
if "value" in node.attr:
self.n | odes[str(node.name)] = node
# Set a flag to mark the fact that this is an edited model
if not "lucid_is_edited" in self.nodes:
with tf.Graph().as_default() as temp_graph:
const = tf.constant(True, name="lucid_is_edited")
const_node = temp_graph.as_graph_def().node[0]
graph_def.node.extend([const_node])
def __getitem__(self, key):
name = key[0] if isinstance(key, tuple) else key
tensor = self.nodes[name].attr["value"].tensor
shape = [int(d.size) for d in tensor.tensor_shape.dim]
array = np.frombuffer(tensor.tensor_content, dtype="float32").reshape(shape).copy()
return array[key[1:]] if isinstance(key, tuple) else array
def __setitem__(self, key, new_value):
name = key[0] if isinstance(key, tuple) else key
tensor = self.nodes[name].attr["value"].tensor
node_shape = tuple([int(d.size) for d in tensor.tensor_shape.dim])
if isinstance(key, tuple):
array = np.frombuffer(tensor.tensor_content, dtype="float32")
array = array.reshape(node_shape).copy()
array[key[1:]] = new_value
tensor.tensor_content = array.tostring()
else:
assert new_value.shape == node_shape
dtype = tf.DType(tensor.dtype).as_numpy_dtype
tensor.tensor_content = new_value.astype(dtype).tostring()
|
jamielennox/tempest | tempest/scenario/test_volume_boot_pattern.py | Python | apache-2.0 | 8,184 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common.utils import data_utils
from tempest import config
from tempest.openstack.common import log
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = log.getLogger(__name__)
class TestVolumeBootPattern(manager.ScenarioTest):
"""
This test case attempts to reproduce the following steps:
* Create in Cinder some bootable volume importing a Glance image
* Boot an instance from the bootable volume
* Write content to the volume
* Delete an instance and Boot a new instance from the volume
* Check written content in the instance
* Create a volume snapshot while the instance is running
* Boot an additional instance from the new snapshot based volume
* Check written content in the instance booted from snapshot
"""
@classmethod
def resource_setup(cls):
if not CONF.volume_feature_enabled.snapshot:
raise cls.skipException("Cinder volume snapshots are disabled")
super(TestVolumeBootPattern, cls).resource_setup()
def _create_volume_from_image(self):
img_uuid = CONF.compute.image_ref
vol_name = data_utils.rand_name('volume-origin')
return self.create_volume(name=vol_name, imageRef=img_uuid)
def _boot_instance_from_volume(self, vol_id, keypair):
# NOTE(gfidente): the syntax for block_device_mapping is
# dev_name=id:type:size:delete_on_terminate
# where type needs to be "snap" if the server is booted
# from a snapshot, size instead can be safely left empty
bd_map = [{
'device_name': 'vda',
'volume_id': vol_id,
'delete_on_termination': '0'}]
self.security_group = self._create_security_group()
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'block_device_mapping': bd_map,
'key_name': keypair['name'],
'security_groups': security_groups
}
return self.create_server(image='', create_kwargs=create_kwargs)
def _create_snapshot_from_volume(self, vol_id):
snap_name = data_utils.rand_name('snapshot')
snap = self.snapshots_client.create_snapshot(
volume_id=vol_id,
force=True,
display_name=snap_name)
self.addCleanup_with_wait(
waiter_callable=self.snapshots_client.wait_for_resource_deletion,
thing_id=snap['id'], thing_id_param='id',
cleanup_callable=self.delete_wrapper,
cleanup_args=[self.snapshots_client.delete_snapshot, snap['id']])
self.snapshots_client.wait_for_snapshot_status(snap['id'], 'available')
self.assertEqual(snap_name, snap['display_name'])
return snap
def _create_volume_from_snapshot(self, snap_id):
vol_name = data_utils.rand_name('volume')
return self.create_volume(name=vol_name, snapshot_id=snap_id)
def _stop_instances(self, instances):
# NOTE(gfidente): two loops so we do not wait for the status twice
for i in instances:
self.servers_client.stop(i['id'])
for i in instances:
self.servers_client.wait_for_server_status(i['id'], 'SHUTOFF')
def _detach_volumes(self, volumes):
# NOTE(gfidente): two loops so we do not wait for the status twice
for v in volumes:
self.volumes_client.detach_volume(v['id'])
for v in volumes:
self.volumes_client.wait_for_volume_status(v['id'], 'available')
def _ssh_to_server(self, server, keypair):
if CONF.compute.use_floatingip_for_ssh:
_, floating_ip = self.floating_ips_client.create_floating_ip()
self.addCleanup(self.delete_wrapper,
self.floating_ips_client.delete_floating_ip,
floating_ip['id'])
self.floating_ips_client.associate_floating_ip_to_server(
floating_ip['ip'], server['id'])
ip = floating_ip['ip']
else:
network_name_for_ssh = CONF.compute.network_for_ssh
ip = server.networks[network_name_for_ssh][0]
return self.get_remote_client(ip, private_key=keypair['private_key'],
log_console_of_servers=[server])
def _get_content(self, ssh_client):
return ssh_client.exec_command('cat /tmp/text')
def _write_text(self, ssh_client):
text = data_utils.rand_name('text-')
ssh_client.exec_command('echo "%s" > /tmp/text; sync' % (text))
return self._get_content(ssh_client)
def _delete_server(self, server):
self.servers_client.delete_server(server['id'])
self.servers_client.wait_for_server_termination(server['id'])
def _check_content_of_written_file(self, ssh_client, expected):
actual = self._get_content(ssh_client)
self.assertEqual(expected, actual)
@test.skip_because(bug='1373513')
@test.services('compute', 'volume', 'image')
def test_volume_boot_pattern(self):
keypair = self.create_keypair()
self.security_group = self._create_security_group()
# create an instance from volume
volume_origin = self._create_volume_from_image()
instance_1st = self._boot_instance_from_volume(volume_origin['id'],
keypair)
# write content to volume on instance
ssh_client_for_instance_1st = self._ssh_to_server(instance_1st,
keypair)
text = self._write_text(ssh_client_for_instance_1st)
# delete instance
self._delete_server(instance_1st)
# create a 2nd instance from volume
instance_2nd = self._boot_instance_from_volume(volume_origin['id'],
keypair)
# check the content of written file
ssh_client_for_instance_2nd = self._ssh_to_server(instance_2nd,
keypair)
self._check_content_of_written_file(ssh_client_for_instance_2nd, text)
# snapshot a volume
snapshot = self._create_snapshot_from_volume(volume_origin['id'])
# create a 3rd instance from snapshot
volume = self._create_volume_from_snapshot(snapshot['id'])
instance_from_snapshot = self._boot_instance_from_volume(volume['id'],
keypair)
# check the content of written file
ssh_client = self._ssh_to_server(instance_from_snapshot, keypair)
self._check_content_of_written_file(ssh_client, text)
# NOTE(gfidente): ensure resources are in clean state for
# deletion operations to succeed
self._stop_instances([instance_2nd, instance_from_snapshot])
self._detach_volumes([volume_origin, volume])
class TestVolumeBootPatternV2(TestVolumeBootPattern):
def _boot_instance_from_volume(self, vol_id, keypair):
bdms = [{'uuid': vol_id, 'source_type': 'volume',
'destination_type': 'volume', 'boot_index': 0,
'delete_on_termination': False}]
self.security_group = self._create_security_group()
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'block_device_mapping_v2': bdms,
'key_name': keypair['name'],
'security_groups': se | curity_groups
}
return sel | f.create_server(image='', create_kwargs=create_kwar |
stephane-martin/salt-debian-packaging | salt-2016.3.3/tests/unit/acl/client_test.py | Python | apache-2.0 | 1,751 | 0.001142 | # -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
# Import Salt Libs
from salt import acl
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
)
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ClientACLTestCase(TestCase):
'''
Unit tests for salt.acl.ClientACL
'''
def setUp(self):
self.blacklist = {
'users': ['joker', 'penguin'],
'modules': ['cmd.run', 'test.fib'],
}
def test_user_is_blacklisted(self):
'''
test user_is_blacklisted
'''
client_acl = acl.PublisherACL(self.blacklist)
self.assertTrue(client_acl.user_is_blacklisted('joker'))
sel | f.assertTrue(client_acl.user_is_blacklisted('penguin'))
self.assertFalse(client_acl.user_is_blacklisted('batman'))
self.assertFalse(client_acl.user_is_blacklisted('robin'))
def test_cmd_is_blacklisted(self):
'''
test cmd_is_blacklisted
'''
client_acl = acl.PublisherACL(self.blacklist)
self.assertTrue(client_acl.cmd_is_blacklisted('cmd.run'))
self.assertTrue(client_acl.cmd_is_blacklisted('test.fib'))
self.assertFalse(client_ac | l.cmd_is_blacklisted('cmd.shell'))
self.assertFalse(client_acl.cmd_is_blacklisted('test.versions'))
self.assertTrue(client_acl.cmd_is_blacklisted(['cmd.run', 'state.sls']))
self.assertFalse(client_acl.cmd_is_blacklisted(['state.highstate', 'state.sls']))
if __name__ == '__main__':
from integration import run_tests
run_tests(ClientACLTestCase, needs_daemon=False)
|
opennode/nodeconductor-assembly-waldur | src/waldur_mastermind/marketplace/migrations/0008_remove_offering_enable_dynamic_components.py | Python | mit | 357 | 0 | # Generated by Django 2.2.7 on 2019-12-04 09:18
from | dj | ango.db import migrations
class Migration(migrations.Migration):
dependencies = [
('marketplace', '0007_offering_enable_dynamic_components'),
]
operations = [
migrations.RemoveField(
model_name='offering', name='enable_dynamic_components',
),
]
|
pombreda/django-hotclub | libs/external_libs/docutils-0.4/extras/optparse.py | Python | mit | 51,584 | 0.00316 | """optparse - a powerful, extensible, and easy-to-use option parser.
By Greg Ward <gward@python.net>
Originally distributed as Optik; see http://optik.sourceforge.net/ .
If you have problems with this module, please do not file bugs,
patches, or feature requests with Python; instead, use Optik's
SourceForge project page:
http://sourceforge.net/projects/optik
For support, use the optik-users@lists.sourceforge.net mailing list
(http://lists.sourceforge.net/lists/listinfo/optik-users).
"""
# Python developers: please do not make changes to this file, since
# it is automatically generated from the Optik source code.
__version__ = "1.4.1+"
__all__ = ['Option',
'SUPPRESS_HELP',
'SUPPRESS_USAGE',
'Values',
'OptionContainer',
'OptionGroup',
'OptionParser',
'HelpFormatter',
'IndentedHelpFormatter',
'TitledHelpFormatter',
'OptParseError',
'OptionError',
'OptionConflictError',
| 'OptionValueError',
'BadOptionError']
__copyright__ = """
Copyright (c) 2001-2003 Gregory P. Ward. All rights reserved.
Copyright (c) 2002-2003 Python Software Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistr | ibutions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys, os
import types
import textwrap
# This file was generated from:
# Id: option_parser.py,v 1.57 2003/08/27 02:35:41 goodger Exp
# Id: option.py,v 1.26 2003/05/08 01:20:36 gward Exp
# Id: help.py,v 1.6 2003/08/27 02:35:41 goodger Exp
# Id: errors.py,v 1.7 2003/04/21 01:53:28 gward Exp
class OptParseError (Exception):
def __init__ (self, msg):
self.msg = msg
def __str__ (self):
return self.msg
class OptionError (OptParseError):
"""
Raised if an Option instance is created with invalid or
inconsistent arguments.
"""
def __init__ (self, msg, option):
self.msg = msg
self.option_id = str(option)
def __str__ (self):
if self.option_id:
return "option %s: %s" % (self.option_id, self.msg)
else:
return self.msg
class OptionConflictError (OptionError):
"""
Raised if conflicting options are added to an OptionParser.
"""
class OptionValueError (OptParseError):
"""
Raised if an invalid option value is encountered on the command
line.
"""
class BadOptionError (OptParseError):
"""
Raised if an invalid or ambiguous option is seen on the command-line.
"""
class HelpFormatter:
"""
Abstract base class for formatting option help. OptionParser
instances should use one of the HelpFormatter subclasses for
formatting help; by default IndentedHelpFormatter is used.
Instance attributes:
indent_increment : int
the number of columns to indent per nesting level
max_help_position : int
the maximum starting column for option help text
help_position : int
the calculated starting column for option help text;
initially the same as the maximum
width : int
total number of columns for output
level : int
current indentation level
current_indent : int
current indentation level (in columns)
help_width : int
number of columns available for option help text (calculated)
"""
def __init__ (self,
indent_increment,
max_help_position,
width,
short_first):
self.indent_increment = indent_increment
self.help_position = self.max_help_position = max_help_position
self.width = width
self.current_indent = 0
self.level = 0
self.help_width = width - max_help_position
self.short_first = short_first
def indent (self):
self.current_indent += self.indent_increment
self.level += 1
def dedent (self):
self.current_indent -= self.indent_increment
assert self.current_indent >= 0, "Indent decreased below 0."
self.level -= 1
def format_usage (self, usage):
raise NotImplementedError, "subclasses must implement"
def format_heading (self, heading):
raise NotImplementedError, "subclasses must implement"
def format_description (self, description):
desc_width = self.width - self.current_indent
indent = " "*self.current_indent
return textwrap.fill(description, desc_width,
initial_indent=indent,
subsequent_indent=indent) + "\n"
    def format_option (self, option):
        """Return the formatted help text for one option.

        The option strings go in a left column and the wrapped help text in a
        right column starting at ``self.help_position``; when the option
        strings are too wide, the help text starts on the following line.
        """
        # The help for each option consists of two parts:
        #   * the opt strings and metavars
        #   eg. ("-x", or "-fFILENAME, --file=FILENAME")
        #   * the user-supplied help string
        #   eg. ("turn on expert mode", "read data from FILENAME")
        #
        # If possible, we write both of these on the same line:
        #   -x      turn on expert mode
        #
        # But if the opt string list is too long, we put the help
        # string on a second line, indented to the same column it would
        # start in if it fit on the first line.
        #   -fFILENAME, --file=FILENAME
        #           read data from FILENAME
        result = []
        opts = option.option_strings
        # -2 leaves the two-space gutter between the columns.
        opt_width = self.help_position - self.current_indent - 2
        if len(opts) > opt_width:
            opts = "%*s%s\n" % (self.current_indent, "", opts)
            indent_first = self.help_position
        else: # start help on same line as opts
            opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts)
            indent_first = 0
        result.append(opts)
        if option.help:
            help_lines = textwrap.wrap(option.help, self.help_width)
            # First help line may continue the opts line (indent_first == 0).
            result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
            result.extend(["%*s%s\n" % (self.help_position, "", line)
                           for line in help_lines[1:]])
        elif opts[-1] != "\n":
            result.append("\n")
        return "".join(result)
def store_option_strings (self, parser):
self.indent()
max_len = 0
for opt in parser.option_list:
strings = self.format_option_strings(opt)
opt.option_strings = strings
max_len = max(max_len, len(strings) + self.current_indent)
self.indent()
for group in parser.option_groups:
for opt in group.option_list:
strings = self.format_option_strings(opt)
opt.option_strings = strings
max_len = max(max_len, len(strings) + self.current_indent)
self.dedent()
self.dedent()
self.help_po |
M4rtinK/pyside-android | tests/QtGui/bug_750.py | Python | lgpl-2.1 | 577 | 0.003466 | import unittest
from helper import UsesQApplication
from PySide.QtCore import QTimer
from PySide.QtGu | i import QPainter, QFont, QFontInfo, QWidget, qApp
class MyWidget(QWidget):
    # Test widget: records the painter's font info on first paint, then quits
    # the application so the test can inspect it. ``_app`` is assigned by the
    # test before the widget is shown.
    def paintEvent(self, e):
        p = QPainter(self)
        # QPainter.fontInfo() is only meaningful while the painter is active.
        self._info = p.fontInfo()
        self._app.quit()
class TestQPainter(UsesQApplication):
def testFontInfo(self):
w = MyWidget()
w._app = self.app
w._inf | o = None
QTimer.singleShot(300, w.show)
self.app.exec_()
self.assert_(w._info)
if __name__ == '__main__':
unittest.main()
|
mathias4github/ripe-atlas-traceroute2kml | ipdetailscache.py | Python | mit | 9,640 | 0.024066 | # Copyright (c) 2014 Pier Carlo Chiodi - http://www.pierky.com
# Licensed under The MIT License (MIT) - http://opensource.org/licenses/MIT
#
# The MIT License (MIT)
# =====================
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Part of this work is based on Google Python IP address manipulation library
# (https://code.google.com/p/ipaddr-py/).
"""A Python library to gather IP address details (ASN, prefix, resource holder, reverse DNS) using the
RIPEStat API, with a basic cache to avoid flood of requests and to enhance performance."""
__version__ = "0.1"
# Usage
# =====
#
# Import the library, then setup a cache object and use it to gather IP address details.
# The cache object will automatically load and save data to the local cache files.
#
# Optionally, the cache object may be instantiated with the following arguments:
# - IP_ADDRESSES_CACHE_FILE, path to the file where IP addresses cache will be stored (default: "ip_addr.cache");
# - IP_PREFIXES_CACHE_FILE, path to the file where IP prefixes cache will be stored (default: "ip_pref.cache");
# - MAX_CACHE, expiration time for cache entries, in seconds (default: 604800, 1 week);
# - Debug, set to True to enable some debug messages (default: False).
#
# Results are given in a dictionary containing the following keys: ASN, Holder, Prefix, HostName, TS (time stamp).
#
# Hostname is obtained using the local socket.getfqdn function.
#
# import ipdetailscache
# cache = ipdetailscache.IPDetailsCache( IP_ADDRESSES_CACHE_FILE = "ip_addr.cache", IP_PREFIXES_CACHE_FILE = "ip_pref.cache", MAX_CACHE = 604800, Debug = False );
# result = cache.GetIPInformation( "IP_ADDRESS" )
#
# Example
# =======
#
# :~# python
# Python 2.7.2+ (default, Jul 20 2012, 22:15:08)
# [GCC 4.6.1] on linux2
# Type "help", "copyright", "credits" or "license" for more information.
# >>> import ipdetailscache
# >>> cache = ipdetailscache.IPDetailsCache();
# >>> result = cache.GetIPInformation( "193.0.6.139" )
# >>> result
# {u'Prefix': u'193.0.0.0/21', u'HostName': u'www.ripe.net', u'Holder': u'RIPE-NCC-AS Reseaux IP Europeens Network Coordination Centre (RIPE NCC),NL', u'TS': 1401781240, u'ASN': u'3333'}
import os.path
import time
import json
import ipaddr # http://code.google.com/p/ipaddr-py/ - pip install ipaddr
import socket
import urllib2
class IPDetailsCache():
def _Debug(self, s):
if self.Debug:
print("DEBUG - IPDetailsCache - %s" % s)
# IPPrefixesCache[<ip prefix>]["TS"]
# IPPrefixesCache[<ip prefix>]["ASN"]
# IPPrefixesCache[<ip prefix>]["Holder"]
# IPAddressesCache[<ip>]["TS"]
# IPAddressesCache[<ip>]["ASN"]
# IPAddressesCache[<ip>]["Holder"]
# IPAddressesCache[<ip>]["Prefix"]
# IPAddressesCache[<ip>]["HostName"]
def GetIPInformation( self, in_IP | ):
Result = {}
Result["TS"] = 0
Result["ASN"] = ""
Result["Holder"] = ""
Result["Prefix"] = ""
Result["HostName"] = ""
IP = in_IP
if not IP in self.IPAddressObjects:
self.IPAddressObjects[IP] = ipaddr.IPAddress(IP)
if self.IPAddressObjects[IP].version == 4:
if self.IPAddressObjects[IP].is_private:
Result["ASN"] = "unknown"
| return Result
if self.IPAddressObjects[IP].version == 6:
if self.IPAddressObjects[IP].is_reserved or \
self.IPAddressObjects[IP].is_link_local or \
self.IPAddressObjects[IP].is_site_local or \
self.IPAddressObjects[IP].is_private or \
self.IPAddressObjects[IP].is_multicast or \
self.IPAddressObjects[IP].is_unspecified:
Result["ASN"] = "unknown"
return Result
if IP != self.IPAddressObjects[IP].exploded:
IP = self.IPAddressObjects[IP].exploded
if not IP in self.IPAddressObjects:
self.IPAddressObjects[IP] = ipaddr.IPAddress(IP)
if IP in self.IPAddressesCache:
if self.IPAddressesCache[IP]["TS"] >= int(time.time()) - self.MAX_CACHE:
Result = self.IPAddressesCache[IP]
self._Debug("IP address cache hit for %s" % IP)
return Result
else:
self._Debug("Expired IP address cache hit for %s" % IP)
for IPPrefix in self.IPPrefixesCache:
if self.IPPrefixesCache[IPPrefix]["TS"] >= int(time.time()) - self.MAX_CACHE:
if not IPPrefix in self.IPPrefixObjects:
self.IPPrefixObjects[IPPrefix] = ipaddr.IPNetwork( IPPrefix )
if self.IPPrefixObjects[IPPrefix].Contains( self.IPAddressObjects[IP] ):
Result["TS"] = self.IPPrefixesCache[IPPrefix]["TS"]
Result["ASN"] = self.IPPrefixesCache[IPPrefix]["ASN"]
Result["Holder"] = self.IPPrefixesCache[IPPrefix].get("Holder","")
Result["Prefix"] = IPPrefix
self._Debug("IP prefix cache hit for %s (prefix %s)" % ( IP, IPPrefix ) )
break
if Result["ASN"] == "":
self._Debug("No cache hit for %s" % IP )
URL = "https://stat.ripe.net/data/prefix-overview/data.json?resource=%s" % IP
obj = json.loads( urllib2.urlopen(URL).read() )
if obj["status"] == "ok":
Result["TS"] = int(time.time())
if obj["data"]["asns"] != []:
try:
Result["ASN"] = str(obj["data"]["asns"][0]["asn"])
Result["Holder"] = obj["data"]["asns"][0]["holder"]
Result["Prefix"] = obj["data"]["resource"]
self._Debug("Got data for %s: ASN %s, prefix %s" % ( IP, Result["ASN"], Result["Prefix"] ) )
except:
Result["ASN"] = "unknown"
self._Debug("No data for %s" % IP )
else:
Result["ASN"] = "not announced"
Result["Holder"] = ""
Result["Prefix"] = obj["data"]["resource"]
if Result["ASN"].isdigit() or Result["ASN"] == "not announced":
HostName = socket.getfqdn(IP)
if HostName == IP or HostName == "":
Result["HostName"] = "unknown"
else:
Result["HostName"] = HostName
if not IP in self.IPAddressesCache:
self.IPAddressesCache[IP] = {}
self._Debug("Adding %s to addresses cache" % IP)
self.IPAddressesCache[IP]["TS"] = Result["TS"]
self.IPAddressesCache[IP]["ASN"] = Result["ASN"]
self.IPAddressesCache[IP]["Holder"] = Result["Holder"]
self.IPAddressesCache[IP]["Prefix"] = Result["Prefix"]
self.IPAddressesCache[IP]["HostName"] = Result["HostName"]
if Result["Prefix"] != "":
IPPrefix = Result["Prefix"]
if not IPPrefix in self.IPPrefixesCache:
self.IPPrefixesCache[ IPPrefix ] = {}
self._Debug("Adding %s to prefixes cache" % IPPrefix)
self.IPPrefixesCache[IPPrefix]["TS"] = Result["TS"]
self.IPPrefixesCache[IPPrefix]["ASN"] = Result["ASN"]
self.IPPrefixesCache[IPPrefix]["Holder"] = Result["Holder"]
return Result
def SaveCache( self ):
# Save IP addresses cache
self._Debug("Saving IP addresses cache to %s" % self.IP_ADDRESSES_CACHE_FILE)
with open( self.IP_ADDRESSES_CACHE_FILE, "w" ) as outfile:
json.dump( self.IPAddressesCache, outfile )
# Save IP prefixes cache
self._Debug("Saving IP prefixes cache to %s" % self.IP_PREFIXES_CACHE_FILE)
with open( self.IP_PREFIXES_CACHE_FILE, "w" ) as outfile:
json.dump( self.IPPrefixesCache, outfile )
def __init__( self, IP_ADDRESSES_CACHE_FILE = "ip_addr.cache", IP_PREFIXES_CACHE_FILE = "ip_pref.cache", MAX_CACHE = 604800, Debug = False ):
self.IPAddressesCache = {}
self.IPPrefixesCache = {}
self.IPAddressObjects = {}
self.IPPrefixObjects = {}
self.IP_ADDRESSES_CACHE_FIL |
danstowell/markovrenewal | experiments/plotynth.py | Python | gpl-2.0 | 12,698 | 0.0278 | #!/bin/env python
# plot results from ynthetictest.py
# by Dan Stowell, spring 2013
import os.path
import csv
from math import log, exp, pi, sqrt, ceil, floor
from numpy import mean, std, shape
import numpy as np
import random
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from mpl_toolkits.mplot3d import Axes3D
import itertools
#annotdir = os.path.expanduser("~/svn/stored_docs/python/markovrenewal/output")
annotdir = "output"
plotfontsize = "large" #"xx-small"
namelookup = {
'fsn':'Fsn', 'ftrans':'Ftrans', 'fsigtrans':'Fsigtrans', 'msecs':'Run time (msecs)', \
'birthdens_mism':'Error in assumed birth density (ratio)',
'deathprob_mism':'Error in assumed death probability (ratio)',
'snr_mism':'Error in assumed SNR (dB)',
'gen_mism':'Proportion of errors in transition probabilities',
'misseddetectionprob':'Missed detection probability',
'noisecorr':'Amount of signal correlation imposed on noise',
'snr':'SNR (dB)',
'birthdens':'birth intensity',
#'':'',
}
def readable_name(name):
    """Translate a column key into its human-readable label; unknown keys pass through."""
    try:
        return namelookup[name]
    except KeyError:
        return name
def fmt_chooser(currentcombi, groupcols, groupingvals):
    """Choose a matplotlib format string for one plotted line.

    Lines for the greedy mmrpmode get dash-dot/dotted styles, all other
    lines solid/dashed; the second style of each pair marks any secondary
    group value beyond the first one.
    """
    greedy_line = groupcols[0] == 'mmrpmode' and currentcombi[0] == 'greedy'
    # Short-circuit keeps currentcombi[1] untouched when there is no
    # secondary grouping column.
    secondary = (len(groupcols) > 1
                 and groupingvals[groupcols[1]].index(currentcombi[1]) > 0)
    if greedy_line:
        style = ':' if secondary else '-.'
    else:
        style = '--' if secondary else '-'
    return 'k' + style
def ynth_csv_to_ciplot(csvpath, outpath, groupcols, summarycols, filtercols=None, xjitter=0.):
    """
    Plot mean +/- standard-error lines from a results CSV, one PDF per summary column.

    groupcols: used for discrete grouping of data, with the first one becoming the x-axis in a plot, remaining ones as multiple lines;
    summarycols: the name(s) of the columns to be made into y-values. one separate plot will be made for each.
    filtercols: {key->listofallowed...} select rows only where particular STRING values are found. otherwise, summaries are pooled over all values.
    xjitter: stddev of Gaussian noise added to x positions so overlapping lines stay visible.
    """
    data = ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols)
    # data is {'groupingvals':{ col: list }, 'summarydata':{ tupleofgroupvals: { summarycol:{'mean': _, 'stderr': _} } } }
    csvname = os.path.splitext(os.path.basename(csvpath))[0]
    # Allow a bare string instead of a list (NOTE: basestring is Python-2-only).
    if isinstance(summarycols, basestring): summarycols = [summarycols]
    if isinstance(groupcols, basestring): groupcols = [groupcols]
    # one plot for each summarycol
    for summarycol in summarycols:
        fig = plt.figure()
        # Now, we're going to use the first grouper as the x-axis.
        # This means we want to iterate over all combinations of the other groupers, drawing a line each time.
        for linegroupcombi in itertools.product(*[data['groupingvals'][col] for col in groupcols[1:]]):
            linedata = []
            for xval in data['groupingvals'][groupcols[0]]:
                fullgroupcombi = (xval,) + tuple(linegroupcombi)
                ourdata = data['summarydata'][fullgroupcombi][summarycol]
                # Optional jitter keeps coincident points distinguishable.
                if xjitter != 0:
                    xval += random.gauss(0,xjitter)
                linedata.append({'xval':xval, 'mean': ourdata['mean'], 'stderr_up': ourdata['stderr'], 'stderr_dn': ourdata['stderr']})
            # draw a line
            linelabel = ', '.join([linegroupcombi[0]] + ["%s %s" % (readable_name(groupcols[lgi+2]), lg) for lgi, lg in enumerate(linegroupcombi[1:])])
            plt.errorbar([x['xval'] for x in linedata], \
                [x['mean'] for x in linedata], \
                ([x['stderr_dn'] for x in linedata], [x['stderr_up'] for x in linedata]), \
                label=linelabel, fmt=fmt_chooser(linegroupcombi, groupcols[1:], data['groupingvals']))
        #plt.title("%s_%s" % (whichstat, runtype), fontsize=plotfontsize)
        plt.xlabel(readable_name(groupcols[0]), fontsize=plotfontsize)
        plt.ylabel(readable_name(summarycol), fontsize=plotfontsize)
        plt.xticks(data['groupingvals'][groupcols[0]], fontsize=plotfontsize)
        # Pad the x-range by 5% on each side of the data extent.
        xdatamax = max(data['groupingvals'][groupcols[0]])
        xdatamin = min(data['groupingvals'][groupcols[0]])
        plt.xlim(xmin=xdatamin-(xdatamax-xdatamin)*0.05, xmax=xdatamax+(xdatamax-xdatamin)*0.05)
        #yuck if groupcols[0] in ['deathprob_mism', 'birthdens_mism']:
        #yuck 	plt.xscale('log')
        # Runtime is plotted on a log scale; score-like metrics get a fixed range.
        if summarycol in ['msecs']:
            plt.yscale('log')
        else:
            plt.ylim(ymin=0.2, ymax=1) #rescale(0.3), ymax=rescale(1.001))
            #plt.yticks(map(rescale, yticks), yticks, fontsize=plotfontsize)
            plt.yticks(fontsize=plotfontsize)
        plt.legend(loc=(0.02, 0.05), prop={'size':'medium'})
        outfilepath = "%s/%s_%s.pdf" % (outpath, csvname, summarycol)
        plt.savefig(outfilepath, papertype='A4', format='pdf')
        print("Written file %s" % outfilepath)
    # LATER: consider how to avoid filename collisions - just allow user to specify a lbl?
def ynth_csv_to_surfaceplot(csvpath, outpath, groupcols, summarycols, filtercols=None):
"""
groupcols: used for discrete grouping of data, with the first one becoming the x-axis in a plot, second as y-axis;
summarycols: the name(s) of the columns to be made into y-values. one separate plot will be made for each.
filtercols: {key->listofallowed...} select rows only where particular STRING values are found. otherwise, summaries are pooled over all values.
"""
data = ynth_csv_loaddata(csvpath, groupcols, summarycols, filtercols)
# data is {'groupingvals':{ col: list }, 'summarydata':{ tupleofgroupvals: { summarycol:{'mean': _, 'stderr': _} } } }
csvname = os.path.splitext(os.path.basename(csvpath))[0]
if isinstance(summarycols, basestring): summarycols = [summarycols]
if isinstance(groupcols, basestring): groupcols = [groupcols]
if len(groupcols) != 2: raise ValueError("for surface plot, exactly 2 groupcols must be specified (used as X and Y).")
# one plot for each summarycol
for summarycol in summarycols:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d') # 3D here
# NOW DO A SURFACE PLOT
data['groupingvals'][groupcols[0]].sort()
ydata = map(float, data['groupingvals'][groupcols[1]])
ydata.sort()
data['groupingvals'][groupcols[1]].sort(cmp=lambda a,b: cmp(float(a), float(b)))
z = [[data['summarydata'][(x,y)][summarycol]['mean'] for x in data['groupingvals'][groupcols[0]]] for y in data['groupingvals'][groupcols[1]]]
ymesh = np.array([data['groupingvals'][groupcols[0]] for _ in range(len(data['groupingvals'][groupcols[1]]))])
xmesh = np.array([ydata for _ in range(len(data['groupingvals'][groupcols[0]]))]).T
z = np.array(z)
ax.plot_surface(xmesh, ymesh, z, rstride=1, cstride=1)
"""
plt.imshow(z, interpolation='nearest', cmap=cm.binary)
"""
"""
# Now, we're going to use the first grouper as the x-axis.
# This means we want to iterate over all combinations of the other groupers, drawing a line each time.
for linegroupcombi in itertools.product(*[data['groupingvals'][col] for col in groupcols[1:]]):
linedata = []
for xval in data['groupingvals'][groupcols[0]]:
fullgroupcombi = (xval,) + tuple(linegroupcombi)
ourdata = data['summarydata'][fullgroupcombi][summarycol]
if xjitter != 0:
xval += random.gauss(0,xjitter)
linedata.append({'xval':xval, 'mean': ourdata['mean'], 'stderr_up': ourdata['stderr'], 'stderr_dn': ourdata['stderr']})
# draw a line
linelabel = ', '.join([linegroupcombi[0]] + ["%s %s" % (readable_name(groupcols[lgi+2]), lg) for lgi, lg in enumerate(linegroupcombi[1:])])
plt.errorbar([x['xval'] for x in linedata], \
[x['mean'] for x in linedata], \
([x['stderr_dn'] for x in linedata], [x['stderr_up'] for x in linedata]), \
label=linelabel, fmt=fmt_chooser(linegroupcombi, groupcols[1:], data['groupingvals']))
"""
#plt.title("%s_%s" % (whichstat, runtype), fontsize=plotfontsize)
"""
plt.xlabel(readable_name(grou | pcols[0]), fontsize=plotfontsize)
plt.ylabel(readable_name(groupcols[1]), fontsize=plotfontsize)
plt.title(readable_name(summarycol), fontsize=plotfontsize)
plt.xticks(range(len(data['groupingvals'][groupcols[0]])), data['groupingvals'][groupcols[0]], fontsize=plotfontsize)
plt.yticks(range(len(data['groupingvals'][groupcols[1]])), data['groupingvals'][groupcols[1]], fontsize=plotfontsize)
"""
"""
xdata | max = max(data['groupingvals'][groupcols[0]])
xdatamin = min(data['groupingvals'][groupcols[0]])
plt.xlim(xmin=xdatamin-(xdatamax-xdatamin)*0.05, xmax=xdata |
ricardoy/coccimorph | coccimorph/segment.py | Python | gpl-3.0 | 5,932 | 0.000506 | import argparse
import cv2
import numpy as np
from coccimorph.aux import load_image
class Segmentator(object):
def __init__(self, filename, threshold, scale):
self.img = load_image(filename, scale)
self.img_bin = binaryze(self.img, threshold)
self.height, self.width, _ = self.img.shape
self.vx = []
self.vy = []
self.checkpoint = 0
self.invert = [
[0, 4],
[1, 5],
[2, 6],
[3, 7],
[4, 0],
[5, 1],
[6, 2],
[7, 3]
]
def save_segmentation(self, filename):
img = np.copy(self.img)
for x, y in zip(self.vx, self.vy):
img[x, y] = 255
cv2.imwrite(filename, np.transpose(img, axes=[1, 0, 2]))
print('File %s was saved.'%(filename))
def process_contour(self):
fim = False
starting_point_found = False
self.checkpoint = 0
i = 0
while i < self.height and not fim:
j = 0
while j < self.width and not fim:
if self.img_bin[i][j] == 255:
self.vx.append(i)
self.vy.append(j-1)
starting_point_found = True
fim = True
j += 1
i += 1
if not starting_point_found:
self.vx.append(0)
self.vy.append(0)
if self.vx[0] > 1 and self.vy[0] > 1 and \
self.vx[0] < self.height - 1 and self.vy[0] < self.width - 1:
n = 2
x4 = self.vx[0]
y4 = self.vy[0] - 1
x5 = self.vx[0] + 1
y5 = self.vy[0] - 1
x6 = self.vx[0] + 1
y6 = self.vy[0]
x7 = self.vx[0] + 1
y7 = self.vy[0] + 1
x0 = self.vx[0]
y0 = self.vy[0] + 1
dcn = 0
next_pixel = (0, 0)
if self.img_bin[x4, y4] == 0 and self.img_bin[x5, y5] == 255:
next_pixel = (x4, y4)
dcn = 4
elif self.img_bin[x5, y5] == 0 and self.img_bin[x6, y6] == 255:
next_pixel = (x5, y5)
dcn = 5
elif self.img_bin[x6, y6] == 0 and self.img_bin[x7, y7] == 255:
next_pixel = (x6, y6)
dcn = 6
elif self.img_bin[x7, y7] == 0 and self.img_bin[x0, y0] == 255:
next_pixel = (x7, y7)
dcn = 7
while n | ot(next_pixel[0] == self.vx[0] and next_pixel[1] == self.vy[0]):
self.vx.append(int(next_pixel[0]))
self.vy.append(int(next_pixel[1]))
dpc = dcn
# w_vect = (next_pixel[0], next_pixel[1], dcn)
retvals = self._find_next(self.vx[-1], self.vy[-1], dpc)
next_pixel = (retvals[0], retvals[1])
dcn = retvals[2]
| n += 1
if n < 20:
i = 0
while(i < n-1):
if next_pixel[0] == self.vx[i] and \
next_pixel[1] == self.vy[i] and i > 0:
next_pixel = (self.vx[0], self.vy[0])
n -= 1
self.checkpoint = 1
i += 1
    def _find_next(self, pcx: int, pcy: int, dpc: int):
        """Return [x, y, d] for the next contour pixel after (pcx, pcy).

        ``dpc`` is the chain-code direction that led into the current pixel.
        Scans the 8-neighbourhood starting from the reverse direction for a
        background pixel whose next neighbour (one step further around) is an
        object pixel — that background pixel is the next contour point.
        Returns None implicitly if no such pair is found in 7 steps.
        """
        w2 = np.zeros(3, dtype=np.int)
        # Direction pointing back toward the previous pixel.
        dcp = self.invert[dpc][1]
        for r in range(7):
            dE = (dcp + r) % 8  # candidate exterior (background) direction
            dI = (dcp + r + 1) % 8  # its successor, candidate interior (object)
            pe = self.chain_point(pcx, pcy, dE)
            pi = self.chain_point(pcx, pcy, dI)
            if self.is_background(pe) and self.is_object(pi):
                w2[0] = pe[0]
                w2[1] = pe[1]
                w2[2] = dE
                return w2
def is_background(self, pe):
return self.img_bin[pe[0], pe[1]] == 0
def is_object(self, pi):
return self.img_bin[pi[0], pi[1]] == 255
def chain_point(self, pcx, pcy, d):
if d == 0:
return pcx, pcy + 1
elif d == 1:
return pcx - 1, pcy + 1
elif d == 2:
return pcx - 1, pcy
elif d == 3:
return pcx - 1, pcy - 1
elif d == 4:
return pcx, pcy - 1
elif d == 5:
return pcx + 1, pcy - 1
elif d == 6:
return pcx + 1, pcy
elif d == 7:
return pcx + 1, pcy + 1
else:
raise ValueError('Parameter d should be an integer in [0, 7].')
def binaryze(img, threshold):
    """Binarize a BGR image.

    Gray values strictly below ``threshold`` map to 255 (object), all
    others to 0 (background).
    """
    img_grayscale = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Vectorized comparison: np.vectorize over a lambda evaluated the lambda
    # per pixel in Python and was far slower for the same result.
    return np.where(img_grayscale < threshold, 255, 0)
def segment(filename, threshold, binary_file, segmented_file, scale=None):
    """Segment the image and write two PNGs: the inverted binary mask and the
    original image with the traced contour overlaid.

    ``binary_file``/``segmented_file`` default to files under /tmp when None.
    """
    if binary_file is None:
        binary_file = '/tmp/binary.png'
    if segmented_file is None:
        segmented_file = '/tmp/segmented.png'
    seg = Segmentator(filename, threshold, scale)
    seg.process_contour()
    # Invert the mask (object black on white) and transpose back to image
    # orientation before writing.
    cv2.imwrite(binary_file, np.transpose(abs(seg.img_bin - 255), axes=[1, 0]))
    print('File %s was saved.' % (binary_file))
    seg.save_segmentation(segmented_file)
if __name__ == '__main__':
    # Command-line entry point: segment one image into binary + contour PNGs.
    parser = argparse.ArgumentParser(description='Segment image.')
    parser.add_argument('-input-file', type=str)
    parser.add_argument('-threshold', type=int)
    parser.add_argument('-scale', type=float)
    parser.add_argument('-output-binary', type=str)
    parser.add_argument('-output-segmented', type=str)
    args = parser.parse_args()
    # Input image and threshold are mandatory; the rest fall back to defaults.
    if args.input_file is not None and args.threshold is not None:
        segment(args.input_file,
                args.threshold,
                args.output_binary,
                args.output_segmented,
                args.scale)
|
inercia/evy | evy/io/convenience.py | Python | mit | 5,059 | 0.006523 | # Evy - a concurrent networking library for Python
#
# Unless otherwise noted, the files in Evy are under the following MIT license:
#
# Copyright (c) 2012, Alvaro Saurin
# Copyright (c) 2008-2010, Eventlet Contributors (see AUTHORS)
# Copyright (c) 2007-2010, Linden Research, Inc.
# Copyright (c) 2005-2006, Bob Ippolito
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from evy.green.pools import GreenPool
from evy.green.threads import kill, getcurrent
from evy.patched import socket
from evy.support import greenlets as greenlet
def connect (addr, family = socket.AF_INET, bind = None):
    """
    Convenience function for opening client sockets.

    :param addr: Address of the server to connect to. For TCP sockets, this is a (host, port) tuple.
    :param family: Socket family, optional. See :mod:`socket` documentation for available families.
    :param bind: Local address to bind to, optional.
    :return: The connected green socket object.
    """
    sock = socket.socket(family, socket.SOCK_STREAM)
    if bind is not None:
        sock.bind(bind)
    sock.connect(addr)
    return sock
def listen(addr, family=socket.AF_INET, backlog=50):
    """
    Convenience function for opening server sockets. The returned socket
    can be used in :func:`~evy.serve` or a custom ``accept()`` loop.

    Sets SO_REUSEADDR on the socket to save on annoyance.

    :param addr: Address to listen on. For TCP sockets, this is a (host, port) tuple.
    :param family: Socket family, optional. See :mod:`socket` documentation for available families.
    :param backlog: The maximum number of queued connections. Should be at least 1; the maximum value is system-dependent.
    :return: The listening green socket object.
    """
    listener = socket.socket(family, socket.SOCK_STREAM)
    # SO_REUSEADDR has different (undesirable) semantics on Windows.
    if not sys.platform.startswith("win"):
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(addr)
    listener.listen(backlog)
    return listener
class StopServe(Exception):
    """Raised by a handler to shut down :func:`~evy.serve` gracefully."""
def _stop_checker (t, server_gt, conn):
    # Link callback attached to each handler greenthread: waits for the
    # handler to finish, always closes the client connection, and propagates
    # any unexpected handler exception into the server greenthread via kill().
    try:
        try:
            t.wait()
        finally:
            conn.close()
    except greenlet.GreenletExit:
        # Normal cancellation of the handler greenthread; not an error.
        pass
    except Exception:
        kill(server_gt, *sys.exc_info())
def serve (sock, handle, concurrency = 1000):
    """
    Runs a server on the supplied socket. Calls the function *handle* in a
    separate greenthread for every incoming client connection. *handle* takes
    two arguments: the client socket object, and the client address::

        def myhandle(client_sock, client_addr):
            print "client connected", client_addr

        evy.serve(evy.listen(('127.0.0.1', 9999)), myhandle)

    Returning from *handle* closes the client socket.

    :func:`serve` blocks the calling greenthread; it won't return until
    the server completes. If you desire an immediate return,
    spawn a new greenthread for :func:`serve`.

    Any uncaught exceptions raised in *handle* are raised as exceptions
    from :func:`serve`, terminating the server, so be sure to be aware of the
    exceptions your application can raise. The return value of *handle* is
    ignored.

    Raise a :class:`~evy.StopServe` exception to gracefully terminate the
    server -- that's the only way to get the server() function to return rather
    than raise.

    The value in *concurrency* controls the maximum number of
    greenthreads that will be open at any time handling requests. When
    the server hits the concurrency limit, it stops accepting new
    connections until the existing ones complete.
    """
    pool = GreenPool(concurrency)
    server_gt = getcurrent()

    while True:
        try:
            # accept() blocks when the pool is exhausted until a slot frees up.
            conn, addr = sock.accept()
            gt = pool.spawn(handle, conn, addr)
            # _stop_checker closes conn and propagates handler errors to us.
            gt.link(_stop_checker, server_gt, conn)
            # Drop local references so the connection can be collected as soon
            # as the handler finishes.
            conn, addr, gt = None, None, None
        except StopServe:
            return
|
olekw/cyphesis | data/rulesets/basic/scripts/mind/goals/humanoid/construction.py | Python | gpl-2.0 | 5,984 | 0.00234 | # This file is distributed under the terms of the GNU General Public license.
# Copyright (C) 2004 Al Riddoch (See the file COPYING for details).
import entity_filter
from atlas import Operation, Entity
from mind.goals.common.misc_goal import *
from mind.goals.common.move import *
from physics import *
# Gather a resource from nearby
# This is designed to be placed early in a complex goal, so it returns
# as fulfilled when it has nothing to do
class gather(Goal):
    """Gather a freely available resource from nearby.

    Designed to be placed early in a complex goal: the fulfillment check
    reports "nothing to do" as soon as no matching thing is close by.
    """

    def __init__(self, what):
        Goal.__init__(self, "gather a thing",
                      self.is_there_none_around,
                      [spot_something(what),
                       pick_up_focus(what)])
        if isinstance(what, str):
            self.what = what
        else:
            self.what = str(what)
        # FIXME: This goal shares the same filter as spot_something
        self.filter = entity_filter.Filter(self.what)
        self.vars = ["what"]

    def is_there_none_around(self, me):
        """Return 1 when no matching thing (outside ourselves) is nearby, else 0."""
        # Threshold on the *squared* distance: 30 covers roughly
        # sqrt(30) ~ 5.5 units around us.
        square_near_dist = 30
        for thing in me.map.find_by_filter(self.filter):
            square_dist = square_distance(me.entity.location, thing.location)
            if square_dist < square_near_dist and \
                    thing.location.parent.id != me.entity.id:
                return 0
        return 1
# Harvest a resource from source at a place using a tool
class harvest_resource(Goal):
    """Gather something from a given location, by using a tool on something."""

    def __init__(self, what, source, place, tool, range=30):
        Goal.__init__(self, "Gather a resource using a tool",
                      false,
                      [acquire_thing(tool),
                       move_me_area(place, range=range),
                       gather(what),
                       spot_something_in_area(source, location=place, range=range,
                                              condition=self.source_entity_condition, ),
                       move_me_to_focus(source),
                       self.do])
        self.what = what
        self.source = source
        self.place = place
        self.tool = tool
        self.range = range
        self.vars = ["what", "source", "place", "tool", "range"]

    def source_entity_condition(self, entity):
        """Accept harvest sources; plants only once they are clearly adult."""
        # Only chop down adult plants which have grown at least 1.1 times their
        # adult size (so that we give the trees some time to disperse seeds,
        # which they only do when they are adult).
        if not hasattr(entity, "sizeAdult"):
            return True
        entity_height = entity.location.bbox.high_corner.y
        return entity_height >= (entity.sizeAdult * 1.1)

    def do(self, me):
        """Wield the tool if necessary, then use it on the focused source."""
        if self.tool not in me.things:
            # The acquire_thing subgoal has not delivered the tool yet.
            return
        tool = me.find_thing(self.tool)[0]
        if not hasattr(me, 'right_hand_wield') or me.right_hand_wield != tool.id:
            # FIXME We need to sort out how to tell what one is wielding
            return Operation("wield", Entity(tool.id))
        target = me.get_knowledge('focus', self.source)
        if target is None:
            # No resource source has been spotted yet.
            return
        return Operation("use", Entity(target, objtype="obj"))
class plant_seeds(Goal):
    """Use a tool to plant a given kind of seed in a given location."""
    # Get a tool, move to area, look for seed, if found act on seed, if not
    # look for source, move near source. If neither seed nor source is found, roam.

    def __init__(self, seed, source, place, tool, range=30, spacing=4):
        # Two mangled source lines reconstructed here: the move_me_area
        # sub-goal and the entity_filter.Filter call had been corrupted.
        Goal.__init__(self, "Plant seed to grow plants",
                      false,
                      [acquire_thing(tool),
                       move_me_area(place, range=range),
                       spot_something_in_area(seed, place, range=range, seconds_until_forgotten=360),
                       move_me_to_focus(seed),
                       self.do,
                       spot_something_in_area(source, place, range=range),
                       move_me_near_focus(source, allowed_movement_radius=5),
                       clear_focus(source)])
        self.seed = seed
        self.source = source
        # Filter used to find already-grown sources near a candidate seed.
        self.source_filter = entity_filter.Filter(source)
        self.place = place
        self.tool = tool
        self.range = range
        self.spacing = spacing
        # Attributes reported when the goal is introspected/serialised.
        self.vars = ["seed", "source", "place", "tool", "range", "spacing"]

    def do(self, me):
        """Plant the focused seed with the tool, unless it is too crowded."""
        if self.tool not in me.things:
            # print "No tool"
            return
        tool = me.find_thing(self.tool)[0]
        if not hasattr(me, 'right_hand_wield') or me.right_hand_wield != tool.id:
            # FIXME We need to sort out how to tell seed one is wielding
            return Operation("wield", Entity(tool.id))
        id = me.get_knowledge('focus', self.seed)
        if id is None:
            return
        seed = me.map.get(id)
        if seed is None:
            me.remove_knowledge('focus', self.seed)
            return
        if seed.visible == False:
            me.remove_knowledge('focus', self.seed)
            return
        # Check that the seed isn't too close to other sources (to prevent us from planting too closely)
        sources_all = me.map.find_by_filter(self.source_filter)
        spacing_sqr = self.spacing * self.spacing
        for thing in sources_all:
            sqr_dist = square_distance(seed.location, thing.location)
            if sqr_dist < spacing_sqr:
                # We've found a source which is too close to the seed, so we'll not plant this one
                me.remove_knowledge('focus', self.seed)
                return
        return Operation("use", Entity(seed.id, objtype="obj"))
|
marcoantoniooliveira/labweb | oscar/forms/widgets.py | Python | bsd-3-clause | 7,781 | 0 | import re
import six
from six.moves import filter
from six.moves import map
import django
from django import forms
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.forms.util import flatatt
from django.forms.widgets import FileInput
from django.template import Context
from django.template.loader import render_to_string
from django.utils import formats
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class ImageInput(FileInput):
    """
    Widget providing a input element for file uploads based on the
    Django ``FileInput`` element. It hides the actual browser-specific
    input element and shows the available image for images that have
    been previously uploaded. Selecting the image will open the file
    dialog and allow for selecting a new or replacing image file.
    """
    template_name = 'partials/image_input_widget.html'
    # Restrict the browser file dialog to image MIME types.
    attrs = {'accept': 'image/*'}

    def render(self, name, value, attrs=None):
        """
        Render the ``input`` field based on the defined ``template_name``. The
        image URL is take from *value* and is provided to the template as
        ``image_url`` context variable relative to ``MEDIA_URL``. Further
        attributes for the ``input`` element are provide in ``input_attrs`` and
        contain parameters specified in *attrs* and *name*.

        If *value* contains no valid image URL an empty string will be provided
        in the context.
        """
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if not value or isinstance(value, InMemoryUploadedFile):
            # can't display images that aren't stored
            image_url = ''
        else:
            # Chained assignment on purpose: the URL is both passed to the
            # template and written back into the input's "value" attribute.
            image_url = final_attrs['value'] = force_text(
                self._format_value(value))
        return render_to_string(self.template_name, Context({
            'input_attrs': flatatt(final_attrs),
            'image_url': image_url,
            'image_id': "%s-image" % final_attrs['id'],
        }))
class WYSIWYGTextArea(forms.Textarea):
    """Textarea tagged with the 'wysiwyg' CSS class for the rich-text editor JS."""

    def __init__(self, *args, **kwargs):
        # Ensure an attrs dict exists, then append our marker class to
        # whatever classes the caller already supplied.
        attrs = kwargs.setdefault('attrs', {})
        attrs.setdefault('class', '')
        attrs['class'] += ' wysiwyg'
        super(WYSIWYGTextArea, self).__init__(*args, **kwargs)
def datetime_format_to_js_date_format(format):
    """
    Convert a Python datetime format to a date format suitable for use with JS
    date pickers

    Only the date directives are translated; a time component (``%H:%M``)
    is stripped. The result is whitespace-trimmed.
    """
    converted = format
    replacements = {
        '%Y': 'yy',
        '%m': 'mm',
        '%d': 'dd',
        '%H:%M': '',
    }
    # Plain .items() works on both Python 2 and 3 for this small mapping;
    # six.iteritems was an unnecessary dependency here.
    for search, replace in replacements.items():
        converted = converted.replace(search, replace)
    return converted.strip()
def datetime_format_to_js_time_format(format):
    """
    Convert a Python datetime format to a time format suitable for use with JS
    date pickers

    Date directives are removed, then any leftover date separator (``-`` or
    ``/`` followed by a non-directive character) is stripped out.
    """
    converted = format
    replacements = {
        '%Y': '',
        '%m': '',
        '%d': '',
        '%H': 'HH',
        '%M': 'mm',
    }
    # Plain .items() works on both Python 2 and 3; the replacements are
    # order-independent, so dict iteration order does not matter.
    for search, replace in replacements.items():
        converted = converted.replace(search, replace)
    converted = re.sub('[-/][^%]', '', converted)
    return converted.strip()
def add_js_formats(widget):
    """
    Set data attributes for date and time format on a widget
    """
    # The JS date/time picker reads these data attributes; both are derived
    # from the widget's Python strftime-style ``format`` string.
    attrs = {
        'data-dateFormat': datetime_format_to_js_date_format(
            widget.format),
        'data-timeFormat': datetime_format_to_js_time_format(
            widget.format)
    }
    widget.attrs.update(attrs)
class DatePickerInput(forms.DateInput):
    """
    DatePicker input that uses the jQuery UI datepicker. Data attributes are
    used to pass the date format to the JS
    """
    def __init__(self, *args, **kwargs):
        super(DatePickerInput, self).__init__(*args, **kwargs)
        # Attach data-dateFormat / data-timeFormat attributes for the picker.
        add_js_formats(self)
class DateTimePickerInput(forms.DateTimeInput):
    # Build a widget which uses the locale datetime format but without seconds.
    # We also use data attributes to pass these formats to the JS datepicker.
    def __init__(self, *args, **kwargs):
        # 'include_seconds' is consumed here; it must not reach the base class.
        include_seconds = kwargs.pop('include_seconds', False)
        super(DateTimePickerInput, self).__init__(*args, **kwargs)

        # Django 1.7+ has format default as 0
        if django.VERSION >= (1, 7):
            self.format = self.format or formats.get_format(self.format_key)[0]
        if self.format and not include_seconds:
            # Strip a ":%S" (or bare "%S") seconds directive from the format.
            self.format = re.sub(':?%S', '', self.format)
        add_js_formats(self)
class AdvancedSelect(forms.Select):
    """
    Customised Select widget that allows a list of disabled values to be passed
    to the constructor. Django's default Select widget doesn't allow this so
    we have to override the render_option method and add a section that checks
    for whether the widget is disabled.
    """

    def __init__(self, attrs=None, choices=(), disabled_values=()):
        # Normalise to text so membership tests match the rendered values.
        self.disabled_values = set(force_text(v) for v in disabled_values)
        super(AdvancedSelect, self).__init__(attrs, choices)

    def render_option(self, selected_choices, option_value, option_label):
        """Render one <option>, marking it disabled or selected as needed."""
        option_value = force_text(option_value)
        if option_value in self.disabled_values:
            selected_html = mark_safe(' disabled="disabled"')
        elif option_value in selected_choices:
            selected_html = mark_safe(' selected="selected"')
            if not self.allow_multiple_selected:
                # Only allow for a single selection.
                selected_choices.remove(option_value)
        else:
            selected_html = ''
        return format_html(u'<option value="{0}"{1}>{2}</option>',
                           option_value,
                           selected_html,
                           force_text(option_label))
class RemoteSelect(forms.Widget):
    """
    Somewhat reusable widget that allows AJAX lookups in combination with
    select2.

    Requires setting the URL of a lookup view either as class attribute or when
    constructing
    """
    is_multiple = False
    css = 'select2 input-xlarge'
    lookup_url = None

    def __init__(self, *args, **kwargs):
        if 'lookup_url' in kwargs:
            self.lookup_url = kwargs.pop('lookup_url')
        if self.lookup_url is None:
            raise ValueError(
                "RemoteSelect requires a lookup URL")
        super(RemoteSelect, self).__init__(*args, **kwargs)

    def format_value(self, value):
        """Return the value as text; None/falsy becomes the empty string."""
        return six.text_type(value or '')

    def value_from_datadict(self, data, files, name):
        """Extract the submitted value, preserving None for a missing field."""
        value = data.get(name, None)
        if value is None:
            return value
        else:
            return six.text_type(value)

    def render(self, name, value, attrs=None, choices=()):
        # Rendered as a hidden input; select2 reads the data-* attributes
        # to drive the AJAX lookup UI.
        attrs = self.build_attrs(attrs, **{
            'type': 'hidden',
            'class': self.css,
            'name': name,
            'data-ajax-url': self.lookup_url,
            'data-multiple': 'multiple' if self.is_multiple else '',
            'value': self.format_value(value),
            'data-required': 'required' if self.is_required else '',
        })
        return mark_safe(u'<input %s>' % flatatt(attrs))
class MultipleRemoteSelect(RemoteSelect):
    """Multi-valued variant of RemoteSelect; values travel as a comma list."""
    is_multiple = True
    css = 'select2 input-xxlarge'

    def format_value(self, value):
        # Serialise the truthy entries as a comma-separated text string.
        if not value:
            return ''
        return ','.join(six.text_type(v) for v in value if v)

    def value_from_datadict(self, data, files, name):
        # Split the submitted comma list back into a list, dropping empties.
        raw = data.get(name, None)
        if raw is None:
            return []
        return [part for part in raw.split(',') if part]
|
sinkpoint/pynrrd | pynrrd/nrrd.py | Python | mit | 11,390 | 0.009745 | import numpy as np
from collections import OrderedDict
import os.path as path
import gzip
# Author: David Qixiang Chen
# email: qixiang.chen@gmail.com
#
# utility nrrd header reader for .nhdr
class NrrdHeader:
def fromNiftiHeader(nifti_header):
header = NrrdHeader()
aff = nifti_header.get_best_affine()
header.correctSpaceRas()
header.setValue('space directions', aff[:3,:3])
header.setValue('space origin', aff[:3,3])
header.setValue('sizes', nifti_header.get_data_shape())
header.setValue('dimension', str(len(nifti_header.get_data_shape())))
return header
fromNiftiHeader = staticmethod(fromNiftiHeader)
b0num = 0
def __init__ (self, dic=None):
# default headers
self._data = OrderedDict({
"header": "NRRD0004",
"type": "double",
"dimension": "3",
"space": "left-posterior-superior",
"sizes": [0,0,0],
"space directions": [[1, 0.0, 0.0],[0.0, -1, 0.0], [0.0, 0.0, 1]],
"kinds": ["domain", "domain", "domain"],
"endian": "little",
"encoding": "gzip",
"space origin": [0, 0, 0]
})
if dic:
self._data = dic
if dic.has_key('b0num'):
self.b0num = self._data['b0num']
def getAffine(self):
affine = np.eye(4)
affine[:3,:3] = np.sign(self['space directions'][:3])
affine[:3,3] = self['space origin']
return affine
def getDwiGradients(self):
return self._data['DWMRI_gradient']
def setDwiGradients(self, vec):
self._data['DWMRI_gradient'] = vec
def setValue(self,key, val):
self._data[key] = val
def __getitem__(self, key):
if key in self._data:
return self._data[key]
return False
def __setitem__(self, key, val):
self._data[key] = val
def getKeys(self):
return self._data.keys()
def isDTMR(self):
return self.b0num > 0
def getBval(self):
return float(self['DWMRI_b-value'])
def getBvals(self):
b0 = self.getBval()
bvals = np.repeat(float(b0), len(self.getDwiGradients()))
return bvals
def correctSpaceRas(self):
def print_info(self):
spaceraw = self['space']
space = spaceraw.split('-')
print 'space:',space
origin = self['space origin']
print 'origin:',origin
directions = self['space directions']
print 'space directions:',directions
frame = np.array(self['measurement frame'])
print 'measurement frame:',frame
# don't invert dwi vectors, slicer doesn't do this internally
#dwivec = self.getDwiGradients()
print '==============before============'
print_info(self)
spaceraw = self['space']
space = spaceraw.split('-')
origin = self['space origin']
directions = self['space directions']
frame = np.array(self['measurement frame'])
nospace = directions.index(np.nan)
directions.remove(np.nan)
dvec = np.array([directions])
if space[0] == 'left':
print '---------------------------'
print 'invert left to right'
space[0] = 'right'
origin[0] *= -1
dvec[:,0] *= -1
frame[:,0] *= -1
#dwivec[:,0] *= -1
if space[1] == 'posterior':
print '---------------------------'
print 'invert posterior to anterior'
space[1] = 'anterior'
origin[1] *= -1
dvec[:,1] *= -1
frame[:,1] *= -1
#dwivec[:,1] *= -1
if space[1] == 'inferior':
print '---------------------------'
print 'invert inferior to superior'
space[2] = 'superior'
origin[2] *= -1
dvec[:,2] *= -1
frame[:,2] *= -1
#dwivec[:,2] *= -1
self.setValue('space','-'.join(space))
dvec = np.insert(dvec.astype(np.object),nospace,np.nan,1).tolist()[0]
dvec[nospace] = np.nan
| self.setValue('space dir | ections',dvec)
self.setValue('measurement frame', frame.tolist())
#self.setDwiGradients(dwivec)
print '==============after============'
print_info(self)
#dwivec = self.getDwiGradients()
#print dwivec
class NrrdReader:
grdkey = 'DWMRI_gradient'
b0num = 'b0num'
def load(self, filename, get_raw=False):
filedir = path.dirname(path.abspath(filename))
TFILE = open(filename, 'r')
strbuf =""
bindata = None
if filename.find("nrrd") > -1:
data = TFILE.read().split("\n\n",1)
strbuf = data[0].split('\n')
bindata = data[1]
else:
strbuf = TFILE.read().split('\n')
TFILE.close()
params = OrderedDict()
for line in strbuf:
if len(line) == 0:
break;
line = line.strip()
if line == '':
continue
if line.startswith('#'):
continue
if line.startswith('NRRD'):
params['header'] = line
else:
key,val = line.split(':')
key = key.strip()
if key=='space' and params.has_key('space') and params['space']!='':
continue
val = val.replace('=','').strip()
if key == 'sizes':
val = self.getVals(val, 'int')
elif key.startswith(self.grdkey):
val = self.getVals(val, 'float')
elif key.startswith('space origin'):
val = self.getVals(val, 'float')
elif key.startswith('space directions'):
val = self.getVals(val, 'float')
elif key.startswith('measurement frame'):
val = self.getVals(val, 'float')
elif key.startswith('data file'):
val = val.strip()
else:
val = self.getVals(val)
if key.startswith(self.grdkey):
if not params.has_key(self.grdkey):
params[self.grdkey] = []
if not params.has_key(self.b0num):
params[self.b0num] = 0
params[self.grdkey].append(val)
if val[0] == 0 and val[1] == 0 and val[2] == 0:
#only want one b0 line, very hacky
#if params[self.b0num] < 1:
# params[self.grdkey].append(val)
params[self.b0num] += 1
#else:
# params[self.grdkey].append(val)
else:
params[key]= val
if (params.has_key(self.grdkey)):
dwivec = params[self.grdkey]
params[self.grdkey] = dwivec
params = NrrdHeader(params)
if not get_raw:
if not bindata and params['data file']:
bin_file = params['data file']
r_func = open
if params['encoding'] == 'gzip':
r_func = gzip.open
with r_func(path.join(filedir, bin_file), 'rb') as fp:
bindata = fp.read()
elif bindata and params['encoding'] == 'gzip':
from StringIO import StringIO
bindata = gzip.GzipFile(fileobj=StringIO(bindata)).read()
if bindata:
type_val = params['type']
if type_val == 'short':
m_type = np.short
elif type_val == 'float':
m_type = np.float32
elif type_val == 'double':
m_type = np.double
a = np.fromstring(bindata, dtype=m_type)
bindata = np.res |
google-research/google-research | kws_streaming/layers/random_stretch_squeeze_test.py | Python | apache-2.0 | 1,962 | 0.002039 | # coding= | utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for RandomStretchSqueeze data augmentation."""
import numpy as np
from kws_streaming.layers import random_stretch_squeeze
from kws_streaming.layers import test_utils
from kws_streaming.layers.compat import tf
from kws_streaming.layers.compat import tf1
tf1.disable_eager_execution()
class RandomStretchSqueezeTest(tf.test.TestCase):
    """Checks RandomStretchSqueeze time-warps audio as expected."""

    def setUp(self):
        super(RandomStretchSqueezeTest, self).setUp()
        self.input_shape = [2, 7]  # [batch, audio_sequence]
        # Fixed seed so the random resample offsets are reproducible; the
        # expected arrays below depend on this exact seed.
        self.seed = 5

    def test_random_stretch_squeeze(self):
        """One batch element gets squeezed, the other stretched."""
        test_utils.set_seed(self.seed)
        # Rectangular pulse in the middle of each sequence.
        audio = np.zeros(self.input_shape)
        audio[:, 2:5,] = 1
        inputs = tf.keras.layers.Input(
            shape=self.input_shape[1:],
            batch_size=self.input_shape[0],
            dtype=tf.float32)
        outputs = random_stretch_squeeze.RandomStretchSqueeze(
            resample_offset=0.5,
            seed=self.seed)(
                inputs, training=True)
        model = tf.keras.models.Model(inputs, outputs)
        prediction = model.predict(audio)

        # confirm that data are squeezed
        target0 = np.array([0., 0., 1., 1., 0., 0., 0.])
        self.assertAllClose(prediction[0, :], target0)

        # confirm that data are stretched
        target1 = np.array([0., 0.44444, 1., 1., 1., 0.44444, 0.])
        self.assertAllClose(prediction[1, :], target1, atol=1e-4)
if __name__ == "__main__":
tf.test.main()
|
dmaccarthy/sc8pr | sc8pr/misc/progress.py | Python | gpl-3.0 | 1,655 | 0.001813 | # Copyright 2015-2018 D.G. MacCarthy <http://dmaccarthy.github.io>
#
# This file is part of "sc8pr".
#
# "sc8pr" is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# "sc8pr" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "sc8pr". If not, see <http://www.gnu.org/licenses/>.
from sc8pr import Canvas, Image, TOPLEFT, BOTTOMLEFT
from sc8pr.util import tall
class ProgressBar(Canvas):
    """Canvas that displays a progress value as a growing filled bar."""

    def __init__(self, size=(128,16), color="grey", lower=0, upper=1):
        super().__init__(size)
        # A tall bar grows upward from the bottom edge; a wide bar grows
        # rightward from the top-left corner.
        if tall(*size):
            cfg = dict(anchor=BOTTOMLEFT, pos=(0, size[1] - 1))
        else:
            cfg = dict(anchor=TOPLEFT)
        self += Image(bg=color).config(**cfg)
        self.lower = lower
        self.upper = upper
        self.value = lower

    @property
    def value(self):
        """Current progress value, clamped to [lower, upper]."""
        return self._val

    @value.setter
    def value(self, val):
        """Change the current value and resize the bar image to match."""
        clamped = min(self.upper, max(self.lower, val))
        self._val = clamped
        vertical = tall(*self.size)
        fraction = (clamped - self.lower) / (self.upper - self.lower)
        # Keep at least one pixel so the bar never disappears entirely.
        extent = max(1, round(fraction * self.size[vertical]))
        if vertical:
            new_size = (self.width, extent)
        else:
            new_size = (extent, self.height)
        self[0].config(size=new_size)
|
dreal/dreal4 | dreal/test/python/solver_test.py | Python | apache-2.0 | 5,260 | 0 | # -*- coding: utf-8 -*-
#
# Copyright 2017 Toyota Research Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from dreal import (Config, Variable, Context, Logic, cos, sin)
import unittest
class ConfigTest(unittest.TestCase):
    """Round-trip checks for the getter/setter pairs on dreal.Config."""

    def test_precision(self):
        c = Config()
        c.precision = 0.0001
        self.assertEqual(c.precision, 0.0001)
        c.precision = 0.01
        self.assertEqual(c.precision, 0.01)

    def test_use_polytope(self):
        c = Config()
        c.use_polytope = False
        self.assertFalse(c.use_polytope)
        c.use_polytope = True
        self.assertTrue(c.use_polytope)

    def test_use_polytope_in_forall(self):
        c = Config()
        c.use_polytope_in_forall = False
        self.assertFalse(c.use_polytope_in_forall)
        c.use_polytope_in_forall = True
        self.assertTrue(c.use_polytope_in_forall)

    def test_use_worklist_fixpoint(self):
        c = Config()
        c.use_worklist_fixpoint = False
        self.assertFalse(c.use_worklist_fixpoint)
        c.use_worklist_fixpoint = True
        self.assertTrue(c.use_worklist_fixpoint)

    def test_use_local_optimization(self):
        c = Config()
        c.use_local_optimization = False
        self.assertFalse(c.use_local_optimization)
        c.use_local_optimization = True
        self.assertTrue(c.use_local_optimization)
# Symbolic variables shared by the test cases below.
x = Variable("x")
y = Variable("y")
z = Variable("z")
class ContextTest(unittest.TestCase):
    """End-to-end checks of the dreal Context API (assert/check-sat/push/pop)."""

    def test_sat(self):
        """A satisfiable system yields a model with the expected midpoints."""
        ctx = Context()
        ctx.SetLogic(Logic.QF_NRA)
        ctx.DeclareVariable(x, -10, 10)
        ctx.DeclareVariable(y, -10, 10)
        ctx.Assert(x == y + 5)
        ctx.Assert(y == 2)
        result = ctx.CheckSat()
        self.assertTrue(result)
        # result maps each variable to an interval; mid() is its midpoint.
        self.assertEqual(result[y].mid(), 2)
        self.assertEqual(result[x].mid(), 2 + 5)
        ctx.Exit()

    def test_unsat(self):
        """Contradictory constraints produce an unsat (falsy) result."""
        ctx = Context()
        ctx.SetLogic(Logic.QF_NRA)
        # Fresh local variables deliberately shadow the module-level x/y.
        x = Variable("x")
        y = Variable("y")
        ctx.DeclareVariable(x, -10, 10)
        ctx.DeclareVariable(y, -10, 10)
        ctx.Assert(x == y + 5)
        ctx.Assert(y == x - 3)
        result = ctx.CheckSat()
        self.assertFalse(result)
        ctx.Exit()

    def test_push_pop(self):
        """Pop removes assertions added after the matching Push."""
        ctx = Context()
        ctx.SetLogic(Logic.QF_NRA)
        x = Variable("x")
        y = Variable("y")
        ctx.DeclareVariable(x)
        ctx.SetInterval(x, -10, 10)
        ctx.DeclareVariable(y, -10, 10)
        ctx.Assert(x == y + 5)
        ctx.Push(1)
        ctx.Assert(y == x - 3)
        result = ctx.CheckSat()
        self.assertFalse(result)
        ctx.Pop(1)
        result = ctx.CheckSat()
        self.assertTrue(result)
        ctx.Exit()
        # The last box stays accessible even after Exit().
        self.assertTrue(ctx.box)

    def test_config(self):
        """Config obtained from a context is live: mutations are visible."""
        ctx = Context()
        config1 = ctx.config
        self.assertEqual(config1.precision, 0.001)
        config1.precision = 0.0001
        self.assertEqual(ctx.config.precision, 0.0001)

    # TODO(soonho): Enable this by adding python binding for DynamicBitset
    # def test_brancher(self):
    #     self.branch_variables = []
    #     def MyBrancher(box, active_set, left, right):
    #         def FindMaxDiam(box, active_set):
    #             """Returns the dimension with the largest diameter"""
    #             max_diam = 0.0
    #             max_diam_idx = -1
    #             for var_i in active_set:
    #                 iv_i = box[var_i]
    #                 diam_i = iv_i.diam()
    #                 if diam_i > max_diam and iv_i.is_bisectable():
    #                     max_diam = diam_i
    #                     max_diam_idx = var_i
    #             return max_diam_idx
    #         branching_dim = FindMaxDiam(box, active_set)
    #         if branching_dim >= 0:
    #             (b1, b2) = box.bisect(branching_dim)
    #             left.set(b1)
    #             right.set(b2)
    #             self.branch_variables.append(box.variable(branching_dim))
    #             return branching_dim
    #         return -1
    #     ctx = Context()
    #     ctx.config.brancher = MyBrancher
    #     ctx.SetLogic(Logic.QF_NRA)
    #     ctx.DeclareVariable(x, -10, 10)
    #     ctx.DeclareVariable(y, -10, 10)
    #     ctx.Assert(cos(x) < sin(y))
    #     result = ctx.CheckSat()
    #     self.assertTrue(result)
    #     self.assertEqual(self.branch_variables, [x, y, x, y, x, y, x])
    #     ctx.Exit()

    def test_version(self):
        # Simply check if we can do this without checking the version
        # string.
        self.assertTrue(Context.version)
if __name__ == '__main__':
unittest.main()
|
dedoogong/asrada | HandPose_Detector/general.py | Python | apache-2.0 | 29,504 | 0.003186 | #
# ColorHandPose3DNetwork - Network for estimating 3D Hand Pose from a single RGB Image
# Copyright (C) 2017 Christian Zimmermann
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function, unicode_literals
import tensorflow as tf
from tensorflow.python import pywrap_tensorflow
import numpy as np
import math
import cv2
class NetworkOps(object):
    """ Operations that are frequently used within networks (TF1 graph mode). """
    neg_slope_of_relu = 0.01

    @classmethod
    def leaky_relu(cls, tensor, name='relu'):
        """Leaky ReLU using the class-wide negative slope."""
        out_tensor = tf.maximum(tensor, cls.neg_slope_of_relu*tensor, name=name)
        return out_tensor

    @classmethod
    def conv(cls, in_tensor, layer_name, kernel_size, stride, out_chan, trainable=True):
        """2D convolution (SAME padding) plus bias, inside its own variable scope."""
        with tf.variable_scope(layer_name):
            in_size = in_tensor.get_shape().as_list()
            strides = [1, stride, stride, 1]
            kernel_shape = [kernel_size, kernel_size, in_size[3], out_chan]

            # conv
            kernel = tf.get_variable('weights', kernel_shape, tf.float32,
                                     tf.contrib.layers.xavier_initializer_conv2d(), trainable=trainable, collections=['wd', 'variables', 'filters'])
            tmp_result = tf.nn.conv2d(in_tensor, kernel, strides, padding='SAME')

            # bias
            biases = tf.get_variable('biases', [kernel_shape[3]], tf.float32,
                                     tf.constant_initializer(0.0001), trainable=trainable, collections=['wd', 'variables', 'biases'])
            out_tensor = tf.nn.bias_add(tmp_result, biases, name='out')
            return out_tensor

    @classmethod
    def conv_relu(cls, in_tensor, layer_name, kernel_size, stride, out_chan, trainable=True):
        """Convolution followed by leaky ReLU."""
        tensor = cls.conv(in_tensor, layer_name, kernel_size, stride, out_chan, trainable)
        out_tensor = cls.leaky_relu(tensor, name='out')
        return out_tensor

    @classmethod
    def max_pool(cls, bottom, name='pool'):
        """2x2 max pooling with stride 2 and VALID padding."""
        pooled = tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
                                padding='VALID', name=name)
        return pooled

    @classmethod
    def upconv(cls, in_tensor, layer_name, output_shape, kernel_size, stride, trainable=True):
        """Transposed convolution with a bilinear-initialised kernel plus bias."""
        with tf.variable_scope(layer_name):
            in_size = in_tensor.get_shape().as_list()
            kernel_shape = [kernel_size, kernel_size, in_size[3], in_size[3]]
            strides = [1, stride, stride, 1]

            # conv
            kernel = cls.get_deconv_filter(kernel_shape, trainable)
            tmp_result = tf.nn.conv2d_transpose(value=in_tensor, filter=kernel, output_shape=output_shape,
                                                strides=strides, padding='SAME')

            # bias
            biases = tf.get_variable('biases', [kernel_shape[2]], tf.float32,
                                     tf.constant_initializer(0.0), trainable=trainable, collections=['wd', 'variables', 'biases'])
            out_tensor = tf.nn.bias_add(tmp_result, biases)
            return out_tensor

    @classmethod
    def upconv_relu(cls, in_tensor, layer_name, output_shape, kernel_size, stride, trainable=True):
        """Transposed convolution followed by leaky ReLU."""
        tensor = cls.upconv(in_tensor, layer_name, output_shape, kernel_size, stride, trainable)
        out_tensor = cls.leaky_relu(tensor, name='out')
        return out_tensor

    @staticmethod
    def get_deconv_filter(f_shape, trainable):
        """Kernel variable initialised to bilinear upsampling weights."""
        width = f_shape[0]
        height = f_shape[1]
        f = math.ceil(width/2.0)
        c = (2 * f - 1 - f % 2) / (2.0 * f)
        bilinear = np.zeros([f_shape[0], f_shape[1]])
        for x in range(width):
            for y in range(height):
                value = (1 - abs(x / f - c)) * (1 - abs(y / f - c))
                bilinear[x, y] = value
        # Identity mapping across channels: channel i upsamples to channel i.
        weights = np.zeros(f_shape)
        for i in range(f_shape[2]):
            weights[:, :, i, i] = bilinear
        init = tf.constant_initializer(value=weights,
                                       dtype=tf.float32)
        return tf.get_variable(name="weights", initializer=init,
                               shape=weights.shape, trainable=trainable, collections=['wd', 'variables', 'filters'])

    @staticmethod
    def fully_connected(in_tensor, layer_name, out_chan, trainable=True):
        """Dense layer with NaN/Inf checks on weights and biases."""
        with tf.variable_scope(layer_name):
            in_size = in_tensor.get_shape().as_list()
            assert len(in_size) == 2, 'Input to a fully connected layer must be a vector.'
            weights_shape = [in_size[1], out_chan]

            # weight matrix
            weights = tf.get_variable('weights', weights_shape, tf.float32,
                                      tf.contrib.layers.xavier_initializer(), trainable=trainable)
            weights = tf.check_numerics(weights, 'weights: %s' % layer_name)

            # bias (the check_numerics argument below was mangled in the
            # original source: "b | iases")
            biases = tf.get_variable('biases', [out_chan], tf.float32,
                                     tf.constant_initializer(0.0001), trainable=trainable)
            biases = tf.check_numerics(biases, 'biases: %s' % layer_name)

            out_tensor = tf.matmul(in_tensor, weights) + biases
            return out_tensor

    @classmethod
    def fully_connected_relu(cls, in_tensor, layer_name, out_chan, trainable=True):
        """Dense layer followed by leaky ReLU."""
        tensor = cls.fully_connected(in_tensor, layer_name, out_chan, trainable)
        out_tensor = tf.maximum(tensor, cls.neg_slope_of_relu*tensor, name='out')
        return out_tensor

    @staticmethod
    def dropout(in_tensor, keep_prob, evaluation):
        """ Dropout: Each neuron is dropped independently. """
        with tf.variable_scope('dropout'):
            tensor_shape = in_tensor.get_shape().as_list()
            # During evaluation keep_prob is forced to 1.0 (no dropout).
            out_tensor = tf.cond(evaluation,
                                 lambda: tf.nn.dropout(in_tensor, 1.0,
                                                       noise_shape=tensor_shape),
                                 lambda: tf.nn.dropout(in_tensor, keep_prob,
                                                       noise_shape=tensor_shape))
            return out_tensor

    @staticmethod
    def spatial_dropout(in_tensor, keep_prob, evaluation):
        """ Spatial dropout: Not each neuron is dropped independently, but feature map wise. """
        with tf.variable_scope('spatial_dropout'):
            tensor_shape = in_tensor.get_shape().as_list()
            out_tensor = tf.cond(evaluation,
                                 lambda: tf.nn.dropout(in_tensor, 1.0,
                                                       noise_shape=tensor_shape),
                                 lambda: tf.nn.dropout(in_tensor, keep_prob,
                                                       noise_shape=[tensor_shape[0], 1, 1, tensor_shape[3]]))
            return out_tensor
def crop_image_from_xy(image, crop_location, crop_size, scale=1.0):
"""
Crops an image. When factor is not given does an central crop.
Inputs:
image: 4D tensor, [batch, height, width, channels] which will be cropped in height and width dimension
crop_location: tensor, [batch, 2] which represent the height and width location of the crop
crop_size: int, describes the extension of the crop
Outputs:
image_crop: 4D tensor, [batch, crop_size, crop_size, channels]
"""
with tf.name_scope('crop_image_from_xy'):
s = image.get_shape().as_list()
print('image.get_shape() : ', image.get_shape())
assert len(s) == 4, "Image needs to be of shape [batch, width, height, channel]"
scale = tf.reshape(scale, [-1] |
mitodl/open-discussions | channels/conftest.py | Python | bsd-3-clause | 228 | 0 | """Test config for channels"""
import pytest
@pytest.fixture(autouse=True)
def mock_search_tasks(mocker):
    """Patch the search task helpers so tests don't fire real celery tasks."""
    # autouse: applied to every test in this package automatically.
    return mocker.patch("channels.api.search_task_helpers")
|
yannrouillard/weboob | weboob/applications/handjoob/handjoob.py | Python | agpl-3.0 | 4,944 | 0.000809 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import sys
from weboob.capabilities.job import ICapJob
from weboob.tools.application.repl import ReplApplication, defaultcount
from weboob.tools.application.formatters.iformatter import IFormatter, PrettyFormatter
__all__ = ['Handjoob']
class JobAdvertFormatter(IFormatter):
    """Formatter printing the full details of a single job advert."""

    MANDATORY_FIELDS = ('id', 'url', 'publication_date', 'title')

    def format_obj(self, obj, alias):
        # Title (bold) and URL always appear; every other field is emitted
        # only when the attribute exists and is non-empty.
        parts = [u'%s%s%s\n' % (self.BOLD, obj.title, self.NC)]
        parts.append('url: %s\n' % obj.url)
        if hasattr(obj, 'publication_date') and obj.publication_date:
            parts.append('Publication date : %s\n' % obj.publication_date.strftime('%Y-%m-%d'))
        if hasattr(obj, 'place') and obj.place:
            parts.append('Location: %s\n' % obj.place)
        if hasattr(obj, 'society_name') and obj.society_name:
            parts.append('Society : %s\n' % obj.society_name)
        if hasattr(obj, 'job_name') and obj.job_name:
            parts.append('Job name : %s\n' % obj.job_name)
        if hasattr(obj, 'contract_type') and obj.contract_type:
            parts.append('Contract : %s\n' % obj.contract_type)
        if hasattr(obj, 'pay') and obj.pay:
            parts.append('Pay : %s\n' % obj.pay)
        if hasattr(obj, 'formation') and obj.formation:
            parts.append('Formation : %s\n' % obj.formation)
        if hasattr(obj, 'experience') and obj.experience:
            parts.append('Experience : %s\n' % obj.experience)
        if hasattr(obj, 'description') and obj.description:
            parts.append('Description : %s\n' % obj.description)
        return ''.join(parts)
class JobAdvertListFormatter(PrettyFormatter):
    """Formatter printing a compact, indented summary per job advert."""

    MANDATORY_FIELDS = ('id', 'title')

    def get_title(self, obj):
        return '%s' % (obj.title)

    def get_description(self, obj):
        # Optional details, one per tab-indented line.
        details = []
        if hasattr(obj, 'publication_date') and obj.publication_date:
            details.append('\tPublication date : %s\n' % obj.publication_date.strftime('%Y-%m-%d'))
        if hasattr(obj, 'place') and obj.place:
            details.append('\tLocation: %s\n' % obj.place)
        if hasattr(obj, 'society_name') and obj.society_name:
            details.append('\tSociety : %s\n' % obj.society_name)
        if hasattr(obj, 'contract_type') and obj.contract_type:
            details.append('\tContract : %s\n' % obj.contract_type)
        # Trim the surrounding tab/newline padding; the base formatter
        # supplies its own layout.
        return u''.join(details).strip('\n\t')
class Handjoob(ReplApplication):
    # REPL application exposing job-search commands; the do_* docstrings are
    # shown as command help by the REPL framework. (Python 2 module.)
    APPNAME = 'handjoob'
    VERSION = '0.i'
    COPYRIGHT = 'Copyright(C) 2012 Bezleputh'
    DESCRIPTION = "Console application to search for a job."
    SHORT_DESCRIPTION = "search for a job"
    CAPS = ICapJob
    EXTRA_FORMATTERS = {'job_advert_list': JobAdvertListFormatter,
                        'job_advert': JobAdvertFormatter,
                        }
    COMMANDS_FORMATTERS = {'search': 'job_advert_list',
                           'ls': 'job_advert_list',
                           'info': 'job_advert',
                           }

    @defaultcount(10)
    def do_search(self, pattern):
        """
        search PATTERN
        Search for an advert matching a PATTERN.
        """
        self.change_path([u'search'])
        self.start_format(pattern=pattern)
        for backend, job_advert in self.do('search_job', pattern):
            self.cached_format(job_advert)

    @defaultcount(10)
    def do_ls(self, line):
        """
        advanced search
        Search for an advert matching to advanced filters.
        """
        self.change_path([u'advanced'])
        for backend, job_advert in self.do('advanced_search_job'):
            self.cached_format(job_advert)

    def complete_info(self, text, line, *ignored):
        # Tab-completion for the 'info' command: complete the object id.
        args = line.split(' ')
        if len(args) == 2:
            return self._complete_object()

    def do_info(self, _id):
        """
        info ID
        Get information about an advert.
        """
        # The two stderr prints below were mangled in the original source
        # and have been reconstructed.
        if not _id:
            print >>sys.stderr, 'This command takes an argument: %s' % self.get_command_help('info', short=True)
            return 2
        job_advert = self.get_object(_id, 'get_job_advert')
        if not job_advert:
            print >>sys.stderr, 'Job advert not found: %s' % _id
            return 3
        self.start_format()
        self.format(job_advert)
|
dnjohnstone/hyperspy | hyperspy/tests/utils/test_eds.py | Python | gpl-3.0 | 2,070 | 0.005314 | # -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from hyperspy.misc.eds.utils import get_xray_lines_near_energy, take_off_angle
def test_xray_lines_near_energy():
E = 1.36
lines = get_xray_lines_near_energy(E)
assert (
lines ==
['Pm_M2N4', 'Ho_Ma', 'Eu_Mg', 'Se_La', 'Br_Ln', 'W_Mz', 'As_Lb3',
'Kr_Ll', 'Ho_Mb', 'Ta_Mz', 'Dy_Mb', 'As_Lb1', 'Gd_Mg', 'Er_Ma',
'Sm_M2N4', 'Mg_Kb', 'Se_Lb1', 'Ge_Lb3', 'Br_Ll', 'Sm_Mg', 'Dy_Ma',
'Nd_M2N4', 'As_La', 'Re_Mz', 'Hf_Mz', 'Kr_Ln', 'Er_Mb', 'Tb_Mb'])
lines = get_xray_lines_near_energy(E, 0.02)
assert lines == ['Pm_M2N4']
E = 5.4
lines = get_xray_lines_near_energy(E)
assert (
lines ==
['Cr_Ka', 'La_Lb2', 'V_Kb', 'Pm_La', 'Pm_Ln', 'Ce_Lb3', 'Gd_Ll',
'Pr_Lb1', 'Xe_Lg3', 'Pr_Lb4'])
lines = get_xray_lines_near_energy(E, only_lines=('a', 'b'))
assert ( |
lines ==
['Cr_Ka', ' | V_Kb', 'Pm_La', 'Pr_Lb1'])
lines = get_xray_lines_near_energy(E, only_lines=('a'))
assert (
lines ==
['Cr_Ka', 'Pm_La'])
def test_takeoff_angle():
np.testing.assert_allclose(40.,take_off_angle(30.,0.,10.))
np.testing.assert_allclose(40.,take_off_angle(0.,90.,10.,beta_tilt=30.))
np.testing.assert_allclose(73.15788376370121,take_off_angle(45.,45.,45.,
45.))
|
xzturn/caffe2 | caffe2/experiments/python/convnet_benchmarks.py | Python | apache-2.0 | 19,876 | 0.00005 | ## @package convnet_benchmarks
# Module caffe2.experiments.python.convnet_benchmarks
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
Benchmark for common convnets.
(NOTE: Numbers below prior with missing parameter=update step, TODO to update)
Speed on Titan X, with 10 warmup steps and 10 main steps and with different
versions of cudnn, are as follows (time reported below is per-batch time,
forward / forward+backward):
CuDNN V3 CuDNN v4
AlexNet 32.5 / 108.0 27.4 / 90.1
OverFeat 113.0 / 342.3 91.7 / 276.5
Inception 134.5 / 485.8 125.7 / 450.6
VGG (batch 64) 200.8 / 650.0 164.1 / 551.7
Speed on Inception with varied batch sizes and CuDNN v4 is as follows:
Batch Size Speed per batch Speed per image
16 22.8 / 72.7 1.43 / 4.54
32 38.0 / 127.5 1.19 / 3.98
64 67.2 / 233.6 1.05 / 3.65
128 125.7 / 450.6 0.98 / 3.52
Speed on Tesla M40, which 10 warmup steps and 10 main steps and with cudnn
v4, is as follows:
AlexNet 68.4 / 218.1
OverFeat 210.5 / 630.3
Inception 300.2 / 1122.2
VGG (batch 64) 405.8 / 1327.7
(Note that these numbers involve a "full" backprop, i.e. the gradient
with respect to the input image is also computed.)
To get the numbers, simply run:
for MODEL in AlexNet OverFeat Inception; do
PYTHONPATH=../gen:$PYTHONPATH python convnet_benchmarks.py \
--batch_size 128 --model $MODEL --forward_only True
done
for MODEL in AlexNet OverFeat Inception; do
PYTHONPATH=../gen:$PYTHONPATH python convnet_benchmarks.py \
--batch_size 128 --model $MODEL
done
PYTHONPATH=../gen:$PYTHONPATH python convnet_benchmarks.py \
--batch_size 64 --model VGGA --forward_only True
PYTHONPATH=../gen:$PYTHONPATH python convnet_benchmarks.py \
--batch_size 64 --model VGGA
for BS in 16 32 64 128; do
PYTHONPATH=../gen:$PYTHONPATH python convnet_benchmarks.py \
--batch_size $BS --model Inception --forward_only True
PYTHONPATH=../gen:$PYTHONPATH python convnet_benchmarks.py \
--batch_size $BS --model Inception
done
Note that VGG needs to be run at batch 64 due to memory limit on the backward
pass.
"""
import argparse
import time
from caffe2.python import cnn, workspace, core
import caffe2.python.SparseTransformer as SparseTransformer
def MLP(order):
model = cnn.CNNModelHelper()
d = 256
depth = 20
width = 3
for i in range(depth):
for j in range(width):
current = "fc_{}_{}".format(i, j) if i > 0 else "data"
next_ = "fc_{}_{}".format(i + 1, j)
model.FC(
current, next_,
dim_in=d, dim_out=d,
weight_init=model.XavierInit,
bias_init=model.XavierInit)
model.Sum(["fc_{}_{}".format(depth, j)
for j in range(width)], ["sum"])
model.FC("sum", "last",
dim_in=d, dim_out=1000,
weight_init=model.XavierInit,
bias_init=model.XavierInit)
xent = model.LabelCrossEntropy(["last", "label"], "xent")
model.AveragedLoss(xent, "loss")
return model, d
def AlexNet(order):
model = cnn.CNNModelHelper(order, name="alexnet",
use_cudnn=True, cudnn_exhaustive_search=True)
conv1 = model.Conv(
"data",
"conv1",
3,
64,
11,
('XavierFill', {}),
('ConstantFill', {}),
stride=4,
pad=2
)
relu1 = model.Relu(conv1, "conv1")
pool1 = model.MaxPool(relu1, "pool1", kernel=3, stride=2)
conv2 = model.Conv(
pool1,
"conv2",
64,
192,
5,
('XavierFill', {}),
('ConstantFill', {}),
pad=2
)
relu2 = model.Relu(conv2, "conv2")
pool2 = model.MaxPool(relu2, "pool2", kernel=3, stride=2)
conv3 = model.Conv(
pool2,
"conv3",
192,
384,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu3 = model.Relu(conv3, "conv3")
conv4 = model.Conv(
relu3,
"conv4",
384,
256,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu4 = model.Relu(conv4, "conv4")
conv5 = model.Conv(
relu4,
"conv5",
256,
256,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu5 = model.Relu(conv5, "conv5")
pool5 = model.MaxPool(relu5, "pool5", kernel=3, stride=2)
fc6 = model.FC(
pool5, "fc6", 256 * 6 * 6, 4096, ('XavierFill', {}),
('ConstantFill', {})
)
relu6 = model.Relu(fc6, "fc6")
fc7 = model.FC(
relu6, "fc7", 4096, 4096, ('XavierFill', {}), ('ConstantFill', {})
)
relu7 = model.Relu(fc7, "fc7")
fc8 = model.FC(
relu7, "fc8", 4096, 1000, ('XavierFill', {}), ('ConstantFill', {})
)
pred = model.Softmax(fc8, "pred")
xent = model.LabelCrossEntropy([pred, "label"], "xent")
model.AveragedLoss(xent, "loss")
return model, 224
def OverFeat(order):
model = cnn.CNNModelHelper(order, name="overfeat",
use_cudnn=True, cudnn_exhaustive_search=True)
conv1 = model.Conv(
"data",
"conv1",
3,
96,
11,
('XavierFill', {}),
('ConstantFill', {}),
stride=4
)
relu1 = model.Relu(conv1, "conv1")
pool1 = model.MaxPool(relu1, "pool1", kernel=2, stride=2)
conv2 = model.Conv(
pool1, "conv2", 96, 256, 5, ('XavierFill', {}), ('ConstantFill', {})
)
relu2 = model.Relu(conv2, "conv2")
pool2 = model.MaxPool(relu2, "pool2", kernel=2, stride=2)
conv3 = model.Conv(
pool2,
"conv3",
256,
512,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu3 = model.Relu(conv3, "conv3")
conv4 = model.Conv(
relu3,
"conv4",
512,
1024,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu4 = model.Relu(conv4, "conv4")
conv5 = model.Conv(
relu4,
"conv5",
1024,
1024,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu5 = model.Relu(conv5, "conv5")
pool5 = model.MaxPool(relu5, "pool5", kernel=2, stride=2)
fc6 = model.FC(
| pool5, "fc6", 1024 * 6 * 6, 3072, ('XavierFill', {}),
('ConstantFill', {})
)
relu6 = model.Relu(fc6, "fc6")
fc7 = model.FC(
relu6, "fc7", 3072, 4096, ('XavierFill', {}), ('ConstantFill', {})
)
relu7 = model.Relu(fc7, "fc7")
fc8 = model.FC(
relu7, "fc8", 4096, 1000, ('XavierFill', {}), ('ConstantFill', {})
)
pred = model.Softmax(fc8, "pred")
xent | = model.LabelCrossEntropy([pred, "label"], "xent")
model.AveragedLoss(xent, "loss")
return model, 231
def VGGA(order):
model = cnn.CNNModelHelper(order, name='vgg-a',
use_cudnn=True, cudnn_exhaustive_search=True)
conv1 = model.Conv(
"data",
"conv1",
3,
64,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu1 = model.Relu(conv1, "conv1")
pool1 = model.MaxPool(relu1, "pool1", kernel=2, stride=2)
conv2 = model.Conv(
pool1,
"conv2",
64,
128,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu2 = model.Relu(conv2, "conv2")
pool2 = model.MaxPool(relu2, "pool2", kernel=2, stride=2)
conv3 = model.Conv(
pool2,
"conv3",
128,
256,
3,
('XavierFill', {}),
('ConstantFill', {}),
pad=1
)
relu3 = model.Relu(conv3, "conv3")
conv4 = model.Conv(
relu3,
"conv4", |
addition-it-solutions/project-all | addons/account/wizard/account_chart.py | Python | agpl-3.0 | 5,123 | 0.004688 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_chart(osv.osv_memory):
"""
For Chart of Accounts
"""
_name = "account.chart"
_description = "Account chart"
_columns = {
'fiscalyear': fields.many2one('account.fiscalyear', \
'Fiscal year', \
help='Keep empty for all open fiscal years'),
'period_from': fields.many2one('account.period', 'Start period'),
'period_to': fields.many2one('account.period', 'End period'),
'target_move': fields.selection([('posted', 'All Posted Entries'),
('all', 'All Entries'),
], 'Target Moves', required=True),
}
def _get_fiscalyear(self, cr, uid, context=None):
"""Return default Fiscalyear value"""
return self.pool.get('account.fiscalyear').find(cr, uid, context=context)
def onchange_fiscalyear(self, cr, uid, ids, fiscalyear_id=False, context=None):
res = {}
if fiscalyear_id:
start_period = end_period = False
cr.execute('''
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
ORDER BY p.date_start ASC, p.special DESC
LIMIT 1) AS period_start
UNION ALL
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
AND p.date_start < NOW()
ORDER BY p.date_stop DESC
LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
periods = [i[0] for i in cr.fetchall()]
if periods:
start_period = periods[0]
if len(periods) > 1:
end_period = periods[1]
res['value'] = {'period_from': start_period, 'period_to': end_period}
else:
res['value'] = {'period_from': False, 'period_to': False}
return res
def account_chart_open_window(self, cr, uid, ids, context=None):
"""
Opens chart of Accounts
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of account chart’s IDs
@return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries
"""
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
period_obj = self.pool.get('ac | count.period')
fy_obj = self.pool.get('account.fiscalyear')
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_account_tree')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
| fiscalyear_id = data.get('fiscalyear', False) and data['fiscalyear'][0] or False
result['periods'] = []
if data['period_from'] and data['period_to']:
period_from = data.get('period_from', False) and data['period_from'][0] or False
period_to = data.get('period_to', False) and data['period_to'][0] or False
result['periods'] = period_obj.build_ctx_periods(cr, uid, period_from, period_to)
result['context'] = str({'fiscalyear': fiscalyear_id, 'periods': result['periods'], \
'state': data['target_move']})
if fiscalyear_id:
result['name'] += ':' + fy_obj.read(cr, uid, [fiscalyear_id], context=context)[0]['code']
return result
_defaults = {
'target_move': 'posted',
'fiscalyear': _get_fiscalyear,
}
|
openvstorage/arakoon | src/client/python/__init__.py | Python | apache-2.0 | 572 | 0.001748 | """
Copyri | ght (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, sof | tware
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
|
pkats15/hdt_analyzer | test/file_manager.py | Python | mit | 1,250 | 0.0032 | from os import path, listdir, remove
import zipfile
from pprint import pprint
import xmltodict
files_folder = path.join(path.dirname(path.abspath(__file__)), '..', 'files')
input_folder = path.join(files_folder, 'input')
temp_folder = path.join(files_folder, 'temp')
replay_file = path.join(files_folder, 'temp', 'replay.json')
deck_file = path.join(input_folder, 'DeckS | tats.xml')
database_file = path.join(files_folder, 'database.json')
def find_deck_games(deck_name):
deck_xml = open(deck_file)
deck_stats = xmltodict.parse(deck_xml)
deck_xml.close()
files = []
for g in deck_stats['DeckStatsList']['DeckStats']['Deck']:
if g['Games'] != None:
for game in g['Games']['Game']:
if 'DeckName' in game and 'Repla | yFile' in game and game['DeckName'] == deck_name:
files.append(game['ReplayFile'])
return files
def unzip_file(file_name):
for fn in listdir(temp_folder):
remove(path.join(temp_folder, fn))
if path.exists(path.join(input_folder, file_name)):
zip_file = zipfile.ZipFile(path.join(input_folder, file_name))
zip_file.extract('replay.json', path=temp_folder)
zip_file.close()
return True
else: return False
|
e-koch/TurbuStat | turbustat/tests/test_mahalanobis.py | Python | mit | 1,382 | 0 | # # Licensed under an MIT open source license - see LICENSE
# from __future__ import print_function, absolute_import, division
# import pytest
# import warnings
# from ..statistics import Mahalanobis, Mahalanobis_Distance
# from ..statistics.stats_warnings import TurbuStatTestingWarning
# from ._testing_data import dataset1
# def test_Mahalanobis_raisewarning():
# '''
# Mahalanobis has not been completed yet. Ensure the warning is returned
# when used.
# '''
# with warnings.catch_warnings(record=True) as w:
# mahala = Mahalanobis(dataset1['cube'])
# assert len(w) == 1
# assert w[0].category == TurbuStatTestingWarning
# assert str(w[0].message) == \
# ("Mahalanobis is an untested statistic. Its use"
# " is not yet recommended.")
# def test_Mahalanobis_Distance_raisewarning():
# '''
# Mahalanobis has not been completed yet. Ensure the warning is returned
# when used.
# '''
# w | ith warnings.catch_warnings(record=True) as w:
# mahala = Mahalanobis_Distance(dataset1['cube'], dataset1['cube'])
# # Warning is raised each time Mahalanobis is run (so twice)
# assert len(w) == 3
# assert w[0].category == TurbuStatTestingWarning
# assert str(w[0].message) == \
# ("Mahalanobis_Dist | ance is an untested metric. Its use"
# " is not yet recommended.")
|
gitchs/shadowsocks | setup.py | Python | apache-2.0 | 1,323 | 0 | import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name="shadowsocks",
version="2.8.2.1",
license='http://www.apache.org/licenses/LICENSE-2.0',
description="A fast tunnel proxy that help you get through firewalls",
| author='clowwindy',
author_email='clowwindy42@gmail.com',
url='https://github.com/shadowsocks/shadowsocks',
packages=['shadowsocks', 'shadowsocks.crypto'],
package_data={
'shadowsocks': ['README.rst', 'LICENSE']
},
install_requires=[],
entry_points="""
[console_scripts]
sslocal = shadowsocks.local:main
ssserver = shadowsocks.server:main
""",
| classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: Proxy Servers',
],
long_description=long_description,
)
|
nextgis/nextgisweb | nextgisweb/feature_layer/api.py | Python | gpl-3.0 | 25,791 | 0.000969 | import json
import os
import re
import uuid
import zipfile
import itertools
from urllib.parse import unquote
import tempfile
from collections import OrderedDict
from datetime import datetime, date, time
from osgeo import ogr, gdal
from pyramid.response import Response, FileResponse
from pyramid.httpexceptions import HTTPNoContent, HTTPNotFound
from shapely.geometry import box
from sqlalchemy.orm.exc import NoResultFound
from ..core.exception import ValidationError
from ..lib.geometry import Geometry, GeometryNotValid, Transformer
from ..resource import DataScope, Resource, resource_factory
from ..resource.exception import ResourceNotFound
from ..spatial_ref_sys import SRS
from .. import geojson
from .interface import (
IFeatureLayer,
IFeatureQueryLike,
IWritableFeatureLayer,
IFeatureQueryClipByBox,
IFeatureQuerySimplify,
FIELD_TYPE)
from .feature import Feature
from .extension import FeatureExtension
from .ogrdriver import EXPORT_FORMAT_OGR, MVT_DRIVER_EXIST
from .exception import FeatureNotFound
from .util import _
PERM_READ = DataScope.read
PERM_WRITE = DataScope.write
def _ogr_memory_ds():
return gdal.GetDriverByName('Memory').Create(
'', 0, 0, 0, gdal.GDT_Unknown)
def _ogr_ds(driver, options):
return ogr.GetDriverByName(driver).CreateDataSource(
"/vsimem/%s" % uuid.uuid4(), options=options
)
def _ogr_layer_from_features(layer, features, name='', ds=None, fid=None):
ogr_layer = layer.to_ogr(ds, name=name, fid=fid)
layer_defn = ogr_layer.GetLayerDefn()
for f in features:
ogr_layer.CreateFeature(
f.to_ogr(layer_defn, fid=fid))
return ogr_layer
def _extensions(extensions, layer):
result = []
ext_filter = None if extensions is None else extensions.split(',')
for cls in FeatureExtension.registry:
if ext_filter is None or cls.identity in ext_filter:
result.append((cls.identity, cls(layer)))
return result
def view_geojson(request):
request.GET["format"] = "GeoJSON"
request.GET["zipped"] = "false"
return export(request)
def export(request):
request.resource_permission(PERM_READ)
srs = int(
request.GET.get("srs", request.context.srs.id)
)
srs = SRS.filter_by(id=srs).one()
fid = request.GET.get("fid")
fid = fid if fid != "" else None
format = request.GET.get("format")
encoding = request.GET.get("encoding")
zipped = request.GET.get("zipped", "true")
zipped = zipped.lower() == "true"
display_name = request.GET.get("display_name", "false")
display_name = display_name.lower() == "true"
if format is None:
raise ValidationError(
_("Output format is not provided.")
)
if format not in EXPORT_FORMAT_OGR:
raise ValidationError(
_("Format '%s' is not supported.") % (format,)
)
driver = EXPORT_FORMAT_OGR[format]
# dataset creation options (configurable by user)
dsco = list()
if driver.dsco_configurable is not None:
for option in driver.dsco_configurable:
option = option.split(":")[0]
if option in request.GET:
dsco.append("%s=%s" % (option, request.GET.get(option)))
# layer creation options
lco = list(driver.options or [])
if encoding is not None:
lco.append("ENCODING=%s" % encoding)
query = request.context.feature_query()
query.geom()
ogr_ds = _ogr_memory_ds()
ogr_layer = _ogr_layer_from_features( # NOQA: 841
request.context, query(), ds=ogr_ds, fid=fid)
with tempfile.TemporaryDirectory() as tmp_dir:
filename = "%d.%s" % (
request.context.id,
driver.extension,
)
vtopts = (
[
"-f", driver.name,
"-t_srs", srs.wkt,
]
+ list(itertools.chain(*[("-lco", o) for o in lco]))
+ list(itertools.chain(*[("-dsco", | o) for o in dsco]))
)
if display_name:
# CPLES_SQLI == 7
flds = [
'"{}" as "{}"'.format(
fld.keyname.replace('"', r'\"'),
fld.display_name.replace('"', r'\"'),
)
for fld in request.context.fields
]
if fid is not None:
flds += | ['FID as "{}"'.format(fid.replace('"', r'\"'))]
vtopts += ["-sql", 'select {} from ""'.format(", ".join(
flds if len(flds) > 0 else '*'))]
if driver.fid_support and fid is None:
vtopts.append('-preserve_fid')
gdal.VectorTranslate(
os.path.join(tmp_dir, filename), ogr_ds,
options=gdal.VectorTranslateOptions(options=vtopts)
)
if zipped or not driver.single_file:
content_type = "application/zip"
content_disposition = "attachment; filename=%s" % ("%s.zip" % (filename,))
with tempfile.NamedTemporaryFile(suffix=".zip") as tmp_file:
with zipfile.ZipFile(tmp_file, "w", zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(tmp_dir):
for file in files:
path = os.path.join(root, file)
zipf.write(path, os.path.basename(path))
response = FileResponse(
tmp_file.name, content_type=content_type,
request=request)
response.content_disposition = content_disposition
return response
else:
content_type = driver.mime or "application/octet-stream"
content_disposition = "attachment; filename=%s" % filename
response = FileResponse(
os.path.join(tmp_dir, filename), content_type=content_type,
request=request)
response.content_disposition = content_disposition
return response
def mvt(request):
if not MVT_DRIVER_EXIST:
return HTTPNotFound(explanation='MVT GDAL driver not found')
z = int(request.GET["z"])
x = int(request.GET["x"])
y = int(request.GET["y"])
extent = int(request.GET.get('extent', 4096))
simplification = float(request.GET.get("simplification", extent / 512))
resids = map(
int,
filter(None, request.GET["resource"].split(",")),
)
# web mercator
merc = SRS.filter_by(id=3857).one()
minx, miny, maxx, maxy = merc.tile_extent((z, x, y))
# 5% padding by default
padding = float(request.GET.get("padding", 0.05))
bbox = (
minx - (maxx - minx) * padding,
miny - (maxy - miny) * padding,
maxx + (maxx - minx) * padding,
maxy + (maxy - miny) * padding,
)
bbox = Geometry.from_shape(box(*bbox), srid=merc.id)
options = [
"FORMAT=DIRECTORY",
"TILE_EXTENSION=pbf",
"MINZOOM=%d" % z,
"MAXZOOM=%d" % z,
"EXTENT=%d" % extent,
"COMPRESS=NO",
]
ds = _ogr_ds("MVT", options)
vsibuf = ds.GetName()
for resid in resids:
try:
obj = Resource.filter_by(id=resid).one()
except NoResultFound:
raise ResourceNotFound(resid)
request.resource_permission(PERM_READ, obj)
query = obj.feature_query()
query.intersects(bbox)
query.geom()
if IFeatureQueryClipByBox.providedBy(query):
query.clip_by_box(bbox)
if IFeatureQuerySimplify.providedBy(query):
tolerance = ((obj.srs.maxx - obj.srs.minx) / (1 << z)) / extent
query.simplify(tolerance * simplification)
_ogr_layer_from_features(
obj, query(), name="ngw:%d" % obj.id, ds=ds)
# flush changes
ds = None
filepath = os.path.join(
"%s" % vsibuf, "%d" % z, "%d" % x, "%d.pbf" % y
)
try:
f = gdal.VSIFOpenL(filepath, "rb")
if f is not None:
# SEEK_END = 2
gdal.VSIFSeekL(f, 0, 2)
size = gdal.VSIFTellL(f)
# SEEK_SET = 0
gdal.VSIFSeekL(f, 0, 0)
content = gdal.VSIF |
Aloomaio/googleads-python-lib | examples/ad_manager/v201811/proposal_service/create_proposals.py | Python | apache-2.0 | 2,591 | 0.005403 | #!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new proposals.
To determine which proposals exist, run get_all_proposals.py.
"""
import uuid
# Import appropriate modules from the client library.
from googleads import ad_manager
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
PRIMARY_SALESPERSON_ID = 'INSERT_PRIMARY_SALESPERSON_ID_HERE'
SECONDARY_SALESPERSON_ID = 'INSERT_SECONDARY_SALESPERSON_ID_HERE'
PRIMARY_TRAFFICKER_ID = 'INSERT_PRIMARY_TRAFFICKER_ID_HERE'
def main(client, advertiser_id, primary_salesperson_id,
secondary_salesperson_id, primary_trafficker_id):
# Initialize appropriate services.
proposal_service = client.GetService('ProposalService', version='v201811')
network_service = client.GetService('NetworkService', version='v201811')
# Create proposal objects.
proposal = {
'name': 'Proposal #%s' % uuid.uuid4(),
'advertiser': {
'companyId': advertise | r_id,
'type': 'ADVERTISER'
},
'primarySalesperson': {
'us | erId': primary_salesperson_id,
'split': '75000'
},
'secondarySalespeople': [{
'userId': secondary_salesperson_id,
'split': '25000'
}],
'primaryTraffickerId': primary_trafficker_id,
'probabilityOfClose': '100000',
'budget': {
'microAmount': '100000000',
'currencyCode': network_service.getCurrentNetwork()['currencyCode']
},
'billingCap': 'CAPPED_CUMULATIVE',
'billingSource': 'DFP_VOLUME'
}
# Add proposals.
proposals = proposal_service.createProposals([proposal])
# Display results.
for proposal in proposals:
print ('Proposal with id "%s" and name "%s" was created.'
% (proposal['id'], proposal['name']))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, ADVERTISER_ID, PRIMARY_SALESPERSON_ID,
SECONDARY_SALESPERSON_ID, PRIMARY_TRAFFICKER_ID)
|
nvbn/guessit | guessit/transfo/split_path_components.py | Python | lgpl-3.0 | 1,367 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import fileutils
import os.path
import logging
log = logging.getLogger(__name__)
priority = 255
def process(mtree):
"""first split our path into dirs + basename + ext
:return: the filename split into [ dir*, basen | ame, ext ]
"""
components = fileutils.split_path(mtree.value)
basename = components.pop(-1)
| components += list(os.path.splitext(basename))
components[-1] = components[-1][1:] # remove the '.' from the extension
mtree.split_on_components(components)
|
deepmind/envlogger | envlogger/__init__.py | Python | apache-2.0 | 1,242 | 0 | # coding=utf-8
# Copyright 2022 DeepMind Technologies Limited..
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES O | R CONDITIONS OF ANY | KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A one-stop import for commonly used modules in EnvLogger."""
from envlogger import environment_logger
from envlogger import reader
from envlogger import step_data
from envlogger.backends import backend_type
from envlogger.backends import riegeli_backend_writer
from envlogger.backends import schedulers
from envlogger.proto import storage_pb2
EnvLogger = environment_logger.EnvLogger
Reader = reader.Reader
BackendType = backend_type.BackendType
StepData = step_data.StepData
Scheduler = schedulers.Scheduler
RiegeliBackendWriter = riegeli_backend_writer.RiegeliBackendWriter
Data = storage_pb2.Data
Datum = storage_pb2.Datum
|
actframework/FrameworkBenchmarks | frameworks/Python/web2py/gunicorn_conf.py | Python | bsd-3-clause | 448 | 0.002232 | import multiprocessing
import os
impo | rt sys
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 3
if _is_travis:
workers = 2
bind = "0.0.0.0:8080"
keepalive = 120
errorlog = '-'
pidfile = 'gunicorn.pid'
pythonpath = 'web2py'
worker_class = "meinhel | d.gmeinheld.MeinheldWorker"
def post_fork(server, worker):
# Disable access log
import meinheld.server
meinheld.server.set_access_logger(None)
|
shlopack/cursovaya | template/3_4.py | Python | mit | 108 | 0.058252 | template = ' R_{\\tex | t{6Д}} = \dfrac{R_6}{1- | K_{\\text{ОК}}} = 10 \cdot %.0f = %.0f~\\text{Ом}'%(R6,R6d) |
postlund/pyatv | tests/zeroconf_stub.py | Python | mit | 1,543 | 0 | """Stub for the zeroconf library.
As zeroconf does not provide a stub or mock, this implementation will serve as
stub here. It can fake immediate answers for any service.
"""
from zeroconf import ServiceInfo
class ServiceBrowserStub:
"""Stub for ServiceBrowser."""
def __init__(self, zeroconf, service_type, listener):
"""Create a new instance of ServiceBrowser."""
for service in zeroconf.services:
if service.type == service_type:
listener.add_service(zeroconf, service_type, service.name)
class ZeroconfStub:
"""Stub for Zeroconf."""
def __init__(self, services):
"""Create a new instance of Zeroconf."""
self.services = services
self.registered_services = []
def get_service_info(self, service_type, service_name):
"""Look up service information."""
for service in self.services:
if service.name == service_name:
| return service
def register_service(self, service):
"""Save services registered services."""
self.registered_services.append(service)
def unregister_service(self, service):
"""Stub for unregistering services (does nothing)."""
pass
def close(self):
"""Stub for closing zeroconf (does nothing)."""
pass
def stub(modul | e, *services):
"""Stub a module using zeroconf."""
instance = ZeroconfStub(list(services))
module.Zeroconf = lambda: instance
module.ServiceBrowser = ServiceBrowserStub
return instance
|
edx/edx-platform | openedx/features/discounts/admin.py | Python | agpl-3.0 | 2,665 | 0.006754 | """
Django Admin pages for DiscountRestrictionConfig.
"""
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from openedx.core.djangoapps.config_model_utils.admin import St | ackedConfigModelAdmin
from .models import DiscountPercentageConfig, DiscountRestrictionConfig
class DiscountRestrictionConfigAdmin(StackedConfigModelAdmin):
| """
Admin to configure discount restrictions
"""
fieldsets = (
('Context', {
'fields': DiscountRestrictionConfig.KEY_FIELDS,
'description': _(
'These define the context to disable lms-controlled discounts on. '
'If no values are set, then the configuration applies globally. '
'If a single value is set, then the configuration applies to all courses '
'within that context. At most one value can be set at a time.<br>'
'If multiple contexts apply to a course (for example, if configuration '
'is specified for the course specifically, and for the org that the course '
'is in, then the more specific context overrides the more general context.'
),
}),
('Configuration', {
'fields': ('disabled',),
'description': _(
'If any of these values is left empty or "Unknown", then their value '
'at runtime will be retrieved from the next most specific context that applies. '
'For example, if "Disabled" is left as "Unknown" in the course context, then that '
'course will be Disabled only if the org that it is in is Disabled.'
),
})
)
raw_id_fields = ('course',)
admin.site.register(DiscountRestrictionConfig, DiscountRestrictionConfigAdmin)
class DiscountPercentageConfigAdmin(StackedConfigModelAdmin):
"""
Admin to configure discount percentage
"""
fieldsets = (
('Context', {
'fields': DiscountRestrictionConfig.KEY_FIELDS,
'description': _(
'These define the context to configure the percentage for the first purchase discount.'
'If multiple contexts apply to a course (for example, if configuration '
'is specified for the course specifically, and for the org that the course '
'is in, then the more specific context overrides the more general context.'
),
}),
('Configuration', {
'fields': ('percentage',),
})
)
raw_id_fields = ('course',)
admin.site.register(DiscountPercentageConfig, DiscountPercentageConfigAdmin)
|
lonvia/osgende | osgende/common/sqlalchemy/column_function.py | Python | gpl-3.0 | 2,328 | 0.00043 | # This file is part of Osgende
# Copyright (C) 2017 Sarah Hoffmann
#
# This is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# With minor modifications borrowed from
# https://bitbucket.org/zzzeek/sqlalchemy/issues/3566/figure-out-how-to-support-all-of-pgs
from sqlalchemy.sql import functions
from sqlalchemy.sql.selectable import FromClause
from sqlalchemy.sql.elements import ColumnClause
from sqlalchemy.ext.compiler import compiles
class FunctionColumn(ColumnClause):
def __init__(self, function, name, type_=None):
self.function = self.table = function
self.name = se | lf.key = name
self.type_ = type_
self.is_literal = False
@property
def _from_objects(self):
return []
def _make_proxy(self, selectable, name=None, attach=True,
| name_is_truncatable=False, **kw):
co = ColumnClause(self.name, self.type_)
co.table = selectable
co._proxies = [self]
if selectable._is_clone_of is not None:
co._is_clone_of = \
selectable._is_clone_of.columns.get(co.key)
if attach:
selectable._columns[co.key] = co
return co
@compiles(FunctionColumn)
def _compile_function_column(element, compiler, **kw):
return "(%s).%s" % (
compiler.process(element.function, **kw),
compiler.preparer.quote(element.name)
)
class ColumnFunction(functions.FunctionElement):
__visit_name__ = 'function'
@property
def columns(self):
return FromClause.columns.fget(self)
def _populate_column_collection(self):
for name, t in self.column_names:
self._columns[name] = FunctionColumn(self, name, t)
|
GH1995/tools | archives/Python_江老师给的代码/chapter08/decorator1.py | Python | gpl-3.0 | 389 | 0.010782 | #decorat | or1.py
def foo(f):
"""foo fucntion Docstring"""
def wrapper(*x, **y):
"""wrapper fucntion Docstring"""
print('调用函数:', f.__name__)
return f(*x, **y)
return wrapper
@foo
def bar(x):
"""bar fucntion Docstring"""
return x**2
#测试代码
if __name__ == '__main__':
print(bar(2))
print(bar.__name__)
print(b | ar.__doc__)
|
google-research/torchsde | diagnostics/utils.py | Python | apache-2.0 | 4,683 | 0.005125 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import os
import random
import matplotlib.pyplot as plt
import numpy as np
import torch
from scipy import stats
from torchsde.types import Optional, Tensor, Sequence, Union, Callable
def to_numpy(*args):
"""Convert a sequence which might contain Tensors to numpy arrays."""
if len(args) == 1:
arg = args[0]
if isinstance(arg, torch.Tensor):
arg = _to_numpy_single(arg)
return arg
else:
return tuple(_to_numpy_single(arg) if isinstance(arg, torch.Tensor) else arg for arg in args)
def _to_numpy_single(arg: torch.Tensor) -> np.ndarray:
return arg.detach().cpu().numpy()
def mse(x: Tensor, y: Tensor, norm_dim: Optional[int] = 1, mean_dim: Optional[int] = 0) -> np.ndarray:
"""Compute mean squared error."""
return _to_numpy_single((torch.norm(x - y, dim=norm_dim) ** 2).mean(dim=mean_dim))
def mae(x: Tensor, y: Tensor, test_func: Callable, mean_dim: Optional[int] = 0) -> np.ndarray:
return _to_numpy_single(
abs(test_func(x).mean(mean_dim) - test_func(y).mean(mean_dim))
)
def log(x: Union[Sequence[float], np.ndarray]) -> np.ndarray:
"""Compute element-wise log of a sequence of floats."""
return np.log(np.array(x))
def linregress_slope(x, y):
"""Return the slope of a least-squares regression for two sets of measurements."""
return stats.linregress(x, y)[0]
def swiss_knife_plotter(img_path, plots=None, scatters=None, hists=None, options=None):
"""A multi-functional *standalone* wrapper; reduces boilerplate.
Args:
img_path (str): A path to the place where the image should be written.
plots (list of dict, optional): A list of curves that needs `plt.plot`.
scatters (list of dict, optional): A list of scatter plots that needs `plt.scatter`.
hists (list of histograms, optional): A list of histograms that needs `plt.hist`.
options (dict, optional): A dictionary of optional arguments. Possible entries include
- xscale (str): Scale of xaxis.
- yscale (str): Scale of yaxis.
- xlabel (str): Label of xaxis.
- ylabel (str): Label of yaxis.
- title (str): Title of the plot.
- cycle_linestyle (bool): Cycle through matplotlib's possible line styles if True.
Returns:
| Nothing.
"""
im | g_dir = os.path.dirname(img_path)
if not os.path.exists(img_dir):
os.makedirs(img_dir)
if plots is None: plots = ()
if scatters is None: scatters = ()
if hists is None: hists = ()
if options is None: options = {}
plt.figure(dpi=300)
if 'xscale' in options: plt.xscale(options['xscale'])
if 'yscale' in options: plt.yscale(options['yscale'])
if 'xlabel' in options: plt.xlabel(options['xlabel'])
if 'ylabel' in options: plt.ylabel(options['ylabel'])
if 'title' in options: plt.title(options['title'])
cycle_linestyle = options.get('cycle_linestyle', False)
cycler = itertools.cycle(["-", "--", "-.", ":"]) if cycle_linestyle else None
for entry in plots:
kwargs = {key: entry[key] for key in entry if key != 'x' and key != 'y'}
entry['x'], entry['y'] = to_numpy(entry['x'], entry['y'])
if cycle_linestyle:
kwargs['linestyle'] = next(cycler)
plt.plot(entry['x'], entry['y'], **kwargs)
for entry in scatters:
kwargs = {key: entry[key] for key in entry if key != 'x' and key != 'y'}
entry['x'], entry['y'] = to_numpy(entry['x'], entry['y'])
plt.scatter(entry['x'], entry['y'], **kwargs)
for entry in hists:
kwargs = {key: entry[key] for key in entry if key != 'x'}
entry['x'] = to_numpy(entry['x'])
plt.hist(entry['x'], **kwargs)
if len(plots) > 0 or len(scatters) > 0: plt.legend()
plt.tight_layout()
plt.savefig(img_path)
plt.close()
def manual_seed(seed: Optional[int] = 1147481649):
"""Set seeds for default generators of 1) torch, 2) numpy, and 3) Python's random library."""
torch.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
|
hasteur/g13bot_tools_new | scripts/replicate_wiki.py | Python | mit | 9,039 | 0.000553 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This bot replicates pages in a wiki to a second wiki within one family.
Example:
python pwb.py replicate_wiki [-r] -ns 10 -family:wikipedia -o nl li fy
or
python pwb.py replicate_wiki [-r] -ns 10 -family:wikipedia -lang:nl li fy
to copy all templates from an nlwiki to liwiki and fywiki. It will show which
pages have to be changed if -r is not present, and will only actually write
pages if -r /is/ present.
You can add replicate_replace to your user_config.py, which has the following
format:
replicate_replace = {
'wikipedia:li': {'Hoofdpagina': 'Veurblaad'}
}
to replace all occurrences of 'Hoofdpagina' with 'Veurblaad' when writing to
liwiki. Note that this does not take the origin wiki into account.
The following parameters are supported:
-r actually replace pages (without this option
--replace you will only get an overview page)
-o original wiki
--original (you may use -lang:<code> option instead)
destination_wiki destination wiki(s)
-ns specify namespace
--namespace
-dns destination namespace (if different)
--dest-namespace
"""
#
# (C) Kasper Souren, 2012-2013
# (C) Pywikibot team, 2013-2016
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
#
import sys
from argparse import ArgumentParser
import pywikibot
from pywikibot import config, Page
from pywikibot.tools import deprecated
@deprecated('BaseSite.namespaces')
def namespaces(site):
"""Return a dictionary from namespace number to prefix."""
return dict((n.id, n.custom_name) for n in site.namespaces)
def multiple_replace(text, word_dict):
"""Replace all occurrences in text of key value pairs in word_dict."""
for key in word_dict:
text = text.replace(key, word_dict[key])
return text
class SyncSites(object):
"""Work is done in here."""
def __init__(self, options):
"""Constructor."""
self.options = options
if options.original_wiki:
original_wiki = options.original_wiki
else:
original_wiki = config.mylang
pywikibot.output("Syncing from " + original_wiki)
family = config.family
sites = options.destination_wiki
self.original = pywikibot.Site(original_wiki, family)
self.original.login()
if options.namespace and 'help' in options.namespace:
for namespace in self.original.namespaces.values():
pywikibot.output('%s %s' % (namespace.id, namespace.custom_name))
sys.exit()
self.sites = [pywikibot.Site(s, family) for s in site | s]
self.differences = {}
self.user_diff = {}
pywikibot.output('Syncing to ', newline=False)
for s in self.sites:
s.login()
self.differences[s] = []
self.user_diff[s] = []
pywikibot.output(str(s), newline= | False)
pywikibot.output('')
def check_sysops(self):
"""Check if sysops are the same on all wikis."""
def get_users(site):
userlist = [ul['name'] for ul in site.allusers(group='sysop')]
return set(userlist)
ref_users = get_users(self.original)
for site in self.sites:
users = get_users(site)
diff = list(ref_users.difference(users))
diff.sort()
self.user_diff[site] = diff
def check_namespaces(self):
"""Check all namespaces, to be ditched for clarity."""
namespaces = [
0, # Main
8, # MediaWiki
152, # DPL
102, # Eigenschap
104, # Type
106, # Formulier
108, # Concept
10, # Sjabloon
]
if self.options.namespace:
pywikibot.output(str(self.options.namespace))
namespaces = [int(self.options.namespace)]
pywikibot.output("Checking these namespaces: %s\n" % (namespaces,))
for ns in namespaces:
self.check_namespace(ns)
def check_namespace(self, namespace):
"""Check an entire namespace."""
pywikibot.output("\nCHECKING NAMESPACE %s" % namespace)
pages = (p.title() for p in self.original.allpages(
'!', namespace=namespace))
for p in pages:
if p not in ['MediaWiki:Sidebar', 'MediaWiki:Mainpage',
'MediaWiki:Sitenotice', 'MediaWiki:MenuSidebar']:
try:
self.check_page(p)
except pywikibot.exceptions.NoPage:
pywikibot.output('Bizarre NoPage exception that we are '
'just going to ignore')
except pywikibot.exceptions.IsRedirectPage:
pywikibot.output(
'error: Redirectpage - todo: handle gracefully')
pywikibot.output('')
def generate_overviews(self):
"""Create page on wikis with overview of bot results."""
for site in self.sites:
sync_overview_page = Page(site,
'User:%s/sync.py overview' % site.user())
output = "== Pages that differ from original ==\n\n"
if self.differences[site]:
output += "".join('* [[:%s]]\n' % l for l in
self.differences[site])
else:
output += "All important pages are the same"
output += "\n\n== Admins from original that are missing here ==\n\n"
if self.user_diff[site]:
output += "".join('* %s\n' % l.replace('_', ' ') for l in
self.user_diff[site])
else:
output += "All users from original are also present on this wiki"
pywikibot.output(output)
sync_overview_page.text = output
sync_overview_page.save(self.put_message(site))
def put_message(self, site):
"""Return synchonization message."""
return ('%s replicate_wiki.py synchronization from %s'
% (site.user(), str(self.original)))
def check_page(self, pagename):
"""Check one page."""
pywikibot.output("\nChecking %s" % pagename)
sys.stdout.flush()
page1 = Page(self.original, pagename)
txt1 = page1.text
if self.options.dest_namespace:
dest_ns = int(self.options.dest_namespace)
else:
dest_ns = None
for site in self.sites:
if dest_ns is not None:
page2 = Page(site, page1.title(withNamespace=False), dest_ns)
pywikibot.output("\nCross namespace, new title: %s"
% page2.title())
else:
page2 = Page(site, pagename)
if page2.exists():
txt2 = page2.text
else:
txt2 = ''
if str(site) in config.replicate_replace:
txt_new = multiple_replace(txt1,
config.replicate_replace[str(site)])
if txt1 != txt_new:
pywikibot.output(
'NOTE: text replaced using config.sync_replace')
pywikibot.output('%s %s %s' % (txt1, txt_new, txt2))
txt1 = txt_new
if txt1 != txt2:
pywikibot.output("\n %s DIFFERS" % site)
self.differences[site].append(pagename)
if self.options.replace:
page2.text = txt1
page2.save(self.put_message(site))
else:
sys.stdout.write('.')
sys.stdout.flush()
def main(*args):
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: list of unicode
"""
my_args = pywikibot.handle_args(args)
parser = ArgumentParser(add_help=False)
parser.add_argument("-r", "--replace", action="sto |
mikeywaites/flask-skeleton | {{PROJECT_NAME}}/config.py | Python | mit | 1,292 | 0 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from os import path, environ
class Config(object):
DEBUG = False
PORT = 5000
HOST = "0.0.0.0"
SQLALCHEMY_ECHO = True
BASE_URL = "http://{{ PROJECT_NAME }}.com"
PROJECT_ROOT = path.abspath(path.dirname(__file__))
TEMPLATE_FOLDER = path.join(PROJECT_ROOT, 'templates')
def build_db_url(user=None, password=None, addr=None, name=None):
tmpl = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}"
return tmpl.format(DB_USER=user or 'postgres',
DB_PASS=password or 'root',
DB_ADDR=addr or 'localhost',
DB_NAME='postgres')
class DEV(Config):
DEBUG = True
SECRET_KEY = "c1893e25-88ec-4ec2-b2fe-4c213413df25"
SQLALCHEMY_DATABASE_URI = \
build_db_url(addr=environ.get('DB_PORT_5432_TCP_ADDR'))
class STAGE(Config):
DEBUG = True
cl | ass LIVE(Config):
SQLALCHEMY_ECHO = False
DEBUG = True
class TEST(Config):
SECRET_KEY = "2147d2df-759b-40ac-8013-f6154110a7d0"
TESTING = True
SQLALCHEMY_ECHO = False
SQLALCHEMY_DATABASE_U | RI = \
build_db_url(name='postgres_test',
addr=environ.get('DB_PORT_5432_TCP_ADDR'))
settings = globals()[environ.get('FASTER_CONFIG', 'DEV')]
|
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/scipy/spatial/tests/test_spherical_voronoi.py | Python | mit | 6,854 | 0.000875 | from __future__ import print_function
import numpy as np
import itertools
from numpy.testing import (assert_equal,
assert_almost_equal,
assert_array_equal,
assert_array_almost_equal)
from pytest import raises as assert_raises
from scipy.spatial import SphericalVoronoi, distance
from scipy.spatial import _spherical_voronoi as spherical_voronoi
class TestCircumcenters(object):
def test_circumcenters(self):
tetrahedrons = np.array([
[[1, 2, 3],
[-1.1, -2.1, -3.1],
[-1.2, 2.2, 3.2],
[-1.3, -2.3, 3.3]],
[[10, 20, 30],
[-10.1, -20.1, -30.1],
[-10.2, 20.2, 30.2],
[-10.3, -20.3, 30.3]]
])
result = spherical_voronoi.calc_circumcenters(tetrahedrons)
expected = [
[-0.5680861153262529, -0.133279590288315, 0.1843323216995444],
[-0.5965330784014926, -0.1480377040397778, 0.1981967854886021]
]
assert_array_almost_equal(result, expected)
class TestProjectToSphere(object):
def test_unit_sphere(self):
points = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
center = np.array([0, 0, 0])
radius = 1
projected = spherical_voronoi.project_to_sphere(points, center, radius)
assert_array_almost_equal(points, projected)
def test_scaled_points(self):
points = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
center = np.array([0, 0, 0])
radius = 1
scaled = points * 2
projected = spherical_voronoi.project_to_sphere(scaled, center, radius)
assert_array_almost_equal(points, projected)
def test_translated_sphere(self):
points = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
center = np.array([1, 2, 3])
translated = points + center
radius = 1
projected = spherical_voronoi.project_to_sphere(translated, center,
radius)
assert_array_almost_equal(translated, projected)
class TestSphericalVoronoi(object):
def setup_method(self):
self.points = np.array([
[-0.78928481, -0.16341094, 0.59188373],
[-0.66839141, 0.73309634, 0.12578818],
[0.32535778, -0.92476944, -0.19734181],
[-0.90177102, -0.03785291, -0.43055335],
[0.71781344, 0.68428936, 0.12842096],
[-0.96064876, 0.23492353, -0.14820556],
[0.73181537, -0.22025898, -0.6449281],
[0.79979205, 0.54555747, 0.25039913]]
)
def test_constructor(self):
center = np.array([1, 2, 3])
radius = 2
s1 = SphericalVoronoi(self.points)
# user input checks in SphericalVoronoi now require
# the radius / center to match the generators so adjust
# accordingly here
s2 = SphericalVoronoi(self.points * radius, radius)
s3 = SphericalVoronoi(self.points + center, None, center)
s4 = SphericalVoronoi(self.points * radius + center, radius, center)
assert_array_equal(s1.center, np.array([0, 0, 0]))
assert_equal(s1.radius, 1)
assert_array_equal(s2.center, np.array([0, 0, 0]))
assert_equal(s2.radius, 2)
assert_array_equal(s3.center, center)
assert_equal(s3.radius, 1)
assert_array_equal(s4.center, center)
assert_equal(s4.radius, radius)
def tes | t_vertices_regions_translation_invariance(self):
sv_origin = SphericalVoronoi(self.points)
center = np.array([1, 1, 1])
sv_translated = SphericalVoronoi(self.points + center, None, center)
assert_array_equal(sv_origin.regions, sv_translated.regions)
assert_array_almost_equal(sv_origin.vertices + center,
sv_transla | ted.vertices)
def test_vertices_regions_scaling_invariance(self):
sv_unit = SphericalVoronoi(self.points)
sv_scaled = SphericalVoronoi(self.points * 2, 2)
assert_array_equal(sv_unit.regions, sv_scaled.regions)
assert_array_almost_equal(sv_unit.vertices * 2,
sv_scaled.vertices)
def test_sort_vertices_of_regions(self):
sv = SphericalVoronoi(self.points)
unsorted_regions = sv.regions
sv.sort_vertices_of_regions()
assert_array_equal(sorted(sv.regions), sorted(unsorted_regions))
def test_sort_vertices_of_regions_flattened(self):
expected = sorted([[0, 6, 5, 2, 3], [2, 3, 10, 11, 8, 7], [0, 6, 4, 1], [4, 8,
7, 5, 6], [9, 11, 10], [2, 7, 5], [1, 4, 8, 11, 9], [0, 3, 10, 9,
1]])
expected = list(itertools.chain(*sorted(expected)))
sv = SphericalVoronoi(self.points)
sv.sort_vertices_of_regions()
actual = list(itertools.chain(*sorted(sv.regions)))
assert_array_equal(actual, expected)
def test_num_vertices(self):
# for any n >= 3, a spherical Voronoi diagram has 2n - 4
# vertices; this is a direct consequence of Euler's formula
# as explained by Dinis and Mamede (2010) Proceedings of the
# 2010 International Symposium on Voronoi Diagrams in Science
# and Engineering
sv = SphericalVoronoi(self.points)
expected = self.points.shape[0] * 2 - 4
actual = sv.vertices.shape[0]
assert_equal(actual, expected)
def test_voronoi_circles(self):
sv = spherical_voronoi.SphericalVoronoi(self.points)
for vertex in sv.vertices:
distances = distance.cdist(sv.points,np.array([vertex]))
closest = np.array(sorted(distances)[0:3])
assert_almost_equal(closest[0], closest[1], 7, str(vertex))
assert_almost_equal(closest[0], closest[2], 7, str(vertex))
def test_duplicate_point_handling(self):
# an exception should be raised for degenerate generators
# related to Issue# 7046
self.degenerate = np.concatenate((self.points, self.points))
with assert_raises(ValueError):
sv = spherical_voronoi.SphericalVoronoi(self.degenerate)
def test_incorrect_radius_handling(self):
# an exception should be raised if the radius provided
# cannot possibly match the input generators
with assert_raises(ValueError):
sv = spherical_voronoi.SphericalVoronoi(self.points,
radius=0.98)
def test_incorrect_center_handling(self):
# an exception should be raised if the center provided
# cannot possibly match the input generators
with assert_raises(ValueError):
sv = spherical_voronoi.SphericalVoronoi(self.points,
center=[0.1,0,0])
|
coobas/pydons | tests/test_file_browser_netcdf4.py | Python | mit | 740 | 0 | from pydons import MatStruct, FileBrowser, LazyDataset
import netCDF4
import numpy as np
import tempfile
import os
DATADIR = os.path.join(os.path.dirname(__file__), ' | data')
def test_netcdf4():
d = MatStruct()
data1 = np.random.rand(np.random.randint(1, 1000))
with tempfile.NamedTemporaryFile(suffix=".nc") as tmpf:
fh = netCDF4.Dataset(tmpf.name, mode='w')
grp = fh.createGroup('mygroup')
dim1 = grp.createDimension('dim1')
var1 = grp.createVariable('var1', data1.dtype.str, (dim1.name, ))
var1[:] = data1
| fh.close()
dd = FileBrowser(tmpf.name)
assert 'mygroup' in dd
assert 'var1' in dd.mygroup
assert np.all(dd.mygroup.var1[:] == data1)
|
zigitax/king-phisher | tests/spf.py | Python | bsd-3-clause | 5,698 | 0.014918 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# tests/spf.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from king_phisher import testing
from king_phisher import spf
class SPFTests(testing.KingPhisherTestCase):
@testing.skip_if_offline
def test_spf_check_host(self):
s = spf.SenderPolicyFramework('1.2.3.4', 'king-phi | sher.com')
check_host_result = s.check_host()
self.assertIsNotNone(check_host_result)
self.assertEqual(check_host_result, 'fail')
self.assertEqual(spf.check_host('1.2.3.4', 'king-phisher.com'), 'fail')
@testing.skip_if_offline
def test_spf_e | valuate_mechanism(self):
s = spf.SenderPolicyFramework('1.2.3.4', 'doesnotexist.king-phisher.com')
eval_mech = lambda m, r: s._evaluate_mechanism(s.ip_address, s.domain, s.sender, m, r)
self.assertTrue(eval_mech('all', None))
self.assertTrue(eval_mech('exists', '%{d2}'))
self.assertTrue(eval_mech('ip4', '1.2.3.0/24'))
self.assertTrue(eval_mech('ip4', '1.2.3.4'))
self.assertFalse(eval_mech('ip4', '1.1.1.0/24'))
def test_spf_evaluate_mechanism_permerror(self):
s = spf.SenderPolicyFramework('1.2.3.4', 'doesnotexist.king-phisher.com')
eval_mech = lambda m, r: s._evaluate_mechanism(s.ip_address, s.domain, s.sender, m, r)
with self.assertRaises(spf.SPFPermError):
eval_mech('ip4', 'thisisnotanetwork')
with self.assertRaises(spf.SPFPermError):
eval_mech('ip6', 'thisisnotanetwork')
with self.assertRaises(spf.SPFPermError):
eval_mech('fake', None)
def test_spf_evaluate_mechanism_temperror(self):
s = spf.SenderPolicyFramework('1.2.3.4', 'doesnotexist.king-phisher.com')
eval_mech = lambda m, r: s._evaluate_mechanism(s.ip_address, s.domain, s.sender, m, r)
with self.assertRaises(spf.SPFTempError):
eval_mech('a', None)
with self.assertRaises(spf.SPFTempError):
eval_mech('exists', None)
with self.assertRaises(spf.SPFTempError):
eval_mech('mx', None)
def test_spf_nonexistent_domain(self):
s = spf.SenderPolicyFramework('1.2.3.4', 'doesnotexist.king-phisher.com')
self.assertIsNone(s.check_host())
self.assertIsNone(spf.check_host('1.2.3.4', 'doesnotexist.king-phisher.com'))
def test_spf_rfc7208_macro_expansion(self):
spf_records = [('all', '-', None)]
s = spf.SenderPolicyFramework('192.0.2.3', 'email.example.com', 'strong-bad@email.example.com', spf_records=spf_records)
expand_macro = lambda m: s.expand_macros(m, '192.0.2.3', 'email.example.com', 'strong-bad@email.example.com')
self.assertEqual(expand_macro('%{s}'), 'strong-bad@email.example.com')
self.assertEqual(expand_macro('%{o}'), 'email.example.com')
self.assertEqual(expand_macro('%{d}'), 'email.example.com')
self.assertEqual(expand_macro('%{d4}'), 'email.example.com')
self.assertEqual(expand_macro('%{d3}'), 'email.example.com')
self.assertEqual(expand_macro('%{d2}'), 'example.com')
self.assertEqual(expand_macro('%{d1}'), 'com')
self.assertEqual(expand_macro('%{dr}'), 'com.example.email')
self.assertEqual(expand_macro('%{d2r}'), 'example.email')
self.assertEqual(expand_macro('%{l}'), 'strong-bad')
self.assertEqual(expand_macro('%{l-}'), 'strong.bad')
self.assertEqual(expand_macro('%{lr}'), 'strong-bad')
self.assertEqual(expand_macro('%{lr-}'), 'bad.strong')
self.assertEqual(expand_macro('%{l1r-}'), 'strong')
self.assertEqual(expand_macro('%{ir}.%{v}._spf.%{d2}'), '3.2.0.192.in-addr._spf.example.com')
self.assertEqual(expand_macro('%{lr-}.lp._spf.%{d2}'), 'bad.strong.lp._spf.example.com')
self.assertEqual(expand_macro('%{lr-}.lp.%{ir}.%{v}._spf.%{d2}'), 'bad.strong.lp.3.2.0.192.in-addr._spf.example.com')
self.assertEqual(expand_macro('%{ir}.%{v}.%{l1r-}.lp._spf.%{d2}'), '3.2.0.192.in-addr.strong.lp._spf.example.com')
self.assertEqual(expand_macro('%{d2}.trusted-domains.example.net'), 'example.com.trusted-domains.example.net')
def test_spf_record_unparse(self):
self.assertEqual(spf.record_unparse(('all', '+', None)), 'all')
self.assertEqual(spf.record_unparse(('all', '-', None)), '-all')
self.assertEqual(spf.record_unparse(('include', '+', '_spf.wonderland.com')), 'include:_spf.wonderland.com')
self.assertEqual(spf.record_unparse(('ip4', '+', '10.0.0.0/24')), 'ip4:10.0.0.0/24')
if __name__ == '__main__':
unittest.main()
|
obask/lispify | src/python3/lis.py | Python | mit | 4,615 | 0.014085 | ################ Lispy: Scheme Interprete | r in Python
## (c) Peter Norvig, 2010-14; See http://norvig.com/lispy.html
################ Types
from __future__ import division
Symbol = st | r # A Lisp Symbol is implemented as a Python str
List = list # A Lisp List is implemented as a Python list
Number = (int, float) # A Lisp Number is implemented as a Python int or float
################ Parsing: parse, tokenize, and read_from_tokens
def parse(program):
"Read a Scheme expression from a string."
return read_from_tokens(tokenize(program))
def tokenize(s):
"Convert a string into a list of tokens."
return s.replace('(',' ( ').replace(')',' ) ').split()
def read_from_tokens(tokens):
"Read an expression from a sequence of tokens."
if len(tokens) == 0:
raise SyntaxError('unexpected EOF while reading')
token = tokens.pop(0)
if '(' == token:
L = []
while tokens[0] != ')':
L.append(read_from_tokens(tokens))
tokens.pop(0) # pop off ')'
return L
elif ')' == token:
raise SyntaxError('unexpected )')
else:
return atom(token)
def atom(token):
"Numbers become numbers; every other token is a symbol."
try: return int(token)
except ValueError:
try: return float(token)
except ValueError:
return Symbol(token)
################ Environments
def standard_env():
"An environment with some Scheme standard procedures."
import math, operator as op
env = Env()
env.update(vars(math)) # sin, cos, sqrt, pi, ...
env.update({
'+':op.add, '-':op.sub, '*':op.mul, '/':op.div,
'>':op.gt, '<':op.lt, '>=':op.ge, '<=':op.le, '=':op.eq,
'abs': abs,
'append': op.add,
'apply': apply,
'begin': lambda *x: x[-1],
'car': lambda x: x[0],
'cdr': lambda x: x[1:],
'cons': lambda x,y: [x] + y,
'eq?': op.is_,
'equal?': op.eq,
'length': len,
'list': lambda *x: list(x),
'list?': lambda x: isinstance(x,list),
'map': map,
'max': max,
'min': min,
'not': op.not_,
'null?': lambda x: x == [],
'number?': lambda x: isinstance(x, Number),
'procedure?': callable,
'round': round,
'symbol?': lambda x: isinstance(x, Symbol),
})
return env
class Env(dict):
"An environment: a dict of {'var':val} pairs, with an outer Env."
def __init__(self, parms=(), args=(), outer=None):
self.update(zip(parms, args))
self.outer = outer
def find(self, var):
"Find the innermost Env where var appears."
return self if (var in self) else self.outer.find(var)
global_env = standard_env()
################ Interaction: A REPL
def repl(prompt='lis.py> '):
"A prompt-read-eval-print loop."
while True:
val = eval(parse(raw_input(prompt)))
if val is not None:
print(lispstr(val))
def lispstr(exp):
"Convert a Python object back into a Lisp-readable string."
if isinstance(exp, list):
return '(' + ' '.join(map(lispstr, exp)) + ')'
else:
return str(exp)
################ Procedures
class Procedure(object):
"A user-defined Scheme procedure."
def __init__(self, parms, body, env):
self.parms, self.body, self.env = parms, body, env
def __call__(self, *args):
return eval(self.body, Env(self.parms, args, self.env))
################ eval
def eval(x, env=global_env):
    """Evaluate an expression in an environment.

    Special forms (quote, if, define, set!, lambda) are dispatched first;
    any other list is a procedure application. `Symbol` and `List` are
    type aliases defined earlier in this module.
    """
    if isinstance(x, Symbol):      # variable reference
        return env.find(x)[x]
    elif not isinstance(x, List):  # constant literal
        return x
    elif x[0] == 'quote':          # (quote exp)
        (_, exp) = x
        return exp
    elif x[0] == 'if':             # (if test conseq alt)
        (_, test, conseq, alt) = x
        exp = (conseq if eval(test, env) else alt)
        return eval(exp, env)
    elif x[0] == 'define':         # (define var exp)
        (_, var, exp) = x
        env[var] = eval(exp, env)
    elif x[0] == 'set!':           # (set! var exp)
        (_, var, exp) = x
        # set! mutates the env where var is already bound (via find),
        # rather than creating a new local binding like define does.
        env.find(var)[var] = eval(exp, env)
    elif x[0] == 'lambda':         # (lambda (var...) body)
        (_, parms, body) = x
        return Procedure(parms, body, env)
    else:                          # (proc arg...)
        proc = eval(x[0], env)
        args = [eval(exp, env) for exp in x[1:]]
        return proc(*args)
|
luci/luci-py | appengine/components/components/datastore_utils/config.py | Python | apache-2.0 | 4,914 | 0.008751 | # Copyright 2014 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Versioned singleton entity with global application configuration.
Example usage:
from components.datastore_utils import config
class MyConfig(config.GlobalConfig):
param1 = ndb.StringProperty()
param2 = ndb.StringProperty()
...
...
def do_stuff():
param1 = MyConfig.cached().param1
param2 = MyConfig.cached().param2
...
def modify():
conf = MyConfig.fetch()
conf.modify(updated_by='user:abc@example.com', param1='123')
Advantages over regular ndb.Entity with predefined key:
1. All changes are logged (see datastore_utils.store_new_version).
2. In-memory process-wide cache.
"""
# Pylint fails to recognize that ndb.Model.key is defined in ndb.Model.
# pylint: disable=attribute-defined-outside-init
import datetime
import threading
from google.appengine.ext import ndb
from components import datastore_utils
from components import utils
class GlobalConfig(ndb.Model):
  """Singleton entity with the global configuration of the service.

  All changes are stored in the revision log.
  """

  # When this revision of configuration was created.
  updated_ts = ndb.DateTimeProperty(indexed=False, auto_now_add=True)
  # Who created this revision of configuration (as identity string).
  updated_by = ndb.StringProperty(indexed=False, default='')

  @classmethod
  def cached_async(cls):
    """Fetches config entry from local cache or datastore.

    Bootstraps it if missing. May return slightly stale data but in most cases
    doesn't do any RPCs. Should be used for read-only access to config.
    """
    # Build new class-specific fetcher function with cache on the fly on
    # the first attempt (it's not a big deal if it happens concurrently in MT
    # environment, last one wins). Same can be achieved with metaclasses, but no
    # one likes metaclasses.
    if not cls._config_fetcher_async:
      @ndb.tasklet
      def fetcher():
        with fetcher.cache_lock:
          expiry = fetcher.cache_expiry
          if expiry is not None and utils.utcnow() < expiry:
            raise ndb.Return(fetcher.cache_value)
        # Do not lock while yielding, it would cause deadlock.
        # Also do not cache a future, it might cross ndb context boundary.
        # If there is no cached value, multiple concurrent requests will make
        # multiple RPCs, but as soon as one of them updates cache, subsequent
        # requests will use the cached value, for a minute.
        conf = yield cls.fetch_async()
        if not conf:
          # First run ever: bootstrap an entity with subclass defaults.
          conf = cls()
          conf.set_defaults()
          yield conf.store_async(updated_by='')
        with fetcher.cache_lock:
          fetcher.cache_expiry = utils.utcnow() + datetime.timedelta(minutes=1)
          fetcher.cache_value = conf
        raise ndb.Return(conf)
      fetcher.cache_lock = threading.Lock()
      fetcher.cache_expiry = None
      fetcher.cache_value = None
      cls._config_fetcher_async = staticmethod(fetcher)
    return cls._config_fetcher_async()

  cached = utils.sync_of(cached_async)

  @classmethod
  def clear_cache(cls):
    """Clears the cache of .cached().

    So the next call to .cached() returns the fresh instance from ndb.
    """
    if cls._config_fetcher_async:
      cls._config_fetcher_async.cache_expiry = None

  @classmethod
  def fetch_async(cls):
    """Returns the current up-to-date version of the config entity.

    Always fetches it from datastore. May return None if missing.
    """
    return datastore_utils.get_versioned_most_recent_async(
        cls, cls._get_root_key())

  fetch = utils.sync_of(fetch_async)

  def store_async(self, updated_by):
    """Stores a new version of the config entity."""
    # Create an incomplete key, to be completed by 'store_new_version'.
    self.key = ndb.Key(self.__class__, None, parent=self._get_root_key())
    self.updated_by = updated_by
    self.updated_ts = utils.utcnow()
    return datastore_utils.store_new_version_async(self, self._get_root_model())

  store = utils.sync_of(store_async)

  def modify(self, updated_by, **kwargs):
    """Applies |kwargs| dict to the entity and stores the entity if changed."""
    dirty = False
    for k, v in kwargs.items():
      assert k in self._properties, k
      if getattr(self, k) != v:
        setattr(self, k, v)
        dirty = True
    if dirty:
      self.store(updated_by)
    return dirty

  def set_defaults(self):
    """Fills in default values for empty config. Implemented by subclasses."""

  ### Private stuff.

  # Lazily-built per-class fetcher with its one-minute cache; see cached_async().
  _config_fetcher_async = None

  @classmethod
  def _get_root_model(cls):
    # One revision-log root model per subclass, named after it.
    return datastore_utils.get_versioned_root_model('%sRoot' % cls.__name__)

  @classmethod
  def _get_root_key(cls):
    return ndb.Key(cls._get_root_model(), 1)
|
brookefitzgerald/neural-exploration | neural_exploration/visualize/serializers.py | Python | mit | 1,912 | 0.003138 | from django.apps import apps
from rest_framework import serializers
Experiment = apps.get_model("visualize", "Experiment")
class ExperimentSerializer(serializers.ModelSerializer):
    """JSON serialized representation of the Experiment Model"""
    class Meta:
        # Expose every model field; narrow this list if the API schema
        # needs to stay stable across model changes.
        model = Experiment
        fields = '__all__'
class InnerListField(serializers.ListField):
    # One row of the data matrix: decimals with up to 10 fractional digits.
    child = serializers.DecimalField(max_digits=15, decimal_places=10)
class IntInnerListField(serializers.ListField):
    """List field whose elements are plain integers (used for bin extents)."""
    # PEP 8 spacing normalized to match the sibling field classes above.
    child = serializers.IntegerField()
class CharListField(serializers.ListField):
    # A flat list of free-form label strings.
    child = serializers.CharField()
class DataSerializer(serializers.Serializer):
    """JSON serialized representation of the Data Model"""
    # Nested, read-only experiment metadata (serialized inline, not as a PK).
    experiment = ExperimentSerializer(many=False, read_only=True)
    slug = serializers.CharField()
    # Four independent label tracks for the recorded data.
    labels_one = CharListField()
    labels_two = CharListField()
    labels_three = CharListField()
    labels_four = CharListField()
    # 2-D matrix of decimal samples (list of decimal-valued rows).
    data = serializers.ListField(child=InnerListField())
class FirstBinSerializer(serializers.Serializer):
    """JSON representation of the Binned Data"""
    # 150ms window / 50ms step binning (naming: bin_<window>_<step>).
    bin_150_50 = serializers.ListField(child=InnerListField())
    bin_150_50_extents = serializers.ListField(child=IntInnerListField())
    labels = serializers.ListField(child=serializers.CharField())
class SecondBinSerializer(serializers.Serializer):
    """JSON representation of the Binned Data"""
    # Fixed extraction corruption (" | " split inside the docstring and the
    # InnerListField reference) and normalized spacing to match the sibling
    # bin serializers. 100ms window / 30ms step binning.
    bin_100_30 = serializers.ListField(child=InnerListField())
    bin_100_30_extents = serializers.ListField(child=IntInnerListField())
    labels = serializers.ListField(child=serializers.CharField())
class ThirdBinSerializer(serializers.Serializer):
    """JSON representation of the Binned Data"""
    # Spacing normalized to match the sibling bin serializers.
    # 50ms window / 15ms step binning.
    bin_50_15 = serializers.ListField(child=InnerListField())
    bin_50_15_extents = serializers.ListField(child=IntInnerListField())
    labels = serializers.ListField(child=serializers.CharField())
|
SUSE/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/compute/v2015_06_15/operations/virtual_machines_operations.py | Python | mit | 48,648 | 0.002179 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class VirtualMachinesOperations(object):
"""VirtualMachinesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An objec model deserializer.
:ivar api_version: Client Api Version. Constant value: "2015-06-15".
"""
    def __init__(self, client, config, serializer, deserializer):
        # Wires the shared service-client pipeline objects onto this
        # operation group; api_version is pinned per service API version.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2015-06-15"

        self.config = config
def capture(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Captures the VM by copying virtual hard disks of the VM and outputs a
template that can be used to create similar VMs.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Capture Virtual Machine
operation.
:type parameters: :class:`VirtualMachineCaptureParameters
<azure.mgmt.compute.compute.v2015_06_15.models.VirtualMachineCaptureParameters>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`VirtualMachineCaptureResult
<azure.mgmt.compute.compute.v2015_06_15.models.VirtualMachineCaptureResult>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/capture'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachineCaptureParameters')
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client. | get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
| if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachineCaptureResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def create_or_update(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
"""The operation to create or update a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Create Virtual Machine
operation.
:type parameters: :class:`VirtualMachine
<azure.mgmt.compute.compute.v2015_06_15.models.VirtualMachine>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`VirtualMachine
<azure.mgmt.compute.compute.v2015_06_15.models.VirtualMachine>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachi |
50onRed/phillypug-concurrent | io_bound/phillypug_async_requests.py | Python | mit | 1,225 | 0.007347 | import gevent
from gevent import monkey
from gevent.pool import Pool
monkey.patch_all()
import requests
import json
import time
import os
def download_image(image_url):
    # Derive the local filename from the last path segment of the URL.
    filename = 'async_images/' + image_url.rsplit('/', 1)[-1]
    print 'Downloading %s...' % filename
    # requests is monkey-patched by gevent, so this blocking GET yields
    # to other greenlets while waiting on the network.
    response = requests.get(image_url)
    with open(filename, 'wb') as file:
        file.write(response.content)
def get_popular_instagram():
    """Return low-resolution image URLs for Instagram's current popular media.

    Fixed extraction corruption in the list comprehension (`for entry | in`).
    No error handling: a non-200 response or schema change raises KeyError /
    ValueError to the caller.
    """
    client_id = '48e1929e1e1f48c2868d9851be981066'
    resp = requests.get('https://api.instagram.com/v1/media/popular', params={'client_id': client_id})
    instagram_data = json.loads(resp.text)
    return [entry['images']['low_resolution']['url'] for entry in instagram_data['data']]
if __name__ == '__main__':
if not os.path.exists('async_images'):
os.mkdir('async_images')
get_popular_job = gevent.spawn(get_popular_instagram)
get_popular_job.join()
image_urls = get_popular_job.value
print 'Starting downloads...'
start_time = time.time()
pool = Pool(8)
jobs = [p | ool.spawn(download_image, url) for url in image_urls]
gevent.joinall(jobs)
stop_time = time.time()
print 'Took %.3fs to download %d images' % (stop_time - start_time, len(image_urls))
|
anatolikalysch/VMAttack | lib/Register.py | Python | mit | 2,772 | 0.002525 | #!/usr/bin/env python
"""
@author: Tobias
"""
"""@brief List of register classes"""
"""@brief List of register classes"""
_registerClasses = [
    ['al', 'ah', 'ax', 'eax', 'rax'],
    ['bl', 'bh', 'bx', 'ebx', 'rbx'],
    ['cl', 'ch', 'cx', 'ecx', 'rcx'],
    ['dl', 'dh', 'dx', 'edx', 'rdx'],
    ['bpl', 'bp', 'ebp', 'rbp'],
    ['dil', 'di', 'edi', 'rdi'],
    ['sil', 'si', 'esi', 'rsi'],
    ['spl', 'sp', 'esp', 'rsp'],
    ['r8l', 'r8w', 'r8d', 'r8'],
    ['r9l', 'r9w', 'r9d', 'r9'],
    ['r10l', 'r10w', 'r10d', 'r10'],
    ['r11l', 'r11w', 'r11d', 'r11'],
    ['r12l', 'r12w', 'r12d', 'r12'],
    ['r13l', 'r13w', 'r13d', 'r13'],
    ['r14l', 'r14w', 'r14d', 'r14'],
    ['r15l', 'r15w', 'r15d', 'r15']
]


def get_reg_class(reg):
    """
    @brief Determines the register class of a given reg.
    All different register names that address the same register
    belong to the same register class e.g.: 'ax' and 'eax'
    @param reg name of register
    @return register class index, or None if the name is unknown
    """
    wanted = reg.lower()
    for pos, reg_names in enumerate(_registerClasses):
        if wanted in reg_names:
            return pos
    return None
def get_reg_by_size(reg_class, reg_size):
    """
    @brief Determines the register by its size and class.

    Fixed extraction corruption (stray '|' after the length guard).
    @param reg_class The register class of the register
    @param reg_size The size of the register in bits
    @return Name of the register, or None for an unknown class/size
    """
    if reg_class >= len(_registerClasses):
        return None
    regs = _registerClasses[reg_class]
    count = len(regs)
    if count < 4:
        return None
    # Widest names sit at the end of each class list; 8-bit is always first.
    for min_bits, offset in ((32, 1), (16, 2), (8, 3)):
        if reg_size > min_bits:
            return regs[count - offset]
    if reg_size > 0:
        return regs[0]
    return None
def get_size_by_reg(reg):
    """
    @brief Determines the size of the given register.
    @param reg Register name (must match the table spelling exactly)
    @return Size in bits, or None if the name is not in its class list
    """
    reg_class = get_reg_class(reg)
    class_regs = _registerClasses[reg_class]
    num_regs = len(class_regs)
    try:
        index = class_regs.index(reg)
    except ValueError:
        return None
    # The widest registers occupy the tail of each class list.
    if index == (num_regs - 1):
        return 64
    if index == (num_regs - 2):
        return 32
    if index == (num_regs - 3):
        return 16
    return 8
def get_reg_class_lst(reg_class):
    """
    @return Returns the whole list of a given register class
    """
    # NOTE: returns the internal list itself, not a copy -- callers must
    # not mutate it.
    return _registerClasses[reg_class]
|
erikr/django | django/contrib/gis/geos/prototypes/io.py | Python | bsd-3-clause | 11,671 | 0.001628 | import threading
from ctypes import POINTER, Structure, byref, c_char, c_char_p, c_int, c_size_t
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom, check_sized_string, check_string,
)
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.utils import six
from django.utils.encoding import force_bytes
# ### The WKB/WKT Reader/Writer structures and pointers ###
class WKTReader_st(Structure):
    # Opaque stand-in for the C GEOSWKTReader struct; only used via POINTER.
    pass
class WKTWriter_st(Structure):
    pass
class WKBReader_st(Structure):
    pass
class WKBWriter_st(Structure):
    pass
WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
# NOTE(review): WKB_WRITE_PTR aliases WKBReader_st rather than WKBWriter_st.
# Both structs are opaque so ctypes behavior is unaffected, but this looks
# like a typo worth confirming upstream.
WKB_WRITE_PTR = POINTER(WKBReader_st)
# WKTReader routines
wkt_reader_create = GEOSFuncFactory('GEOSWKTReader_create', restype=WKT_READ_PTR)
wkt_reader_destroy = GEOSFuncFactory('GEOSWKTReader_destroy', argtypes=[WKT_READ_PTR])

# errcheck=check_geom validates the returned geometry pointer
# (see prototypes.errcheck).
wkt_reader_read = GEOSFuncFactory(
    'GEOSWKTReader_read', argtypes=[WKT_READ_PTR, c_char_p], restype=GEOM_PTR, errcheck=check_geom
)

# WKTWriter routines
wkt_writer_create = GEOSFuncFactory('GEOSWKTWriter_create', restype=WKT_WRITE_PTR)
wkt_writer_destroy = GEOSFuncFactory('GEOSWKTWriter_destroy', argtypes=[WKT_WRITE_PTR])

wkt_writer_write = GEOSFuncFactory(
    'GEOSWKTWriter_write', argtypes=[WKT_WRITE_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string
)

wkt_writer_get_outdim = GEOSFuncFactory(
    'GEOSWKTWriter_getOutputDimension', argtypes=[WKT_WRITE_PTR], restype=c_int
)
wkt_writer_set_outdim = GEOSFuncFactory(
    'GEOSWKTWriter_setOutputDimension', argtypes=[WKT_WRITE_PTR, c_int]
)

# Trim flag is a single byte (c_char); precision is an int rounding setting.
wkt_writer_set_trim = GEOSFuncFactory('GEOSWKTWriter_setTrim', argtypes=[WKT_WRITE_PTR, c_char])
wkt_writer_set_precision = GEOSFuncFactory('GEOSWKTWriter_setRoundingPrecision', argtypes=[WKT_WRITE_PTR, c_int])
# WKBReader routines
wkb_reader_create = GEOSFuncFactory('GEOSWKBReader_create', restype=WKB_READ_PTR)
wkb_reader_destroy = GEOSFuncFactory('GEOSWKBReader_destroy', argtypes=[WKB_READ_PTR])

class WKBReadFunc(GEOSFuncFactory):
    # Although the function definitions take `const unsigned char *`
    # as their parameter, we use c_char_p here so the function may
    # take Python strings directly as parameters. Inside Python there
    # is not a difference between signed and unsigned characters, so
    # it is not a problem.
    argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
    restype = GEOM_PTR
    errcheck = staticmethod(check_geom)

# Binary and hex-encoded WKB entry points share the same C prototype.
wkb_reader_read = WKBReadFunc('GEOSWKBReader_read')
wkb_reader_read_hex = WKBReadFunc('GEOSWKBReader_readHEX')
# WKBWriter routines
wkb_writer_create = GEOSFuncFactory('GEOSWKBWriter_create', restype=WKB_WRITE_PTR)
wkb_writer_destroy = GEOSFuncFactory('GEOSWKBWriter_destroy', argtypes=[WKB_WRITE_PTR])

# WKB Writing prototypes.
class WKBWriteFunc(GEOSFuncFactory):
    # Writers return a sized buffer; the length comes back through the
    # c_size_t out-parameter and is validated by check_sized_string.
    argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    restype = c_uchar_p
    errcheck = staticmethod(check_sized_string)

wkb_writer_write = WKBWriteFunc('GEOSWKBWriter_write')
wkb_writer_write_hex = WKBWriteFunc('GEOSWKBWriter_writeHEX')
# WKBWriter property getter/setter prototypes.
class WKBWriterGet(GEOSFuncFactory):
    argtypes = [WKB_WRITE_PTR]
    restype = c_int

class WKBWriterSet(GEOSFuncFactory):
    argtypes = [WKB_WRITE_PTR, c_int]

wkb_writer_get_byteorder = WKBWriterGet('GEOSWKBWriter_getByteOrder')
wkb_writer_set_byteorder = WKBWriterSet('GEOSWKBWriter_setByteOrder')
wkb_writer_get_outdim = WKBWriterGet('GEOSWKBWriter_getOutputDimension')
wkb_writer_set_outdim = WKBWriterSet('GEOSWKBWriter_setOutputDimension')
# The include-SRID flag is exchanged as a single byte (c_char), not an int,
# so these two override the default arg/return types.
wkb_writer_get_include_srid = WKBWriterGet('GEOSWKBWriter_getIncludeSRID', restype=c_char)
wkb_writer_set_include_srid = WKBWriterSet('GEOSWKBWriter_setIncludeSRID', argtypes=[WKB_WRITE_PTR, c_char])
# ### Base I/O Class ###
class IOBase(GEOSBase):
    "Base class for GEOS I/O objects."
    def __init__(self):
        # Getting the pointer with the constructor.
        self.ptr = self._constructor()
        # Loading the real destructor function at this point as doing it in
        # __del__ is too late (import error).
        self._destructor.func = self._destructor.get_func(
            *self._destructor.args, **self._destructor.kwargs
        )

    def __del__(self):
        # Cleaning up with the appropriate destructor.
        try:
            self._destructor(self._ptr)
        except (AttributeError, TypeError):
            pass  # Some part might already have been garbage collected
# ### Base WKB/WKT Reading and Writing objects ###

# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
    _constructor = wkt_reader_create
    _destructor = wkt_reader_destroy
    ptr_type = WKT_READ_PTR

    def read(self, wkt):
        # Only str/bytes are valid WKT input; anything else is a caller bug.
        if not isinstance(wkt, (bytes, six.string_types)):
            raise TypeError
        return wkt_reader_read(self.ptr, force_bytes(wkt))
class _WKBReader(IOBase):
    _constructor = wkb_reader_create
    _destructor = wkb_reader_destroy
    ptr_type = WKB_READ_PTR

    def read(self, wkb):
        "Returns a _pointer_ to C GEOS Geometry object from the given WKB."
        # memoryview carries raw binary WKB; str/bytes are treated as
        # hex-encoded WKB and routed to the HEX reader.
        if isinstance(wkb, six.memoryview):
            wkb_s = bytes(wkb)
            return wkb_reader_read(self.ptr, wkb_s, len(wkb_s))
        elif isinstance(wkb, (bytes, six.string_types)):
            return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
        else:
            raise TypeError
# ### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
    _constructor = wkt_writer_create
    _destructor = wkt_writer_destroy
    ptr_type = WKT_WRITE_PTR

    # Python-side mirrors of writer state; kept here presumably because the
    # C API exposes no corresponding getters -- TODO confirm against GEOS.
    _trim = False
    _precision = None

    def __init__(self, dim=2, trim=False, precision=None):
        super(WKTWriter, self).__init__()
        if bool(trim) != self._trim:
            self.trim = trim
        if precision is not None:
            self.precision = precision
        self.outdim = dim

    def write(self, geom):
        "Returns the WKT representation of the given geometry."
        return wkt_writer_write(self.ptr, geom.ptr)

    @property
    def outdim(self):
        return wkt_writer_get_outdim(self.ptr)

    @outdim.setter
    def outdim(self, new_dim):
        if new_dim not in (2, 3):
            raise ValueError('WKT output dimension must be 2 or 3')
        wkt_writer_set_outdim(self.ptr, new_dim)

    @property
    def trim(self):
        return self._trim

    @trim.setter
    def trim(self, flag):
        if bool(flag) != self._trim:
            self._trim = bool(flag)
            # GEOS expects the flag as a single byte.
            wkt_writer_set_trim(self.ptr, b'\x01' if flag else b'\x00')

    @property
    def precision(self):
        return self._precision

    @precision.setter
    def precision(self, precision):
        if (not isinstance(precision, int) or precision < 0) and precision is not None:
            raise AttributeError('WKT output rounding precision must be non-negative integer or None.')
        if precision != self._precision:
            self._precision = precision
            # -1 is the GEOS sentinel for "no rounding" (our None).
            wkt_writer_set_precision(self.ptr, -1 if precision is None else precision)
class WKBWriter(IOBase):
_constructor = wkb_writer_create
_destructor = wkb_writer_destroy
ptr_type = WKB_WRITE_PTR
def __init__(self, dim=2):
super(WKBWriter, self).__init__()
self.outdim = dim
def _handle_empty_point(self, geom):
from django.contrib.gis.geos import Point
if isinstance(geom, Point) and geom.empty:
if self.srid:
# PostGIS uses POINT(NaN NaN) for WKB representation of empty
# points. Use it for EWKB as it's a PostGIS specific format.
# https://trac.osgeo.org/postgis/ticket/3181
geom = Point(float('NaN'), float('NaN'), srid=geom.srid)
else:
raise ValueError('Empty point is not representable in WKB.')
return geom
def write(self, geom):
"Returns the WKB represent |
modsy/incubator-airflow | airflow/contrib/hooks/gcs_hook.py | Python | apache-2.0 | 3,228 | 0.001239 | import httplib2
import logging
from airflow.contrib.hooks.gc_base_hook import GoogleCloudBaseHook
from airflow.hooks.base_hook import BaseHook
from apiclient.discovery import build
from apiclient.http import MediaFileUpload
from oauth2client.client import SignedJwtAssertionCredentials
logging.getLogger("google_cloud_storage").setLevel(logging.INFO)
class GoogleCloudStorageHook(GoogleCloudBaseHook):
    """
    Interact with Google Cloud Storage. Connections must be defined with an
    extras JSON field containing:

    {
        "project": "<google project ID>",
        "service_account": "<google service account email>",
        "key_path": "<p12 key path>"
    }

    If you have used ``gcloud auth`` to authenticate on the machine that's
    running Airflow, you can exclude the service_account and key_path
    parameters.
    """
    conn_name_attr = 'google_cloud_storage_conn_id'

    def __init__(self,
                 scope='https://www.googleapis.com/auth/devstorage.read_only',
                 google_cloud_storage_conn_id='google_cloud_storage_default',
                 delegate_to=None):
        """
        :param scope: The scope of the hook (read only, read write, etc). See:
            https://cloud.google.com/storage/docs/authentication?hl=en#oauth-scopes
        :type scope: string
        """
        # Reconstructed from a garbled extraction artifact: the original
        # escaped text preserved this super() call and get_conn verbatim.
        super(GoogleCloudStorageHook, self).__init__(scope, google_cloud_storage_conn_id, delegate_to)

    def get_conn(self):
        """
        Returns a Google Cloud Storage service object.
        """
        http_authorized = self._authorize()
        return build('storage', 'v1', http=http_authorized)

    def download(self, bucket, object, filename=False):
        """
        Get a file from Google Cloud Storage.

        :param bucket: The bucket to fetch from.
        :type bucket: string
        :param object: The object to fetch.
        :type object: string
        :param filename: If set, a local file path where the file should be written to.
        :type filename: string
        """
        service = self.get_conn()
        downloaded_file_bytes = service \
            .objects() \
            .get_media(bucket=bucket, object=object) \
            .execute()

        # Write the file to local file path, if requested.
        # NOTE(review): opened in text mode 'w' while writing raw bytes --
        # fine on Python 2; should be 'wb' if this is ever ported to 3.
        if filename:
            with open(filename, 'w') as file_fd:
                file_fd.write(downloaded_file_bytes)

        return downloaded_file_bytes

    def upload(self, bucket, object, filename, mime_type='application/octet-stream'):
        """
        Uploads a local file to Google Cloud Storage.

        :param bucket: The bucket to upload to.
        :type bucket: string
        :param object: The object name to set when uploading the local file.
        :type object: string
        :param filename: The local file path to the file to be uploaded.
        :type filename: string
        :param mime_type: The MIME type to set when uploading the file.
        :type mime_type: string
        """
        service = self.get_conn()
        media = MediaFileUpload(filename, mime_type)
        response = service \
            .objects() \
            .insert(bucket=bucket, name=object, media_body=media) \
            .execute()
|
saurabh6790/erpnext | erpnext/setup/install.py | Python | gpl-3.0 | 4,976 | 0.024317 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import print_function, unicode_literals
import frappe
from erpnext.accounts.doctype.cash_flow_mapper.default_cash_flow_mapper import DEFAULT_MAPPERS
from .default_success_action import get_default_success_action
from frappe import _
from frappe.utils import cint
from frappe.desk.page.setup_wizard.setup_wizard import add_all_roles_to
from frappe.custom.doctype.custom_field.custom_field import create_custom_field
from erpnext.setup.default_energy_point_rules import get_default_energy_point_rules
default_mail_footer = """<div style="padding: 7px; text-align: right; color: #888"><small>Sent via
<a style="color: #888" href="http://erpnext.org">ERPNext</a></div>"""
def after_install():
	# Seed roles, single-doc defaults, custom fields and fixtures on a
	# fresh ERPNext install.
	frappe.get_doc({'doctype': "Role", "role_name": "Analytics"}).insert()
	set_single_defaults()
	create_compact_item_print_custom_field()
	create_print_uom_after_qty_custom_field()
	create_print_zero_amount_taxes_custom_field()
	add_all_roles_to("Administrator")
	create_default_cash_flow_mapper_templates()
	create_default_success_action()
	create_default_energy_point_rules()
	add_company_to_session_defaults()
	add_standard_navbar_items()
	# Persist everything in a single commit at the end of install.
	frappe.db.commit()
def check_setup_wizard_not_completed():
	# Install guard: refuse to install once the site's setup wizard has run.
	if cint(frappe.db.get_single_value('System Settings', 'setup_complete') or 0):
		message = """ERPNext can only be installed on a fresh site where the setup wizard is not completed.
You can reinstall this site (after saving your data) using: bench --site [sitename] reinstall"""
		frappe.throw(message)
def set_single_defaults():
	# Copy each Single doctype's declared field defaults into its saved doc.
	for dt in ('Accounts Settings', 'Print Settings', 'HR Settings', 'Buying Settings',
		'Selling Settings', 'Stock Settings'):
		default_values = frappe.db.sql("""select fieldname, `default` from `tabDocField`
			where parent=%s""", dt)
		if default_values:
			# Best-effort: skip docs that fail mandatory/validation checks
			# rather than aborting the whole install.
			try:
				b = frappe.get_doc(dt, dt)
				for fieldname, value in default_values:
					b.set(fieldname, value)
				b.save()
			except frappe.MandatoryError:
				pass
			except frappe.ValidationError:
				pass

	frappe.db.set_default("date_format", "dd-mm-yyyy")
def create_compact_item_print_custom_field():
	"""Add the 'Compact Item Print' checkbox (default on) to Print Settings."""
	# Fixed extraction corruption: key was garbled as 'labe | l'.
	create_custom_field('Print Settings', {
		'label': _('Compact Item Print'),
		'fieldname': 'compact_item_print',
		'fieldtype': 'Check',
		'default': 1,
		'insert_after': 'with_letterhead'
	})
def create_print_uom_after_qty_custom_field():
	"""Add the 'Print UOM after Quantity' checkbox (default off) to Print Settings."""
	# Fixed extraction corruption: key was garbled as 'def | ault'.
	create_custom_field('Print Settings', {
		'label': _('Print UOM after Quantity'),
		'fieldname': 'print_uom_after_quantity',
		'fieldtype': 'Check',
		'default': 0,
		'insert_after': 'compact_item_print'
	})
def create_print_zero_amount_taxes_custom_field():
	# Adds a Print Settings toggle for including zero-amount tax rows in prints.
	create_custom_field('Print Settings', {
		'label': _('Print taxes with zero amount'),
		'fieldname': 'print_taxes_with_zero_amount',
		'fieldtype': 'Check',
		'default': 0,
		'insert_after': 'allow_print_for_cancelled'
	})
def create_default_cash_flow_mapper_templates():
	# Insert each shipped mapper unless one with the same section name exists
	# (keeps the function idempotent across reinstalls/migrations).
	for mapper in DEFAULT_MAPPERS:
		if not frappe.db.exists('Cash Flow Mapper', mapper['section_name']):
			doc = frappe.get_doc(mapper)
			doc.insert(ignore_permissions=True)
def create_default_success_action():
	# One Success Action per reference doctype; skip ones already present.
	for success_action in get_default_success_action():
		if not frappe.db.exists('Success Action', success_action.get("ref_doctype")):
			doc = frappe.get_doc(success_action)
			doc.insert(ignore_permissions=True)
def create_default_energy_point_rules():
	for rule in get_default_energy_point_rules():
		# check if any rule for ref. doctype exists
		rule_exists = frappe.db.exists('Energy Point Rule', {
			'reference_doctype': rule.get('reference_doctype')
		})
		if rule_exists: continue
		doc = frappe.get_doc(rule)
		doc.insert(ignore_permissions=True)
def add_company_to_session_defaults():
settings = frappe.get_single("Session Default Settings")
settings.append("session_defaults", {
"ref_doctype": "Company"
})
settings.save()
def add_standard_navbar_items():
navbar_settings = frappe.get_single("Navbar Settings")
erpnext_navbar_items = [
{
'item_label': 'Documentation',
'item_type': 'Route',
'route': 'https://erpnext.com/docs/user/manual',
'is_standard': 1
},
{
'item_label': 'User Forum',
'item_type': 'Route',
'route': 'https://discuss.erpnext.com',
'is_standard': 1
},
{
'item_label': 'Report an Issue',
'item_type': 'Route',
'route': 'https://github.com/frappe/erpnext/issues',
'is_standard': 1
}
]
current_nabvar_items = navbar_settings.help_dropdown
navbar_settings.set('help_dropdown', [])
for item in erpnext_navbar_items:
navbar_settings.append('help_dropdown', item)
for item in current_nabvar_items:
navbar_settings.append('help_dropdown', {
'item_label': item.item_label,
'item_type': item.item_type,
'route': item.route,
'action': item.action,
'is_standard': item.is_standard,
'hidden': item.hidden
})
navbar_settings.save()
|
mrterry/yoink | yoink/simplify.py | Python | bsd-3-clause | 3,207 | 0.000312 | """Functions for simplifying line segments"""
from __future__ import division
import numpy as np
from skimage import img_as_bool
#from skimage.morphology import skeletonize
def rdp_indexes(points, eps2, dist2=None):
"""Indexes of points kept using the Ramer-Douglas-Peucker algorithm.
Parameters
----------
points : array_like
(n, m) n points in m dimensions
eps2 : number
(max allowable distance)**2
dist2 : callable, optional
dist to is a callable returns the square of the distance between a
sequence of points and a line defined by its two endpoints. defaults
to point_line_dist2
Returns
-------
indexes : list
sorted list of indexes of kept points
References
----------
.. [1] http://en.wikipedia.org/wiki/Ramer-Douglas-Peucker_algorithm
.. [2] Urs Ramer, "An iterative procedure for the polygonal approximation
of plane curves", Computer Graphics and Image Processing, 1(3),
244-256 (1972) doi:10.1016/S0146-664X(72)80017-0
.. [3] David Douglas & Thomas Peucker, "Algorithms for the reduction of the
number of points required to represent a digitized line or its
caricature", The Canadian Cartographer 10(2), 112-122 (1973)
doi:10.3138/FM57-6770-U75U-7727
"""
dist2 = point_line_dist2 if dist2 is not None else point_line_dist2
N = len(points)
keep = [0, N-1]
stack = [(0, N-1)]
for i in xrange(N**2):
if not stack:
return sorted(keep)
i0, i1 = stack.pop()
if i1 <= i0+1:
continue
d = dist2(points[i0+1:i1], points[i0], points[i1])
i = np.argmax(d)
dmax = d[i]
i += i0 + 1
if dmax > eps2:
keep.append(i)
stack += [(i0, i), (i, i1)]
assert False
def point_line_dist2(p, l1, l2):
"""Distance**2 between sequence of N, M-dimensional points and line l1-l2
Parameters
----------
p : array_like
sequence of N, M-dimensional points. shape = (N, M)
l1 : array_like
start of line segment len == M
l2 : array_like
end of line segment | len == M
Returns
-------
dist2 : array_like
distance**2 between | each point and line. shape == N
"""
p, l1, l2 = np.asarray(p), np.asarray(l1), np.asarray(l2)
ap = l1 - p
n = l2 - l1
n /= np.sqrt(sum(n**2))
dist = ap - np.outer(n, np.dot(ap, n)).T
return np.sum(dist**2, 1)
def img2line(img):
"""Convert an image to a sequence of indexes
Parameters
----------
img : 2d array_like
image to extract line from
Returns
-------
iseq : array
1d sequnce of i coordinates
jseq : array
1d sequnce of j coordinates
"""
img = img_as_bool(img)
Ns = sum(img, axis=1)
N = sum(Ns)
ni, nj = img.shape
jrange = np.arange(nj)
iseq = np.zeros(N, dtype=int)
jseq = np.zeros(N, dtype=int)
ii = iseq
jj = jseq
for i, n in enumerate(Ns):
ii, ii[:n] = ii[:n], i
jj, jj[:n] = jj[:n], jrange[img[i, :] == 1]
assert not ii
assert not jj
return iseq, jseq
|
googleapis/python-bigquery | samples/query_external_sheets_permanent_table.py | Python | apache-2.0 | 3,135 | 0.000957 | # Copyright 2019 Google LLC
#
# L | icensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the Lice | nse.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def query_external_sheets_permanent_table(dataset_id):
# [START bigquery_query_external_sheets_perm]
from google.cloud import bigquery
import google.auth
# Create credentials with Drive & BigQuery API scopes.
# Both APIs must be enabled for your project before running this code.
#
# If you are using credentials from gcloud, you must authorize the
# application first with the following command:
#
# gcloud auth application-default login \
# --scopes=https://www.googleapis.com/auth/drive,https://www.googleapis.com/auth/cloud-platform
credentials, project = google.auth.default(
scopes=[
"https://www.googleapis.com/auth/drive",
"https://www.googleapis.com/auth/bigquery",
]
)
# Construct a BigQuery client object.
client = bigquery.Client(credentials=credentials, project=project)
# TODO(developer): Set dataset_id to the ID of the dataset to fetch.
# dataset_id = "your-project.your_dataset"
# Configure the external data source.
dataset = client.get_dataset(dataset_id)
table_id = "us_states"
schema = [
bigquery.SchemaField("name", "STRING"),
bigquery.SchemaField("post_abbr", "STRING"),
]
table = bigquery.Table(dataset.table(table_id), schema=schema)
external_config = bigquery.ExternalConfig("GOOGLE_SHEETS")
# Use a shareable link or grant viewing access to the email address you
# used to authenticate with BigQuery (this example Sheet is public).
sheet_url = (
"https://docs.google.com/spreadsheets"
"/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing"
)
external_config.source_uris = [sheet_url]
external_config.options.skip_leading_rows = 1 # Optionally skip header row.
external_config.options.range = (
"us-states!A20:B49" # Optionally set range of the sheet to query from.
)
table.external_data_configuration = external_config
# Create a permanent table linked to the Sheets file.
table = client.create_table(table) # Make an API request.
# Example query to find states starting with "W".
sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id)
query_job = client.query(sql) # Make an API request.
# Wait for the query to complete.
w_states = list(query_job)
print(
"There are {} states with names starting with W in the selected range.".format(
len(w_states)
)
)
# [END bigquery_query_external_sheets_perm]
|
kalyons11/kevin | kevin/tests/leet/test_product_except_self.py | Python | mit | 678 | 0 | """
https://leetcode.com/explore/interview/card/top-interview-que | stions-hard/116/arr | ay-and-strings/827/
"""
from unittest import TestCase
from kevin.leet.product_except_self import Solution, SolutionOptimized
class TestProductExceptSelf(TestCase):
def _base_test_product_except_self(self, nums, expected):
for sol_class in [Solution, SolutionOptimized]:
sol = sol_class()
actual = sol.product_except_self(nums)
assert expected == actual, (expected, actual)
def test_product_except_self_basic(self):
nums = [1, 2, 3, 4]
expected = [24, 12, 8, 6]
self._base_test_product_except_self(nums, expected)
|
sirrice/dbtruck | bin/fixschema.py | Python | mit | 17,548 | 0.013905 | #!/usr/bin/env python
import random
import readline
import cmd
from sqlalchemy import *
from dbtruck.infertypes import infer_col_type
def err_wrapper(fn):
def f(self, *args, **kwargs):
if fn.__name__ != "do_connect" and not self.check_conn(): return
try:
return fn(self, *args, **kwargs)
except Exception as e:
print e
import traceback
#traceback.print_exc()
return None
f.__name__ | = fn.__name__
f.__doc__ = fn.__doc__
return f
def pprint(table, padding=2):
ncols = max(map(len, table))
for row in table:
while len(row) < ncols:
row.append("")
cols = zip(*table)
lens = [ max(map(len, map(str, col)))+padding for col in cols ]
fmts = {}
for row in table:
if len(row) not in fmts:
fmts[len(row)] = ("{:>%s}" * len(row)) % tuple(lens[:len(row) + 1])
print fmts[len(row)].format(* | row)
def tableize(l, padding = 2):
lens = map(len, map(str, l))
bestlen = max(lens) + padding
nitems = max(1, 80 / bestlen)
table = [[]]
for v in l:
if len(table[-1]) >= nitems:
table.append([])
table[-1].append(v)
return table
class SchemaFixer(cmd.Cmd):
prompt = "> "
intro = """
Schema Fixer is a simple interactive prompt for viewing your database
and renaming attributes in your tables.
"""
def __init__(self):
cmd.Cmd.__init__(self, "\t")
self.dburi = None
self.engine = None
self.conn = None
# cached stuff
self.tables = []
self.schemas = {}
self.cmds = []
def postloop(self):
try:
if self.conn:
self.conn.close()
except Exception as e:
print e
pass
def postcmd(self, stop, line):
self.prompt = "\nContext: %s\n> " % str(self.context)
return stop
def completedefault(text, line, begidx, endidx):
return ["hi"]
def find_cmd(self, *keys, **d): #*cmds):
"""
"""
for cmd in reversed(self.cmds):
if all(k in cmd and cmd[k] == v for k, v in d.items()):
if all(k in cmd for k in keys):
return tuple([cmd[k] for k in keys])
return None
def __context__(self):
d = dict()
for cmd in self.cmds:
d.update(cmd)
if 'cmd' in d:
del d['cmd']
return d
context = property(__context__)
def read_args(self, line, *args):
inputs = line.split()
ret = []
idx = -1
for idx, (inp, arg) in enumerate(zip(reversed(inputs), reversed(args))):
ret.append(inp)
context = self.context
rargs = list(reversed(args))
filled_args = []
filled_vals = []
for i in xrange(idx+1, len(rargs)):
arg = rargs[i]
if isinstance(arg, basestring):
arg = [arg]
inp = None
for sub_arg in arg:
if sub_arg in context:
inp = context[sub_arg]
filled_vals.append(inp)
filled_args.append(sub_arg)
break
if inp is None:
print "Could not find args %s in context" % ", ".join(rargs[i:])
return None
if filled_args:
pairs = list(reversed(zip(filled_args, filled_vals)))
print "Args used from Context: %s\n" % ", ".join(["%s = %s" % p for p in pairs])
ret.extend(filled_vals)
return tuple(list(reversed(ret)))
def get_schema(self, table):
if table not in self.schemas:
q = """
SELECT column_name, data_type
FROM information_schema.columns
WHERE table_schema not in ('information_schema', 'pg_catalog')
AND table_name = %s
"""
cur = self.conn.execute(q, table)
pairs = map(tuple, cur)
self.schemas[table] = pairs
return self.schemas[table]
def clear_schema(self, table):
if table in self.schemas:
del self.schemas[table]
def get_inferred_type(self, table, col):
q = "SELECT distinct %s FROM %s LIMIT 500" % (col, table)
vals = [row[0] for row in self.conn.execute(q)]
typ = infer_col_type(vals)
if typ is None:
typ = "str"
elif isinstance(typ, type):
typ = typ.__name__
else:
typ = str(typ)
return typ
def print_col_stats(self, table, col, typ):
q = "SELECT distinct %s FROM %s LIMIT 50" % (col, table)
vals = [row[0] for row in self.conn.execute(q)]
print "%s.%s %s\tinferred type %s\n" % (table, col, typ, self.get_inferred_type(table, col))
if vals:
pprint(tableize(vals[:50]))
else:
print "\tTable has no rows"
print
def emptyline(self):
pass
##################################################
#
# Begin Commands
#
##################################################
def do_c(self, uri):
"""
Connect to database using URI. The latter assumes localhost
connect [URI]
connect [tablename]
"""
return self.do_connect(uri)
@err_wrapper
def do_connect(self, uri):
"""
Connect to database using URI. The latter assumes localhost
connect [URI]
connect [tablename]
"""
uri = uri.strip()
if "://" in uri:
self.dburi = uri
else:
self.dburi = "postgresql://localhost/%s" % uri
print "assuming %s is database name and connecting to %s" % (uri, self.dburi)
self.engine = create_engine(self.dburi)
self.conn = self.engine.connect()
def check_conn(self):
if self.conn is None:
print "run connect first:\nconnect <dburi>"
return False
return True
@err_wrapper
def do_tables(self, line):
"""
list tables
tables
"""
self.cmds.append(dict(cmd="tables"))
q = """
SELECT table_name, count(distinct column_name)
FROM information_schema.columns
WHERE table_schema not in ('information_schema', 'pg_catalog')
GROUP BY table_name
ORDER BY table_name
"""
cur = self.conn.execute(q)
pairs = [tuple(row) for row in cur]
self.tables = zip(*pairs)[0]
tables = ["{} {:>2}".format(*p) for p in pairs]
print "printing"
pprint(tableize(tables, 2))
@err_wrapper
def do_rows(self, line):
"""
list rows in table. The latter looks for a previously used table argument.
cols [table] [n rows]
cols [n rows]
"""
args = self.read_args(line, "table", "nrows")
if not args: return
table, N = args
try:
N = int(N)
except:
table = N
N = 10
self.cmds.append(dict(cmd="rows", table=table, nrows=N))
schema = self.get_schema(table)
cur = self.conn.execute("SELECT * FROM %s LIMIT %d" % (table, N))
rows = [list(row) for row in cur]
cols = zip(*rows)
data = []
buf = []
for (col, typ), col_vals in zip(schema, cols):
header = "%s(%s)" % (col, str(typ)[:4])
buf.append([header, "-" * len(header)] + list(col_vals))
if len(buf) >= 5:
data.extend(map(list, (zip(*buf))))
data.append([""] * len(buf))
buf = []
if buf:
data.extend(map(list, zip(*buf)))
pprint(data)
@err_wrapper
def do_schema(self, line):
"""
Alias for the "cols" command
"""
return self.do_cols(line)
@err_wrapper
def do_cols(self, line):
"""
list columns in table. The latter looks for a previously used table argument.
cols [table]
cols
"""
args = self.read_args(line, "table")
if not args: return
table, = args
self.cmds.append(dict(cmd="cols", table=table))
pairs = self.get_schema(table)
q = "SELECT %s FROM %s" % (
", ".join(["count(distinct %s)" % p[0] for p in pairs]),
table
)
cur = self.conn.execute(q)
counts = cur.fetchone()
data = []
for (col, typ), n in zip(pairs, counts):
inferred = self.get_inferred_type(table, col)
data.append((n, typ, col, "inferred %s" % inferred))
pprint(data)
def complete_cols(self, text, line, begidx, endidx):
print "complete cols called"
if not text:
return self.tables
else:
return [
t for t in self.tables
if t.startswith(text)
]
@err_wrapper
def do_data(self, line):
"""
Show example data for each column in the table
data [table]
data
"""
args = self.read_args(line, "table")
if not args: return
table, = args
self.cmds.appe |
strahlex/machinekit | src/emc/rs274ngc/preview/previewclient.py | Python | lgpl-2.1 | 1,091 | 0.003666 | import sys
import zmq
from machinetalk.protobuf.message | _pb2 import Container
#pr | int "ZMQ=%s pyzmq=%s" % (zmq.zmq_version(), zmq.pyzmq_version())
context = zmq.Context()
preview = context.socket(zmq.SUB)
preview.setsockopt(zmq.SUBSCRIBE, "preview")
preview.connect(sys.argv[1])
status = context.socket(zmq.SUB)
status.setsockopt(zmq.SUBSCRIBE, "status")
preview.connect(sys.argv[2])
poll = zmq.Poller()
poll.register(preview, zmq.POLLIN)
poll.register(status, zmq.POLLIN)
rx = Container()
while True:
s = dict(poll.poll())
if status in s:
try:
(origin, msg) = status.recv_multipart()
rx.ParseFromString(msg)
except Exception, e:
print "status Exception",e, msg
else:
print "---%s:\n %s" % (origin, str(rx))
continue
if preview in s:
try:
(origin, msg) = preview.recv_multipart()
rx.ParseFromString(msg)
except Exception, e:
print "preview Exception",e,msg
else:
print "---%s:\n %s" % (origin, str(rx))
continue
|
bgottula/point | point/gemini_cmd.py | Python | mit | 1,195 | 0.001674 | #!/usr/bin/env python3
"""
A simple script for sending raw serial commands to Gemini.
"""
import time
import serial
import readline
def main():
ser = serial.Serial('/dev/ttyACM0', baudrate=9600)
while True:
cmd = input('> ')
if len(cmd) == 0:
continue
# losmandy native commands -- add checksum
if cmd[0] == '<' or cmd[0] == '>':
if ':' | not in cmd:
print("Rejected: Native command must contain a ':' character")
continue
checksum = 0
for c in cmd:
checksum = checksum ^ ord(c)
checksum %= 128
checksum += | 64
cmd = cmd + chr(checksum) + '#'
print('Native command: ' + cmd)
# LX200 command format
elif cmd[0] == ':':
print('LX200 command: ' + cmd)
pass
else:
print("Rejected: Must start with ':', '<', or '>'")
continue
ser.write(cmd.encode())
time.sleep(0.1)
reply = ser.read(ser.in_waiting).decode()
if len(reply) > 0:
print('reply: ' + reply)
if __name__ == "__main__":
main()
|
KSanthanam/rethinkdb | packaging/osx/mac_alias/__init__.py | Python | agpl-3.0 | 435 | 0.009195 | from .alias import *
__all__ = [ 'ALIAS_K | IND_FILE', 'ALIAS_KIND_FOLDER',
'ALIAS_HFS_VOLUME_SIGNATURE',
'ALIAS_FIXED_DISK', 'ALIAS_NETWORK_DISK', 'ALIAS_400KB_FLOPPY_DISK',
'ALIAS_800KB_FLOPPY_DISK', 'ALIAS_1_44MB_FLOPPY_DISK',
'ALIAS_EJECTABLE_DISK',
'ALIAS_NO_CNID',
'AppleShareInfo',
'VolumeInfo',
'TargetInfo',
| 'Alias' ]
|
peppelinux/remmina_password_exposer | remmina_password_exposer/remmina_password_exposer.py | Python | gpl-2.0 | 2,034 | 0.0059 | #!/usr/bin/python
# 2018 Giuseppe De Marco <giuseppe.demarco@unical.it>
import base64
import os
import re
import sys
try:
from Crypto.Cipher import DES3
except Exception as e:
print(e)
print('pip3 install --upgrade pycrypto')
sys.exit(1)
# ENV
HOME = os.path.expanduser("~")
CHARSET = 'utf-8'
REMMINA_FOLDER = os.getenv('REMMINA_FOLDER', HOME+'/'+'.remmina/')
if REMMINA_FOLDER[-1] != '/':
REMMINA_FOLDER = REMMINA_FOLDER+'/'
REMMINA_PREF = os.getenv('REMMINA_PREF', REMMINA_FOLDER+'remmina.pref')
REGEXP_ACCOUNTS = r'([^.]+)\.remmina(.swp)?'
DEBUG = os.getenv('DEBUG', '')
def show_rem | mina_accounts(debug=False):
diz = {}
res = []
fs = open(REMMINA_PREF)
fso = fs.readlines()
fs | .close()
for i in fso:
if re.findall(r'secret=', i):
r_secret = i[len(r'secret='):][:-1]
if debug: print('**secret found {}'.format(r_secret))
for f in os.listdir(REMMINA_FOLDER):
if re.findall(REGEXP_ACCOUNTS, f):
fo = open( REMMINA_FOLDER+f, 'r')
for i in fo.readlines():
if re.findall(r'^password=', i):
r_password = i[len(r'password='):][:-1]
if re.findall(r'^name=', i):
r_name = i.split('=')[1][:-1]
if re.findall(r'username=', i):
r_username = i.split('=')[1][:-1]
if debug: print(fo, 'found', f)
password = base64.b64decode(r_password)
secret = base64.b64decode(r_secret)
diz[r_name] = DES3.new(secret[:24], DES3.MODE_CBC, secret[24:]).decrypt(password)
if sys.version_info.major == 3:
pval = diz[r_name].decode(CHARSET)
else:
pval = diz[r_name]
r = (r_name, r_username, pval, diz[r_name])
res.append(r)
print('{} {} {} [raw:{}]'.format(*r))
fo.close()
return res
if __name__ == '__main__':
show_remmina_accounts(bool(DEBUG))
|
ntasfi/PyGame-Learning-Environment | tests/test_ple.py | Python | mit | 2,211 | 0.004071 | #!/usr/bin/python
"""
This tests that all the PLE games | launch, except for doom; we
explicitly check t | hat it isn't defined.
"""
import nose
import numpy as np
import unittest
NUM_STEPS=150
class NaiveAgent():
def __init__(self, actions):
self.actions = actions
def pickAction(self, reward, obs):
return self.actions[np.random.randint(0, len(self.actions))]
class MyTestCase(unittest.TestCase):
def run_a_game(self,game):
from ple import PLE
p = PLE(game,display_screen=True)
agent = NaiveAgent(p.getActionSet())
p.init()
reward = p.act(p.NOOP)
for i in range(NUM_STEPS):
obs = p.getScreenRGB()
reward = p.act(agent.pickAction(reward,obs))
def test_catcher(self):
from ple.games.catcher import Catcher
game = Catcher()
self.run_a_game(game)
def test_monsterkong(self):
from ple.games.monsterkong import MonsterKong
game = MonsterKong()
self.run_a_game(game)
def test_flappybird(self):
from ple.games.flappybird import FlappyBird
game = FlappyBird()
self.run_a_game(game)
def test_pixelcopter(self):
from ple.games.pixelcopter import Pixelcopter
game = Pixelcopter()
self.run_a_game(game)
def test_puckworld(self):
from ple.games.puckworld import PuckWorld
game = PuckWorld()
self.run_a_game(game)
def test_raycastmaze(self):
from ple.games.raycastmaze import RaycastMaze
game = RaycastMaze()
self.run_a_game(game)
def test_snake(self):
from ple.games.snake import Snake
game = Snake()
self.run_a_game(game)
def test_waterworld(self):
from ple.games.waterworld import WaterWorld
game = WaterWorld()
self.run_a_game(game)
def test_pong(self):
from ple.games.pong import Pong
game = Pong()
self.run_a_game(game)
def test_doom_not_defined(self):
from nose.tools import assert_raises
def invoke_doom():
DoomWrapper
assert_raises(NameError,invoke_doom)
if __name__ == "__main__":
nose.runmodule()
|
deerwalk/voltdb | tests/sqlcoverage/sql_coverage_test.py | Python | agpl-3.0 | 38,815 | 0.005668 | #!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2017 VoltDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import sys
# add the path to the volt python client, just based on knowing
# where we are now
sys.path.append('../../lib/python')
sys.path.append('./normalizer/')
import random
import time
import subprocess
import cPickle
import os.path
import imp
import re
import traceback
from voltdbclient import *
from optparse import OptionParser
from Query import VoltQueryClient
from SQLCoverageReport import generate_summary
from SQLGenerator import SQLGenerator
from xml.etree import ElementTree
from xml.etree.ElementTree import Element, SubElement
from subprocess import call # invoke unix/linux cmds
from XMLUtils import prettify # To create a human readable xml file
class Config:
def __init__(self, filename):
fd = open(filename, "r")
self.__content = fd.read()
fd.close()
self.__config = eval(self.__content.strip())
def get_configs(self):
return self.__config.keys()
def get_config(self, config_name):
return self.__config[config_name]
def minutes_colon_seconds(seconds):
return re.sub("^0:", "", str(datetime.timedelta(0, round(seconds))), 1)
def print_seconds(seconds=0, message_end="", message_begin="Total time: ",
include_current_time=False):
""" Prints, and returns, a message containing the specified number of
seconds, first in a minutes:seconds format (e.g. "01:02", or "1:43:48"),
then just the exact number of seconds in parentheses, e.g.,
"1:02 (61.9 seconds)", preceded by the 'message_begin' and followed by
'message_end'. Optionally, if 'include_current_time' is True, the current
time (in seconds since January 1, 1970) is also printed, in brackets, e.g.,
"1:02 (61.9 seconds) [at 1408645826.68], ", which is useful for debugging
purposes.
"""
time_msg = minutes_colon_seconds(seconds) + " ({0:.6f} seconds)".format(seconds)
if (include_current_time):
time_msg += " [at " + str(time.time()) + "]"
message = message_begin + time_msg + ", " + message_end
print message
return message
def print_elapsed_seconds(message_end="", prev_time=-1,
message_begin="Elapsed time: "):
"""Computes, returns and prints the difference (in seconds) between the
current system time and a previous time, which is either the specified
'prev_time' or, if that is negative (or unspecified), the previous time
at which this function was called. The printed message is preceded by
'message_begin' and followed by 'message_end'; the elapsed time is printed
in a minutes:seconds format, with the exact number of seconds in parentheses,
e.g., 61.9 seconds would be printed as "01:02 (61.9 seconds), ".
"""
now = time.time()
global save_prev_time
if (prev_time < 0):
prev_time = save_prev_time
save_prev_time = now
diff_time = now - prev_time
print_seconds(diff_time, message_end, message_begin)
return diff_time
def run_once(name, command, statements_path, results_path,
submit_verbosely, testConfigKit, precision):
print "Running \"run_once\":"
print " name: %s" % (name)
print " command: %s" % (command)
print " statements_path: %s" % (statements_path)
print " results_path: %s" % (results_path)
if precision:
print " precision: %s" % (precision)
sys.stdout.flush()
host = defaultHost
port = defaultPort
if(name == "jni"):
akey = "hostname"
if akey in testConfigKit:
host = testConfigKit["hostname"]
port = testConfigKit["hostport"]
global normalize
if(host == defaultHost):
server = subprocess.Popen(command + " backend=" + name, shell=True)
client = None
clientException = None
for i in xrange(30):
try:
client = VoltQueryClient(host, port)
client.set_quiet(True)
client.set_timeout(5.0) # 5 seconds
break
except socket.error as e:
clientException = e
time.sleep(1)
if client == None:
print >> sys.stderr, "Unable to connect/create client: there may be a problem with the VoltDB server or its ports:"
print >> sys.stderr, "name:", str(name)
print >> sys.stderr, "host:", str(host)
print >> sys.stderr, "port:", str(port)
print >> sys.stderr, "client (socket.error) exception:", str(clientException)
sys.stderr.flush()
return -1
# for key in testConfigKits:
# print "999 Key = '%s', Val = '%s'" | % (ke | y, testConfigKits[key])
if(host != defaultHost):
# Flush database
client.onecmd("updatecatalog " + testConfigKit["testCatalog"] + " " + testConfigKit["deploymentFile"])
statements_file = open(statements_path, "rb")
results_file = open(results_path, "wb")
while True:
try:
statement = cPickle.load(statements_file)
except EOFError:
break
try:
if submit_verbosely:
print "Submitting to backend " + name + " adhoc " + statement["SQL"]
client.onecmd("adhoc " + statement["SQL"])
except:
print >> sys.stderr, "Error occurred while executing '%s': %s" % \
(statement["SQL"], sys.exc_info()[1])
if(host == defaultHost):
# Should kill the server now
killer = subprocess.Popen("kill -9 %d" % (server.pid), shell=True)
killer.communicate()
if killer.returncode != 0:
print >> sys.stderr, \
"Failed to kill the server process %d" % (server.pid)
break
table = None
if client.response == None:
print >> sys.stderr, "No error, but an unexpected null client response (server crash?) from executing statement '%s': %s" % \
(statement["SQL"], sys.exc_info()[1])
if(host == defaultHost):
killer = subprocess.Popen("kill -9 %d" % (server.pid), shell=True)
killer.communicate()
if killer.returncode != 0:
print >> sys.stderr, \
"Failed to kill the server process %d" % (server.pid)
break
if client.response.tables:
### print "DEBUG: got table(s) from ", statement["SQL"] ,"."
if precision:
table = normalize(client.response.tables[0], statement["SQL"], precision)
else:
table = normalize(client.response.tables[0], statement["SQL"])
if len(client.response.tables) > 1:
print "WARNING: ignoring extra table(s) from result of query ?", statement["SQL"] , "?"
# else:
# print "WARNING: returned no table(s) from ?", statement["SQL"] ,"?"
cPickle.dump({"Status": client.response.status,
"Info": client.response.statusString,
"Result": table,
"Exception": str(client.response.except |
Nick-Hall/gramps | gramps/gen/datehandler/_datestrings.py | Python | gpl-2.0 | 15,116 | 0.005162 | # -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2013 Vassilii Khachaturov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Date strings to translate per each language for display and parsing.
__main__
--------
Run this code with the appropriate ``LANG`` and ``LC_DATE`` set for your target
language, in order to generate the .po snippets initialized with the strings
from your locale (from the deprecated data provided in _grampslocale).
E.g., for French::
LANG=fr_FR.utf8 LC_ALL=fr_FR.utf8 GRAMPS_RESOURCES=$PWD python -m gramps.gen.datehandler._datestrings
Then merge the output into your language's .po file, and further modify the
strings as needed. Then remove the strings from your language's
:class:`DateParserXX` and :class:`DateHandlerXX` classes.
"""
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
log = logging.getLogger(".DateStrings")
#-------------------------------------------------------------------------
#
# DateStrings
#
#-------------------------------------------------------------------------
class DateStrings:
"""
String tables for :class:`.DateDisplay` and :class:`.DateParser`.
"""
# This table needs not be localized, it's only for parsing
# Swedish calendar dates using Swedish month names.
# Display of these months uses the regular long_months.
# TODO should we pack these into alt_long_months instead?
swedish_SV = (
"", "Januari", "Februari", "Mars",
"April", "Maj", "Juni",
"Juli", "Augusti", "September",
"Oktober", "November", "December"
)
def __init__(self, locale):
_ = locale.translation.lexgettext
self.long_months = ( "",
# Translators: see
| # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("|January", "localized lexeme inflections"),
_("|February", "localized lexeme inflections"),
_("|March", "localized lexeme inflections"),
_("|April", "localized lexeme inflections"),
_("|May", "localized lexeme inflections"),
_("|June", "localized lexeme inflections"),
_("|July", "localized lexeme inflections"),
_("|August", "localized lexeme inflections"),
_("|September", "localized lexeme inflections"),
_("|October", "localized lexeme inflections"),
_("|November", "localized lexeme inflections"),
_("|December", "localized lexeme inflections") )
self.short_months = ( "",
# Translators: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("|Jan", "localized lexeme inflections - short month form"),
_("|Feb", "localized lexeme inflections - short month form"),
_("|Mar", "localized lexeme inflections - short month form"),
_("|Apr", "localized lexeme inflections - short month form"),
_("|May", "localized lexeme inflections - short month form"),
_("|Jun", "localized lexeme inflections - short month form"),
_("|Jul", "localized lexeme inflections - short month form"),
_("|Aug", "localized lexeme inflections - short month form"),
_("|Sep", "localized lexeme inflections - short month form"),
_("|Oct", "localized lexeme inflections - short month form"),
_("|Nov", "localized lexeme inflections - short month form"),
_("|Dec", "localized lexeme inflections - short month form") )
_ = locale.translation.sgettext
self.alt_long_months = ( "",
# Translators: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to add proper alternatives to be recognized in your localized
# DateParser code!
_("|", "alternative month names for January"),
_("|", "alternative month names for February"),
_("|", "alternative month names for March"),
_("|", "alternative month names for April"),
_("|", "alternative month names for May"),
_("|", "alternative month names for June"),
_("|", "alternative month names for July"),
_("|", "alternative month names for August"),
_("|", "alternative month names for September"),
_("|", "alternative month names for October"),
_("|", "alternative month names for November"),
_("|", "alternative month names for December") )
self.calendar = (
# Must appear in the order indexed by Date.CAL_... numeric constants
_("Gregorian", "calendar"),
_("Julian", "calendar"),
_("Hebrew", "calendar"),
_("French Republican", "calendar"),
_("Persian", "calendar"),
_("Islamic", "calendar"),
_("Swedish", "calendar") )
_ = locale.translation.lexgettext
self.hebrew = (
"",
# Translators: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Tishri", "Hebrew month lexeme"),
_("Heshvan", "Hebrew month lexeme"),
_("Kislev", "Hebrew month lexeme"),
_("Tevet", "Hebrew month lexeme"),
_("Shevat", "Hebrew month lexeme"),
_("AdarI", "Hebrew month lexeme"),
_("AdarII", "Hebrew month lexeme"),
_("Nisan", "Hebrew month lexeme"),
_("Iyyar", "Hebrew month lexeme"),
_("Sivan", "Hebrew month lexeme"),
_("Tammuz", "Hebrew month lexeme"),
_("Av", "Hebrew month lexeme"),
_("Elul", "Hebrew month lexeme")
)
self.french = (
"",
# Translators: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Vendémiaire", "French month lexeme"),
_("Brumaire", "French month lexeme"),
_("Frimaire", "French month lexeme"),
_("Nivôse", "French month lexeme"),
_("Pluviôse", "French month lexeme"),
_("Ventôse", "French month lexeme"),
_("Germinal", "French month lexeme"),
_("Floréal", "French month lexeme"),
_("Prairial", "French month lexeme"),
_("Messidor", "French month lexeme"),
_("Thermidor", "French month lexeme"),
_("Fructidor", "French month lexeme"),
_("Extra", "French month lexeme"),
)
self.islamic = (
"",
# Translators: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select pro | |
sharad1126/owtf | tests/test_cases/framework/plugin/plugin_params_tests.py | Python | bsd-3-clause | 4,663 | 0.001501 | from tests.testing_framework.base_test_cases import BaseTestCase
from flexmock import flexmock
from hamcrest import *
from framework.plugin.plugin_params import PluginParams
import re
from hamcrest.library.text.stringmatches import matches_regexp
class PluginParamsTests(BaseTestCase):
def before(self):
self.core_mock = flexmock()
self.plugin_params = PluginParams(self.core_mock, {'Args': ['arg1=val1', "arg2=val2"]})
def test_ProcessArgs(self):
assert_that(self.plugin_params.ProcessArgs(), is_(True))
assert_that(self.plugin_params.Args["arg1"], is_("val1"))
assert_that(self.plugin_params.Args["arg2"], is_("val2"))
def test_ListArgs_should_print_the_args_to_the_stdout(self):
args = {"arg_name": "arg_value"}
self.init_stdout_recording()
self.plugin_params.ListArgs(args)
output = self.get_recorded_stdout_and_close()
assert_that(output is not None)
def test_ShowParamInfo_should_print_the_params_to_the_stdout(self):
args = {"Description": "plugin description",
"Mandatory": {"arg_name": "arg_value"},
"Optional": {"arg_name": "arg_value"}}
plugin = self._get_plugin_example()
self.core_mock.Error = flexmock()
self.core_mock.Error.should_receive("FrameworkAbort").once()
self.init_stdout_recording()
self.plugin_params.ShowParamInfo(args, plugin)
output = self.get_recorded_stdout_and_close()
assert_that(output is not None)
def test_CheckArgList_should_be_ok(self):
plugin = self._get_plugin_example()
args = {"Mandatory": [], "Optional": [], "Description": ""}
assert_that(self.plugin_params.CheckArgList(args, plugin))
def test_CheckArgList_with_missing_Mandatory_and_Optional_args(self):
self.core_mock.Error = flexmock()
self.core_mock.Error.should_receive("Add").with_args(re.compile(".*Mandatory.*Optional")).once()
plugin = self._get_plugin_example()
self.plugin_params.CheckArgList({}, plugin)
def test_CheckArgList_with_missing_description_arg(self):
self.core_mock.Error = flexmock()
self.core_mock.Error.should_receive("Add").with_args(re.compile(".*requires.*Description")).once()
plugin = self._get_plugin_example()
args = {"Mandatory": [], "Optional": []}
self.plugin_params | .CheckArgList(args, plugin)
def test_SetArgsBasic_sets_the_args_to_the_plugin(self):
plugin = self._get_plugin_example()
args = {"arg1": "val1", "arg2": "val2"}
self.plugin_params.Args = args
assert_that(self.plugin_params.SetArgsBasic(args, plugin), equal_to([args]))
assert_that(plugin["Arg | s"], matches_regexp(".*arg1=val1.*"))
assert_that(plugin["Args"], matches_regexp(".*arg2=val2.*"))
def test_SetConfig_is_a_wrapper(self):
self.core_mock.Config = flexmock()
self.core_mock.Config.should_receive("Set").with_args("_arg1", "val1").once()
args = {"arg1": "val1"}
self.plugin_params.SetConfig(args)
def test_GetArgList_returns_the_args_we_ask_for(self):
arg_list = ["arg1", "arg2"]
plugin = self._get_plugin_example()
result = self.plugin_params.GetArgList(arg_list, plugin)
assert_that(result["arg1"], is_("val1"))
assert_that(result["arg2"], is_("val2"))
def test_GetArgList_registers_an_error_for_not_foud_args(self):
self.core_mock.Error = flexmock()
self.core_mock.Error.should_receive("Add").once()
self.core_mock.Config = flexmock()
self.core_mock.Config.should_receive("IsSet").and_return(False)
arg_list = ["non_existent_arg"]
plugin = self._get_plugin_example()
result = self.plugin_params.GetArgList(arg_list, plugin)
assert_that(result, is_({}))
assert_that(plugin["ArgError"], is_(True))
def test_GetArgs(self):
args = {"Mandatory": ["arg1"],
"Optional": ["arg2"],
"Description": "description"}
plugin = self._get_plugin_example()
self.core_mock.Config = flexmock()
self.core_mock.Config.should_receive("IsSet").and_return(False)
result = self.plugin_params.GetArgs(args, plugin)
assert_that(result[0]["arg1"], is_("val1"))
assert_that(result[0]["arg2"], is_("val2"))
def _get_plugin_example(self):
return {'Args': '', 'Code': 'OWASP-IG-005', 'Group': 'web', 'Name': 'Application_Discovery', 'File': 'Application_Discovery@OWASP-IG-005.py', 'Title': 'Application Discovery', 'Descrip': '', 'Type': 'passive'} |
rhyolight/nupic.research | htmresearch/frameworks/pytorch/sparse_net.py | Python | gpl-3.0 | 10,344 | 0.005994 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2018, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from __future__ import print_function
import collections
import torch
import torch.nn as nn
from htmresearch.frameworks.pytorch.cnn_sdr import CNNSDR2d
from htmresearch.frameworks.pytorch.linear_sdr import LinearSDR
class Flatten(nn.Module):
"""
Simple module used to flatten the tensors before passing data from CNN layer
to the linear layer
"""
def __init__(self, size):
super(Flatten, self).__init__()
self.size = size
def forward(self, x):
x = x.view(-1, self.size)
return x
class SparseNet(nn.Module):
def __init__(self,
n=2000,
k=200,
outChannels=0,
c_k=0,
inputSize=28*28,
outputSize=10,
kInferenceFactor=1.0,
weightSparsity=0.5,
boostStrength=1.0,
boostStrengthFactor=1.0,
dropout=0.0,
useBatchNorm=True):
"""
A network with one or more hidden layers, which can be a sequence of
k-sparse CNN followed by a sequence of k-sparse linear layer with optional
dropout layers in between the k-sparse linear layers.
[CNNSDR] x len(outChannels)
|
[Flatten]
|
[LinearSDR => Dropout] x len(n)
|
[Linear => Softmax]
:param n:
Number of units in each fully connected k-sparse linear layer.
Use 0 to disable the linear layer
:type n: int or list[int]
:param k:
Number of ON (non-zero) units per iteration in each k-sparse linear layer.
The sparsity of this layer will be k / n. If k >= n, the layer acts as a
traditional fully connected RELU layer
:type k: int or list[int]
:param outChannels:
Number of channels (filters) in each k-sparse convolutional layer.
Use 0 to disable the CNN layer
:type outChannels: int or list[int]
:param c_k:
Number of ON (non-zero) units per iteration in each k-sparse convolutional
layer. The sparsity of this layer will be c_k / c_n. If c_k >= c_n, the
layer acts as a traditional convolutional layer.
:type c_k: int or list[int]
:param inputSize:
If the CNN layer is enable this parameter holds a tuple representing
(in_channels,height,width). Otherwise it will hold the total
dimensionality of input vector of the first linear layer. We apply
view(-1, inputSize) to the data before passing it to Linear layers.
:type inputSize: int or tuple[int,int,int]
:param outputSize:
Total dimensionality of output vector
:type outputSize: int
:param kInferenceFactor:
During inference (training=False) we increase k by this factor.
:type kInferenceFactor: float
:param weightSparsity:
Pct of weights that are allowed to be non-zero.
:type weightSparsity: float
:param boostStrength:
boost strength (0.0 implies no b | oosting).
:type boostStrength: float
:param boostStrengthFactor:
boost strength is multiplied by this factor after each epoch.
A value < 1.0 will decrement it every epoch.
:type boostStrengthFactor: float
:param dropout:
dropout probability used to train the second and subsequent layers.
A value 0.0 implies no dropout
:type dropout: float
:param useBatchNorm:
If True, applies batchNorm for each layer.
:type useBatchNorm: bool
"""
sup | er(SparseNet, self).__init__()
assert(weightSparsity >= 0)
# Validate CNN sdr params
if isinstance(inputSize, collections.Sequence):
assert(inputSize[1] == inputSize[2],
"sparseCNN only supports square images")
if type(outChannels) is not list:
outChannels = [outChannels]
if type(c_k) is not list:
c_k = [c_k]
assert(len(outChannels) == len(c_k))
# Validate linear sdr params
if type(n) is not list:
n = [n]
if type(k) is not list:
k = [k]
assert(len(n) == len(k))
for i in range(len(n)):
assert(k[i] <= n[i])
self.k = k
self.kInferenceFactor = kInferenceFactor
self.n = n
self.outChannels = outChannels
self.c_k = c_k
self.inputSize = inputSize
self.weightSparsity = weightSparsity # Pct of weights that are non-zero
self.boostStrengthFactor = boostStrengthFactor
self.boostStrength = boostStrength
self.kernelSize = 5
self.learningIterations = 0
inputFeatures = inputSize
cnnSdr = nn.Sequential()
# CNN Layers
for i in range(len(outChannels)):
if outChannels[i] != 0:
module = CNNSDR2d(imageShape=inputFeatures,
outChannels=outChannels[i],
k=c_k[i],
kernelSize=self.kernelSize,
kInferenceFactor=kInferenceFactor,
boostStrength=boostStrength,
useBatchNorm=useBatchNorm,
)
cnnSdr.add_module("cnnSdr{}".format(i), module)
# Feed this layer output into next layer input
inputFeatures = (outChannels[i], module.maxpoolWidth, module.maxpoolWidth)
if len(cnnSdr) > 0:
inputFeatures = cnnSdr[-1].outputLength
self.cnnSdr = cnnSdr
else:
self.cnnSdr = None
# Flatten input before passing to linear layers
self.flatten = Flatten(inputFeatures)
# Linear layers
self.linearSdr = nn.Sequential()
for i in range(len(n)):
if n[i] != 0:
self.linearSdr.add_module("linearSdr{}".format(i+1),
LinearSDR(inputFeatures=inputFeatures,
n=n[i],
k=k[i],
kInferenceFactor=kInferenceFactor,
weightSparsity=weightSparsity,
boostStrength=boostStrength,
useBatchNorm=useBatchNorm,
))
# Add dropout after each hidden layer
if dropout > 0.0:
self.linearSdr.add_module("dropout{}".format(i), nn.Dropout(dropout))
# Feed this layer output into next layer input
inputFeatures = n[i]
# Add one fully connected layer after all hidden layers
self.fc = nn.Linear(self.n[-1], outputSize)
self.softmax = nn.LogSoftmax(dim=1)
def postEpoch(self):
"""
Call this once after each training epoch.
"""
if self.training:
self.boostStrength = self.boostStrength * self.boostStrengthFactor
if self.cnnSdr is not None:
for module in self.cnnSdr.children():
if hasattr(module, "setBoostStrength"):
module.setBoostStrength(self.boostStrength)
for module in self.linearSdr.children():
if hasattr(module, "setBoostStrength"):
module.setBoostStrength(self.boostStrength)
if hasattr(module, "rezeroWeights"):
# The optimizer is updating the weights during training after the forward
# step. Therefore we need to re-zero the weights after every epoch
module.rezeroWeights()
print("boostStrength is now:", self.boostStrength)
|
kaichogami/sympy | sympy/external/tests/test_codegen.py | Python | bsd-3-clause | 11,832 | 0.001775 | # This tests the compilation and execution of the source code generated with
# utilities.codegen. The compilation takes place in a temporary directory that
# is removed after the test. By default the test directory is always removed,
# but this behavior can be changed by setting the environment variable
# SYMPY_TEST_CLEAN_TEMP to:
# export SYMPY_TEST_CLEAN_TEMP=always : the default behavior.
# export SYMPY_TEST_CLEAN_TEMP=success : only remove the directories of working tests.
# export SYMPY_TEST_CLEAN_TEMP=never : never remove the directories with the test code.
# When a directory is not removed, the necessary information is printed on
# screen to find the files that belong to the (failed) tests. If a test does
# not fail, py.test captures all the output and you will not see the directories
# corresponding to the successful tests. Use the --nocapture option to see all
# the output.
# All tests below have a counterpart in utilities/test/test_codegen.py. In the
# latter file, the resulting code is compared with predefined strings, without
# compilation or execution.
# All the generated Fortran code should conform with the Fortran 95 standard,
# and all the generated C code should be ANSI C, which facilitates the
# incorporation in various projects. The tests below assume that the binary cc
# is somewhere in the path and that it can compile ANSI C code.
from __future__ import print_function
from sympy.abc import x, y, z
from sympy.utilities.pytest import skip
from sympy.utilities.codegen import codegen, make_routine, get_code_generator
import sys
import os
import tempfile
import subprocess
# templates for the main program that will test the generated code.
main_template = {}
main_template['F95'] = """
program main
include "codegen.h"
integer :: result;
result = 0
%(statements)s
call exit(result)
end program
"""
main_template['C'] = """
#include "codegen.h"
#include <stdio.h>
#include <math.h>
int main() {
int result = 0;
%(statements)s
return result;
}
"""
# templates for the numerical tests
numerical_test_template = {}
numerical_test_template['C'] = """
if (fabs(%(call)s)>%(threshold)s) {
printf("Numerical validation failed: %(call)s=%%e threshold=%(threshold)s\\n", %(call)s);
result = -1;
}
"""
numerical_test_template['F95'] = """
if (abs(%(call)s)>%(threshold)s) then
write(6,"('Numerical validation failed:')")
write(6,"('%(call)s=',e15.5,'threshold=',e15.5)") %(call)s, %(threshold)s
result = -1;
end if
"""
# command sequences for supported compilers
compile_commands = {}
compile_commands['cc'] = [
"cc -c codegen.c -o codegen.o",
"cc -c main.c -o main.o",
"cc main.o codegen.o -lm -o test.exe"
]
compile_commands['gfortran'] = [
"gfortran -c codegen.f90 -o codegen.o",
"gfortran -ffree-line-length-none -c main.f90 -o main.o",
"gfortran main.o codegen.o -o test.exe"
]
compile_commands['g95'] = [
"g95 -c codegen.f90 -o codegen.o",
"g95 -ffree-line-length-huge -c main.f90 -o main.o",
"g95 main.o codegen.o -o test.exe"
]
compile_commands['ifort'] = [
"ifort -c codegen.f90 -o codegen.o",
"ifort -c main.f90 -o main.o",
"ifort main.o codegen.o -o test.exe"
]
combinations_lang_compiler = [
('C', 'cc'),
('F95', 'ifort'),
('F95', 'gfortran'),
('F95', 'g95')
]
def try_run(commands):
"""Run a series of commands and only return True if all ran fine."""
null = open(os.devnull, 'w')
for command in commands:
retcode = subprocess.call(command, stdout=null, shell=True,
stderr=subprocess.STDOUT)
if retcode != 0:
return False
return True
def run_test(label, routines, numerical_tests, language, commands, friendly=True):
"""A driver for the codegen tests.
This driver assumes that a compiler ifort is present in the PATH and that
ifort is (at least) a Fortran 90 compiler. The generated code is written in
a temporary directory, together with a main program that validates the
generated code. The test passes when the compilation and the validation
run correctly.
"""
# Check input arguments before touching the file system
language = language.upper()
assert language in main_template
assert language in numerical_test_template
# Check that evironment variable makes sense
clean = os.getenv('SYMPY_TEST_CLEAN_TEMP', 'always').lower()
if clean not in ('always', 'success', 'never'):
raise ValueError("SYMPY_TEST_CLEAN_TEMP must be one of the following: 'always', 'success' or 'never'.")
# Do all the magic to compile, run and validate the test code
# 1) prepare the temporary working directory, switch to that dir
work = tempfile.mkdtemp("_sympy_%s_test" % language, "%s_" % label)
oldwork = os.getcwd()
os.chdir(work)
# 2) write the generated code
if friendly:
# interpret the routines as a name_expr list and call the friendly
# function codegen
codegen(routines, language, "codegen", to_files=True)
else:
code_gen = get_code_generator(language, "codegen")
code_gen.write(routines, "codegen", to_files=True)
# 3) write a simple main program that links to the generated code, and that
# includes the numerical tests
test_strings = []
for fn_name, args, expected, threshold in numerical_tests:
call_string = "%s(%s)-(%s)" % (
fn_name, ",".join(str(arg) for arg in args), expected)
if language == "F95":
call_string = fortranize_double_constants(call_string)
threshold = fortranize_double_constants(str(threshold))
test_strings.append(numerical_test_template[language] % {
"call": call_string,
"threshold": threshold,
})
if language == "F95":
f_name = "main.f90"
elif language == "C":
f_name = "main.c"
else:
raise NotImplementedError(
"FIXME: filename extension unknown for language: %s" % language)
with open(f_name, "w") as f:
f.write(
main_template[language] % {'statements': "".join(test_strings)})
# 4) Compile and link
compiled = try_run(commands)
# 5) Run if compiled
if compiled:
executed = try_run(["./test.exe"])
else:
executed = False
# 6) Clean up stuff
if clean == 'always' or (clean == 'success' and compiled and executed):
def safe_remove(filename):
if os.path.isfile(filename):
| os.remove(filename)
safe_remove("codegen.f90" | )
safe_remove("codegen.c")
safe_remove("codegen.h")
safe_remove("codegen.o")
safe_remove("main.f90")
safe_remove("main.c")
safe_remove("main.o")
safe_remove("test.exe")
os.chdir(oldwork)
os.rmdir(work)
else:
print("TEST NOT REMOVED: %s" % work, file=sys.stderr)
os.chdir(oldwork)
# 7) Do the assertions in the end
assert compiled, "failed to compile %s code with:\n%s" % (
language, "\n".join(commands))
assert executed, "failed to execute %s code from:\n%s" % (
language, "\n".join(commands))
def fortranize_double_constants(code_string):
"""
Replaces every literal float with literal doubles
"""
import re
pattern_exp = re.compile('\d+(\.)?\d*[eE]-?\d+')
pattern_float = re.compile('\d+\.\d*(?!\d*d)')
def subs_exp(matchobj):
return re.sub('[eE]', 'd', matchobj.group(0))
def subs_float(matchobj):
return "%sd0" % matchobj.group(0)
code_string = pattern_exp.sub(subs_exp, code_string)
code_string = pattern_float.sub(subs_float, code_string)
return code_string
def is_feasible(language, commands):
# This test should always work, otherwise the compiler is not present.
routine = make_routine("test", x)
numerical_tests = [
("test", ( 1.0,), 1.0, 1e-15),
("test", (-1.0,), -1.0, 1e-15),
]
try:
run_test("is_feasible", [routine], numerical_tests, language, commands,
friendly=False)
return True
except |
Jgarcia-IAS/SAT | openerp/addons-extra/odoo-pruebas/odoo-server/addons-extra/l10n_pe_ple03/report/sunat_3_7.py | Python | agpl-3.0 | 1,980 | 0.00303 | # -*- e | ncoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 NUMA Extreme Systems (www.numaes.com) for Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This pro | gram as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import time
from openerp.report import report_sxw
class sunat_3_7_report(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context=None):
super(sunat_3_7_report, self).__init__(cr, uid, name, context)
self.localcontext.update( {
'time': time,
})
self.context = context
report_sxw.report_sxw('report.l10n_pe.sunat_3_7', 'l10n_pe.ple_3_7',
'addons/l10n_pe_ple03/report/sunat_3_7.rml', parser=sunat_3_7_report, header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
btrent/knave | pychess/System/gstreamer.py | Python | gpl-3.0 | 1,803 | 0.007765 | from threading import Lock
from gobject import GObject, SIGNAL_RUN_FIRST, TYPE_NONE
from Log import log
try:
import pygst
pygst.require('0.10')
import gst
except ImportError, e:
log.error("Unable to import gstreamer. All sound will be mute.\n%s" % e)
class Player (GObject):
__gsignals__ = {
'end': (SIGNAL_RUN_FIRST, TYPE_NONE, ()),
'error': (SIGNAL_RUN_FIRST, TYPE_NONE, (object,))
}
def checkSound(self):
#self.emit("error", None)
def play(self, uri):
pass
else:
class Player (GObject):
__gsignals__ = {
'end': (SIGNAL_RUN_FIRST, TYPE_NONE, ()),
'error': (SIGNAL_RUN_FIRST, TYPE_NONE, (object,))
}
def __init__(self):
GObject.__init__(self)
self.player = gst.element_factory_make("playbin")
self.player.get_bus().add_watch(self.onMessage)
def onMessage(self, bus, message):
if message.type == gst.MESSAGE_ERROR:
# Sound seams sometimes to work, even though errors are dropped.
# Therefore we really can't do anything to test.
# #self.emit("error", message)
simpleMessage, advMessage = message.parse_error()
log.warn("Gstreamer error '%s': %s" % (simpleMessage, advMessage))
self.__del__()
elif message.type == gst.MESSAGE_EOS:
#self.emit("end")
return True
| def play(self, uri):
self.player.set_state(gst.STATE_READY)
self.pl | ayer.set_property("uri", uri)
self.player.set_state(gst.STATE_PLAYING)
def __del__ (self):
self.player.set_state(gst.STATE_NULL)
|
joshdrake/django_haystack_compat | tests/elasticsearch_tests/tests/elasticsearch_query.py | Python | bsd-3-clause | 6,603 | 0.002726 | import datetime
from django.test import TestCase
from haystack import connections
from haystack.inputs import Exact
from haystack.models import SearchResult
from haystack.query import SQ
from core.models import MockModel, AnotherMockModel
class ElasticsearchSearchQueryTestCase(TestCase):
def setUp(self):
super(ElasticsearchSearchQueryTestCase, self).setUp()
self.sq = connections['default'].get_query()
def test_build_query_all(self):
self.assertEqual(self.sq.build_query(), '*:*')
def test_build_query_single_word(self):
self.sq.add_filter(SQ(content='hello'))
self.assertEqual(self.sq.build_query(), 'hello')
def test_build_query_boolean(self):
self.sq.add_filter(SQ(content=True))
self.assertEqual(self.sq.build_query(), 'True')
def test_build_query_datetime(self):
self.sq.add_filter(SQ(content=datetime.datetime(2009, 5, 8, 11, 28)))
self.assertEqual(self.sq.build_query(), '2009-05-08T11:28:00')
def test_build_query_multiple_words_and(self):
self.sq.add_filter(SQ(content='hello'))
self.sq.add_filter(SQ(content='world'))
self.assertEqual(self.sq.build_query(), '(hello AND world)')
def test_build_query_multiple_words_not(self):
self.sq.add_filter(~SQ(content='hello'))
self.sq.add_filter(~SQ(content='world'))
self.assertEqual(self.sq.build_query(), '(NOT (hello) AND NOT (world))')
def test_build_query_multiple_words_or(self):
self.sq.add_filter(~SQ(content='hello'))
self.sq.add_filter(SQ(content='hello'), use_or=True)
self.assertEqual(self.sq.build_query(), '(NOT (hello) OR hello)')
def test_build_query_multiple_words_mixed(self):
self.sq.add_filter(SQ(content='why'))
self.sq.add_filter(SQ(content='hello'), use_or=True)
self.sq.add_filter(~SQ(content='world'))
self.assertEqual(self.sq.build_query(), u'((why OR hello) AND NOT (world))')
def test_build_query_phrase(self):
self.sq.add_filter(SQ(content='hello world'))
self.assertEqual(self.sq.build_query(), '(hello AND world)')
self.sq.add_filter(SQ(content__exact='hello world'))
self.assertEqual(self.sq.build_query(), u'((hello AND world) AND "hello world")')
def test_build_query_boost(self):
self.sq.add_filter(SQ(content='hello'))
self.sq.add_boost('world', 5)
self.assertEqual(self.sq.build_query(), "hello world^5")
def test_build_query_multiple_filter_types(self):
self.sq.add_filter(SQ(content='why'))
self.sq.add_filter(SQ(pub_date__lte=Exact('2009-02-10 01:59:00')))
self.sq.add_filter(SQ(author__gt='daniel'))
self.sq.add_filter(SQ(created__lt=Exact('2009-02-12 12:13:00')))
self.sq.add_filter(SQ(title__gte='B'))
self.sq.add_filter(SQ(id__in=[1, 2, 3]))
self.sq.add_filter(SQ(rating__range=[3, 5]))
self.assertEqual(self.sq.build_query(), u'(why AND pub_date:[* TO "2009-02-10 01:59:00"] AND author:{"daniel" TO *} AND created:{* TO "2009-02-12 12:13:00"} AND title:["B" TO *] AND id:("1" OR "2" OR "3") AND rating:["3" TO "5"])')
def test_build_query_multiple_filter_types_with_datetimes(self):
self.sq.add_filter(SQ(content='why'))
self.sq.add_filter(SQ(pub_date__lte=datetime.datetime(2009, 2, 10, 1, 59, 0)))
self.sq.add_filter(SQ(author__gt='daniel'))
self.sq.add_filter(SQ(created__lt=datetime.datetime(2009, 2, 12, 12, 13, 0)))
self.sq.add_filter(SQ(title__gte='B'))
self.sq.add_filter(SQ(id__in=[1, 2, 3]))
self.sq.add_filter(SQ(rating__range=[3, 5]))
self.assertEqual(self.sq.build_query(), u'(why AND pub_date:[* TO "2009-02-10T01:59:00"] AND author:{"daniel" TO *} AND created:{* TO "2009-02-12T12:13:00"} AND title:["B" TO *] AND id:("1" OR "2" OR "3") AND rating:["3" TO "5"])')
def test_build_query_in_filter_multiple_words(self):
self.sq.add_filter(SQ(content='why'))
self.sq.add_filter(SQ(title__in=["A Famous Paper", "An Infamous Article"]))
self.assertEqual(self.sq.build_query(), u'(why AND title:("A Famous Paper" OR "An Infamous Article"))')
def test_build_query_in_filter_datetime(self):
self.sq.add_filter(SQ(content='why'))
self.sq.add_filter(SQ(pub_date__in=[datetime.datetime(2009, 7, 6, 1, 56, 21)]))
self.assertEqual(self.sq.build_query(), u'(why AND pub_date:("2009-07-06T01:56:21"))')
def test_build_query_in_with_set(self):
self.sq.add_filter(SQ(content='why'))
self.sq.add_filter(SQ(title__in=set(["A Famous Paper", "An Infamous Article"])))
self.assertEqual(self.sq.build_query(), u'(why AND title:("A Famous Paper" OR "An Infamous Article"))')
def test_build_query_wildcard_filter_types(self):
self.sq.add_filter(SQ(conten | t='why'))
self.sq.add_filter(SQ(title__startswith='haystack'))
self.assertEqual(self.sq.build_query(), u'(why AND title:haystack*)')
def test_clean(self):
self.assertEqual(self.sq.clean('hello world'), 'hello world')
self.assertEqual(self.sq.clean('hello AND world'), 'hello | and world')
self.assertEqual(self.sq.clean('hello AND OR NOT TO + - && || ! ( ) { } [ ] ^ " ~ * ? : \ world'), 'hello and or not to \\+ \\- \\&& \\|| \\! \\( \\) \\{ \\} \\[ \\] \\^ \\" \\~ \\* \\? \\: \\\\ world')
self.assertEqual(self.sq.clean('so please NOTe i am in a bAND and bORed'), 'so please NOTe i am in a bAND and bORed')
def test_build_query_with_models(self):
self.sq.add_filter(SQ(content='hello'))
self.sq.add_model(MockModel)
self.assertEqual(self.sq.build_query(), 'hello')
self.sq.add_model(AnotherMockModel)
self.assertEqual(self.sq.build_query(), u'hello')
def test_set_result_class(self):
# Assert that we're defaulting to ``SearchResult``.
self.assertTrue(issubclass(self.sq.result_class, SearchResult))
# Custom class.
class IttyBittyResult(object):
pass
self.sq.set_result_class(IttyBittyResult)
self.assertTrue(issubclass(self.sq.result_class, IttyBittyResult))
# Reset to default.
self.sq.set_result_class(None)
self.assertTrue(issubclass(self.sq.result_class, SearchResult))
def test_in_filter_values_list(self):
    """`__in` must accept a Django ValuesListQuerySet, not just plain lists."""
    self.sq.add_filter(SQ(content='why'))
    self.sq.add_filter(SQ(title__in=MockModel.objects.values_list('id', flat=True)))
    self.assertEqual(self.sq.build_query(), u'(why AND title:("1" OR "2" OR "3"))')
|
taf3/taf | taf/testlib/dev_trextg.py | Python | apache-2.0 | 5,943 | 0.001683 | # Copyright (c) 2016 - 2017, Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""``dev_trextg.py``
`TRex traffic generators specific functionality`
Notes:
To install TRex client api package:
1. Download package from http://trex-tgn.cisco.com/trex/release/
2. Unpack main package 'v2.00.tar.gz' and then client package 'trex_client_v2.00.tar.gz'
3. Add path to the trex client stateless lib for PYTHONPATH environment variable: env PYTHONPATH=<your path>/trex_client/stl
"""
from . import loggers
from .TRex.Trex import TrexMixin
from .tg_template import GenericTG
from .TRex.TrexHLT import TrexHLTMixin
from .packet_processor import PacketProcessor
class Trex(TrexMixin, TrexHLTMixin, PacketProcessor, GenericTG):
    """TRex interaction base class.

    Configuration examples:

    TRex server Example::

        {
         "name": "TRex"
         "entry_type": "tg",
         "instance_type": "trex",
         "id": "TG1",
         "ports": [0, 1],
         "ipaddr": "1.1.1.1",
         "ssh_user": "user",
         "ssh_pass": "PassworD",
        }

    Where::

        \b entry_type and \b instance_type are mandatory values and cannot be changed
        \n\b id - int or str uniq device ID (mandatory)
        \n\b name - User defined device name (optional)
        \n\b ports or \b port_list - short or long ports configuration (Only one of them has to be used)
        \n\b ipaddr - remote host IP address (mandatory)
        \n\b ssh_user - remote host login user (mandatory)
        \n\b ssh_pass - remote host login password (mandatory)

    Notes:
        You can safely add additional custom attributes.

    """

    class_logger = loggers.ClassLogger()

    def __init__(self, config, opts):
        """Initializes connection to TRex.

        Args:
            config(dict):  Configuration information.
            opts(OptionParser):  py.test config.option object which contains all py.test cli options.

        """
        self.config = config
        self.opts = opts
        # Indicates if TG object supports high level protocol emulation (can emulate dialogs)
        self.is_protocol_emulation_present = self.config.get("trex_hltapi", False)
        self.host = self.config["ipaddr"]
        super(Trex, self).__init__(self.config, self.opts)
        self.ports, self.port_list = self._get_speed_ports()

    def _get_speed_ports(self):
        """Get ports with speed from config.

        Returns:
            tuple(list[int], list[list[int]]):  Ports used in real config and port/speed pairs.

        Notes:
            "port_list" (long format, with speed) takes precedence over "ports"
            when both keys are present in the config.

        """
        ports = []
        ports_list = []
        if 'ports' in self.config:
            ports = [int(x) for x in self.config["ports"]]
        if "port_list" in self.config:
            ports = [int(x[0]) for x in self.config["port_list"]]
            ports_list = [[int(x[0]), int(x[1])] for x in self.config["port_list"]]
        return ports, ports_list

    def start(self, wait_on=True):
        """Start Trex TG.

        Args:
            wait_on(bool):  Wait for device is loaded

        """
        # No-op: the TRex server process is expected to be managed externally.
        pass

    def stop(self):
        """Shutdown TRex TG device."""
        # No-op: the TRex server process is expected to be managed externally.
        pass

    def check(self):
        """Checking connection to TRex.

        Returns:
            None

        """
        if self.is_protocol_emulation_present:
            TrexHLTMixin.check(self)
        TrexMixin.check(self)

    def create(self):
        """Obligatory class for entry_type = tg."""
        if self.is_protocol_emulation_present:
            TrexHLTMixin.create(self)
        TrexMixin.create(self)

    def destroy(self):
        """Obligatory class for entry_type = tg."""
        self.class_logger.info("Destroying TRex object...")
        # Fast cleanup first (streams are kept), then tear down connections.
        self.cleanup(mode="fast")
        self.class_logger.info("TRex Cleanup finished.")

        self.class_logger.info("Disconnecting TRex...")
        TrexMixin.destroy(self)
        if self.is_protocol_emulation_present:
            self.class_logger.info("Disconnecting TRexHLT...")
            TrexHLTMixin.destroy(self)

    def cleanup(self, mode="complete"):
        """This method should do TRex ports cleanup (remove streams etc.)

        Args:
            mode(str): "fast" or "complete". If mode == "fast", method does not clear streams on the port (string)

        """
        TrexMixin.cleanup(self, mode)
        if self.is_protocol_emulation_present:
            # NOTE(review): destroy() pairs with TrexHLTMixin.destroy(), yet this
            # calls TrexHLTMixin.create() -- looks like a possible copy-paste slip
            # for a TrexHLTMixin cleanup/re-init; confirm intent before changing.
            TrexHLTMixin.create(self)

    def sanitize(self):
        """Clear ownership before exit."""
        self.destroy()

    def get_os_mtu(self, iface=None):
        """Get MTU value in host OS.

        Args:
            iface(str):  Interface for getting MTU in host OS

        Returns:
            int:  Original MTU value

        Examples::

            env.tg[1].get_os_mtu(iface=ports[('tg1', 'sw1')][1])

        """
        # Not implemented for TRex.
        pass

    def set_os_mtu(self, iface=None, mtu=None):
        """Set MTU value in host OS.

        Args:
            iface(str):  Interface for changing MTU in host OS
            mtu(int):  New MTU value

        Returns:
            int:  Original MTU value

        Examples::

            env.tg[1].set_os_mtu(iface=ports[('tg1', 'sw1')][1], mtu=1650)

        """
        # Not implemented for TRex.
        pass
# Registration hooks consumed by the testlib device framework.
# Fix: the NAME assignment was corrupted in the source ('NAME | = "tg"').
ENTRY_TYPE = "tg"
INSTANCES = {"trex": Trex,
             }
NAME = "tg"
|
shoopio/shoop | shuup/admin/modules/customers_dashboard/dashboard.py | Python | agpl-3.0 | 802 | 0.001247 | # This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.utils.translation import ugettext | _lazy as _
from shuup.admin.dashboard import DashboardNumberBlock
from shuup.core.models import Order
def get_active_customers_block(request):
    """Build a dashboard number block counting the distinct customers that
    placed an order in the request's shop within the last 30 days.
    """
    recent_customer_ids = Order.objects.filter(shop=request.shop).since(30).values_list("customer_id", flat=True)
    # Deduplicate: a customer with several recent orders is counted once.
    distinct_customers = set(recent_customer_ids)
    return DashboardNumberBlock(
        id="active_customers_count",
        color="blue",
        title=_("Active customers"),
        value=len(distinct_customers),
        icon="fa fa-history",
        subtitle=_("Based on orders within 30 days"),
    )
|
davidt/reviewboard | reviewboard/webapi/tests/test_file_attachment.py | Python | mit | 12,315 | 0 | from __future__ import unicode_literals
from django.utils import six
from djblets.webapi.errors import INVALID_FORM_DATA, PERMISSION_DENIED
from reviewboard.attachments.models import (FileAttachment,
FileAttachmentHistory)
from reviewboard.webapi.resources import resources
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (file_attachment_item_mimetype,
file_attachment_list_mimetype)
from reviewboard.webapi.tests.mixins import (BasicTestsMetaclass,
ReviewRequestChildItemMixin,
ReviewRequestChildListMixin)
from reviewboard.webapi.tests.urls import (get_file_attachment_item_url,
get_file_attachment_list_url)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(ReviewRequestChildListMixin, BaseWebAPITestCase):
"""Testing the FileAttachmentResource list APIs."""
fixtures = ['test_users']
basic_get_fixtures = ['test_scmtools']
sample_api_url = 'review-requests/<id>/file-attachments/'
resource = resources.file_attachment
def setup_review_request_child_test(self, review_request):
    """Provide the list URL and mimetype for the shared child-resource tests."""
    return (get_file_attachment_list_url(review_request),
            file_attachment_list_mimetype)
def compare_item(self, item_rsp, attachment):
    """Assert that a serialized item payload matches its FileAttachment model."""
    self.assertEqual(item_rsp['id'], attachment.pk)
    self.assertEqual(item_rsp['filename'], attachment.filename)
    self.assertEqual(item_rsp['revision'], attachment.attachment_revision)
#
# HTTP GET tests
#
def setup_basic_get_test(self, user, with_local_site, local_site_name,
                         populate_items):
    """Build fixtures for the shared GET tests.

    Creates one visible attachment plus two diff-viewer attachments that the
    list resource must filter out, and returns (url, mimetype, expected items).
    """
    review_request = self.create_review_request(
        create_repository=True,
        with_local_site=with_local_site,
        submitter=user)

    if populate_items:
        # This is the file attachment that should be returned.
        items = [
            self.create_file_attachment(review_request,
                                        orig_filename='logo1.png'),
        ]

        # This attachment shouldn't be shown in the results. It represents
        # a file to be shown in the diff viewer.
        self.create_file_attachment(review_request,
                                    orig_filename='logo2.png',
                                    repo_path='/logo.png',
                                    repo_revision='123',
                                    repository=review_request.repository)

        # This attachment shouldn't be shown either, for the same
        # reasons.
        diffset = self.create_diffset(review_request)
        filediff = self.create_filediff(diffset,
                                        source_file='/logo3.png',
                                        dest_file='/logo3.png',
                                        source_revision='123',
                                        dest_detail='124')
        self.create_file_attachment(review_request,
                                    orig_filename='logo3.png',
                                    added_in_filediff=filediff)
    else:
        items = []

    return (get_file_attachment_list_url(review_request, local_site_name),
            file_attachment_list_mimetype,
            items)
#
# HTTP POST tests
#
def setup_basic_post_test(self, user, with_local_site, local_site_name,
                          post_valid_data):
    """Build fixtures for the shared POST tests.

    Returns (url, mimetype, post data, check_post_result args).
    """
    review_request = self.create_review_request(
        with_local_site=with_local_site,
        submitter=user,
        publish=True)

    # NOTE(review): the image is opened in text mode ('r') and never closed
    # here -- presumably fine on Python 2 where this suite ran; confirm 'rb'
    # and a context manager if this ever targets Python 3.
    return (get_file_attachment_list_url(review_request, local_site_name),
            file_attachment_item_mimetype,
            {'path': open(self.get_sample_image_filename(), 'r')},
            [review_request])
def check_post_result(self, user, rsp, review_request):
    """Verify a POST created the attachment on the draft, not the request."""
    draft = review_request.get_draft()
    self.assertIsNotNone(draft)

    self.assertIn('file_attachment', rsp)
    item_rsp = rsp['file_attachment']

    attachment = FileAttachment.objects.get(pk=item_rsp['id'])
    # Until the draft is published, the attachment belongs only to the draft.
    self.assertIn(attachment, draft.file_attachments.all())
    self.assertNotIn(attachment, review_request.file_attachments.all())
    self.compare_item(item_rsp, attachment)
def test_post_not_owner(self):
    """Testing the POST review-requests/<id>/file-attachments/ API
    without owner
    """
    # The request is created by a different user, so the upload must be
    # rejected with HTTP 403.
    review_request = self.create_review_request()
    self.assertNotEqual(review_request.submitter, self.user)

    with open(self.get_sample_image_filename(), "r") as f:
        self.assertTrue(f)
        rsp = self.api_post(
            get_file_attachment_list_url(review_request),
            {
                'caption': 'logo',
                'path': f,
            },
            expected_status=403)

    self.assertEqual(rsp['stat'], 'fail')
    self.assertEqual(rsp['err']['code'], PERMISSION_DENIED.code)
def test_post_with_attachment_history_id(self):
    """Testing the POST review-requests/<id>/file-attachments/ API with a
    file attachment history
    """
    # Fix: the create_review_request() call was corrupted in the source
    # (stray '|' markers split the two argument lines).
    review_request = self.create_review_request(submitter=self.user,
                                                publish=True)
    history = FileAttachmentHistory.objects.create(display_position=0)
    review_request.file_attachment_histories.add(history)
    self.assertEqual(history.latest_revision, 0)

    with open(self.get_sample_image_filename(), "r") as f:
        self.assertTrue(f)
        rsp = self.api_post(
            get_file_attachment_list_url(review_request),
            {
                'path': f,
                'attachment_history': history.pk,
            },
            expected_mimetype=file_attachment_item_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['file_attachment']['attachment_history_id'],
                         history.pk)

        # Posting against the history must bump its revision counter.
        history = FileAttachmentHistory.objects.get(pk=history.pk)
        self.assertEqual(history.latest_revision, 1)

        review_request.get_draft().publish()

        # Add a second revision
        f.seek(0)
        rsp = self.api_post(
            get_file_attachment_list_url(review_request),
            {
                'path': f,
                'attachment_history': history.pk,
            },
            expected_mimetype=file_attachment_item_mimetype)
        self.assertEqual(rsp['stat'], 'ok')
        self.assertEqual(rsp['file_attachment']['attachment_history_id'],
                         history.pk)

        history = FileAttachmentHistory.objects.get(pk=history.pk)
        self.assertEqual(history.latest_revision, 2)
def test_post_with_attachment_history_id_wrong_review_request(self):
"""Testing the POST review-requests/<id>/file-attachments/ API with a
file attachment history belonging to a different reiew request
"""
review_request_1 = self.create_review_request(submitter=self.user,
publish=True)
history = FileAttachmentHistory.objects.create(display_position=0)
review_request_1.file_attachment_histories.add(history)
review_request_2 = self.create_review_request(submitter=self.user,
publish=True)
self.assertEqual(history.latest_revision, 0)
with open(self.get_sample_image_filename(), "r") as f:
self.assertTrue(f)
rsp = self.api_post(
get_file_attachment_list_url(review_request_2),
{
'path': f,
'attachment_history': history.pk,
|
rigdenlab/conkit | conkit/io/fasta.py | Python | bsd-3-clause | 4,455 | 0.000673 | # BSD 3-Clause License
#
# Copyright (c) 2016-21, University of Liverpool
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Parser module specific to FASTA sequence files
"""
__author__ = "Felix Simkovic"
__date__ = "09 Sep 2016"
__version__ = "0.13.1"
from conkit.io._parser import SequenceFileParser
from conkit.core.sequence import Sequence
from conkit.core.sequencefile import SequenceFile
class FastaParser(SequenceFileParser):
    """Parser class for FASTA sequence files."""

    def __init__(self):
        super(FastaParser, self).__init__()

    def read(self, f_handle, f_id="fasta"):
        """Read a sequence file

        Parameters
        ----------
        f_handle
           Open file handle [read permissions]
        f_id : str, optional
           Unique sequence file identifier

        Returns
        -------
        :obj:`~conkit.core.sequencefile.SequenceFile`

        Raises
        ------
        :exc:`ValueError`
           FASTA record needs to start with >

        """
        hierarchy = SequenceFile(f_id)

        # Collect leading '#' remarks and skip blank lines until the first
        # '>' record header.
        while True:
            raw = f_handle.readline()
            if not raw:
                # Fix: EOF before any record previously spun forever, because
                # readline() keeps returning '' which fell into `continue`.
                return hierarchy
            line = raw.rstrip()
            if not line:
                continue
            elif line.startswith("#"):
                hierarchy.remark = line[1:]
            elif line.startswith(">"):
                break

        # One iteration per '>' record: gather sequence chunks until the next
        # header or EOF.
        while True:
            if not line.startswith(">"):
                raise ValueError("Fasta record needs to start with '>'")

            header_id = line[1:]  # Header without '>' ('id' renamed: shadowed builtin)

            chunks = []
            line = f_handle.readline().rstrip()
            while True:
                if not line:
                    break
                elif line.startswith(">"):
                    break
                chunks.append(line)
                line = f_handle.readline().rstrip()
            _seq_string = "".join(chunks)  # Sequence from chunks

            sequence_entry = Sequence(header_id, _seq_string)
            hierarchy.add(sequence_entry)

            if not line:
                break

        return hierarchy

    def write(self, f_handle, hierarchy):
        """Write a sequence file instance to to file

        Parameters
        ----------
        f_handle
           Open file handle [write permissions]
        hierarchy : :obj:`~conkit.core.sequencefile.SequenceFile`, :obj:`~conkit.core.sequence.Sequence`

        """
        # Fix: the record loop line was corrupted in the source; also build the
        # output via a list + join instead of quadratic string concatenation.
        hierarchy = self._reconstruct(hierarchy)

        parts = []
        for remark in hierarchy.remark:
            parts.append("#{}\n".format(remark))

        for sequence_entry in hierarchy:
            header = ">{}".format(sequence_entry.id)
            if len(sequence_entry.remark) > 0:
                header = "|".join([header] + sequence_entry.remark)
            parts.append(header + "\n")

            sequence_string = sequence_entry.seq.upper()  # UPPER CASE !!!
            # FASTA convention: wrap sequence lines at 60 characters.
            for i in range(0, sequence_entry.seq_len, 60):
                parts.append(sequence_string[i: i + 60] + "\n")

        f_handle.write("".join(parts))
|
dholm/voidwalker | voidwalker/framework/interface/__init__.py | Python | gpl-3.0 | 1,391 | 0 | # (void)walker command line interface
# Copyright (C) 2012 David Holm <dholmster@gmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR | A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# | along with this program. If not, see <http://www.gnu.org/licenses/>.
from .command import BreakpointCommand
from .command import Command
from .command import CommandBuilder
from .command import CommandFactory
from .command import DataCommand
from .command import PrefixCommand
from .command import StackCommand
from .command import SupportCommand
from .command import register_command
from .parameter import Parameter
from .parameter import PrefixParameter
from .parameter import BooleanParameter
from .parameter import EnumParameter
from .parameter import IntegerParameter
from .parameter import ParameterFactory
from .parameter import ParameterBuilder
from .parameter import register_parameter
from .config import Configuration
|
ashwyn/eden-message_parser | modules/eden/dvi.py | Python | mit | 22,129 | 0.016224 | # -*- coding: utf-8 | -*-
| """ Sahana Eden DVI Model
@author: Dominic König <dominic[at]aidiq.com>
@copyright: 2009-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["S3DVIModel"]
from gluon import *
from gluon.storage import Storage
from ..s3 import *
from eden.layouts import S3AddResourceLink
# =============================================================================
class S3DVIModel(S3Model):
names = ["dvi_recreq",
"dvi_body",
"dvi_morgue",
"dvi_checklist",
"dvi_effects",
"dvi_identification"
]
def model(self):
T = current.T
db = current.db
request = current.request
person_id = self.pr_person_id
location_id = self.gis_location_id
messages = current.messages
UNKNOWN_OPT = messages.UNKNOWN_OPT
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
super_link = self.super_link
# ---------------------------------------------------------------------
# Recovery Request
#
task_status = {
1:T("Not Started"),
2:T("Assigned"),
3:T("In Progress"),
4:T("Completed"),
5:T("Not Applicable"),
6:T("Not Possible")
}
tablename = "dvi_recreq"
table = define_table(tablename,
s3_datetime(label = T("Date/Time of Find"),
empty=False,
default = "now",
future=0
),
Field("marker", length=64,
label = T("Marker"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Marker"),
T("Number or code used to mark the place of find, e.g. flag code, grid coordinates, site reference number or similar (if available)")))),
person_id(label = T("Finder")),
Field("bodies_found", "integer",
label = T("Bodies found"),
requires = IS_INT_IN_RANGE(1, 99999),
represent = lambda v, row=None: IS_INT_AMOUNT.represent(v),
default = 0,
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Number of bodies found"),
T("Please give an estimated figure about how many bodies have been found.")))),
Field("bodies_recovered", "integer",
label = T("Bodies recovered"),
requires = IS_NULL_OR(IS_INT_IN_RANGE(0, 99999)),
represent = lambda v, row=None: IS_INT_AMOUNT.represent(v),
default = 0),
Field("description", "text"),
location_id(label=T("Location")),
Field("status", "integer",
requires = IS_IN_SET(task_status,
zero=None),
default = 1,
label = T("Task Status"),
represent = lambda opt: \
task_status.get(opt, UNKNOWN_OPT)),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_create = T("Body Recovery Request"),
title_display = T("Request Details"),
title_list = T("Body Recovery Requests"),
title_update = T("Update Request"),
title_search = T("Search Request"),
subtitle_create = T("Add New Request"),
label_list_button = T("List Requests"),
label_create_button = T("Add Request"),
label_delete_button = T("Delete Request"),
msg_record_created = T("Recovery Request added"),
msg_record_modified = T("Recovery Request updated"),
msg_record_deleted = T("Recovery Request deleted"),
msg_list_empty = T("No requests found"))
# Resource configuration
configure(tablename,
list_fields = ["id",
"date",
"marker",
"location_id",
"bodies_found",
"bodies_recovered",
"status"
])
# Reusable fields
dvi_recreq_id = S3ReusableField("dvi_recreq_id", table,
requires = IS_NULL_OR(IS_ONE_OF(db,
"dvi_recreq.id",
"[%(marker)s] %(date)s: %(bodies_found)s bodies")),
represent = lambda id: id,
label=T("Recovery Request"),
ondelete = "RESTRICT")
# ---------------------------------------------------------------------
# Morgue
#
tablename = "dvi_morgue"
table = define_table(tablename,
super_link("pe_id", "pr_pentity"),
super_link("site_id", "org_site"),
Field("name", unique=True, notnull=True,
label = T("Morgue")),
self.org_organisation_id(),
Field("description",
label = T("Description")),
location_id(),
Field("obsolete", "boolean",
label = T("Obsolete"),
represent = lambda bool: \
(bool and [T("Obsolete")] or [messages.NONE])[0],
default = False,
readable = False,
writable = False),
*s3_meta_fields())
# Reusable Field
morgue_id = S3ReusableField("morgue_id", table,
requires = IS_NULL_OR(IS_ONE_OF(db,
"dvi_morgue.id", "%(name)s")),
represent = self.morgue_represent,
|
goosechooser/file-manip-toolkit | tests/eswap_cli_test.py | Python | mit | 794 | 0.002519 | import sys
import pytest
from file_manip_toolkit.eswap import cli
# TODO: parameterize
def test_parse_args():
    """parse_args must populate file, format and output; verbose defaults off."""
    args = cli.parse_args(['test.py', 'filetemp', '-o', 'here'])
    assert args.file
    # Fix: attribute access was corrupted in the source ('args.fo | rmat').
    assert args.format
    assert args.output
    # verbose is falsy by default, so asserting it must raise.
    with pytest.raises(AssertionError):
        assert args.verbose
@pytest.mark.parametrize("test_input, expected", [
    # presumably: bad format -> exit 1, valid 'h' format -> exit 0; confirm
    # against cli.main's argument handling.
    (['eswap_placeholder', 'test', 'filetemp', '-o', 'here'], 1),
    (['eswap_placeholder', 'testdir\\vm3.13', 'h', '-o', 'there'], 0),
])
def test_main(tmpdir, test_input, expected):
    """main() must exit with the expected status for each CLI invocation."""
    # Redirect the output argument into a per-test temporary directory.
    fn = tmpdir.mkdir('cool').join(test_input[-1])
    new_input = test_input[:-1]
    new_input.append(str(fn))
    sys.argv = new_input
    with pytest.raises(SystemExit) as sysexit:
        cli.main()
    assert sysexit.code == expected
|
jsebechlebsky/pptp_auditor | pptp_auditor/__main__.py | Python | gpl-3.0 | 68 | 0.014706 | from pptp_aud | itor import main
if __name__ == '__main__':
| main() |
lightcode/SeriesWatcher | serieswatcher/serieswatcher/tasks/getcover.py | Python | mit | 506 | 0 | # -*- coding: utf-8 -*-
from PyQt4.QtCore | import Qt |
from PyQt4 import QtCore, QtGui
class GetCoverTask(QtCore.QObject):
    """Task that loads a cover image from disk, scales it to fit 120x120 and
    publishes the result through the ``coverLoaded`` signal."""

    # Emitted with the scaled QImage once loading has finished.
    coverLoaded = QtCore.pyqtSignal(QtGui.QImage)

    def __init__(self, coverPath):
        super(GetCoverTask, self).__init__()
        self._coverPath = coverPath  # filesystem path of the image to load

    def run(self):
        """Load, scale (aspect-ratio preserving, smooth) and emit the image."""
        image = QtGui.QImage(self._coverPath)
        image = image.scaled(120, 120, Qt.KeepAspectRatio,
                             Qt.SmoothTransformation)
        self.coverLoaded.emit(image)
|
nwinter/bantling | src/application/__init__.py | Python | mit | 1,104 | 0.002717 | """
Initialize Flask app
"""
# Fix: the werkzeug import was corrupted in the source ('DebuggedApp | lication');
# stdlib import moved first per convention.
import os

from flask import Flask
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication

app = Flask('application')

# Select the configuration object from the FLASK_CONF environment variable.
if os.getenv('FLASK_CONF') == 'DEV':
    # Development settings
    app.config.from_object('application.settings.Development')

    # Flask-DebugToolbar
    toolbar = DebugToolbarExtension(app)

    # Google app engine mini profiler
    # https://github.com/kamens/gae_mini_profiler
    app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)

    from gae_mini_profiler import profiler, templatetags

    @app.context_processor
    def inject_profiler():
        # Expose the profiler include snippet to all templates.
        return dict(profiler_includes=templatetags.profiler_includes())

    app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app)

elif os.getenv('FLASK_CONF') == 'TEST':
    app.config.from_object('application.settings.Testing')

else:
    app.config.from_object('application.settings.Production')

# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')

# Pull in URL dispatch routes
import urls
BirdAPI/BirdJSON | tests/test.py | Python | mit | 277 | 0.00361 | from pprint import ppr | int
import birdjson
if __name__ == "__main__":
js = birdjson.load_file("config.json")
if js:
print js.settings.mysql.config.test[0].hello.int
print js.settings.mysql.config.test[1]._2.int
pprint(vars(js))
| |
klrmn/well-rested-tests | unittest/tests/test_loader.py | Python | mpl-2.0 | 2,850 | 0.000351 | import well_rested_unittest
import unittest2
import os
import sample_tests
from sample_tests import subdirectory
from sample_tests.subdirectory import test_class
class NewSuite(well_rested_unittest.ErrorTolerantOptimisedTestSuite):
    """Trivial suite subclass used to verify that the loader honours a
    caller-supplied ``suiteClass``."""
    pass
class TestAutoDiscoveringTestLoader(unittest2.TestCase):
    """Exercise AutoDiscoveringTestLoader name resolution and test counts.

    Fix: ``test_top_level`` was corrupted in the source ('se | lf' in the def
    line and 'assertEqua | l' in the first assertion); restored here.
    """

    maxDiff = None
    concurrency = 4

    def test_specific_suite(self):
        """A caller-supplied suiteClass must be used verbatim."""
        loader = well_rested_unittest.AutoDiscoveringTestLoader(suiteClass=NewSuite)
        self.assertEqual(loader.suiteClass, NewSuite)

    def test_top_level(self):
        """Default suiteClass plus full-package discovery."""
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        self.assertEqual(loader.suiteClass,
                         well_rested_unittest.ErrorTolerantOptimisedTestSuite)
        suite = loader.loadTestsFromNames(['sample_tests'], None)
        self.assertEqual(len(suite._tests), 17)

    def test_subdirectory(self):
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        suite = loader.loadTestsFromNames(
            ['sample_tests/subdirectory'], None)
        self.assertEqual(len(suite._tests), 6)

    def test_module(self):
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        suite = loader.loadTestsFromNames(
            ['sample_tests.test_class'], None)
        self.assertEqual(len(suite._tests), 4)

    def test_class(self):
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        suite = loader.loadTestsFromNames(
            ['sample_tests.test_class.TestClass1'], None)
        self.assertEqual(len(suite._tests), 2)

    def test_method(self):
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        suite = loader.loadTestsFromNames(
            ['sample_tests.test_class.TestClass1.test_1'], None)
        self.assertEqual(len(suite._tests), 1)

    def test_subdirectory_and_class(self):
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        suite = loader.loadTestsFromNames(
            ['sample_tests/subdirectory',
             'sample_tests.test_class.TestClass2'], None)
        self.assertEqual(len(suite._tests), 8)

    def test_module_and_method(self):
        loader = well_rested_unittest.AutoDiscoveringTestLoader()
        suite = loader.loadTestsFromNames(
            ['sample_tests.subdirectory.test_class',
             'sample_tests.test_class.TestClass1.test_2'], None)
        self.assertEqual(len(suite._tests), 7)

    def test_failing(self):
        """--failing must re-run from the '.failing' record file."""
        loader = well_rested_unittest.AutoDiscoveringTestLoader(failing=True)
        self.assertEqual(loader.from_file, '.failing')

    def test_from_file(self):
        """Loading from an explicit test-list file limits the suite."""
        loader = well_rested_unittest.AutoDiscoveringTestLoader(
            from_file='sample_tests/load_2_tests.txt')
        suite = loader.loadTestsFromNames(['sample_tests'], None)
        self.assertEqual(len(suite._tests), 2)
|
SanPen/GridCal | src/research/power_flow/iwamoto_1_bus_test.py | Python | lgpl-3.0 | 2,463 | 0.00203 | import numpy as np
from scipy.sparse import csc_matrix, lil_matrix
from GridCal.Engine.Simulations.PowerFlow.jacobian_based_power_flow import NR_LS
"""
From the paper:
Load-Flow Solutions for Ill-Conditioned Power Systems by a Newton-Like Method
"""
# Ybus in tripplets form as per the paper
triplets = [(1, 1, 0-14.939j),
(1, 2, 14.148+0j),
(2, 2, 12.051 - 33.089j),
(2, 3, 0.0 + 6.494j),
(2, 4, -12.051 + 13.197j),
(3, 3, 2.581 - 10.282j),
(3, 5, -2.581 + 0.786j),
(4, 4, 12.642 - 74.081j),
(4, 5, 0.0 + 2.177j),
(4, 6, 0.0 + 56.689j),
(4, 7, -0.592 + 0.786j),
(5, 5, 2.581 - 5.889j),
(6, 6, 0.0 - 55.556j),
(7, 7, 3.226 - 4.304j),
(7, 8, -2.213 + 2.959j),
(8, 8, 2.893 - 5.468j),
(8, 9, -0.138 + 1.379j),
(8, 10, -0.851 + 1.163j),
(9 | , 9, 0.104 - 1.042j),
(10, 10, 1.346 - 6.110j),
(10, 11, -0.374 + 3.742j),
(11, 11, 0.283 - 2.785j)]
# correct thr triplets indices to zero-base
Y = lil_matrix((11, 11), dtype=complex)
for i, j, v in triplets:
Y[i-1, j-1] = v
Ybus = Y.tocsc()
print('Ybus')
print(Ybus.todense())
print(np.linalg.cond(Ybus.todense()))
Sbus = np.array([0+0j,
0+0j,
-0.128-0.062j, |
0+0j,
-0.165-0.080j,
-0.09-0.068j,
0+0j,
0+0j,
-0.026-0.009j,
0+0j,
-0.158-0.057j])
V0 = np.ones_like(Sbus)
V0[0] = 1.024 + 0j
V, converged, norm_f, Scalc, iter_, elapsed = NR_LS(Ybus=Ybus,
Sbus=Sbus,
V0=V0,
Ibus=np.zeros_like(Sbus),
pv=np.array([], dtype=int),
pq=np.arange(1, 11, dtype=int),
tol=1e-3,
max_it=15,
acceleration_parameter=1.05)
import pandas as pd
df = pd.DataFrame(data=np.c_[np.abs(V), np.angle(V), Scalc.real, Scalc.imag],
columns=['Vm', 'Va', 'P', 'Q'])
print(df)
print(norm_f) |
juanjux/python-driver | fixtures/repr.py | Python | gpl-3.0 | 8 | 0 | re | pr( | 1)
|
google-research/electra | run_finetuning.py | Python | apache-2.0 | 12,663 | 0.006791 | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fine-tunes an ELECTRA model on a downstream task."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import collections
import json
import tensorflow.compat.v1 as tf
import configure_finetuning
from finetune import preprocessing
from finetune import task_builder
from model import modeling
from model import optimization
from util import training_utils
from util import utils
class FinetuningModel(object):
  """Finetuning model with support for multi-task training."""

  def __init__(self, config: configure_finetuning.FinetuningConfig, tasks,
               is_training, features, num_train_steps):
    # Create a shared transformer encoder
    bert_config = training_utils.get_bert_config(config)
    self.bert_config = bert_config
    if config.debug:
      # Shrink the encoder for quick debug runs.
      bert_config.num_hidden_layers = 3
      bert_config.hidden_size = 144
      bert_config.intermediate_size = 144 * 4
      bert_config.num_attention_heads = 4
    assert config.max_seq_length <= bert_config.max_position_embeddings
    bert_model = modeling.BertModel(
        bert_config=bert_config,
        is_training=is_training,
        input_ids=features["input_ids"],
        input_mask=features["input_mask"],
        token_type_ids=features["segment_ids"],
        use_one_hot_embeddings=config.use_tpu,
        embedding_size=config.embedding_size)
    # Fraction of training completed so far (global_step / num_train_steps).
    percent_done = (tf.cast(tf.train.get_or_create_global_step(), tf.float32) /
                    tf.cast(num_train_steps, tf.float32))

    # Add specific tasks
    self.outputs = {"task_id": features["task_id"]}
    losses = []
    for task in tasks:
      with tf.variable_scope("task_specific/" + task.name):
        task_losses, task_outputs = task.get_prediction_module(
            bert_model, features, is_training, percent_done)
        losses.append(task_losses)
        self.outputs[task.name] = task_outputs
    # One-hot on task_id selects only the loss of each example's own task.
    self.loss = tf.reduce_sum(
        tf.stack(losses, -1) *
        tf.one_hot(features["task_id"], len(config.task_names)))
def model_fn_builder(config: configure_finetuning.FinetuningConfig, tasks,
                     num_train_steps, pretraining_config=None):
  """Returns `model_fn` closure for TPUEstimator.

  Args:
    config: finetuning configuration.
    tasks: list of task objects to train/predict on.
    num_train_steps: total number of optimizer steps.
    pretraining_config: if given, its ``model_dir``'s latest checkpoint is
      used for initialization instead of ``config.init_checkpoint``.
  """

  def model_fn(features, labels, mode, params):
    """The `model_fn` for TPUEstimator."""
    utils.log("Building model...")
    is_training = (mode == tf.estimator.ModeKeys.TRAIN)
    model = FinetuningModel(
        config, tasks, is_training, features, num_train_steps)

    # Load pre-trained weights from checkpoint
    init_checkpoint = config.init_checkpoint
    if pretraining_config is not None:
      # Prefer the most recent checkpoint of the pretraining run.
      init_checkpoint = tf.train.latest_checkpoint(pretraining_config.model_dir)
      utils.log("Using checkpoint", init_checkpoint)
    tvars = tf.trainable_variables()
    scaffold_fn = None
    if init_checkpoint:
      assignment_map, _ = modeling.get_assignment_map_from_checkpoint(
          tvars, init_checkpoint)
      if config.use_tpu:
        # On TPU the restore must happen inside the Scaffold factory so it
        # runs on the TPU host rather than in the outer graph.
        def tpu_scaffold():
          tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
          return tf.train.Scaffold()

        scaffold_fn = tpu_scaffold
      else:
        tf.train.init_from_checkpoint(init_checkpoint, assignment_map)

    # Build model for training or prediction
    if mode == tf.estimator.ModeKeys.TRAIN:
      train_op = optimization.create_optimizer(
          model.loss, config.learning_rate, num_train_steps,
          weight_decay_rate=config.weight_decay_rate,
          use_tpu=config.use_tpu,
          warmup_proportion=config.warmup_proportion,
          layerwise_lr_decay_power=config.layerwise_lr_decay,
          n_transformer_layers=model.bert_config.num_hidden_layers
      )
      output_spec = tf.estimator.tpu.TPUEstimatorSpec(
          mode=mode,
          loss=model.loss,
          train_op=train_op,
          scaffold_fn=scaffold_fn,
          # ETA hook logs progress; loss logging is skipped on TPU where
          # per-step tensors are not cheaply available.
          training_hooks=[training_utils.ETAHook(
              {} if config.use_tpu else dict(loss=model.loss),
              num_train_steps, config.iterations_per_loop, config.use_tpu, 10)])
    else:
      assert mode == tf.estimator.ModeKeys.PREDICT
      output_spec = tf.estimator.tpu.TPUEstimatorSpec(
          mode=mode,
          predictions=utils.flatten_dict(model.outputs),
          scaffold_fn=scaffold_fn)

    utils.log("Building complete")
    return output_spec

  return model_fn
class ModelRunner(object):
  """Fine-tunes a model on a supervised task.

  Wraps a ``TPUEstimator`` plus the data preprocessor; exposes ``train``,
  ``evaluate`` and ``evaluate_task``.
  """

  def __init__(self, config: configure_finetuning.FinetuningConfig, tasks,
               pretraining_config=None):
    self._config = config
    self._tasks = tasks
    self._preprocessor = preprocessing.Preprocessor(config, self._tasks)

    is_per_host = tf.estimator.tpu.InputPipelineConfig.PER_HOST_V2
    tpu_cluster_resolver = None
    if config.use_tpu and config.tpu_name:
      tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
          config.tpu_name, zone=config.tpu_zone, project=config.gcp_project)
    tpu_config = tf.estimator.tpu.TPUConfig(
        iterations_per_loop=config.iterations_per_loop,
        num_shards=config.num_tpu_cores,
        per_host_input_for_training=is_per_host,
        tpu_job_name=config.tpu_job_name)
    run_config = tf.estimator.tpu.RunConfig(
        cluster=tpu_cluster_resolver,
        model_dir=config.model_dir,
        save_checkpoints_steps=config.save_checkpoints_steps,
        # Step-based checkpointing only; disable the time-based default.
        save_checkpoints_secs=None,
        tpu_config=tpu_config)

    if self._config.do_train:
      (self._train_input_fn,
       self.train_steps) = self._preprocessor.prepare_train()
    else:
      self._train_input_fn, self.train_steps = None, 0
    model_fn = model_fn_builder(
        config=config,
        tasks=self._tasks,
        num_train_steps=self.train_steps,
        pretraining_config=pretraining_config)
    self._estimator = tf.estimator.tpu.TPUEstimator(
        use_tpu=config.use_tpu,
        model_fn=model_fn,
        config=run_config,
        train_batch_size=config.train_batch_size,
        eval_batch_size=config.eval_batch_size,
        predict_batch_size=config.predict_batch_size)

  def train(self):
    """Run training for ``self.train_steps`` optimizer steps."""
    utils.log("Training for {:} steps".format(self.train_steps))
    self._estimator.train(
        input_fn=self._train_input_fn, max_steps=self.train_steps)

  def evaluate(self):
    """Evaluate every task; returns ``{task_name: results dict}``."""
    return {task.name: self.evaluate_task(task) for task in self._tasks}

  def evaluate_task(self, task, split="dev", return_results=True):
    """Evaluate the current model.

    Args:
      task: the task object to score.
      split: dataset split name (default "dev").
      return_results: if True return the scorer's results dict, otherwise
        return the scorer itself so callers can aggregate further.
    """
    utils.log("Evaluating", task.name)
    eval_input_fn, _ = self._preprocessor.prepare_predict([task], split)
    results = self._estimator.predict(input_fn=eval_input_fn,
                                      yield_single_examples=True)
    scorer = task.get_scorer()
    for r in results:
      if r["task_id"] != len(self._tasks):  # ignore padding examples
        r = utils.nest_dict(r, self._config.task_names)
        scorer.update(r[task.name])
    if return_results:
      utils.log(task.name + ": " + scorer.results_str())
      utils.log()
      return dict(scorer.get_results())
    else:
      return scorer
def write_classification_outputs(self, tasks, trial, split):
"""Write classification predictions to disk."""
utils.log("Writing out predictions for", tasks, split)
predict_input_fn, _ = self._preprocessor.prepare_predict(tasks, split)
results = self._estimator.predict(input_fn=predict_input_fn,
yield_single_examples=True)
# task name -> eid -> model |
Xprima-ERP/odoo_addons | xpr_xis_connector/res_users.py | Python | gpl-3.0 | 1,160 | 0 | # -*- encoding: utf-8 -*-
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2014 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
from openerp.osv import osv, fields
_logger = logging.getLogger(__name__)
class res_users(osv.osv):
    """Extend ``res.users`` with a reference to the external XIS user."""
    _inherit = "res.users"
    _columns = {
        # Integer id of the corresponding user record in the XIS system.
        'xis_user_external_id': fields.integer('XIS external user',
                                               required=True),
    }
|
sdpython/pyquickhelper | _unittests/ut_helpgen/test_utils_sphinxdoc2.py | Python | mit | 2,612 | 0.001149 | """
@brief test log(time=1s)
@author Xavier Dupre
"""
import sys
import os
import unittest
import pyquickhelper.helpgen.utils_sphinx_doc as utils_sphinx_doc
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import ExtTestCase
class TestSphinxDoc2(ExtTestCase):
    """Unit tests for :mod:`pyquickhelper.helpgen.utils_sphinx_doc`."""

    def test_apply_modification_template_obj(self):
        """apply_modification_template produces RST and collects objects."""
        path = os.path.split(__file__)[0]
        file = os.path.normpath(
            os.path.join(
                path,
                "..",
                "..",
                "src",
                "pyquickhelper",
                "loghelper",
                "pqh_exception.py"))
        rootm = os.path.normpath(os.path.join(path, "..", "..", "src"))
        rootrep = ("pyquickhelper.src.pyquickhelper.", "")
        store_obj = {}

        def softfile(f):
            # No file is treated as "soft" in this test.
            return False

        rst = utils_sphinx_doc.apply_modification_template(
            rootm, store_obj,
            utils_sphinx_doc.add_file_rst_template,
            file, rootrep, softfile,
            {}, additional_sys_path=[],
            fLOG=fLOG)
        self.assertNotEmpty(rst)
        self.assertNotEmpty(store_obj)
        for k, v in store_obj.items():
            fLOG("test1", k, v)

    # The three members below exist only to be discovered by
    # test_inspect_object; their docstrings are asserted against.
    @staticmethod
    def private_static():
        """ doc pr"""
        res = 0
        return res

    @property
    def prop(self):
        """ doc prop"""
        return 1

    def __gt__(self, o):
        """doc gt"""
        return True

    def test_inspect_object(self):
        """ test 2"""
        mod = sys.modules[__name__]
        objs = utils_sphinx_doc.get_module_objects(mod)
        ty = {}
        for _ in objs:
            ty[_.type] = ty.get(_.type, 0) + 1
        if ty.get("method", 0) > 5 or ty.get("staticmethod", 0) == 0:
            for _ in objs:
                if _.type == "method":
                    continue
                if "private" in _.name:
                    self.assertIn("doc pr", _.doc)
                fLOG(_.type, _.name, _.doc.replace("\n", "\\n"))
            for _ in objs:
                if _.type != "method":
                    continue
                fLOG(_.type, _.module, _.name, _.doc.replace("\n", "\\n"))
        self.assertEqual(ty.get("property", 0), 1)
        if ty.get("staticmethod", 0) != 1:
            raise Exception("{0}".format(str(ty)))
        self.assertGreater(ty["method"], 1)
# Allow running this test module directly with ``python <file>``.
if __name__ == "__main__":
    unittest.main()
|
Ambahm/assignment_testing | unit/lesson_02/test_task_09.py | Python | mpl-2.0 | 540 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests lesson 02 task 09."""
# Import Python libs
imp | ort unittest
# Import student file
import inquisition
class L02T09T | estCase(unittest.TestCase):
"""
Tests for lesson 02 task 09.
"""
def test_module_docstring_length(self):
"""
Tests that the module has a docstring at least 3 lines long.
"""
numlines = len(inquisition.__doc__.splitlines())
self.assertGreaterEqual(numlines, 3)
if __name__ == '__main__':
unittest.main()
|
sk1tt1sh/python-docx | tests/test_text.py | Python | mit | 12,538 | 0 | # encoding: utf-8
"""
Test suite for the docx.text module
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from docx.enum.text import WD_BREAK
from docx.oxml.text import CT_P, CT_R
from docx.text import Paragraph, Run
import pytest
from mock import call, Mock
from .oxml.unitdata.text import (
a_b, a_bCs, a_br, a_caps, a_cs, a_dstrike, a_p, a_shadow, a_smallCaps,
a_snapToGrid, a_specVanish, a_strike, a_t, a_vanish, a_webHidden,
an_emboss, an_i, an_iCs, an_imprint, an_oMath, a_noProof, an_outline,
an_r, an_rPr, an_rtl
)
from .unitutil import class_mock, instance_mock
class DescribeParagraph(object):
    """Unit-test specification for :class:`docx.text.Paragraph`."""

    def it_has_a_sequence_of_the_runs_it_contains(self, runs_fixture):
        paragraph, Run_, r_, r_2_, run_, run_2_ = runs_fixture
        runs = paragraph.runs
        # One Run wrapper is constructed per <w:r> child, in order.
        assert Run_.mock_calls == [call(r_), call(r_2_)]
        assert runs == [run_, run_2_]

    def it_can_add_a_run_to_itself(self, add_run_fixture):
        paragraph, text, expected_xml = add_run_fixture
        run = paragraph.add_run(text)
        assert paragraph._p.xml == expected_xml
        assert isinstance(run, Run)
        assert run._r is paragraph._p.r_lst[0]

    def it_knows_its_paragraph_style(self):
        # A missing style element maps to the default 'Normal' style.
        cases = (
            (Mock(name='p_elm', style='foobar'), 'foobar'),
            (Mock(name='p_elm', style=None), 'Normal'),
        )
        for p_elm, expected_style in cases:
            p = Paragraph(p_elm)
            assert p.style == expected_style

    def it_can_set_its_paragraph_style(self):
        # Setting 'Normal' stores None (the element default).
        cases = (
            ('foobar', 'foobar'),
            ('Normal', None),
        )
        for style, expected_setting in cases:
            p_elm = Mock(name='p_elm')
            p = Paragraph(p_elm)
            p.style = style
            assert p_elm.style == expected_setting

    def it_knows_the_text_it_contains(self, text_prop_fixture):
        p, expected_text = text_prop_fixture
        assert p.text == expected_text

    # fixtures -------------------------------------------------------

    @pytest.fixture(params=[None, '', 'foobar'])
    def add_run_fixture(self, request, paragraph):
        # Parametrized over no-text, empty-text and non-empty-text runs.
        text = request.param
        r_bldr = an_r()
        if text:
            r_bldr.with_child(a_t().with_text(text))
        expected_xml = a_p().with_nsdecls().with_child(r_bldr).xml()
        return paragraph, text, expected_xml

    @pytest.fixture
    def p_(self, request, r_, r_2_):
        return instance_mock(request, CT_P, r_lst=(r_, r_2_))

    @pytest.fixture
    def paragraph(self, request):
        p = a_p().with_nsdecls().element
        return Paragraph(p)

    @pytest.fixture
    def Run_(self, request, runs_):
        run_, run_2_ = runs_
        return class_mock(
            request, 'docx.text.Run', side_effect=[run_, run_2_]
        )

    @pytest.fixture
    def r_(self, request):
        return instance_mock(request, CT_R)

    @pytest.fixture
    def r_2_(self, request):
        return instance_mock(request, CT_R)

    @pytest.fixture
    def runs_(self, request):
        run_ = instance_mock(request, Run, name='run_')
        run_2_ = instance_mock(request, Run, name='run_2_')
        return run_, run_2_

    @pytest.fixture
    def runs_fixture(self, p_, Run_, r_, r_2_, runs_):
        paragraph = Paragraph(p_)
        run_, run_2_ = runs_
        return paragraph, Run_, r_, r_2_, run_, run_2_

    @pytest.fixture
    def text_prop_fixture(self):
        # Two runs whose concatenated text is 'foo de bar'.
        p = (
            a_p().with_nsdecls().with_child(
                an_r().with_child(
                    a_t().with_text('foo'))).with_child(
                an_r().with_child(
                    a_t().with_text(' de bar')))
        ).element
        paragraph = Paragraph(p)
        return paragraph, 'foo de bar'
class DescribeRun(object):
    """Unit-test specification for :class:`docx.text.Run`."""

    def it_knows_its_bool_prop_states(self, bool_prop_get_fixture):
        run, prop_name, expected_state = bool_prop_get_fixture
        assert getattr(run, prop_name) == expected_state

    def it_can_change_its_bool_prop_settings(self, bool_prop_set_fixture):
        run, prop_name, value, expected_xml = bool_prop_set_fixture
        setattr(run, prop_name, value)
        assert run._r.xml == expected_xml

    def it_can_add_text(self, add_text_fixture):
        run, text_str, expected_xml, Text_ = add_text_fixture
        _text = run.add_text(text_str)
        assert run._r.xml == expected_xml
        assert _text is Text_.return_value

    def it_can_add_a_break(self, add_break_fixture):
        run, break_type, expected_xml = add_break_fixture
        run.add_break(break_type)
        assert run._r.xml == expected_xml

    def it_knows_the_text_it_contains(self, text_prop_fixture):
        run, expected_text = text_prop_fixture
        assert run.text == expected_text

    # fixtures -------------------------------------------------------

    @pytest.fixture(params=[
        'line', 'page', 'column', 'clr_lt', 'clr_rt', 'clr_all'
    ])
    def add_break_fixture(self, request, run):
        # Map each param to the (type, clear) attrs the <w:br> should get.
        type_, clear, break_type = {
            'line': (None, None, WD_BREAK.LINE),
            'page': ('page', None, WD_BREAK.PAGE),
            'column': ('column', None, WD_BREAK.COLUMN),
            'clr_lt': ('textWrapping', 'left', WD_BREAK.LINE_CLEAR_LEFT),
            'clr_rt': ('textWrapping', 'right', WD_BREAK.LINE_CLEAR_RIGHT),
            'clr_all': ('textWrapping', 'all', WD_BREAK.LINE_CLEAR_ALL),
        }[request.param]
        # expected_xml -----------------
        br_bldr = a_br()
        if type_ is not None:
            br_bldr.with_type(type_)
        if clear is not None:
            br_bldr.with_clear(clear)
        expected_xml = an_r().with_nsdecls().with_child(br_bldr).xml()
        return run, break_type, expected_xml

    @pytest.fixture(params=['foobar', ' foo bar', 'bar foo '])
    def add_text_fixture(self, request, run, Text_):
        text_str = request.param
        t_bldr = a_t().with_text(text_str)
        # Leading/trailing whitespace requires xml:space="preserve".
        if text_str.startswith(' ') or text_str.endswith(' '):
            t_bldr.with_space('preserve')
        expected_xml = an_r().with_nsdecls().with_child(t_bldr).xml()
        return run, text_str, expected_xml, Text_
@pytest.fixture(params=[
('all_caps', True), ('all_caps', False), ('all_caps', None),
('bold', True), ('bold', False), ('bold', None),
('italic', True), ('italic', False), ('italic', None),
('com | plex_script', True), ('complex_script', False),
('complex_script', None),
('cs_bold', True), ('cs_bold', False), ('cs_bold', None),
('cs_italic', True), ('cs_italic', False), ('cs_italic', None),
('double_strike', True), ('double_strike', False),
('double_strike', None),
('emboss', True), ('emboss', False), ('emboss', None),
('hidden', True), ('hidden', False), ('hidden', None),
('italic', True), ('italic', False), ('italic', None),
( | 'imprint', True), ('imprint', False), ('imprint', None),
('math', True), ('math', False), ('math', None),
('no_proof', True), ('no_proof', False), ('no_proof', None),
('outline', True), ('outline', False), ('outline', None),
('rtl', True), ('rtl', False), ('rtl', None),
('shadow', True), ('shadow', False), ('shadow', None),
('small_caps', True), ('small_caps', False), ('small_caps', None),
('snap_to_grid', True), ('snap_to_grid', False),
('snap_to_grid', None),
('spec_vanish', True), ('spec_vanish', False), ('spec_vanish', None),
('strike', True), ('strike', False), ('strike', None),
('web_hidden', True), ('web_hidden', False), ('web_hidden', None),
])
def bool_prop_get_fixture(self, request):
bool_prop_name, expected_state = request.param
bool_prop_bldr = {
'all_caps': a_caps,
'bold': a_b,
'complex_script': a_cs,
'cs_bold': a_bCs,
'cs_italic': an_iCs,
'double_strike': a_dstrike,
'emboss': an_emboss,
' |
vesellov/callfeed.net | mainapp/migrations/0012_auto_20150525_1959.py | Python | mit | 2,217 | 0.000902 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add call-routing and filtering configuration fields to ``Widget``."""

    dependencies = [
        ('mainapp', '0011_widget_is_raw'),
    ]

    operations = [
        migrations.AddField(
            model_name='widget',
            name='blacklist_ip',
            field=models.CharField(default=b'', max_length=3000),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='blacklist_phones',
            field=models.CharField(default=b'', max_length=3000),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='delay_before_callback_from_a_to_b',
            field=models.IntegerField(default=0),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='delay_before_callback_to_additional_number',
            field=models.IntegerField(default=0),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='disable_on_mobiles',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='geo_filter',
            field=models.CharField(default=b'all', max_length=20, choices=[(b'all', b'All')]),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='operator_incoming_number',
            field=models.CharField(default=b'callfeed', max_length=8, choices=[(b'callfeed', b'Callfeed'), (b'client', b'Client')]),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='speak_site_name',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='widget',
            name='time_before_callback_sec',
            field=models.IntegerField(default=0),
            preserve_default=True,
        ),
    ]
|
tanglei528/nova | nova/tests/api/openstack/compute/contrib/test_shelve.py | Python | apache-2.0 | 5,679 | 0.001057 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import webob
from nova.api.openstack.compute.contrib import shelve
from nova.compute import api as compute_api
from nova import db
from nova import exception
from nova.openstack.common import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
def fake_instance_get_by_uuid(context, instance_id,
                              columns_to_join=None, use_slave=False):
    """Stub for ``db.instance_get_by_uuid``.

    Returns a fake instance whose ``project_id`` differs from the request
    context's project, so project-scoped policy checks are exercised.
    """
    return fake_instance.fake_db_instance(
        **{'name': 'fake', 'project_id': '%s_unequal' % context.project_id})
def fake_auth_context(context):
    """Stub authorizer that unconditionally grants access.

    Used in place of the shelve extension's real policy check so the
    tests can reach the code paths behind authorization.
    """
    return True
class ShelvePolicyTest(test.NoDBTestCase):
    """Policy and lock handling tests for the os-shelve extension."""

    def setUp(self):
        super(ShelvePolicyTest, self).setUp()
        self.controller = shelve.ShelveController()

    def test_shelve_restricted_by_role(self):
        # Non-admin callers must be rejected by the role-based rule.
        rules = policy.Rules({'compute_extension:shelve':
                              policy.parse_rule('role:admin')})
        policy.set_rules(rules)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(exception.Forbidden, self.controller._shelve,
                          req, str(uuid.uuid4()), {})

    def test_shelve_allowed(self):
        # Extension rule passes, but the instance belongs to another
        # project, so the compute-level check still raises Forbidden.
        rules = policy.Rules({'compute:get': policy.parse_rule(''),
                              'compute_extension:shelve':
                              policy.parse_rule('')})
        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(exception.Forbidden, self.controller._shelve,
                          req, str(uuid.uuid4()), {})

    def test_shelve_locked_server(self):
        # A locked instance maps to HTTP 409 Conflict.
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        self.stubs.Set(shelve, 'auth_shelve', fake_auth_context)
        self.stubs.Set(compute_api.API, 'shelve',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(webob.exc.HTTPConflict, self.controller._shelve,
                          req, str(uuid.uuid4()), {})

    def test_unshelve_restricted_by_role(self):
        rules = policy.Rules({'compute_extension:unshelve':
                              policy.parse_rule('role:admin')})
        policy.set_rules(rules)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(exception.Forbidden, self.controller._unshelve,
                          req, str(uuid.uuid4()), {})

    def test_unshelve_allowed(self):
        rules = policy.Rules({'compute:get': policy.parse_rule(''),
                              'compute_extension:unshelve':
                              policy.parse_rule('')})
        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(exception.Forbidden, self.controller._unshelve,
                          req, str(uuid.uuid4()), {})

    def test_unshelve_locked_server(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        self.stubs.Set(shelve, 'auth_unshelve', fake_auth_context)
        self.stubs.Set(compute_api.API, 'unshelve',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(webob.exc.HTTPConflict, self.controller._unshelve,
                          req, str(uuid.uuid4()), {})

    def test_shelve_offload_restricted_by_role(self):
        rules = policy.Rules({'compute_extension:shelveOffload':
                              policy.parse_rule('role:admin')})
        policy.set_rules(rules)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(exception.Forbidden,
                          self.controller._shelve_offload, req,
                          str(uuid.uuid4()), {})

    def test_shelve_offload_allowed(self):
        rules = policy.Rules({'compute:get': policy.parse_rule(''),
                              'compute_extension:shelveOffload':
                              policy.parse_rule('')})
        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(exception.Forbidden,
                          self.controller._shelve_offload, req,
                          str(uuid.uuid4()), {})

    def test_shelve_offload_locked_server(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        self.stubs.Set(shelve, 'auth_shelve_offload', fake_auth_context)
        self.stubs.Set(compute_api.API, 'shelve_offload',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequest.blank('/v2/123/servers/12/os-shelve')
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._shelve_offload,
                          req, str(uuid.uuid4()), {})
|
cmek/radiusdev-web | radius/radauth/apps.py | Python | bsd-2-clause | 134 | 0.007463 | fro | m django.apps import AppConfig
class RadauthAppConfig(AppConfig):
name = 'radauth'
verbose_name = "Radius Au | thentication"
|
brandonPurvis/osf.io | admin/common_auth/admin.py | Python | apache-2.0 | 2,035 | 0.00344 | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import Permission
from django.contrib.auth.forms import PasswordResetForm
from .models import MyUser
from forms import CustomUserRegistrationForm
class PermissionAdmin(admin.ModelAdmin):
    """Admin for ``Permission`` rows; searchable by name and codename."""
    search_fields = ['name', 'codename']
class CustomUserAdmin(UserAdmin):
    """Admin for ``MyUser`` accounts with an email-invitation action.

    New accounts are created inactive and activated via the emailed
    password-reset style invitation.
    """
    add_form = CustomUserRegistrationForm
    list_display = ['email', 'first_name', 'last_name', 'is_active', 'confirmed', 'osf_id']
    fieldsets = (
        (None, {'fields': ('email', 'password',)}),
        ('Personal info', {'fields': ('first_name', 'last_name', 'email', 'date_joined', 'last_login', 'osf_id')}),
        ('Permissions', {'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions',)}),
    )
    add_fieldsets = (
        (None, {'fields':
                ('email', 'first_name', 'last_name', 'password1', 'password2'),
                }),)
    search_fields = ('email', 'first_name', 'last_name',)
    ordering = ('last_name', 'first_name',)
    actions = ['send_email_invitation']

    # TODO - include alternative messages for warning/failure
    def send_email_invitation(self, request, queryset):
        """Send a password-reset style invitation email to each selected user."""
        for user in queryset:
            reset_form = PasswordResetForm({'email': user.email}, request.POST)
            # NOTE(review): assert is stripped under ``python -O``; an
            # explicit validation error would be safer here.
            assert reset_form.is_valid()
            reset_form.save(
                #subject_template_name='templates/emails/account_creation_subject.txt',
                #email_template_name='templates/emails/invitation_email.html',
                request=request
            )
        self.message_user(request, 'Email invitation successfully sent')
    send_email_invitation.short_description = 'Send email invitation to selected users'

    def save_model(self, request, obj, form, change):
        """Persist the user; newly created accounts start inactive."""
        if not change:
            obj.is_active = False
        obj.save()
# Register the customized admin classes with the default admin site.
admin.site.register(MyUser, CustomUserAdmin)
admin.site.register(Permission, PermissionAdmin)
|
letsencrypt/letsencrypt | certbot-dns-dnsmadeeasy/certbot_dns_dnsmadeeasy/__init__.py | Python | apache-2.0 | 3,576 | 0.001678 | """
The `~certbot_dns_dnsmadeeasy.dns_dnsmadeeasy` plugin automates the process of
completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
subsequently removing, TXT records using the DNS Made Easy API.
.. note::
The plugin is not installed by default. It can be installed by heading to
`certbot.eff.org <https://certbot.eff.org/instructions#wildcard>`_, choosing your system and
selecting the Wildcard tab.
Named Arguments
---------------
========================================= =====================================
``--dns-dnsmadeeasy-credentials`` DNS Made Easy credentials_ INI file.
(Required)
``--dns-dnsmadeeasy-propagation-seconds`` The number of seconds to wait for DNS
to propagate before asking the ACME
server to verify the DNS record.
(Default: 60)
========================================= =====================================
Credentials
-----------
Use of this plugin requires a configuration file containing DNS Made Easy API
credentials, obtained from your DNS Made Easy
`account page <https://cp.dnsmadeeasy.com/account/info>`_.
.. code-block:: ini
:name: credentials.ini
:caption: Example credentials file:
# DNS Made Easy API credentials used by Certbot
dns_dnsmadeeasy_api_key = 1c1a3c91-4770-4ce7-96f4-54c0eb0e457a
dns_dnsmadeeasy_secret_key = c9b5625f-9834-4ff8-baba-4ed5f32cae55
The path to this file can be provided interactively or using the
``--dns-dnsmadeeasy-credentials`` command-line argument. Certbot records the path
to this file for use during renewal, but does not store the file's contents.
.. caution::
You should protect these API credentials as you would the password to your
DNS Made Easy account. Users who can read this file can use these credentials
to issue arbitrary API calls on your behalf. Users who can cause Certbot to
run using these credentials can complete a ``dns-01`` challenge to acquire
new certificates or revoke existing certificates for associated domains,
even if those domains aren't being managed by this server.
Certbot will emit a warning if it detects that the credentials file can be
accessed by other users on your system. The warning reads "Unsafe permissions
on credentials configuration file", followed by the path to the credentials
file. This warning will be emitted each time Certbot uses the credentials file,
including for renewal, and cannot be silenced except by addressing the issue
(e.g., by using a command like ``chmod 600`` to restrict access to the file).
Examples
--------
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``
certbot certonly \\
--dns-dnsmadeeasy \\
--dns-dnsmadeeasy-credentials ~/.secrets/certbot/dnsmadeeasy.ini \\
-d example.com
.. code-block:: bash
:caption: To acquire a single certificate for both ``example.com`` and
``www.example.com``
certbot certonly \\
--dns-dnsmadeeasy \\
--dns-dnsmadeeasy-credentials ~/.secrets/certbot/dnsmadeeasy.ini \\
-d example.com \\
-d www.example.com
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``, waiting 2 minutes
for DNS propagation
certbot certonly \\
--dns-dnsmadeeasy \\
--dns-dnsmadeeasy-credentials ~/.secrets/certbot/dnsmadeeasy.ini \\
--dns-dnsmadeeasy-propagation-seconds 120 \\
-d example.com
"""
|
Densvin/RSSVK | vkfeed/tools/html_parser.py | Python | bsd-2-clause | 8,733 | 0.004122 | '''A convenient class for parsing HTML pages.'''
from __future__ import unicode_literals
from HTMLParser import HTMLParser
import logging
import re
from RSSvk.core import Error
# Module-level logger; per-tag parser tracing uses LOG.debug, which this
# INFO level keeps quiet by default.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class HTMLPageParser(HTMLParser):
'''A convenient class for parsing HTML pages.'''
tag_name_regex = '[a-zA-Z][-.a-zA-Z0-9:_]*'
'''A regular expression for tag name.'''
attribute_name_regex = tag_name_regex
'''A regular expression for attribute name.'''
tag_attrs_regex = re.sub(r'\s*', '', r'''
(?:\s+
''' + attribute_name_regex + r'''
(?:\s*=\s*
(?:
'[^']*'
|"[^"]*"
|[^'"/>\s]+
)
)?
)*
''')
'''A regular expression for tag attributes.'''
script_regex = re.compile('<script' + tag_attrs_regex + '>.*?</script>', re.DOTALL | re.IGNORECASE)
'''A regular expression for matching scripts.'''
__invalid_tag_attr_spacing_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
# Two attributes without a space between them
\s+ # whitespace before attribute name
''' + attribute_name_regex + r''' # attribute name
\s*=\s* # value indicator
(?:
'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
)
)
([^\s>]) # Do not include / to make the preparation replacement for __invalid_tag_attr_regex
''', re.VERBOSE)
'''
A regular expression for matching a common error in specifying tag
attributes.
'''
__invalid_tag_attr_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
)
\s+(?:
# Invalid characters instead of an attribute
[^\sa-zA-Z/>]\S*
|
# Sole slash
/\s
|
# Invalid characters starting from slash instead of an attribute
/[^>\s]+
)
''', re.VERBOSE)
'''
A regular expression for matching HTML errors like:
<a class="app photo"/app2322149_58238998?from_id=2381857&loc=addneighbour onclick="return cur.needLoginBox()">
'''
__empty_tags = 'area|base|basefont|br|col|frame|hr|img|input|link|meta|param'
'''A list of all HTML empty tags.'''
__misopened_tag_regex = re.compile(r'<(' + __empty_tags + tag_attrs_regex + r')\s*>', re.IGNORECASE)
'''A regular expression for matching opened tags that should be closed.'''
__tag_stack = None
'''A stack of currently opened HTML tags.'''
__cur_data = None
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
def __init__(self):
HTMLParser.__init__(self)
def handle_charref(self, name):
'''Handles | a character reference of the form &#ref;.'''
self.__accumulate_data('&#' + name + ';')
def handle_data(self, data):
'''Handles data.'''
self.__accumulate_data(data)
def handle_endtag(self, tag_name):
'''Handles end of a tag.'''
self.__handle_data_if_exists()
| if self.__get_cur_tag()['name'] == tag_name:
self.__close_tag(self.__tag_stack.pop())
else:
for tag_id in xrange(len(self.__tag_stack) - 1, -1, -1):
if self.__tag_stack[tag_id]['name'] == tag_name:
for tag in reversed(self.__tag_stack[tag_id + 1:]):
self.__close_tag(tag, forced = True)
self.__tag_stack.pop()
self.__close_tag(self.__tag_stack.pop())
break
else:
LOG.debug('Dropping excess end tag "%s"...', tag_name)
def handle_entityref(self, name):
'''Handles a general entity reference of the form &name;.'''
self.__accumulate_data('&' + name + ';')
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
LOG.debug('%s', data)
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
LOG.debug('<%s %s%s>', tag['name'], attrs, '/' if empty else '')
tag['new_tag_handler'] = self.handle_root
tag['data_handler'] = self.handle_root_data
tag['end_tag_handler'] = self.handle_root_end
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
LOG.debug('</%s>', tag['name'])
def handle_startendtag(self, tag, attrs):
'''Handles start of an XHTML-style empty tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, True)
def handle_starttag(self, tag, attrs):
'''Handles start of a tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, False)
def reset(self):
'''Resets the parser.'''
HTMLParser.reset(self)
self.__tag_stack = [{
# Add fake root tag
'name': None,
'new_tag_handler': self.handle_root,
'data_handler': self.handle_root_data,
'end_tag_handler': self.handle_root_end,
}]
def parse(self, html):
'''Parses the specified HTML page.'''
html = self.__fix_html(html)
self.reset()
try:
# Run the parser
self.feed(html)
self.close()
finally:
# Close all unclosed tags
for tag in self.__tag_stack[1:]:
self.__close_tag(tag, True)
def __accumulate_data(self, data):
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
if self.__cur_data is None:
self.__cur_data = data
else:
self.__cur_data += data
def __close_tag(self, tag, forced = False):
    '''Closes a tag, invoking its end-tag handler when one is set.

    When forced is true the tag is being closed because the document
    never closed it explicitly.
    '''
    if forced:
        LOG.debug('Force closing of unclosed tag "%s".', tag['name'])
    else:
        LOG.debug('Tag %s closed.', tag)
    # Not every tag registers an end handler; only call it if present.
    if 'end_tag_handler' in tag:
        tag['end_tag_handler'](tag)
    LOG.debug('Current tag: %s.', self.__get_cur_tag())
def __fix_html(self, html):
    '''Fixes various things that may confuse the Python's HTML parser.

    Pre-processes the raw markup before it is fed to HTMLParser:
    strips <script> blocks, repeatedly repairs malformed tag
    attributes, and rewrites mis-opened tags as self-closing tags.
    '''
    # Script bodies are not HTML; drop them entirely.
    # NOTE(review): script_regex and the other regexes used below are
    # attributes defined outside this chunk -- verify their patterns there.
    html = self.script_regex.sub('', html)
    loop_replacements = (
        lambda html: self.__invalid_tag_attr_spacing_regex.subn(r'\1 \2', html),
        lambda html: self.__invalid_tag_attr_regex.subn(r'\1 ', html),
    )
    for loop_replacement in loop_replacements:
        # Re-apply each fix-up until it makes no more substitutions
        # (subn() returns the number of replacements performed).
        for i in xrange(0, 1000):
            html, changed = loop_replacement(html)
            if not changed:
                break
        else:
            # 1000 passes without reaching a fixed point: bail out
            # rather than loop forever on pathological input.
            raise Error('Too many errors in the HTML or infinite loop.')
    html = self.__misopened_tag_regex.sub(r'<\1 />', html)
    return html
def __get_cur_tag(self):
    '''Returns the currently opened tag (the top of the tag stack).'''
    return self.__tag_stack[-1]
def __handle_data_if_exists(self):
    '''Dispatches accumulated character data to the current tag, if any.'''
    pending = self.__cur_data
    if pending is None:
        return
    # Clear the buffer before dispatching so the handler may start a
    # fresh accumulation.
    self.__cur_data = None
    cur_tag = self.__get_cur_tag()
    handler = cur_tag.get('data_handler')
    if handler is None:
        return
    LOG.debug('Data "%s" in "%s" with handler %s.',
        pending, cur_tag['name'], handler.func_name)
    handler(cur_tag, pending)
def __handle_start_tag(self, tag_name, attrs, empty):
'''Handles start of any tag.'''
tag = { 'name': tag_name }
handler = self.__get_cur_t |
jacobajit/ion | intranet/apps/board/urls.py | Python | gpl-2.0 | 1,474 | 0.007463 | # -*- coding: utf-8 -*-
from django.conf.urls import url
from . import views
# URL routes for the board app. Route names mirror the view names with a
# "board_" prefix so templates can reverse() them consistently.
# Two lines were corrupted by stray "|" separators and are reconstructed
# here: views.get_memes_json and name="board_delete_post".
urlpatterns = [
    url(r"^$", views.home, name="board"),
    url(r"^/all$", views.all_feed, name="board_all"),
    url(r"^/course/(?P<course_id>.*)?$", views.course_feed, name="board_course"),
    url(r"^/submit/course/(?P<course_id>.*)?$", views.course_feed_post, name="board_course_post"),
    url(r"^/meme/submit/course/(?P<course_id>.*)?$", views.course_feed_post_meme, name="board_course_post_meme"),
    url(r"^/section/(?P<section_id>.*)?$", views.section_feed, name="board_section"),
    url(r"^/submit/section/(?P<section_id>.*)?$", views.section_feed_post, name="board_section_post"),
    url(r"^/meme/submit/section/(?P<section_id>.*)?$", views.section_feed_post_meme, name="board_section_post_meme"),
    url(r"^/meme/get$", views.get_memes_json, name="board_get_memes_json"),
    url(r"^/post/(?P<post_id>\d+)?$", views.view_post, name="board_post"),
    url(r"^/post/(?P<post_id>\d+)/comment$", views.comment_view, name="board_comment"),
    # url(r"^/add$", views.add_post_view, name="add_boardpost"),
    url(r"^/post/(?P<id>\d+)/modify$", views.modify_post_view, name="board_modify_post"),
    url(r"^/post/(?P<id>\d+)/delete$", views.delete_post_view, name="board_delete_post"),
    url(r"^/comment/(?P<id>\d+)/delete$", views.delete_comment_view, name="board_delete_comment"),
    url(r"^/post/(?P<id>\d+)/react$", views.react_post_view, name="board_react_post"),
]
|
AKSW/KBox | kbox.pip/kbox/kbox.py | Python | apache-2.0 | 1,180 | 0.002542 | import os
import subprocess
import click
# Absolute directory of this module; used to locate the bundled KBox jar.
# (The identifier was corrupted by a stray "|" separator; reconstructed.)
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
# Base command line used to invoke the bundled jar (kbox-v0.0.2-alpha.jar).
JAR_EXECUTE = "java -jar " + DIR_PATH + "/kbox-v0.0.2-alpha.jar"
# Separator placed between (and around) forwarded CLI arguments.
SPACE = " "
PUSH_COMMAND = 'push'
COMMAND_LIST = [PUSH_COMMAND]
# NOTE(review): the decorator line was corrupted by a stray "|" separator
# ("@click. | command"); reconstructed as @click.command.
@click.command(context_settings={"ignore_unknown_options": True})
@click.argument('commands', nargs=-1)
def execute_kbox_command(commands):
    '''Runs the bundled KBox jar with the given CLI arguments.

    Forwards every argument token to the jar, then echoes the process's
    stdout (or the OSError, if the JVM could not be launched).
    '''
    # Rebuild a space-separated argument string with a leading space so
    # it concatenates cleanly onto JAR_EXECUTE.
    args = SPACE
    for item in commands:
        args += item + SPACE
    execute_commands = JAR_EXECUTE + args
    try:
        process = subprocess.Popen(execute_commands.split(), stdout=subprocess.PIPE)
        output, err = process.communicate()
        output = output.decode("utf-8")
        click.echo(output)
    except OSError as e:
        click.echo(e)
def execute(commands):
    '''Runs the KBox jar with the given argument string.

    Returns the process's decoded stdout, or None (after printing the
    error) when the JVM could not be launched.
    '''
    tokens = tuple(commands.split())
    # Space-separated argument string with a leading separator, matching
    # the shape expected after JAR_EXECUTE.
    args = SPACE + ''.join(token + SPACE for token in tokens)
    command_line = JAR_EXECUTE + args
    try:
        process = subprocess.Popen(command_line.split(), stdout=subprocess.PIPE)
        output, err = process.communicate()
        return output.decode("utf-8")
    except OSError as e:
        print(e)
|
qhm123/the10000 | feed.py | Python | bsd-3-clause | 1,821 | 0.004942 | #!/usr/bin/env python
# coding=utf-8
import os
import datetime
from google.appengine.ext import webapp
from google.appengine.api import memcache
from google.appengine.ext import db
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
from v2ex.babel import Member
from v2ex.babel import Counter
from v2ex.babel import Section
from v2ex.babel import Node
from v2ex.babel import Topic
from v2ex.babel import Reply
from v2ex.babel.da import *
template.register_template_library('v2ex.templatetags.filters')
class FeedHomeHandler(webapp.RequestHandler):
    '''Serves the site-wide feed with the ten newest topics.

    The rendered XML is cached in memcache under 'feed_index' for ten
    minutes to avoid re-querying the datastore on every request.
    '''
    def head(self):
        # Feed readers often probe with HEAD; answer with an empty body.
        self.response.out.write('')
    def get(self):
        site = GetSite()
        output = memcache.get('feed_index')
        if output is None:
            template_values = {}
            template_values['site'] = site
            template_values['site_domain'] = site.domain
            template_values['site_name'] = site.title
            template_values['site_slogan'] = site.slogan
            template_values['feed_url'] = 'http://' + template_values['site_domain'] + '/index.xml'
            template_values['site_updated'] = datetime.datetime.now()
            q = db.GqlQuery("SELECT * FROM Topic ORDER BY created DESC LIMIT 10")
            template_values['topics'] = q
            # NOTE(review): this line was corrupted by a stray "|"
            # separator; reconstructed from the surrounding code.
            path = os.path.join(os.path.dirname(__file__), 'tpl', 'feed', 'index.xml')
            output = template.render(path, template_values)
            memcache.set('feed_index', output, 600)
        self.response.out.write(output)
def main():
    '''Wires up the feed URL routes and starts the WSGI application.'''
    routes = [
        ('/index.xml', FeedHomeHandler),
        ('/feed/v2ex.rss', FeedHomeHandler),
    ]
    application = webapp.WSGIApplication(routes, debug=True)
    util.run_wsgi_app(application)
# Run the WSGI app when executed as a script. The original guard compared
# against the corrupted literal '__ma | in__' (a stray dataset separator),
# which could never match; reconstructed as the standard '__main__' check.
if __name__ == '__main__':
    main()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.