| code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: SearchDialog.py
from Tkinter import *
from idlelib import SearchEngine
from idlelib.SearchDialogBase import SearchDialogBase
def _setup(text):
root = text._root()
engine = SearchEngine.get(root)
if not hasattr(engine, '_searchdialog'):
engine._searchdialog = SearchDialog(root, engine)
return engine._searchdialog
def find(text):
pat = text.get('sel.first', 'sel.last')
return _setup(text).open(text, pat)
def find_again(text):
return _setup(text).find_again(text)
def find_selection(text):
return _setup(text).find_selection(text)
class SearchDialog(SearchDialogBase):
def create_widgets(self):
f = SearchDialogBase.create_widgets(self)
self.make_button('Find', self.default_command, 1)
def default_command(self, event=None):
if not self.engine.getprog():
return
if self.find_again(self.text):
self.close()
def find_again(self, text):
if not self.engine.getpat():
self.open(text)
return False
else:
if not self.engine.getprog():
return False
res = self.engine.search_text(text)
if res:
line, m = res
i, j = m.span()
first = '%d.%d' % (line, i)
last = '%d.%d' % (line, j)
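# Tk text indices are '<line>.<column>' strings, e.g. '3.0' is the start of
# line 3; first/last bracket the current match.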
try:
selfirst = text.index('sel.first')
sellast = text.index('sel.last')
if selfirst == first and sellast == last:
text.bell()
return False
except TclError:
pass
text.tag_remove('sel', '1.0', 'end')
text.tag_add('sel', first, last)
text.mark_set('insert', self.engine.isback() and first or last)
text.see('insert')
return True
text.bell()
return False
def find_selection(self, text):
pat = text.get('sel.first', 'sel.last')
if pat:
self.engine.setcookedpat(pat)
return self.find_again(text)
|
DarthMaulware/EquationGroupLeaks
|
Leak #5 - Lost In Translation/windows/Resources/Python/Core/Lib/idlelib/SearchDialog.py
|
Python
|
unlicense
| 2,315
|
from haas import api, model, config, server
from haas.test_common import config_testsuite, config_merge, fresh_database, \
ModelTest, fail_on_log_warnings
from haas.flaskapp import app
from haas.model import db
from haas.errors import AuthorizationError
from haas.rest import init_auth, local
from haas.ext.auth.database import User, user_create, user_delete, \
user_add_project, user_remove_project
import flask
import pytest
import unittest
fail_on_log_warnings = pytest.fixture(autouse=True)(fail_on_log_warnings)
@pytest.fixture
def configure():
config_testsuite()
config_merge({
'auth': {
# The tests in this module check the specific authorization
# requirements of the API calls. As such, we don't want things to
# fail due to a complete lack of authentication when they should
# instead fail later, at the specific authorization checks we're
# testing for.
'require_authentication': 'False',
},
'extensions': {
'haas.ext.auth.database': '',
'haas.ext.auth.null': None,
},
})
config.load_extensions()
@pytest.fixture
def initial_db(request):
fresh_database(request)
with app.app_context():
alice = User(label='alice',
password='secret',
is_admin=True)
bob = User(label='bob',
password='password',
is_admin=False)
db.session.add(alice)
db.session.add(bob)
runway = model.Project('runway')
runway.users.append(alice)
db.session.add(runway)
db.session.commit()
@pytest.fixture
def server_init():
server.register_drivers()
server.validate_state()
@pytest.yield_fixture
def auth_context():
with app.test_request_context():
init_auth()
yield
class FakeAuthRequest(object):
"""Fake (authenticated) request object.
This spoofs just enough of flask's request functionality for the
database auth plugin to work.
"""
def __init__(self, username, password):
self.username = username
self.password = password
@property
def authorization(self):
return self
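# Since `authorization` returns the object itself, plugin code reading
# `flask.request.authorization.username` / `.password` gets the spoofed
# credentials above.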
class FakeNoAuthRequest(object):
"""Fake (unauthenticated) request object.
Like `FakeAuthRequest`, except that the spoofed request is
unauthenticated.
"""
authorization = None
@pytest.fixture
def admin_auth():
"""Inject mock credentials that give the request admin access."""
flask.request = FakeAuthRequest('alice', 'secret')
@pytest.fixture
def runway_auth():
"""
Inject mock credentials that give the request
access to the "runway" project.
"""
flask.request = FakeAuthRequest('bob', 'password')
@pytest.fixture
def no_auth():
"""Spoof an unauthenticated request."""
flask.request = FakeNoAuthRequest()
def use_fixtures(auth_fixture):
return pytest.mark.usefixtures('configure',
'initial_db',
'server_init',
auth_fixture,
'auth_context')
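# Illustrative expansion: use_fixtures('admin_auth') is equivalent to
# pytest.mark.usefixtures('configure', 'initial_db', 'server_init',
# 'admin_auth', 'auth_context'), so each decorated test gets the full
# fixture chain plus the chosen auth fixture.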
@use_fixtures('admin_auth')
class TestUserCreateDelete(unittest.TestCase):
"""Tests for user_create and user_delete."""
def test_new_user(self):
api._assert_absent(User, 'charlie')
user_create('charlie', 'foo')
def test_duplicate_user(self):
user_create('charlie', 'secret')
with pytest.raises(api.DuplicateError):
user_create('charlie', 'password')
def test_delete_user(self):
user_create('charlie', 'foo')
user_delete('charlie')
def test_delete_missing_user(self):
with pytest.raises(api.NotFoundError):
user_delete('charlie')
def test_delete_user_twice(self):
user_create('charlie', 'foo')
user_delete('charlie')
with pytest.raises(api.NotFoundError):
user_delete('charlie')
def _new_user(self, is_admin):
"""Helper method for creating/switching to a new user.
A new admin user will be created with the credentials:
username: 'charlie'
password: 'foo'
The argument is_admin determines whether the user has admin rights.
Once the user has been created, the authentication info will be
changed to that user.
"""
user_create('charlie', 'foo', is_admin=is_admin)
flask.request = FakeAuthRequest('charlie', 'foo')
local.auth = User.query.filter_by(label='charlie').one()
def test_new_admin_can_admin(self):
"""Verify that a newly created admin can actually do admin stuff."""
self._new_user(is_admin=True)
user_delete('charlie')
def test_new_non_admin_cannot_admin(self):
"""Verify that a newly created regular user can't do admin stuff."""
self._new_user(is_admin=False)
with pytest.raises(AuthorizationError):
user_delete('charlie')
@use_fixtures('admin_auth')
class TestUserAddRemoveProject(unittest.TestCase):
"""Tests for user_add_project/user_remove_project."""
def test_user_add_project(self):
user_create('charlie', 'secret')
api.project_create('acme-corp')
user_add_project('charlie', 'acme-corp')
user = api._must_find(User, 'charlie')
project = api._must_find(model.Project, 'acme-corp')
assert project in user.projects
assert user in project.users
def test_user_remove_project(self):
user_create('charlie', 'secret')
api.project_create('acme-corp')
user_add_project('charlie', 'acme-corp')
user_remove_project('charlie', 'acme-corp')
user = api._must_find(User, 'charlie')
project = api._must_find(model.Project, 'acme-corp')
assert project not in user.projects
assert user not in project.users
def test_duplicate_user_add_project(self):
user_create('charlie', 'secret')
api.project_create('acme-corp')
user_add_project('charlie', 'acme-corp')
with pytest.raises(api.DuplicateError):
user_add_project('charlie', 'acme-corp')
def test_bad_user_remove_project(self):
"""Tests that removing a user from a project they're not in fails."""
user_create('charlie', 'secret')
api.project_create('acme-corp')
with pytest.raises(api.NotFoundError):
user_remove_project('charlie', 'acme-corp')
@pytest.mark.usefixtures('configure', 'initial_db')
class TestUserModel(ModelTest):
"""Basic sanity check for the User model.
Similar to the tests in /tests/unit/model.py, which cover the models
defined in HaaS core.
"""
def sample_obj(self):
return User('charlie', 'secret')
admin_calls = [
(user_create, ['charlie', '1337']),
(user_create, ['charlie', '1337', False]),
(user_create, ['charlie', '1337', True]),
(user_delete, ['bob']),
(user_add_project, ['bob', 'runway']),
(user_remove_project, ['alice', 'runway']),
]
@pytest.mark.parametrize('fn,args', admin_calls)
@use_fixtures('admin_auth')
def test_admin_succeed(fn, args):
"""Verify that an admin-only call succeds when invoked by an admin."""
fn(*args)
@pytest.mark.parametrize('fn,args', admin_calls)
@use_fixtures('runway_auth')
def test_admin_runway_fail(fn, args):
"""
Verify that an admin-only call fails when invoked by a non-admin user.
"""
with pytest.raises(AuthorizationError):
fn(*args)
@pytest.mark.parametrize('fn,args', admin_calls)
@use_fixtures('no_auth')
def test_admin_noauth_fail(fn, args):
"""
Verify that an admin-only call fails when invoked without authentication.
"""
with pytest.raises(AuthorizationError):
fn(*args)
|
kylehogan/hil
|
tests/unit/ext/auth/database.py
|
Python
|
apache-2.0
| 7,877
|
# -*- coding: utf-8 -*-
# eip_preferenceswindow.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
EIP Preferences window
"""
import logging
from functools import partial
from PySide import QtCore, QtGui
from leap.bitmask.config.leapsettings import LeapSettings
from leap.bitmask.gui.ui_eippreferences import Ui_EIPPreferences
logger = logging.getLogger(__name__)
class EIPPreferencesWindow(QtGui.QDialog):
"""
Window that displays the EIP preferences.
"""
def __init__(self, parent, domain, backend, leap_signaler):
"""
:param parent: parent object of the EIPPreferencesWindow.
:type parent: QWidget
:param domain: the domain selected by default.
:type domain: unicode
:param backend: Backend being used
:type backend: Backend
:param leap_signaler: signaler object used to connect to backend events
"""
QtGui.QDialog.__init__(self, parent)
self.AUTOMATIC_GATEWAY_LABEL = self.tr("Automatic")
self._settings = LeapSettings()
self._leap_signaler = leap_signaler
self._backend = backend
# Load UI
self.ui = Ui_EIPPreferences()
self.ui.setupUi(self)
self.ui.lblProvidersGatewayStatus.setVisible(False)
# Connections
self.ui.cbProvidersGateway.currentIndexChanged[int].connect(
self._populate_gateways)
self.ui.cbGateways.currentIndexChanged[unicode].connect(
lambda x: self.ui.lblProvidersGatewayStatus.setVisible(False))
self._selected_domain = domain
self._configured_providers = []
self._backend_connect()
self._add_configured_providers()
def _set_providers_gateway_status(self, status, success=False,
error=False):
"""
Sets the status label for the gateway change.
:param status: status message to display, can be HTML
:type status: str
:param success: is set to True if we should display the
message as green
:type success: bool
:param error: is set to True if we should display the
message as red
:type error: bool
"""
if success:
status = "<font color='green'><b>%s</b></font>" % (status,)
elif error:
status = "<font color='red'><b>%s</b></font>" % (status,)
self.ui.lblProvidersGatewayStatus.setVisible(True)
self.ui.lblProvidersGatewayStatus.setText(status)
def _add_configured_providers(self):
"""
Add the client's configured providers to the providers combo boxes.
"""
providers = self._settings.get_configured_providers()
if not providers:
return
self._backend.eip_get_initialized_providers(domains=providers)
@QtCore.Slot(list)
def _load_providers_in_combo(self, providers):
"""
TRIGGERS:
Signaler.eip_get_initialized_providers
Add the client's configured providers to the providers combo boxes.
:param providers: the list of providers to add and whether each one is
initialized or not.
:type providers: list of tuples (str, bool)
"""
self.ui.cbProvidersGateway.clear()
if not providers:
self.ui.gbGatewaySelector.setEnabled(False)
return
# block signals so the currentIndexChanged slot doesn't get triggered
self.ui.cbProvidersGateway.blockSignals(True)
for provider, is_initialized in providers:
label = provider
if not is_initialized:
label += self.tr(" (uninitialized)")
self.ui.cbProvidersGateway.addItem(label, userData=provider)
self.ui.cbProvidersGateway.blockSignals(False)
# Select provider by name
domain = self._selected_domain
if domain is not None:
provider_index = self.ui.cbProvidersGateway.findText(
domain, QtCore.Qt.MatchStartsWith)
self.ui.cbProvidersGateway.setCurrentIndex(provider_index)
@QtCore.Slot(str)
def _save_selected_gateway(self, provider):
"""
TRIGGERS:
self.ui.pbSaveGateway.clicked
Saves the new gateway setting to the configuration file.
:param provider: the provider config that we need to save.
:type provider: str
"""
gateway = self.ui.cbGateways.currentText()
if gateway == self.AUTOMATIC_GATEWAY_LABEL:
gateway = self._settings.GATEWAY_AUTOMATIC
else:
idx = self.ui.cbGateways.currentIndex()
gateway = self.ui.cbGateways.itemData(idx)
self._settings.set_selected_gateway(provider, gateway)
self._backend.settings_set_selected_gateway(provider=provider,
gateway=gateway)
msg = self.tr(
"Gateway settings for provider '{0}' saved.").format(provider)
self._set_providers_gateway_status(msg, success=True)
@QtCore.Slot(int)
def _populate_gateways(self, domain_idx):
"""
TRIGGERS:
self.ui.cbProvidersGateway.currentIndexChanged[int]
Loads the gateways that the provider provides into the UI for
the user to select.
:param domain_idx: the index of the provider to load gateways from.
:type domain_idx: int
"""
# We hide the maybe-visible status label after a change
self.ui.lblProvidersGatewayStatus.setVisible(False)
if domain_idx == -1:
return
domain = self.ui.cbProvidersGateway.itemData(domain_idx)
self._selected_domain = domain
self._backend.eip_get_gateways_list(domain=domain)
@QtCore.Slot(list)
def _update_gateways_list(self, gateways):
"""
TRIGGERS:
Signaler.eip_get_gateways_list
:param gateways: a list of gateways
:type gateways: list of unicode
Add the available gateways and select the one stored in configuration
file.
"""
self.ui.pbSaveGateway.setEnabled(True)
self.ui.cbGateways.setEnabled(True)
self.ui.cbGateways.clear()
self.ui.cbGateways.addItem(self.AUTOMATIC_GATEWAY_LABEL)
try:
# disconnect previously connected save method
self.ui.pbSaveGateway.clicked.disconnect()
except RuntimeError:
pass # Signal was not connected
# set the proper connection for the 'save' button
domain = self._selected_domain
save_gateway = partial(self._save_selected_gateway, domain)
self.ui.pbSaveGateway.clicked.connect(save_gateway)
selected_gateway = self._settings.get_selected_gateway(
self._selected_domain)
index = 0
for idx, (gw_name, gw_ip) in enumerate(gateways):
gateway = "{0} ({1})".format(gw_name, gw_ip)
self.ui.cbGateways.addItem(gateway, gw_ip)
if gw_ip == selected_gateway:
index = idx + 1
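# idx is offset by one because item 0 is the "Automatic" entry added above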
self.ui.cbGateways.setCurrentIndex(index)
@QtCore.Slot()
def _gateways_list_error(self):
"""
TRIGGERS:
Signaler.eip_get_gateways_list_error
An error has occurred retrieving the gateway list so we inform the
user.
"""
self._set_providers_gateway_status(
self.tr("There was a problem with configuration files."),
error=True)
self.ui.pbSaveGateway.setEnabled(False)
self.ui.cbGateways.setEnabled(False)
@QtCore.Slot()
def _gateways_list_uninitialized(self):
"""
TRIGGERS:
Signaler.eip_uninitialized_provider
The requested provider is not initialized yet, so we show the user an
error message.
"""
self._set_providers_gateway_status(
self.tr("This is an uninitialized provider, please log in first."),
error=True)
self.ui.pbSaveGateway.setEnabled(False)
self.ui.cbGateways.setEnabled(False)
def _backend_connect(self):
sig = self._leap_signaler
sig.eip_get_gateways_list.connect(self._update_gateways_list)
sig.eip_get_gateways_list_error.connect(self._gateways_list_error)
sig.eip_uninitialized_provider.connect(
self._gateways_list_uninitialized)
sig.eip_get_initialized_providers.connect(
self._load_providers_in_combo)
|
laborautonomo/bitmask_client
|
src/leap/bitmask/gui/eip_preferenceswindow.py
|
Python
|
gpl-3.0
| 9,099
|
#!/usr/bin/env python
#
# Copyright (c) 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
import os
import sys
if os.name == 'nt':
# eventlet monkey patching causes subprocess.Popen to fail on Windows
# when using pipes due to missing non blocking I/O support
eventlet.monkey_patch(os=False)
else:
eventlet.monkey_patch()
# If ../murano/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
root = os.path.normpath(
    os.path.join(os.path.abspath(__file__), os.pardir, os.pardir, os.pardir))
if os.path.exists(os.path.join(root, 'murano', '__init__.py')):
sys.path.insert(0, root)
from murano.api.v1 import request_statistics
from murano.common import config
from murano.common import policy
from murano.common import server
from murano.common import statservice as stats
from murano.common import wsgi
from murano.openstack.common import log
from murano.openstack.common import service
def main():
try:
config.parse_args()
log.setup('murano')
request_statistics.init_stats()
policy.init()
launcher = service.ServiceLauncher()
app = config.load_paste_app('murano')
port, host = (config.CONF.bind_port, config.CONF.bind_host)
launcher.launch_service(wsgi.Service(app, port, host))
launcher.launch_service(server.get_rpc_service())
launcher.launch_service(server.get_notification_service())
launcher.launch_service(stats.StatsCollectingService())
launcher.wait()
except RuntimeError as e:
sys.stderr.write("ERROR: %s\n" % e)
sys.exit(1)
if __name__ == '__main__':
main()
|
sergmelikyan/murano
|
murano/cmd/api.py
|
Python
|
apache-2.0
| 2,250
|
# code snippet, to be included in 'sitecustomize.py'
import sys
def info(etype, value, tb):
if hasattr(sys, 'ps1') or not sys.stderr.isatty():
# we are in interactive mode or we don't have a tty-like
# device, so we call the default hook
sys.__excepthook__(etype, value, tb)
else:
import traceback, pdb
# we are NOT in interactive mode, print the exception...
traceback.print_exception(etype, value, tb)
print
# ...then start the debugger in post-mortem mode.
pdb.pm()
sys.excepthook = info
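# A minimal usage sketch (assumed, not part of the original snippet): with
# this hook installed via sitecustomize.py, running e.g. `python -c "1/0"`
# from a terminal prints the traceback and then lands at the pdb post-mortem
# prompt; redirecting stderr to a file falls back to the default excepthook.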
|
wmodes/crs
|
conductor/debughook.py
|
Python
|
gpl-3.0
| 573
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import fnmatch
import gzip
import os
from pathlib import Path
import click
from .command import Bash, Command, default_bin
from .cmake import CMake
from .git import git
from .logger import logger
from ..lang.cpp import CppCMakeDefinition, CppConfiguration
from ..lang.python import Autopep8, Flake8, NumpyDoc
from .rat import Rat, exclusion_from_globs
from .tmpdir import tmpdir
_archery_install_msg = (
"Please install archery using: `pip install -e dev/archery[lint]`. "
)
class LintValidationException(Exception):
pass
class LintResult:
def __init__(self, success, reason=None):
self.success = success
self.reason = reason
def ok(self):
if not self.success:
raise LintValidationException
@staticmethod
def from_cmd(command_result):
return LintResult(command_result.returncode == 0)
def cpp_linter(src, build_dir, clang_format=True, cpplint=True,
clang_tidy=False, iwyu=False, iwyu_all=False,
fix=False):
""" Run clang-format, cpplint and clang-tidy on cpp/ codebase. """
logger.info("Running C++ linters")
cmake = CMake()
if not cmake.available:
logger.error("cpp linter requested but cmake binary not found.")
return
# A cmake build directory is required to populate `compile_commands.json`
# which in turn is required by clang-tidy. It also provides a convenient
# way to hide clang-format/clang-tidy invocation via the Generate
# (ninja/make) targets.
# ARROW_LINT_ONLY exits early but skips generating compile_commands.json
lint_only = not (iwyu or clang_tidy)
cmake_args = {"with_python": False, "with_lint_only": lint_only}
cmake_def = CppCMakeDefinition(src.cpp, CppConfiguration(**cmake_args))
build = cmake_def.build(build_dir)
if clang_format:
target = "format" if fix else "check-format"
yield LintResult.from_cmd(build.run(target, check=False))
if cpplint:
yield LintResult.from_cmd(build.run("lint", check=False))
yield LintResult.from_cmd(build.run("lint_cpp_cli", check=False))
if clang_tidy:
yield LintResult.from_cmd(build.run("check-clang-tidy", check=False))
if iwyu:
if iwyu_all:
iwyu_cmd = "iwyu-all"
else:
iwyu_cmd = "iwyu"
yield LintResult.from_cmd(build.run(iwyu_cmd, check=False))
class CMakeFormat(Command):
def __init__(self, paths, cmake_format_bin=None):
self.check_version()
self.bin = default_bin(cmake_format_bin, "cmake-format")
self.paths = paths
@classmethod
def from_patterns(cls, base_path, include_patterns, exclude_patterns):
paths = {
str(path.as_posix())
for pattern in include_patterns
for path in base_path.glob(pattern)
}
for pattern in exclude_patterns:
pattern = (base_path / pattern).as_posix()
paths -= set(fnmatch.filter(paths, str(pattern)))
return cls(paths)
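# Illustrative filtering (hypothetical paths): an include pattern such as
# 'cpp/**/CMakeLists.txt' may glob both cpp/src/CMakeLists.txt and
# cpp/build/CMakeLists.txt; an exclude pattern 'cpp/build/*' is joined onto
# base_path and removed from the set via fnmatch, leaving only the src file.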
@staticmethod
def check_version():
try:
# cmake_format is part of the cmakelang package
import cmakelang
except ImportError:
raise ImportError(_archery_install_msg)
# pin a specific version of cmake_format, must be updated in setup.py
if cmakelang.__version__ != "0.6.13":
raise LintValidationException(
f"Wrong version of cmake_format is detected. "
f"{_archery_install_msg}"
)
def check(self):
return self.run("-l", "error", "--check", *self.paths, check=False)
def fix(self):
return self.run("--in-place", *self.paths, check=False)
def cmake_linter(src, fix=False):
"""
Run cmake-format on all CMakeFiles.txt
"""
logger.info("Running cmake-format linters")
cmake_format = CMakeFormat.from_patterns(
src.path,
include_patterns=[
'ci/**/*.cmake',
'cpp/CMakeLists.txt',
'cpp/src/**/CMakeLists.txt',
'cpp/examples/**/CMakeLists.txt',
'cpp/cmake_modules/*.cmake',
'go/**/CMakeLists.txt',
'java/**/CMakeLists.txt',
'matlab/**/CMakeLists.txt',
'python/CMakeLists.txt',
],
exclude_patterns=[
'cpp/cmake_modules/FindNumPy.cmake',
'cpp/cmake_modules/FindPythonLibsNew.cmake',
'cpp/cmake_modules/UseCython.cmake',
'cpp/src/arrow/util/config.h.cmake',
]
)
method = cmake_format.fix if fix else cmake_format.check
yield LintResult.from_cmd(method())
def python_linter(src, fix=False):
"""Run Python linters on python/pyarrow, python/examples, setup.py
and dev/. """
setup_py = os.path.join(src.python, "setup.py")
setup_cfg = os.path.join(src.python, "setup.cfg")
logger.info("Running Python formatter (autopep8)")
autopep8 = Autopep8()
if not autopep8.available:
logger.error(
"Python formatter requested but autopep8 binary not found. "
f"{_archery_install_msg}")
return
# Gather files for autopep8
patterns = ["python/pyarrow/**/*.py",
"python/pyarrow/**/*.pyx",
"python/pyarrow/**/*.pxd",
"python/pyarrow/**/*.pxi",
"python/examples/**/*.py",
"dev/archery/**/*.py",
"dev/release/**/*.py"]
files = [setup_py]
for pattern in patterns:
files += list(map(str, Path(src.path).glob(pattern)))
args = ['--global-config', setup_cfg, '--ignore-local-config']
if fix:
args += ['-j0', '--in-place']
args += sorted(files)
yield LintResult.from_cmd(autopep8(*args))
else:
# XXX `-j0` doesn't work well with `--exit-code`, so instead
# we capture the diff and check whether it's empty
# (https://github.com/hhatto/autopep8/issues/543)
args += ['-j0', '--diff']
args += sorted(files)
diff = autopep8.run_captured(*args)
if diff:
print(diff.decode('utf8'))
yield LintResult(success=False)
else:
yield LintResult(success=True)
# Run flake8 after autopep8 (the latter may have modified some files)
logger.info("Running Python linter (flake8)")
flake8 = Flake8()
if not flake8.available:
logger.error(
"Python linter requested but flake8 binary not found. "
f"{_archery_install_msg}")
return
flake8_exclude = ['.venv*', 'vendored']
yield LintResult.from_cmd(
flake8("--extend-exclude=" + ','.join(flake8_exclude),
setup_py, src.pyarrow, os.path.join(src.python, "examples"),
src.dev, check=False))
config = os.path.join(src.python, ".flake8.cython")
yield LintResult.from_cmd(
flake8("--config=" + config, src.pyarrow, check=False))
def python_numpydoc(symbols=None, allow_rules=None, disallow_rules=None):
"""Run numpydoc linter on python.
Pyarrow must be available for import.
"""
logger.info("Running Python docstring linters")
# by default try to run on all pyarrow package
symbols = symbols or {
'pyarrow',
'pyarrow.compute',
'pyarrow.csv',
'pyarrow.dataset',
'pyarrow.feather',
# 'pyarrow.flight',
'pyarrow.fs',
'pyarrow.gandiva',
'pyarrow.ipc',
'pyarrow.json',
'pyarrow.orc',
'pyarrow.parquet',
'pyarrow.plasma',
'pyarrow.types',
}
try:
numpydoc = NumpyDoc(symbols)
except RuntimeError as e:
logger.error(str(e))
yield LintResult(success=False)
return
results = numpydoc.validate(
# limit the validation scope to the pyarrow package
from_package='pyarrow',
allow_rules=allow_rules,
disallow_rules=disallow_rules
)
if len(results) == 0:
yield LintResult(success=True)
return
number_of_violations = 0
for obj, result in results:
errors = result['errors']
# inspect doesn't play nice with cython-generated source code,
# so we use a hacky way to represent a proper __qualname__
doc = getattr(obj, '__doc__', '')
name = getattr(obj, '__name__', '')
qualname = getattr(obj, '__qualname__', '')
module = getattr(obj, '__module__', '')
instance = getattr(obj, '__self__', '')
if instance:
klass = instance.__class__.__name__
else:
klass = ''
try:
cython_signature = doc.splitlines()[0]
except Exception:
cython_signature = ''
desc = '.'.join(filter(None, [module, klass, qualname or name]))
click.echo()
click.echo(click.style(desc, bold=True, fg='yellow'))
if cython_signature:
qualname_with_signature = '.'.join([module, cython_signature])
click.echo(
click.style(
'-> {}'.format(qualname_with_signature),
fg='yellow'
)
)
for error in errors:
number_of_violations += 1
click.echo('{}: {}'.format(*error))
msg = 'Total number of docstring violations: {}'.format(
number_of_violations
)
click.echo()
click.echo(click.style(msg, fg='red'))
yield LintResult(success=False)
def rat_linter(src, root):
"""Run apache-rat license linter."""
logger.info("Running apache-rat linter")
if src.git_dirty:
logger.warn("Due to the usage of git-archive, uncommitted files will"
" not be checked for rat violations. ")
exclusion = exclusion_from_globs(
os.path.join(src.dev, "release", "rat_exclude_files.txt"))
# Creates a git-archive of ArrowSources, apache-rat expects a gzip
# compressed tar archive.
archive_path = os.path.join(root, "apache-arrow.tar.gz")
src.archive(archive_path, compressor=gzip.compress)
report = Rat().report(archive_path)
violations = list(report.validate(exclusion=exclusion))
for violation in violations:
print("apache-rat license violation: {}".format(violation))
yield LintResult(len(violations) == 0)
def r_linter(src):
"""Run R linter."""
logger.info("Running R linter")
r_lint_sh = os.path.join(src.r, "lint.sh")
yield LintResult.from_cmd(Bash().run(r_lint_sh, check=False))
class Hadolint(Command):
def __init__(self, hadolint_bin=None):
self.bin = default_bin(hadolint_bin, "hadolint")
def is_docker_image(path):
dirname = os.path.dirname(path)
filename = os.path.basename(path)
excluded = dirname.startswith(
"dev") or dirname.startswith("python/manylinux")
return filename.startswith("Dockerfile") and not excluded
def docker_linter(src):
"""Run Hadolint docker linter."""
logger.info("Running Docker linter")
hadolint = Hadolint()
if not hadolint.available:
logger.error(
"hadolint linter requested but hadolint binary not found.")
return
for path in git.ls_files(git_dir=src.path):
if is_docker_image(path):
yield LintResult.from_cmd(hadolint.run(path, check=False,
cwd=src.path))
def linter(src, fix=False, *, clang_format=False, cpplint=False,
clang_tidy=False, iwyu=False, iwyu_all=False,
python=False, numpydoc=False, cmake_format=False, rat=False,
r=False, docker=False):
"""Run all linters."""
with tmpdir(prefix="arrow-lint-") as root:
build_dir = os.path.join(root, "cpp-build")
# Linters yield LintResult without raising exceptions on failure.
# This allows running all linters in one pass and exposing all
# errors to the user.
results = []
if clang_format or cpplint or clang_tidy or iwyu:
results.extend(cpp_linter(src, build_dir,
clang_format=clang_format,
cpplint=cpplint,
clang_tidy=clang_tidy,
iwyu=iwyu,
iwyu_all=iwyu_all,
fix=fix))
if python:
results.extend(python_linter(src, fix=fix))
if numpydoc:
results.extend(python_numpydoc())
if cmake_format:
results.extend(cmake_linter(src, fix=fix))
if rat:
results.extend(rat_linter(src, root))
if r:
results.extend(r_linter(src))
if docker:
results.extend(docker_linter(src))
# Raise error if one linter failed, ensuring calling code can exit with
# non-zero.
for result in results:
result.ok()
|
kou/arrow
|
dev/archery/archery/utils/lint.py
|
Python
|
apache-2.0
| 13,767
|
# -*- coding: utf-8 -*-
"""
jinja2.runtime
~~~~~~~~~~~~~~
Runtime helpers.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from itertools import chain
from jinja2.nodes import EvalContext, _context_function_types
from jinja2.utils import Markup, soft_unicode, escape, missing, concat, \
internalcode, object_type_repr
from jinja2.exceptions import UndefinedError, TemplateRuntimeError, \
TemplateNotFound
from jinja2._compat import next, imap, text_type, iteritems, \
implements_iterator, implements_to_string, string_types, PY2
# these variables are exported to the template runtime
__all__ = ['LoopContext', 'TemplateReference', 'Macro', 'Markup',
'TemplateRuntimeError', 'missing', 'concat', 'escape',
'markup_join', 'unicode_join', 'to_string', 'identity',
'TemplateNotFound']
#: the name of the function that is used to convert something into
#: a string. We can just use the text type here.
to_string = text_type
#: the identity function. Useful for certain things in the environment
identity = lambda x: x
_last_iteration = object()
def markup_join(seq):
"""Concatenation that escapes if necessary and converts to unicode."""
buf = []
iterator = imap(soft_unicode, seq)
for arg in iterator:
buf.append(arg)
if hasattr(arg, '__html__'):
return Markup(u'').join(chain(buf, iterator))
return concat(buf)
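# Illustrative behaviour: markup_join([u'<b>', Markup(u'<i>ok</i>')]) hits
# the Markup element (it has __html__), so the join switches to
# Markup(u'').join, which escapes the plain u'<b>' to u'&lt;b&gt;' while
# the Markup element survives unescaped.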
def unicode_join(seq):
"""Simple args to unicode conversion and concatenation."""
return concat(imap(text_type, seq))
def new_context(environment, template_name, blocks, vars=None,
shared=None, globals=None, locals=None):
"""Internal helper to for context creation."""
if vars is None:
vars = {}
if shared:
parent = vars
else:
parent = dict(globals or (), **vars)
if locals:
# if the parent is shared a copy should be created because
# we don't want to modify the dict passed
if shared:
parent = dict(parent)
for key, value in iteritems(locals):
if key[:2] == 'l_' and value is not missing:
parent[key[2:]] = value
return Context(environment, parent, template_name, blocks)
class TemplateReference(object):
"""The `self` in templates."""
def __init__(self, context):
self.__context = context
def __getitem__(self, name):
blocks = self.__context.blocks[name]
return BlockReference(name, self.__context, blocks, 0)
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__,
self.__context.name
)
class Context(object):
"""The template context holds the variables of a template. It stores the
values passed to the template and also the names the template exports.
Creating instances is neither supported nor useful as it's created
automatically at various stages of the template evaluation and should not
be created by hand.
The context is immutable. Modifications on :attr:`parent` **must not**
happen and modifications on :attr:`vars` are allowed from generated
template code only. Template filters and global functions marked as
:func:`contextfunction`\s get the active context passed as first argument
and are allowed to access the context read-only.
The template context supports read only dict operations (`get`,
`keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`,
`__getitem__`, `__contains__`). Additionally there is a :meth:`resolve`
method that doesn't fail with a `KeyError` but returns an
:class:`Undefined` object for missing variables.
"""
__slots__ = ('parent', 'vars', 'environment', 'eval_ctx', 'exported_vars',
'name', 'blocks', '__weakref__')
def __init__(self, environment, parent, name, blocks):
self.parent = parent
self.vars = {}
self.environment = environment
self.eval_ctx = EvalContext(self.environment, name)
self.exported_vars = set()
self.name = name
# create the initial mapping of blocks. Whenever template inheritance
# takes place the runtime will update this mapping with the new blocks
# from the template.
self.blocks = dict((k, [v]) for k, v in iteritems(blocks))
def super(self, name, current):
"""Render a parent block."""
try:
blocks = self.blocks[name]
index = blocks.index(current) + 1
blocks[index]
except LookupError:
return self.environment.undefined('there is no parent block '
'called %r.' % name,
name='super')
return BlockReference(name, self, blocks, index)
def get(self, key, default=None):
"""Returns an item from the template context, if it doesn't exist
`default` is returned.
"""
try:
return self[key]
except KeyError:
return default
def resolve(self, key):
"""Looks up a variable like `__getitem__` or `get` but returns an
:class:`Undefined` object with the name of the name looked up.
"""
if key in self.vars:
return self.vars[key]
if key in self.parent:
return self.parent[key]
return self.environment.undefined(name=key)
def get_exported(self):
"""Get a new dict with the exported variables."""
return dict((k, self.vars[k]) for k in self.exported_vars)
def get_all(self):
"""Return a copy of the complete context as dict including the
exported variables.
"""
return dict(self.parent, **self.vars)
@internalcode
def call(__self, __obj, *args, **kwargs):
"""Call the callable with the arguments and keyword arguments
provided but inject the active context or environment as first
argument if the callable is a :func:`contextfunction` or
:func:`environmentfunction`.
"""
if __debug__:
__traceback_hide__ = True
# Allow callable classes to take a context
fn = __obj.__call__
for fn_type in ('contextfunction',
'evalcontextfunction',
'environmentfunction'):
if hasattr(fn, fn_type):
__obj = fn
break
if isinstance(__obj, _context_function_types):
if getattr(__obj, 'contextfunction', 0):
args = (__self,) + args
elif getattr(__obj, 'evalcontextfunction', 0):
args = (__self.eval_ctx,) + args
elif getattr(__obj, 'environmentfunction', 0):
args = (__self.environment,) + args
try:
return __obj(*args, **kwargs)
except StopIteration:
return __self.environment.undefined('value was undefined because '
'a callable raised a '
'StopIteration exception')
def derived(self, locals=None):
"""Internal helper function to create a derived context."""
context = new_context(self.environment, self.name, {},
self.parent, True, None, locals)
context.vars.update(self.vars)
context.eval_ctx = self.eval_ctx
context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks))
return context
def _all(meth):
proxy = lambda self: getattr(self.get_all(), meth)()
proxy.__doc__ = getattr(dict, meth).__doc__
proxy.__name__ = meth
return proxy
keys = _all('keys')
values = _all('values')
items = _all('items')
# not available on python 3
if PY2:
iterkeys = _all('iterkeys')
itervalues = _all('itervalues')
iteritems = _all('iteritems')
del _all
def __contains__(self, name):
return name in self.vars or name in self.parent
def __getitem__(self, key):
"""Lookup a variable or raise `KeyError` if the variable is
undefined.
"""
item = self.resolve(key)
if isinstance(item, Undefined):
raise KeyError(key)
return item
def __repr__(self):
return '<%s %s of %r>' % (
self.__class__.__name__,
repr(self.get_all()),
self.name
)
# register the context as mapping if possible
try:
from collections import Mapping
Mapping.register(Context)
except ImportError:
pass
class BlockReference(object):
"""One block on a template reference."""
def __init__(self, name, context, stack, depth):
self.name = name
self._context = context
self._stack = stack
self._depth = depth
@property
def super(self):
"""Super the block."""
if self._depth + 1 >= len(self._stack):
return self._context.environment. \
undefined('there is no parent block called %r.' %
self.name, name='super')
return BlockReference(self.name, self._context, self._stack,
self._depth + 1)
@internalcode
def __call__(self):
rv = concat(self._stack[self._depth](self._context))
if self._context.eval_ctx.autoescape:
rv = Markup(rv)
return rv
class LoopContext(object):
"""A loop context for dynamic iteration."""
def __init__(self, iterable, recurse=None, depth0=0):
self._iterator = iter(iterable)
self._recurse = recurse
self._after = self._safe_next()
self.index0 = -1
self.depth0 = depth0
# try to get the length of the iterable early. This must be done
# here because there are some broken iterators around whose
# __len__ is the number of iterations left (I'm looking at you,
# listreverseiterator!).
try:
self._length = len(iterable)
except (TypeError, AttributeError):
self._length = None
def cycle(self, *args):
"""Cycles among the arguments with the current loop index."""
if not args:
raise TypeError('no items for cycling given')
return args[self.index0 % len(args)]
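# e.g. loop.cycle('odd', 'even') yields 'odd', 'even', 'odd', ... as
# index0 advances 0, 1, 2, ...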
first = property(lambda x: x.index0 == 0)
last = property(lambda x: x._after is _last_iteration)
index = property(lambda x: x.index0 + 1)
revindex = property(lambda x: x.length - x.index0)
revindex0 = property(lambda x: x.length - x.index)
depth = property(lambda x: x.depth0 + 1)
def __len__(self):
return self.length
def __iter__(self):
return LoopContextIterator(self)
def _safe_next(self):
try:
return next(self._iterator)
except StopIteration:
return _last_iteration
@internalcode
def loop(self, iterable):
if self._recurse is None:
raise TypeError('Tried to call non recursive loop. Maybe you '
"forgot the 'recursive' modifier.")
return self._recurse(iterable, self._recurse, self.depth0 + 1)
# a nifty trick to enhance the error message if someone tried to call
# the loop without or with too many arguments.
__call__ = loop
del loop
@property
def length(self):
if self._length is None:
# if it was not possible to get the length of the iterator when
# the loop context was created (i.e. iterating over a generator),
# we have to convert the iterable into a sequence and use the
# length of that.
iterable = tuple(self._iterator)
self._iterator = iter(iterable)
self._length = len(iterable) + self.index0 + 1
return self._length
def __repr__(self):
return '<%s %r/%r>' % (
self.__class__.__name__,
self.index,
self.length
)
@implements_iterator
class LoopContextIterator(object):
"""The iterator for a loop context."""
__slots__ = ('context',)
def __init__(self, context):
self.context = context
def __iter__(self):
return self
def __next__(self):
ctx = self.context
ctx.index0 += 1
if ctx._after is _last_iteration:
raise StopIteration()
next_elem = ctx._after
ctx._after = ctx._safe_next()
return next_elem, ctx
class Macro(object):
"""Wraps a macro function."""
def __init__(self, environment, func, name, arguments, defaults,
catch_kwargs, catch_varargs, caller):
self._environment = environment
self._func = func
self._argument_count = len(arguments)
self.name = name
self.arguments = arguments
self.defaults = defaults
self.catch_kwargs = catch_kwargs
self.catch_varargs = catch_varargs
self.caller = caller
@internalcode
def __call__(self, *args, **kwargs):
# try to consume the positional arguments
arguments = list(args[:self._argument_count])
off = len(arguments)
# if the number of arguments consumed is not the number of
# arguments expected we start filling in keyword arguments
# and defaults.
if off != self._argument_count:
for idx, name in enumerate(self.arguments[len(arguments):]):
try:
value = kwargs.pop(name)
except KeyError:
try:
value = self.defaults[idx - self._argument_count + off]
except IndexError:
value = self._environment.undefined(
'parameter %r was not provided' % name, name=name)
arguments.append(value)
# it's important that the order of these arguments does not change
# if not also changed in the compiler's `function_scoping` method.
# the order is caller, keyword arguments, positional arguments!
if self.caller:
caller = kwargs.pop('caller', None)
if caller is None:
caller = self._environment.undefined('No caller defined',
name='caller')
arguments.append(caller)
if self.catch_kwargs:
arguments.append(kwargs)
elif kwargs:
raise TypeError('macro %r takes no keyword argument %r' %
(self.name, next(iter(kwargs))))
if self.catch_varargs:
arguments.append(args[self._argument_count:])
elif len(args) > self._argument_count:
raise TypeError('macro %r takes not more than %d argument(s)' %
(self.name, len(self.arguments)))
return self._func(*arguments)
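# Illustrative call semantics (hypothetical macro `m`): with arguments
# ('a', 'b') and defaults (2,), m(1) fills `arguments` to [1, 2] from the
# right-aligned defaults; m(1, c=3) without catch_kwargs raises
# TypeError("macro 'm' takes no keyword argument 'c'").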
def __repr__(self):
return '<%s %s>' % (
self.__class__.__name__,
self.name is None and 'anonymous' or repr(self.name)
)
@implements_to_string
class Undefined(object):
"""The default undefined type. This undefined type can be printed and
iterated over, but every other access will raise an :exc:`UndefinedError`:
>>> foo = Undefined(name='foo')
>>> str(foo)
''
>>> not foo
True
>>> foo + 42
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
"""
__slots__ = ('_undefined_hint', '_undefined_obj', '_undefined_name',
'_undefined_exception')
def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
self._undefined_hint = hint
self._undefined_obj = obj
self._undefined_name = name
self._undefined_exception = exc
@internalcode
def _fail_with_undefined_error(self, *args, **kwargs):
"""Regular callback function for undefined objects that raises an
`UndefinedError` on call.
"""
if self._undefined_hint is None:
if self._undefined_obj is missing:
hint = '%r is undefined' % self._undefined_name
elif not isinstance(self._undefined_name, string_types):
hint = '%s has no element %r' % (
object_type_repr(self._undefined_obj),
self._undefined_name
)
else:
hint = '%r has no attribute %r' % (
object_type_repr(self._undefined_obj),
self._undefined_name
)
else:
hint = self._undefined_hint
raise self._undefined_exception(hint)
@internalcode
def __getattr__(self, name):
if name[:2] == '__':
raise AttributeError(name)
return self._fail_with_undefined_error()
__add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = \
__truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = \
__mod__ = __rmod__ = __pos__ = __neg__ = __call__ = \
__getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = \
__float__ = __complex__ = __pow__ = __rpow__ = \
_fail_with_undefined_error
def __str__(self):
return u''
def __len__(self):
return 0
def __iter__(self):
if 0:
yield None
def __nonzero__(self):
return False
def __repr__(self):
return 'Undefined'
@implements_to_string
class DebugUndefined(Undefined):
"""An undefined that returns the debug info when printed.
>>> foo = DebugUndefined(name='foo')
>>> str(foo)
'{{ foo }}'
>>> not foo
True
>>> foo + 42
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
"""
__slots__ = ()
def __str__(self):
if self._undefined_hint is None:
if self._undefined_obj is missing:
return u'{{ %s }}' % self._undefined_name
return '{{ no such element: %s[%r] }}' % (
object_type_repr(self._undefined_obj),
self._undefined_name
)
return u'{{ undefined value printed: %s }}' % self._undefined_hint
@implements_to_string
class StrictUndefined(Undefined):
"""An undefined that barks on print and iteration as well as boolean
tests and all kinds of comparisons. In other words: you can do nothing
with it except checking if it's defined using the `defined` test.
>>> foo = StrictUndefined(name='foo')
>>> str(foo)
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
>>> not foo
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
>>> foo + 42
Traceback (most recent call last):
...
UndefinedError: 'foo' is undefined
"""
__slots__ = ()
__iter__ = __str__ = __len__ = __nonzero__ = __eq__ = \
__ne__ = __bool__ = Undefined._fail_with_undefined_error
# remove remaining slots attributes, after the metaclass did the magic they
# are unneeded and irritating as they contain wrong data for the subclasses.
del Undefined.__slots__, DebugUndefined.__slots__, StrictUndefined.__slots__
|
harshilasu/LinkurApp
|
y/google-cloud-sdk/lib/jinja2/runtime.py
|
Python
|
gpl-3.0
| 19,343
|
from multistack.services.remote import Remote
from multistack.constants import *
def setup_s3fs(credentials, remote):
"""
Creates /etc/passwd-s3fs containing AWS access and secret key in the
following form.
accesskey:secretaccesskey
@param credentials: AWS access key and secret ID
@type credentials: C{dict}
@param remote: Instance of remote.Remote class
@type remote: remote.Remote instance
"""
pass_file_content = ':'.join([
credentials['ec2_access_key'],
credentials['ec2_secret_key']
])
remote.run("echo {0} | sudo tee -a /etc/passwd-s3fs".format(pass_file_content))
remote.sudo("chmod 0400 /etc/passwd-s3fs")
def mount_bucket(bucket, remote):
"""
Mount the remote bucket with input data using s3fs
@param bucket: Bucket name
@type bucket: C{str}
@param remote: Instance of remote.Remote class
@type remote: remote.Remote instance
"""
MAPRED_UID = remote.sudo("id -u mapred")
HADOOP_GID = remote.sudo("grep -i hadoop /etc/group | cut -d ':' -f 3")
remote.sudo("mkdir /media/{0}".format(bucket))
remote.sudo("chown root:hadoop -R /media/{0}".format(bucket))
remote.sudo("chmod 775 -R /media/{0}".format(bucket))
remote.sudo("s3fs {0} -o uid={1},gid={2},umask={3},allow_other \
/media/{0}".format(bucket, MAPRED_UID, HADOOP_GID, UMASK))
def copy_to_hdfs(input_uri, remote):
"""
Copy the data stored at uri(s3) to local HDFS
@param input_uri: s3 address - s3://bucket/path/to/input/dir
@type input_uri: C{str}
"""
bucket_name = input_uri.split('/')[2]
input_path = input_uri.split('//')[1]
remote.sudo("hadoop fs -mkdir tmp", user='mapred')
remote.sudo("hadoop fs -copyFromLocal /media/{0}/ .".format(input_path),
user='mapred')
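# URI parsing used above, e.g. for 's3://bucket/path/to/input':
#   input_uri.split('/')[2]  -> 'bucket' (the bucket name)
#   input_uri.split('//')[1] -> 'bucket/path/to/input' (the path under /media)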
def copy_to_s3(output_uri, input_uri, remote):
"""
Copy the output stored at base directory of output_uri
@param output_uri: s3 address - s3://bucket/path/to/output/dir
@type output_uri: C{str}
@param input_uri: s3 address - s3://bucket/path/to/input/dir
@type input_uri: C{str}
"""
input_bucket = input_uri.split('/')[2]
output_bucket = output_uri.split('/')[2]
if input_bucket != output_bucket:
mount_bucket(output_bucket, remote)
output_dir = output_uri.split('/')[-1]
output_path = output_uri.split('//')[1]
remote.sudo("hadoop fs -copyToLocal {0}/* /media/{1}".format(output_dir,
output_path), user = 'mapred')
def download_jar(jar_location, remote):
"""
Download jar from a remote jar_location
"""
uri_protocol = jar_location.split(':')[0]
if uri_protocol == 's3':
download_url = 'https://s3.amazonaws.com/{0}'.format(jar_location.split('//')[1])
else:
download_url = jar_location
remote.run("wget {0} -O /tmp/file.jar".format(download_url))
def run_job(jar_location, args, input_uri, output_uri, remote):
"""
Submits a job to a hadoop cluster
"""
input_dir = input_uri.split('/')[-1]
output_dir = output_uri.split('/')[-1]
download_jar(jar_location, remote)
remote.sudo("hadoop jar /tmp/file.jar {0} {1} {2}".format(args,
input_dir, output_dir),
user = 'mapred')
def submit_job(data, user, credentials):
"""
Makes all preparation required prior to submitting a job.
* Mount S3 bucket
* Copy data to HDFS
* Download jar
and then finally submit the job.
"""
job_name = data['job']['name']
key_location = "/tmp/multistack-" + job_name + ".pem"
for node in data['job']['nodes']:
if node['role'] == 'master':
remote = Remote(node['ip_address'], user, key_location)
if data['job']['input'] != 's3://':
bucket_name = data['job']['input'].split('/')[2]
setup_s3fs(credentials, remote)
mount_bucket(bucket_name, remote)
copy_to_hdfs(data['job']['input'], remote)
run_job(
data['job']['jar'],
data['job']['args'],
data['job']['input'],
data['job']['output'],
remote
)
copy_to_s3(data['job']['output'], data['job']['input'], remote)
|
siel-iiith/MultiStack
|
multistack/services/run.py
|
Python
|
apache-2.0
| 4,336
|
d = dump("dump.kinase")
r = gl(d)
r.zoom(1.3)
r.q(12)
r.acol([1,4,6,8,9],["white","red","green","blue","purple"])
r.show(0)
|
sn-amber/mylpp
|
examples/files/tmp.py
|
Python
|
gpl-2.0
| 124
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides the data access object (DAO) for Groups."""
from Queue import Queue
from google.cloud.security.common.data_access import dao
from google.cloud.security.common.data_access.sql_queries import select_data
from google.cloud.security.common.util import log_util
# TODO: The next editor must remove this disable and correct issues.
# pylint: disable=missing-type-doc,missing-return-type-doc
LOGGER = log_util.get_logger(__name__)
MY_CUSTOMER = 'my_customer'
class GroupDao(dao.Dao):
"""Data access object (DAO) for Groups."""
def get_all_groups(self, resource_name, timestamp):
"""Get all the groups.
Args:
resource_name: String of the resource name.
timestamp: The timestamp of the snapshot.
Returns:
A tuple of the groups as dict.
"""
sql = select_data.GROUPS.format(timestamp)
return self.execute_sql_with_fetch(resource_name, sql, None)
def get_group_id(self, resource_name, group_email, timestamp):
"""Get the group_id for the specified group_email.
Args:
resource_name: String of the resource name.
group_email: String of the group email.
timestamp: The timestamp of the snapshot.
Returns:
String of the group id.
"""
sql = select_data.GROUP_ID.format(timestamp)
result = self.execute_sql_with_fetch(resource_name, sql, (group_email,))
return result[0].get('group_id')
def get_group_members(self, resource_name, group_id, timestamp):
"""Get the members of a group.
Args:
resource_name: String of the resource name.
group_id: String of the group id.
timestamp: The timestamp of the snapshot.
Returns:
A tuple of group members in dict format.
({'group_id': '00lnxb',
'member_email': 'foo@mygbiz.com',
'member_id': '11111',
'member_role': 'OWNER',
'member_type': 'USER'}, ...)
"""
sql = select_data.GROUP_MEMBERS.format(timestamp)
return self.execute_sql_with_fetch(resource_name, sql, (group_id,))
def get_recursive_members_of_group(self, group_email, timestamp):
"""Get all the recursive members of a group.
Args:
group_email: String of the group email.
timestamp: The timestamp of the snapshot.
Returns:
A list of group members in dict format.
[{'group_id': '00lnxb',
'member_email': 'foo@mygbiz.com',
'member_id': '11111',
'member_role': 'OWNER',
'member_type': 'USER'}, ...]
"""
all_members = []
queue = Queue()
group_id = self.get_group_id('group', group_email, timestamp)
queue.put(group_id)
while not queue.empty():
group_id = queue.get()
members = self.get_group_members('group_members', group_id,
timestamp)
for member in members:
all_members.append(member)
if member.get('member_type') == 'GROUP':
queue.put(member.get('member_id'))
return all_members
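# Illustrative traversal (assumed data): if group A contains user u1 and
# nested group B, and B contains user u2, the queue expands A then B, so the
# returned list holds u1, the B member record, and u2 in breadth-first order.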
|
thenenadx/forseti-security
|
google/cloud/security/common/data_access/group_dao.py
|
Python
|
apache-2.0
| 3,871
|
# -*- encoding: utf-8 -*-
# pilas engine - a video game framework.
#
# copyright 2010 - hugo ruscitti
# license: lgplv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# website - http://www.pilas-engine.com.ar
'''
pilas.pilasversion
==================
Defines the current pilas version and functions for comparing against it.
'''
#: Contains the current version of pilas.
VERSION = "0.83"
def compareactual(v):
"""Compara la versión actual de pilas con una que se pasa como parámetro
Sus posibles retornos son:
- **-1** si *versión actual de pilas* < ``v``.
- **0** si *versión actual de pilas* == ``v``.
- **1** si *versión actual de pilas* > ``v``.
:param v: versión a comparar con la actual.
:type v: str
"""
return compare(VERSION, v)
def compare(v0, v1):
"""Compara dos versiones de pilas.
Sus posibles retornos son
- **-1** si ``v0`` < ``v1``.
- **0** si ``v0`` == ``v1``.
- **1** si ``v0`` > ``v1``.
:param v0: primer versión a comparar.
:type v0: str
:param v1: segunda versión a comparar.
:type v1: str
"""
v0 = v0.split(".")
v1 = v1.split(".")
return -1 if v0 < v1 else 0 if v0 == v1 else 1
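# A minimal usage sketch (not part of the original module):
#   >>> compare("0.80", "0.83")
#   -1
#   >>> compareactual(VERSION)
#   0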
|
irvingprog/pilas
|
pilas/pilasversion.py
|
Python
|
lgpl-3.0
| 1,208
|
"""
Test script for doctest.
"""
from test import test_support
import doctest
import warnings
######################################################################
## Sample Objects (used by test cases)
######################################################################
def sample_func(v):
"""
Blah blah
>>> print sample_func(22)
44
Yee ha!
"""
return v+v
class SampleClass:
"""
>>> print 1
1
>>> # comments get ignored. so are empty PS1 and PS2 prompts:
>>>
...
Multiline example:
>>> sc = SampleClass(3)
>>> for i in range(10):
... sc = sc.double()
... print sc.get(),
6 12 24 48 96 192 384 768 1536 3072
"""
def __init__(self, val):
"""
>>> print SampleClass(12).get()
12
"""
self.val = val
def double(self):
"""
>>> print SampleClass(12).double().get()
24
"""
return SampleClass(self.val + self.val)
def get(self):
"""
>>> print SampleClass(-5).get()
-5
"""
return self.val
def a_staticmethod(v):
"""
>>> print SampleClass.a_staticmethod(10)
11
"""
return v+1
a_staticmethod = staticmethod(a_staticmethod)
def a_classmethod(cls, v):
"""
>>> print SampleClass.a_classmethod(10)
12
>>> print SampleClass(0).a_classmethod(10)
12
"""
return v+2
a_classmethod = classmethod(a_classmethod)
a_property = property(get, doc="""
>>> print SampleClass(22).a_property
22
""")
class NestedClass:
"""
>>> x = SampleClass.NestedClass(5)
>>> y = x.square()
>>> print y.get()
25
"""
def __init__(self, val=0):
"""
>>> print SampleClass.NestedClass().get()
0
"""
self.val = val
def square(self):
return SampleClass.NestedClass(self.val*self.val)
def get(self):
return self.val
class SampleNewStyleClass(object):
r"""
>>> print '1\n2\n3'
1
2
3
"""
def __init__(self, val):
"""
>>> print SampleNewStyleClass(12).get()
12
"""
self.val = val
def double(self):
"""
>>> print SampleNewStyleClass(12).double().get()
24
"""
return SampleNewStyleClass(self.val + self.val)
def get(self):
"""
>>> print SampleNewStyleClass(-5).get()
-5
"""
return self.val
######################################################################
## Fake stdin (for testing interactive debugging)
######################################################################
class _FakeInput:
"""
A fake input stream for pdb's interactive debugger. Whenever a
line is read, print it (to simulate the user typing it), and then
return it. The set of lines to return is specified in the
constructor; they should not have trailing newlines.
"""
def __init__(self, lines):
self.lines = lines
def readline(self):
line = self.lines.pop(0)
print line
return line+'\n'
######################################################################
## Test Cases
######################################################################
def test_Example(): r"""
Unit tests for the `Example` class.
Example is a simple container class that holds:
- `source`: A source string.
- `want`: An expected output string.
- `exc_msg`: An expected exception message string (or None if no
exception is expected).
- `lineno`: A line number (within the docstring).
- `indent`: The example's indentation in the input string.
- `options`: An option dictionary, mapping option flags to True or
False.
These attributes are set by the constructor. `source` and `want` are
required; the other attributes all have default values:
>>> example = doctest.Example('print 1', '1\n')
>>> (example.source, example.want, example.exc_msg,
... example.lineno, example.indent, example.options)
('print 1\n', '1\n', None, 0, 0, {})
The first three attributes (`source`, `want`, and `exc_msg`) may be
specified positionally; the remaining arguments should be specified as
keyword arguments:
>>> exc_msg = 'IndexError: pop from an empty list'
>>> example = doctest.Example('[].pop()', '', exc_msg,
... lineno=5, indent=4,
... options={doctest.ELLIPSIS: True})
>>> (example.source, example.want, example.exc_msg,
... example.lineno, example.indent, example.options)
('[].pop()\n', '', 'IndexError: pop from an empty list\n', 5, 4, {8: True})
The constructor normalizes the `source` string to end in a newline:
Source spans a single line: no terminating newline.
>>> e = doctest.Example('print 1', '1\n')
>>> e.source, e.want
('print 1\n', '1\n')
>>> e = doctest.Example('print 1\n', '1\n')
>>> e.source, e.want
('print 1\n', '1\n')
Source spans multiple lines: require terminating newline.
>>> e = doctest.Example('print 1;\nprint 2\n', '1\n2\n')
>>> e.source, e.want
('print 1;\nprint 2\n', '1\n2\n')
>>> e = doctest.Example('print 1;\nprint 2', '1\n2\n')
>>> e.source, e.want
('print 1;\nprint 2\n', '1\n2\n')
Empty source string (which should never appear in real examples)
>>> e = doctest.Example('', '')
>>> e.source, e.want
('\n', '')
The constructor normalizes the `want` string to end in a newline,
unless it's the empty string:
>>> e = doctest.Example('print 1', '1\n')
>>> e.source, e.want
('print 1\n', '1\n')
>>> e = doctest.Example('print 1', '1')
>>> e.source, e.want
('print 1\n', '1\n')
>>> e = doctest.Example('print', '')
>>> e.source, e.want
('print\n', '')
The constructor normalizes the `exc_msg` string to end in a newline,
unless it's `None`:
Message spans one line
>>> exc_msg = 'IndexError: pop from an empty list'
>>> e = doctest.Example('[].pop()', '', exc_msg)
>>> e.exc_msg
'IndexError: pop from an empty list\n'
>>> exc_msg = 'IndexError: pop from an empty list\n'
>>> e = doctest.Example('[].pop()', '', exc_msg)
>>> e.exc_msg
'IndexError: pop from an empty list\n'
Message spans multiple lines
>>> exc_msg = 'ValueError: 1\n 2'
>>> e = doctest.Example('raise ValueError("1\n 2")', '', exc_msg)
>>> e.exc_msg
'ValueError: 1\n 2\n'
>>> exc_msg = 'ValueError: 1\n 2\n'
>>> e = doctest.Example('raise ValueError("1\n 2")', '', exc_msg)
>>> e.exc_msg
'ValueError: 1\n 2\n'
Empty (but non-None) exception message (which should never appear
in real examples)
>>> exc_msg = ''
>>> e = doctest.Example('raise X()', '', exc_msg)
>>> e.exc_msg
'\n'
"""
def test_DocTest(): r"""
Unit tests for the `DocTest` class.
DocTest is a collection of examples, extracted from a docstring, along
with information about where the docstring comes from (a name,
filename, and line number). The docstring is parsed by the `DocTest`
constructor:
>>> docstring = '''
... >>> print 12
... 12
...
... Non-example text.
...
... >>> print 'another\example'
... another
... example
... '''
>>> globs = {} # globals to run the test in.
>>> parser = doctest.DocTestParser()
>>> test = parser.get_doctest(docstring, globs, 'some_test',
... 'some_file', 20)
>>> print test
<DocTest some_test from some_file:20 (2 examples)>
>>> len(test.examples)
2
>>> e1, e2 = test.examples
>>> (e1.source, e1.want, e1.lineno)
('print 12\n', '12\n', 1)
>>> (e2.source, e2.want, e2.lineno)
("print 'another\\example'\n", 'another\nexample\n', 6)
Source information (name, filename, and line number) is available as
attributes on the doctest object:
>>> (test.name, test.filename, test.lineno)
('some_test', 'some_file', 20)
The line number of an example within its containing file is found by
adding the line number of the example and the line number of its
containing test:
>>> test.lineno + e1.lineno
21
>>> test.lineno + e2.lineno
26
If the docstring contains inconsistent leading whitespace in the
expected output of an example, then `DocTest` will raise a ValueError:
>>> docstring = r'''
... >>> print 'bad\nindentation'
... bad
... indentation
... '''
>>> parser.get_doctest(docstring, globs, 'some_test', 'filename', 0)
Traceback (most recent call last):
ValueError: line 4 of the docstring for some_test has inconsistent leading whitespace: 'indentation'
If the docstring contains inconsistent leading whitespace on
continuation lines, then `DocTest` will raise a ValueError:
>>> docstring = r'''
... >>> print ('bad indentation',
... ... 2)
... ('bad', 'indentation')
... '''
>>> parser.get_doctest(docstring, globs, 'some_test', 'filename', 0)
Traceback (most recent call last):
ValueError: line 2 of the docstring for some_test has inconsistent leading whitespace: '... 2)'
If there's no blank space after a PS1 prompt ('>>>'), then `DocTest`
will raise a ValueError:
>>> docstring = '>>>print 1\n1'
>>> parser.get_doctest(docstring, globs, 'some_test', 'filename', 0)
Traceback (most recent call last):
ValueError: line 1 of the docstring for some_test lacks blank after >>>: '>>>print 1'
If there's no blank space after a PS2 prompt ('...'), then `DocTest`
will raise a ValueError:
>>> docstring = '>>> if 1:\n...print 1\n1'
>>> parser.get_doctest(docstring, globs, 'some_test', 'filename', 0)
Traceback (most recent call last):
ValueError: line 2 of the docstring for some_test lacks blank after ...: '...print 1'
"""
def test_DocTestFinder(): r"""
Unit tests for the `DocTestFinder` class.
DocTestFinder is used to extract DocTests from an object's docstring
and the docstrings of its contained objects. It can be used with
modules, functions, classes, methods, staticmethods, classmethods, and
properties.
Finding Tests in Functions
~~~~~~~~~~~~~~~~~~~~~~~~~~
For a function whose docstring contains examples, DocTestFinder.find()
will return a single test (for that function's docstring):
>>> finder = doctest.DocTestFinder()
We'll simulate a __file__ attr that ends in pyc:
>>> import test.test_doctest
>>> old = test.test_doctest.__file__
>>> test.test_doctest.__file__ = 'test_doctest.pyc'
>>> tests = finder.find(sample_func)
>>> print tests # doctest: +ELLIPSIS
[<DocTest sample_func from ...:13 (1 example)>]
The exact name depends on how test_doctest was invoked, so allow for
leading path components.
>>> tests[0].filename # doctest: +ELLIPSIS
'...test_doctest.py'
>>> test.test_doctest.__file__ = old
>>> e = tests[0].examples[0]
>>> (e.source, e.want, e.lineno)
('print sample_func(22)\n', '44\n', 3)
By default, no test is created for an object that has no docstring:
>>> def no_docstring(v):
... pass
>>> finder.find(no_docstring)
[]
However, the optional argument `exclude_empty` to the DocTestFinder
constructor can be used to exclude tests for objects with empty
docstrings:
>>> def no_docstring(v):
... pass
>>> excl_empty_finder = doctest.DocTestFinder(exclude_empty=True)
>>> excl_empty_finder.find(no_docstring)
[]
If the function has a docstring with no examples, then a test with no
examples is returned. (This lets `DocTestRunner` collect statistics
about which functions have no tests -- but is that useful? And should
an empty test also be created when there's no docstring?)
>>> def no_examples(v):
... ''' no doctest examples '''
>>> finder.find(no_examples) # doctest: +ELLIPSIS
[<DocTest no_examples from ...:1 (no examples)>]
Finding Tests in Classes
~~~~~~~~~~~~~~~~~~~~~~~~
For a class, DocTestFinder will create a test for the class's
docstring, and will recursively explore its contents, including
methods, classmethods, staticmethods, properties, and nested classes.
>>> finder = doctest.DocTestFinder()
>>> tests = finder.find(SampleClass)
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
3 SampleClass.NestedClass
1 SampleClass.NestedClass.__init__
1 SampleClass.__init__
2 SampleClass.a_classmethod
1 SampleClass.a_property
1 SampleClass.a_staticmethod
1 SampleClass.double
1 SampleClass.get
New-style classes are also supported:
>>> tests = finder.find(SampleNewStyleClass)
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
1 SampleNewStyleClass
1 SampleNewStyleClass.__init__
1 SampleNewStyleClass.double
1 SampleNewStyleClass.get
Finding Tests in Modules
~~~~~~~~~~~~~~~~~~~~~~~~
For a module, DocTestFinder will create a test for the module's
docstring, and will recursively explore its contents, including
functions, classes, and the `__test__` dictionary, if it exists:
>>> # A module
>>> import new
>>> m = new.module('some_module')
>>> def triple(val):
... '''
... >>> print triple(11)
... 33
... '''
... return val*3
>>> m.__dict__.update({
... 'sample_func': sample_func,
... 'SampleClass': SampleClass,
... '__doc__': '''
... Module docstring.
... >>> print 'module'
... module
... ''',
... '__test__': {
... 'd': '>>> print 6\n6\n>>> print 7\n7\n',
... 'c': triple}})
>>> finder = doctest.DocTestFinder()
>>> # Use module=test.test_doctest, to prevent doctest from
>>> # ignoring the objects since they weren't defined in m.
>>> import test.test_doctest
>>> tests = finder.find(m, module=test.test_doctest)
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
1 some_module
3 some_module.SampleClass
3 some_module.SampleClass.NestedClass
1 some_module.SampleClass.NestedClass.__init__
1 some_module.SampleClass.__init__
2 some_module.SampleClass.a_classmethod
1 some_module.SampleClass.a_property
1 some_module.SampleClass.a_staticmethod
1 some_module.SampleClass.double
1 some_module.SampleClass.get
1 some_module.__test__.c
2 some_module.__test__.d
1 some_module.sample_func
Duplicate Removal
~~~~~~~~~~~~~~~~~
If a single object is listed twice (under different names), then tests
will only be generated for it once:
>>> from test import doctest_aliases
>>> tests = excl_empty_finder.find(doctest_aliases)
>>> print len(tests)
2
>>> print tests[0].name
test.doctest_aliases.TwoNames
TwoNames.f and TwoNames.g are bound to the same object.
We can't guess which will be found in doctest's traversal of
TwoNames.__dict__ first, so we have to allow for either.
>>> tests[1].name.split('.')[-1] in ['f', 'g']
True
Empty Tests
~~~~~~~~~~~
By default, an object with no doctests doesn't create any tests:
>>> tests = doctest.DocTestFinder().find(SampleClass)
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
3 SampleClass.NestedClass
1 SampleClass.NestedClass.__init__
1 SampleClass.__init__
2 SampleClass.a_classmethod
1 SampleClass.a_property
1 SampleClass.a_staticmethod
1 SampleClass.double
1 SampleClass.get
The default behavior excludes objects with no doctests; exclude_empty=False
tells the finder to include (empty) tests for them as well. This feature
exists mainly to support backward compatibility in what
doctest.master.summarize() displays.
>>> tests = doctest.DocTestFinder(exclude_empty=False).find(SampleClass)
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
3 SampleClass.NestedClass
1 SampleClass.NestedClass.__init__
0 SampleClass.NestedClass.get
0 SampleClass.NestedClass.square
1 SampleClass.__init__
2 SampleClass.a_classmethod
1 SampleClass.a_property
1 SampleClass.a_staticmethod
1 SampleClass.double
1 SampleClass.get
Turning off Recursion
~~~~~~~~~~~~~~~~~~~~~
DocTestFinder can be told not to look for tests in contained objects
using the `recurse` flag:
>>> tests = doctest.DocTestFinder(recurse=False).find(SampleClass)
>>> for t in tests:
... print '%2s %s' % (len(t.examples), t.name)
3 SampleClass
Line numbers
~~~~~~~~~~~~
DocTestFinder finds the line number of each example:
>>> def f(x):
... '''
... >>> x = 12
...
... some text
...
... >>> # examples are not created for comments & bare prompts.
... >>>
... ...
...
... >>> for x in range(10):
... ... print x,
... 0 1 2 3 4 5 6 7 8 9
... >>> x//2
... 6
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> [e.lineno for e in test.examples]
[1, 9, 12]
"""
def test_DocTestParser(): r"""
Unit tests for the `DocTestParser` class.
DocTestParser is used to parse docstrings containing doctest examples.
The `parse` method divides a docstring into examples and intervening
text:
>>> s = '''
... >>> x, y = 2, 3 # no output expected
... >>> if 1:
... ... print x
... ... print y
... 2
... 3
...
... Some text.
... >>> x+y
... 5
... '''
>>> parser = doctest.DocTestParser()
>>> for piece in parser.parse(s):
... if isinstance(piece, doctest.Example):
... print 'Example:', (piece.source, piece.want, piece.lineno)
... else:
... print ' Text:', `piece`
Text: '\n'
Example: ('x, y = 2, 3 # no output expected\n', '', 1)
Text: ''
Example: ('if 1:\n print x\n print y\n', '2\n3\n', 2)
Text: '\nSome text.\n'
Example: ('x+y\n', '5\n', 9)
Text: ''
The `get_examples` method returns just the examples:
>>> for piece in parser.get_examples(s):
... print (piece.source, piece.want, piece.lineno)
('x, y = 2, 3 # no output expected\n', '', 1)
('if 1:\n print x\n print y\n', '2\n3\n', 2)
('x+y\n', '5\n', 9)
The `get_doctest` method creates a Test from the examples, along with the
given arguments:
>>> test = parser.get_doctest(s, {}, 'name', 'filename', lineno=5)
>>> (test.name, test.filename, test.lineno)
('name', 'filename', 5)
>>> for piece in test.examples:
... print (piece.source, piece.want, piece.lineno)
('x, y = 2, 3 # no output expected\n', '', 1)
('if 1:\n print x\n print y\n', '2\n3\n', 2)
('x+y\n', '5\n', 9)
"""
class test_DocTestRunner:
def basics(): r"""
Unit tests for the `DocTestRunner` class.
DocTestRunner is used to run DocTest test cases, and to accumulate
statistics. Here's a simple DocTest case we can use:
>>> def f(x):
... '''
... >>> x = 12
... >>> print x
... 12
... >>> x//2
... 6
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
The main DocTestRunner interface is the `run` method, which runs a
given DocTest case in a given namespace (globs). It returns a tuple
`(f,t)`, where `f` is the number of failed examples and `t` is the number
of examples tried.
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 3)
If any example produces incorrect output, then the test runner reports
the failure and proceeds to the next example:
>>> def f(x):
... '''
... >>> x = 12
... >>> print x
... 14
... >>> x//2
... 6
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=True).run(test)
... # doctest: +ELLIPSIS
Trying:
x = 12
Expecting nothing
ok
Trying:
print x
Expecting:
14
**********************************************************************
File ..., line 4, in f
Failed example:
print x
Expected:
14
Got:
12
Trying:
x//2
Expecting:
6
ok
(1, 3)
"""
def verbose_flag(): r"""
The `verbose` flag makes the test runner generate more detailed
output:
>>> def f(x):
... '''
... >>> x = 12
... >>> print x
... 12
... >>> x//2
... 6
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=True).run(test)
Trying:
x = 12
Expecting nothing
ok
Trying:
print x
Expecting:
12
ok
Trying:
x//2
Expecting:
6
ok
(0, 3)
If the `verbose` flag is unspecified, then the output will be verbose
iff `-v` appears in sys.argv:
>>> # Save the real sys.argv list.
>>> old_argv = sys.argv
>>> # If -v does not appear in sys.argv, then output isn't verbose.
>>> sys.argv = ['test']
>>> doctest.DocTestRunner().run(test)
(0, 3)
>>> # If -v does appear in sys.argv, then output is verbose.
>>> sys.argv = ['test', '-v']
>>> doctest.DocTestRunner().run(test)
Trying:
x = 12
Expecting nothing
ok
Trying:
print x
Expecting:
12
ok
Trying:
x//2
Expecting:
6
ok
(0, 3)
>>> # Restore sys.argv
>>> sys.argv = old_argv
In the remaining examples, the test runner's verbosity will be
explicitly set, to ensure that the test behavior is consistent.
"""
def exceptions(): r"""
Tests of `DocTestRunner`'s exception handling.
An expected exception is specified with a traceback message. The
lines between the first line and the type/value may be omitted or
replaced with any other string:
>>> def f(x):
... '''
... >>> x = 12
... >>> print x//0
... Traceback (most recent call last):
... ZeroDivisionError: integer division or modulo by zero
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 2)
An example may not generate output before it raises an exception; if
it does, then the traceback message will not be recognized as
signaling an expected exception, so the example will be reported as an
unexpected exception:
>>> def f(x):
... '''
... >>> x = 12
... >>> print 'pre-exception output', x//0
... pre-exception output
... Traceback (most recent call last):
... ZeroDivisionError: integer division or modulo by zero
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 4, in f
Failed example:
print 'pre-exception output', x//0
Exception raised:
...
ZeroDivisionError: integer division or modulo by zero
(1, 2)
Exception messages may contain newlines:
>>> def f(x):
... r'''
... >>> raise ValueError, 'multi\nline\nmessage'
... Traceback (most recent call last):
... ValueError: multi
... line
... message
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 1)
If an exception is expected, but an exception with the wrong type or
message is raised, then it is reported as a failure:
>>> def f(x):
... r'''
... >>> raise ValueError, 'message'
... Traceback (most recent call last):
... ValueError: wrong message
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
raise ValueError, 'message'
Expected:
Traceback (most recent call last):
ValueError: wrong message
Got:
Traceback (most recent call last):
...
ValueError: message
(1, 1)
However, IGNORE_EXCEPTION_DETAIL can be used to allow a mismatch in the
detail:
>>> def f(x):
... r'''
... >>> raise ValueError, 'message' #doctest: +IGNORE_EXCEPTION_DETAIL
... Traceback (most recent call last):
... ValueError: wrong message
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 1)
But IGNORE_EXCEPTION_DETAIL does not allow a mismatch in the exception type:
>>> def f(x):
... r'''
... >>> raise ValueError, 'message' #doctest: +IGNORE_EXCEPTION_DETAIL
... Traceback (most recent call last):
... TypeError: wrong type
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
raise ValueError, 'message' #doctest: +IGNORE_EXCEPTION_DETAIL
Expected:
Traceback (most recent call last):
TypeError: wrong type
Got:
Traceback (most recent call last):
...
ValueError: message
(1, 1)
If an exception is raised but not expected, then it is reported as an
unexpected exception:
>>> def f(x):
... r'''
... >>> 1//0
... 0
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
1//0
Exception raised:
Traceback (most recent call last):
...
ZeroDivisionError: integer division or modulo by zero
(1, 1)
"""
def optionflags(): r"""
Tests of `DocTestRunner`'s option flag handling.
Several option flags can be used to customize the behavior of the test
runner. These are defined as module constants in doctest, and passed
to the DocTestRunner constructor (multiple constants should be or-ed
together).
The DONT_ACCEPT_TRUE_FOR_1 flag disables matches between True/False
and 1/0:
>>> def f(x):
... '>>> True\n1\n'
>>> # Without the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 1)
>>> # With the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.DONT_ACCEPT_TRUE_FOR_1
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
True
Expected:
1
Got:
True
(1, 1)
The DONT_ACCEPT_BLANKLINE flag disables the match between blank lines
and the '<BLANKLINE>' marker:
>>> def f(x):
... '>>> print "a\\n\\nb"\na\n<BLANKLINE>\nb\n'
>>> # Without the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 1)
>>> # With the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.DONT_ACCEPT_BLANKLINE
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print "a\n\nb"
Expected:
a
<BLANKLINE>
b
Got:
a
<BLANKLINE>
b
(1, 1)
The NORMALIZE_WHITESPACE flag causes all sequences of whitespace to be
treated as equal:
>>> def f(x):
... '>>> print 1, 2, 3\n 1 2\n 3'
>>> # Without the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print 1, 2, 3
Expected:
1 2
3
Got:
1 2 3
(1, 1)
>>> # With the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.NORMALIZE_WHITESPACE
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
(0, 1)
An example from the docs:
>>> print range(20) #doctest: +NORMALIZE_WHITESPACE
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
The ELLIPSIS flag causes ellipsis marker ("...") in the expected
output to match any substring in the actual output:
>>> def f(x):
... '>>> print range(15)\n[0, 1, 2, ..., 14]\n'
>>> # Without the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print range(15)
Expected:
[0, 1, 2, ..., 14]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
(1, 1)
>>> # With the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.ELLIPSIS
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
(0, 1)
... also matches nothing:
>>> for i in range(100):
... print i**2, #doctest: +ELLIPSIS
0 1...4...9 16 ... 36 49 64 ... 9801
... can be surprising; e.g., this test passes:
>>> for i in range(21): #doctest: +ELLIPSIS
... print i,
0 1 2 ...1...2...0
Examples from the docs:
>>> print range(20) # doctest:+ELLIPSIS
[0, 1, ..., 18, 19]
>>> print range(20) # doctest: +ELLIPSIS
... # doctest: +NORMALIZE_WHITESPACE
[0, 1, ..., 18, 19]
The SKIP flag causes an example to be skipped entirely. I.e., the
example is not run. It can be useful in contexts where doctest
examples serve as both documentation and test cases, and an example
should be included for documentation purposes, but should not be
checked (e.g., because its output is random, or depends on resources
which would be unavailable.) The SKIP flag can also be used for
'commenting out' broken examples.
>>> import unavailable_resource # doctest: +SKIP
>>> unavailable_resource.do_something() # doctest: +SKIP
>>> unavailable_resource.blow_up() # doctest: +SKIP
Traceback (most recent call last):
...
UncheckedBlowUpError: Nobody checks me.
>>> import random
>>> print random.random() # doctest: +SKIP
0.721216923889
The REPORT_UDIFF flag causes failures that involve multi-line expected
and actual outputs to be displayed using a unified diff:
>>> def f(x):
... r'''
... >>> print '\n'.join('abcdefg')
... a
... B
... c
... d
... f
... g
... h
... '''
>>> # Without the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
print '\n'.join('abcdefg')
Expected:
a
B
c
d
f
g
h
Got:
a
b
c
d
e
f
g
(1, 1)
>>> # With the flag:
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.REPORT_UDIFF
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
print '\n'.join('abcdefg')
Differences (unified diff with -expected +actual):
@@ -1,7 +1,7 @@
a
-B
+b
c
d
+e
f
g
-h
(1, 1)
The REPORT_CDIFF flag causes failures that involve multi-line expected
and actual outputs to be displayed using a context diff:
>>> # Reuse f() from the REPORT_UDIFF example, above.
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.REPORT_CDIFF
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
print '\n'.join('abcdefg')
Differences (context diff with expected followed by actual):
***************
*** 1,7 ****
a
! B
c
d
f
g
- h
--- 1,7 ----
a
! b
c
d
+ e
f
g
(1, 1)
The REPORT_NDIFF flag causes failures to use the difflib.Differ algorithm
used by the popular ndiff.py utility. This does intraline difference
marking, as well as interline differences.
>>> def f(x):
... r'''
... >>> print "a b c d e f g h i j k l m"
... a b c d e f g h i j k 1 m
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.REPORT_NDIFF
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 3, in f
Failed example:
print "a b c d e f g h i j k l m"
Differences (ndiff with -expected +actual):
- a b c d e f g h i j k 1 m
? ^
+ a b c d e f g h i j k l m
? + ++ ^
(1, 1)
The REPORT_ONLY_FIRST_FAILURE flag suppresses result output after the first
failing example:
>>> def f(x):
... r'''
... >>> print 1 # first success
... 1
... >>> print 2 # first failure
... 200
... >>> print 3 # second failure
... 300
... >>> print 4 # second success
... 4
... >>> print 5 # third failure
... 500
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.REPORT_ONLY_FIRST_FAILURE
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 5, in f
Failed example:
print 2 # first failure
Expected:
200
Got:
2
(3, 5)
However, output from `report_start` is not suppressed:
>>> doctest.DocTestRunner(verbose=True, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
Trying:
print 1 # first success
Expecting:
1
ok
Trying:
print 2 # first failure
Expecting:
200
**********************************************************************
File ..., line 5, in f
Failed example:
print 2 # first failure
Expected:
200
Got:
2
(3, 5)
For the purposes of REPORT_ONLY_FIRST_FAILURE, unexpected exceptions
count as failures:
>>> def f(x):
... r'''
... >>> print 1 # first success
... 1
... >>> raise ValueError(2) # first failure
... 200
... >>> print 3 # second failure
... 300
... >>> print 4 # second success
... 4
... >>> print 5 # third failure
... 500
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> flags = doctest.REPORT_ONLY_FIRST_FAILURE
>>> doctest.DocTestRunner(verbose=False, optionflags=flags).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 5, in f
Failed example:
raise ValueError(2) # first failure
Exception raised:
...
ValueError: 2
(3, 5)
New option flags can also be registered, via register_optionflag(). Here
we reach into doctest's internals a bit.
>>> unlikely = "UNLIKELY_OPTION_NAME"
>>> unlikely in doctest.OPTIONFLAGS_BY_NAME
False
>>> new_flag_value = doctest.register_optionflag(unlikely)
>>> unlikely in doctest.OPTIONFLAGS_BY_NAME
True
Before 2.4.4/2.5, registering a name more than once erroneously created
more than one flag value. Here we verify that's fixed:
>>> redundant_flag_value = doctest.register_optionflag(unlikely)
>>> redundant_flag_value == new_flag_value
True
Clean up.
>>> del doctest.OPTIONFLAGS_BY_NAME[unlikely]
"""
def option_directives(): r"""
Tests of `DocTestRunner`'s option directive mechanism.
Option directives can be used to turn option flags on or off for a
single example. To turn an option on for an example, follow that
example with a comment of the form ``# doctest: +OPTION``:
>>> def f(x): r'''
... >>> print range(10) # should fail: no ellipsis
... [0, 1, ..., 9]
...
... >>> print range(10) # doctest: +ELLIPSIS
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print range(10) # should fail: no ellipsis
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
(1, 2)
To turn an option off for an example, follow that example with a
comment of the form ``# doctest: -OPTION``:
>>> def f(x): r'''
... >>> print range(10)
... [0, 1, ..., 9]
...
... >>> # should fail: no ellipsis
... >>> print range(10) # doctest: -ELLIPSIS
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False,
... optionflags=doctest.ELLIPSIS).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 6, in f
Failed example:
print range(10) # doctest: -ELLIPSIS
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
(1, 2)
Option directives affect only the example that they appear with; they
do not change the options for surrounding examples:
>>> def f(x): r'''
... >>> print range(10) # Should fail: no ellipsis
... [0, 1, ..., 9]
...
... >>> print range(10) # doctest: +ELLIPSIS
... [0, 1, ..., 9]
...
... >>> print range(10) # Should fail: no ellipsis
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print range(10) # Should fail: no ellipsis
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
**********************************************************************
File ..., line 8, in f
Failed example:
print range(10) # Should fail: no ellipsis
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
(2, 3)
Multiple options may be modified by a single option directive. They
may be separated by whitespace, commas, or both:
>>> def f(x): r'''
... >>> print range(10) # Should fail
... [0, 1, ..., 9]
... >>> print range(10) # Should succeed
... ... # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print range(10) # Should fail
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
(1, 2)
>>> def f(x): r'''
... >>> print range(10) # Should fail
... [0, 1, ..., 9]
... >>> print range(10) # Should succeed
... ... # doctest: +ELLIPSIS,+NORMALIZE_WHITESPACE
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print range(10) # Should fail
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
(1, 2)
>>> def f(x): r'''
... >>> print range(10) # Should fail
... [0, 1, ..., 9]
... >>> print range(10) # Should succeed
... ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
... # doctest: +ELLIPSIS
**********************************************************************
File ..., line 2, in f
Failed example:
print range(10) # Should fail
Expected:
[0, 1, ..., 9]
Got:
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
(1, 2)
The option directive may be put on the line following the source, as
long as a continuation prompt is used:
>>> def f(x): r'''
... >>> print range(10)
... ... # doctest: +ELLIPSIS
... [0, 1, ..., 9]
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 1)
For examples with multi-line source, the option directive may appear
at the end of any line:
>>> def f(x): r'''
... >>> for x in range(10): # doctest: +ELLIPSIS
... ... print x,
... 0 1 2 ... 9
...
... >>> for x in range(10):
... ... print x, # doctest: +ELLIPSIS
... 0 1 2 ... 9
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 2)
If more than one line of an example with multi-line source has an
option directive, then they are combined:
>>> def f(x): r'''
... Should fail (option directive not on the last line):
... >>> for x in range(10): # doctest: +ELLIPSIS
... ... print x, # doctest: +NORMALIZE_WHITESPACE
... 0 1 2...9
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
(0, 1)
It is an error to have a comment of the form ``# doctest:`` that is
*not* followed by words of the form ``+OPTION`` or ``-OPTION``, where
``OPTION`` is an option that has been registered with
`register_optionflag`:
>>> # Error: Option not registered
>>> s = '>>> print 12 #doctest: +BADOPTION'
>>> test = doctest.DocTestParser().get_doctest(s, {}, 's', 's.py', 0)
Traceback (most recent call last):
ValueError: line 1 of the doctest for s has an invalid option: '+BADOPTION'
>>> # Error: No + or - prefix
>>> s = '>>> print 12 #doctest: ELLIPSIS'
>>> test = doctest.DocTestParser().get_doctest(s, {}, 's', 's.py', 0)
Traceback (most recent call last):
ValueError: line 1 of the doctest for s has an invalid option: 'ELLIPSIS'
It is an error to use an option directive on a line that contains no
source:
>>> s = '>>> # doctest: +ELLIPSIS'
>>> test = doctest.DocTestParser().get_doctest(s, {}, 's', 's.py', 0)
Traceback (most recent call last):
ValueError: line 0 of the doctest for s has an option directive on a line with no example: '# doctest: +ELLIPSIS'
"""
def test_testsource(): r"""
Unit tests for `testsource()`.
The testsource() function takes a module and a name, finds the (first)
test with that name in that module, and converts it to a script. The
example code is converted to regular Python code. The surrounding
words and expected output are converted to comments:
>>> import test.test_doctest
>>> name = 'test.test_doctest.sample_func'
>>> print doctest.testsource(test.test_doctest, name)
# Blah blah
#
print sample_func(22)
# Expected:
## 44
#
# Yee ha!
<BLANKLINE>
>>> name = 'test.test_doctest.SampleNewStyleClass'
>>> print doctest.testsource(test.test_doctest, name)
print '1\n2\n3'
# Expected:
## 1
## 2
## 3
<BLANKLINE>
>>> name = 'test.test_doctest.SampleClass.a_classmethod'
>>> print doctest.testsource(test.test_doctest, name)
print SampleClass.a_classmethod(10)
# Expected:
## 12
print SampleClass(0).a_classmethod(10)
# Expected:
## 12
<BLANKLINE>
"""
def test_debug(): r"""
Create a docstring that we want to debug:
>>> s = '''
... >>> x = 12
... >>> print x
... 12
... '''
Create some fake stdin input, to feed to the debugger:
>>> import tempfile
>>> real_stdin = sys.stdin
>>> sys.stdin = _FakeInput(['next', 'print x', 'continue'])
Run the debugger on the docstring, and then restore sys.stdin.
>>> try: doctest.debug_src(s)
... finally: sys.stdin = real_stdin
> <string>(1)<module>()
(Pdb) next
12
--Return--
> <string>(1)<module>()->None
(Pdb) print x
12
(Pdb) continue
"""
def test_pdb_set_trace():
"""Using pdb.set_trace from a doctest.
You can use pdb.set_trace from a doctest. To do so, you must
retrieve the set_trace function from the pdb module at the time
you use it. The doctest module changes sys.stdout so that it can
capture program output. It also temporarily replaces pdb.set_trace
with a version that restores stdout. This is necessary for you to
see debugger output.
>>> doc = '''
... >>> x = 42
... >>> import pdb; pdb.set_trace()
... '''
>>> parser = doctest.DocTestParser()
>>> test = parser.get_doctest(doc, {}, "foo", "foo.py", 0)
>>> runner = doctest.DocTestRunner(verbose=False)
To demonstrate this, we'll create a fake standard input that
captures our debugger input:
>>> import tempfile
>>> real_stdin = sys.stdin
>>> sys.stdin = _FakeInput([
... 'print x', # print data defined by the example
... 'continue', # stop debugging
... ''])
>>> try: runner.run(test)
... finally: sys.stdin = real_stdin
--Return--
> <doctest foo[1]>(1)<module>()->None
-> import pdb; pdb.set_trace()
(Pdb) print x
42
(Pdb) continue
(0, 2)
You can also put pdb.set_trace in a function called from a test:
>>> def calls_set_trace():
... y=2
... import pdb; pdb.set_trace()
>>> doc = '''
... >>> x=1
... >>> calls_set_trace()
... '''
>>> test = parser.get_doctest(doc, globals(), "foo", "foo.py", 0)
>>> real_stdin = sys.stdin
>>> sys.stdin = _FakeInput([
... 'print y', # print data defined in the function
... 'up', # out of function
... 'print x', # print data defined by the example
... 'continue', # stop debugging
... ''])
>>> try:
... runner.run(test)
... finally:
... sys.stdin = real_stdin
--Return--
> <doctest test.test_doctest.test_pdb_set_trace[8]>(3)calls_set_trace()->None
-> import pdb; pdb.set_trace()
(Pdb) print y
2
(Pdb) up
> <doctest foo[1]>(1)<module>()
-> calls_set_trace()
(Pdb) print x
1
(Pdb) continue
(0, 2)
During interactive debugging, source code is shown, even for
doctest examples:
>>> doc = '''
... >>> def f(x):
... ... g(x*2)
... >>> def g(x):
... ... print x+3
... ... import pdb; pdb.set_trace()
... >>> f(3)
... '''
>>> test = parser.get_doctest(doc, globals(), "foo", "foo.py", 0)
>>> real_stdin = sys.stdin
>>> sys.stdin = _FakeInput([
... 'list', # list source from example 2
... 'next', # return from g()
... 'list', # list source from example 1
... 'next', # return from f()
... 'list', # list source from example 3
... 'continue', # stop debugging
... ''])
>>> try: runner.run(test)
... finally: sys.stdin = real_stdin
... # doctest: +NORMALIZE_WHITESPACE
--Return--
> <doctest foo[1]>(3)g()->None
-> import pdb; pdb.set_trace()
(Pdb) list
1 def g(x):
2 print x+3
3 -> import pdb; pdb.set_trace()
[EOF]
(Pdb) next
--Return--
> <doctest foo[0]>(2)f()->None
-> g(x*2)
(Pdb) list
1 def f(x):
2 -> g(x*2)
[EOF]
(Pdb) next
--Return--
> <doctest foo[2]>(1)<module>()->None
-> f(3)
(Pdb) list
1 -> f(3)
[EOF]
(Pdb) continue
**********************************************************************
File "foo.py", line 7, in foo
Failed example:
f(3)
Expected nothing
Got:
9
(1, 3)
"""
def test_pdb_set_trace_nested():
"""This illustrates more-demanding use of set_trace with nested functions.
>>> class C(object):
... def calls_set_trace(self):
... y = 1
... import pdb; pdb.set_trace()
... self.f1()
... y = 2
... def f1(self):
... x = 1
... self.f2()
... x = 2
... def f2(self):
... z = 1
... z = 2
>>> calls_set_trace = C().calls_set_trace
>>> doc = '''
... >>> a = 1
... >>> calls_set_trace()
... '''
>>> parser = doctest.DocTestParser()
>>> runner = doctest.DocTestRunner(verbose=False)
>>> test = parser.get_doctest(doc, globals(), "foo", "foo.py", 0)
>>> real_stdin = sys.stdin
>>> sys.stdin = _FakeInput([
... 'print y', # print data defined in the function
... 'step', 'step', 'step', 'step', 'step', 'step', 'print z',
... 'up', 'print x',
... 'up', 'print y',
... 'up', 'print foo',
... 'continue', # stop debugging
... ''])
>>> try:
... runner.run(test)
... finally:
... sys.stdin = real_stdin
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
-> self.f1()
(Pdb) print y
1
(Pdb) step
--Call--
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(7)f1()
-> def f1(self):
(Pdb) step
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(8)f1()
-> x = 1
(Pdb) step
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
-> self.f2()
(Pdb) step
--Call--
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(11)f2()
-> def f2(self):
(Pdb) step
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(12)f2()
-> z = 1
(Pdb) step
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(13)f2()
-> z = 2
(Pdb) print z
1
(Pdb) up
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(9)f1()
-> self.f2()
(Pdb) print x
1
(Pdb) up
> <doctest test.test_doctest.test_pdb_set_trace_nested[0]>(5)calls_set_trace()
-> self.f1()
(Pdb) print y
1
(Pdb) up
> <doctest foo[1]>(1)<module>()
-> calls_set_trace()
(Pdb) print foo
*** NameError: name 'foo' is not defined
(Pdb) continue
(0, 2)
"""
def test_DocTestSuite():
"""DocTestSuite creates a unittest test suite from a doctest.
We create a suite by providing a module. A module can be provided
by passing a module object:
>>> import unittest
>>> import test.sample_doctest
>>> suite = doctest.DocTestSuite(test.sample_doctest)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=4>
We can also supply the module by name:
>>> suite = doctest.DocTestSuite('test.sample_doctest')
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=4>
We can use the current module:
>>> suite = test.sample_doctest.test_suite()
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=4>
We can supply global variables. If we pass globs, they will be
used instead of the module globals. Here we'll pass an empty
globals, triggering an extra error:
>>> suite = doctest.DocTestSuite('test.sample_doctest', globs={})
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=5>
Alternatively, we can provide extra globals. Here we'll make an
error go away by providing an extra global variable:
>>> suite = doctest.DocTestSuite('test.sample_doctest',
... extraglobs={'y': 1})
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=3>
You can pass option flags. Here we'll cause an extra error
by disabling the blank-line feature:
>>> suite = doctest.DocTestSuite('test.sample_doctest',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=5>
You can supply setUp and tearDown functions:
>>> def setUp(t):
... import test.test_doctest
... test.test_doctest.sillySetup = True
>>> def tearDown(t):
... import test.test_doctest
... del test.test_doctest.sillySetup
Here, we installed a silly variable that the test expects:
>>> suite = doctest.DocTestSuite('test.sample_doctest',
... setUp=setUp, tearDown=tearDown)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=3>
But the tearDown restores sanity:
>>> import test.test_doctest
>>> test.test_doctest.sillySetup
Traceback (most recent call last):
...
AttributeError: 'module' object has no attribute 'sillySetup'
The setUp and tearDown functions are passed test objects. Here
we'll use the setUp function to supply the missing variable y:
>>> def setUp(test):
... test.globs['y'] = 1
>>> suite = doctest.DocTestSuite('test.sample_doctest', setUp=setUp)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=9 errors=0 failures=3>
Here, we didn't need to use a tearDown function because we
modified the test globals, which are a copy of the
sample_doctest module dictionary. The test globals are
automatically cleared for us after a test.
"""
def test_DocFileSuite():
"""We can test tests found in text files using a DocFileSuite.
We create a suite by providing the names of one or more text
files that include examples:
>>> import unittest
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest2.txt',
... 'test_doctest4.txt')
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=3 errors=0 failures=3>
The test files are looked for in the directory containing the
calling module. A package keyword argument can be provided to
specify a different relative location.
>>> import unittest
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest2.txt',
... 'test_doctest4.txt',
... package='test')
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=3 errors=0 failures=3>
'/' should be used as a path separator. It will be converted
to a native separator at run time:
>>> suite = doctest.DocFileSuite('../test/test_doctest.txt')
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=1 errors=0 failures=1>
If DocFileSuite is used from an interactive session, then files
are resolved relative to the directory of sys.argv[0]:
>>> import new, os.path, test.test_doctest
>>> save_argv = sys.argv
>>> sys.argv = [test.test_doctest.__file__]
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... package=new.module('__main__'))
>>> sys.argv = save_argv
By setting `module_relative=False`, os-specific paths may be
used (including absolute paths and paths relative to the
working directory):
>>> # Get the absolute path of the test package.
>>> test_doctest_path = os.path.abspath(test.test_doctest.__file__)
>>> test_pkg_path = os.path.split(test_doctest_path)[0]
>>> # Use it to find the absolute path of test_doctest.txt.
>>> test_file = os.path.join(test_pkg_path, 'test_doctest.txt')
>>> suite = doctest.DocFileSuite(test_file, module_relative=False)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=1 errors=0 failures=1>
It is an error to specify `package` when `module_relative=False`:
>>> suite = doctest.DocFileSuite(test_file, module_relative=False,
... package='test')
Traceback (most recent call last):
ValueError: Package may only be specified for module-relative paths.
You can specify initial global variables:
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest2.txt',
... 'test_doctest4.txt',
... globs={'favorite_color': 'blue'})
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=3 errors=0 failures=2>
In this case, we supplied a missing favorite color. You can
provide doctest options:
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest2.txt',
... 'test_doctest4.txt',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE,
... globs={'favorite_color': 'blue'})
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=3 errors=0 failures=3>
And, you can provide setUp and tearDown functions:
>>> def setUp(t):
... import test.test_doctest
... test.test_doctest.sillySetup = True
>>> def tearDown(t):
... import test.test_doctest
... del test.test_doctest.sillySetup
Here, we installed a silly variable that the test expects:
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest2.txt',
... 'test_doctest4.txt',
... setUp=setUp, tearDown=tearDown)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=3 errors=0 failures=2>
But the tearDown restores sanity:
>>> import test.test_doctest
>>> test.test_doctest.sillySetup
Traceback (most recent call last):
...
AttributeError: 'module' object has no attribute 'sillySetup'
The setUp and tearDown functions are passed test objects.
Here, we'll use a setUp function to set the favorite color in
test_doctest.txt:
>>> def setUp(test):
... test.globs['favorite_color'] = 'blue'
>>> suite = doctest.DocFileSuite('test_doctest.txt', setUp=setUp)
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=1 errors=0 failures=0>
Here, we didn't need to use a tearDown function because we
modified the test globals. The test globals are
automatically cleared for us after a test.
Tests in a file run using `DocFileSuite` can also access the
`__file__` global, which is set to the name of the file
containing the tests:
>>> suite = doctest.DocFileSuite('test_doctest3.txt')
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=1 errors=0 failures=0>
If the tests contain non-ASCII characters, we have to specify which
encoding the file is encoded with. We do so by using the `encoding`
parameter:
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest2.txt',
... 'test_doctest4.txt',
... encoding='utf-8')
>>> suite.run(unittest.TestResult())
<unittest.TestResult run=3 errors=0 failures=2>
"""
def test_trailing_space_in_test():
"""
Trailing spaces in expected output are significant:
>>> x, y = 'foo', ''
>>> print x, y
foo \n
"""
def test_unittest_reportflags():
"""Default unittest reporting flags can be set to control reporting
Here, we'll set the REPORT_ONLY_FIRST_FAILURE option so we see
only the first failure of each test. First, we'll look at the
output without the flag. The test_doctest.txt file has two
tests. They both fail if blank lines are disabled:
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE)
>>> import unittest
>>> result = suite.run(unittest.TestResult())
>>> print result.failures[0][1] # doctest: +ELLIPSIS
Traceback ...
Failed example:
favorite_color
...
Failed example:
if 1:
...
Note that we see both failures displayed.
>>> old = doctest.set_unittest_reportflags(
... doctest.REPORT_ONLY_FIRST_FAILURE)
Now, when we run the test:
>>> result = suite.run(unittest.TestResult())
>>> print result.failures[0][1] # doctest: +ELLIPSIS
Traceback ...
Failed example:
favorite_color
Exception raised:
...
NameError: name 'favorite_color' is not defined
<BLANKLINE>
<BLANKLINE>
We get only the first failure.
If we give any reporting options when we set up the tests,
however:
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE | doctest.REPORT_NDIFF)
Then the default reporting options are ignored:
>>> result = suite.run(unittest.TestResult())
>>> print result.failures[0][1] # doctest: +ELLIPSIS
Traceback ...
Failed example:
favorite_color
...
Failed example:
if 1:
print 'a'
print
print 'b'
Differences (ndiff with -expected +actual):
a
- <BLANKLINE>
+
b
<BLANKLINE>
<BLANKLINE>
Test runners can restore the formatting flags after they run:
>>> ignored = doctest.set_unittest_reportflags(old)
"""
def test_testfile(): r"""
Tests for the `testfile()` function. This function runs all the
doctest examples in a given file. In its simplest invocation, it is
called with the name of a file, which is taken to be relative to the
calling module. The return value is (#failures, #tests).
>>> doctest.testfile('test_doctest.txt') # doctest: +ELLIPSIS
**********************************************************************
File "...", line 6, in test_doctest.txt
Failed example:
favorite_color
Exception raised:
...
NameError: name 'favorite_color' is not defined
**********************************************************************
1 items had failures:
1 of 2 in test_doctest.txt
***Test Failed*** 1 failures.
(1, 2)
>>> doctest.master = None # Reset master.
(Note: we'll be clearing doctest.master after each call to
`doctest.testfile`, to suppress warnings about multiple tests with the
same name.)
Globals may be specified with the `globs` and `extraglobs` parameters:
>>> globs = {'favorite_color': 'blue'}
>>> doctest.testfile('test_doctest.txt', globs=globs)
(0, 2)
>>> doctest.master = None # Reset master.
>>> extraglobs = {'favorite_color': 'red'}
>>> doctest.testfile('test_doctest.txt', globs=globs,
... extraglobs=extraglobs) # doctest: +ELLIPSIS
**********************************************************************
File "...", line 6, in test_doctest.txt
Failed example:
favorite_color
Expected:
'blue'
Got:
'red'
**********************************************************************
1 items had failures:
1 of 2 in test_doctest.txt
***Test Failed*** 1 failures.
(1, 2)
>>> doctest.master = None # Reset master.
The file may be made relative to a given module or package, using the
optional `module_relative` parameter:
>>> doctest.testfile('test_doctest.txt', globs=globs,
... module_relative='test')
(0, 2)
>>> doctest.master = None # Reset master.
Verbosity can be increased with the optional `verbose` parameter:
>>> doctest.testfile('test_doctest.txt', globs=globs, verbose=True)
Trying:
favorite_color
Expecting:
'blue'
ok
Trying:
if 1:
print 'a'
print
print 'b'
Expecting:
a
<BLANKLINE>
b
ok
1 items passed all tests:
2 tests in test_doctest.txt
2 tests in 1 items.
2 passed and 0 failed.
Test passed.
(0, 2)
>>> doctest.master = None # Reset master.
The name of the test may be specified with the optional `name`
parameter:
>>> doctest.testfile('test_doctest.txt', name='newname')
... # doctest: +ELLIPSIS
**********************************************************************
File "...", line 6, in newname
...
(1, 2)
>>> doctest.master = None # Reset master.
The summary report may be suppressed with the optional `report`
parameter:
>>> doctest.testfile('test_doctest.txt', report=False)
... # doctest: +ELLIPSIS
**********************************************************************
File "...", line 6, in test_doctest.txt
Failed example:
favorite_color
Exception raised:
...
NameError: name 'favorite_color' is not defined
(1, 2)
>>> doctest.master = None # Reset master.
The optional keyword argument `raise_on_error` can be used to raise an
exception on the first error (which may be useful for postmortem
debugging):
>>> doctest.testfile('test_doctest.txt', raise_on_error=True)
... # doctest: +ELLIPSIS
Traceback (most recent call last):
UnexpectedException: ...
>>> doctest.master = None # Reset master.
If the tests contain non-ASCII characters, they might fail, since the
encoding in use is unknown. The encoding can be specified
using the optional keyword argument `encoding`:
>>> doctest.testfile('test_doctest4.txt') # doctest: +ELLIPSIS
**********************************************************************
File "...", line 7, in test_doctest4.txt
Failed example:
u'...'
Expected:
u'f\xf6\xf6'
Got:
u'f\xc3\xb6\xc3\xb6'
**********************************************************************
...
**********************************************************************
1 items had failures:
2 of 4 in test_doctest4.txt
***Test Failed*** 2 failures.
(2, 4)
>>> doctest.master = None # Reset master.
>>> doctest.testfile('test_doctest4.txt', encoding='utf-8')
(0, 4)
>>> doctest.master = None # Reset master.
"""
# old_test1, ... used to live in doctest.py, but cluttered it. Note
# that these use the deprecated doctest.Tester, so should go away (or
# be rewritten) someday.
# Ignore all warnings about the use of class Tester in this module.
# Note that the name of this module may differ depending on how it's
# imported, so the use of __name__ is important.
warnings.filterwarnings("ignore", "class Tester", DeprecationWarning,
__name__, 0)
def old_test1(): r"""
>>> from doctest import Tester
>>> t = Tester(globs={'x': 42}, verbose=0)
>>> t.runstring(r'''
... >>> x = x * 2
... >>> print x
... 42
... ''', 'XYZ')
**********************************************************************
Line 3, in XYZ
Failed example:
print x
Expected:
42
Got:
84
(1, 2)
>>> t.runstring(">>> x = x * 2\n>>> print x\n84\n", 'example2')
(0, 2)
>>> t.summarize()
**********************************************************************
1 items had failures:
1 of 2 in XYZ
***Test Failed*** 1 failures.
(1, 4)
>>> t.summarize(verbose=1)
1 items passed all tests:
2 tests in example2
**********************************************************************
1 items had failures:
1 of 2 in XYZ
4 tests in 2 items.
3 passed and 1 failed.
***Test Failed*** 1 failures.
(1, 4)
"""
def old_test2(): r"""
>>> from doctest import Tester
>>> t = Tester(globs={}, verbose=1)
>>> test = r'''
... # just an example
... >>> x = 1 + 2
... >>> x
... 3
... '''
>>> t.runstring(test, "Example")
Running string Example
Trying:
x = 1 + 2
Expecting nothing
ok
Trying:
x
Expecting:
3
ok
0 of 2 examples failed in string Example
(0, 2)
"""
def old_test3(): r"""
>>> from doctest import Tester
>>> t = Tester(globs={}, verbose=0)
>>> def _f():
... '''Trivial docstring example.
... >>> assert 2 == 2
... '''
... return 32
...
>>> t.rundoc(_f) # expect 0 failures in 1 example
(0, 1)
"""
def old_test4(): """
>>> import new
>>> m1 = new.module('_m1')
>>> m2 = new.module('_m2')
>>> test_data = \"""
... def _f():
... '''>>> assert 1 == 1
... '''
... def g():
... '''>>> assert 2 != 1
... '''
... class H:
... '''>>> assert 2 > 1
... '''
... def bar(self):
... '''>>> assert 1 < 2
... '''
... \"""
>>> exec test_data in m1.__dict__
>>> exec test_data in m2.__dict__
>>> m1.__dict__.update({"f2": m2._f, "g2": m2.g, "h2": m2.H})
Tests that objects outside m1 are excluded:
>>> from doctest import Tester
>>> t = Tester(globs={}, verbose=0)
>>> t.rundict(m1.__dict__, "rundict_test", m1) # f2 and g2 and h2 skipped
(0, 4)
Once more, not excluding stuff outside m1:
>>> t = Tester(globs={}, verbose=0)
>>> t.rundict(m1.__dict__, "rundict_test_pvt") # None are skipped.
(0, 8)
The exclusion of objects from outside the designated module is
meant to be invoked automagically by testmod.
>>> doctest.testmod(m1, verbose=False)
(0, 4)
"""
######################################################################
## Main
######################################################################
def test_main():
# Check the doctest cases in doctest itself:
test_support.run_doctest(doctest, verbosity=True)
# Check the doctest cases defined here:
from test import test_doctest
test_support.run_doctest(test_doctest, verbosity=True)
import trace, sys, re, StringIO
def test_coverage(coverdir):
tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix,],
trace=0, count=1)
tracer.run('reload(doctest); test_main()')
r = tracer.results()
print 'Writing coverage results...'
r.write_results(show_missing=True, summary=True,
coverdir=coverdir)
if __name__ == '__main__':
if '-c' in sys.argv:
test_coverage('/tmp/doctest.cover')
else:
test_main()
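# Invocation sketch (from the __main__ block above): running
# "python test_doctest.py" executes test_main(); passing "-c"
# ("python test_doctest.py -c") instead traces execution and writes
# line-coverage results under /tmp/doctest.cover.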
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.5/Lib/test/test_doctest.py
|
Python
|
mit
| 73,860
|
__author__ = "Nitin Kumar, Rick Sherman"
__credits__ = "Jeremy Schulman"
import unittest
from nose.plugins.attrib import attr
from mock import patch
import os
from jnpr.junos import Device
from jnpr.junos.facts.swver import facts_software_version as software_version, version_info
from ncclient.manager import Manager, make_device_handler
from ncclient.transport import SSHSession
@attr('unit')
class TestVersionInfo(unittest.TestCase):
def test_version_info_after_type_len_else(self):
self.assertIsNone(version_info('12.1X46-D10').build)
def test_version_info_constructor_else_exception(self):
self.assertEqual(version_info('11.4R7').build, '7')
def test_version_info_repr(self):
self.assertEqual(repr(version_info('11.4R7.5')),
'junos.version_info(major=(11, 4), '
'type=R, minor=7, build=5)')
def test_version_info_lt(self):
self.assertLess(version_info('13.3-20131120'), (14, 1))
def test_version_info_lt_eq(self):
self.assertLessEqual(version_info('13.3-20131120'), (14, 1))
def test_version_info_gt(self):
self.assertGreater(version_info('13.3-20131120'), (12, 1))
def test_version_info_gt_eq(self):
self.assertGreaterEqual(version_info('13.3-20131120'), (12, 1))
def test_version_info_eq(self):
self.assertEqual(version_info('13.3-20131120'), (13, 3))
def test_version_info_not_eq(self):
self.assertNotEqual(version_info('13.3-20131120'), (15, 3))
@attr('unit')
class TestSrxCluster(unittest.TestCase):
@patch('ncclient.manager.connect')
def setUp(self, mock_connect):
mock_connect.side_effect = self._mock_manager
self.dev = Device(host='1.1.1.1', user='rick', password='password123',
gather_facts=False)
self.dev.open()
self.facts = {}
@patch('jnpr.junos.Device.execute')
def test_swver(self, mock_execute):
mock_execute.side_effect = self._mock_manager
self.facts['master'] = 'RE0'
software_version(self.dev, self.facts)
self.assertEqual(self.facts['version'], '12.3R6.6')
@patch('jnpr.junos.Device.execute')
def test_swver_hostname_none(self, mock_execute):
mock_execute.side_effect = self._mock_manager
self.facts['master'] = 'RE5'
self.facts['version_RE5'] = '15.3R6.6'
software_version(self.dev, self.facts)
self.assertEqual(self.facts['version'], '15.3R6.6')
# --> JLS, there should always be a facts['master'] assigned.
# @patch('jnpr.junos.Device.execute')
# def test_swver_master_none(self, mock_execute):
# mock_execute.side_effect = self._mock_manager
# self.facts['master'] = None
# software_version(self.dev, self.facts)
# self.assertEqual(self.facts['version'], '12.3R6.6')
@patch('jnpr.junos.Device.execute')
@patch('jnpr.junos.facts.swver.re.findall')
def test_swver_exception_handling(self, mock_re_findall, mock_execute):
mock_execute.side_effect = self._mock_manager
mock_re_findall.side_effect = IndexError
self.facts['master'] = 'RE0'
software_version(self.dev, self.facts)
self.assertEqual(self.facts['version'], '0.0I0.0')
def _read_file(self, fname):
from ncclient.xml_ import NCElement
fpath = os.path.join(os.path.dirname(__file__),
'rpc-reply', fname)
foo = open(fpath).read()
rpc_reply = NCElement(foo, self.dev._conn.
_device_handler.transform_reply())\
._NCElement__doc[0]
return rpc_reply
def _mock_manager(self, *args, **kwargs):
if kwargs:
device_params = kwargs['device_params']
device_handler = make_device_handler(device_params)
session = SSHSession(device_handler)
return Manager(session, device_handler)
if args:
return self._read_file(args[0].tag + '.xml')
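# A minimal sketch of what the assertions above exercise (assumes a
# jnpr.junos install; version string illustrative):
#
#     from jnpr.junos.facts.swver import version_info
#     v = version_info('12.3R6.6')
#     repr(v)        # "junos.version_info(major=(12, 3), type=R, minor=6, build=6)"
#     v > (12, 1)    # True -- instances compare against (major, minor) tuples
#     v == (12, 3)   # True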
|
dgjnpr/py-junos-eznc
|
tests/unit/facts/test_swver.py
|
Python
|
apache-2.0
| 4,029
|
# -*- coding: utf-8 -*-
"""
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import control
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser
class source:
def __init__(self):
self.priority = 1
self.language = ['de']
self.domains = ['kinow.to']
self.base_link = 'http://kinow.to'
self.search_link = '/suche.html'
self.year_link = '/jahr/%d.html'
self.type_link = '/%s.html'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = self.__search([localtitle] + source_utils.aliases_to_array(aliases), year, 'filme')
if not url and title != localtitle: url = self.__search([title] + source_utils.aliases_to_array(aliases), year, 'filme')
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = self.__search([localtvshowtitle] + source_utils.aliases_to_array(aliases), year, 'serien')
if not url and tvshowtitle != localtvshowtitle: url = self.__search([tvshowtitle] + source_utils.aliases_to_array(aliases), year, 'serien')
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if not url:
return
def __get_correct_link(_url, content, checkval):
try:
if not _url:
return
_url = urlparse.urljoin(self.base_link, _url)
r = client.request(_url)
r = re.findall('<h4>%s[^>]*</h4>(.*?)<div' % content, r, re.DOTALL | re.IGNORECASE)[0]
r = re.compile('(<a.+?/a>)', re.DOTALL).findall(''.join(r))
r = [(dom_parser.parse_dom(i, 'a', req='href'), dom_parser.parse_dom(i, 'span')) for i in r]
r = [(i[0][0].attrs['href'], i[1][0].content) for i in r if i[0] and i[1]]
r = [(i[0], i[1] if i[1] else '0') for i in r]
r = [i[0] for i in r if int(i[1]) == int(checkval)][0]
r = re.sub('/(1080p|720p|x264|3d)', '', r, flags=re.I)
return source_utils.strip_domain(r)
except:
return
url = __get_correct_link(url, 'Staffel', season)
url = __get_correct_link(url, 'Folge', episode)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if not url:
return sources
r = client.request(urlparse.urljoin(self.base_link, url))
links = dom_parser.parse_dom(r, 'table')
links = [i.content for i in links if dom_parser.parse_dom(i, 'span', attrs={'class': re.compile('linkSearch(-a)?')})]
links = re.compile('(<a.+?/a>)', re.DOTALL).findall(''.join(links))
links = [dom_parser.parse_dom(i, 'a', req='href') for i in links if re.findall('(.+?)\s*\(\d+\)\s*<', i)]
links = [i[0].attrs['href'] for i in links if i]
url = re.sub('/streams-\d+', '', url)
for link in links:
if '/englisch/' in link: continue
if link != url: r = client.request(urlparse.urljoin(self.base_link, link))
quality = 'SD'
info = []
detail = dom_parser.parse_dom(r, 'th', attrs={'class': 'thlink'})
detail = [dom_parser.parse_dom(i, 'a', req='href') for i in detail]
detail = [(i[0].attrs['href'], i[0].content.replace('▶', '').strip()) for i in detail if i]
if detail:
quality, info = source_utils.get_release_quality(detail[0][1])
r = client.request(urlparse.urljoin(self.base_link, detail[0][0]))
r = dom_parser.parse_dom(r, 'table')
r = [dom_parser.parse_dom(i, 'a', req=['href', 'title']) for i in r if not dom_parser.parse_dom(i, 'table')]
r = [(l.attrs['href'], l.attrs['title']) for i in r for l in i if l.attrs['title']]
info = ' | '.join(info)
for stream_link, hoster in r:
valid, hoster = source_utils.is_host_valid(hoster, hostDict)
if not valid: continue
sources.append({'source': hoster, 'quality': quality, 'language': 'de', 'url': stream_link, 'info': info, 'direct': False, 'debridonly': False, 'checkquality': True})
return sources
except:
return sources
def resolve(self, url):
try:
control.sleep(5000)
url = urlparse.urljoin(self.base_link, url)
url = client.request(url, output='geturl')
if self.base_link not in url:
return url
except:
return
def __search(self, titles, year, content):
try:
t = [cleantitle.get(i) for i in set(titles) if i]
c = client.request(urlparse.urljoin(self.base_link, self.year_link % int(year)), output='cookie')
p = urllib.urlencode({'search': cleantitle.query(titles[0])})
c = client.request(urlparse.urljoin(self.base_link, self.search_link), cookie=c, post=p, output='cookie')
r = client.request(urlparse.urljoin(self.base_link, self.type_link % content), cookie=c, post=p)
r = dom_parser.parse_dom(r, 'div', attrs={'id': 'content'})
r = dom_parser.parse_dom(r, 'tr')
r = [dom_parser.parse_dom(i, 'td') for i in r]
r = [dom_parser.parse_dom(i, 'a', req='href') for i in r]
r = [(i[0].attrs['href'], i[0].content, i[1].content) for i in r if i]
r = [(i[0], i[1], re.findall('(.+?)\s<i>\((.+?)\)<', i[1]), i[2]) for i in r]
r = [(i[0], i[2][0][0] if len(i[2]) > 0 else i[1], i[2][0][1] if len(i[2]) > 0 else '', i[3]) for i in r]
r = [i[0] for i in r if (cleantitle.get(i[1]) in t or cleantitle.get(i[2]) in t) and i[3] == year][0]
return source_utils.strip_domain(r)
except:
return
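# Shape of each entry appended by sources() above (values illustrative):
#
#     {'source': 'somehoster', 'quality': 'SD', 'language': 'de',
#      'url': '/some/stream.html', 'info': '720p', 'direct': False,
#      'debridonly': False, 'checkquality': True}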
|
repotvsupertuga/repo
|
script.module.stream.tvsupertuga.addon/resources/lib/sources/de/kinow.py
|
Python
|
gpl-2.0
| 7,095
|
from flask import Blueprint, flash, redirect, render_template, request, url_for,send_from_directory, abort, make_response, send_file, session
from octs.user.models import Course,Task, User, Message, Team,TeamUserRelation, File,Source,Term,TaskTeamRelation, Tag,UserScore
from .forms import CourseForm,TaskForm, FileForm,TaskScoreForm, RejectReasonForm
from octs.database import db
from flask_login import current_user
from octs.extensions import data_uploader
import time
import os,zipfile
from pypinyin import lazy_pinyin
import xlwt
blueprint = Blueprint('teacher', __name__, url_prefix='/teacher',static_folder='../static')
@blueprint.route('/<teacherid>/course/')
def course(teacherid):
teacher = User.query.filter_by(id=teacherid).first()
courseList = teacher.courses
term = Term.query.order_by(Term.id.desc()).first()
return render_template('teacher/course.html', list=courseList,term=term)
@blueprint.route('/<courseid>/task/<taskid>')
def task_detail(courseid,taskid):
taskList = Task.query.filter_by(id=taskid).all()
return render_template('teacher/taskdetail.html',list=taskList,courseid=courseid)
@blueprint.route('/<teacherid>/course/edit/<id>',methods=['GET','POST'])
def course_edit(teacherid, id):
course = Course.query.filter_by(id=id).first()
form = CourseForm()
if form.validate_on_submit():
course.course_introduction = form.course_introduction.data
course.course_outline=form.course_outline.data
userlist=User.query.all()
for user in userlist:
user.team_min=form.low_member.data
user.team_max=form.high_member.data
db.session.add(user)
db.session.add(course)
db.session.commit()
return redirect(url_for('teacher.course', teacherid=teacherid))
form.coursename.data=course.name
form.credit.data=course.credit
form.location.data=course.location
form.start_time.data=course.start_time
form.course_introduction.data=course.course_introduction
form.course_outline.data=course.course_outline
user=User.query.filter(User.id==teacherid).first()
form.low_member.data=user.team_min
form.high_member.data=user.team_max
return render_template('teacher/course_edit.html',form=form)
@blueprint.route('/course/student/<id>')
def student(id):
course=Course.query.filter_by(id=id).first()
studentList = course.users
return render_template('teacher/student.html',list=studentList)
@blueprint.route('/mainpage/')
def home():
return render_template('teacher/mainpage.html')
@blueprint.route('/<courseid>/task')
def task(courseid):
taskList = Task.query.filter_by(course_id=courseid).all()
return render_template('teacher/task.html',list = taskList, courseid=courseid)
@blueprint.route('/<courseid>/task/add',methods = ['GET','POST'])
def add(courseid):
form = TaskForm()
if form.validate_on_submit():
task = Task()
task.name = form.taskname.data
task.start_time = form.starttime.data
task.end_time = form.endtime.data
task.submit_num = form.subnum.data
task.weight = form.weight.data
task.teacher = current_user.name
task.content = form.content.data
course = Course.query.filter_by(id=courseid).first()
course.tasks.append(task)
teams = course.teams
for team in teams:
ttr = TaskTeamRelation()
ttr.team = team
ttr.task = task
db.session.add(ttr)
db.session.add(task)
db.session.add(course)
db.session.commit()
return redirect(url_for('teacher.task', courseid=courseid))
return render_template('teacher/add.html',form=form, courseid=courseid)
@blueprint.route('/<courseid>/task/edit/<userid>/<id>',methods = ['GET','POST'])
def task_edit(courseid, userid,id):
form = TaskForm()
task = Task.query.filter_by(id = id).first()
if form.validate_on_submit():
task.name = form.taskname.data
task.start_time = form.starttime.data
task.end_time = form.endtime.data
task.content = form.content.data
task.submit_num = form.subnum.data
task.weight = form.weight.data
db.session.add(task)
db.session.commit()
return redirect(url_for('teacher.task', courseid=courseid))
form.taskname.data = task.name
form.starttime.data = task.start_time
form.endtime.data = task.end_time
form.content.data = task.content
form.subnum.data = task.submit_num
form.weight.data = task.weight
return render_template('teacher/edit.html',form = form, courseid=courseid, taskid=id)
@blueprint.route('/<courseid>/task/delete/<taskid>',methods=['GET','POST'])
def delete(courseid, taskid):
file_records= File.query.filter_by(task_id=taskid).all()
for file_record in file_records:
os.remove(file_record.path)
db.session.delete(file_record)
task = Task.query.filter_by(id=taskid).first()
ttrs = TaskTeamRelation.query.filter_by(task_id=task.id).all()
for ttr in ttrs:
db.session.delete(ttr)
db.session.delete(task)
db.session.commit()
    flash('删除成功')  # "Deleted successfully"
return redirect(url_for('teacher.task', courseid=courseid))
@blueprint.route('/team',methods=['GET', 'POST'])
def team():
teamlist = Team.query.join(TeamUserRelation, TeamUserRelation.team_id == Team.id).filter(
TeamUserRelation.team_id == Team.id).filter(TeamUserRelation.is_master == True).join(
User, TeamUserRelation.user_id == User.id).filter(TeamUserRelation.user_id == User.id).add_columns(
Team.name, User.username, Team.status, Team.id, User.user_id, User.in_team)
return render_template('teacher/team.html',list=teamlist)
@blueprint.route('/task/score<taskid>/download')
def score_download(taskid):
teamidList = TaskTeamRelation.query.filter_by(task_id=taskid).all()
teams = []
for teamid in teamidList:
team = Team.query.filter_by(id=teamid.team_id).first()
teams.append(team)
task = Task.query.filter_by(id=taskid).first()
book = xlwt.Workbook()
alignment = xlwt.Alignment() # Create Alignment
alignment.horz = xlwt.Alignment.HORZ_CENTER # May be: HORZ_GENERAL, HORZ_LEFT, HORZ_CENTER, HORZ_RIGHT, HORZ_FILLED, HORZ_JUSTIFIED, HORZ_CENTER_ACROSS_SEL, HORZ_DISTRIBUTED
alignment.vert = xlwt.Alignment.VERT_CENTER # May be: VERT_TOP, VERT_CENTER, VERT_BOTTOM, VERT_JUSTIFIED, VERT_DISTRIBUTED
style = xlwt.XFStyle() # Create Style
style.alignment = alignment # Add Alignment to Style
    sheet1 = book.add_sheet('本次作业信息('+task.name+')',cell_overwrite_ok=True)  # sheet: "Assignment info (<task name>)"
    row0 = ['团队id','团队名称','作业得分']  # headers: team id, team name, assignment score
for i in range(0,len(row0)):
sheet1.write(0,i,row0[i], style)
row_num =1
for team in teams:
sheet1.write(row_num,0,team.id,style)
sheet1.write(row_num,1,team.name,style)
score = TaskTeamRelation.query.filter(TaskTeamRelation.team_id==team.id).filter(TaskTeamRelation.task_id==taskid).first()
sheet1.write(row_num,2,score.score,style)
row_num=row_num+1
filename = 'score_table_'+ str(time.time()) + '.xls'
book.save(os.path.join(data_uploader.path('',folder='tmp'),filename))
return send_from_directory(data_uploader.path('', folder='tmp'), filename, as_attachment=True)
@blueprint.route('/team/download')
def team_download():
teams = Team.query.filter_by(status=3).all()
book = xlwt.Workbook()
alignment = xlwt.Alignment() # Create Alignment
alignment.horz = xlwt.Alignment.HORZ_CENTER # May be: HORZ_GENERAL, HORZ_LEFT, HORZ_CENTER, HORZ_RIGHT, HORZ_FILLED, HORZ_JUSTIFIED, HORZ_CENTER_ACROSS_SEL, HORZ_DISTRIBUTED
alignment.vert = xlwt.Alignment.VERT_CENTER # May be: VERT_TOP, VERT_CENTER, VERT_BOTTOM, VERT_JUSTIFIED, VERT_DISTRIBUTED
style = xlwt.XFStyle() # Create Style
style.alignment = alignment # Add Alignment to Style
    sheet1 = book.add_sheet('团队信息', cell_overwrite_ok=True)  # sheet: "Team info"
    row0 = ['团队id', '团队名称', '姓名', '学号', '性别', 'Master']  # headers: team id, team name, name, student id, gender, master
for i in range(0, len(row0)):
sheet1.write(0, i, row0[i])
row_num = 1
for team in teams:
turs = TeamUserRelation.query.filter_by(team_id=team.id).all()
turs_length = len(turs)
sheet1.write_merge(row_num, row_num + turs_length - 1, 0, 0, team.id, style)
sheet1.write_merge(row_num, row_num + turs_length - 1, 1, 1, team.name, style)
for i in range(turs_length):
if turs[i].is_accepted:
sheet1.write(row_num+i, 2, turs[i].user.name)
sheet1.write(row_num + i, 3, turs[i].user.user_id)
                gender = '男' if turs[i].user.gender==False else '女'  # '男' = male, '女' = female
sheet1.write(row_num + i, 4, gender)
if turs[i].is_master == True:
sheet1.write(row_num + i, 5, '√')
row_num = row_num + turs_length
filename = 'team_table_' + str(time.time()) + '.xls'
book.save(os.path.join(data_uploader.path('', folder='tmp'), filename))
return send_from_directory(data_uploader.path('', folder='tmp'), filename, as_attachment=True)
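# Note on the write_merge() calls above: xlwt's signature is
# write_merge(r1, r2, c1, c2, label, style), so the team id/name cells
# span all of a team's member rows vertically. Illustrative call:
#
#     sheet1.write_merge(1, 3, 0, 0, team.id, style)  # rows 1-3, column 0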
@blueprint.route('/team/permit/<teacherid>/<teamid>')
def permit(teacherid,teamid):
team=Team.query.filter(Team.id==teamid).first()
team.status=3
db.session.add(team)
db.session.commit()
stulist=TeamUserRelation.query.filter(TeamUserRelation.team_id==teamid).filter(TeamUserRelation.is_accepted==True).all()
for stu in stulist:
        Message.sendMessage(teacherid,stu.user_id,'提交团队申请已通过')  # "Your team application has been approved"
    flash('已通过该团队申请!')  # "Team application approved!"
return redirect(url_for('teacher.team'))
@blueprint.route('/team/rejectreason/<teacherid>/<teamid>',methods=['GET','POST'])
def rejectreason(teacherid,teamid):
form = RejectReasonForm()
if form.validate_on_submit():
reason = form.content.data
teamlist = TeamUserRelation.query.filter(TeamUserRelation.team_id == teamid).filter(TeamUserRelation.is_accepted == True).all()
for user in teamlist:
            Message.sendMessage(teacherid,user.user_id,'团队申请已驳回:'+reason)  # "Team application rejected: <reason>"
return redirect(url_for('teacher.reject',teacherid = teacherid,teamid = teamid))
return render_template('teacher/reject_reason.html',teacherid = teacherid,teamid = teamid,form=form)
@blueprint.route('/team/reject/<teacherid>/<teamid>')
def reject(teacherid,teamid):
team=Team.query.filter(Team.id==teamid).first()
teamuser=TeamUserRelation.query.filter(TeamUserRelation.team_id==teamid).all()
for stu in teamuser:
user=User.query.filter(User.id==stu.user_id).first()
user.in_team=False
#Message.sendMessage(teacherid,user.id,'提交申请已被驳回')
db.session.add(user)
db.session.delete(stu)
db.session.delete(team)
db.session.commit()
    flash('已驳回该团队申请!')  # "Team application rejected!"
return redirect(url_for('teacher.team'))
@blueprint.route('team/detail/<teamid>')
def team_detail(teamid):
teamlist=Team.query.filter(Team.id==teamid).join(TeamUserRelation,TeamUserRelation.team_id==Team.id).join(
User,User.id==TeamUserRelation.user_id).add_columns(User.name,User.gender,User.user_id).all()
return render_template('teacher/teamdetail.html',list=teamlist)
@blueprint.route('/team/adjustion/adjust',methods=['GET', 'POST'])
def to_adjust():
teamlist1=Team.query.join(TeamUserRelation,TeamUserRelation.team_id==Team.id).filter(Team.status==1).filter(
TeamUserRelation.is_master==True).join(User,User.id==TeamUserRelation.user_id).add_columns(
Team.name,Team.status,User.username,Team.id).all()
teamlist2 = Team.query.join(TeamUserRelation,TeamUserRelation.team_id==Team.id).filter(Team.status==3).filter(
TeamUserRelation.is_master==True).join(User,User.id==TeamUserRelation.user_id).add_columns(
Team.name,Team.status,User.username,Team.id).all()
teamlist=teamlist1+teamlist2
return render_template('teacher/adjust.html',list=teamlist)
@blueprint.route('/team/adjustion/adjust/<teamid>',methods=['GET', 'POST'])
def team_adjust(teamid):
teamlist = Team.query.filter(Team.id == teamid).join(TeamUserRelation, TeamUserRelation.team_id == Team.id).join(
User, User.id == TeamUserRelation.user_id).add_columns(User.name, User.gender, User.user_id,TeamUserRelation.user_id,Team.id).all()
otherteam=Team.query.filter(Team.id!=teamid).filter(Team.status==1).all()
if session.get('deleted_stu') is None:
session['deleted_stu'] = []
translist = session['deleted_stu']
return render_template('teacher/team_adjust.html',list=teamlist,other_team=otherteam,translist=translist)
@blueprint.route('/team/adjustion/<teacherid>/adjust/<teamid>/<userid>',methods=['GET', 'POST'])
def adjust_trans(teacherid,userid,teamid):
teamlist = Team.query.filter(Team.id == teamid).join(TeamUserRelation, TeamUserRelation.team_id == Team.id).join(
User, User.id == TeamUserRelation.user_id).add_columns(User.name, User.gender, User.user_id,
TeamUserRelation.user_id, Team.id).all()
user=User.query.join(TeamUserRelation,TeamUserRelation.user_id==userid).filter(User.id==userid).add_columns(
User.id,User.name,User.gender,TeamUserRelation.is_master).first()
user_dict = {'id':user.id,'name':user.name,'gender':user.gender}
if session.get('deleted_stu') is None:
session['deleted_stu'] = []
translist = session['deleted_stu']
flag=True
for ad_stu in translist:
if(ad_stu['id']==user.id):
flag=False
            flash('该学生已在调整名单中!')  # "This student is already on the adjustment list!"
if user.is_master==True:
flag=False
        flash('该学生是本队组长!不能调整!')  # "This student is the team leader and cannot be reassigned!"
if flag:
userlist=TeamUserRelation.query.filter(TeamUserRelation.user_id==user.id).first()
userlist.is_adjust=True
db.session.add(userlist)
db.session.commit()
translist.append(user_dict)
session['deleted_stu'] = translist
return redirect(url_for('teacher.team_adjust', teacherid=teacherid, teamid=teamid))
@blueprint.route('/team/adjustion/<teacherid>/adjust/<teamid>/add/<userid>',methods=['GET', 'POST'])
def adjust_add(teacherid,userid,teamid):
userlist=TeamUserRelation.query.filter(TeamUserRelation.user_id==userid).first()
if(int(teamid)==int(userlist.team_id)):
        flash('该学生已在本团队了!')  # "This student is already on this team!"
else:
userlist.team_id=teamid
userlist.is_adjust=False
db.session.add(userlist)
db.session.commit()
        Message.sendMessage(teacherid,userid,'你已经被老师调整至其他组!请注意查看')  # "The teacher has moved you to another team; please check"
        flash('已将该学生调整到该团队!')  # "Student moved to this team!"
translist=session['deleted_stu']
for user in translist:
if user['id'] == int(userid):
translist.remove(user)
session['deleted_stu']=translist
return redirect(url_for('teacher.team_adjust', teacherid=teacherid, teamid=teamid))
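# Transfer-list mechanism shared by the adjust views above: students
# pulled out of a team are parked in session['deleted_stu'] as dicts
# (illustrative):
#
#     session['deleted_stu'] = [{'id': 12, 'name': 'Alice', 'gender': False}]
#
# adjust_add() then moves a parked student into the target team and
# removes the dict from the list.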
@blueprint.route('/<courseid>/task/<taskid>/<teacherid>/files', methods=['GET', 'POST'])
def task_files(courseid, taskid,teacherid):
form = FileForm()
file_records = File.query.filter(File.task_id==taskid).filter(File.user_id == teacherid).all()
if form.validate_on_submit():
for file in request.files.getlist('file'):
file_record = File()
file_record.user_id = current_user.id
file_record.task_id = taskid
filename = file.filename
file_record.name = filename
filetype = filename.split('.')[-1]
tmpname = str(current_user.id) + '-' + str(time.time())
file.filename = tmpname + '.' + filetype
file_record.directory = data_uploader.path('', folder='course/'+str(courseid)+'/teacher/tasks/'+str(taskid))
file_record.real_name = file.filename
file_record.path = data_uploader.path(file.filename, folder='course/'+str(courseid)+'/teacher/tasks/'+str(taskid))
data_uploader.save(file, folder='course/'+str(courseid)+'/teacher/tasks/'+str(taskid))
db.session.add(file_record)
db.session.commit()
return redirect(url_for('teacher.task_files', courseid=courseid, taskid=taskid,teacherid = teacherid))
return render_template('teacher/task_tabfile.html',form=form, file_records=file_records, courseid=courseid, taskid=taskid)
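# Storage scheme used by the upload loop above (values illustrative):
#
#     original upload:  report.pdf
#     stored on disk:   "<current_user.id>-<time.time()>.pdf", e.g. 7-1489372800.12.pdf
#     File.name      -> "report.pdf"          (display name)
#     File.real_name -> the stored file name  (used when serving downloads)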
@blueprint.route('/<courseid>/task/<taskid>/files/delete/<fileid>/<userid>', methods=['GET', 'POST'])
def task_file_delete(courseid, taskid, fileid,userid):
file_record = File.query.filter_by(id=fileid).first()
os.remove(file_record.path)
db.session.delete(file_record)
db.session.commit()
flash('删除成功')
return redirect(url_for('teacher.task_files', courseid=courseid, taskid=taskid,userid = userid))
@blueprint.route('/<courseid>/task/<taskid>/files/delete/<fileid>', methods=['GET', 'POST'])
def student_task_file_delete(courseid, taskid, fileid):
file_record = File.query.filter_by(id=fileid).first()
os.remove(file_record.path)
db.session.delete(file_record)
db.session.commit()
flash('删除成功')
return redirect(url_for('teacher.student_task', courseid=courseid, taskid=taskid))
@blueprint.route('/<courseid>/task/<taskid>/files/download/<fileid>')
def task_file_download(courseid, taskid, fileid):
file_record = File.query.filter_by(id=fileid).first()
if os.path.isfile(file_record.path):
return send_from_directory(file_record.directory, file_record.real_name, as_attachment=True, attachment_filename='_'.join(lazy_pinyin(file_record.name)))
abort(404)
@blueprint.route('/<courseid>/task/<taskid>/scores',methods=['GET', 'POST'])
def task_give_score(courseid,taskid):
tasklist=Task.query.filter(Task.id==taskid).first()
task_name = Task.query.filter(Task.id == taskid).first()
if time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))<str(tasklist.end_time):
        flash('这项作业还未截止!暂时不能批改')  # "This assignment hasn't closed yet; it can't be graded"
return render_template('teacher/task_score.html',flag=False,courseid=courseid,taskname=task_name)
else:
task_team_list=TaskTeamRelation.query.join(Task,Task.id==TaskTeamRelation.task_id).join(Team,Team.id==TaskTeamRelation.team_id
).filter(TaskTeamRelation.task_id==taskid).add_columns(Team.name,TaskTeamRelation.task_id,TaskTeamRelation.team_id,TaskTeamRelation.score,Task.weight,TaskTeamRelation.submit_num).all()
#print(task_name.name)
return render_template('teacher/task_score.html', flag=True,list=task_team_list,taskname=task_name,courseid=courseid)
@blueprint.route('/<courseid>/task/<taskid>/scores/score/<teamid>/<teacherid>',methods=['GET', 'POST'])
def task_edit_score(courseid,taskid,teamid,teacherid):
taskscore=TaskTeamRelation.query.filter(TaskTeamRelation.task_id==taskid).filter(TaskTeamRelation.team_id==teamid).first()
form = TaskScoreForm()
    if form.validate_on_submit():
        # validate the range before anything is saved; 100 itself is a legal score
        if int(form.task_score.data) > 100 or int(form.task_score.data) < 0:
            flash('分数必须在0-100之间')  # "The score must be between 0 and 100"
            return render_template('teacher/set_score.html', form=form,
                                   courseid=courseid, taskid=taskid, teamid=teamid)
        taskscore.score = form.task_score.data
        userlist = TeamUserRelation.query.filter(TeamUserRelation.team_id==teamid).all()
        for user in userlist:
            Message.sendMessage(teacherid, user.user_id, '批改意见:'+form.content.data)  # "Grading feedback: ..."
        db.session.add(taskscore)
        db.session.commit()
        flash('已经提交分数!')  # "Score submitted!"
        return redirect(url_for('teacher.task_give_score', courseid=courseid, taskid=taskid))
if taskscore.score>=0:
form.task_score.data=taskscore.score
form.content.data=''
return render_template('teacher/set_score.html',form=form,courseid=courseid,taskid=taskid,teamid=teamid)
@blueprint.route('/<courseid>/task<taskid>/scores')
def task_score(courseid,taskid):
teamidList = TaskTeamRelation.query.filter_by(task_id=taskid).all()
teams = []
for teamid in teamidList:
team = Team.query.filter_by(id=teamid.team_id).first()
teams.append(team)
task = Task.query.filter_by(id=taskid).first()
return render_template('teacher/task_one_score.html',teams=teams,task=task,courseid=courseid,taskid=taskid)
@blueprint.route('/<courseid>/task/<taskid>/files',methods = ['GET','POST'])
def student_task(courseid,taskid):
form = FileForm()
course = Course.query.filter_by(id = courseid).first()
users = course.users
masters = []
for user in users:
tur = TeamUserRelation.query.filter(TeamUserRelation.user_id == user.id).filter(TeamUserRelation.is_master == True).first()
if tur is not None:
masters.append(tur)
file_records = []
for master in masters:
user_master = User.query.filter_by(id=master.user_id).first()
file_records.append((master.team_id ,File.query.filter(File.user_id == master.user_id).filter(File.task_id == int(taskid)).all(),user_master.username))
return render_template('teacher/task_student.html',form = form,file_records=file_records,courseid = courseid,taskid = taskid)
@blueprint.route('/source/<courseid>')
def source(courseid):
form = FileForm()
course = Course.query.filter_by(id=courseid).first()
tags = course.tags
tag_names = {}
file_records = File.query.filter_by(course_id=courseid).all()
user_names = []
for file_record in file_records:
tag = Tag.query.filter_by(id=file_record.tag_id).first()
user = User.query.filter_by(id=file_record.user_id).first()
user_names.append(user.name)
tag_names[file_record.tag_id] = tag.name
return render_template('teacher/source.html', form=form, file_records=file_records,
courseid=courseid, tags=tags, tag_names=tag_names,user_names=user_names, file_num=len(file_records))
@blueprint.route('/source/<courseid>/tag/<tagid>',methods=['GET','POST'])
def source_tag(courseid, tagid):
form = FileForm()
course = Course.query.filter_by(id=courseid).first()
tags = course.tags
user_names = []
file_records = File.query.filter_by(tag_id=tagid).all()
for file_record in file_records:
user = User.query.filter_by(id=file_record.user_id).first()
user_names.append(user.name)
if form.validate_on_submit():
for file in request.files.getlist('file'):
file_record = File()
file_record.user_id = current_user.id
file_record.course_id = courseid
filename = file.filename
file_record.name = filename
filetype = filename.split('.')[-1]
tmpname = str(current_user.id) + '-' + str(time.time())
file.filename = tmpname + '.' + filetype
file_record.directory = data_uploader.path('', folder='course/'+str(courseid)+'/teacher/source')
file_record.real_name = file.filename
file_record.path = data_uploader.path(file.filename, folder='course/'+str(courseid)+'/teacher/source')
data_uploader.save(file, folder='course/'+str(courseid)+'/teacher/source')
file_record.tag_id = tagid
db.session.add(file_record)
db.session.commit()
return redirect(url_for('teacher.source_tag', courseid=courseid, tagid=tagid))
return render_template('teacher/source_tag.html', form=form, file_records=file_records,
courseid=courseid, tags=tags, tagid=tagid,user_names=user_names,file_num=len(file_records))
@blueprint.route('/source/<courseid>/tag/add/<tagname>',methods=['GET','POST'])
def tag_add(courseid, tagname):
course = Course.query.filter_by(id=courseid).first()
tags = course.tags
for tag in tags:
if tag.name==tagname:
            flash('标签已存在')  # "Tag already exists"
return redirect(url_for('teacher.source', courseid=courseid))
tag = Tag()
tag.name = tagname
course = Course.query.filter_by(id=courseid).first()
course.tags.append(tag)
db.session.add(tag)
db.session.add(course)
db.session.commit()
    flash('添加成功')  # "Added successfully"
return redirect(url_for('teacher.source', courseid=courseid))
@blueprint.route('<courseid>/source/files/download/<fileid>')
def source_download(courseid,fileid):
file_record = File.query.filter_by(id=fileid).first()
if os.path.isfile(file_record.path):
return send_from_directory(file_record.directory, file_record.real_name, as_attachment=True,
attachment_filename='_'.join(lazy_pinyin(file_record.name)))
abort(404)
@blueprint.route('<courseid>/source/files/delete/<fileid>')
def source_delete(courseid,fileid):
file_record = File.query.filter_by(id=fileid).first()
os.remove(file_record.path)
db.session.delete(file_record)
db.session.commit()
flash('删除成功')
return redirect(url_for('teacher.source', courseid=courseid))
def zipfolder(foldername, filename):
    '''
    Zip folder `foldername`, including all of its subfiles and
    subfolders, into a zipfile named `filename`.
    '''
    zip_download = zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED)
    for root, dirs, files in os.walk(foldername):
        for fname in files:  # don't shadow the `filename` parameter
            zip_download.write(os.path.join(root, fname),
                               arcname=os.path.join(os.path.basename(root), fname))
    zip_download.close()
    return zip_download
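# Usage sketch (hypothetical paths):
#
#     zipfolder('/data/course/1/teacher/tasks/3', '/tmp/taskfiles.zip')
#     # /tmp/taskfiles.zip now holds every file under the folder, stored
#     # relative to each file's parent basename, as the download routes
#     # below expect.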
@blueprint.route('/<courseid>/task/<taskid>/files/download')
def task_file_download_zip(courseid, taskid):
foldername = data_uploader.path('', folder='course/'+str(courseid)+'/teacher/tasks/'+str(taskid))
filename = os.path.join(data_uploader.path('', folder='tmp'), 'taskfiles.zip')
zip_download = zipfolder(foldername, filename)
return send_file(filename, as_attachment=True)
@blueprint.route('/<courseid>/task/<taskid>/studenttask/files/download')
def student_task_file_download_zip(courseid, taskid):
foldername = data_uploader.path('', folder='course/'+str(courseid)+'/student/tasks/'+str(taskid))
filename = os.path.join(data_uploader.path('', folder='tmp'), 'taskfiles.zip')
zip_download = zipfolder(foldername, filename)
return send_file(filename, as_attachment=True)
@blueprint.route('/source/<courseid>/files/download')
def source_file_download_zip(courseid):
foldername = data_uploader.path('',folder='course/'+str(courseid)+'/teacher/source')
filename = os.path.join(data_uploader.path('',folder='tmp'),'sourcefiles.zip')
zip_download = zipfolder(foldername,filename)
return send_file(filename,as_attachment=True)
@blueprint.route('/<courseid>/files/download')
def former_task_file_download_zip(courseid):
foldername = data_uploader.path('', folder='course/'+str(courseid)+'/student')
filename = os.path.join(data_uploader.path('', folder='tmp'), 'taskfiles.zip')
zip_download = zipfolder(foldername, filename)
return send_file(filename, as_attachment=True)
@blueprint.route('/<courseid>/task/submit')
def multi_check(courseid):
tasks = Task.query.filter_by(course_id = courseid).all()
ttrs_all = []
for task in tasks:
##team = Team.query.filter_by(course_id = task.course_id).first()
ttrs = TaskTeamRelation.query.filter_by(task_id = task.id).all()
if ttrs is not None:
ttrs_all.extend(ttrs)
teams = Team.query.filter_by(course_id = courseid).all()
return render_template('teacher/multi_check.html',ttrs_all = ttrs_all,courseid = courseid,tasks = tasks,teams = teams)
@blueprint.route('/course/calcu_score')
def calcu_score():
teams = Team.query.filter_by(status=3).all()
team_num = len(teams)
for i in range(0, team_num):
teamtask = TaskTeamRelation.query.filter_by(team_id=teams[i].id).all()
sum = 0
for task in teamtask:
weight = Task.query.filter_by(id=task.task_id).first()
sum += weight.weight * task.score
team_for_score = Team.query.filter_by(id=teams[i].id).first()
team_for_score.score = sum
db.session.add(team_for_score)
db.session.commit()
userList = TeamUserRelation.query.filter_by(team_id=teams[i].id).all()
for user in userList:
user_for_score = UserScore.query.filter_by(user_id=user.user_id).first()
user_for_score.score = sum * user_for_score.grade
db.session.add(user_for_score)
db.session.commit()
return redirect(url_for('teacher.course',teacherid=current_user.id))
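# Scoring rule implemented above (names from the code):
#
#     team.score      = sum(task.weight * task.score) over the team's tasks
#     UserScore.score = team.score * UserScore.grade    (per member)
#
# (grade() further below repeats this, additionally adding
# UserScore.personal_grade and rounding to one decimal.)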
@blueprint.route('/course/grade_download')
def grade_download():
teams = Team.query.filter_by(status=3).all()
book = xlwt.Workbook()
alignment = xlwt.Alignment() # Create Alignment
alignment.horz = xlwt.Alignment.HORZ_CENTER # May be: HORZ_GENERAL, HORZ_LEFT, HORZ_CENTER, HORZ_RIGHT, HORZ_FILLED, HORZ_JUSTIFIED, HORZ_CENTER_ACROSS_SEL, HORZ_DISTRIBUTED
alignment.vert = xlwt.Alignment.VERT_CENTER # May be: VERT_TOP, VERT_CENTER, VERT_BOTTOM, VERT_JUSTIFIED, VERT_DISTRIBUTED
style = xlwt.XFStyle() # Create Style
style.alignment = alignment # Add Alignment to Style
    sheet1 = book.add_sheet('团队成绩', cell_overwrite_ok=True)  # sheet: "Team grades"
    row0 = ['团队id', '团队名称','成绩']  # headers: team id, team name, grade
for i in range(0, len(row0)):
sheet1.write(0, i, row0[i])
row_num = 1
team_num = len(teams)
for i in range(0,team_num):
sheet1.write(i + 1, 0, teams[i].id)
sheet1.write(i+1,1,teams[i].name)
sheet1.write(i+1,2,teams[i].score)
filename = 'team_grade_table_' + str(time.time()) + '.xls'
book.save(os.path.join(data_uploader.path('', folder='tmp'), filename))
return send_from_directory(data_uploader.path('', folder='tmp'), filename, as_attachment=True)
@blueprint.route('/course/grade_download_stu')
def grade_download_stu():
students = UserScore.query.all()
book = xlwt.Workbook()
alignment = xlwt.Alignment() # Create Alignment
alignment.horz = xlwt.Alignment.HORZ_CENTER # May be: HORZ_GENERAL, HORZ_LEFT, HORZ_CENTER, HORZ_RIGHT, HORZ_FILLED, HORZ_JUSTIFIED, HORZ_CENTER_ACROSS_SEL, HORZ_DISTRIBUTED
alignment.vert = xlwt.Alignment.VERT_CENTER # May be: VERT_TOP, VERT_CENTER, VERT_BOTTOM, VERT_JUSTIFIED, VERT_DISTRIBUTED
style = xlwt.XFStyle() # Create Style
style.alignment = alignment # Add Alignment to Style
    sheet1 = book.add_sheet('个人成绩', cell_overwrite_ok=True)  # sheet: "Individual grades"
    row0 = ['学生id', '姓名','个人成绩']  # headers: student id, name, individual grade
for i in range(0, len(row0)):
sheet1.write(0, i, row0[i])
row_num = 1
stu_num = len(students)
for i in range(0,stu_num):
        student = User.query.filter_by(id=students[i].user_id).first()  # one lookup instead of two identical queries
        sheet1.write(i+1,0,student.user_id)
        sheet1.write(i+1,1,student.name)
        sheet1.write(i+1,2,students[i].score)
filename = 'student_grade_table_' + str(time.time()) + '.xls'
book.save(os.path.join(data_uploader.path('', folder='tmp'), filename))
return send_from_directory(data_uploader.path('', folder='tmp'), filename, as_attachment=True)
@blueprint.route('/<courseid>/task/submit/download')
def task_check_download(courseid):
book = xlwt.Workbook()
tasklist = Task.query.filter_by(course_id=courseid).all()
ttrs_all = []
for task in tasklist:
ttrs = TaskTeamRelation.query.filter_by(task_id = task.id).all()
if ttrs is not None:
ttrs_all.extend(ttrs)
teamlist = Team.query.filter_by(course_id = courseid).all()
##tasks = Task.query.filter_by(course_id=courseid).all()
alignment = xlwt.Alignment() # Create Alignment
alignment.horz = xlwt.Alignment.HORZ_CENTER # May be: HORZ_GENERAL, HORZ_LEFT, HORZ_CENTER, HORZ_RIGHT, HORZ_FILLED, HORZ_JUSTIFIED, HORZ_CENTER_ACROSS_SEL, HORZ_DISTRIBUTED
alignment.vert = xlwt.Alignment.VERT_CENTER # May be: VERT_TOP, VERT_CENTER, VERT_BOTTOM, VERT_JUSTIFIED, VERT_DISTRIBUTED
style = xlwt.XFStyle() # Create Style
style.alignment = alignment # Add Alignment to Style
    sheet1 = book.add_sheet('作业信息', cell_overwrite_ok=True)  # sheet: "Assignment info"
    row0 = ['团队id', '团队名称']  # headers: team id, team name
for task in tasklist:
row0.append(task.name)
for i in range(0, len(row0)):
sheet1.write(0, i, row0[i])
row_num = 1
for team in teamlist:
##turs = TeamUserRelation.query.filter_by(team_id=team.id).all()
i = 2
sheet1.write(row_num, 0 , team.id)
sheet1.write(row_num, 1, team.name)
for ttrs in ttrs_all:
if ttrs.team_id == team.id:
sheet1.write(row_num, i , ttrs.score)
i = i+1
##row_num = row_num + turs_length
row_num = row_num + 1
filename = 'task_check_table_' + str(time.time()) + '.xls'
book.save(os.path.join(data_uploader.path('', folder='tmp'), filename))
return send_from_directory(data_uploader.path('', folder='tmp'), filename, as_attachment=True)
@blueprint.route('/course/grade')
def grade():
students = UserScore.query.all()
stu_num = len(students)
username=[]
stuid=[]
for i in range(0, stu_num):
stuname=User.query.filter_by(id=students[i].user_id).first()
username.append(stuname.name)
stuid.append(stuname.user_id)
teams = Team.query.filter_by(status=3).all()
team_num = len(teams)
for i in range(0, team_num):
teamtask = TaskTeamRelation.query.filter_by(team_id=teams[i].id).all()
sum = 0
for task in teamtask:
weight = Task.query.filter_by(id=task.task_id).first()
sum += round(weight.weight * task.score,1)
team_for_score = Team.query.filter_by(id=teams[i].id).first()
team_for_score.score = sum
db.session.add(team_for_score)
db.session.commit()
userList = TeamUserRelation.query.filter_by(team_id=teams[i].id).all()
for user in userList:
user_for_score = UserScore.query.filter_by(user_id=user.user_id).first()
user_for_score.score = round(sum * user_for_score.grade +user_for_score.personal_grade,1)
db.session.add(user_for_score)
db.session.commit()
return render_template('teacher/grade.html',teamList=teams,
stuList=students,username=username,stu_num=stu_num,stuid=stuid)
|
kaiueo/octs
|
octs/teacher/views.py
|
Python
|
bsd-3-clause
| 34,607
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Carlos Jenkins <carlos@jenkins.co.cr>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Module listing ConfigOpt.
"""
from confspec.options import * # noqa
from confspec.validation import * # noqa
__all__ = ['options']
# ConfigOpt instances for testing
options = {
'ConfigOpt': None,
'ConfigList': None,
'ConfigString': None,
'ConfigText': None,
'ConfigLine': None,
'ConfigInt': ConfigInt(
key='configint',
default=99,
validator=multiple_of(3),
category='entityconfigopts',
comment='ConfigInt Test.',
),
'ConfigDecimal': None,
'ConfigOctal': None,
'ConfigHexadecimal': None,
'ConfigBoolean': ConfigBoolean(
key='configboolean',
default=True,
validator=None,
category='entityconfigopts',
comment='ConfigBoolean Test.',
),
'ConfigFloat': ConfigFloat(
key='configfloat',
default=3.14,
validator=in_range(-100.0, 100.0),
category='entityconfigopts',
comment='ConfigFloat Test.',
),
'ConfigDateTime': None,
'ConfigDate': None,
'ConfigTime': None,
'ConfigMap': None,
'ConfigClass': None,
'ConfigPath': None,
'ConfigFile': None,
'ConfigDir': None,
'ConfigColor': None,
'ConfigFont': None,
'ConfigListString': None,
'ConfigListText': None,
'ConfigListLine': None,
'ConfigListInt': ConfigListInt(
key='configlistint',
default=[1, 2, 3, 4, 5],
validator=all_validate_to(positive()),
category='collectionconfigopts',
comment='ConfigListInt Test.',
),
'ConfigListDecimal': None,
'ConfigListOctal': None,
'ConfigListHexadecimal': None,
'ConfigListBoolean': None,
'ConfigListFloat': None,
'ConfigListDateTime': None,
'ConfigListDate': None,
'ConfigListTime': None,
'ConfigListMap': None,
'ConfigListClass': None,
'ConfigListPath': None,
'ConfigListFile': None,
'ConfigListDir': None,
'ConfigListColor': None,
'ConfigListFont': None,
}
# List of ConfigOpt instances (config spec)
spec = [v for v in options.values() if v is not None]
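# Illustrative checks (assumes ConfigOpt subclasses expose their
# constructor arguments as attributes -- an assumption about confspec's
# API, not confirmed here):
#
#     len(spec)                     # 4: ConfigInt, ConfigBoolean,
#                                   #    ConfigFloat, ConfigListInt
#     options['ConfigInt'].key      # 'configint' (assumed attribute)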
|
carlos-jenkins/confspec
|
test/options.py
|
Python
|
apache-2.0
| 2,711
|
from kivy.vector import Vector
from parabox.phisics.base_phisics import BasePhisics
class PlainPhisics(BasePhisics):
"""Phisics model with linear gravity vector"""
def __init__(self, *args, gravity=(0, 0), **kwargs):
"""PlainPisics constructor
:param gravity: gravity vector
:param gravity: kivy.Vector
"""
super(PlainPhisics, self). __init__(*args, **kwargs)
self.gravity = Vector(gravity)
def _get_acceleration(self, world_object):
"""Returns object's acceleration change
:param world_object: object which acceleration will be changed
:type world_object: parabox.base_object.BaseObject
:return: acceleration change
:rtype: Vector
"""
        return self.gravity.rotate(self.angle)  # self.angle is expected to be provided by BasePhisics
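# A minimal usage sketch (assumes BasePhisics requires no extra
# positional arguments -- check parabox.phisics.base_phisics):
#
#     phisics = PlainPhisics(gravity=(0, -9.8))
#     # affected objects then accelerate along the gravity vector,
#     # rotated by the model's current angle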
|
DenisMinich/parabox
|
parabox/phisics/plain_phisics.py
|
Python
|
mit
| 798
|
#!/usr/bin/python
import json, os, sqlite3, sys
# pass the filename as an argument when calling this script
if len(sys.argv) < 2:
sys.exit('Usage: json-to-sqlite.py /path/to/file.json')
fileIn = sys.argv[1]
fileOnly = os.path.basename(fileIn)
try:
fileOut = sys.argv[2]
except IndexError:
fileList = [fileOnly.split('.')[0], 'db']
fileOut = ".".join(fileList)
# read in the JSON file
with open(fileIn) as infile:  # avoid shadowing the built-in input()
    jsonData = json.load(infile)
# structure the json data
jsonColumns = str(",".join(list(jsonData[0].keys())))
jsonValues = [':{0}'.format(i) for i in jsonData[0].keys()]
jsonValues = ", ".join(jsonValues)
# assemble the SQL query
query = ["INSERT INTO jsonFile VALUES (", jsonValues, ")"]
queryString = " ".join(query)
# establish connection to the database & execute the query
conn = sqlite3.connect(fileOut) # or use :memory: to put it in RAM
with conn:
cur = conn.cursor()
cur.execute("CREATE TABLE IF NOT EXISTS jsonFile(" + jsonColumns + ")")
cur.executemany(queryString, jsonData)
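# Usage (hypothetical file names):
#
#     python json-to-sqlite.py data.json          # writes data.db
#     python json-to-sqlite.py data.json out.db   # explicit output file
#
# Note: the input must be a JSON array of flat objects sharing the same
# keys, since the table schema is taken from the first record.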
|
lenwood/csv-json
|
Python/json-to-sqlite.py
|
Python
|
mit
| 1,015
|
# This file is part of Bika LIMS
#
# Copyright 2011-2016 by it's authors.
# Some rights reserved. See LICENSE.txt, AUTHORS.txt.
# ../../skins/bika/bika_widgets/analysisprofileanalyseswidget.pt
from AccessControl import ClassSecurityInfo
from Products.Archetypes.Registry import registerWidget, registerPropertyType
from Products.Archetypes.Widget import TypesWidget
from Products.CMFCore.utils import getToolByName
from bika.lims.browser import BrowserView
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t
from bika.lims.browser.bika_listing import BikaListingView
from zope.i18n.locales import locales
from operator import itemgetter
import json
class AnalysisProfileAnalysesView(BikaListingView):
""" bika listing to display Analyses table for an Analysis Profile.
"""
def __init__(self, context, request, fieldvalue=[], allow_edit=False):
super(AnalysisProfileAnalysesView, self).__init__(context, request)
self.catalog = "bika_setup_catalog"
self.contentFilter = {'portal_type': 'AnalysisService',
'sort_on': 'sortable_title',
'inactive_state': 'active',}
self.context_actions = {}
self.base_url = self.context.absolute_url()
self.view_url = self.base_url
self.show_sort_column = False
self.show_select_row = False
self.show_select_all_checkbox = False
self.show_column_toggles = False
self.show_select_column = True
self.allow_edit = allow_edit
self.form_id = "analyses"
self.profile = None
self.categories = []
self.do_cats = self.context.bika_setup.getCategoriseAnalysisServices()
if self.do_cats:
self.pagesize = 999999 # hide batching controls
self.show_categories = True
self.expand_all_categories = False
self.ajax_categories = True
self.ajax_categories_url = self.context.absolute_url() + \
"/analysisprofile_analysesview"
self.category_index = 'getCategoryTitle'
self.columns = {
'Title': {'title': _('Service'),
'index': 'sortable_title',
'sortable': False,},
'Price': {'title': _('Price'),
'sortable': False,},
}
self.review_states = [
{'id':'default',
'title': _('All'),
'contentFilter':{},
'columns': ['Title',
'Price',
],
'transitions': [{'id':'empty'}, ], # none
},
]
if not self.context.bika_setup.getShowPrices():
self.review_states[0]['columns'].remove('Price')
self.fieldvalue = fieldvalue
self.selected = [x.UID() for x in fieldvalue]
if self.aq_parent.portal_type == 'AnalysisProfile':
# Custom settings for the Analysis Services assigned to
# the Analysis Profile
# https://jira.bikalabs.com/browse/LIMS-1324
self.profile = self.aq_parent
self.columns['Hidden'] = {'title': _('Hidden'),
'sortable': False,
'type': 'boolean'}
self.review_states[0]['columns'].insert(1, 'Hidden')
def folderitems(self):
self.categories = []
bsc = getToolByName(self.context, 'bika_setup_catalog')
wf = getToolByName(self.context, 'portal_workflow')
mtool = getToolByName(self.context, 'portal_membership')
member = mtool.getAuthenticatedMember()
roles = member.getRoles()
self.allow_edit = 'LabManager' in roles or 'Manager' in roles
items = BikaListingView.folderitems(self)
for x in range(len(items)):
if not items[x].has_key('obj'): continue
obj = items[x]['obj']
cat = obj.getCategoryTitle()
# Category (upper C) is for display column value
items[x]['Category'] = cat
if self.do_cats:
# category is for bika_listing to groups entries
items[x]['category'] = cat
if cat not in self.categories:
self.categories.append(cat)
analyses = [a.UID() for a in self.fieldvalue]
items[x]['selected'] = items[x]['uid'] in analyses
items[x]['class']['Title'] = 'service_title'
calculation = obj.getCalculation()
items[x]['Calculation'] = calculation and calculation.Title()
locale = locales.getLocale('en')
currency = self.context.bika_setup.getCurrency()
symbol = locale.numbers.currencies[currency].symbol
items[x]['Price'] = "%s %s" % (symbol, obj.getPrice())
items[x]['class']['Price'] = 'nowrap'
after_icons = ''
if obj.getAccredited():
after_icons += "<img\
src='%s/++resource++bika.lims.images/accredited.png'\
title='%s'>"%(self.context.absolute_url(),
_("Accredited"))
if obj.getReportDryMatter():
after_icons += "<img\
src='%s/++resource++bika.lims.images/dry.png'\
title='%s'>"%(self.context.absolute_url(),
_("Can be reported as dry matter"))
if obj.getAttachmentOption() == 'r':
after_icons += "<img\
src='%s/++resource++bika.lims.images/attach_reqd.png'\
title='%s'>"%(self.context.absolute_url(),
_("Attachment required"))
if obj.getAttachmentOption() == 'n':
after_icons += "<img\
src='%s/++resource++bika.lims.images/attach_no.png'\
title='%s'>"%(self.context.absolute_url(),
_('Attachment not permitted'))
if after_icons:
items[x]['after']['Title'] = after_icons
if self.profile:
# Display analyses for this Analysis Service in results?
ser = self.profile.getAnalysisServiceSettings(obj.UID())
items[x]['allow_edit'] = ['Hidden', ]
items[x]['Hidden'] = ser.get('hidden', obj.getHidden())
self.categories.sort()
return items
class AnalysisProfileAnalysesWidget(TypesWidget):
_properties = TypesWidget._properties.copy()
_properties.update({
'macro': "bika_widgets/analysisprofileanalyseswidget",
'helper_js': ("bika_widgets/analysisprofileanalyseswidget.js",),
'helper_css': ("bika_widgets/analysisprofileanalyseswidget.css",),
})
security = ClassSecurityInfo()
security.declarePublic('process_form')
def process_form(self, instance, field, form, empty_marker = None,
emptyReturnsMarker = False):
""" Return a list of dictionaries fit for AnalysisProfile/Analyses field
consumption.
"""
bsc = getToolByName(instance, 'bika_setup_catalog')
value = []
service_uids = form.get('uids', None)
if instance.portal_type == 'AnalysisProfile':
# Hidden analyses?
outs = []
hiddenans = form.get('Hidden', {})
if service_uids:
for uid in service_uids:
hidden = hiddenans.get(uid, '')
hidden = True if hidden == 'on' else False
outs.append({'uid':uid, 'hidden':hidden})
instance.setAnalysisServicesSettings(outs)
return service_uids, {}
security.declarePublic('Analyses')
def Analyses(self, field, allow_edit = False):
""" Print analyses table
"""
fieldvalue = getattr(field, field.accessor)()
view = AnalysisProfileAnalysesView(self,
self.REQUEST,
fieldvalue = fieldvalue,
allow_edit = allow_edit)
return view.contents_table(table_only = True)
registerWidget(AnalysisProfileAnalysesWidget,
title = 'Analysis Profile Analyses selector',
description = ('Analysis Profile Analyses selector'),
)
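# Sketch of the form data process_form() consumes for an AnalysisProfile
# (field names from the code above; UIDs illustrative):
#
#     form = {'uids': ['uid-1', 'uid-2'],
#             'Hidden': {'uid-2': 'on'}}
#     # -> instance.setAnalysisServicesSettings(
#     #        [{'uid': 'uid-1', 'hidden': False},
#     #         {'uid': 'uid-2', 'hidden': True}])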
|
rockfruit/bika.lims
|
bika/lims/browser/widgets/analysisprofileanalyseswidget.py
|
Python
|
agpl-3.0
| 8,420
|
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import tpool
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.virt.disk.vfs import api as vfs
LOG = logging.getLogger(__name__)
guestfs = None
class VFSGuestFS(vfs.VFS):
"""This class implements a VFS module that uses the libguestfs APIs
to access the disk image. The disk image is never mapped into
the host filesystem, thus avoiding any potential for symlink
attacks from the guest filesystem.
"""
def __init__(self, imgfile, imgfmt='raw', partition=None):
super(VFSGuestFS, self).__init__(imgfile, imgfmt, partition)
global guestfs
if guestfs is None:
guestfs = __import__('guestfs')
self.handle = None
def setup_os(self):
if self.partition == -1:
self.setup_os_inspect()
else:
self.setup_os_static()
def setup_os_static(self):
LOG.debug(_("Mount guest OS image %(imgfile)s partition %(part)s"),
{'imgfile': self.imgfile, 'part': str(self.partition)})
if self.partition:
self.handle.mount_options("", "/dev/sda%d" % self.partition, "/")
else:
self.handle.mount_options("", "/dev/sda", "/")
def setup_os_inspect(self):
LOG.debug(_("Inspecting guest OS image %s"), self.imgfile)
roots = self.handle.inspect_os()
if len(roots) == 0:
raise exception.NovaException(_("No operating system found in %s")
% self.imgfile)
if len(roots) != 1:
LOG.debug(_("Multi-boot OS %(roots)s") % {'roots': str(roots)})
raise exception.NovaException(
_("Multi-boot operating system found in %s") %
self.imgfile)
self.setup_os_root(roots[0])
def setup_os_root(self, root):
LOG.debug(_("Inspecting guest OS root filesystem %s"), root)
mounts = self.handle.inspect_get_mountpoints(root)
if len(mounts) == 0:
raise exception.NovaException(
_("No mount points found in %(root)s of %(imgfile)s") %
{'root': root, 'imgfile': self.imgfile})
# the root directory must be mounted first
mounts.sort(key=lambda mount: mount[0])
root_mounted = False
for mount in mounts:
LOG.debug(_("Mounting %(dev)s at %(dir)s") %
{'dev': mount[1], 'dir': mount[0]})
try:
self.handle.mount_options("", mount[1], mount[0])
root_mounted = True
except RuntimeError as e:
msg = _("Error mounting %(device)s to %(dir)s in image"
" %(imgfile)s with libguestfs (%(e)s)") % \
{'imgfile': self.imgfile, 'device': mount[1],
'dir': mount[0], 'e': e}
if root_mounted:
LOG.debug(msg)
else:
raise exception.NovaException(msg)
def setup(self):
LOG.debug(_("Setting up appliance for %(imgfile)s %(imgfmt)s") %
{'imgfile': self.imgfile, 'imgfmt': self.imgfmt})
try:
self.handle = tpool.Proxy(guestfs.GuestFS(close_on_exit=False))
except TypeError as e:
if 'close_on_exit' in str(e):
# NOTE(russellb) In case we're not using a version of
# libguestfs new enough to support the close_on_exit parameter,
# which was added in libguestfs 1.20.
self.handle = tpool.Proxy(guestfs.GuestFS())
else:
raise
try:
self.handle.add_drive_opts(self.imgfile, format=self.imgfmt)
self.handle.launch()
self.setup_os()
self.handle.aug_init("/", 0)
except RuntimeError as e:
# explicitly teardown instead of implicit close()
# to prevent orphaned VMs in cases when an implicit
# close() is not enough
self.teardown()
raise exception.NovaException(
_("Error mounting %(imgfile)s with libguestfs (%(e)s)") %
{'imgfile': self.imgfile, 'e': e})
except Exception:
# explicitly teardown instead of implicit close()
# to prevent orphaned VMs in cases when an implicit
# close() is not enough
self.teardown()
raise
def teardown(self):
LOG.debug(_("Tearing down appliance"))
try:
try:
self.handle.aug_close()
except RuntimeError as e:
LOG.warn(_("Failed to close augeas %s"), e)
try:
self.handle.shutdown()
except AttributeError:
                # Older libguestfs versions don't have an explicit shutdown
pass
except RuntimeError as e:
LOG.warn(_("Failed to shutdown appliance %s"), e)
try:
self.handle.close()
except AttributeError:
                # Older libguestfs versions don't have an explicit close
pass
except RuntimeError as e:
LOG.warn(_("Failed to close guest handle %s"), e)
finally:
# dereference object and implicitly close()
self.handle = None
@staticmethod
def _canonicalize_path(path):
if path[0] != '/':
return '/' + path
return path
def make_path(self, path):
LOG.debug(_("Make directory path=%s"), path)
path = self._canonicalize_path(path)
self.handle.mkdir_p(path)
def append_file(self, path, content):
LOG.debug(_("Append file path=%s"), path)
path = self._canonicalize_path(path)
self.handle.write_append(path, content)
def replace_file(self, path, content):
LOG.debug(_("Replace file path=%s"), path)
path = self._canonicalize_path(path)
self.handle.write(path, content)
def read_file(self, path):
LOG.debug(_("Read file path=%s"), path)
path = self._canonicalize_path(path)
return self.handle.read_file(path)
def has_file(self, path):
LOG.debug(_("Has file path=%s"), path)
path = self._canonicalize_path(path)
try:
self.handle.stat(path)
return True
except RuntimeError:
return False
def set_permissions(self, path, mode):
LOG.debug(_("Set permissions path=%(path)s mode=%(mode)s"),
{'path': path, 'mode': mode})
path = self._canonicalize_path(path)
self.handle.chmod(mode, path)
def set_ownership(self, path, user, group):
LOG.debug(_("Set ownership path=%(path)s "
"user=%(user)s group=%(group)s"),
{'path': path, 'user': user, 'group': group})
path = self._canonicalize_path(path)
uid = -1
gid = -1
if user is not None:
uid = int(self.handle.aug_get(
"/files/etc/passwd/" + user + "/uid"))
if group is not None:
gid = int(self.handle.aug_get(
"/files/etc/group/" + group + "/gid"))
LOG.debug(_("chown uid=%(uid)d gid=%(gid)s"),
{'uid': uid, 'gid': gid})
self.handle.chown(uid, gid, path)
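# --- Editor's usage sketch (assumption: the enclosing class is the
# VFSGuestFS driver whose constructor takes imgfile/imgfmt/partition,
# defined earlier in this module). The methods above form a strict
# setup/use/teardown lifecycle:
#
#     vfs = VFSGuestFS("/var/lib/nova/instances/disk", "qcow2", 1)
#     vfs.setup()
#     try:
#         vfs.make_path("etc/guest")          # canonicalized to /etc/guest
#         vfs.replace_file("etc/guest/conf", "key=value\n")
#         print(vfs.read_file("etc/guest/conf"))
#     finally:
#         vfs.teardown()                      # avoid orphaned appliances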
|
tanglei528/nova
|
nova/virt/disk/vfs/guestfs.py
|
Python
|
apache-2.0
| 8,034
|
import nn_grad_test as nt
import numpy as np
import start.neural_network as nn
import start.layer_dict as ld
import start.weight_update_params as wup
class test_x3_fc1_sigmo1(nt.NnGradTest):
def define_nn(self):
self.net = nn.NeuralNetwork("test_net", 1)
self.layer = ld.hdict["fc"](1)
self.net.add_layer(self.layer)
self.layer = ld.odict["sigmoid"](1)
self.net.add_layer(self.layer)
np.random.seed(1)
self.params = wup.GradientDescentParams(0.1)
self.net.set_weight_update_function(self.params)
self.net.initialize_parameters()
def set_training_example(self):
self.x = np.array([[0.5]])
self.y = np.array([[0.5]])
|
anamayasullerey/test_net
|
src/tests/test_grad_x3_fc1_sigmo1.py
|
Python
|
mit
| 730
|
# -*- coding: utf-8 -*-
"""
Module: tie.py
Created on Mon Jan 27 17:12:19 2014
@author: gav
Description:
Utilities for converting to and from ASA's tie polygon format
The central common format will be geojson records suitable for the
shapely/fiona gis stack.
"""
### Imports
from __future__ import print_function
from pprint import pprint, pformat
import shapely
import geojson
from itertools import islice, izip, chain
### Logging ###
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
debug, info, error = logger.debug, logger.info, logger.error
### Constants
HEADER_LINES = 16
FIELDS = ['id', 'names', 'description', 'type',
'color_width_height', 'icon_name', 'layer_num',
'attr1','attr2','attr3','attr4','attr5', 'attr6',
'link_fn', 'real', 'n_verts']
# Validators
def _str(s):
return s.strip()
def _int(s):
return int(_str(s))
def _float(s):
return float(_str(s))
def _point(s):
return tuple( float(i) for i in s.strip().split() )
def list_of_ints(s):
return [ int(i) for i in s.strip().split() ]
FIELD_CONVERTERS = {
'id' : _int,
'names' : _str,
'description': _str,
'type' : _str,
'color_width_height': list_of_ints,
'icon_name' : _str,
'layer_num' : _int,
'attr1' : _str,
'attr2' : _str,
'attr3' : _str,
'attr4' : _str,
'attr5' : _str,
'attr6' : _str,
'link_fn' : _str,
'real' : _float,
'n_verts' : _int,
'point' : _point,
}
### Classes
### Functions
def tie_feature(txt):
"""
Return a complete geojson feature from a single tie record
    Args:
        txt (str): Text of a single tie record (header lines plus vertices).
    Note:
        The CRS is attached per-feature here, but should probably live at
        the feature-collection level.
"""
    lines = iter(txt.split('\n'))  # one shared iterator so islice advances it
props = tie_properties(islice(lines, HEADER_LINES))
coords = tie_coordinates(islice(lines, props['n_verts']))
polygon = geojson.Polygon(coordinates=coords)
feat = geojson.Feature(id=props['id'],
geometry=polygon,
properties=props,
crs=tie_crs())
return feat
def tie_feature_gen(lines, crs=None):
"""
    Yield complete geojson features from an iterator of tie record lines.
    The CRS is attached per-feature here, but should probably live at the
    feature-collection level.
"""
crs = crs or tie_crs()
try:
while True:
props = tie_properties(islice(lines, HEADER_LINES))
debug(props['n_verts'])
coords = tie_coordinates(islice(lines, props['n_verts']))
polygon = geojson.Polygon(coordinates=coords)
feat = geojson.Feature(id=props['id'],
geometry=polygon,
properties=props,
crs=crs)
yield feat
except StopIteration:
pass
def tie_properties(prop_lines, conv=FIELD_CONVERTERS):
"""
Return the geojson properties dictionary from a tie record header
"""
prop_lines = list(prop_lines)
    if len(prop_lines) < HEADER_LINES:
raise StopIteration
props = { k: conv[k](s) for k, s in izip(FIELDS, prop_lines) }
return props
def tie_coordinates(vert_lines, conv=FIELD_CONVERTERS):
"""
Return the list of tuple points
Note the geojson coordinates call for a nested list
[[(x1, y1), (x2, y2)], # exterior
[(..), (..)], # first hole
[(..), (..)], # second hole
]
"""
exterior = [ conv['point'](s) for s in vert_lines ]
verts = [ exterior ]
return verts
def tie_crs():
crs = {"crs": {
"type": "name",
"properties": {
"name": "EPSG:4326"}}}
return crs
def tie_feature_collection(fp):
"""
Return a geojson feature collection of receptor polygons
"""
with open(fp) as source:
features = [ f for f in tie_feature_gen(source) ]
feat_coll = geojson.FeatureCollection(features)
return feat_coll
def read_tie_file(fp):
"""Return a list of dicts"""
records = []
with open(fp) as f:
lines = f.readlines()
while lines:
header = lines[:HEADER_LINES]
del lines[:HEADER_LINES]
        record = tie_properties(header)  # parse the header lines into a dict
        vs = lines[:record['n_verts']]
        del lines[:record['n_verts']]
        record['verts'] = verts(vs)
records.append(record)
return records
def verts(lines):
verts_ls = []
for line in lines:
verts_ls.append([float(i) for i in line.split()])
return verts_ls
def tie_record(tie_dict):
"""Return a complete tie record"""
d = tie_dict
color_width_height = "{} {} {}".format(*[int(i) for i in d['color_width_height']])
verts = "\n".join(
[ " {:16.12f} {:26.13f}".format(*tup) for tup in d['verts']]
)
record = """\
{d[id]}
{d[names]}
{d[description]}
{d[type]}
{color_width_height}
{d[icon_name]}
{d[layer_num]}
{d[attr1]}
{d[attr2]}
{d[attr3]}
{d[attr4]}
{d[attr5]}
{d[attr6]}
{d[link_fn]}
{d[real]}
{d[n_verts]}
{verts}""".format(d=tie_dict,
color_width_height=color_width_height,
verts=verts)
return record
### Tests
if __name__ == "__main__":
print("Done __main__")
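# Editor's usage sketch (the file name below is hypothetical). Each record is
# HEADER_LINES header lines followed by n_verts coordinate lines, so a whole
# file can be lifted into geojson in one call:
#
#     fc = tie_feature_collection("receptors.tie")
#     first = fc['features'][0]
#     first['geometry']['type']          # -> 'Polygon' (one exterior ring)
#     first['properties']['n_verts']     # number of vertices parsed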
|
gjcoombes/banks
|
banks/gis/tie.py
|
Python
|
mit
| 5,232
|
from fractions import Fraction
def add_fracs(*args):
return str(sum(Fraction(a) for a in args)) if args else ''
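# Editor's note: Fraction parses "a/b" strings exactly, so the sum never
# suffers float rounding. A quick doctest-style illustration:
#
#     >>> add_fracs('1/2', '1/3')
#     '5/6'
#     >>> add_fracs()
#     ''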
|
the-zebulan/CodeWars
|
katas/beta/adding_fractions.py
|
Python
|
mit
| 118
|
from django.core.management.base import BaseCommand
from apps.statistics.models import MStatistics
class Command(BaseCommand):
def handle(self, *args, **options):
MStatistics.collect_statistics()
|
samuelclay/NewsBlur
|
apps/statistics/management/commands/collect_stats.py
|
Python
|
mit
| 218
|
'''
Created on Apr 30, 2015
@author: pekzeki
'''
from pymongo import MongoClient
import networkx as nx
from network_analysis import graph_analysis as GA
def all_friends_graph():
G = nx.Graph()
for user in user_collection.find():
G.add_node(user.get("_id"))
friend_list = user.get("friends")
for friend in friend_list:
G.add_edge(user.get("_id"), friend)
return G
client = MongoClient()
db = client.yelp
user_collection = db.user_il_filtered
review_collection = db.review_il_filtered
business_collection = db.business_il_filtered
G = all_friends_graph()
GA.analyze(G, "undirected_all_friends")
|
pekzeki/SNAofYelp
|
network_analysis/undirected_all_friends.py
|
Python
|
mit
| 663
|
"""Support for Zigbee Home Automation devices."""
import asyncio
import logging
import voluptuous as vol
from zhaquirks import setup as setup_quirks
from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
from homeassistant import const as ha_const
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType
from . import api
from .core import ZHAGateway
from .core.const import (
BAUD_RATES,
CONF_BAUDRATE,
CONF_CUSTOM_QUIRKS_PATH,
CONF_DATABASE,
CONF_DEVICE_CONFIG,
CONF_ENABLE_QUIRKS,
CONF_RADIO_TYPE,
CONF_USB_PATH,
CONF_ZIGPY,
DATA_ZHA,
DATA_ZHA_CONFIG,
DATA_ZHA_GATEWAY,
DATA_ZHA_PLATFORM_LOADED,
DATA_ZHA_SHUTDOWN_TASK,
DOMAIN,
PLATFORMS,
SIGNAL_ADD_ENTITIES,
RadioType,
)
from .core.discovery import GROUP_PROBE
DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema({vol.Optional(ha_const.CONF_TYPE): cv.string})
ZHA_CONFIG_SCHEMA = {
vol.Optional(CONF_BAUDRATE): cv.positive_int,
vol.Optional(CONF_DATABASE): cv.string,
vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema(
{cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}
),
vol.Optional(CONF_ENABLE_QUIRKS, default=True): cv.boolean,
vol.Optional(CONF_ZIGPY): dict,
vol.Optional(CONF_RADIO_TYPE): cv.enum(RadioType),
vol.Optional(CONF_USB_PATH): cv.string,
vol.Optional(CONF_CUSTOM_QUIRKS_PATH): cv.isdir,
}
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
vol.All(
cv.deprecated(CONF_USB_PATH),
cv.deprecated(CONF_BAUDRATE),
cv.deprecated(CONF_RADIO_TYPE),
ZHA_CONFIG_SCHEMA,
),
),
},
extra=vol.ALLOW_EXTRA,
)
# Zigbee definitions
CENTICELSIUS = "C-100"
# Internal definitions
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up ZHA from config."""
hass.data[DATA_ZHA] = {}
if DOMAIN in config:
conf = config[DOMAIN]
hass.data[DATA_ZHA][DATA_ZHA_CONFIG] = conf
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up ZHA.
Will automatically load components to support devices found on the network.
"""
zha_data = hass.data.setdefault(DATA_ZHA, {})
config = zha_data.get(DATA_ZHA_CONFIG, {})
for platform in PLATFORMS:
zha_data.setdefault(platform, [])
if config.get(CONF_ENABLE_QUIRKS, True):
setup_quirks(config)
zha_gateway = ZHAGateway(hass, config, config_entry)
await zha_gateway.async_initialize()
zha_data[DATA_ZHA_PLATFORM_LOADED] = []
for platform in PLATFORMS:
coro = hass.config_entries.async_forward_entry_setup(config_entry, platform)
zha_data[DATA_ZHA_PLATFORM_LOADED].append(hass.async_create_task(coro))
device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(CONNECTION_ZIGBEE, str(zha_gateway.application_controller.ieee))},
identifiers={(DOMAIN, str(zha_gateway.application_controller.ieee))},
name="Zigbee Coordinator",
manufacturer="ZHA",
model=zha_gateway.radio_description,
)
api.async_load_api(hass)
async def async_zha_shutdown(event):
"""Handle shutdown tasks."""
await zha_data[DATA_ZHA_GATEWAY].shutdown()
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()
zha_data[DATA_ZHA_SHUTDOWN_TASK] = hass.bus.async_listen_once(
ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown
)
asyncio.create_task(async_load_entities(hass))
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload ZHA config entry."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_update_device_storage()
GROUP_PROBE.cleanup()
api.async_unload_api(hass)
# our components don't have unload methods so no need to look at return values
await asyncio.gather(
*(
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in PLATFORMS
)
)
hass.data[DATA_ZHA][DATA_ZHA_SHUTDOWN_TASK]()
return True
async def async_load_entities(hass: HomeAssistant) -> None:
"""Load entities after integration was setup."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_initialize_devices_and_entities()
to_setup = hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED]
results = await asyncio.gather(*to_setup, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
_LOGGER.warning("Couldn't setup zha platform: %s", res)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""
_LOGGER.debug("Migrating from version %s", config_entry.version)
if config_entry.version == 1:
data = {
CONF_RADIO_TYPE: config_entry.data[CONF_RADIO_TYPE],
CONF_DEVICE: {CONF_DEVICE_PATH: config_entry.data[CONF_USB_PATH]},
}
baudrate = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {}).get(CONF_BAUDRATE)
if data[CONF_RADIO_TYPE] != RadioType.deconz and baudrate in BAUD_RATES:
data[CONF_DEVICE][CONF_BAUDRATE] = baudrate
config_entry.version = 2
hass.config_entries.async_update_entry(config_entry, data=data)
if config_entry.version == 2:
data = {**config_entry.data}
if data[CONF_RADIO_TYPE] == "ti_cc":
data[CONF_RADIO_TYPE] = "znp"
config_entry.version = 3
hass.config_entries.async_update_entry(config_entry, data=data)
_LOGGER.info("Migration to version %s successful", config_entry.version)
return True
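# Editor's illustration (values are hypothetical): CONFIG_SCHEMA above accepts
# a plain dict keyed by the constants imported from core.const, e.g.
#
#     CONFIG_SCHEMA({DOMAIN: {CONF_ENABLE_QUIRKS: True,
#                             CONF_DEVICE_CONFIG: {}}})
#
# Unknown top-level keys pass through because of extra=vol.ALLOW_EXTRA.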
|
rohitranjan1991/home-assistant
|
homeassistant/components/zha/__init__.py
|
Python
|
mit
| 6,244
|
"""Tests for base extension."""
import unittest
from grow.extensions import base_extension
class BaseExtensionTestCase(unittest.TestCase):
"""Test the base extension."""
def test_config_disabled(self):
"""Uses the disabled config."""
ext = base_extension.BaseExtension(None, {
'disabled': [
'a',
],
'enabled': [
'a',
],
})
self.assertFalse(ext.hooks.is_enabled('a'))
self.assertFalse(ext.hooks.is_enabled('b'))
def test_config_enabled(self):
"""Uses the enabled config."""
ext = base_extension.BaseExtension(None, {
'enabled': [
'a',
],
})
self.assertTrue(ext.hooks.is_enabled('a'))
self.assertFalse(ext.hooks.is_enabled('b'))
|
grow/pygrow
|
grow/extensions/base_extension_test.py
|
Python
|
mit
| 844
|
# -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
from ..parameters import parse_token_response, prepare_token_request
from .base import Client
class BackendApplicationClient(Client):
"""A public client utilizing the client credentials grant workflow.
The client can request an access token using only its client
credentials (or other supported means of authentication) when the
client is requesting access to the protected resources under its
control, or those of another resource owner which has been previously
arranged with the authorization server (the method of which is beyond
the scope of this specification).
The client credentials grant type MUST only be used by confidential
clients.
Since the client authentication is used as the authorization grant,
no additional authorization request is needed.
"""
def prepare_request_body(self, body='', scope=None, **kwargs):
"""Add the client credentials to the request body.
The client makes a request to the token endpoint by adding the
following parameters using the "application/x-www-form-urlencoded"
format per `Appendix B`_ in the HTTP request entity-body:
:param scope: The scope of the access request as described by
`Section 3.3`_.
:param kwargs: Extra credentials to include in the token request.
The client MUST authenticate with the authorization server as
described in `Section 3.2.1`_.
The prepared body will include all provided credentials as well as
the ``grant_type`` parameter set to ``client_credentials``::
>>> from oauthlib.oauth2 import BackendApplicationClient
>>> client = BackendApplicationClient('your_id')
>>> client.prepare_request_body(scope=['hello', 'world'])
'grant_type=client_credentials&scope=hello+world'
.. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B
.. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
.. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1
"""
return prepare_token_request('client_credentials', body=body,
scope=scope, **kwargs)
|
skion/oauthlib-oidc
|
oauthlib/oauth2/rfc6749/clients/backend_application.py
|
Python
|
bsd-3-clause
| 2,477
|
"""
Copyright 2016, Michael DeHaan <michael.dehaan@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from camp.core.scale import scale
from camp.band.conductor import Conductor
from camp.band.members.performance import Performance
from camp.band.members.scale_source import ScaleSource
from camp.band.members.roman import Roman
from camp.band.selectors.endlessly import Endlessly
def play():
# now modifying our previous example, instead of playing the same two
# scales in a loop, let's play a given set of notes in each scale.
# We'll use roman notation to play the 1st, 4th, and 5th note in the scale
# followed by the 1st, 4th, and 5th major chord
# finally, we'll play the 2nd, 3rd, and 6th minor chord.
output = Performance(bpm=120, stop_seconds=10)
# this is just as before, playing one scale then the other in a loop of 7
# notes each
scale1 = scale("c6 major")
scale2 = scale("c6 minor")
scale_choices = [ dict(scale=scale1, beats=7), dict(scale=scale2, beats=7) ]
source = ScaleSource(scales=Endlessly(scale_choices))
# the scale follower will play the first 7 notes in each scale, whatever the current
# scale is. Note that a scale change that doesn't quite line up with the length
# of the roman pattern rolling over might sound a bit weird. That's ok.
roman = Roman(symbols=Endlessly("1 4 5 I IV V ii iii vi".split()), channel=1)
source.chain([roman, output])
conductor = Conductor(signal=[source], performance=output)
conductor.start()
if __name__ == "__main__":
play()
|
mpdehaan/camp
|
examples/04_roman_notation.py
|
Python
|
apache-2.0
| 2,062
|
''' MCTSClass.py: Class for a basic Monte Carlo Tree Search Planner. '''
# Python imports.
import math
import random
from collections import defaultdict
# Other imports.
from simple_rl.planning.PlannerClass import Planner
class MCTS(Planner):
def __init__(self, mdp, name="mcts", explore_param=math.sqrt(2), rollout_depth=20, num_rollouts_per_step=10):
Planner.__init__(self, mdp, name=name)
self.rollout_depth = rollout_depth
self.num_rollouts_per_step = num_rollouts_per_step
self.value_total = defaultdict(lambda : defaultdict(float))
self.explore_param = explore_param
self.visitation_counts = defaultdict(lambda : defaultdict(lambda : 0))
def plan(self, cur_state, horizon=20):
'''
Args:
cur_state (State)
horizon (int)
Returns:
(list): List of actions
'''
action_seq = []
state_seq = [cur_state]
steps = 0
while not cur_state.is_terminal() and steps < horizon:
action = self._next_action(cur_state)
# Do the rollouts...
cur_state = self.transition_func(cur_state, action)
action_seq.append(action)
state_seq.append(cur_state)
steps += 1
self.has_planned = True
return action_seq, state_seq
def policy(self, state):
'''
Args:
state (State)
Returns:
(str)
'''
if not self.has_planned:
self.plan(state)
return self._next_action(state)
def _next_action(self, state):
'''
        Args:
state (State)
Returns:
(str)
Summary:
            Selects the next action via the UCT rule; any action not yet
            sampled in this state is tried uniformly at random first.
'''
best_action = self.actions[0]
best_score = 0
total_visits = [self.visitation_counts[state][a] for a in self.actions]
if 0 in total_visits:
# Insufficient stats, return a random action.
unsampled_actions = [self.actions[i] for i, x in enumerate(total_visits) if x == 0]
next_action = random.choice(unsampled_actions)
self.visitation_counts[state][next_action] += 1
return next_action
total = sum(total_visits)
# Else choose according to the UCT explore method.
for cur_action in self.actions:
s_a_value_tot = self.value_total[state][cur_action]
s_a_visit = self.visitation_counts[state][cur_action]
score = s_a_value_tot / s_a_visit + self.explore_param * math.sqrt(math.log(total) / s_a_visit)
if score > best_score:
best_action = cur_action
best_score = score
return best_action
def _rollout(self, cur_state, action):
'''
Args:
cur_state (State)
action (str)
Returns:
            (list): Per-step discounted rewards accrued during the rollout.
'''
trajectory = []
total_discounted_reward = []
for i in range(self.rollout_depth):
# Simulate next state.
next_action = self._next_action(cur_state)
cur_state = self.transition_func(cur_state, next_action)
next_reward = self.reward_func(cur_state, next_action)
# Track rewards and states.
total_discounted_reward.append(self.gamma**i * next_reward)
trajectory.append((cur_state, next_action))
if cur_state.is_terminal():
# Break terminal.
break
# Update all visited nodes.
for i, experience in enumerate(trajectory):
s, a = experience
self.visitation_counts[s][a] += 1
self.value_total[s][a] += sum(total_discounted_reward[i:])
return total_discounted_reward
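# Editor's usage sketch (assumes GridWorldMDP from simple_rl.tasks, as used
# elsewhere in this library; any MDP exposing its transition and reward
# functions through Planner works the same way):
#
#     from simple_rl.tasks import GridWorldMDP
#     mdp = GridWorldMDP(width=4, height=3)
#     mcts = MCTS(mdp, rollout_depth=10, num_rollouts_per_step=5)
#     action_seq, state_seq = mcts.plan(mdp.get_init_state(), horizon=15)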
|
david-abel/simple_rl
|
simple_rl/planning/MCTSClass.py
|
Python
|
apache-2.0
| 3,868
|
"""
Unit tests for the basin hopping global minimization algorithm.
"""
import copy
from numpy.testing import assert_almost_equal, assert_equal, assert_
import pytest
from pytest import raises as assert_raises
import numpy as np
from numpy import cos, sin
from scipy.optimize import basinhopping, OptimizeResult
from scipy.optimize._basinhopping import (
Storage, RandomDisplacement, Metropolis, AdaptiveStepsize)
from scipy._lib._pep440 import Version
def func1d(x):
f = cos(14.5 * x - 0.3) + (x + 0.2) * x
df = np.array(-14.5 * sin(14.5 * x - 0.3) + 2. * x + 0.2)
return f, df
def func2d_nograd(x):
f = cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
return f
def func2d(x):
f = cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
df = np.zeros(2)
df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
df[1] = 2. * x[1] + 0.2
return f, df
def func2d_easyderiv(x):
f = 2.0*x[0]**2 + 2.0*x[0]*x[1] + 2.0*x[1]**2 - 6.0*x[0]
df = np.zeros(2)
df[0] = 4.0*x[0] + 2.0*x[1] - 6.0
df[1] = 2.0*x[0] + 4.0*x[1]
return f, df
class MyTakeStep1(RandomDisplacement):
"""use a copy of displace, but have it set a special parameter to
make sure it's actually being used."""
def __init__(self):
self.been_called = False
super().__init__()
def __call__(self, x):
self.been_called = True
return super().__call__(x)
def myTakeStep2(x):
"""redo RandomDisplacement in function form without the attribute stepsize
to make sure everything still works ok
"""
s = 0.5
x += np.random.uniform(-s, s, np.shape(x))
return x
class MyAcceptTest:
"""pass a custom accept test
This does nothing but make sure it's being used and ensure all the
possible return values are accepted
"""
def __init__(self):
self.been_called = False
self.ncalls = 0
self.testres = [False, 'force accept', True, np.bool_(True),
np.bool_(False), [], {}, 0, 1]
def __call__(self, **kwargs):
self.been_called = True
self.ncalls += 1
if self.ncalls - 1 < len(self.testres):
return self.testres[self.ncalls - 1]
else:
return True
class MyCallBack:
"""pass a custom callback function
This makes sure it's being used. It also returns True after 10
steps to ensure that it's stopping early.
"""
def __init__(self):
self.been_called = False
self.ncalls = 0
def __call__(self, x, f, accepted):
self.been_called = True
self.ncalls += 1
if self.ncalls == 10:
return True
class TestBasinHopping:
def setup_method(self):
""" Tests setup.
Run tests based on the 1-D and 2-D functions described above.
"""
self.x0 = (1.0, [1.0, 1.0])
self.sol = (-0.195, np.array([-0.195, -0.1]))
self.tol = 3 # number of decimal places
self.niter = 100
self.disp = False
# fix random seed
np.random.seed(1234)
self.kwargs = {"method": "L-BFGS-B", "jac": True}
self.kwargs_nograd = {"method": "L-BFGS-B"}
def test_TypeError(self):
# test the TypeErrors are raised on bad input
i = 1
# if take_step is passed, it must be callable
assert_raises(TypeError, basinhopping, func2d, self.x0[i],
take_step=1)
# if accept_test is passed, it must be callable
assert_raises(TypeError, basinhopping, func2d, self.x0[i],
accept_test=1)
def test_input_validation(self):
msg = 'target_accept_rate has to be in range \\(0, 1\\)'
with assert_raises(ValueError, match=msg):
basinhopping(func1d, self.x0[0], target_accept_rate=0.)
with assert_raises(ValueError, match=msg):
basinhopping(func1d, self.x0[0], target_accept_rate=1.)
msg = 'stepwise_factor has to be in range \\(0, 1\\)'
with assert_raises(ValueError, match=msg):
basinhopping(func1d, self.x0[0], stepwise_factor=0.)
with assert_raises(ValueError, match=msg):
basinhopping(func1d, self.x0[0], stepwise_factor=1.)
def test_1d_grad(self):
# test 1-D minimizations with gradient
i = 0
res = basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=self.niter, disp=self.disp)
assert_almost_equal(res.x, self.sol[i], self.tol)
def test_2d(self):
# test 2d minimizations with gradient
i = 1
res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=self.niter, disp=self.disp)
assert_almost_equal(res.x, self.sol[i], self.tol)
assert_(res.nfev > 0)
def test_njev(self):
# test njev is returned correctly
i = 1
minimizer_kwargs = self.kwargs.copy()
# L-BFGS-B doesn't use njev, but BFGS does
minimizer_kwargs["method"] = "BFGS"
res = basinhopping(func2d, self.x0[i],
minimizer_kwargs=minimizer_kwargs, niter=self.niter,
disp=self.disp)
assert_(res.nfev > 0)
assert_equal(res.nfev, res.njev)
def test_jac(self):
# test Jacobian returned
minimizer_kwargs = self.kwargs.copy()
# BFGS returns a Jacobian
minimizer_kwargs["method"] = "BFGS"
res = basinhopping(func2d_easyderiv, [0.0, 0.0],
minimizer_kwargs=minimizer_kwargs, niter=self.niter,
disp=self.disp)
assert_(hasattr(res.lowest_optimization_result, "jac"))
# in this case, the Jacobian is just [df/dx, df/dy]
_, jacobian = func2d_easyderiv(res.x)
assert_almost_equal(res.lowest_optimization_result.jac, jacobian,
self.tol)
def test_2d_nograd(self):
# test 2-D minimizations without gradient
i = 1
res = basinhopping(func2d_nograd, self.x0[i],
minimizer_kwargs=self.kwargs_nograd,
niter=self.niter, disp=self.disp)
assert_almost_equal(res.x, self.sol[i], self.tol)
def test_all_minimizers(self):
# Test 2-D minimizations with gradient. Nelder-Mead, Powell, and COBYLA
# don't accept jac=True, so aren't included here.
i = 1
methods = ['CG', 'BFGS', 'Newton-CG', 'L-BFGS-B', 'TNC', 'SLSQP']
minimizer_kwargs = copy.copy(self.kwargs)
for method in methods:
minimizer_kwargs["method"] = method
res = basinhopping(func2d, self.x0[i],
minimizer_kwargs=minimizer_kwargs,
niter=self.niter, disp=self.disp)
assert_almost_equal(res.x, self.sol[i], self.tol)
def test_all_nograd_minimizers(self):
# Test 2-D minimizations without gradient. Newton-CG requires jac=True,
# so not included here.
i = 1
methods = ['CG', 'BFGS', 'L-BFGS-B', 'TNC', 'SLSQP',
'Nelder-Mead', 'Powell', 'COBYLA']
minimizer_kwargs = copy.copy(self.kwargs_nograd)
for method in methods:
minimizer_kwargs["method"] = method
res = basinhopping(func2d_nograd, self.x0[i],
minimizer_kwargs=minimizer_kwargs,
niter=self.niter, disp=self.disp)
tol = self.tol
if method == 'COBYLA':
tol = 2
assert_almost_equal(res.x, self.sol[i], decimal=tol)
def test_pass_takestep(self):
# test that passing a custom takestep works
# also test that the stepsize is being adjusted
takestep = MyTakeStep1()
initial_step_size = takestep.stepsize
i = 1
res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=self.niter, disp=self.disp,
take_step=takestep)
assert_almost_equal(res.x, self.sol[i], self.tol)
assert_(takestep.been_called)
        # make sure that the built-in adaptive step size has been used
assert_(initial_step_size != takestep.stepsize)
def test_pass_simple_takestep(self):
        # test that passing a custom takestep without the stepsize attribute works
takestep = myTakeStep2
i = 1
res = basinhopping(func2d_nograd, self.x0[i],
minimizer_kwargs=self.kwargs_nograd,
niter=self.niter, disp=self.disp,
take_step=takestep)
assert_almost_equal(res.x, self.sol[i], self.tol)
def test_pass_accept_test(self):
# test passing a custom accept test
# makes sure it's being used and ensures all the possible return values
# are accepted.
accept_test = MyAcceptTest()
i = 1
# there's no point in running it more than a few steps.
basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=10, disp=self.disp, accept_test=accept_test)
assert_(accept_test.been_called)
def test_pass_callback(self):
# test passing a custom callback function
# This makes sure it's being used. It also returns True after 10 steps
# to ensure that it's stopping early.
callback = MyCallBack()
i = 1
# there's no point in running it more than a few steps.
res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=30, disp=self.disp, callback=callback)
assert_(callback.been_called)
assert_("callback" in res.message[0])
# One of the calls of MyCallBack is during BasinHoppingRunner
# construction, so there are only 9 remaining before MyCallBack stops
# the minimization.
assert_equal(res.nit, 9)
def test_minimizer_fail(self):
# test if a minimizer fails
i = 1
self.kwargs["options"] = dict(maxiter=0)
self.niter = 10
res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=self.niter, disp=self.disp)
# the number of failed minimizations should be the number of
# iterations + 1
assert_equal(res.nit + 1, res.minimization_failures)
def test_niter_zero(self):
# gh5915, what happens if you call basinhopping with niter=0
i = 0
basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=0, disp=self.disp)
def test_seed_reproducibility(self):
# seed should ensure reproducibility between runs
minimizer_kwargs = {"method": "L-BFGS-B", "jac": True}
f_1 = []
def callback(x, f, accepted):
f_1.append(f)
basinhopping(func2d, [1.0, 1.0], minimizer_kwargs=minimizer_kwargs,
niter=10, callback=callback, seed=10)
f_2 = []
def callback2(x, f, accepted):
f_2.append(f)
basinhopping(func2d, [1.0, 1.0], minimizer_kwargs=minimizer_kwargs,
niter=10, callback=callback2, seed=10)
assert_equal(np.array(f_1), np.array(f_2))
@pytest.mark.skipif(Version(np.__version__) < Version('1.17'),
reason='Generator not available for numpy, < 1.17')
def test_random_gen(self):
# check that np.random.Generator can be used (numpy >= 1.17)
rng = np.random.default_rng(1)
minimizer_kwargs = {"method": "L-BFGS-B", "jac": True}
res1 = basinhopping(func2d, [1.0, 1.0],
minimizer_kwargs=minimizer_kwargs,
niter=10, seed=rng)
rng = np.random.default_rng(1)
res2 = basinhopping(func2d, [1.0, 1.0],
minimizer_kwargs=minimizer_kwargs,
niter=10, seed=rng)
assert_equal(res1.x, res2.x)
def test_monotonic_basin_hopping(self):
# test 1-D minimizations with gradient and T=0
i = 0
res = basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
niter=self.niter, disp=self.disp, T=0)
assert_almost_equal(res.x, self.sol[i], self.tol)
class Test_Storage:
def setup_method(self):
self.x0 = np.array(1)
self.f0 = 0
minres = OptimizeResult()
minres.x = self.x0
minres.fun = self.f0
self.storage = Storage(minres)
def test_higher_f_rejected(self):
new_minres = OptimizeResult()
new_minres.x = self.x0 + 1
new_minres.fun = self.f0 + 1
ret = self.storage.update(new_minres)
minres = self.storage.get_lowest()
assert_equal(self.x0, minres.x)
assert_equal(self.f0, minres.fun)
assert_(not ret)
def test_lower_f_accepted(self):
new_minres = OptimizeResult()
new_minres.x = self.x0 + 1
new_minres.fun = self.f0 - 1
ret = self.storage.update(new_minres)
minres = self.storage.get_lowest()
assert_(self.x0 != minres.x)
assert_(self.f0 != minres.fun)
assert_(ret)
class Test_RandomDisplacement:
def setup_method(self):
self.stepsize = 1.0
self.displace = RandomDisplacement(stepsize=self.stepsize)
self.N = 300000
self.x0 = np.zeros([self.N])
def test_random(self):
# the mean should be 0
# the variance should be (2*stepsize)**2 / 12
# note these tests are random, they will fail from time to time
x = self.displace(self.x0)
v = (2. * self.stepsize) ** 2 / 12
assert_almost_equal(np.mean(x), 0., 1)
assert_almost_equal(np.var(x), v, 1)
class Test_Metropolis:
def setup_method(self):
self.T = 2.
self.met = Metropolis(self.T)
def test_boolean_return(self):
# the return must be a bool, else an error will be raised in
# basinhopping
ret = self.met(f_new=0., f_old=1.)
assert isinstance(ret, bool)
def test_lower_f_accepted(self):
assert_(self.met(f_new=0., f_old=1.))
def test_KeyError(self):
# should raise KeyError if kwargs f_old or f_new is not passed
assert_raises(KeyError, self.met, f_old=1.)
assert_raises(KeyError, self.met, f_new=1.)
def test_accept(self):
# test that steps are randomly accepted for f_new > f_old
one_accept = False
one_reject = False
for i in range(1000):
if one_accept and one_reject:
break
ret = self.met(f_new=1., f_old=0.5)
if ret:
one_accept = True
else:
one_reject = True
assert_(one_accept)
assert_(one_reject)
def test_GH7495(self):
# an overflow in exp was producing a RuntimeWarning
# create own object here in case someone changes self.T
met = Metropolis(2)
with np.errstate(over='raise'):
met.accept_reject(0, 2000)
class Test_AdaptiveStepsize:
def setup_method(self):
self.stepsize = 1.
self.ts = RandomDisplacement(stepsize=self.stepsize)
self.target_accept_rate = 0.5
self.takestep = AdaptiveStepsize(takestep=self.ts, verbose=False,
accept_rate=self.target_accept_rate)
def test_adaptive_increase(self):
# if few steps are rejected, the stepsize should increase
x = 0.
self.takestep(x)
self.takestep.report(False)
for i in range(self.takestep.interval):
self.takestep(x)
self.takestep.report(True)
assert_(self.ts.stepsize > self.stepsize)
def test_adaptive_decrease(self):
        # if few steps are accepted, the stepsize should decrease
x = 0.
self.takestep(x)
self.takestep.report(True)
for i in range(self.takestep.interval):
self.takestep(x)
self.takestep.report(False)
assert_(self.ts.stepsize < self.stepsize)
def test_all_accepted(self):
# test that everything works OK if all steps were accepted
x = 0.
for i in range(self.takestep.interval + 1):
self.takestep(x)
self.takestep.report(True)
assert_(self.ts.stepsize > self.stepsize)
def test_all_rejected(self):
# test that everything works OK if all steps were rejected
x = 0.
for i in range(self.takestep.interval + 1):
self.takestep(x)
self.takestep.report(False)
assert_(self.ts.stepsize < self.stepsize)
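# Editor's illustration of the call pattern these tests exercise: minimizing
# func2d with gradient information supplied through minimizer_kwargs.
#
#     res = basinhopping(func2d, [1.0, 1.0],
#                        minimizer_kwargs={"method": "L-BFGS-B", "jac": True},
#                        niter=100, seed=1234)
#     # res.x is close to [-0.195, -0.1] (cf. self.sol in TestBasinHopping)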
|
anntzer/scipy
|
scipy/optimize/tests/test__basinhopping.py
|
Python
|
bsd-3-clause
| 16,851
|
import requests
import json
import re
from requests.adapters import HTTPAdapter
from cartodb_services.tools.exceptions import (WrongParams,
MalformedResult,
ServiceException)
from cartodb_services.tools.qps import qps_retry
from cartodb_services.tools import Coordinate, PolyLine
from cartodb_services.metrics import Traceable
class MapzenGeocoder(Traceable):
'A Mapzen Geocoder wrapper for python'
BASE_URL = 'https://search.mapzen.com/v1/search'
READ_TIMEOUT = 60
CONNECT_TIMEOUT = 10
MAX_RETRIES = 1
def __init__(self, app_key, logger, service_params=None):
service_params = service_params or {}
self._app_key = app_key
self._url = service_params.get('base_url', self.BASE_URL)
self._connect_timeout = service_params.get('connect_timeout', self.CONNECT_TIMEOUT)
self._read_timeout = service_params.get('read_timeout', self.READ_TIMEOUT)
self._max_retries = service_params.get('max_retries', self.MAX_RETRIES)
self._logger = logger
@qps_retry(qps=20)
def geocode(self, searchtext, city=None, state_province=None,
country=None, search_type=None):
        # Drop search_type from the params sent to Mapzen when it is 'address'
if search_type and search_type.lower() == 'address':
search_type = None
request_params = self._build_requests_parameters(searchtext, city,
state_province,
country, search_type)
try:
# TODO Extract HTTP client wrapper
session = requests.Session()
session.mount(self._url, HTTPAdapter(max_retries=self._max_retries))
response = session.get(self._url, params=request_params,
timeout=(self._connect_timeout, self._read_timeout))
self.add_response_data(response, self._logger)
if response.status_code == requests.codes.ok:
return self.__parse_response(response.text)
elif response.status_code == requests.codes.bad_request:
return []
else:
self._logger.error('Error trying to geocode using mapzen',
data={"response_status": response.status_code,
"response_reason": response.reason,
"response_content": response.text,
"reponse_url": response.url,
"response_headers": response.headers,
"searchtext": searchtext,
"city": city, "country": country,
"state_province": state_province})
raise ServiceException('Error trying to geocode {0} using mapzen'.format(searchtext),
response)
except requests.Timeout as te:
# In case of timeout we want to stop the job because the server
# could be down
self._logger.error('Timeout connecting to Mapzen geocoding server', te)
raise ServiceException('Error trying to geocode {0} using mapzen'.format(searchtext),
None)
except requests.ConnectionError as e:
# Don't raise the exception to continue with the geocoding job
self._logger.error('Error connecting to Mapzen geocoding server',
exception=e)
return []
def _build_requests_parameters(self, searchtext, city=None,
state_province=None, country=None,
search_type=None):
request_params = {}
search_string = self._build_search_text(searchtext.strip(),
city,
state_province)
request_params['text'] = search_string
request_params['api_key'] = self._app_key
if search_type:
request_params['layers'] = search_type
if country:
request_params['boundary.country'] = country
return request_params
def _build_search_text(self, searchtext, city, state_province):
search_string = searchtext
if city:
search_string = "{0}, {1}".format(search_string, city)
if state_province:
search_string = "{0}, {1}".format(search_string, state_province)
return search_string
def __parse_response(self, response):
try:
parsed_json_response = json.loads(response)
feature = parsed_json_response['features'][0]
return self._extract_lng_lat_from_result(feature)
except IndexError:
return []
except KeyError:
raise MalformedResult()
def _extract_lng_lat_from_result(self, result):
location = result['geometry']['coordinates']
longitude = location[0]
latitude = location[1]
return [longitude, latitude]
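# Editor's usage sketch (the API key and logger are hypothetical; geocode()
# performs a network call, so this is illustration only):
#
#     geocoder = MapzenGeocoder('search-xxxxxxx', logger)
#     lng_lat = geocoder.geocode('350 5th Ave', city='New York', country='USA')
#     # -> [longitude, latitude] on a hit, [] when nothing matches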
|
CartoDB/dataservices-api
|
server/lib/python/cartodb_services/cartodb_services/mapzen/geocoder.py
|
Python
|
bsd-3-clause
| 5,270
|
from PyQt5.QtCore import Qt, pyqtSignal
from PyQt5.QtGui import QCursor
from PyQt5.QtWidgets import QAction, QMenu, QComboBox
class DropDown(QComboBox):
"""Dropdown for selecting profiles"""
deleteItem = pyqtSignal()
def __init__(self, parent=None):
super(DropDown, self).__init__(parent=parent)
self.setDuplicatesEnabled(False)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.contextMenu)
def contextMenu(self, event):
menu = QMenu(self)
delete_action = QAction('Delete profile')
delete_action.triggered.connect(self.delete_option)
menu.addAction(delete_action)
menu.exec(QCursor.pos())
def delete_option(self):
self.deleteItem.emit()
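# Editor's usage sketch (requires a running QApplication; the slot wiring
# below is one plausible way an owner would react to the signal):
#
#     dropdown = DropDown()
#     dropdown.deleteItem.connect(
#         lambda: dropdown.removeItem(dropdown.currentIndex()))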
|
Thomasedv/Grabber
|
Modules/dropdown_widget.py
|
Python
|
gpl-3.0
| 789
|
#! /usr/bin/python -tt
import nose
from rhuilib.util import *
from rhuilib.rhui_testcase import *
from rhuilib.rhuimanager import *
from rhuilib.rhuimanager_cds import *
from rhuilib.rhuimanager_client import *
from rhuilib.rhuimanager_repo import *
from rhuilib.rhuimanager_sync import *
from rhuilib.rhuimanager_identity import *
from rhuilib.rhuimanager_users import *
from rhuilib.rhuimanager_entitlements import *
class test_workflow_simple(RHUITestcase, RHUI_has_RH_cert):
def _setup(self):
'''[Simple Workflow setup] Do initial rhui-manager run'''
RHUIManager.initial_run(self.rs.Instances["RHUA"][0])
def _test(self):
'''[Simple Workflow test] Add cdses '''
for cds in self.rs.Instances["CDS"]:
RHUIManagerCds.add_cds(self.rs.Instances["RHUA"][0], "Cluster1", cds.private_hostname)
'''[Simple Workflow test] Remove rhui configuration rpm from client '''
for cli in self.rs.Instances["CLI"]:
Util.remove_conf_rpm(cli)
'''[Simple Workflow test] Create custom repo '''
RHUIManagerRepo.add_custom_repo(self.rs.Instances["RHUA"][0], "repo1")
RHUIManagerRepo.add_custom_repo(self.rs.Instances["RHUA"][0], "repo2")
'''[Simple Workflow test] Associate custom repo with cluster '''
RHUIManagerCds.associate_repo_cds(self.rs.Instances["RHUA"][0], "Cluster1", ["repo1", "repo2"])
'''[Simple Workflow test] Upload content to custom repo '''
RHUIManagerRepo.upload_content(self.rs.Instances["RHUA"][0], ["repo1", "repo2"], "/etc/rhui/confrpm")
'''[Simple Workflow test] Sync cdses '''
cdslist = []
for cds in self.rs.Instances["CDS"]:
cdslist.append(cds.private_hostname)
RHUIManagerSync.sync_cds(self.rs.Instances["RHUA"][0], cdslist)
'''[Simple Workflow test] Generate entitlement certificate '''
RHUIManagerClient.generate_ent_cert(self.rs.Instances["RHUA"][0], "Cluster1", ["repo1"], "cert-repo1", "/root/", validity_days="", cert_pw=None)
#'''[Simple Workflow test] Sync cluster '''
##it's impossible to do cluster sync and individual cds sync at the same moment
##RHUIManagerSync.sync_cluster(self.rs.Instances["RHUA"][0],["Cluster1"])
'''[Simple Workflow test] Upload RH content certificate '''
RHUIManagerEntitlements.upload_content_cert(self.rs.Instances["RHUA"][0], self.cert)
#'''[Simple Workflow test] Add rh repo by product '''
## skipping as this repos are really big
## RHUIManagerRepo.add_rh_repo_by_product(self.rs.Instances["RHUA"][0], ["Red Hat Enterprise Linux 6 Server - Supplementary from RHUI \(RPMs\)", "Red Hat Enterprise Linux 6 Server from RHUI \(RPMs\)"])
'''[Simple Workflow test] Add rh repo by repo '''
RHUIManagerRepo.add_rh_repo_by_repo(self.rs.Instances["RHUA"][0], ["Red Hat Update Infrastructure 2 \(RPMs\) \(6Server-x86_64\)"])
## '''[Simple Workflow test] Add all rh products '''
## RHUIManagerRepo.add_rh_repo_all(self.rs.Instances["RHUA"][0])
'''[Simple Workflow test] Syncronize repo '''
self._sync_repo(["Red Hat Update Infrastructure 2 \(RPMs\) \(6Server-x86_64\)"])
self._sync_cds([self.rs.Instances["CDS"][0].private_hostname])
'''[Simple Workflow test] Associate rh repo with cds '''
RHUIManagerCds.associate_repo_cds(self.rs.Instances["RHUA"][0], "Cluster1", ["Red Hat Update Infrastructure 2 \(RPMs\) \(6Server-x86_64\)"])
'''[Simple Workflow test] Generate entitlement certificate with rh content '''
RHUIManagerClient.generate_ent_cert(self.rs.Instances["RHUA"][0], "Cluster1", ["repo1", "Red Hat Update Infrastructure 2 \(RPMs\)"], "cert-repo1", "/root/", validity_days="", cert_pw=None)
'''[Simple Workflow test] Create configuration rpm '''
RHUIManagerClient.create_conf_rpm(self.rs.Instances["RHUA"][0], "Cluster1", self.rs.Instances["CDS"][0].private_hostname, "/root", "/root/cert-repo1.crt", "/root/cert-repo1.key", "repo1", "3.0")
'''[Simple Workflow test] Install configuration rpm to client'''
Util.install_rpm_from_rhua(self.rs.Instances["RHUA"][0], self.rs.Instances["CLI"][0], "/root/repo1-3.0/build/RPMS/noarch/repo1-3.0-1.noarch.rpm")
'''[Simple Workflow test] Generate new identity '''
RHUIManagerIdentity.generate_new(self.rs.Instances["RHUA"][0])
'''[Simple Workflow test] Change password '''
RHUIManagerUsers.change_password(self.rs.Instances["RHUA"][0], "admin", "admin2")
RHUIManagerUsers.change_password(self.rs.Instances["RHUA"][0], "admin", "admin")
def _cleanup(self):
'''[Simple Workflow cleanup] Remove cdses '''
for cds in self.rs.Instances["CDS"]:
RHUIManagerCds.delete_cds(self.rs.Instances["RHUA"][0], "Cluster1", [cds.private_hostname])
'''[Simple Workflow cleanup] Delete custom repos '''
RHUIManagerRepo.delete_repo(self.rs.Instances["RHUA"][0], ["repo1", "repo2", "Red Hat Update Infrastructure 2 \(RPMs\) \(6Server-x86_64\)"])
'''[Simple Workflow cleanup] Remove rh certificate '''
RHUIManager.remove_rh_certs(self.rs.Instances["RHUA"][0])
if __name__ == "__main__":
nose.run(defaultTest=__name__, argv=[__file__, '-v'])
|
RedHatQE/rhui-testing-tools
|
rhui-tests/test_rhui_workflow_simple.py
|
Python
|
gpl-3.0
| 5,318
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for dateutils library."""
__revision__ = "$Id$"
import unittest
import datetime
import time
import calendar
import dateutils
from invenio.config import CFG_SITE_LANGS
from invenio.testutils import make_test_suite, run_test_suite
if 'en' in CFG_SITE_LANGS:
lang_english_configured = True
else:
lang_english_configured = False
if 'sk' in CFG_SITE_LANGS:
lang_slovak_configured = True
else:
lang_slovak_configured = False
class ConvertFromDateCVSTest(unittest.TestCase):
"""
Testing conversion of CVS dates.
"""
def test_convert_good_cvsdate(self):
"""dateutils - conversion of good CVS dates"""
# here we have to use '$' + 'Date...' here, otherwise the CVS
# commit would erase this time format to put commit date:
datecvs = "$" + "Date: 2006/09/21 10:07:22 $"
datestruct_beginning_expected = (2006, 9, 21, 10, 7, 22)
self.assertEqual(dateutils.convert_datecvs_to_datestruct(datecvs)[:6],
datestruct_beginning_expected)
# here we have to use '$' + 'Date...' here, otherwise the CVS
# commit would erase this time format to put commit date:
datecvs = "$" + "Id: dateutils_tests.py,v 1.6 2007/02/14 18:33:02 tibor Exp $"
datestruct_beginning_expected = (2007, 2, 14, 18, 33, 02)
self.assertEqual(dateutils.convert_datecvs_to_datestruct(datecvs)[:6],
datestruct_beginning_expected)
def test_convert_bad_cvsdate(self):
"""dateutils - conversion of bad CVS dates"""
# here we have to use '$' + 'Date...' here, otherwise the CVS
# commit would erase this time format to put commit date:
datecvs = "$" + "Date: 2006/AA/21 10:07:22 $"
datestruct_beginning_expected = (0, 0, 0, 0, 0, 0)
self.assertEqual(dateutils.convert_datecvs_to_datestruct(datecvs)[:6],
datestruct_beginning_expected)
class ConvertIntoDateGUITest(unittest.TestCase):
"""
Testing conversion into dategui with various languages.
"""
if lang_english_configured:
def test_convert_good_to_dategui_en(self):
"""dateutils - conversion of good text date into English GUI date"""
datetext = "2006-07-16 18:36:01"
dategui_en_expected = "16 Jul 2006, 18:36"
dategui_en = dateutils.convert_datetext_to_dategui(datetext,
ln='en')
self.assertEqual(dategui_en, dategui_en_expected)
if lang_slovak_configured:
def test_convert_good_to_dategui_sk(self):
"""dateutils - conversion of good text date into Slovak GUI date"""
datetext = "2006-07-16 18:36:01"
dategui_sk_expected = "16 júl 2006, 18:36"
dategui_sk = dateutils.convert_datetext_to_dategui(datetext,
ln='sk')
self.assertEqual(dategui_sk, dategui_sk_expected)
if lang_english_configured:
def test_convert_bad_to_dategui_en(self):
"""dateutils - conversion of bad text date into English GUI date"""
datetext = "2006-02-AA 18:36:01"
dategui_sk_expected = "N/A"
dategui_sk = dateutils.convert_datetext_to_dategui(datetext,
ln='en')
self.assertEqual(dategui_sk, dategui_sk_expected)
if lang_slovak_configured:
def test_convert_bad_to_dategui_sk(self):
"""dateutils - conversion of bad text date into Slovak GUI date"""
datetext = "2006-02-AA 18:36:01"
dategui_sk_expected = "nepríst."
dategui_sk = dateutils.convert_datetext_to_dategui(datetext,
ln='sk')
self.assertEqual(dategui_sk, dategui_sk_expected)
class ParseRuntimeLimitTest(unittest.TestCase):
"""
Testing the runtime limit parser used by BibSched to determine task
runtimes and also by the errorlib.register_emergency function to parse the
CFG_SITE_EMERGENCY_EMAIL_ADDRESSES configuration
"""
def test_parse_runtime_limit_day_abbr_plus_times(self):
"""dateutils - parse runtime using a weekday abbreviation plus a time range"""
limit = 'Sun 8:00-16:00'
day = datetime.date.today()
now = datetime.time()
while day.weekday() != calendar.SUNDAY:
day -= datetime.timedelta(1)
present_from = datetime.datetime.combine(day, now.replace(hour=8))
present_to = datetime.datetime.combine(day, now.replace(hour=16))
future_from = present_from + datetime.timedelta(days=7)
future_to = present_to + datetime.timedelta(days=7)
expected = (
(time.mktime(present_from.timetuple()), time.mktime(present_to.timetuple())),
(time.mktime(future_from.timetuple()), time.mktime(future_to.timetuple())),
)
result = dateutils.parse_runtime_limit(limit)
self.assertEqual(expected, result)
def test_parse_runtime_limit_day_plus_times(self):
"""dateutils - parse runtime using a weekday plus a time range"""
limit = 'Thursday 18:00-22:00'
day = datetime.date.today()
now = datetime.time()
while day.weekday() != calendar.THURSDAY:
day -= datetime.timedelta(1)
present_from = datetime.datetime.combine(day, now.replace(hour=18))
present_to = datetime.datetime.combine(day, now.replace(hour=22))
future_from = present_from + datetime.timedelta(days=7)
future_to = present_to + datetime.timedelta(days=7)
expected = (
(time.mktime(present_from.timetuple()), time.mktime(present_to.timetuple())),
(time.mktime(future_from.timetuple()), time.mktime(future_to.timetuple())),
)
result = dateutils.parse_runtime_limit(limit)
self.assertEqual(expected, result)
def test_parse_runtime_limit_day_abbr_only(self):
"""dateutils - parse runtime using just a weekday abbreviation"""
limit = 'Tue'
day = datetime.date.today()
now = datetime.time()
while day.weekday() != calendar.TUESDAY:
day -= datetime.timedelta(1)
present_from = datetime.datetime.combine(day, now.replace(hour=0))
present_to = present_from + datetime.timedelta(days=1)
future_from = present_from + datetime.timedelta(days=7)
future_to = present_to + datetime.timedelta(days=7)
expected = (
(time.mktime(present_from.timetuple()), time.mktime(present_to.timetuple())),
(time.mktime(future_from.timetuple()), time.mktime(future_to.timetuple())),
)
result = dateutils.parse_runtime_limit(limit)
self.assertEqual(expected, result)
def test_parse_runtime_limit_times_only(self):
"""dateutils - parse runtime using just a time range"""
limit = '06:00-18:00'
day = datetime.date.today()
now = datetime.time()
present_from = datetime.datetime.combine(day, now.replace(hour=6))
present_to = datetime.datetime.combine(day, now.replace(hour=18))
future_from = present_from + datetime.timedelta(days=1)
future_to = present_to + datetime.timedelta(days=1)
expected = (
(time.mktime(present_from.timetuple()), time.mktime(present_to.timetuple())),
(time.mktime(future_from.timetuple()), time.mktime(future_to.timetuple())),
)
result = dateutils.parse_runtime_limit(limit)
self.assertEqual(expected, result)
TEST_SUITE = make_test_suite(ConvertFromDateCVSTest,
ConvertIntoDateGUITest,
ParseRuntimeLimitTest,)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
|
pombredanne/invenio
|
modules/miscutil/lib/dateutils_tests.py
|
Python
|
gpl-2.0
| 8,759
|
import re
import math
from qgis.core import QgsPointXY, QgsCoordinateReferenceSystem, QgsCoordinateTransform, QgsProject
from .util import epsg4326
epsg32661 = QgsCoordinateReferenceSystem("EPSG:32661")
epsg32761 = QgsCoordinateReferenceSystem("EPSG:32761")
class UpsException(Exception):
pass
def upsParse(ups_str):
ups = ups_str.strip().upper()
m = re.match(r'^([ABYZ])\s+(\d+\.?\d*)\s*M\s*E\s+(\d+\.?\d*)\s*M\s*N', ups)
if m is None:
m = re.match(r'^([ABYZ])\s+(\d+\.?\d*)\s+(\d+\.?\d*)', ups)
if m is None:
m = re.match(r'^([ABYZ])(\d+\.?\d*)E(\d+\.?\d*)N', ups)
if m:
m = m.groups()
if len(m) == 3:
letter = m[0]
easting = float(m[1])
northing = float(m[2])
return(letter, easting, northing)
raise UpsException('Invalid UPS Coordinate')
def ups2Point(ups, crs=epsg4326):
letter, easting, northing = upsParse(ups)
if letter == 'A' or letter == 'B':
epsg = epsg32761
else:
epsg = epsg32661
pt = QgsPointXY(easting, northing)
upstrans = QgsCoordinateTransform(epsg, crs, QgsProject.instance())
return(upstrans.transform(pt))
def isUps(ups):
try:
l, e, n = upsParse(ups)
except Exception:
return(False)
return(True)
def latLon2Ups(lat, lon, precision=0, format=0):
if lon < -180 or lon > 360:
return("")
if lat < 83.5 and lat > -79.5:
return("")
if lat > 90 or lat < -90:
return("")
if lon > 180:
lon -= 360
if lat >= 83.5:
epsg = epsg32661
if lon < 0:
letter = 'Y'
else:
letter = 'Z'
else:
epsg = epsg32761
if lon < 0:
letter = 'A'
else:
letter = 'B'
upstrans = QgsCoordinateTransform(epsg4326, epsg, QgsProject.instance())
pt = QgsPointXY(lon, lat)
upspt = upstrans.transform(pt)
if format == 0:
msg = '{} {:.{prec}f}mE {:.{prec}f}mN'.format(letter, upspt.x(), upspt.y(), prec=precision)
else:
msg = '{}{:.{prec}f}E{:.{prec}f}N'.format(letter, upspt.x(), upspt.y(), prec=precision)
return(msg)
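# Editor's illustration: upsParse accepts all three spellings matched above,
# and isUps simply reports whether parsing succeeds.
#
#     upsParse('Z 2500000mE 1850000mN')   # -> ('Z', 2500000.0, 1850000.0)
#     upsParse('A 2000000 2000000')       # -> ('A', 2000000.0, 2000000.0)
#     isUps('not a coordinate')           # -> False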
|
NationalSecurityAgency/qgis-latlontools-plugin
|
ups.py
|
Python
|
gpl-2.0
| 2,187
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class DeploymentPropertiesExtended(Model):
"""Deployment properties with additional details.
:param provisioning_state: The state of the provisioning.
:type provisioning_state: str
:param correlation_id: The correlation ID of the deployment.
:type correlation_id: str
:param timestamp: The timestamp of the template deployment.
:type timestamp: datetime
:param outputs: Key/value pairs that represent deploymentoutput.
:type outputs: object
:param providers: The list of resource providers needed for the
deployment.
:type providers:
list[~azure.mgmt.resource.resources.v2016_02_01.models.Provider]
:param dependencies: The list of deployment dependencies.
:type dependencies:
list[~azure.mgmt.resource.resources.v2016_02_01.models.Dependency]
:param template: The template content. Use only one of Template or
TemplateLink.
:type template: object
:param template_link: The URI referencing the template. Use only one of
Template or TemplateLink.
:type template_link:
~azure.mgmt.resource.resources.v2016_02_01.models.TemplateLink
:param parameters: Deployment parameters. Use only one of Parameters or
ParametersLink.
:type parameters: object
:param parameters_link: The URI referencing the parameters. Use only one
of Parameters or ParametersLink.
:type parameters_link:
~azure.mgmt.resource.resources.v2016_02_01.models.ParametersLink
:param mode: The deployment mode. Possible values include: 'Incremental',
'Complete'
:type mode: str or
~azure.mgmt.resource.resources.v2016_02_01.models.DeploymentMode
:param debug_setting: The debug setting of the deployment.
:type debug_setting:
~azure.mgmt.resource.resources.v2016_02_01.models.DebugSetting
"""
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'outputs': {'key': 'outputs', 'type': 'object'},
'providers': {'key': 'providers', 'type': '[Provider]'},
'dependencies': {'key': 'dependencies', 'type': '[Dependency]'},
'template': {'key': 'template', 'type': 'object'},
'template_link': {'key': 'templateLink', 'type': 'TemplateLink'},
'parameters': {'key': 'parameters', 'type': 'object'},
'parameters_link': {'key': 'parametersLink', 'type': 'ParametersLink'},
'mode': {'key': 'mode', 'type': 'DeploymentMode'},
'debug_setting': {'key': 'debugSetting', 'type': 'DebugSetting'},
}
def __init__(self, provisioning_state=None, correlation_id=None, timestamp=None, outputs=None, providers=None, dependencies=None, template=None, template_link=None, parameters=None, parameters_link=None, mode=None, debug_setting=None):
super(DeploymentPropertiesExtended, self).__init__()
self.provisioning_state = provisioning_state
self.correlation_id = correlation_id
self.timestamp = timestamp
self.outputs = outputs
self.providers = providers
self.dependencies = dependencies
self.template = template
self.template_link = template_link
self.parameters = parameters
self.parameters_link = parameters_link
self.mode = mode
self.debug_setting = debug_setting
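# Editor's usage sketch (field values are hypothetical): the model is a plain
# msrest container, so it can be built keyword-by-keyword.
#
#     props = DeploymentPropertiesExtended(
#         provisioning_state='Succeeded',
#         mode='Incremental',
#         outputs={'siteUri': {'type': 'String',
#                              'value': 'https://contoso.example'}})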
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-resource/azure/mgmt/resource/resources/v2016_02_01/models/deployment_properties_extended.py
|
Python
|
mit
| 3,942
|
# -*- coding: utf-8 -*-
""" This is an integration "unit" test. """
from canaimagnulinux.web.policy.config import DEPENDENCIES as ZOPE2_STYLE_PRODUCTS
from canaimagnulinux.web.policy.config import PROFILE_ID
from canaimagnulinux.web.policy.config import PROJECTNAME
from canaimagnulinux.web.policy.testing import FUNCTIONAL_TESTING
from canaimagnulinux.web.policy.testing import INTEGRATION_TESTING
from plone import api
from plone.app.testing import TEST_USER_ID
from plone.app.testing import setRoles
from plone.testing.z2 import Browser
import unittest
DEPENDENCIES = [
'ArchAddOn',
'canaimagnulinux.web.theme',
'cioppino.twothumbs',
'collective.cover',
'collective.disqus',
'collective.facebook.portlets',
'collective.geo.usersmap',
'collective.googleanalytics',
'collective.googlenews',
'collective.nitf',
'collective.opendata',
'collective.polls',
'collective.twitter.portlets',
'collective.twitter.tweet',
'collective.upload',
'Doormat',
'FacultyStaffDirectory',
'PloneFormGen',
'PloneServicesCenter',
'PloneSoftwareCenter',
'plone.api',
'plone.contentratings',
'plone.app.caching',
'plone.app.ldap',
'sc.social.like',
] + ZOPE2_STYLE_PRODUCTS
class BaseTestCase(unittest.TestCase):
""" Base test case to be used by other tests. """
layer = INTEGRATION_TESTING
profile = PROFILE_ID
def setUp(self):
self.portal = self.layer['portal']
self.qi = self.portal['portal_quickinstaller']
self.wt = self.portal['portal_workflow']
self.st = self.portal['portal_setup']
class InstallTestCase(BaseTestCase):
""" Ensure product is properly installed. """
def test_installed(self):
""" This method test the default GenericSetup profile of this package. """
self.assertTrue(self.qi.isProductInstalled(PROJECTNAME))
def test_dependencies_installed(self):
""" This method test that dependencies products are installed of this package. """
# for p in DEPENDENCIES:
# self.assertTrue(
# self.qi.isProductInstalled(p), u'{0} not installed'.format(p))
expected = set(DEPENDENCIES)
installed = self.qi.listInstalledProducts(showHidden=True)
installed = set([product['id'] for product in installed])
result = sorted(expected - installed)
        self.assertFalse(
result,
'These dependencies are not installed: ' + ', '.join(result)
)
def test_version(self):
""" This method test that last version for profile of this package. """
self.assertEqual(
self.st.getLastVersionForProfile(PROFILE_ID), (u'1000',))
class DependenciesSettingsTestCase(BaseTestCase):
""" Ensure package dependencies are properly configured. """
def test_collective_upload_settings(self):
expected = 'gif, jpeg, jpg, png, pdf, txt, ods, odt, odp, html, csv, zip, tgz, bz2'
self.assertEqual(
api.portal.get_registry_record(
'collective.upload.interfaces.IUploadSettings.upload_extensions'),
expected
)
self.assertEqual(
api.portal.get_registry_record(
'collective.upload.interfaces.IUploadSettings.max_file_size'),
10485760
)
self.assertEqual(
api.portal.get_registry_record(
'collective.upload.interfaces.IUploadSettings.resize_max_width'),
3872
)
self.assertEqual(
api.portal.get_registry_record(
'collective.upload.interfaces.IUploadSettings.resize_max_height'),
3872
)
def test_nitf_settings(self):
self.assertEqual(
api.portal.get_registry_record(
'collective.nitf.controlpanel.INITFSettings.available_genres'),
[u'Actuality', u'Anniversary', u'Current', u'Exclusive', u'From the Scene', u'Interview', u'Opinion', u'Profile']
)
self.assertEqual(
api.portal.get_registry_record(
'collective.nitf.controlpanel.INITFSettings.available_sections'),
set([u'Canaima', u'Novedades', u'Comunidad', u'Soporte y Aprendizaje', u'Soluciones', u'Descargas'])
)
self.assertEqual(
api.portal.get_registry_record(
'collective.nitf.controlpanel.INITFSettings.default_genre'),
u'Current'
)
self.assertEqual(
api.portal.get_registry_record(
'collective.nitf.controlpanel.INITFSettings.default_section'),
u'Novedades'
)
def test_nitf_google_news(self):
self.assertEqual(
api.portal.get_registry_record(
'collective.googlenews.interfaces.GoogleNewsSettings.portal_types'),
['collective.nitf.content']
)
def test_google_analytics_tool(self):
""" Test that the portal_analytics tool is created. """
analytics_tool = api.portal.get_tool('portal_analytics')
self.assertNotEqual(analytics_tool, None)
def test_geo_settings(self):
import decimal
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.settings.interfaces.IGeoSettings.default_layers'),
[u'osm']
)
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.settings.interfaces.IGeoSettings.zoom'),
decimal.Decimal(6)
)
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.settings.interfaces.IGeoSettings.longitude'),
decimal.Decimal(6.423750000000001)
)
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.settings.interfaces.IGeoSettings.latitude'),
decimal.Decimal(-66.58973000000024)
)
def test_geo_usersmap_settings(self):
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.usersmap.interfaces.IUsersMapPreferences.title'),
u'Mapa de usuarios del portal'
)
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.usersmap.interfaces.IUsersMapPreferences.description'),
u'Este mapa muestra las ubicaciones de los usuarios del portal.'
)
self.assertEqual(
api.portal.get_registry_record(
'collective.geo.usersmap.interfaces.IUsersMapPreferences.user_properties'),
[u'description', u'email']
)
def test_disqus_settings(self):
self.assertEqual(
api.portal.get_registry_record(
'collective.disqus.interfaces.IDisqusSettings.activated'),
True
)
self.assertEqual(
api.portal.get_registry_record(
'collective.disqus.interfaces.IDisqusSettings.developer_mode'),
False
)
self.assertEqual(
api.portal.get_registry_record(
'collective.disqus.interfaces.IDisqusSettings.forum_short_name'),
'canaimagnulinux'
)
self.assertEqual(
api.portal.get_registry_record(
'collective.disqus.interfaces.IDisqusSettings.access_token'),
'15796f758e24404bb965521fe85f9aa8'
)
self.assertEqual(
api.portal.get_registry_record(
'collective.disqus.interfaces.IDisqusSettings.app_public_key'),
'iroSK4ud2I2sLMYAqMNI56tqI1fjbCm3XQ8T5HhZGTSQfAnj9m7yBNr9GqcycA8M'
)
self.assertEqual(
api.portal.get_registry_record(
'collective.disqus.interfaces.IDisqusSettings.app_secret_key'),
'q3xfSJDNYvi5uwMq9Y6Whyu3xy6luxKN9PFsruE2X2qMz98xuX23GK7sS5KnIAtb'
)
class NonInstallableTestCase(unittest.TestCase):
"""Ensure non installable packages are available."""
layer = FUNCTIONAL_TESTING
def setUp(self):
self.portal = self.layer['portal']
def test_opendata_available(self):
portal_url = self.portal.absolute_url()
browser = Browser(self.layer['app'])
        opendata_url = '{0}/{1}'.format(portal_url, 'open-data')
browser.open(opendata_url)
# self.assertIn('Open Data', browser.contents)
        apidata_url = '{0}/{1}'.format(portal_url, 'apidata/cms/site_info')
browser.open(apidata_url)
self.assertIn('Portal Canaima GNU/Linux', browser.contents)
class UninstallTestCase(BaseTestCase):
""" Ensure product is properly uninstalled. """
def setUp(self):
BaseTestCase.setUp(self)
setRoles(self.portal, TEST_USER_ID, ['Manager'])
self.qi.uninstallProducts(products=[PROJECTNAME])
def test_uninstalled(self):
""" This method test the uninstall GenericSetup profile of this package. """
self.assertFalse(self.qi.isProductInstalled(PROJECTNAME))
def test_dependencies_uninstalled(self):
""" This method test that dependencies products are uninstalled. """
pass
|
CanaimaGNULinux/canaimagnulinux.web.policy
|
canaimagnulinux/web/policy/tests/test_setup.py
|
Python
|
gpl-3.0
| 9,188
|
"""Tests for AccuWeather."""
|
pschmitt/home-assistant
|
tests/components/accuweather/__init__.py
|
Python
|
apache-2.0
| 29
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'rich_string03.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': 1})
italic = workbook.add_format({'italic': 1})
worksheet.write('A1', 'Foo', bold)
worksheet.write('A2', 'Bar', italic)
worksheet.write_rich_string('A3', bold, 'abc', 'defg')
workbook.close()
self.assertExcelEqual()
|
jkyeung/XlsxWriter
|
xlsxwriter/test/comparison/test_rich_string03.py
|
Python
|
bsd-2-clause
| 1,229
|
# coding: utf8
# Copyright 2014-2015 Vincent Jacques <vincent@vincent-jacques.net>
from ..conversion import ConversionUnitTests
from ..expressions import ConditionExpressionUnitTests
from ..return_types import (
TableDescriptionUnitTests,
AttributeDefinitionUnitTests,
GlobalSecondaryIndexDescriptionUnitTests,
ProjectionUnitTests,
ProvisionedThroughputDescriptionUnitTests,
KeySchemaElementUnitTests,
LocalSecondaryIndexDescriptionUnitTests,
ConsumedCapacityUnitTests,
CapacityUnitTests,
ItemCollectionMetricsUnitTests,
)
from ..batch_get_item import BatchGetItemUnitTests, BatchGetItemResponseUnitTests
from ..batch_write_item import BatchWriteItemUnitTests, BatchWriteItemResponseUnitTests
from ..create_table import CreateTableUnitTests, CreateTableResponseUnitTests
from ..delete_item import DeleteItemUnitTests, DeleteItemResponseUnitTests
from ..delete_table import DeleteTableUnitTests, DeleteTableResponseUnitTests
from ..describe_table import DescribeTableUnitTests, DescribeTableResponseUnitTests
from ..get_item import GetItemUnitTests, GetItemResponseUnitTests
from ..list_tables import ListTablesUnitTests, ListTablesResponseUnitTests
from ..put_item import PutItemUnitTests, PutItemResponseUnitTests
from ..query import QueryUnitTests, QueryResponseUnitTests
from ..scan import ScanUnitTests, ScanResponseUnitTests
from ..update_item import UpdateItemUnitTests, UpdateItemResponseUnitTests
from ..update_table import UpdateTableUnitTests, UpdateTableResponseUnitTests
|
jacquev6/LowVoltage
|
LowVoltage/actions/tests/unit.py
|
Python
|
mit
| 1,526
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
Union.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import QgsFeatureRequest, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.ProcessingLog import ProcessingLog
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class Union(GeoAlgorithm):
INPUT = 'INPUT'
INPUT2 = 'INPUT2'
OUTPUT = 'OUTPUT'
def processAlgorithm(self, progress):
vlayerA = dataobjects.getObjectFromUri(self.getParameterValue(Union.INPUT))
vlayerB = dataobjects.getObjectFromUri(self.getParameterValue(Union.INPUT2))
GEOS_EXCEPT = True
FEATURE_EXCEPT = True
vproviderA = vlayerA.dataProvider()
fields = vector.combineVectorFields(vlayerA, vlayerB)
names = [field.name() for field in fields]
ProcessingLog.addToLog(ProcessingLog.LOG_INFO, str(names))
writer = self.getOutputFromName(Union.OUTPUT).getVectorWriter(fields,
vproviderA.geometryType(), vproviderA.crs())
inFeatA = QgsFeature()
inFeatB = QgsFeature()
outFeat = QgsFeature()
indexA = vector.spatialindex(vlayerB)
indexB = vector.spatialindex(vlayerA)
count = 0
nElement = 0
featuresA = vector.features(vlayerA)
nFeat = len(featuresA)
for inFeatA in featuresA:
progress.setPercentage(nElement / float(nFeat) * 50)
nElement += 1
lstIntersectingB = []
geom = QgsGeometry(inFeatA.geometry())
atMapA = inFeatA.attributes()
intersects = indexA.intersects(geom.boundingBox())
if len(intersects) < 1:
try:
outFeat.setGeometry(geom)
outFeat.setAttributes(atMapA)
writer.addFeature(outFeat)
except:
# This really shouldn't happen, as we haven't
# edited the input geom at all
raise GeoAlgorithmExecutionException(
self.tr('Feature exception while computing union'))
else:
for id in intersects:
count += 1
request = QgsFeatureRequest().setFilterFid(id)
inFeatB = vlayerB.getFeatures(request).next()
atMapB = inFeatB.attributes()
tmpGeom = QgsGeometry(inFeatB.geometry())
if geom.intersects(tmpGeom):
int_geom = geom.intersection(tmpGeom)
lstIntersectingB.append(tmpGeom)
if int_geom is None:
# There was a problem creating the intersection
raise GeoAlgorithmExecutionException(
self.tr('Geometry exception while computing '
'intersection'))
else:
int_geom = QgsGeometry(int_geom)
if int_geom.wkbType() == 0:
                                # Intersection produced different geometry types
temp_list = int_geom.asGeometryCollection()
for i in temp_list:
if i.type() == geom.type():
int_geom = QgsGeometry(i)
try:
outFeat.setGeometry(int_geom)
attrs = []
attrs.extend(atMapA)
attrs.extend(atMapB)
outFeat.setAttributes(attrs)
writer.addFeature(outFeat)
except Exception, err:
raise GeoAlgorithmExecutionException(
self.tr('Feature exception while computing union'))
try:
# the remaining bit of inFeatA's geometry
# if there is nothing left, this will just silently fail and we're good
diff_geom = QgsGeometry( geom )
if len(lstIntersectingB) != 0:
intB = QgsGeometry.unaryUnion(lstIntersectingB)
diff_geom = diff_geom.difference(intB)
if diff_geom.wkbType() == 0:
temp_list = diff_geom.asGeometryCollection()
for i in temp_list:
if i.type() == geom.type():
diff_geom = QgsGeometry(i)
outFeat.setGeometry(diff_geom)
outFeat.setAttributes(atMapA)
writer.addFeature(outFeat)
except Exception, err:
raise GeoAlgorithmExecutionException(
self.tr('Feature exception while computing union'))
length = len(vproviderA.fields())
featuresA = vector.features(vlayerB)
nFeat = len(featuresA)
for inFeatA in featuresA:
progress.setPercentage(nElement / float(nFeat) * 100)
add = False
geom = QgsGeometry(inFeatA.geometry())
diff_geom = QgsGeometry(geom)
atMap = [None] * length
atMap.extend(inFeatA.attributes())
intersects = indexB.intersects(geom.boundingBox())
if len(intersects) < 1:
try:
outFeat.setGeometry(geom)
outFeat.setAttributes(atMap)
writer.addFeature(outFeat)
except Exception, err:
raise GeoAlgorithmExecutionException(
self.tr('Feature exception while computing union'))
else:
for id in intersects:
request = QgsFeatureRequest().setFilterFid(id)
inFeatB = vlayerA.getFeatures(request).next()
atMapB = inFeatB.attributes()
tmpGeom = QgsGeometry(inFeatB.geometry())
try:
if diff_geom.intersects(tmpGeom):
add = True
diff_geom = QgsGeometry(
diff_geom.difference(tmpGeom))
else:
                            # This only happens if the bounding box
                            # intersects, but the geometry doesn't
outFeat.setGeometry(diff_geom)
outFeat.setAttributes(atMap)
writer.addFeature(outFeat)
except Exception, err:
raise GeoAlgorithmExecutionException(
self.tr('Geometry exception while computing intersection'))
if add:
try:
outFeat.setGeometry(diff_geom)
outFeat.setAttributes(atMap)
writer.addFeature(outFeat)
except Exception, err:
                    FEATURE_EXCEPT = False
                    raise err
nElement += 1
del writer
if not GEOS_EXCEPT:
ProcessingLog.addToLog(ProcessingLog.LOG_WARNING,
self.tr('Geometry exception while computing intersection'))
if not FEATURE_EXCEPT:
ProcessingLog.addToLog(ProcessingLog.LOG_WARNING,
self.tr('Feature exception while computing intersection'))
def defineCharacteristics(self):
self.name = 'Union'
self.group = 'Vector overlay tools'
self.addParameter(ParameterVector(Union.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addParameter(ParameterVector(Union.INPUT2,
self.tr('Input layer 2'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addOutput(OutputVector(Union.OUTPUT, self.tr('Union')))
|
herow/planning_qgis
|
python/plugins/processing/algs/qgis/Union.py
|
Python
|
gpl-2.0
| 9,266
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Exercise 5 - List 2 of the "Python para Zumbis" course
Write a program that receives 3 values, determines the largest and smallest numbers, and displays them on screen.
"""
a = float(raw_input('Digite um valor para A? '))
b = float(raw_input('Digite um valor para B? '))
c = float(raw_input('Digite um valor para C? '))
# Largest
if (a > b and a > c):
    maior = 'A é o maior'
elif (b > c):
    maior = 'B é o maior'
else:
    maior = 'C é o maior'
# Smallest
if (a < b and a < c):
menor = 'A é o menor'
elif (b < c):
menor = 'B é o menor'
else:
    menor = 'C é o menor'
print maior, menor
|
kaiocesar/Listasexerciciosppz
|
lista_2/exercicio_5.py
|
Python
|
mit
| 667
|
class BiDi:
"""
A wee utility class for keeping bi-directional mappings, like field
constants in protocols. Names are attributes on the object, dict-like
access maps values to names:
CONST = BiDi(a=1, b=2)
assert CONST.a == 1
assert CONST.get_name(1) == "a"
"""
def __init__(self, **kwargs):
self.names = kwargs
self.values = {}
for k, v in kwargs.items():
self.values[v] = k
if len(self.names) != len(self.values):
raise ValueError("Duplicate values not allowed.")
def __getattr__(self, k):
if k in self.names:
return self.names[k]
raise AttributeError("No such attribute: %s", k)
def get_name(self, n, default=None):
return self.values.get(n, default)
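# e.g. BiDi(a=1, b=2).get_name(3, default="?") -> "?"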
|
dwfreed/mitmproxy
|
mitmproxy/types/bidi.py
|
Python
|
mit
| 821
|
import logging
#
# This class can be used to talk to a MOH/BFBC2 game server. It can send commands to the server, and receive responses.
# When a receive is performed, the code will wait until a full packet is available.
# It is not suitable for sending commands and receiving events at the same time.
#
import socket
from packet import RConPacket
###################################################################################
class SynchronousCommandConnection:
def __init__(self):
self.socket = None
def connect(self, host, port):
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((host, port))
self.socket.setblocking(1)
self.clientSequence = 0
self.receiveBuffer = b''
self.sentSequence = None
    def isconnected(self):
        return self.socket is not None
def disconnect(self):
        if self.socket is not None:
self.socket.close()
self.socket = None
# Wait until the local receive buffer contains a full packet (appending data from the network socket),
# then split receive buffer into first packet and remaining buffer data
def receive(self, what="response"):
while True:
while not RConPacket.containsCompletePacket(self.receiveBuffer):
self.receiveBuffer += self.socket.recv(4096)
[packet, packetSize] = RConPacket.decode(self.receiveBuffer)
self.receiveBuffer = self.receiveBuffer[packetSize:len(self.receiveBuffer)]
if what == "response" and packet.isResponse and packet.sequence == self.sentSequence:
return packet.words
elif what == "any":
response = RConPacket.createClientResponse(packet.sequence, packet.words)
self.socket.send(response.encode())
self.sentSequence = self.clientSequence
self.clientSequence = (self.clientSequence + 1) & 0x3fffffff
return packet.words
def send(self, words):
request = RConPacket.createClientRequest(self.clientSequence, words)
self.socket.send(request.encode())
self.sentSequence = self.clientSequence
self.clientSequence = (self.clientSequence + 1) & 0x3fffffff
|
rocketblast/rcon
|
src/rocketblast/rcon/frostbite/connection.py
|
Python
|
agpl-3.0
| 2,327
|
#!/usr/bin/python
import mock
import sys
class DesktopTest(mock.TestCase):
def setUp(self):
self.setupModules(["_isys", "block", "ConfigParser"])
self.fs = mock.DiskIO()
self.fs.open('/mnt/sysimage/etc/inittab', 'w').write('id:5:initdefault:')
self.fs.open('/mnt/sysimage/etc/sysconfig/desktop', 'w').write('')
import pyanaconda.desktop
pyanaconda.desktop.log = mock.Mock()
pyanaconda.desktop.open = self.fs.open
def tearDown(self):
self.tearDownModules()
def set_default_run_level_1_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
self.assertRaises(RuntimeError, dskt.setDefaultRunLevel, 1)
self.assertRaises(RuntimeError, dskt.setDefaultRunLevel, 2)
self.assertRaises(RuntimeError, dskt.setDefaultRunLevel, 4)
def set_default_run_level_2_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultRunLevel(3)
self.assertEqual(dskt.runlevel, 3)
dskt.setDefaultRunLevel(5)
self.assertEqual(dskt.runlevel, 5)
def get_default_run_level_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
self.assertEqual(dskt.getDefaultRunLevel(), dskt.runlevel)
def set_get_default_run_level_1_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultRunLevel(3)
self.assertEqual(dskt.getDefaultRunLevel(), 3)
def set_get_default_run_level_2_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultRunLevel(5)
self.assertEqual(dskt.getDefaultRunLevel(), 5)
def set_default_desktop_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultDesktop('desktop')
self.assertEqual(dskt.info['DESKTOP'], 'desktop')
def get_default_desktop_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.info['DESKTOP'] = 'foobar'
ret = dskt.getDefaultDesktop()
self.assertEqual(ret, 'foobar')
def set_get_default_desktop_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultDesktop('foo')
ret = dskt.getDefaultDesktop()
self.assertEqual(ret, 'foo')
def write_1_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.write()
self.assertEqual(self.fs['/mnt/sysimage/etc/inittab'],
'id:3:initdefault:')
def write_2_test(self):
import pyanaconda.desktop
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultRunLevel(5)
dskt.write()
self.assertEqual(self.fs['/mnt/sysimage/etc/inittab'],
'id:5:initdefault:')
def write_3_test(self):
import pyanaconda.desktop
pyanaconda.desktop.os = mock.Mock()
pyanaconda.desktop.os.path.isdir.return_value = True
dskt = pyanaconda.desktop.Desktop()
dskt.setDefaultDesktop('foo')
dskt.write()
self.assertEqual(self.fs['/mnt/sysimage/etc/inittab'],
'id:3:initdefault:')
self.assertEqual(self.fs['/mnt/sysimage/etc/sysconfig/desktop'],
'DESKTOP="foo"\n')
|
mattias-ohlsson/anaconda
|
tests/pyanaconda_test/desktop_test.py
|
Python
|
gpl-2.0
| 3,463
|
from ..bpch2netCDF import *
import logging
import pytest
logging.basicConfig(filename='test.log', level=logging.DEBUG)
logging.info('Starting GEOSChem test.')
def test_get_folder():
return
logging.info('GEOSChem test complete')
|
tsherwen/AC_tools
|
AC_tools/Tests/test_generic.py
|
Python
|
mit
| 236
|
"""Usage:
episode new <project_name>
episode server <port>
episode build
episode deploy
episode watch
episode -h | --help | --version
"""
__version__ = '0.1.0'
import os
import re
import sys
import yaml
import time
import math
import uuid
import shutil
import subprocess
import http.server
from datetime import datetime
from jinja2 import Environment, FileSystemLoader
from markdown2 import Markdown
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from docopt import docopt
EPISODE_PATH = os.path.dirname(__file__)
SEED_PATH = os.path.join(EPISODE_PATH, "seed")
TMP_ROOT_PATH = "/tmp"
TMP_FOLDER_PREFIX = "episode"
PAGINATION_PATH = "p"
TEMPLATE_PATH = "templates"
PAGE_FILE_EXT = [".md", ".markdown"]
CONTENT_CONF = {
# content_type(path): attribute
"posts": "posts",
"pages": "pages"
}
md_pattern = re.compile(r"(\n)*(?P<meta>(.*?\n)*?)\-+\n*?")
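# e.g. md_pattern matches a leading meta block such as (illustration):
#   title: hello
#   template: post
#   ----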
post_name_pattern = re.compile(r"(?P<year>(\d{4}))\-(?P<month>(\d{1,2}))\-(?P<day>(\d{1,2}))\-(?P<alias>(.+))")
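# e.g. "2015-01-01-test-post" -> year "2015", month "01", day "01", alias "test-post"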
md = Markdown(extras=["fenced-code-blocks", "tables"])
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in range(0, len(l), n):
yield l[i:i+n]
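# e.g. list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]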
class Page:
"""
Generate a Page(or post) object, contains meta info and content.
if obj.type is 'post', an example of page obj's useful data can be shown as follows:
obj.data = {
"title": "i'm a title",
"date": "2015-01-01",
"content": "This is the content",
"path": "/2015/01/01/test-post.html",
"alias": "test-post",
"template": "post",
"url": "http://example.com/2015/01/01/test-post.html"
}
"""
    def __init__(self, file, config, path_template="", date_template="%Y/%m/%d"):
        self._path_template = path_template
self._date_template = date_template
self.config = config
self._filename, self._filename_extension = os.path.splitext(os.path.basename(file))
self.date = None
self.formatted_date = None
self._file = open(file, "r").read()
self._parse_file_name()
self._parse_file()
@property
def path(self):
if self.type == "post":
return self.formatted_date
else:
return ""
def _parse_file_name(self):
matched = post_name_pattern.match(self._filename)
if matched:
year = int(matched.group("year"))
month = int(matched.group("month"))
day = int(matched.group("day"))
self.date = datetime(year=year, month=month, day=day)
self.alias = matched.group("alias")
self.formatted_date = self.date.strftime(self._date_template)
self.type = "post"
else:
self.alias = self._filename
self.type = "page"
def _parse_file(self):
matched = md_pattern.match(self._file)
meta = yaml.load(matched.group("meta"))
self.data = {
"title": meta.get("title"),
"content": md.convert(self._file[matched.end():]),
"path": self.path,
"alias": self.alias,
"template": ".".join([meta["template"], "html"]) if meta.get("template") else "default.html",
"url": os.path.join(self.config.get("url"), self.path, self.alias),
}
if meta.get("date"):
self.data["date"] = meta.get("date")
else:
self.data["date"] = self.date
class GitRepo:
def __init__(self, repo_address=None, dst=None):
self.repo_address = repo_address
self.dst = dst
def clone(self):
subprocess.check_call(["git", "clone", self.repo_address, self.dst])
def add_and_commit(self, message="Update posts"):
subprocess.check_call(["git", "add", "."])
try:
subprocess.check_call(["git", "commit", "-m", message])
except subprocess.CalledProcessError as e:
pass
def checkout_or_create(self, branch="source"):
try:
subprocess.check_call(["git", "checkout", branch])
except subprocess.CalledProcessError as e:
subprocess.check_call(["git", "checkout", "-b", branch])
def branch(self, branch):
try:
subprocess.check_call(["git", "checkout", branch])
except subprocess.CalledProcessError as e:
subprocess.check_call(["git", "checkout", "-b", branch])
def push(self, branch, force=False):
if force:
subprocess.check_call(["git", "push", "origin", branch, "-f"])
else:
subprocess.check_call(["git", "push", "origin", branch])
def pull(self, branch):
subprocess.check_call(["git", "pull", "origin", branch])
def fetch(self):
subprocess.check_call(["git", "fetch"])
def add_remote(self, address):
subprocess.check_call(["git", "remote", "add", "origin", address])
def init(self):
subprocess.check_call(["git", "init"])
class Initializer:
"""
To initialize a site project.
"""
def __init__(self, project_name):
if os.path.exists(project_name):
print("folder already exists.")
return
shutil.copytree(SEED_PATH, project_name)
os.chdir(project_name)
for content_type in CONTENT_CONF.keys():
os.mkdir(content_type)
self.git_repo = GitRepo()
self.git_repo.init()
self.git_repo.checkout_or_create()
print("Done! Enjoy it!")
class Episode:
"""
The main obj of episode static site generator.
the build workflow:
1. cleaning the working folders.
2. copy static files into site folders.
3. walking markdown files.
4. parsing them as Page objs and storing them in memory.
5. rendering pages into html templates, generating html files.
6. creating path out of the site's structure, putting html files into the correct destinations.
"""
def __init__(self):
self.posts = []
self.pages = []
tmp_build_folder = "_".join([TMP_FOLDER_PREFIX, str(uuid.uuid1())]) # todo: temp folder name needs to be considered
self.destination = os.path.join(TMP_ROOT_PATH, tmp_build_folder)
self.project_path = os.getcwd()
self._get_config()
self.env = Environment(loader=FileSystemLoader(self._get_path(TEMPLATE_PATH)))
self.env.globals["site"] = self.config
self.env.globals["pages"] = self.pages
self.git_repo = GitRepo(self.config.get("deploy_repo"))
def _get_path(self, folder):
return os.path.join(self.project_path, folder)
def _get_config(self):
config_path = os.path.join(self.project_path, "config.yaml")
stream = open(config_path, "r")
self.config = yaml.load(stream)
def _get_template_by_name(self, template_name):
return self.env.get_template("{}.html".format(template_name))
def _walk_files(self, content_type):
for f in os.listdir(content_type):
if os.path.splitext(f)[-1] in PAGE_FILE_EXT:
file_obj = Page(os.path.join(content_type, f),
config=self.config)
getattr(self, CONTENT_CONF[content_type]).append(file_obj.data)
self.posts.sort(key=lambda x: x['date'], reverse=True)
def _render_html_file(self, page):
target_path = os.path.join(self.destination, page.get("path"))
target_file = os.path.join(target_path, page.get("alias")) + ".html"
if not os.path.exists(target_path):
os.makedirs(target_path)
with open(target_file, 'w') as f:
f.write(self.env.get_template(page.get("template")).render(page))
def _render_pagination(self):
pagination = self.config.get("paginate")
pagination_folder = os.path.join(self.destination, PAGINATION_PATH)
post_count = len(self.posts)
total_pages = math.ceil(post_count/pagination)
previous_page = next_page = None
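        # Resulting layout, for illustration: page 1 -> index.html,
        # page i (i > 1) -> p/<i>.html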
if post_count > pagination:
os.makedirs(pagination_folder)
for index, posts in enumerate(chunks(self.posts, pagination), start=1):
try:
if index == 1:
f = open(os.path.join(self.destination, "index.html"), 'w')
else:
f = open(os.path.join(pagination_folder, "{}.html".format(str(index))), 'w')
if index == 2:
previous_page = "index.html"
elif index > 1:
                    previous_page = os.path.join(PAGINATION_PATH, "{}.html".format(str(index-1)))
if index < total_pages:
next_page = os.path.join(PAGINATION_PATH, "{}.html".format(str(index+1)))
f.write(self.env.get_template("index.html").render({
"pagination_posts": posts,
"current_page": index,
"total_pages": total_pages,
"previous_page": previous_page,
"next_page": next_page
}))
finally:
f.close()
def _render(self):
for content_type in CONTENT_CONF.keys():
for item in getattr(self, content_type):
self._render_html_file(item)
self._render_pagination()
def _clean_folder(self):
for path in os.listdir('.'):
if not path.startswith('.'):
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def _copy_files(self, from_path, to_path, only_dirs=False):
for item in os.listdir(from_path):
src = os.path.join(from_path, item)
dst = os.path.join(to_path, item)
if os.path.isdir(src):
shutil.copytree(src, dst)
elif not only_dirs:
shutil.copy2(src, dst)
def build(self):
start = time.clock()
os.makedirs(self.destination)
self._copy_files(TEMPLATE_PATH, self.destination, only_dirs=True)
for path in CONTENT_CONF.keys():
if os.path.isdir(path):
self._walk_files(path)
self._render()
print("Done!", "Result path:")
print(self.destination)
end = time.clock()
print("run time: {time}s".format(time=end-start))
def deploy(self):
if not self.config.get("deploy_repo"):
return print("not specify deploy repo.")
self.git_repo.checkout_or_create('source')
self.git_repo.add_and_commit()
self.git_repo.push('source', force=True) # todo: if conflict?
self.build()
self.git_repo.checkout_or_create("master")
self._clean_folder()
self._copy_files(self.destination, os.getcwd())
self.git_repo.add_and_commit()
self.git_repo.push("master")
self.git_repo.checkout_or_create("source")
def server(self, port=8000):
self.build()
print("start server")
os.chdir(self.destination)
Handler = http.server.SimpleHTTPRequestHandler
httpd = http.server.HTTPServer(("", port), Handler)
print("Serving at http://127.0.0.1:{port}".format(port=port))
httpd.serve_forever()
def watch(self):
self.build()
event_handler = FileChangeEventHandler(self)
observer = Observer()
observer.schedule(event_handler, self.config.get("root"), recursive=False)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
class FileChangeEventHandler(FileSystemEventHandler): # todo: watching
def __init__(self, episode):
self.episode = episode
def on_created(self, event):
self.episode.build()
print("==create====src"+event.src_path)
def on_deleted(self, event):
self.episode.build()
print("==delete====src"+event.src_path)
def on_modified(self, event):
self.episode.build()
print("===modified===src"+event.src_path)
def start_server(port=8000):
    print("start server")
    Episode().server(port)
def start_watch():
print("start watch")
Episode().watch()
def start_build():
print("start build")
Episode().build()
def start_new(project_name):
print("create a new project")
Initializer(project_name)
def start_deploy():
print("deploy to github")
Episode().deploy()
def command_options(arguments):
if arguments["new"]:
start_new(arguments["<project_name>"])
elif arguments["build"]:
start_build()
elif arguments["server"]:
start_server()
elif arguments["watch"]:
start_watch()
elif arguments["deploy"]:
start_deploy()
def run():
arguments = docopt(__doc__, version=__version__)
command_options(arguments)
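# Usage sketch (run from a project directory; the commands mirror the docopt
# usage string at the top of this module):
#
#     $ episode new myblog
#     $ episode build
#     $ episode server 8000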
|
Windfarer/episode
|
episode/__init__.py
|
Python
|
mit
| 12,989
|
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_user
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower user.
description:
- Create, update, or destroy Ansible Tower users. See
U(https://www.ansible.com/tower) for an overview.
options:
username:
description:
- The username of the user.
required: True
first_name:
description:
- First name of the user.
required: False
default: null
last_name:
description:
- Last name of the user.
required: False
default: null
email:
description:
- Email address of the user.
required: True
password:
description:
- Password of the user.
required: False
default: null
superuser:
description:
      - User is a system wide administrator.
required: False
default: False
auditor:
description:
- User is a system wide auditor.
required: False
default: False
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Add tower user
tower_user:
username: jdoe
password: foobarbaz
email: jdoe@example.org
first_name: John
last_name: Doe
state: present
tower_config_file: "~/tower_cli.cfg"
'''
from ansible.module_utils.ansible_tower import tower_argument_spec, tower_auth_config, tower_check_mode, HAS_TOWER_CLI
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def main():
argument_spec = tower_argument_spec()
argument_spec.update(dict(
username=dict(required=True),
first_name=dict(),
last_name=dict(),
password=dict(no_log=True),
email=dict(required=True),
superuser=dict(type='bool', default=False),
auditor=dict(type='bool', default=False),
state=dict(choices=['present', 'absent'], default='present'),
))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
username = module.params.get('username')
first_name = module.params.get('first_name')
last_name = module.params.get('last_name')
password = module.params.get('password')
email = module.params.get('email')
superuser = module.params.get('superuser')
auditor = module.params.get('auditor')
state = module.params.get('state')
json_output = {'username': username, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
user = tower_cli.get_resource('user')
try:
if state == 'present':
result = user.modify(username=username, first_name=first_name, last_name=last_name,
email=email, password=password, is_superuser=superuser,
is_auditor=auditor, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = user.delete(username=username)
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update the user: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
|
ravibhure/ansible
|
lib/ansible/modules/web_infrastructure/ansible_tower/tower_user.py
|
Python
|
gpl-3.0
| 4,154
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-02 09:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Discipline',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('title', models.CharField(default='', max_length=256, verbose_name='Название дисциплины')),
('description', models.TextField(blank=True, max_length=16384, null=True, verbose_name='Короткое описание')),
('labor', models.PositiveIntegerField(default=0, verbose_name='зачётных единиц')),
('period', models.IntegerField(default=1, verbose_name='Период освоения в модуле')),
('form', models.CharField(choices=[('e', 'Экзамен'), ('z', 'Зачет')], default='z', max_length=1, verbose_name='Форма контроля')),
('results_text', models.TextField(blank=True, default='', max_length=16384, verbose_name='Результаты обучения')),
('uni_uid', models.CharField(blank=True, max_length=256, null=True)),
('uni_discipline', models.CharField(blank=True, max_length=256, null=True)),
('uni_number', models.CharField(blank=True, max_length=256, null=True)),
('uni_section', models.CharField(blank=True, max_length=256, null=True)),
('uni_file', models.CharField(blank=True, max_length=256, null=True)),
],
options={
'verbose_name': 'дисциплина',
'verbose_name_plural': 'дисциплины',
},
),
migrations.CreateModel(
name='Semester',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('year', models.PositiveIntegerField(default=1970, verbose_name='Год поступления')),
('admission_semester', models.PositiveIntegerField(default=0, verbose_name='Семестр поступления')),
('training_semester', models.PositiveIntegerField(default=0, verbose_name='Семестр изучения')),
('discipline', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='disciplines.Discipline')),
],
options={
'verbose_name': 'семестр изучения дисциплины',
'verbose_name_plural': 'семестры изучения дисциплины',
},
),
migrations.CreateModel(
name='TrainingTerms',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('title', models.CharField(default='', max_length=256, verbose_name='Наименование срока обучения')),
('limit', models.PositiveIntegerField(default=0, verbose_name='Лимит ЗЕ в год')),
],
options={
'verbose_name': 'срок обучения',
'verbose_name_plural': 'сроки обучения',
},
),
]
|
ITOO-UrFU/open-programs
|
open_programs/apps/disciplines/migrations/0001_initial.py
|
Python
|
unlicense
| 4,047
|
# -*- coding: utf-8 -*-
u"""
.. module:: test_contactform
"""
from django.core import mail
from django.test import Client
from django.test import TestCase
from apps.volontulo.tests import common
class TestPages(TestCase):
u"""Class responsible for testing contact forms."""
test_admin_email = test_admin_username = 'admin@admin.com'
test_admin_password = 'admin_password'
@classmethod
def setUpTestData(cls):
# admin user
cls.admin = common.initialize_administrator(
username=cls.test_admin_username, email=cls.test_admin_email,
password=cls.test_admin_password
)
# volunteer user - totally useless
cls.volunteer = common.initialize_empty_volunteer()
# organization user - no offers
common.initialize_empty_organization()
# volunteer user - offers, organizations
common.initialize_filled_volunteer_and_organization()
def setUp(self):
u"""Set up each test."""
self.client = Client()
# pylint: disable=invalid-name
def test__get_contact_with_administrator_form_by_anonymous(self):
u"""Request contact with administrator form by anonymous user."""
response = self.client.get('/contact', follow=True)
self.assertRedirects(
response,
'/login?next=/contact',
302,
200,
)
self.assertEqual(len(response.redirect_chain), 1)
self.assertEqual(
response.redirect_chain[0],
('http://testserver/login?next=/contact', 302),
)
# pylint: disable=invalid-name
def test__get_contact_with_administrator_form_by_volunteer(self):
u"""Request contact with administrator form by volunteer user."""
self.client.post('/login', {
'email': u'volunteer1@example.com',
'password': 'volunteer1',
})
response = self.client.get('/contact')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'contact.html')
self.assertTemplateUsed(response, 'contact_form.html')
self.assertContains(response, u'Formularz kontaktowy')
# pylint: disable=no-member
self.assertIn('contact_form', response.context)
# pylint: disable=invalid-name
def test__get_contact_with_administrator_form_by_organization(self):
u"""Request contact with administrator form by organization user."""
self.client.post('/login', {
'email': u'organization1@example.com',
'password': 'organization1',
})
response = self.client.get('/contact')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'contact.html')
self.assertTemplateUsed(response, 'contact_form.html')
self.assertContains(response, u'Formularz kontaktowy')
# pylint: disable=no-member
self.assertIn('contact_form', response.context)
# pylint: disable=invalid-name
def test__post_contact_with_administrator_form_by_anonymous(self):
u"""Post to contact with administrator form by anonymous user."""
response = self.client.get('/contact', follow=True)
self.assertRedirects(
response,
'/login?next=/contact',
302,
200,
)
self.assertEqual(len(response.redirect_chain), 1)
self.assertEqual(
response.redirect_chain[0],
('http://testserver/login?next=/contact', 302),
)
# pylint: disable=invalid-name
def test__contact_with_admin_form_by_volunteer_val_error(self):
u"""Post to contact with administrator form by volunteer user
assuming validation error."""
self.client.post('/login', {
'email': u'volunteer1@example.com',
'password': 'volunteer1',
})
form_params = {
'applicant': 'VOLUNTEER',
'administrator': self.admin.id,
'name': u'',
'email': u'',
'phone_no': u'',
'message': u'',
}
response = self.client.post(
'/contact',
form_params,
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'contact.html')
self.assertTemplateUsed(response, 'contact_form.html')
self.assertContains(response, u'Formularz kontaktowy')
# pylint: disable=no-member
self.assertIn('contact_form', response.context)
self.assertContains(response, u'Proszę poprawić błędy w formularzu:')
self.assertEqual(len(mail.outbox), 0)
def test__contact_with_admin_form_by_volunteer(self):
u"""Post to contact with administrator form by volunteer user"""
self.client.post('/login', {
'email': u'volunteer1@example.com',
'password': 'volunteer1',
})
form_params = {
'applicant': 'VOLUNTEER',
'administrator': self.admin.id,
'name': u'Bull Pillman',
'email': u'pull.billman@example.com',
'phone_no': u'+48 123 123 123',
'message': u"My crime is that of curiosity."
}
response = self.client.post(
'/contact',
form_params,
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'contact.html')
self.assertTemplateUsed(response, 'contact_form.html')
self.assertContains(response, u'Formularz kontaktowy')
# pylint: disable=no-member
self.assertIn('contact_form', response.context)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, u'Kontakt z administratorem')
self.assertContains(response, u'Email został wysłany.')
# pylint: disable=invalid-name
def test__contact_with_admin_form_by_organization_val_error(self):
u"""
Post to contact with administrator form by organization user
validation error.
"""
self.client.post('/login', {
'email': u'organization1@example.com',
'password': 'organization1',
})
# incorrect params
form_params = {
'applicant': 1,
'administrator': 1,
'name': u'',
'email': u'',
'phone_no': u'',
'message': u'',
}
response = self.client.post(
'/contact',
form_params,
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'contact.html')
self.assertTemplateUsed(response, 'contact_form.html')
self.assertContains(response, u'Formularz kontaktowy')
# pylint: disable=no-member
self.assertIn('contact_form', response.context)
self.assertEqual(len(mail.outbox), 0)
self.assertContains(response, u'Proszę poprawić błędy w formularzu:')
def test__contact_with_admin_form_by_organization_val_success(self):
u"""
Post to contact with administrator form by organization user
validation success.
"""
self.client.post('/login', {
'email': self.admin.email,
'password': self.test_admin_password
})
# correct params
form_params = {
'applicant': 'ORGANIZATION',
'administrator': self.admin.id,
'name': u'Bull Pillman',
'email': u'pull.billman@example.com',
'phone_no': u'+48 123 123 123',
'message': u"My crime is that of curiosity."
}
response = self.client.post(
'/contact',
form_params,
follow=True
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'contact.html')
self.assertTemplateUsed(response, 'contact_form.html')
self.assertContains(response, u'Formularz kontaktowy')
# pylint: disable=no-member
self.assertIn('contact_form', response.context)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, u'Kontakt z administratorem')
self.assertContains(response, u'Email został wysłany.')
|
mkwiat79/volontulo
|
apps/volontulo/tests/views/test_contactform.py
|
Python
|
mit
| 8,389
|
# Copyright (c) 2012-2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
# Giacomo Gabrielli
from m5.params import *
from m5.proxy import *
from m5.SimObject import SimObject
class ArmISA(SimObject):
type = 'ArmISA'
cxx_class = 'ArmISA::ISA'
cxx_header = "arch/arm/isa.hh"
system = Param.System(Parent.any, "System this ISA object belongs to")
midr = Param.UInt32(0x410fc0f0, "MIDR value")
# See section B4.1.93 - B4.1.94 of the ARM ARM
#
# !ThumbEE | !Jazelle | Thumb | ARM
# Note: ThumbEE is disabled for now since we don't support CP14
# config registers and jumping to ThumbEE vectors
id_pfr0 = Param.UInt32(0x00000031, "Processor Feature Register 0")
# !Timer | Virti | !M Profile | TrustZone | ARMv4
id_pfr1 = Param.UInt32(0x00001011, "Processor Feature Register 1")
# See section B4.1.89 - B4.1.92 of the ARM ARM
# VMSAv7 support
id_mmfr0 = Param.UInt32(0x10201103, "Memory Model Feature Register 0")
id_mmfr1 = Param.UInt32(0x00000000, "Memory Model Feature Register 1")
# no HW access | WFI stalling | ISB and DSB |
# all TLB maintenance | no Harvard
id_mmfr2 = Param.UInt32(0x01230000, "Memory Model Feature Register 2")
# SuperSec | Coherent TLB | Bcast Maint |
# BP Maint | Cache Maint Set/way | Cache Maint MVA
id_mmfr3 = Param.UInt32(0x02102211, "Memory Model Feature Register 3")
# See section B4.1.84 of ARM ARM
# All values are latest for ARMv7-A profile
id_isar0 = Param.UInt32(0x02101111, "Instruction Set Attribute Register 0")
id_isar1 = Param.UInt32(0x02112111, "Instruction Set Attribute Register 1")
id_isar2 = Param.UInt32(0x21232141, "Instruction Set Attribute Register 2")
id_isar3 = Param.UInt32(0x01112131, "Instruction Set Attribute Register 3")
id_isar4 = Param.UInt32(0x10010142, "Instruction Set Attribute Register 4")
id_isar5 = Param.UInt32(0x00000000, "Instruction Set Attribute Register 5")
fpsid = Param.UInt32(0x410430a0, "Floating-point System ID Register")
# [31:0] is implementation defined
id_aa64afr0_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Auxiliary Feature Register 0")
# Reserved for future expansion
id_aa64afr1_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Auxiliary Feature Register 1")
# 1 CTX CMPs | 2 WRPs | 2 BRPs | !PMU | !Trace | Debug v8-A
id_aa64dfr0_el1 = Param.UInt64(0x0000000000101006,
"AArch64 Debug Feature Register 0")
# Reserved for future expansion
id_aa64dfr1_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Debug Feature Register 1")
# !CRC32 | !SHA2 | !SHA1 | !AES
id_aa64isar0_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Instruction Set Attribute Register 0")
# Reserved for future expansion
id_aa64isar1_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Instruction Set Attribute Register 1")
# 4K | 64K | !16K | !BigEndEL0 | !SNSMem | !BigEnd | 8b ASID | 40b PA
id_aa64mmfr0_el1 = Param.UInt64(0x0000000000f00002,
"AArch64 Memory Model Feature Register 0")
# Reserved for future expansion
id_aa64mmfr1_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Memory Model Feature Register 1")
# !GICv3 CP15 | AdvSIMD | FP | !EL3 | !EL2 | EL1 (AArch64) | EL0 (AArch64)
# (no AArch32/64 interprocessing support for now)
id_aa64pfr0_el1 = Param.UInt64(0x0000000000000011,
"AArch64 Processor Feature Register 0")
# Reserved for future expansion
id_aa64pfr1_el1 = Param.UInt64(0x0000000000000000,
"AArch64 Processor Feature Register 1")
|
xiaoyuanW/gem5
|
src/arch/arm/ArmISA.py
|
Python
|
bsd-3-clause
| 5,654
|
import tkinter as tk
import datetime
from Data import Data
class Dates():
def fromToday(_days):
today = datetime.date.today()
date = today + datetime.timedelta(days=_days)
year, month, day = str(date.year), str(date.month), str(date.day)
if len(month) == 1:
month = '0' + month
if len(day) == 1:
day = '0' + day
return year + '-' + month + '-' + day
def isoToLocal(isodate):
year = int(isodate[0:4])
month = int(isodate[5:7])
day = int(isodate[8:10])
date = datetime.date(year, month, day)
localdate = date.strftime('%d-%m-%Y')
return localdate
def localToIso(localdate):
day = int(localdate[:2])
month = int(localdate[3:5])
year = int(localdate[6:])
date = datetime.date(year, month, day)
return date.isoformat()
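# e.g. Dates.isoToLocal('2015-01-07') -> '07-01-2015'
#      Dates.localToIso('07-01-2015') -> '2015-01-07'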
from EditClient import EditClient
class Reminder(tk.Frame, Data):
def __init__(self, master=None):
super().__init__(master)
self.endThisWeek()
self.endAfterWeek()
    def endAfterWeek(self):
        global line
for days in range(8, 365):
#date = self.isoToLocal(self.endContract(days))
isodate = Dates.fromToday(_days=days)
date = Dates.isoToLocal(isodate)
contracts = self.contractByDate(date)
for cod in contracts:
self.get_contract(cod)
#self.clientByContract(str(cod[0]))
tk.Label(self.master, text=self.contract["name"]).grid(row=line, column=0)
                tk.Button(self.master, text=date,
                          command=lambda c_id=self.contract["id_client"]: self.open_client(c_id)).grid(row=line, column=1)
                line += 1
        return
def endThisWeek(self):
color = 'red'
global line
line = 0
for days in range(8):
#date = self.isoToLocal(self.endContract(days))
isodate = Dates.fromToday(_days=days)
date = Dates.isoToLocal(isodate)
contracts = self.contractByDate(date)
for cod in contracts:
self.get_contract(cod)
#self.clientByContract(str(cod[0]))
tk.Label(self.master, text=self.contract["name"], fg=color).grid(row=line, column=0)
tk.Button(self.master, text=date,
command= lambda c_id = self.contract["id_client"] : self.open_client(c_id), fg=color).grid(row=line,column=1)
line += 1
color='orange'
def open_client(self, id_client):
top = tk.Tk()
client = EditClient(top, id_client)
client.addButtons()
#top = tk.Tk()
#app = Warning(top)
#top.mainloop()
#data = Data()
#data.db.close()
|
victor-prado/broker-manager
|
broker-manager/Dates.py
|
Python
|
mit
| 2,321
|
from portality.dao import DomainObject
from portality.core import app
from portality.models.v2.bibjson import JournalLikeBibJSON
from portality.models.v2 import shared_structs
from portality.models.account import Account
from portality.lib import es_data_mapping, dates, coerce
from portality.lib.seamless import SeamlessMixin
from portality.lib.coerce import COERCE_MAP
from copy import deepcopy
from datetime import datetime, timedelta
import string, uuid
from unidecode import unidecode
JOURNAL_STRUCT = {
"objects": [
"admin", "index"
],
"structs": {
"admin": {
"fields": {
"in_doaj": {"coerce": "bool"},
"ticked": {"coerce": "bool"},
"current_application": {"coerce": "unicode"}
},
"lists": {
"related_applications": {"contains": "object"}
},
"structs": {
"related_applications": {
"fields": {
"application_id": {"coerce": "unicode"},
"date_accepted": {"coerce": "utcdatetime"},
"status": {"coerce": "unicode"}
}
},
"contact": {
"name": {"coerce": "unicode"},
"email": {"coerce": "unicode"}
}
}
},
"index": {
"fields": {
"publisher_ac": {"coerce": "unicode"},
"institution_ac": {"coerce": "unicode"}
}
}
}
}
class ContinuationException(Exception):
pass
class JournalLikeObject(SeamlessMixin, DomainObject):
@classmethod
def find_by_issn(cls, issns, in_doaj=None, max=10):
if not isinstance(issns, list):
issns = [issns]
q = JournalQuery()
q.find_by_issn(issns, in_doaj=in_doaj, max=max)
result = cls.query(q=q.query)
# create an array of objects, using cls rather than Journal, which means subclasses can use it too
records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])]
return records
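    # Usage sketch (hypothetical ISSN, on a concrete subclass such as Journal):
    #   journals = Journal.find_by_issn(["1234-5678"], in_doaj=True)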
@classmethod
def issns_by_owner(cls, owner, in_doaj=None):
q = IssnQuery(owner, in_doaj=in_doaj)
res = cls.query(q=q.query())
issns = [term.get("key") for term in res.get("aggregations", {}).get("issns", {}).get("buckets", [])]
return issns
@classmethod
def get_by_owner(cls, owner):
q = OwnerQuery(owner)
res = cls.query(q=q.query())
# get_by_owner() in application.py predates this, but I've made it an override because it does application stuff
records = [cls(**r.get("_source")) for r in res.get("hits", {}).get("hits", [])]
return records
@classmethod
def issns_by_query(cls, query):
issns = []
for j in cls.iterate(query):
issns += j.known_issns()
return issns
@classmethod
def find_by_journal_url(cls, url, in_doaj=None, max=10):
q = JournalURLQuery(url, in_doaj, max)
result = cls.query(q=q.query())
# create an array of objects, using cls rather than Journal, which means subclasses can use it too
records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])]
return records
@classmethod
def recent(cls, max=10):
q = RecentJournalsQuery(max)
result = cls.query(q=q.query())
# create an array of objects, using cls rather than Journal, which means subclasses can use it too
records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])]
return records
############################################
## base property methods
@property
def data(self):
return self.__seamless__.data
@property
def has_apc(self):
return self.__seamless__.get_single("bibjson.apc.has_apc")
@property
def id(self):
return self.__seamless__.get_single("id")
def set_id(self, id=None):
if id is None:
id = self.makeid()
self.__seamless__.set_with_struct("id", id)
def set_created(self, date=None):
if date is None:
date = dates.now()
self.__seamless__.set_with_struct("created_date", date)
@property
def created_date(self):
return self.__seamless__.get_single("created_date")
@property
def created_timestamp(self):
return self.__seamless__.get_single("created_date", coerce=coerce.to_datestamp())
def set_last_updated(self, date=None):
if date is None:
date = dates.now()
self.__seamless__.set_with_struct("last_updated", date)
@property
def last_updated(self):
return self.__seamless__.get_single("last_updated")
@property
def last_updated_timestamp(self):
return self.__seamless__.get_single("last_updated", coerce=coerce.to_datestamp())
def last_updated_since(self, days=0):
return self.last_updated_timestamp > (datetime.utcnow() - timedelta(days=days))
def set_last_manual_update(self, date=None):
if date is None:
date = dates.now()
self.__seamless__.set_with_struct("last_manual_update", date)
@property
def last_manual_update(self):
return self.__seamless__.get_single("last_manual_update")
@property
def last_manual_update_timestamp(self):
return self.__seamless__.get_single("last_manual_update", coerce=coerce.to_datestamp())
def has_been_manually_updated(self):
lmut = self.last_manual_update_timestamp
if lmut is None:
return False
return lmut > datetime.utcfromtimestamp(0)
def has_seal(self):
return self.__seamless__.get_single("admin.seal", default=False)
def set_seal(self, value):
self.__seamless__.set_with_struct("admin.seal", value)
@property
def owner(self):
return self.__seamless__.get_single("admin.owner")
def set_owner(self, owner):
self.__seamless__.set_with_struct("admin.owner", owner)
def remove_owner(self):
self.__seamless__.delete("admin.owner")
@property
def owner_account(self):
if self.owner:
return Account.pull(self.owner)
return None
@property
def editor_group(self):
return self.__seamless__.get_single("admin.editor_group")
def set_editor_group(self, eg):
self.__seamless__.set_with_struct("admin.editor_group", eg)
def remove_editor_group(self):
self.__seamless__.delete("admin.editor_group")
@property
def editor(self):
return self.__seamless__.get_single("admin.editor")
def set_editor(self, ed):
self.__seamless__.set_with_struct("admin.editor", ed)
def remove_editor(self):
self.__seamless__.delete('admin.editor')
@property
def contact(self):
return self.__seamless__.get_single("admin.contact")
@property
def contact_name(self):
return self.__seamless__.get_single("admin.contact.name")
@contact_name.setter
def contact_name(self, name):
self.__seamless__.set_with_struct("admin.contact.name", name)
@property
def contact_email(self):
return self.__seamless__.get_single("admin.contact.email")
@contact_email.setter
def contact_email(self, email):
self.__seamless__.set_with_struct("admin.contact.email", email)
def set_contact(self, name, email):
self.contact_name = name
self.contact_email = email
def remove_contact(self):
self.__seamless__.delete("admin.contact")
def add_note(self, note, date=None, id=None):
if not date:
date = dates.now()
obj = {"date": date, "note": note, "id": id}
self.__seamless__.delete_from_list("admin.notes", matchsub=obj)
if not id:
obj["id"] = uuid.uuid4()
self.__seamless__.add_to_list_with_struct("admin.notes", obj)
def remove_note(self, note):
self.__seamless__.delete_from_list("admin.notes", matchsub=note)
def set_notes(self, notes):
self.__seamless__.set_with_struct("admin.notes", notes)
def remove_notes(self):
self.__seamless__.delete("admin.notes")
@property
def notes(self):
return self.__seamless__.get_list("admin.notes")
@property
def ordered_notes(self):
"""Orders notes by newest first"""
notes = self.notes
clusters = {}
for note in notes:
if "date" not in note:
note["date"] = "1970-01-01T00:00:00Z" # this really means something is broken with note date setting, which needs to be fixed
if note["date"] not in clusters:
clusters[note["date"]] = [note]
else:
clusters[note["date"]].append(note)
ordered_keys = sorted(list(clusters.keys()), reverse=True)
ordered = []
for key in ordered_keys:
clusters[key].reverse()
ordered += clusters[key]
return ordered
def bibjson(self):
bj = self.__seamless__.get_single("bibjson")
if bj is None:
self.__seamless__.set_single("bibjson", {})
bj = self.__seamless__.get_single("bibjson")
return JournalLikeBibJSON(bj)
def set_bibjson(self, bibjson):
bibjson = bibjson.data if isinstance(bibjson, JournalLikeBibJSON) else bibjson
self.__seamless__.set_with_struct("bibjson", bibjson)
######################################################
## DEPRECATED METHODS
def known_issns(self):
"""
DEPRECATED
all issns this journal is known by
This used to mean "all issns the journal has ever been known by", but that definition has changed since
continuations have been separated from the single journal object model.
Now this is just a proxy for self.bibjson().issns()
"""
return self.bibjson().issns()
def get_latest_contact_name(self):
return self.contact_name
def get_latest_contact_email(self):
return self.contact_email
def add_contact(self, name, email):
self.set_contact(name, email)
def remove_contacts(self):
self.remove_contact()
######################################################
## internal utility methods
def _generate_index(self):
# the index fields we are going to generate
titles = []
subjects = []
schema_subjects = []
schema_codes = []
schema_codes_tree = []
classification = []
langs = []
country = None
license = []
publisher = []
has_seal = None
classification_paths = []
unpunctitle = None
asciiunpunctitle = None
continued = "No"
has_editor_group = "No"
has_editor = "No"
# the places we're going to get those fields from
cbib = self.bibjson()
# get the title out of the current bibjson
if cbib.title is not None:
titles.append(cbib.title)
if cbib.alternative_title:
titles.append(cbib.alternative_title)
# get the subjects and concatenate them with their schemes from the current bibjson
for subs in cbib.subject:
scheme = subs.get("scheme")
term = subs.get("term")
subjects.append(term)
schema_subjects.append(scheme + ":" + term)
classification.append(term)
if "code" in subs:
schema_codes.append(scheme + ":" + subs.get("code"))
# now expand the classification to hold all its parent terms too
additional = []
for c in classification:
tp = cbib.term_path(c)
if tp is not None:
additional += tp
classification += additional
# add the keywords to the non-schema subjects (but not the classification)
subjects += cbib.keywords
# get the bibjson object to convert the languages to the english form
langs = cbib.language_name()
# get the english name of the country
country = cbib.country_name()
# get the type of the licenses
for l in cbib.licences:
license.append(l.get("type"))
# deduplicate the lists
titles = list(set(titles))
subjects = list(set(subjects))
schema_subjects = list(set(schema_subjects))
classification = list(set(classification))
license = list(set(license))
schema_codes = list(set(schema_codes))
# determine if the seal is applied
has_seal = "Yes" if self.has_seal() else "No"
# get the full classification paths for the subjects
classification_paths = cbib.lcc_paths()
schema_codes_tree = cbib.lcc_codes_full_list()
# create an unpunctitle
if cbib.title is not None:
throwlist = string.punctuation + '\n\t'
unpunctitle = "".join(c for c in cbib.title if c not in throwlist).strip()
try:
asciiunpunctitle = unidecode(unpunctitle)
except:
asciiunpunctitle = unpunctitle
# record if this journal object is a continuation
if len(cbib.replaces) > 0 or len(cbib.is_replaced_by) > 0:
continued = "Yes"
if self.editor_group is not None:
has_editor_group = "Yes"
if self.editor is not None:
has_editor = "Yes"
# build the index part of the object
index = {}
if country is not None:
index["country"] = country
if has_seal:
index["has_seal"] = has_seal
if unpunctitle is not None:
index["unpunctitle"] = unpunctitle
if asciiunpunctitle is not None:
index["asciiunpunctitle"] = asciiunpunctitle
index["continued"] = continued
index["has_editor_group"] = has_editor_group
index["has_editor"] = has_editor
index["issn"] = cbib.issns()
if len(titles) > 0:
index["title"] = titles
if len(subjects) > 0:
index["subject"] = subjects
if len(schema_subjects) > 0:
index["schema_subject"] = schema_subjects
if len(classification) > 0:
index["classification"] = classification
if len(langs) > 0:
index["language"] = langs
if len(license) > 0:
index["license"] = license
if len(classification_paths) > 0:
index["classification_paths"] = classification_paths
if len(schema_codes) > 0:
index["schema_code"] = schema_codes
if len(schema_codes_tree) > 0:
index["schema_codes_tree"] = schema_codes_tree
self.__seamless__.set_with_struct("index", index)
class Journal(JournalLikeObject):
__type__ = "journal"
__SEAMLESS_STRUCT__ = [
shared_structs.JOURNAL_BIBJSON,
shared_structs.SHARED_JOURNAL_LIKE,
JOURNAL_STRUCT
]
__SEAMLESS_COERCE__ = COERCE_MAP
def __init__(self, **kwargs):
# FIXME: hack, to deal with ES integration layer being improperly abstracted
if "_source" in kwargs:
kwargs = kwargs["_source"]
# FIXME: I have taken this out for the moment, as I'm not sure it's what we should be doing
#if kwargs:
# self.add_autogenerated_fields(**kwargs)
super(Journal, self).__init__(raw=kwargs)
@classmethod
def add_autogenerated_fields(cls, **kwargs):
bib = kwargs["bibjson"]
if "apc" in bib and bib["apc"] != '':
bib["apc"]["has_apc"] = len(bib["apc"]["max"]) != 0
else:
bib["apc"] = {"has_apc": False}
if "deposit_policy" in bib and bib["deposit_policy"] != []:
bib["deposit_policy"]["has_policy"] = True
else:
##change made in https://github.com/DOAJ/doaj/commit/e507123f423fe16fd270744055da0129e2b32005
bib["deposit_policy"] = {"has_policy": False}
if "other_charges" in bib and bib["other_charges"] != '':
bib["other_charges"]["has_other_charges"] = bib["other_charges"]["url"] is not None
else:
bib["other_charges"] = {"has_other_charges": False}
if "copyright" in bib and bib["copyright"]["url"] != '':
bib["copyright"]["author_retains"] = bib["copyright"]["url"] is not None
else:
bib["copyright"] = {"author_retains": False}
if "pid_scheme" in bib and bib["pid_scheme"] != '':
bib["pid_scheme"]["has_pid_scheme"] = len(bib["pid_scheme"]["scheme"]) != 0
else:
bib["pid_scheme"] = {"has_pid_scheme": False}
if "preservation" in bib and bib["preservation"] != '':
bib["preservation"]["has_preservation"] = (len(bib["preservation"]) != 0 or
bib["national_library"] is not None)
else:
bib["preservation"] = {"has_preservation": True}
#####################################################
## Journal-specific data access methods
@classmethod
def all_in_doaj(cls, page_size=5000):
q = JournalQuery()
return cls.iterate(q.all_in_doaj(), page_size=page_size, wrap=True)
@classmethod
def find_by_publisher(cls, publisher, exact=True):
q = PublisherQuery(publisher, exact)
result = cls.query(q=q.query())
records = [Journal(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])]
return records
@classmethod
def find_by_title(cls, title):
q = TitleQuery(title)
result = cls.query(q=q.query())
records = [Journal(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])]
return records
@classmethod
def delete_selected(cls, query, articles=False, snapshot_journals=True, snapshot_articles=True):
if articles:
# list the issns of all the journals
issns = cls.issns_by_query(query)
# issue a delete request over all the articles by those issns
from portality.models import Article
Article.delete_by_issns(issns, snapshot=snapshot_articles)
# snapshot the journal record
if snapshot_journals:
js = cls.iterate(query, page_size=1000)
for j in js:
j.snapshot()
# finally issue a delete request against the journals
cls.delete_by_query(query)
def all_articles(self):
from portality.models import Article
return Article.find_by_issns(self.known_issns())
def article_stats(self):
from portality.models import Article
q = ArticleStatsQuery(self.known_issns())
data = Article.query(q=q.query())
hits = data.get("hits", {})
total = hits.get("total", {}).get('value', 0)
latest = None
if total > 0:
latest = hits.get("hits", [])[0].get("_source").get("created_date")
return {
"total": total,
"latest": latest
}
def mappings(self):
return es_data_mapping.create_mapping(self.__seamless_struct__.raw, MAPPING_OPTS)
############################################
## base property methods
@property
def toc_id(self):
id_ = self.bibjson().get_preferred_issn()
if not id_:
id_ = self.id
return id_
@property
def last_update_request(self):
related = self.related_applications
if len(related) == 0:
return None
        related = sorted(related, key=lambda x: x.get("date_accepted", "1970-01-01T00:00:00Z"), reverse=True)
        return related[0].get("date_accepted", "1970-01-01T00:00:00Z")
############################################################
## revision history methods
def snapshot(self):
from portality.models import JournalHistory
snap = deepcopy(self.data)
if "id" in snap:
snap["about"] = snap["id"]
del snap["id"]
if "index" in snap:
del snap["index"]
if "last_updated" in snap:
del snap["last_updated"]
if "created_date" in snap:
del snap["created_date"]
hist = JournalHistory(**snap)
hist.save()
#######################################################################
## Conversion methods
def make_continuation(self, type, eissn=None, pissn=None, title=None):
# check that the type is one we know. Must be either 'replaces' or 'is_replaced_by'
if type not in ["replaces", "is_replaced_by"]:
raise ContinuationException("type must be one of 'replaces' or 'is_replaced_by'")
if eissn is None and pissn is None:
raise ContinuationException("You must create a continuation with at least one issn")
# take a copy of the raw data for this journal, and the issns for this journal
raw_cont = deepcopy(self.data)
bibjson = self.bibjson()
issns = bibjson.issns()
cissns = []
# make a new instance of the journal - this will be our continuation
del raw_cont["id"]
del raw_cont["created_date"]
del raw_cont["last_updated"]
j = Journal(**raw_cont)
# ensure that the journal is NOT in doaj. That will be for the admin to decide
j.set_in_doaj(False)
# get a copy of the continuation's bibjson, then remove the existing issns
cbj = j.bibjson()
del cbj.eissn
del cbj.pissn
# also remove any existing continuation information
del cbj.replaces
del cbj.is_replaced_by
del cbj.discontinued_date
# now write the new identifiers
if eissn is not None and eissn != "":
cissns.append(eissn)
cbj.eissn = eissn
if pissn is not None and pissn != "":
cissns.append(pissn)
cbj.pissn = pissn
# update the title
if title is not None:
cbj.title = title
# now add the issns of the original journal in the appropriate field
#
# This is a bit confusing - because we're asking this of a Journal object, the relationship type we're asking
# for relates to this journal, not to the continuation we are creating. This means that when setting the
# new continuations properties, we have to do the opposite to what we do to the journal's properties
#
# "replaces" means that the current journal replaces the new continuation
if type == "replaces":
bibjson.replaces = cissns
cbj.is_replaced_by = issns
# "is_replaced_by" means that the current journal is replaced by the new continuation
elif type == "is_replaced_by":
bibjson.is_replaced_by = cissns
cbj.replaces = issns
# save this journal
self.save()
# save the continuation, and return a copy to the caller
j.save()
return j
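    # Usage sketch (hypothetical values): split off a successor journal,
    # i.e. this journal "is_replaced_by" the new continuation record:
    #   successor = j.make_continuation("is_replaced_by",
    #                                   eissn="1234-5678", title="New Title")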
####################################################
## admin data methods
def is_in_doaj(self):
return self.__seamless__.get_single("admin.in_doaj", default=False)
def set_in_doaj(self, value):
self.__seamless__.set_with_struct("admin.in_doaj", value)
def is_ticked(self):
return self.__seamless__.get_single("admin.ticked", default=False)
def set_ticked(self, ticked):
self.__seamless__.set_with_struct("admin.ticked", ticked)
@property
def current_application(self):
return self.__seamless__.get_single("admin.current_application")
def set_current_application(self, application_id):
self.__seamless__.set_with_struct("admin.current_application", application_id)
def remove_current_application(self):
self.__seamless__.delete("admin.current_application")
@property
def related_applications(self):
return self.__seamless__.get_list("admin.related_applications")
def add_related_application(self, application_id, date_accepted=None, status=None):
obj = {"application_id": application_id}
self.__seamless__.delete_from_list("admin.related_applications", matchsub=obj)
if date_accepted is not None:
obj["date_accepted"] = date_accepted
if status is not None:
obj["status"] = status
self.__seamless__.add_to_list_with_struct("admin.related_applications", obj)
def set_related_applications(self, related_applications_records):
self.__seamless__.set_with_struct("admin.related_applications", related_applications_records)
def remove_related_applications(self):
self.__seamless__.delete("admin.related_applications")
def remove_related_application(self, application_id):
self.set_related_applications([r for r in self.related_applications if r.get("application_id") != application_id])
def related_application_record(self, application_id):
for record in self.related_applications:
if record.get("application_id") == application_id:
return record
return None
def latest_related_application_id(self):
related = self.related_applications
if len(related) == 0:
return None
if len(related) == 1:
return related[0].get("application_id")
        related = sorted(related, key=lambda x: x.get("date_accepted", "1970-01-01T00:00:00Z"), reverse=True)
        return related[0].get("application_id")
########################################################################
## Functions for handling continuations
def get_future_continuations(self):
irb = self.bibjson().is_replaced_by
q = ContinuationQuery(irb)
future = []
journals = self.q2obj(q=q.query())
subjournals = []
for j in journals:
subjournals += j.get_future_continuations()
future = journals + subjournals
return future
def get_past_continuations(self):
replaces = self.bibjson().replaces
q = ContinuationQuery(replaces)
past = []
journals = self.q2obj(q=q.query())
subjournals = []
for j in journals:
subjournals += j.get_past_continuations()
past = journals + subjournals
return past
#######################################################################
#####################################################
## operations we can do to the journal
def calculate_tick(self):
created_date = self.created_date
last_update_request = self.last_update_request
tick_threshold = app.config.get("TICK_THRESHOLD", '2014-03-19T00:00:00Z')
threshold = datetime.strptime(tick_threshold, "%Y-%m-%dT%H:%M:%SZ")
if created_date is None: # don't worry about the last_update_request date - you can't update unless you've been created!
# we haven't even saved the record yet. All we need to do is check that the tick
# threshold is in the past (which I suppose theoretically it could not be), then
# set it
if datetime.utcnow() >= threshold:
self.set_ticked(True)
else:
self.set_ticked(False)
return
# otherwise, this is an existing record, and we just need to update it
# convert the strings to datetime objects
created = datetime.strptime(created_date, "%Y-%m-%dT%H:%M:%SZ")
lud = None
if last_update_request is not None:
lud = datetime.strptime(last_update_request, "%Y-%m-%dT%H:%M:%SZ")
if created >= threshold and self.is_in_doaj():
self.set_ticked(True)
return
if lud is not None and lud >= threshold and self.is_in_doaj():
self.set_ticked(True)
return
self.set_ticked(False)
def propagate_in_doaj_status_to_articles(self):
for article in self.all_articles():
article.set_in_doaj(self.is_in_doaj())
article.save()
def prep(self, is_update=True):
self._ensure_in_doaj()
self.calculate_tick()
self._generate_index()
self._calculate_has_apc()
self._generate_autocompletes()
if is_update:
self.set_last_updated()
def save(self, snapshot=True, sync_owner=True, **kwargs):
self.prep()
self.verify_against_struct()
if sync_owner:
self._sync_owner_to_application()
res = super(Journal, self).save(**kwargs)
if snapshot:
self.snapshot()
return res
######################################################
## internal utility methods
def _generate_autocompletes(self):
bj = self.bibjson()
publisher = bj.publisher
institution = bj.institution
if publisher is not None:
self.__seamless__.set_with_struct("index.publisher_ac", publisher.lower())
if institution is not None:
self.__seamless__.set_with_struct("index.institution_ac", institution.lower())
def _ensure_in_doaj(self):
if self.__seamless__.get_single("admin.in_doaj", default=None) is None:
self.set_in_doaj(False)
def _sync_owner_to_application(self):
if self.current_application is None:
return
from portality.models.v2.application import Application
ca = Application.pull(self.current_application)
if ca is not None and ca.owner != self.owner:
ca.set_owner(self.owner)
ca.save(sync_owner=False)
def _calculate_has_apc(self):
        # work out if the journal has an apc
has_apc = "No Information"
apc_present = self.bibjson().has_apc
if apc_present:
has_apc = "Yes"
elif self.is_ticked(): # Because if an item is not ticked we want to say "No Information"
has_apc = "No"
self.__seamless__.set_with_struct("index.has_apc", has_apc)
MAPPING_OPTS = {
"dynamic": None,
"coerces": app.config["DATAOBJ_TO_MAPPING_DEFAULTS"],
"exceptions": {
"admin.notes.note": {
"type": "text",
"index": False,
# "include_in_all": False # Removed in es6 fixme: do we need to look at copy_to for the mapping?
}
}
}
########################################################
## Data Access Queries
class JournalQuery(object):
"""
wrapper around the kinds of queries we want to do against the journal type
"""
issn_query = {
"track_total_hits": True,
"query": {
"bool": {
"must": [
{
"terms": {"index.issn.exact": "<issn>"}
}
]
}
}
}
all_doaj = {
"track_total_hits": True,
"query": {
"bool": {
"must": [
{"term": {"admin.in_doaj": True}}
]
}
}
}
_minified_fields = ["id", "bibjson.title", "last_updated"]
def __init__(self, minified=False, sort_by_title=False):
self.query = None
self.minified = minified
self.sort_by_title = sort_by_title
def find_by_issn(self, issns, in_doaj=None, max=10):
self.query = deepcopy(self.issn_query)
self.query["query"]["bool"]["must"][0]["terms"]["index.issn.exact"] = issns
if in_doaj is not None:
self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}})
self.query["size"] = max
def all_in_doaj(self):
q = deepcopy(self.all_doaj)
if self.minified:
q["fields"] = self._minified_fields
if self.sort_by_title:
q["sort"] = [{"bibjson.title.exact": {"order": "asc"}}]
return q
class JournalURLQuery(object):
def __init__(self, url, in_doaj=None, max=10):
self.url = url
self.in_doaj = in_doaj
self.max = max
def query(self):
q = {
"track_total_hits": True,
"query": {
"bool": {
"must": [
{
"match": {"bibjson.ref.journal.exact": self.url}
}
]
}
},
"size" : self.max
}
if self.in_doaj is not None:
q["query"]["bool"]["must"].append({"term": {"admin.in_doaj": self.in_doaj}})
return q
class IssnQuery(object):
def __init__(self, owner, in_doaj=None):
self._owner = owner
self._in_doaj = in_doaj
def query(self):
musts = [{"term": { "admin.owner.exact": self._owner}}]
if self._in_doaj is not None:
musts.append({"term": { "admin.in_doaj": self._in_doaj}})
return {
"track_total_hits": True,
"query": {
"bool": {
"must": musts
}
},
"size": 0,
"aggs": {
"issns": {
"terms": {
"field": "index.issn.exact",
"size": 10000,
"order": { "_key": "asc" }
}
}
}
}
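# Usage note: the aggregation above returns each distinct ISSN for the
# owner's journals; issns_by_owner() reads them back from
# res["aggregations"]["issns"]["buckets"].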
class OwnerQuery(object):
""" Query to supply all full journal sources by owner """
base_query = {
"track_total_hits": True,
"query": {
"term": {"admin.owner.exact": "<owner id here>"}
},
"size": 10000,
}
def __init__(self, owner):
self._query = deepcopy(self.base_query)
self._query["query"]["term"]["admin.owner.exact"] = owner
def query(self):
return self._query
class PublisherQuery(object):
exact_query = {
"track_total_hits": True,
"query": {
"term": {"bibjson.publisher.name.exact": "<publisher name here>"}
},
"size": 10000
}
inexact_query = {
"track_total_hits": True,
"query": {
"term": {"bibjson.publisher.name": "<publisher name here>"}
},
"size": 10000
}
def __init__(self, publisher, exact=True):
self.publisher = publisher
self.exact = exact
def query(self):
q = None
if self.exact:
q = deepcopy(self.exact_query)
q["query"]["term"]["bibjson.publisher.name.exact"] = self.publisher
else:
q = deepcopy(self.inexact_query)
q["query"]["term"]["bibjson.publisher.name"] = self.publisher.lower()
return q
class TitleQuery(object):
base_query = {
"track_total_hits": True,
"query": {
"term": {"index.title.exact": "<title here>"}
},
"size": 10000
}
def __init__(self, title):
self.title = title
def query(self):
q = deepcopy(self.base_query)
q["query"]["term"]["index.title.exact"] = self.title
return q
class ContinuationQuery(object):
def __init__(self, issns):
self.issns = issns
def query(self):
return {
"track_total_hits": True,
"query": {
"bool": {
"must": [
{"terms": {"index.issn.exact": self.issns}}
]
}
},
"size": 10000
}
class ArticleStatsQuery(object):
def __init__(self, issns):
self.issns = issns
def query(self):
return {
"track_total_hits": True,
"query": {
"bool": {
"must": [
{"terms": {"index.issn.exact": self.issns}},
{"term": {"admin.in_doaj": True}}
]
}
},
"size": 1,
"_source": {
"include": ["created_date"]
},
"sort": [{"created_date": {"order": "desc"}}]
}
class RecentJournalsQuery(object):
def __init__(self, max):
self.max = max
def query(self):
return {
"track_total_hits": True,
"query" : {"match_all" : {}},
"size" : self.max,
"sort" : [
{"created_date" : {"order" : "desc"}}
]
}
|
DOAJ/doaj
|
portality/models/v2/journal.py
|
Python
|
apache-2.0
| 36,695
|
from hwt.hdl.statements.statement import HwtSyntaxError
class HlsSyntaxError(HwtSyntaxError):
pass
|
Nic30/hwtHls
|
hwtHls/errors.py
|
Python
|
mit
| 105
|
"""Support for tracking consumption over given periods of time."""
from datetime import timedelta
import logging
from croniter import croniter
import voluptuous as vol
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
from .const import (
ATTR_TARIFF,
CONF_CRON_PATTERN,
CONF_METER,
CONF_METER_DELTA_VALUES,
CONF_METER_NET_CONSUMPTION,
CONF_METER_OFFSET,
CONF_METER_TYPE,
CONF_SOURCE_SENSOR,
CONF_TARIFF,
CONF_TARIFF_ENTITY,
CONF_TARIFFS,
DATA_TARIFF_SENSORS,
DATA_UTILITY,
DOMAIN,
METER_TYPES,
SERVICE_RESET,
SERVICE_SELECT_NEXT_TARIFF,
SERVICE_SELECT_TARIFF,
SIGNAL_RESET_METER,
)
_LOGGER = logging.getLogger(__name__)
TARIFF_ICON = "mdi:clock-outline"
ATTR_TARIFFS = "tariffs"
DEFAULT_OFFSET = timedelta(hours=0)
def validate_cron_pattern(pattern):
"""Check that the pattern is well-formed."""
if croniter.is_valid(pattern):
return pattern
raise vol.Invalid("Invalid pattern")
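# Example (illustrative): croniter accepts standard 5-field patterns, e.g.
# "0 0 1 * *" would reset the meter at midnight on the 1st of each month.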
def period_or_cron(config):
"""Check that if cron pattern is used, then meter type and offsite must be removed."""
if CONF_CRON_PATTERN in config and CONF_METER_TYPE in config:
raise vol.Invalid(f"Use <{CONF_CRON_PATTERN}> or <{CONF_METER_TYPE}>")
if (
CONF_CRON_PATTERN in config
and CONF_METER_OFFSET in config
and config[CONF_METER_OFFSET] != DEFAULT_OFFSET
):
raise vol.Invalid(
f"When <{CONF_CRON_PATTERN}> is used <{CONF_METER_OFFSET}> has no meaning"
)
return config
def max_28_days(config):
"""Check that time period does not include more then 28 days."""
if config.days >= 28:
raise vol.Invalid(
"Unsupported offset of more then 28 days, please use a cron pattern."
)
return config
METER_CONFIG_SCHEMA = vol.Schema(
vol.All(
{
vol.Required(CONF_SOURCE_SENSOR): cv.entity_id,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_METER_TYPE): vol.In(METER_TYPES),
vol.Optional(CONF_METER_OFFSET, default=DEFAULT_OFFSET): vol.All(
cv.time_period, cv.positive_timedelta, max_28_days
),
vol.Optional(CONF_METER_DELTA_VALUES, default=False): cv.boolean,
vol.Optional(CONF_METER_NET_CONSUMPTION, default=False): cv.boolean,
vol.Optional(CONF_TARIFFS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_CRON_PATTERN): validate_cron_pattern,
},
period_or_cron,
)
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({cv.slug: METER_CONFIG_SCHEMA})}, extra=vol.ALLOW_EXTRA
)
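# Minimal YAML sketch for this schema (names and values are illustrative;
# the literal option keys are defined in .const):
#
#   utility_meter:
#     monthly_energy:
#       source: sensor.household_energy
#       cycle: monthly
#       tariffs:
#         - peak
#         - offpeak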
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up an Utility Meter."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
hass.data[DATA_UTILITY] = {}
register_services = False
for meter, conf in config[DOMAIN].items():
_LOGGER.debug("Setup %s.%s", DOMAIN, meter)
hass.data[DATA_UTILITY][meter] = conf
hass.data[DATA_UTILITY][meter][DATA_TARIFF_SENSORS] = []
if not conf[CONF_TARIFFS]:
# only one entity is required
name = conf.get(CONF_NAME, meter)
hass.async_create_task(
discovery.async_load_platform(
hass,
SENSOR_DOMAIN,
DOMAIN,
{name: {CONF_METER: meter, CONF_NAME: name}},
config,
)
)
else:
# create tariff selection
await component.async_add_entities(
[TariffSelect(meter, list(conf[CONF_TARIFFS]))]
)
hass.data[DATA_UTILITY][meter][CONF_TARIFF_ENTITY] = "{}.{}".format(
DOMAIN, meter
)
# add one meter for each tariff
tariff_confs = {}
for tariff in conf[CONF_TARIFFS]:
name = f"{meter} {tariff}"
tariff_confs[name] = {
CONF_METER: meter,
CONF_NAME: name,
CONF_TARIFF: tariff,
}
hass.async_create_task(
discovery.async_load_platform(
hass, SENSOR_DOMAIN, DOMAIN, tariff_confs, config
)
)
register_services = True
if register_services:
component.async_register_entity_service(SERVICE_RESET, {}, "async_reset_meters")
component.async_register_entity_service(
SERVICE_SELECT_TARIFF,
{vol.Required(ATTR_TARIFF): cv.string},
"async_select_tariff",
)
component.async_register_entity_service(
SERVICE_SELECT_NEXT_TARIFF, {}, "async_next_tariff"
)
return True
class TariffSelect(RestoreEntity):
"""Representation of a Tariff selector."""
def __init__(self, name, tariffs):
"""Initialize a tariff selector."""
self._name = name
self._current_tariff = None
self._tariffs = tariffs
self._icon = TARIFF_ICON
async def async_added_to_hass(self):
"""Run when entity about to be added."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if not state or state.state not in self._tariffs:
self._current_tariff = self._tariffs[0]
else:
self._current_tariff = state.state
@property
def should_poll(self):
"""If entity should be polled."""
return False
@property
def name(self):
"""Return the name of the select input."""
return self._name
@property
def icon(self):
"""Return the icon to be used for this entity."""
return self._icon
@property
def state(self):
"""Return the state of the component."""
return self._current_tariff
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {ATTR_TARIFFS: self._tariffs}
async def async_reset_meters(self):
"""Reset all sensors of this meter."""
_LOGGER.debug("reset meter %s", self.entity_id)
async_dispatcher_send(self.hass, SIGNAL_RESET_METER, self.entity_id)
async def async_select_tariff(self, tariff):
"""Select new option."""
if tariff not in self._tariffs:
_LOGGER.warning(
"Invalid tariff: %s (possible tariffs: %s)",
tariff,
", ".join(self._tariffs),
)
return
self._current_tariff = tariff
self.async_write_ha_state()
async def async_next_tariff(self):
"""Offset current index."""
current_index = self._tariffs.index(self._current_tariff)
new_index = (current_index + 1) % len(self._tariffs)
self._current_tariff = self._tariffs[new_index]
self.async_write_ha_state()
|
rohitranjan1991/home-assistant
|
homeassistant/components/utility_meter/__init__.py
|
Python
|
mit
| 7,459
|
# -----------------------------------------------------------------------------
# Copyright * 2014, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The Crisis Mapping Toolkit (CMT) v1 platform is licensed under the Apache
# License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# -----------------------------------------------------------------------------
'''
This file provides a framework for running a process on many lake dates/locations.
'''
import logging
logging.basicConfig(level=logging.ERROR)
try:
import cmt.ee_authenticate
except:
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
import cmt.ee_authenticate
cmt.ee_authenticate.initialize()
import sys
import argparse
import time
import threading
import multiprocessing
import os
import functools
import traceback
import ee
#---------------------------------------------------------------------------
def get_image_collection_landsat5(bounds, start_date, end_date):
'''Retrieve Landsat 5 imagery for the selected location and dates.'''
ee_bounds = bounds
ee_points = ee.List(bounds.bounds().coordinates().get(0))
points = ee_points.getInfo()
points = map(functools.partial(apply, ee.Geometry.Point), points)
collection = ee.ImageCollection('LT5_L1T').filterDate(start_date, end_date).filterBounds(points[0]).filterBounds(points[1]).filterBounds(points[2]).filterBounds(points[3])
return collection
def get_image_collection_modis(region, start_date, end_date):
'''Retrieve MODIS imagery for the selected location and dates.'''
print 'Fetching MODIS data...'
ee_points = ee.List(region.bounds().coordinates().get(0))
points = ee_points.getInfo()
points = map(functools.partial(apply, ee.Geometry.Point), points)
highResModis = ee.ImageCollection('MOD09GQ').filterDate(start_date, end_date).filterBounds(points[0]).filterBounds(points[1]).filterBounds(points[2]).filterBounds(points[3])
lowResModis = ee.ImageCollection('MOD09GA').filterDate(start_date, end_date).filterBounds(points[0]).filterBounds(points[1]).filterBounds(points[2]).filterBounds(points[3])
#print highResModis.getInfo()
#print '================================='
#print lowResModis.getInfo()['bands']
#print lowResModis.select('sur_refl_b03').getInfo()
#print lowResModis.select('sur_refl_b06').getInfo()
#collection = highResModis.addBands(lowResModis.select('sur_refl_b03'))#.addBands(lowResModis.select('sur_refl_b06'))
# This set of code is needed to merge the low and high res MODIS bands
def merge_bands(element):
# A function to merge the bands together.
# After a join, results are in 'primary' and 'secondary' properties.
return ee.Image.cat(element.get('primary'), element.get('secondary'))
join = ee.Join.inner()
f = ee.Filter.equals('system:time_start', None, 'system:time_start')
    modisJoined = ee.ImageCollection(join.apply(lowResModis, highResModis, f))
    roughJoined = modisJoined.map(merge_bands)
# Clean up the joined band names
band_names_in = ['num_observations_1km','state_1km','SensorZenith','SensorAzimuth','Range','SolarZenith','SolarAzimuth','gflags','orbit_pnt',
'num_observations_500m','sur_refl_b03','sur_refl_b04','sur_refl_b05','sur_refl_b06','sur_refl_b07',
'QC_500m','obscov_500m','iobs_res','q_scan','num_observations', 'sur_refl_b01_1','sur_refl_b02_1','QC_250m','obscov']
band_names_out = ['num_observations_1km','state_1km','SensorZenith','SensorAzimuth','Range','SolarZenith','SolarAzimuth','gflags','orbit_pnt',
'num_observations_500m','sur_refl_b03','sur_refl_b04','sur_refl_b05','sur_refl_b06','sur_refl_b07',
'QC_500m','obscov_500m','iobs_res','q_scan','num_observations_250m', 'sur_refl_b01','sur_refl_b02','QC_250m','obscov']
collection = roughJoined.select(band_names_in, band_names_out)
return collection
def get_image_date(image_info):
'''Extract the (text format) date from EE image.getInfo() - look for it in several locations'''
if 'DATE_ACQUIRED' in image_info['properties']: # Landsat 5
this_date = image_info['properties']['DATE_ACQUIRED']
else:
# MODIS: The date is stored in the 'id' field in this format: 'MOD09GA/MOD09GA_005_2004_08_15'
text = image_info['id']
dateStart1 = text.rfind('MOD09GA_') + len('MOD09GA_')
dateStart2 = text.find('_', dateStart1) + 1
this_date = text[dateStart2:].replace('_', '-')
return this_date
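# Example of the MODIS fallback path (id format from the comment above):
#   'MOD09GA/MOD09GA_005_2004_08_15' -> '2004-08-15'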
#---------------------------------------------------------------------------
class LakeDataLoggerBase(object):
'''Log manager class to store the results of lake processing.
One of these will be used for each lake.
This class is a dummy base class that should be derived from.'''
def __init__(self, logDirectory, ee_lake, lake_name):
'''Initialize with lake information'''
self.ee_lake = ee_lake
self.base_directory = logDirectory
self.lake_name = lake_name
def getBaseDirectory(self):
'''The top level output folder'''
return self.base_directory
def getLakeDirectory(self):
raise Exception('Implement me!')
def getLakeName(self):
return self.lake_name
def computeLakePrefix(self):
'''Returns a file prefix for this lake in the log directory'''
return os.path.join(self.base_directory, self.lake_name)
#def findRecordByDate(self, date):
# '''Searches for a record with a particular date and returns it'''
# return None
def addDataRecord(self, dataRecord):
'''Adds a new record to the log and optionally save an image'''
return True
def sample_processing_function(bounds, image, image_date, logger):
'''Returns a dictionary of results.
This is the type of function that can be passed to "process_lake"'''
return {'water_count' : 1, 'cloud_count': 2}
def isLakeInBadList(name, output_directory, date=None):
'''Check the blacklist to see if we should skip a lake'''
# - The blacklist is a CSV file containing:
# lake name, date
# - If the date is left blank then applies to all dates.
# - If no date is passed in than only a blank date will match.
# Search the entire file for the name
list_path = os.path.join(output_directory, 'badLakeList.txt')
try:
file_handle = open(list_path, 'r')
found = False
for line in file_handle:
parts = line.strip().split(',')
if name != parts[0]: # Name does not match, keep searching
continue
# Name matches, check the date
if (parts[1]=='') or (parts[1] == date):
found = True
break
file_handle.close()
return found
except: # Fail silently
return False
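# Blacklist file sketch (badLakeList.txt; lake names are illustrative):
#   Lake_Foo,            <- matches every date
#   Lake_Bar,2004-08-15  <- matches only that date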
def addLakeToBadList(name, output_directory, date=None):
'''Create a blacklist of lakes we will skip'''
# Just add the name to a plain text file
list_path = os.path.join(output_directory, 'badLakeList.txt')
file_handle = open(list_path, 'a')
if date: # Write "name, date"
file_handle.write(name +','+ str(date) +'\n')
else: # Write "name,"
file_handle.write(name + ',\n')
file_handle.close()
return True
# The maximum lake size Earth Engine can handle in square kilometers
MAX_LAKE_SIZE = 5000
MAX_LATITUDE = 55 # SRTM90 is not available beyond 60 latitude
def process_lake(lake, ee_lake, start_date, end_date, output_directory,
processing_function, logging_class, image_fetching_function):
    '''Computes lake statistics over a date range and writes them to a log file.
    processing_function is called with four arguments: the lake bounding box,
    an ee image, the image date, and the logger.'''
try:
# Extract the lake name
name = lake['properties']['LAKE_NAME']
name = name.replace("'","").replace(".","").replace(",","").replace(" ","_") # Strip out weird characters
if name == '': # Can't proceed without the name!
print 'Skipping lake with no name!'
print lake['properties']
return False
# Check if the lake is in the bad lake list
if isLakeInBadList(name, output_directory):
print 'Skipping known bad lake ' + name
return False
boundsInfo = ee_lake.geometry().bounds().getInfo()
# Take the lake boundary and expand it out in all directions by 1000 meters
        # - Need to use bounding boxes instead of exact geometries, otherwise
# Earth Engine's memory usage will explode!
ee_bounds = ee_lake.geometry().bounds().buffer(1000).bounds()
print 'Processing lake: ' + name
# Set up logging object for this lake
logger = logging_class(output_directory, ee_lake, name)
# Fetch all the landsat 5 imagery covering the lake on the date range
collection = image_fetching_function(ee_bounds, start_date, end_date)
ee_image_list = collection.toList(1000000)
num_images_found = len(ee_image_list.getInfo())
print 'Found ' + str(num_images_found) + ' images for this lake.'
# Iterate through all the images we retrieved
results = []
all_image_info = ee_image_list.getInfo()
for i in range(len(all_image_info)):
# Extract the date for this image
this_date = get_image_date(all_image_info[i])
if isLakeInBadList(name, output_directory, this_date):
print 'Skipping known bad instance: ' + name +' - '+ this_date
continue
print 'Processing date ' + str(this_date)
# Retrieve the image data and fetch the sun elevation (suggests the amount of light present)
this_ee_image = ee.Image(ee_image_list.get(i))
#sun_elevation = all_image_info[i]['properties']['SUN_ELEVATION'] # Easily available in Landsat5
# Call processing algorithms on the lake with second try in case EE chokes.
try:
result = processing_function(ee_bounds, this_ee_image, this_date, logger)
except Exception as e:
print 'Processing failed, skipping this date --> ' + str(e)
traceback.print_exc(file=sys.stdout)
continue
# Append some metadata to the record and log it
#r['sun_elevation'] = sun_elevation
result['date'] = this_date
logger.addDataRecord(result)
except Exception as e:
print 'Caught exception processing the lake!'
print str(e)
traceback.print_exc(file=sys.stdout)
print 'Finished processing lake: ' + name
#======================================================================================================
def main(processing_function, logging_class, image_fetching_function=get_image_collection_landsat5):
'''This main needs to be called from another file with some arguments'''
parser = argparse.ArgumentParser(description='Measure lake water levels.')
parser.add_argument('--start-date', dest='start_date', action='store', required=False, default=None, help='YYYY-MM-DD start date')
parser.add_argument('--end-date', dest='end_date', action='store', required=False, default=None, help='YYYY-MM-DD end date')
parser.add_argument('--lake', dest='lake', action='store', required=False, default=None, help='Specify a single lake to process')
parser.add_argument('--results-dir', dest='results_dir', action='store', required=False, default='results')
parser.add_argument('--max-lakes', dest='max_lakes', type=int, required=False, default=100, help='Limit to this many lakes')
parser.add_argument('--threads', dest='num_threads', type=int, required=False, default=4)
args = parser.parse_args()
if args.start_date == None: # Use a large date range
start_date = ee.Date('1984-01-01')
end_date = ee.Date('2015-01-01')
else: # Start date provided
start_date = ee.Date(args.start_date)
if args.end_date: # End date also provided
end_date = ee.Date(args.end_date)
else: # Use the input date plus one month
end_date = start_date.advance(1.0, 'month')
# --- This is the database containing all the lake locations!
if args.lake != None:
all_lakes = ee.FeatureCollection('ft:13s-6qZDKWXsLOWyN7Dap5o6Xuh2sehkirzze29o3', "geometry").filterMetadata(u'LAKE_NAME', u'equals', args.lake).toList(1000000)
if not all_lakes:
raise Exception('Failed to find user specified lake name!')
else:
all_lakes = ee.FeatureCollection('ft:13s-6qZDKWXsLOWyN7Dap5o6Xuh2sehkirzze29o3', "geometry").filterMetadata(
u'LAKE_NAME', u'not_equals', "").filterMetadata(
u'AREA_SKM', u'less_than', MAX_LAKE_SIZE).filterMetadata(
u'LAT_DEG', u'less_than', MAX_LATITUDE).filterMetadata(
u'LAT_DEG', u'greater_than', -MAX_LATITUDE).toList(args.max_lakes)
#pprint(ee.Feature(all_lakes.get(0)).getInfo())
# Fetch ee information for all of the lakes we loaded from the database
all_lakes_local = all_lakes.getInfo()
num_lakes = len(all_lakes_local)
print 'Found ' + str(num_lakes) + ' lakes.'
# Create output directory
if not os.path.exists(args.results_dir):
os.makedirs(args.results_dir)
# Create processing pool and multiprocessing manager
num_threads = args.num_threads
if num_lakes < num_threads:
num_threads = num_lakes
print 'Spawning ' + str(num_threads) + ' worker thread(s)'
pool = multiprocessing.Pool(processes=num_threads)
manager = multiprocessing.Manager()
lake_results = []
for i in range(len(all_lakes_local)): # For each lake...
# Get this one lake
ee_lake = ee.Feature(all_lakes.get(i))
#process_lake(all_lakes_local[i], ee_lake, start_date, end_date, args.results_dir, processing_function, logging_class, image_fetching_function)
# Spawn a processing thread for this lake
lake_results.append(pool.apply_async(process_lake, args=(all_lakes_local[i], ee_lake,
start_date, end_date,
args.results_dir,
processing_function, logging_class, image_fetching_function)))
# Wait until all threads have finished
print 'Waiting for all threads to complete...'
for r in lake_results:
r.get()
# Stop the queue and all the threads
print 'Cleaning up...'
pool.close()
pool.join()
|
asurunis/CrisisMappingToolkit
|
cmt/util/processManyLakes.py
|
Python
|
apache-2.0
| 15,844
|
from datetime import datetime
class LocationsModel(object):
"""Visa Mobile Location Confirmation Location Update data object model.
https://developer.visa.com/products/mlc/reference#mlc__mlc1
:param pyvdp.mlc.locationupdate.LocationModel.Header header: **Required**.
Instance of :func:`~pyvdp.mlc.locationupdate.LocationsModel.Header`.
:param str accuracy: **Optional**. Accuracy coefficient around returned coordinates. 0-10000 integer.
:param str cloudNotificationKey: **Optional**. Unique ID, assigned by mobile OS vendor that identifies device
for push notifications purposes. This enables VISA to send a location update request on expiration.
4K string.
:param int cloudNotificationProvider: **Optional**. Mobile OS vendor ID. Possible values are: 1 - Google,
2 - Apple, 3 - Microsoft.
:param str deviceId: **Required**. Device ID value, that must match device id, sent during enrollment request.
See :func:`pyvdp.mlc.enrollment.enrollments`. Max 50 characters string.
:param str issuerId: **Required**. Issuer ID provided by VISA during onboarding. 6 digits string.
    :param str deviceLocationDateTime: **Required**. Datetime for the location update on the mobile device. This is
        generated by the mobile app. 50 characters string, YYYY-MM-DDTHH:MM:SS.fffZ.
    :param GeoLocationCoordinate geoLocationCoordinate: **Required**.
        Instance of :func:`~pyvdp.mlc.locationupdate.LocationsModel.GeoLocationCoordinate`.
:param int provider: **Optional**. Location provider value. Possible values are 1 (mobile app), 2 (mobile network
operator). Currently only '1' is supported.
:param int source: **Required**. Mobile device event, that triggered location update. Possible values are:
1 - location change event, 2 - wi-fi connection event.
**Request:**
.. code:: json
{
"accuracy": "5000",
"cloudNotificationKey": "03e3ae03-a627-4241-bad6-58f811c18e46",
"cloudNotificationProvider": "1",
"deviceId": "25b794-29a-4acb-9485-5a643d231f8U",
"deviceLocationDateTime": "2017-04-20T03:54:24.932Z",
"geoLocationCoordinate": {
"latitude": "37.55862902",
"longitude": "-122.2773385"
},
"header": {
"messageDateTime": "2017-04-20T03:54:24.932Z",
"messageId": "2d099794-e69a-4acb-9485-5a643d231f51"
},
"issuerId": "123457",
"provider": "1",
"source": "1"
}
**Response:**
.. code:: json
{
"status": "success",
"header": {
"messageId": "2d099794-e69a-4acb-9485-5a643d231f51",
"messageDateTime": "2017-04-20T03:54:24.932Z"
},
"deviceId": "25b794-29a-4acb-9485-5a643d231f8U",
"locationPulseInterval": "6000000"
}
"""
ATTRS = [
'header',
'accuracy',
'cloudNotificationKey',
'cloudNotificationProvider',
'deviceId',
'deviceLocationDateTime',
'geoLocationCoordinate',
'issuerId',
'provider',
'source'
]
def __init__(self, **kwargs):
for attr, value in kwargs.items():
if attr in self.ATTRS and value:
self.__setattr__(attr, value)
class Header(object):
"""Location update request header.
A part of :func:`~pyvdp.mlc.locationupdate.LocationsModel`.
:param str messageId: **Required**. Unique message identifier. Max 50 characters string.
"""
ATTRS = [
'messageId'
]
def __init__(self, **kwargs):
for attr, value in kwargs.items():
if attr in self.ATTRS and value:
self.__setattr__(attr, value)
self.messageDateTime = datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f')
class GeoLocationCoordinate(object):
"""Location update request geo coordinates data model.
A part of :func:`~pyvdp.mlc.locationupdate.LocationsModel`
:param float latitude: **Required**. Latitude, decimal value between -90.0 and 90.0.
:param float longitude: **Required**. Longitude, decimal value between -180.0 and 180.0
"""
ATTRS = [
'latitude',
'longitude'
]
def __init__(self, **kwargs):
for attr, value in kwargs.items():
if attr in self.ATTRS and value:
self.__setattr__(attr, value)
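# Usage sketch (values mirror the request example in the LocationsModel
# docstring above):
#
#   header = LocationsModel.Header(messageId='2d099794-e69a-4acb-9485-5a643d231f51')
#   coords = LocationsModel.GeoLocationCoordinate(latitude=37.55862902,
#                                                 longitude=-122.2773385)
#   payload = LocationsModel(header=header,
#                            deviceId='25b794-29a-4acb-9485-5a643d231f8U',
#                            issuerId='123457',
#                            deviceLocationDateTime='2017-04-20T03:54:24.932Z',
#                            geoLocationCoordinate=coords,
#                            source=1)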
|
ppokrovsky/pyvdp
|
pyvdp/mlc/locationupdate/models.py
|
Python
|
mit
| 4,690
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Authors: Nemry Jonathan
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Easy Debug Backend',
'version': '8.0.1.0.0',
'author': 'ACSONE SA/NV',
'maintainer': 'ACSONE SA/NV',
'website': 'https://acsone.eu',
'category': 'Other',
'depends': [
'base',
'web',
],
'description': """
Easy Debug Backend
==================
Constantly apply 'debug mode' for backend
""",
'data': [
'views/easy_debug_backend.xml',
],
'installable': False,
'auto_install': False,
}
|
acsone/acsone-addons
|
easy_debug_backend/__manifest__.py
|
Python
|
agpl-3.0
| 1,826
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para fileserve
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[fileserve.py] get_video_url(page_url='%s')" % page_url)
video_urls = []
if premium:
        # Visit the home page to preload the cookie
        data = scrapertools.cache_page("http://fileserve.com/index.php")
        # Log in
url = "http://fileserve.com/login.php"
post = "loginUserName=%s&loginUserPassword=%s&autoLogin=on&ppp=102&loginFormSubmit=Login" % (user,password)
data = scrapertools.cache_page(url, post=post)
location = scrapertools.get_header_from_response(page_url,header_to_get="location")
logger.info("location="+location)
if location.startswith("http"):
extension = location[-4:]
video_urls.append( [ "%s (Premium) [fileserve]" % extension, location ] )
for video_url in video_urls:
logger.info("[fileserve.py] %s - %s" % (video_url[0],video_url[1]))
return video_urls
# Find this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'http://www.fileserve.com/file/([A-Z0-9a-z]{7})'
logger.info("[fileserve.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[Fileserve]"
url = "http://www.fileserve.com/file/"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'fileserve' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
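# Usage sketch (hypothetical 7-character file id):
#   find_videos("... http://www.fileserve.com/file/AbC1234 ...")
#   -> [["[Fileserve]", "http://www.fileserve.com/file/AbC1234", "fileserve"]]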
|
titienmiami/mmc.repository
|
plugin.video.tvalacarta/servers/fileserve.py
|
Python
|
gpl-2.0
| 2,081
|
# -*- coding: utf-8 -*-
"""Provide functions for the creation and manipulation of Quaternions.
"""
from __future__ import absolute_import, division, print_function
import numpy as np
from . import euler, vector, vector3, vector4
from .utils import all_parameters_as_numpy_arrays, parameters_as_numpy_arrays
class index:
#: The index of the X value within the quaternion
x = 0
#: The index of the Y value within the quaternion
y = 1
#: The index of the Z value within the quaternion
z = 2
#: The index of the W value within the quaternion
w = 3
def create(x=0., y=0., z=0., w=1., dtype=None):
return np.array([x, y, z, w], dtype=dtype)
def create_from_x_rotation(theta, dtype=None):
thetaOver2 = theta * 0.5
return np.array(
[
np.sin(thetaOver2),
0.0,
0.0,
np.cos(thetaOver2)
],
dtype=dtype
)
def create_from_y_rotation(theta, dtype=None):
thetaOver2 = theta * 0.5
return np.array(
[
0.0,
np.sin(thetaOver2),
0.0,
np.cos(thetaOver2)
],
dtype=dtype
)
def create_from_z_rotation(theta, dtype=None):
thetaOver2 = theta * 0.5
return np.array(
[
0.0,
0.0,
np.sin(thetaOver2),
np.cos(thetaOver2)
],
dtype=dtype
)
@parameters_as_numpy_arrays('axis')
def create_from_axis_rotation(axis, theta, dtype=None):
dtype = dtype or axis.dtype
# make sure the vector is normalised
if not np.isclose(np.linalg.norm(axis), 1.):
axis = vector.normalise(axis)
thetaOver2 = theta * 0.5
sinThetaOver2 = np.sin(thetaOver2)
return np.array(
[
sinThetaOver2 * axis[0],
sinThetaOver2 * axis[1],
sinThetaOver2 * axis[2],
np.cos(thetaOver2)
],
dtype=dtype
)
@parameters_as_numpy_arrays('mat')
def create_from_matrix(mat, dtype=None):
# http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
dtype = dtype or mat.dtype
quat = np.array(
[
np.sqrt(np.maximum(0., 1. + mat[0][0] - mat[1][1] - mat[2][2])) / 2.,
np.sqrt(np.maximum(0., 1. - mat[0][0] + mat[1][1] - mat[2][2])) / 2.,
np.sqrt(np.maximum(0., 1. - mat[0][0] - mat[1][1] + mat[2][2])) / 2.,
np.sqrt(np.maximum(0., 1. + mat[0][0] + mat[1][1] + mat[2][2])) / 2.,
],
dtype=dtype
)
# the method suggests this, but it produces the wrong results
#if np.sign(quat[0]) != np.sign(mat[2][1]-mat[1][2]):
# quat[0] *= -1
#if np.sign(quat[1]) != np.sign(mat[0][2]-mat[2][0]):
# quat[1] *= -1
#if np.sign(quat[2]) != np.sign(mat[1][0]-mat[0][1]):
# quat[2] *= -1
return quat
@parameters_as_numpy_arrays('eulers')
def create_from_eulers(eulers, dtype=None):
"""Creates a quaternion from a set of Euler angles.
Eulers are an array of length 3 in the following order::
        [pitch, yaw, roll]
"""
dtype = dtype or eulers.dtype
pitch, yaw, roll = eulers
halfPitch = pitch * 0.5
sP = np.sin(halfPitch)
cP = np.cos(halfPitch)
halfRoll = roll * 0.5
sR = np.sin(halfRoll)
cR = np.cos(halfRoll)
halfYaw = yaw * 0.5
sY = np.sin(halfYaw)
cY = np.cos(halfYaw)
return np.array(
[
# x = -cy * sp * cr - sy * cp * sr
(-cY * sP * cR) - (sY * cP * sR),
# y = cy * sp * sr - sy * cp * cr
(cY * sP * sR) - (sY * cP * cR),
# z = sy * sp * cr - cy * cp * sr
(sY * sP * cR) - (cY * cP * sR),
# w = cy * cp * cr + sy * sp * sr
(cY * cP * cR) + (sY * sP * sR),
],
dtype=dtype
)
@parameters_as_numpy_arrays('eulers')
def create_from_inverse_of_eulers(eulers, dtype=None):
"""Creates a quaternion from the inverse of a set of Euler angles.
Eulers are an array of length 3 in the following order::
[yaw, pitch, roll]
"""
dtype = dtype or eulers.dtype
    pitch, yaw, roll = eulers  # unpack directly, mirroring create_from_eulers (no euler helper module is imported here)
halfRoll = roll * 0.5
sinRoll = np.sin(halfRoll)
cosRoll = np.cos(halfRoll)
halfPitch = pitch * 0.5
sinPitch = np.sin(halfPitch)
cosPitch = np.cos(halfPitch)
halfYaw = yaw * 0.5
sinYaw = np.sin(halfYaw)
cosYaw = np.cos(halfYaw)
return np.array(
[
# x = cy * sp * cr + sy * cp * sr
(cosYaw * sinPitch * cosRoll) + (sinYaw * cosPitch * sinRoll),
# y = -cy * sp * sr + sy * cp * cr
(-cosYaw * sinPitch * sinRoll) + (sinYaw * cosPitch * cosRoll),
# z = -sy * sp * cr + cy * cp * sr
(-sinYaw * sinPitch * cosRoll) + (cosYaw * cosPitch * sinRoll),
# w = cy * cp * cr + sy * sp * sr
(cosYaw * cosPitch * cosRoll) + (sinYaw * sinPitch * sinRoll)
],
dtype=dtype
)
@all_parameters_as_numpy_arrays
def cross(quat1, quat2):
"""Returns the cross-product of the two quaternions.
    Quaternions are **not** commutative. Therefore, order is important.
This is NOT the same as a vector cross-product.
Quaternion cross-product is the equivalent of matrix multiplication.
"""
q1x, q1y, q1z, q1w = quat1
q2x, q2y, q2z, q2w = quat2
return np.array(
[
q1x * q2w + q1y * q2z - q1z * q2y + q1w * q2x,
-q1x * q2z + q1y * q2w + q1z * q2x + q1w * q2y,
q1x * q2y - q1y * q2x + q1z * q2w + q1w * q2z,
-q1x * q2x - q1y * q2y - q1z * q2z + q1w * q2w,
],
dtype=quat1.dtype
)
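# Editor's sketch: cross() composes rotations, so two quarter turns about X
# should equal one half turn about X.
def _demo_cross():
    quarter = create_from_x_rotation(np.pi / 2.)
    assert np.allclose(cross(quarter, quarter), create_from_x_rotation(np.pi))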
def is_zero_length(quat):
"""Checks if a quaternion is zero length.
:param numpy.array quat: The quaternion to check.
:rtype: boolean.
:return: True if the quaternion is zero length, otherwise False.
"""
return quat[0] == quat[1] == quat[2] == quat[3] == 0.0
def is_non_zero_length(quat):
"""Checks if a quaternion is not zero length.
This is the opposite to 'is_zero_length'.
This is provided for readability.
:param numpy.array quat: The quaternion to check.
:rtype: boolean
:return: False if the quaternion is zero length, otherwise True.
.. seealso:: is_zero_length
"""
return not is_zero_length(quat)
def squared_length(quat):
"""Calculates the squared length of a quaternion.
    Useful for avoiding the performance penalty of
the square root function.
:param numpy.array quat: The quaternion to measure.
:rtype: float, numpy.array
:return: If a 1d array was passed, it will be a scalar.
Otherwise the result will be an array of scalars with shape
vec.ndim with the last dimension being size 1.
"""
return vector4.squared_length(quat)
def length(quat):
"""Calculates the length of a quaternion.
:param numpy.array quat: The quaternion to measure.
:rtype: float, numpy.array
:return: If a 1d array was passed, it will be a scalar.
Otherwise the result will be an array of scalars with shape
vec.ndim with the last dimension being size 1.
"""
return vector4.length(quat)
def normalise(quat):
"""Ensure a quaternion is unit length (length ~= 1.0).
The quaternion is **not** changed in place.
:param numpy.array quat: The quaternion to normalise.
:rtype: numpy.array
:return: The normalised quaternion(s).
"""
return vector4.normalise(quat)
def rotation_angle(quat):
"""Calculates the rotation around the quaternion's axis.
:param numpy.array quat: The quaternion.
:rtype: float.
    :return: The quaternion's rotation about its axis in radians.
"""
# extract the W component
thetaOver2 = np.arccos(quat[3])
return thetaOver2 * 2.0
@all_parameters_as_numpy_arrays
def rotation_axis(quat):
"""Calculates the axis of the quaternion's rotation.
:param numpy.array quat: The quaternion.
:rtype: numpy.array.
:return: The quaternion's rotation axis.
"""
# extract W component
sinThetaOver2Sq = 1.0 - (quat[3] ** 2)
# check for zero before we sqrt
if sinThetaOver2Sq <= 0.0:
# identity quaternion or numerical imprecision.
# return a valid vector
# we'll treat -Z as the default
return np.array([0.0, 0.0, -1.0], dtype=quat.dtype)
oneOverSinThetaOver2 = 1.0 / np.sqrt(sinThetaOver2Sq)
# we use the x,y,z values
return np.array(
[
quat[0] * oneOverSinThetaOver2,
quat[1] * oneOverSinThetaOver2,
quat[2] * oneOverSinThetaOver2
],
dtype=quat.dtype
)
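# Editor's sketch: rotation_angle() and rotation_axis() recover the
# axis/angle pair the quaternion was built from.
def _demo_axis_angle_roundtrip():
    q = create_from_axis_rotation([0., 1., 0.], 0.5)
    assert np.isclose(rotation_angle(q), 0.5)
    assert np.allclose(rotation_axis(q), [0., 1., 0.])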
def dot(quat1, quat2):
"""Calculate the dot product of quaternions.
This is the same as a vector dot product.
:param numpy.array quat1: The first quaternion(s).
:param numpy.array quat2: The second quaternion(s).
:rtype: float, numpy.array
:return: If a 1d array was passed, it will be a scalar.
Otherwise the result will be an array of scalars with shape
vec.ndim with the last dimension being size 1.
"""
return vector4.dot(quat1, quat2)
@all_parameters_as_numpy_arrays
def conjugate(quat):
"""Calculates a quaternion with the opposite rotation.
:param numpy.array quat: The quaternion.
:rtype: numpy.array.
:return: A quaternion representing the conjugate.
"""
# invert x,y,z and leave w as is
return np.array(
[
-quat[0],
-quat[1],
-quat[2],
quat[3]
],
dtype=quat.dtype
)
@parameters_as_numpy_arrays('quat')
def power(quat, exponent):
"""Multiplies the quaternion by the exponent.
The quaternion is **not** changed in place.
:param numpy.array quat: The quaternion.
    :param float exponent: The exponent.
:rtype: numpy.array.
:return: A quaternion representing the original quaternion
to the specified power.
"""
    # check for identity quaternion
    if np.fabs(quat[3]) > 0.9999:
        # log the condition, then assert for the time being
        print("rotation axis was identity")
        assert False
return quat
alpha = np.arccos(quat[3])
newAlpha = alpha * exponent
multi = np.sin(newAlpha) / np.sin(alpha)
return np.array(
[
quat[0] * multi,
quat[1] * multi,
quat[2] * multi,
np.cos(newAlpha)
],
dtype=quat.dtype
)
def inverse(quat):
"""Calculates the inverse quaternion.
The inverse of a quaternion is defined as
the conjugate of the quaternion divided
by the magnitude of the original quaternion.
:param numpy.array quat: The quaternion to invert.
:rtype: numpy.array.
:return: The inverse of the quaternion.
"""
return conjugate(quat) / length(quat)
@all_parameters_as_numpy_arrays
def negate(quat):
"""Calculates the negated quaternion.
This is essentially the quaternion * -1.0.
:param numpy.array quat: The quaternion.
:rtype: numpy.array
:return: The negated quaternion.
"""
return quat * -1.0
def is_identity(quat):
return np.allclose(quat, [0.,0.,0.,1.])
@all_parameters_as_numpy_arrays
def apply_to_vector(quat, vec):
"""Rotates a vector by a quaternion.
:param numpy.array quat: The quaternion.
:param numpy.array vec: The vector.
:rtype: numpy.array
:return: The vector rotated by the quaternion.
:raise ValueError: raised if the vector is an unsupported size
.. seealso:: http://content.gpwiki.org/index.php/OpenGL:Tutorials:Using_Quaternions_to_represent_rotation
"""
def apply(quat, vec4):
"""
v = numpy.array(vec)
return v + 2.0 * vector.cross(
quat[:-1],
vector.cross(quat[:-1], v) + (quat[-1] * v)
)
"""
length = vector.length(vec4)
vec4[:] = vector.normalise(vec4)
# quat * vec * quat^-1
result = cross(quat, cross(vec4, conjugate(quat)))
result *= length
return result
if vec.size == 3:
# convert to vector4
# ignore w component by setting it to 0.
vec = np.array([vec[0], vec[1], vec[2], 0.0], dtype=vec.dtype)
vec = apply(quat, vec)
vec = vec[:3]
return vec
elif vec.size == 4:
vec = apply(quat, vec)
return vec
else:
raise ValueError("Vector size unsupported")
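# Editor's sketch: rotating +Y a quarter turn about +X should land on +Z
# (up to floating point error), matching a right-handed convention.
def _demo_apply_to_vector():
    q = create_from_x_rotation(np.pi / 2.)
    v = apply_to_vector(q, np.array([0., 1., 0.]))
    assert np.allclose(v, [0., 0., 1.])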
|
PhloxAR/phloxar
|
PhloxAR/math3/quaternion.py
|
Python
|
apache-2.0
| 12,537
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.j
~~~~~~~~~~~~~~~~~
Lexer for the J programming language.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include
from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
String, Text
__all__ = ['JLexer']
class JLexer(RegexLexer):
"""
For `J <http://jsoftware.com/>`_ source code.
.. versionadded:: 2.1
"""
name = 'J'
aliases = ['j']
filenames = ['*.ijs']
mimetypes = ['text/x-j']
validName = r'\b[a-zA-Z]\w*'
tokens = {
'root': [
# Shebang script
(r'#!.*$', Comment.Preproc),
# Comments
(r'NB\..*', Comment.Single),
(r'\n+\s*Note', Comment.Multiline, 'comment'),
(r'\s*Note.*', Comment.Single),
# Whitespace
(r'\s+', Text),
# Strings
(r"'", String, 'singlequote'),
# Definitions
(r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'),
(r'(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b',
Name.Function, 'explicitDefinition'),
# Flow Control
(words(('for_', 'goto_', 'label_'), suffix=validName+r'\.'), Name.Label),
(words((
'assert', 'break', 'case', 'catch', 'catchd',
'catcht', 'continue', 'do', 'else', 'elseif',
'end', 'fcase', 'for', 'if', 'return',
'select', 'throw', 'try', 'while', 'whilst',
), suffix=r'\.'), Name.Label),
# Variable Names
(validName, Name.Variable),
# Standard Library
(words((
'ARGV', 'CR', 'CRLF', 'DEL', 'Debug',
'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF',
'LF2', 'Note', 'TAB', 'alpha17', 'alpha27',
'apply', 'bind', 'boxopen', 'boxxopen', 'bx',
'clear', 'cutLF', 'cutopen', 'datatype', 'def',
'dfh', 'drop', 'each', 'echo', 'empty',
'erase', 'every', 'evtloop', 'exit', 'expand',
'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
'getenv', 'hfd', 'inv', 'inverse', 'iospath',
'isatty', 'isutf8', 'items', 'leaf', 'list',
'nameclass', 'namelist', 'names', 'nc',
'nl', 'on', 'pick', 'rows',
'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
'sort', 'split', 'stderr', 'stdin', 'stdout',
'table', 'take', 'timespacex', 'timex', 'tmoutput',
'toCRLF', 'toHOST', 'toJ', 'tolower', 'toupper',
'type', 'ucp', 'ucpcount', 'usleep', 'utf8',
'uucp',
)), Name.Function),
# Copula
(r'=[.:]', Operator),
# Builtins
(r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/]', Operator),
# Short Keywords
(r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
(r'[aDiLpqsStux]\:', Keyword.Reserved),
(r'(_[0-9])\:', Keyword.Constant),
# Parens
(r'\(', Punctuation, 'parentheses'),
# Numbers
include('numbers'),
],
'comment': [
(r'[^)]', Comment.Multiline),
(r'^\)', Comment.Multiline, '#pop'),
(r'[)]', Comment.Multiline),
],
'explicitDefinition': [
(r'\b[nmuvxy]\b', Name.Decorator),
include('root'),
(r'[^)]', Name),
(r'^\)', Name.Label, '#pop'),
(r'[)]', Name),
],
'numbers': [
(r'\b_{1,2}\b', Number),
(r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number),
(r'_?\d+\.(?=\d+)', Number.Float),
(r'_?\d+x', Number.Integer.Long),
(r'_?\d+', Number.Integer),
],
'nounDefinition': [
(r'[^)]', String),
(r'^\)', Name.Label, '#pop'),
(r'[)]', String),
],
'parentheses': [
(r'\)', Punctuation, '#pop'),
# include('nounDefinition'),
include('explicitDefinition'),
include('root'),
],
'singlequote': [
(r"[^']", String),
(r"''", String),
(r"'", String, '#pop'),
],
}
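# Editor's sketch (not part of the lexer): dumping the token stream for a
# tiny J snippet through the standard pygments lexing API.
def _demo_jlexer():
    code = u'NB. arithmetic mean\nmean =: +/ % #\n'
    for ttype, value in JLexer().get_tokens(code):
        print('%s %r' % (ttype, value))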
|
tmm1/pygments.rb
|
vendor/pygments-main/pygments/lexers/j.py
|
Python
|
mit
| 4,527
|
#-*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.translation import get_language, activate
from django.template.context import Context
def get_language_for_user(user):
return user.get_profile().preferred_language
class BaseDeliveryBackend(object):
"""
Base backend for all DeliveryBackends
"""
supports_anonymous_users = False
def __init__(self, notification):
self.notification = notification
def can_send(self, user, notification):
"""
Determines if this backend should send a notification for the given combination of user and notice_type
"""
return True
def get_site(self):
return Site.objects.get_current()
def default_context(self):
return Context({
'default_http_protocol': getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http"),
'site': self.get_site(),
'notification': self.notification,
})
def deliver(self):
"""
deliver the given Notice to_users.
"""
current_language = get_language()
for user in self.notification.to_users:
if not self.supports_anonymous_users and user.is_anonymous():
continue
elif user.is_anonymous():
languages = [language_code for language_code, language in settings.LANGUAGES]
user = None
else:
languages = [get_language_for_user(user)]
if not (self.notification.can_send(user, self) and self.can_send(user, self.notification)):
continue
for language in languages:
activate(language)
# build the context
context = self.default_context()
context.update(self.notification.get_context(language=language))
context.update(self.notification.get_user_context(user))
self.deliver_to(user=user,
context=context,
notification=self.notification,
language=language)
activate(current_language)
def deliver_to(self, user, context, notification, language):
"""
handle delivery to a specific user. In this method the correct language of the user is already set and
the context has been modified accordingly for this user.
"""
raise NotImplementedError()
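# Editor's sketch: a minimal, hypothetical backend built on
# BaseDeliveryBackend -- by the time deliver_to() runs, deliver() has
# already activated the right language and assembled the context.
class ConsoleDeliveryBackend(BaseDeliveryBackend):
    def deliver_to(self, user, context, notification, language):
        print("[%s] notifying %s: %s" % (language, user, notification))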
|
stefanfoulis/django-notifyme
|
notifyme/delivery_backends/base.py
|
Python
|
mit
| 2,513
|
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Ilya Alekseyev
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import sys
import fixtures
import mock
from nova.cmd import manage
from nova import context
from nova import db
from nova.db import migration
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.db import fakes as db_fakes
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_network
from nova.tests.unit import test_flavors
class FixedIpCommandsTestCase(test.TestCase):
def setUp(self):
super(FixedIpCommandsTestCase, self).setUp()
db_fakes.stub_out_db_network_api(self.stubs)
self.commands = manage.FixedIpCommands()
def test_reserve(self):
self.commands.reserve('192.168.0.100')
address = db.fixed_ip_get_by_address(context.get_admin_context(),
'192.168.0.100')
self.assertEqual(address['reserved'], True)
def test_reserve_nonexistent_address(self):
self.assertEqual(2, self.commands.reserve('55.55.55.55'))
def test_unreserve(self):
self.commands.unreserve('192.168.0.100')
address = db.fixed_ip_get_by_address(context.get_admin_context(),
'192.168.0.100')
self.assertEqual(address['reserved'], False)
def test_unreserve_nonexistent_address(self):
self.assertEqual(2, self.commands.unreserve('55.55.55.55'))
def test_list(self):
self.useFixture(fixtures.MonkeyPatch('sys.stdout',
StringIO.StringIO()))
self.commands.list()
self.assertNotEqual(1, sys.stdout.getvalue().find('192.168.0.100'))
def test_list_just_one_host(self):
def fake_fixed_ip_get_by_host(*args, **kwargs):
return [db_fakes.fixed_ip_fields]
self.useFixture(fixtures.MonkeyPatch(
'nova.db.fixed_ip_get_by_host',
fake_fixed_ip_get_by_host))
self.useFixture(fixtures.MonkeyPatch('sys.stdout',
StringIO.StringIO()))
self.commands.list('banana')
self.assertNotEqual(1, sys.stdout.getvalue().find('192.168.0.100'))
class FloatingIpCommandsTestCase(test.TestCase):
def setUp(self):
super(FloatingIpCommandsTestCase, self).setUp()
db_fakes.stub_out_db_network_api(self.stubs)
self.commands = manage.FloatingIpCommands()
def test_address_to_hosts(self):
def assert_loop(result, expected):
for ip in result:
self.assertIn(str(ip), expected)
address_to_hosts = self.commands.address_to_hosts
# /32 and /31
self.assertRaises(exception.InvalidInput, address_to_hosts,
'192.168.100.1/32')
self.assertRaises(exception.InvalidInput, address_to_hosts,
'192.168.100.1/31')
# /30
expected = ["192.168.100.%s" % i for i in range(1, 3)]
result = address_to_hosts('192.168.100.0/30')
self.assertEqual(2, len(list(result)))
assert_loop(result, expected)
# /29
expected = ["192.168.100.%s" % i for i in range(1, 7)]
result = address_to_hosts('192.168.100.0/29')
self.assertEqual(6, len(list(result)))
assert_loop(result, expected)
# /28
expected = ["192.168.100.%s" % i for i in range(1, 15)]
result = address_to_hosts('192.168.100.0/28')
self.assertEqual(14, len(list(result)))
assert_loop(result, expected)
# /16
result = address_to_hosts('192.168.100.0/16')
self.assertEqual(65534, len(list(result)))
# NOTE(dripton): I don't test /13 because it makes the test take 3s.
# /12 gives over a million IPs, which is ridiculous.
self.assertRaises(exception.InvalidInput, address_to_hosts,
'192.168.100.1/12')
class NetworkCommandsTestCase(test.TestCase):
def setUp(self):
super(NetworkCommandsTestCase, self).setUp()
self.commands = manage.NetworkCommands()
self.net = {'id': 0,
'label': 'fake',
'injected': False,
'cidr': '192.168.0.0/24',
'cidr_v6': 'dead:beef::/64',
'multi_host': False,
'gateway_v6': 'dead:beef::1',
'netmask_v6': '64',
'netmask': '255.255.255.0',
'bridge': 'fa0',
'bridge_interface': 'fake_fa0',
'gateway': '192.168.0.1',
'broadcast': '192.168.0.255',
'dns1': '8.8.8.8',
'dns2': '8.8.4.4',
'vlan': 200,
'vlan_start': 201,
'vpn_public_address': '10.0.0.2',
'vpn_public_port': '2222',
'vpn_private_address': '192.168.0.2',
'dhcp_start': '192.168.0.3',
'project_id': 'fake_project',
'host': 'fake_host',
'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}
def fake_network_get_by_cidr(context, cidr):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(cidr, self.fake_net['cidr'])
return db_fakes.FakeModel(dict(test_network.fake_network,
**self.fake_net))
def fake_network_get_by_uuid(context, uuid):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(uuid, self.fake_net['uuid'])
return db_fakes.FakeModel(dict(test_network.fake_network,
**self.fake_net))
def fake_network_update(context, network_id, values):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(network_id, self.fake_net['id'])
self.assertEqual(values, self.fake_update_value)
self.fake_network_get_by_cidr = fake_network_get_by_cidr
self.fake_network_get_by_uuid = fake_network_get_by_uuid
self.fake_network_update = fake_network_update
def test_create(self):
def fake_create_networks(obj, context, **kwargs):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(kwargs['label'], 'Test')
self.assertEqual(kwargs['cidr'], '10.2.0.0/24')
self.assertEqual(kwargs['multi_host'], False)
self.assertEqual(kwargs['num_networks'], 1)
self.assertEqual(kwargs['network_size'], 256)
self.assertEqual(kwargs['vlan'], 200)
self.assertEqual(kwargs['vlan_start'], 201)
self.assertEqual(kwargs['vpn_start'], 2000)
self.assertEqual(kwargs['cidr_v6'], 'fd00:2::/120')
self.assertEqual(kwargs['gateway'], '10.2.0.1')
self.assertEqual(kwargs['gateway_v6'], 'fd00:2::22')
self.assertEqual(kwargs['bridge'], 'br200')
self.assertEqual(kwargs['bridge_interface'], 'eth0')
self.assertEqual(kwargs['dns1'], '8.8.8.8')
self.assertEqual(kwargs['dns2'], '8.8.4.4')
self.flags(network_manager='nova.network.manager.VlanManager')
from nova.network import manager as net_manager
self.stubs.Set(net_manager.VlanManager, 'create_networks',
fake_create_networks)
self.commands.create(
label='Test',
cidr='10.2.0.0/24',
num_networks=1,
network_size=256,
multi_host='F',
vlan=200,
vlan_start=201,
vpn_start=2000,
cidr_v6='fd00:2::/120',
gateway='10.2.0.1',
gateway_v6='fd00:2::22',
bridge='br200',
bridge_interface='eth0',
dns1='8.8.8.8',
dns2='8.8.4.4',
uuid='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa')
def test_list(self):
def fake_network_get_all(context):
return [db_fakes.FakeModel(self.net)]
self.stubs.Set(db, 'network_get_all', fake_network_get_all)
output = StringIO.StringIO()
sys.stdout = output
self.commands.list()
sys.stdout = sys.__stdout__
result = output.getvalue()
_fmt = "\t".join(["%(id)-5s", "%(cidr)-18s", "%(cidr_v6)-15s",
"%(dhcp_start)-15s", "%(dns1)-15s", "%(dns2)-15s",
"%(vlan)-15s", "%(project_id)-15s", "%(uuid)-15s"])
head = _fmt % {'id': 'id',
'cidr': 'IPv4',
'cidr_v6': 'IPv6',
'dhcp_start': 'start address',
'dns1': 'DNS1',
'dns2': 'DNS2',
'vlan': 'VlanID',
'project_id': 'project',
'uuid': "uuid"}
body = _fmt % {'id': self.net['id'],
'cidr': self.net['cidr'],
'cidr_v6': self.net['cidr_v6'],
'dhcp_start': self.net['dhcp_start'],
'dns1': self.net['dns1'],
'dns2': self.net['dns2'],
'vlan': self.net['vlan'],
'project_id': self.net['project_id'],
'uuid': self.net['uuid']}
answer = '%s\n%s\n' % (head, body)
self.assertEqual(result, answer)
def test_delete(self):
self.fake_net = self.net
self.fake_net['project_id'] = None
self.fake_net['host'] = None
self.stubs.Set(db, 'network_get_by_uuid',
self.fake_network_get_by_uuid)
def fake_network_delete_safe(context, network_id):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(network_id, self.fake_net['id'])
self.stubs.Set(db, 'network_delete_safe', fake_network_delete_safe)
self.commands.delete(uuid=self.fake_net['uuid'])
def test_delete_by_cidr(self):
self.fake_net = self.net
self.fake_net['project_id'] = None
self.fake_net['host'] = None
self.stubs.Set(db, 'network_get_by_cidr',
self.fake_network_get_by_cidr)
def fake_network_delete_safe(context, network_id):
self.assertTrue(context.to_dict()['is_admin'])
self.assertEqual(network_id, self.fake_net['id'])
self.stubs.Set(db, 'network_delete_safe', fake_network_delete_safe)
self.commands.delete(fixed_range=self.fake_net['cidr'])
def _test_modify_base(self, update_value, project, host, dis_project=None,
dis_host=None):
self.fake_net = self.net
self.fake_update_value = update_value
self.stubs.Set(db, 'network_get_by_cidr',
self.fake_network_get_by_cidr)
self.stubs.Set(db, 'network_update', self.fake_network_update)
self.commands.modify(self.fake_net['cidr'], project=project, host=host,
dis_project=dis_project, dis_host=dis_host)
def test_modify_associate(self):
self._test_modify_base(update_value={'project_id': 'test_project',
'host': 'test_host'},
project='test_project', host='test_host')
def test_modify_unchanged(self):
self._test_modify_base(update_value={}, project=None, host=None)
def test_modify_disassociate(self):
self._test_modify_base(update_value={'project_id': None, 'host': None},
project=None, host=None, dis_project=True,
dis_host=True)
class NeutronV2NetworkCommandsTestCase(test.TestCase):
def setUp(self):
super(NeutronV2NetworkCommandsTestCase, self).setUp()
self.flags(network_api_class='nova.network.neutronv2.api.API')
self.commands = manage.NetworkCommands()
def test_create(self):
self.assertEqual(2, self.commands.create())
def test_list(self):
self.assertEqual(2, self.commands.list())
def test_delete(self):
self.assertEqual(2, self.commands.delete())
def test_modify(self):
self.assertEqual(2, self.commands.modify('192.168.0.1'))
class ProjectCommandsTestCase(test.TestCase):
def setUp(self):
super(ProjectCommandsTestCase, self).setUp()
self.commands = manage.ProjectCommands()
def test_quota(self):
output = StringIO.StringIO()
sys.stdout = output
self.commands.quota(project_id='admin',
key='instances',
value='unlimited',
)
sys.stdout = sys.__stdout__
result = output.getvalue()
print_format = "%-36s %-10s" % ('instances', 'unlimited')
self.assertIn(print_format, result)
def test_quota_update_invalid_key(self):
self.assertEqual(2, self.commands.quota('admin', 'volumes1', '10'))
class VmCommandsTestCase(test.TestCase):
def setUp(self):
super(VmCommandsTestCase, self).setUp()
self.commands = manage.VmCommands()
self.fake_flavor = objects.Flavor(**test_flavors.DEFAULT_FLAVORS[0])
def test_list_without_host(self):
output = StringIO.StringIO()
sys.stdout = output
with mock.patch.object(objects.InstanceList, 'get_by_filters') as get:
get.return_value = objects.InstanceList(
objects=[fake_instance.fake_instance_obj(
context.get_admin_context(), host='foo-host',
instance_type=self.fake_flavor,
expected_attrs=('flavor'))])
self.commands.list()
sys.stdout = sys.__stdout__
result = output.getvalue()
self.assertIn('node', result) # check the header line
self.assertIn('m1.tiny', result) # flavor.name
self.assertIn('foo-host', result)
def test_list_with_host(self):
output = StringIO.StringIO()
sys.stdout = output
with mock.patch.object(objects.InstanceList, 'get_by_host') as get:
get.return_value = objects.InstanceList(
objects=[fake_instance.fake_instance_obj(
context.get_admin_context(),
instance_type=self.fake_flavor,
expected_attrs=('flavor'))])
self.commands.list(host='fake-host')
sys.stdout = sys.__stdout__
result = output.getvalue()
self.assertIn('node', result) # check the header line
self.assertIn('m1.tiny', result) # flavor.name
self.assertIn('fake-host', result)
class DBCommandsTestCase(test.TestCase):
def setUp(self):
super(DBCommandsTestCase, self).setUp()
self.commands = manage.DbCommands()
def test_archive_deleted_rows_negative(self):
self.assertEqual(1, self.commands.archive_deleted_rows(-1))
@mock.patch.object(migration, 'db_null_instance_uuid_scan',
return_value={'foo': 0})
def test_null_instance_uuid_scan_no_records_found(self, mock_scan):
self.useFixture(fixtures.MonkeyPatch('sys.stdout',
StringIO.StringIO()))
self.commands.null_instance_uuid_scan()
self.assertIn("There were no records found", sys.stdout.getvalue())
@mock.patch.object(migration, 'db_null_instance_uuid_scan',
return_value={'foo': 1, 'bar': 0})
def _test_null_instance_uuid_scan(self, mock_scan, delete):
self.useFixture(fixtures.MonkeyPatch('sys.stdout',
StringIO.StringIO()))
self.commands.null_instance_uuid_scan(delete)
output = sys.stdout.getvalue()
if delete:
self.assertIn("Deleted 1 records from table 'foo'.", output)
self.assertNotIn("Deleted 0 records from table 'bar'.", output)
else:
self.assertIn("1 records in the 'foo' table", output)
self.assertNotIn("0 records in the 'bar' table", output)
self.assertNotIn("There were no records found", output)
def test_null_instance_uuid_scan_readonly(self):
self._test_null_instance_uuid_scan(delete=False)
def test_null_instance_uuid_scan_delete(self):
self._test_null_instance_uuid_scan(delete=True)
def test_migrate_flavor_data_negative(self):
self.assertEqual(1, self.commands.migrate_flavor_data(-1))
class ServiceCommandsTestCase(test.TestCase):
def setUp(self):
super(ServiceCommandsTestCase, self).setUp()
self.commands = manage.ServiceCommands()
def test_service_enable_invalid_params(self):
self.assertEqual(2, self.commands.enable('nohost', 'noservice'))
def test_service_disable_invalid_params(self):
self.assertEqual(2, self.commands.disable('nohost', 'noservice'))
class CellCommandsTestCase(test.TestCase):
def setUp(self):
super(CellCommandsTestCase, self).setUp()
self.commands = manage.CellCommands()
def test_create_transport_hosts_multiple(self):
"""Test the _create_transport_hosts method
when broker_hosts is set.
"""
brokers = "127.0.0.1:5672,127.0.0.2:5671"
thosts = self.commands._create_transport_hosts(
'guest', 'devstack',
broker_hosts=brokers)
self.assertEqual(2, len(thosts))
self.assertEqual('127.0.0.1', thosts[0].hostname)
self.assertEqual(5672, thosts[0].port)
self.assertEqual('127.0.0.2', thosts[1].hostname)
self.assertEqual(5671, thosts[1].port)
def test_create_transport_hosts_single(self):
"""Test the _create_transport_hosts method when hostname is passed."""
thosts = self.commands._create_transport_hosts('guest', 'devstack',
hostname='127.0.0.1',
port=80)
self.assertEqual(1, len(thosts))
self.assertEqual('127.0.0.1', thosts[0].hostname)
self.assertEqual(80, thosts[0].port)
def test_create_transport_hosts_single_broker(self):
"""Test the _create_transport_hosts method for single broker_hosts."""
thosts = self.commands._create_transport_hosts(
'guest', 'devstack',
broker_hosts='127.0.0.1:5672')
self.assertEqual(1, len(thosts))
self.assertEqual('127.0.0.1', thosts[0].hostname)
self.assertEqual(5672, thosts[0].port)
def test_create_transport_hosts_both(self):
"""Test the _create_transport_hosts method when both broker_hosts
and hostname/port are passed.
"""
thosts = self.commands._create_transport_hosts(
'guest', 'devstack',
broker_hosts='127.0.0.1:5672',
hostname='127.0.0.2', port=80)
self.assertEqual(1, len(thosts))
self.assertEqual('127.0.0.1', thosts[0].hostname)
self.assertEqual(5672, thosts[0].port)
def test_create_transport_hosts_wrong_val(self):
"""Test the _create_transport_hosts method when broker_hosts
        is wrongly specified
"""
self.assertRaises(ValueError,
self.commands._create_transport_hosts,
'guest', 'devstack',
broker_hosts='127.0.0.1:5672,127.0.0.1')
def test_create_transport_hosts_wrong_port_val(self):
"""Test the _create_transport_hosts method when port in
        broker_hosts is wrongly specified
"""
self.assertRaises(ValueError,
self.commands._create_transport_hosts,
'guest', 'devstack',
broker_hosts='127.0.0.1:')
def test_create_transport_hosts_wrong_port_arg(self):
"""Test the _create_transport_hosts method when port
        argument is wrongly specified
"""
self.assertRaises(ValueError,
self.commands._create_transport_hosts,
'guest', 'devstack',
hostname='127.0.0.1', port='ab')
@mock.patch.object(context, 'get_admin_context')
@mock.patch.object(db, 'cell_create')
def test_create_broker_hosts(self, mock_db_cell_create, mock_ctxt):
"""Test the create function when broker_hosts is
passed
"""
cell_tp_url = "fake://guest:devstack@127.0.0.1:5432"
cell_tp_url += ",guest:devstack@127.0.0.2:9999/"
ctxt = mock.sentinel
mock_ctxt.return_value = mock.sentinel
self.commands.create("test",
broker_hosts='127.0.0.1:5432,127.0.0.2:9999',
woffset=0, wscale=0,
username="guest", password="devstack")
exp_values = {'name': "test",
'is_parent': False,
'transport_url': cell_tp_url,
'weight_offset': 0.0,
'weight_scale': 0.0}
mock_db_cell_create.assert_called_once_with(ctxt, exp_values)
@mock.patch.object(context, 'get_admin_context')
@mock.patch.object(db, 'cell_create')
def test_create_hostname(self, mock_db_cell_create, mock_ctxt):
"""Test the create function when hostname and port is
passed
"""
cell_tp_url = "fake://guest:devstack@127.0.0.1:9999/"
ctxt = mock.sentinel
mock_ctxt.return_value = mock.sentinel
self.commands.create("test",
hostname='127.0.0.1', port="9999",
woffset=0, wscale=0,
username="guest", password="devstack")
exp_values = {'name': "test",
'is_parent': False,
'transport_url': cell_tp_url,
'weight_offset': 0.0,
'weight_scale': 0.0}
mock_db_cell_create.assert_called_once_with(ctxt, exp_values)
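class StdoutCaptureExampleTestCase(test.TestCase):
    """Editor's sketch (hypothetical, not part of the original suite):
    the stdout-capture pattern the tests above rely on, in isolation.
    """
    def test_capture(self):
        self.useFixture(fixtures.MonkeyPatch('sys.stdout',
                                             StringIO.StringIO()))
        print 'captured?'
        self.assertIn('captured?', sys.stdout.getvalue())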
|
cloudbase/nova-virtualbox
|
nova/tests/unit/test_nova_manage.py
|
Python
|
apache-2.0
| 23,295
|
# -*- coding: utf8 -*-
"""
tentacles, python ORM
Copyright (C) 2010-2011, Guillaume Bour <guillaume@bour.cc>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 3
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = "Guillaume Bour <guillaume@bour.cc>"
__version__ = "$Revision$"
__date__ = "$Date$"
from database import SQLiteStorage as Storage
from table import Object
from queryset import *
|
gbour/Tentacles
|
tentacles/backends/sqlite3/__init__.py
|
Python
|
gpl-3.0
| 909
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Michel Petit <petit.michel@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import optparse
import teamlib
import sys
#import urllib2
import locale
import gettext
from gettext import gettext as _
from teamlib.version import APP_VERSION
from teamlib.version import APP_NAME
from teamlib.team import Team
local_path = os.path.realpath(os.path.dirname(sys.argv[0])) + os.sep + 'locale'
langs = []
lc, encoding = locale.getdefaultlocale()
if (lc):
#If we have a default, it's the first in the list
langs = [lc]
# Now lets get all of the supported languages on the system
language = os.environ.get('LANGUAGE', None)
if (language):
"""langage comes back something like en_CA:en_US:en_GB:en
on linuxy systems, on Win32 it's nothing, so we need to
split it up into a list"""
langs += language.split(":")
"""our defaults"""
langs += ["fr_FR", 'en_US']
gettext.bindtextdomain(APP_NAME, local_path)
gettext.textdomain(APP_NAME)
try:
lang = gettext.translation(APP_NAME, local_path, languages=langs, fallback = False)
_ = lang.ugettext
except IOError as e:
sys.stderr.write('Translation not implemented yet!\n')
sys.stderr.write(e.strerror + '\n')
def main():
parser = optparse.OptionParser(version="%prog " + APP_VERSION)
parser.add_option("-t", "--token", dest="token", help=_("Token you must get from your administrator or by your settings page on Ruche web site you are using."), metavar=_("STRING"))
parser.add_option("-u", "--url", dest="url", help=_("URL base to use for the request."), metavar=_("URL"))
parser.add_option("--csv", dest='csv', help=_("Export response as CSV."), metavar=_('FILE'))
parser.add_option("--xml", dest='xml', help=_("Export response as XML."), metavar=_('FILE'))
parser.add_option("--json", dest='json', help=_("Export response as JSON."), metavar=_('FILE'))
(options, args) = parser.parse_args()
if options.token and options.url:
t = Team(options.token, options.url)
if __name__ == "__main__":
main()
|
malenkiki/team
|
team.py
|
Python
|
mit
| 3,108
|
""" A wrapper for the 'gpg' command::
Portions of this module are derived from A.M. Kuchling's well-designed
GPG.py, using Richard Jones' updated version 1.3, which can be found
in the pycrypto CVS repository on Sourceforge:
http://pycrypto.cvs.sourceforge.net/viewvc/pycrypto/gpg/GPG.py
This module is *not* forward-compatible with amk's; some of the
old interface has changed. For instance, since I've added decrypt
functionality, I elected to initialize with a 'gnupghome' argument
instead of 'keyring', so that gpg can find both the public and secret
keyrings. I've also altered some of the returned objects in order for
the caller to not have to know as much about the internals of the
result classes.
While the rest of ISconf is released under the GPL, I am releasing
this single file under the same terms that A.M. Kuchling used for
pycrypto.
Steve Traugott, stevegt@terraluna.org
Thu Jun 23 21:27:20 PDT 2005
This version of the module has been modified from Steve Traugott's version
(see http://trac.t7a.org/isconf/browser/trunk/lib/python/isconf/GPG.py) by
Vinay Sajip to make use of the subprocess module (Steve's version uses os.fork()
and so does not work on Windows). Renamed to gnupg.py to avoid confusion with
the previous versions.
Modifications Copyright (C) 2008-2010 Vinay Sajip. All rights reserved.
A unittest harness (test_gnupg.py) has also been added.
"""
__author__ = "Vinay Sajip"
__date__ = "$07-Jan-2010 18:19:19$"
try:
from io import StringIO
from io import TextIOWrapper
from io import BufferedReader
from io import BufferedWriter
except ImportError:
from cStringIO import StringIO
class BufferedReader: pass
class BufferedWriter: pass
import locale
import logging
import os
import socket
from subprocess import Popen
from subprocess import PIPE
import threading
try:
    from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logger = logging.getLogger(__name__)
if not logger.handlers:
logger.addHandler(NullHandler())
def _copy_data(instream, outstream):
# Copy one stream to another
sent = 0
while True:
data = instream.read(1024)
if data == "":
break
sent += len(data)
logger.debug("sending chunk (%d): %r", sent, data[:256])
try:
outstream.write(data)
except:
# Can sometimes get 'broken pipe' errors even when the data has all
# been sent
logger.exception('Error sending data')
break
outstream.close()
logger.debug("closed output, %d bytes sent", sent)
def _threaded_copy_data(instream, outstream):
wr = threading.Thread(target=_copy_data, args=(instream, outstream))
wr.setDaemon(True)
wr.start()
return wr
def _write_passphrase(stream, passphrase):
stream.write(passphrase + "\n")
logger.debug("Wrote passphrase")
def _is_sequence(instance):
return isinstance(instance,list) or isinstance(instance,tuple)
def _wrap_input(inp):
if isinstance(inp, BufferedWriter):
inp = TextIOWrapper(inp, locale.getpreferredencoding())
return inp
def _wrap_output(outp):
if isinstance(outp, BufferedReader):
outp = TextIOWrapper(outp)
return outp
class GPG(object):
"Encapsulate access to the gpg executable"
def __init__(self, gpgbinary='gpg', gnupghome=None, verbose=False):
"""Initialize a GPG process wrapper. Options are:
gpgbinary -- full pathname for GPG binary.
gnupghome -- full pathname to where we can find the public and
private keyrings. Default is whatever gpg defaults to.
>>> gpg = GPG(gnupghome="/tmp/pygpgtest")
"""
self.gpgbinary = gpgbinary
self.gnupghome = gnupghome
self.verbose = verbose
if gnupghome and not os.path.isdir(self.gnupghome):
os.makedirs(self.gnupghome,0x1C0)
p = self._open_subprocess(["--version"])
result = Verify() # any result will do for this
self._collect_output(p, result)
if p.returncode != 0:
raise ValueError("Error invoking gpg: %s: %s" % (p.returncode,
result.stderr))
def _open_subprocess(self, args, passphrase=False):
# Internal method: open a pipe to a GPG subprocess and return
# the file objects for communicating with it.
cmd = [self.gpgbinary, '--status-fd 2 --no-tty']
if self.gnupghome:
cmd.append('--homedir "%s" ' % self.gnupghome)
if passphrase:
cmd.append('--batch --passphrase-fd 0')
cmd.extend(args)
cmd = ' '.join(cmd)
if self.verbose:
print(cmd)
logger.debug("%s", cmd)
return Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
def _read_response(self, stream, result):
# Internal method: reads all the output from GPG, taking notice
# only of lines that begin with the magic [GNUPG:] prefix.
#
# Calls methods on the response object for each valid token found,
# with the arg being the remainder of the status line.
lines = []
while True:
line = stream.readline()
lines.append(line)
if self.verbose:
print(line)
logger.debug("%s", line.rstrip())
if line == "": break
line = line.rstrip()
if line[0:9] == '[GNUPG:] ':
# Chop off the prefix
line = line[9:]
L = line.split(None, 1)
keyword = L[0]
if len(L) > 1:
value = L[1]
else:
value = ""
result.handle_status(keyword, value)
result.stderr = ''.join(lines)
def _read_data(self, stream, result):
# Read the contents of the file from GPG's stdout
chunks = []
while True:
data = stream.read(1024)
if data == "":
break
logger.debug("chunk: %s" % data)
chunks.append(data)
result.data = ''.join(chunks)
def _collect_output(self, process, result, writer=None):
"""
Drain the subprocesses output streams, writing the collected output
to the result. If a writer thread (writing to the subprocess) is given,
make sure it's joined before returning.
"""
stderr = _wrap_output(process.stderr)
rr = threading.Thread(target=self._read_response, args=(stderr, result))
rr.setDaemon(True)
rr.start()
stdout = _wrap_output(process.stdout)
dr = threading.Thread(target=self._read_data, args=(stdout, result))
dr.setDaemon(True)
dr.start()
dr.join()
rr.join()
if writer is not None:
writer.join()
process.wait()
def _handle_io(self, args, file, result, passphrase=None):
"Handle a call to GPG - pass input data, collect output data"
# Handle a basic data call - pass data to GPG, handle the output
# including status information. Garbage In, Garbage Out :)
p = self._open_subprocess(args, passphrase is not None)
stdin = _wrap_input(p.stdin)
if passphrase:
_write_passphrase(stdin, passphrase)
writer = _threaded_copy_data(file, stdin)
self._collect_output(p, result, writer)
return result
#
# SIGNATURE METHODS
#
def sign(self, message, **kwargs):
"""sign message"""
return self.sign_file(StringIO(message), **kwargs)
def sign_file(self, file, keyid=None, passphrase=None, clearsign=True):
"""sign file"""
args = ["-sa"]
if clearsign:
args.append("--clearsign")
if keyid:
args.append("--default-key %s" % keyid)
result = Sign()
#We could use _handle_io here except for the fact that if the
#passphrase is bad, gpg bails and you can't write the message.
#self._handle_io(args, StringIO(message), result, passphrase=passphrase)
p = self._open_subprocess(args, passphrase is not None)
try:
stdin = _wrap_input(p.stdin)
if passphrase:
_write_passphrase(stdin, passphrase)
writer = _threaded_copy_data(file, stdin)
except IOError:
logging.exception("error writing message")
writer = None
self._collect_output(p, result, writer)
return result
def verify(self, data):
"""Verify the signature on the contents of the string 'data'
>>> gpg = GPG(gnupghome="/tmp/pygpgtest")
>>> input = gpg.gen_key_input(Passphrase='foo')
>>> key = gpg.gen_key(input)
>>> assert key
>>> sig = gpg.sign('hello',keyid=key.fingerprint,passphrase='bar')
>>> assert not sig
>>> sig = gpg.sign('hello',keyid=key.fingerprint,passphrase='foo')
>>> assert sig
>>> verify = gpg.verify(str(sig))
>>> assert verify
"""
return self.verify_file(StringIO(data))
def verify_file(self, file):
"Verify the signature on the contents of the file-like object 'file'"
result = Verify()
self._handle_io(['--verify'], file, result)
return result
#
# KEY MANAGEMENT
#
def import_keys(self, key_data):
""" import the key_data into our keyring
>>> import shutil
>>> shutil.rmtree("/tmp/pygpgtest")
>>> gpg = GPG(gnupghome="/tmp/pygpgtest")
>>> input = gpg.gen_key_input()
>>> result = gpg.gen_key(input)
>>> print1 = result.fingerprint
>>> result = gpg.gen_key(input)
>>> print2 = result.fingerprint
>>> pubkey1 = gpg.export_keys(print1)
>>> seckey1 = gpg.export_keys(print1,secret=True)
>>> seckeys = gpg.list_keys(secret=True)
>>> pubkeys = gpg.list_keys()
>>> assert print1 in seckeys.fingerprints
>>> assert print1 in pubkeys.fingerprints
>>> str(gpg.delete_keys(print1))
'Must delete secret key first'
>>> str(gpg.delete_keys(print1,secret=True))
'ok'
>>> str(gpg.delete_keys(print1))
'ok'
>>> str(gpg.delete_keys("nosuchkey"))
'No such key'
>>> seckeys = gpg.list_keys(secret=True)
>>> pubkeys = gpg.list_keys()
>>> assert not print1 in seckeys.fingerprints
>>> assert not print1 in pubkeys.fingerprints
>>> result = gpg.import_keys('foo')
>>> assert not result
>>> result = gpg.import_keys(pubkey1)
>>> pubkeys = gpg.list_keys()
>>> seckeys = gpg.list_keys(secret=True)
>>> assert not print1 in seckeys.fingerprints
>>> assert print1 in pubkeys.fingerprints
>>> result = gpg.import_keys(seckey1)
>>> assert result
>>> seckeys = gpg.list_keys(secret=True)
>>> pubkeys = gpg.list_keys()
>>> assert print1 in seckeys.fingerprints
>>> assert print1 in pubkeys.fingerprints
>>> assert print2 in pubkeys.fingerprints
"""
result = ImportResult()
self._handle_io(['--import'], StringIO(key_data), result)
return result
def delete_keys(self, fingerprints, secret=False):
which='key'
if secret:
which='secret-key'
if _is_sequence(fingerprints):
fingerprints = ' '.join(fingerprints)
args = ["--batch --delete-%s %s" % (which, fingerprints)]
result = DeleteResult()
p = self._open_subprocess(args)
self._collect_output(p, result)
return result
def export_keys(self, keyids, secret=False):
"export the indicated keys. 'keyid' is anything gpg accepts"
which=''
if secret:
which='-secret-key'
if _is_sequence(keyids):
keyids = ' '.join(keyids)
args = ["--armor --export%s %s" % (which, keyids)]
p = self._open_subprocess(args)
# gpg --export produces no status-fd output; stdout will be
# empty in case of failure
#stdout, stderr = p.communicate()
result = DeleteResult() # any result will do
self._collect_output(p, result)
return result.data
def list_keys(self, secret=False):
""" list the keys currently in the keyring
>>> import shutil
>>> shutil.rmtree("/tmp/pygpgtest")
>>> gpg = GPG(gnupghome="/tmp/pygpgtest")
>>> input = gpg.gen_key_input()
>>> result = gpg.gen_key(input)
>>> print1 = result.fingerprint
>>> result = gpg.gen_key(input)
>>> print2 = result.fingerprint
>>> pubkeys = gpg.list_keys()
>>> assert print1 in pubkeys.fingerprints
>>> assert print2 in pubkeys.fingerprints
"""
which='keys'
if secret:
which='secret-keys'
args = "--list-%s --fixed-list-mode --fingerprint --with-colons" % (which)
args = [args]
p = self._open_subprocess(args)
# there might be some status thingumy here I should handle... (amk)
# ...nope, unless you care about expired sigs or keys (stevegt)
# Get the response information
result = ListKeys()
self._collect_output(p, result)
stdout = StringIO(result.data)
valid_keywords = 'pub uid sec fpr'.split()
while True:
line = stdout.readline()
if self.verbose:
print(line)
logger.debug("%s", line.rstrip())
if not line:
break
L = line.strip().split(':')
if not L:
continue
keyword = L[0]
if keyword in valid_keywords:
getattr(result, keyword)(L)
return result
def gen_key(self, input):
"""Generate a key; you might use gen_key_input() to create the
control input.
>>> gpg = GPG(gnupghome="/tmp/pygpgtest")
>>> input = gpg.gen_key_input()
>>> result = gpg.gen_key(input)
>>> assert result
>>> result = gpg.gen_key('foo')
>>> assert not result
"""
args = ["--gen-key --batch"]
result = GenKey()
file = StringIO(input)
self._handle_io(args, file, result)
return result
def gen_key_input(self, **kwargs):
"""
Generate --gen-key input per gpg doc/DETAILS
"""
parms = {}
for key, val in list(kwargs.items()):
key = key.replace('_','-').title()
parms[key] = val
parms.setdefault('Key-Type','RSA')
parms.setdefault('Key-Length',1024)
parms.setdefault('Name-Real', "Autogenerated Key")
parms.setdefault('Name-Comment', "Generated by gnupg.py")
try:
logname = os.environ['LOGNAME']
except KeyError:
logname = os.environ['USERNAME']
hostname = socket.gethostname()
parms.setdefault('Name-Email', "%s@%s" % (logname.replace(' ', '_'),
hostname))
out = "Key-Type: %s\n" % parms.pop('Key-Type')
for key, val in list(parms.items()):
out += "%s: %s\n" % (key, val)
out += "%commit\n"
return out
# Key-Type: RSA
# Key-Length: 1024
# Name-Real: ISdlink Server on %s
# Name-Comment: Created by %s
# Name-Email: isdlink@%s
# Expire-Date: 0
# %commit
#
#
# Key-Type: DSA
# Key-Length: 1024
# Subkey-Type: ELG-E
# Subkey-Length: 1024
# Name-Real: Joe Tester
# Name-Comment: with stupid passphrase
# Name-Email: joe@foo.bar
# Expire-Date: 0
# Passphrase: abc
# %pubring foo.pub
# %secring foo.sec
# %commit
#
# ENCRYPTION
#
def encrypt_file(self, file, recipients, sign=None,
always_trust=False, passphrase=None):
"Encrypt the message read from the file-like object 'file'"
args = ['--encrypt --armor']
if not _is_sequence(recipients):
recipients = (recipients,)
for recipient in recipients:
args.append('--recipient %s' % recipient)
if sign:
args.append("--sign --default-key %s" % sign)
if always_trust:
args.append("--always-trust")
result = Crypt()
self._handle_io(args, file, result, passphrase=passphrase)
return result
def encrypt(self, data, recipients, **kwargs):
"""Encrypt the message contained in the string 'data'
>>> import shutil
>>> if os.path.exists("/tmp/pygpgtest"):
... shutil.rmtree("/tmp/pygpgtest")
>>> gpg = GPG(gnupghome="/tmp/pygpgtest")
>>> input = gpg.gen_key_input(passphrase='foo')
>>> result = gpg.gen_key(input)
>>> print1 = result.fingerprint
>>> input = gpg.gen_key_input()
>>> result = gpg.gen_key(input)
>>> print2 = result.fingerprint
>>> result = gpg.encrypt("hello",print2)
>>> message = str(result)
>>> assert message != 'hello'
>>> result = gpg.decrypt(message)
>>> assert result
>>> str(result)
'hello'
>>> result = gpg.encrypt("hello again",print1)
>>> message = str(result)
>>> result = gpg.decrypt(message)
>>> result.status
'need passphrase'
>>> result = gpg.decrypt(message,passphrase='bar')
>>> result.status
'decryption failed'
>>> assert not result
>>> result = gpg.decrypt(message,passphrase='foo')
>>> result.status
'decryption ok'
>>> str(result)
'hello again'
>>> result = gpg.encrypt("signed hello",print2,sign=print1)
>>> result.status
'need passphrase'
>>> result = gpg.encrypt("signed hello",print2,sign=print1,passphrase='foo')
>>> result.status
'encryption ok'
>>> message = str(result)
>>> result = gpg.decrypt(message)
>>> result.status
'decryption ok'
>>> assert result.fingerprint == print1
"""
return self.encrypt_file(StringIO(data), recipients, **kwargs)
def decrypt(self, message, **kwargs):
return self.decrypt_file(StringIO(message), **kwargs)
def decrypt_file(self, file, always_trust=False, passphrase=None):
args = ["--decrypt"]
if always_trust:
args.append("--always-trust")
result = Crypt()
self._handle_io(args, file, result, passphrase)
return result
class Verify(object):
"Handle status messages for --verify"
def __init__(self):
self.valid = False
self.fingerprint = self.creation_date = self.timestamp = None
self.signature_id = self.key_id = None
self.username = None
def __nonzero__(self):
return self.valid
__bool__ = __nonzero__
def handle_status(self, key, value):
if key in ("TRUST_UNDEFINED", "TRUST_NEVER", "TRUST_MARGINAL",
"TRUST_FULLY", "TRUST_ULTIMATE"):
pass
elif key in ("PLAINTEXT", "PLAINTEXT_LENGTH"):
pass
elif key == "BADSIG":
self.valid = False
self.key_id, self.username = value.split(None, 1)
elif key == "GOODSIG":
self.valid = True
self.key_id, self.username = value.split(None, 1)
elif key == "VALIDSIG":
(self.fingerprint,
self.creation_date,
self.sig_timestamp,
self.expire_timestamp) = value.split()[:4]
elif key == "SIG_ID":
(self.signature_id,
self.creation_date, self.timestamp) = value.split()
else:
raise ValueError("Unknown status message: %r" % key)
class ImportResult(object):
"Handle status messages for --import"
counts = '''count no_user_id imported imported_rsa unchanged
n_uids n_subk n_sigs n_revoc sec_read sec_imported
sec_dups not_imported'''.split()
def __init__(self):
self.imported = []
self.results = []
self.fingerprints = []
for result in self.counts:
setattr(self, result, None)
def __nonzero__(self):
if self.not_imported: return False
if not self.fingerprints: return False
return True
__bool__ = __nonzero__
ok_reason = {
'0': 'Not actually changed',
'1': 'Entirely new key',
'2': 'New user IDs',
'4': 'New signatures',
'8': 'New subkeys',
'16': 'Contains private key',
}
problem_reason = {
'0': 'No specific reason given',
'1': 'Invalid Certificate',
'2': 'Issuer Certificate missing',
'3': 'Certificate Chain too long',
'4': 'Error storing certificate',
}
def handle_status(self, key, value):
if key == "IMPORTED":
# this duplicates info we already see in import_ok & import_problem
pass
elif key == "NODATA":
self.results.append({'fingerprint': None,
'problem': '0', 'text': 'No valid data found'})
elif key == "IMPORT_OK":
reason, fingerprint = value.split()
reasons = []
for code, text in list(self.ok_reason.items()):
if int(reason) | int(code) == int(reason):
reasons.append(text)
reasontext = '\n'.join(reasons) + "\n"
self.results.append({'fingerprint': fingerprint,
'ok': reason, 'text': reasontext})
self.fingerprints.append(fingerprint)
elif key == "IMPORT_PROBLEM":
try:
reason, fingerprint = value.split()
except:
reason = value
fingerprint = '<unknown>'
self.results.append({'fingerprint': fingerprint,
'problem': reason, 'text': self.problem_reason[reason]})
elif key == "IMPORT_RES":
import_res = value.split()
for i in range(len(self.counts)):
setattr(self, self.counts[i], int(import_res[i]))
else:
raise ValueError("Unknown status message: %r" % key)
def summary(self):
l = []
l.append('%d imported'%self.imported)
if self.not_imported:
l.append('%d not imported'%self.not_imported)
return ', '.join(l)
class ListKeys(list):
''' Handle status messages for --list-keys.
Handle pub and uid (relating the latter to the former).
Don't care about (info from src/DETAILS):
crt = X.509 certificate
crs = X.509 certificate and private key available
sub = subkey (secondary key)
ssb = secret subkey (secondary key)
uat = user attribute (same as user id except for field 10).
sig = signature
rev = revocation signature
pkd = public key data (special field format, see below)
grp = reserved for gpgsm
rvk = revocation key
'''
def __init__(self):
self.curkey = None
self.fingerprints = []
def key(self, args):
vars = ("""
type trust length algo keyid date expires dummy ownertrust uid
""").split()
self.curkey = {}
for i in range(len(vars)):
self.curkey[vars[i]] = args[i]
self.curkey['uids'] = [self.curkey['uid']]
del self.curkey['uid']
self.append(self.curkey)
pub = sec = key
def fpr(self, args):
self.curkey['fingerprint'] = args[9]
self.fingerprints.append(args[9])
def uid(self, args):
self.curkey['uids'].append(args[9])
def handle_status(self, key, value):
pass
class Crypt(Verify):
"Handle status messages for --encrypt and --decrypt"
def __init__(self):
Verify.__init__(self)
self.data = ''
self.ok = False
self.status = ''
def __nonzero__(self):
if self.ok: return True
return False
__bool__ = __nonzero__
def __str__(self):
return self.data
def handle_status(self, key, value):
if key in ("ENC_TO", "USERID_HINT", "GOODMDC", "END_DECRYPTION",
"BEGIN_SIGNING", "NO_SECKEY"):
pass
elif key in ("NEED_PASSPHRASE", "BAD_PASSPHRASE", "GOOD_PASSPHRASE",
"DECRYPTION_FAILED"):
self.status = key.replace("_", " ").lower()
elif key == "BEGIN_DECRYPTION":
self.status = 'decryption incomplete'
elif key == "BEGIN_ENCRYPTION":
self.status = 'encryption incomplete'
elif key == "DECRYPTION_OKAY":
self.status = 'decryption ok'
self.ok = True
elif key == "END_ENCRYPTION":
self.status = 'encryption ok'
self.ok = True
elif key == "INV_RECP":
self.status = 'invalid recipient'
elif key == "KEYEXPIRED":
self.status = 'key expired'
elif key == "SIG_CREATED":
self.status = 'sig created'
elif key == "SIGEXPIRED":
self.status = 'sig expired'
else:
Verify.handle_status(self, key, value)
class GenKey(object):
"Handle status messages for --gen-key"
def __init__(self):
self.type = None
self.fingerprint = None
def __nonzero__(self):
if self.fingerprint: return True
return False
__bool__ = __nonzero__
def __str__(self):
return self.fingerprint or ''
def handle_status(self, key, value):
if key in ("PROGRESS", "GOOD_PASSPHRASE", "NODATA"):
pass
elif key == "KEY_CREATED":
(self.type,self.fingerprint) = value.split()
else:
raise ValueError("Unknown status message: %r" % key)
class DeleteResult(object):
"Handle status messages for --delete-key and --delete-secret-key"
def __init__(self):
self.status = 'ok'
def __str__(self):
return self.status
problem_reason = {
'1': 'No such key',
'2': 'Must delete secret key first',
        '3': 'Ambiguous specification',
}
def handle_status(self, key, value):
if key == "DELETE_PROBLEM":
self.status = self.problem_reason.get(value,
"Unknown error: %r" % value)
else:
raise ValueError("Unknown status message: %r" % key)
class Sign(object):
"Handle status messages for --sign"
def __init__(self):
self.type = None
self.fingerprint = None
def __nonzero__(self):
if self.fingerprint: return True
return False
__bool__ = __nonzero__
def __str__(self):
return self.data or ''
def handle_status(self, key, value):
if key in ("USERID_HINT", "NEED_PASSPHRASE", "BAD_PASSPHRASE",
"GOOD_PASSPHRASE", "BEGIN_SIGNING"):
pass
elif key == "SIG_CREATED":
(self.type,
algo, hashalgo, cls,
self.timestamp, self.fingerprint
) = value.split()
else:
raise ValueError("Unknown status message: %r" % key)
|
ashemery/tariq
|
Tariq/gnupg.py
|
Python
|
gpl-3.0
| 27,595
|
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# This file is part of the E-Cell System
#
# Copyright (C) 1996-2016 Keio University
# Copyright (C) 2008-2016 RIKEN
# Copyright (C) 2005-2009 The Molecular Sciences Institute
#
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
#
# E-Cell System is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# E-Cell System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with E-Cell System -- see the file COPYING.
# If not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#END_HEADER
#
#'Design: Gabor Bereczki <gabor@e-cell.org>',
#'Design and application Framework: Koichi Takahashi <shafi@e-cell.org>',
#'Programming: Gabor Bereczki' at
# E-CELL Project, Lab. for Bioinformatics, Keio University.
#
import os
import os.path
import gtk
import gtk.gdk
try:
import gnomecanvas
except ImportError:
import gnome.canvas as gnomecanvas
from ecell.ui.model_editor.Utils import *
from ecell.ui.model_editor.ListWindow import *
from ecell.ui.model_editor.Constants import *
from ecell.ui.model_editor.PathwayCanvas import *
from ecell.ui.model_editor.LayoutCommand import *
class PathwayEditor( ListWindow ):
def __init__( self, theModelEditor, aLayout, aRoot=None ):
"""
in: ModelEditor theModelEditor
returns nothing
"""
# init superclass
ListWindow.__init__( self, theModelEditor ,aRoot)
self.theLayout = aLayout
self.theModelEditor = theModelEditor
self.zoom=0.25
def openWindow( self ):
"""
in: nothing
returns nothing
"""
# superclass openwindow
ListWindow.openWindow( self )
# add signal handlers
canv=gnomecanvas.Canvas()
canv.show_all()
self['scrolledwindow1'].add(canv)
self.thePathwayCanvas = PathwayCanvas( self, canv )
self.theLayout.attachToCanvas( self.thePathwayCanvas )
self.addHandlers({
'on_zoom_in_button_clicked' : self.__zoom_in,\
'on_zoom_out_button_clicked' : self.__zoom_out,\
'on_zoom_to_fit_button_clicked' : self.__zoom_to_fit,\
'on_print_button_clicked' : self.__print,\
'on_selector_button_toggled' : self.__palette_toggled,\
'on_variable_button_toggled' : self.__palette_toggled, \
'on_system_button_toggled' : self.__palette_toggled,\
'on_process_button_toggled' : self.__palette_toggled,\
'on_text_button_toggled' : self.__palette_toggled,\
'on_layout_name_entry_activate' : self.__rename_layout,\
'on_layout_name_entry_editing_done' : self.__rename_layout,\
#'on_layout_name_entry_focus_out_event' : self.__rename_layout,
'on_delete_button_clicked': self.__DeleteLayoutButton_clicked,\
'on_clone_button_clicked': self.__CloneLayoutButton_clicked,\
'on_custom_button_toggled' : self.__palette_toggled,\
'on_search_entry_activate' : self.__search,\
'on_rename_button_clicked': self.__editLabel,\
'on_search_entry_editing_done' : self.__search })
self.theHBox = self['hbox7']
self.theLabel = self['layout_name_label']
self.theEntry = self['layout_name_entry']
self['top_frame'].remove(self.theHBox)
self.theHBox.remove( self.theEntry )
self.update()
#get Palette Button Widgets
selector = ListWindow.getWidget(self,'selector_button')
selector.set_active(True)
variable = ListWindow.getWidget(self,'variable_button')
process = ListWindow.getWidget(self,'process_button')
system = ListWindow.getWidget(self,'system_button')
custom = ListWindow.getWidget(self,'custom_button')
text = ListWindow.getWidget(self,'text_button')
self.zoomin=ListWindow.getWidget(self,'zoom_in_button')
self.zoomout=ListWindow.getWidget(self,'zoom_out_button')
self.zoomtofit=ListWindow.getWidget(self,'zoom_to_fit_button')
self.theButtonDict={ 'selector':PE_SELECTOR, 'variable':PE_VARIABLE , 'process':PE_PROCESS, 'system':PE_SYSTEM , 'custom':PE_CUSTOM , 'text':PE_TEXT}
self.thePaletteButtonDict={'selector': selector, 'variable' : variable , 'process': process, 'system' : system, 'custom' : custom, 'text':text}
        self.theButtonKeys = sorted(self.thePaletteButtonDict.keys())
# Sets the return PaletteButton value
self.__CurrPaletteButton = 'selector'
self.__PrevPaletteButton = None
self.isFirst=True
def getLabelWidget( self ):
return self.theHBox
def update( self, arg1 = None, arg2 = None):
if not self.exists():
return
self.theEntry.set_text( self.theLayout.getName() )
self.theLabel.set_text( self.theLayout.getName() )
self.theLayout.resumeSelection()
def deleted( self, *args ):
# detach canvas from layout
self.thePathwayCanvas.getLayout().detachFromCanvas()
self.theModelEditor.thePathwayEditorList.remove(self)
ListWindow.deleted( self, args )
if self.theModelEditor.theObjectEditorWindow!=None:
self.theModelEditor.theObjectEditorWindow.destroy(self)
def getPathwayCanvas( self ):
return self.thePathwayCanvas
def getPaletteButton(self):
return self.theButtonDict[self.__CurrPaletteButton]
def toggle(self,aName,aStat):
if aStat:
self.thePaletteButtonDict[aName].set_active(True)
else:
self.thePaletteButtonDict[aName].set_active(False)
def getLayout( self ):
return self.theLayout
############################################################
#Callback Handlers
############################################################
def __editLabel( self, *args ):
self.theHBox.remove( self.theLabel )
self.theHBox.pack_start( self.theEntry )
self.theEntry.show()
self.theEntry.grab_focus()
self['rename_button'].set_sensitive( False )
def __zoom_in( self, *args ):
aZoomratio=self.thePathwayCanvas.getZoomRatio()
aNewratio=aZoomratio+self.zoom
self.thePathwayCanvas.setZoomRatio(aNewratio)
if not self.zoomout.get_property('sensitive'):
self.zoomout.set_sensitive(True)
if not self.zoomtofit.get_property('sensitive'):
self.zoomtofit.set_sensitive(True)
def __rename_layout( self, *args ):
if len(self.theEntry.get_text())>0:
oldName = self.theLayout.getName()
newName = self.theEntry.get_text()
aCommand = RenameLayout( self.theLayout.theLayoutManager, oldName, newName )
if not isIDEligible( newName ):
self.theModelEditor.printMessage( "Only alphanumeric characters and _ are allowed in layout names!", ME_ERROR )
if aCommand.isExecutable() and isIDEligible( newName ):
self.theModelEditor.doCommandList( [aCommand] )
else:
self.theEntry.set_text(oldName)
self.theLabel.set_text(oldName)
self.theHBox.remove( self.theEntry )
self.theHBox.pack_start( self.theLabel)
self.theLabel.show()
self['rename_button'].set_sensitive( True )
def __zoom_out( self, *args ):
width,height=self.thePathwayCanvas.getSize()
if width<860:
self.zoomout.set_sensitive(False)
self.zoomtofit.set_sensitive(False)
return
if width>860:
aZoomratio=self.thePathwayCanvas.getZoomRatio()
aNewratio=aZoomratio-self.zoom
self.thePathwayCanvas.setZoomRatio(aNewratio)
def __zoom_to_fit( self, *args ):
aNewratio=self.zoom
self.thePathwayCanvas.setZoomRatio(aNewratio)
self.zoomtofit.set_sensitive(False)
self.zoomout.set_sensitive(False)
def __print( self, *args ):
self.theModelEditor.printMessage("Sorry, not implemented !", ME_ERROR )
def __palette_toggled( self, *args ):
aButtonName=args[0].get_name().split('_')[0]
if self.isFirst:
if aButtonName =='custom' or aButtonName =='text':
self.theModelEditor.printMessage("Sorry, not implemented !", ME_ERROR )
if aButtonName!=self.__CurrPaletteButton:
self.isFirst=False
self.toggle(aButtonName,True)
self.toggle(self.__CurrPaletteButton,False)
self.__CurrPaletteButton=aButtonName
elif aButtonName==self.__CurrPaletteButton:
self.isFirst=False
if self.__CurrPaletteButton=='selector':
self.toggle(self.__CurrPaletteButton,True)
else:
self.toggle(self.__CurrPaletteButton,False)
self.toggle('selector',True)
self.__CurrPaletteButton='selector'
else:
self.isFirst=True
def __search( self, *args ):
self.theModelEditor.printMessage("Sorry, not implemented !", ME_ERROR )
def __DeleteLayoutButton_clicked(self, *args):
layoutManager = self.theModelEditor.theLayoutManager
layoutName = self.theLayout.getName()
if layoutName == 'Choose...':
self.theModelEditor.printMessage("This is not a valid layout name", ME_WARNING)
return
aCommand = DeleteLayout( layoutManager, layoutName)
self.theModelEditor.doCommandList( [ aCommand ] )
def __CloneLayoutButton_clicked(self, *args):
layoutManager = self.theModelEditor.theLayoutManager
layoutName = self.theLayout.getName()
if layoutName == 'Choose...':
self.theModelEditor.printMessage("This is not a valid layout name", ME_WARNING)
return
aCommand = CloneLayout( layoutManager, layoutName)
self.theModelEditor.doCommandList( [ aCommand ] )
newLayoutName = "copyOf" + layoutName
self.theModelEditor.createPathwayEditor( layoutManager.getLayout( newLayoutName ) )
|
ecell/ecell3
|
ecell/frontend/model-editor/ecell/ui/model_editor/PathwayEditor.py
|
Python
|
lgpl-3.0
| 11,027
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('geokey_sapelli', '0005_sapellifield_truefalse'),
]
operations = [
migrations.AddField(
model_name='sapelliproject',
name='sapelli_fingerprint',
field=models.IntegerField(default=-1),
preserve_default=False,
),
]
|
ExCiteS/geokey-sapelli
|
geokey_sapelli/migrations/0006_sapelliproject_sapelli_fingerprint.py
|
Python
|
mit
| 468
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_node_affinity import V1NodeAffinity
class TestV1NodeAffinity(unittest.TestCase):
""" V1NodeAffinity unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1NodeAffinity(self):
"""
Test V1NodeAffinity
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1_node_affinity.V1NodeAffinity()
pass
if __name__ == '__main__':
unittest.main()
|
mbohlool/client-python
|
kubernetes/test/test_v1_node_affinity.py
|
Python
|
apache-2.0
| 945
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from collections import Mapping
import os
from pyLibrary import dot
from pyLibrary.dot import set_default, wrap, unwrap
from pyLibrary.parsers import URL
DEBUG = False
_convert = None
_Log = None
_Except = None
def _late_import():
global _convert
global _Log
global _Except
from pyLibrary import convert as _convert
from pyLibrary.debugs.logs import Log as _Log
from pyLibrary.debugs.exceptions import Except as _Except
_ = _convert
_ = _Log
_ = _Except
def get(url):
"""
USE json.net CONVENTIONS TO LINK TO INLINE OTHER JSON
"""
if not _Log:
_late_import()
if url.find("://") == -1:
_Log.error("{{url}} must have a prototcol (eg http://) declared", url=url)
base = URL("")
if url.startswith("file://") and url[7] != "/":
if os.sep=="\\":
base = URL("file:///" + os.getcwd().replace(os.sep, "/").rstrip("/") + "/.")
else:
base = URL("file://" + os.getcwd().rstrip("/") + "/.")
elif url[url.find("://") + 3] != "/":
_Log.error("{{url}} must be absolute", url=url)
phase1 = _replace_ref(wrap({"$ref": url}), base) # BLANK URL ONLY WORKS IF url IS ABSOLUTE
try:
phase2 = _replace_locals(phase1, [phase1])
return wrap(phase2)
    except Exception, e:
        _Log.error("problem replacing locals in\n{{phase1}}", phase1=phase1, cause=e)
def expand(doc, doc_url):
"""
    ASSUMING YOU ALREADY PULLED THE doc FROM doc_url, YOU CAN STILL USE THE
EXPANDING FEATURE
"""
if doc_url.find("://") == -1:
_Log.error("{{url}} must have a prototcol (eg http://) declared", url= doc_url)
phase1 = _replace_ref(doc, URL(doc_url)) # BLANK URL ONLY WORKS IF url IS ABSOLUTE
phase2 = _replace_locals(phase1, [phase1])
return wrap(phase2)
def _replace_ref(node, url):
if url.path.endswith("/"):
url.path = url.path[:-1]
if isinstance(node, Mapping):
ref = None
output = {}
for k, v in node.items():
if k == "$ref":
ref = URL(v)
else:
output[k] = _replace_ref(v, url)
if not ref:
return output
node = output
if not ref.scheme and not ref.path:
# DO NOT TOUCH LOCAL REF YET
output["$ref"] = ref
return output
if not ref.scheme:
# SCHEME RELATIVE IMPLIES SAME PROTOCOL AS LAST TIME, WHICH
# REQUIRES THE CURRENT DOCUMENT'S SCHEME
ref.scheme = url.scheme
# FIND THE SCHEME AND LOAD IT
if ref.scheme in scheme_loaders:
new_value = scheme_loaders[ref.scheme](ref, url)
else:
raise _Log.error("unknown protocol {{scheme}}", scheme=ref.scheme)
if ref.fragment:
new_value = dot.get_attr(new_value, ref.fragment)
if DEBUG:
_Log.note("Replace {{ref}} with {{new_value}}", ref=ref, new_value=new_value)
if not output:
output = new_value
else:
output = unwrap(set_default(output, new_value))
if DEBUG:
_Log.note("Return {{output}}", output=output)
return output
elif isinstance(node, list):
output = [_replace_ref(n, url) for n in node]
# if all(p[0] is p[1] for p in zip(output, node)):
# return node
return output
return node
def _replace_locals(node, doc_path):
if isinstance(node, Mapping):
# RECURS, DEEP COPY
ref = None
output = {}
for k, v in node.items():
if k == "$ref":
ref = v
else:
output[k] = _replace_locals(v, [v] + doc_path)
if not ref:
return output
# REFER TO SELF
frag = ref.fragment
if frag[0] == ".":
# RELATIVE
for i, p in enumerate(frag):
if p != ".":
if i>len(doc_path):
_Log.error("{{frag|quote}} reaches up past the root document", frag=frag)
new_value = dot.get_attr(doc_path[i-1], frag[i::])
break
else:
new_value = doc_path[len(frag) - 1]
else:
# ABSOLUTE
new_value = dot.get_attr(doc_path[-1], frag)
new_value = _replace_locals(new_value, [new_value] + doc_path)
if not output:
return new_value # OPTIMIZATION FOR CASE WHEN node IS {}
else:
return unwrap(set_default(output, new_value))
elif isinstance(node, list):
candidate = [_replace_locals(n, [n] + doc_path) for n in node]
# if all(p[0] is p[1] for p in zip(candidate, node)):
# return node
return candidate
return node
###############################################################################
## SCHEME LOADERS ARE BELOW THIS LINE
###############################################################################
def get_file(ref, url):
from pyLibrary.env.files import File
if ref.path.startswith("~"):
home_path = os.path.expanduser("~")
if os.sep == "\\":
home_path = "/" + home_path.replace(os.sep, "/")
if home_path.endswith("/"):
home_path = home_path[:-1]
ref.path = home_path + ref.path[1::]
elif not ref.path.startswith("/"):
# CONVERT RELATIVE TO ABSOLUTE
ref.path = "/".join(url.path.rstrip("/").split("/")[:-1]) + "/" + ref.path
path = ref.path if os.sep != "\\" else ref.path[1::].replace("/", "\\")
try:
if DEBUG:
_Log.note("reading file {{path}}", path=path)
content = File(path).read()
except Exception, e:
content = None
_Log.error("Could not read file {{filename}}", filename=path, cause=e)
try:
new_value = _convert.json2value(content, params=ref.query, flexible=True, leaves=True)
except Exception, e:
if not _Except:
_late_import()
e = _Except.wrap(e)
try:
new_value = _convert.ini2value(content)
except Exception, f:
raise _Log.error("Can not read {{file}}", file=path, cause=e)
new_value = _replace_ref(new_value, ref)
return new_value
def get_http(ref, url):
from pyLibrary.env import http
params = url.query
new_value = _convert.json2value(http.get(ref), params=params, flexible=True, leaves=True)
return new_value
def get_env(ref, url):
# GET ENVIRONMENT VARIABLES
ref = ref.host
try:
new_value = _convert.json2value(os.environ[ref])
except Exception, e:
new_value = os.environ[ref]
return new_value
def get_param(ref, url):
# GET PARAMETERS FROM url
param = url.query
new_value = param[ref.host]
return new_value
scheme_loaders = {
"http": get_http,
"file": get_file,
"env": get_env,
"param": get_param
}
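
if __name__ == '__main__':
    # Editor's sketch, not part of the original module: _replace_ref picks a
    # loader out of scheme_loaders by the scheme of the "$ref" URL, so
    # {"$ref": "env://HOME"} goes through get_env and
    # {"$ref": "file:///etc/app.json"} through get_file. The lookup itself:
    for scheme in ("http", "file", "env", "param"):
        print("%s -> %s" % (scheme, scheme_loaders[scheme].__name__))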
|
klahnakoski/MoDataSubmission
|
pyLibrary/jsons/ref.py
|
Python
|
mpl-2.0
| 7,353
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""securityPolicy Directive Schema
$Id: metadirectives.py 67630 2006-04-27 00:54:03Z jim $
"""
from zope.interface import Interface
from zope.configuration.fields import GlobalObject, GlobalInterface
from zope.configuration.fields import Tokens, PythonIdentifier
from zope.schema import InterfaceField, Id, TextLine
from zope.security.zcml import Permission
##############################################################################
# BBB 2006/04/03 -- to be removed after 12 months
import zope.deferredimport
zope.deferredimport.deprecated(
"It has been renamed to zope.security.zcml.IPermissionDirective. "
"This reference will be gone in Zope 3.5",
IBaseDefineDirective = 'zope.security.zcml:IPermissionDirective'
)
##############################################################################
class IModule(Interface):
"""Group security declarations about a module"""
module = GlobalObject(
title=u"Module",
description=u"Pointer to the module object.",
required=True)
class IAllow(Interface):
"""Allow access to selected module attributes
Access is unconditionally allowed to any names provided directly
in the attributes attribute or to any names defined by
interfaces listed in the interface attribute.
"""
attributes = Tokens(
title=u"Attributes",
description=u"The attributes to provide access to.",
value_type = PythonIdentifier(),
required=False)
interface = Tokens(
title=u"Interface",
description=u"Interfaces whos names to provide access to. Access "
u"will be provided to all of the names defined by the "
u"interface(s). Multiple interfaces can be supplied.",
value_type = GlobalInterface(),
required=False)
class IRequire(Interface):
"""Require a permission to access selected module attributes
The given permission is required to access any names provided
directly in the attributes attribute or any names defined by
interfaces listed in the interface attribute.
"""
permission = Permission(
title=u"Permission ID",
description=u"The id of the permission to require.")
class IBasePrincipalDirective(Interface):
"""Base interface for principal definition directives."""
id = Id(
title=u"Id",
description=u"Id as which this object will be known and used.",
required=True)
title = TextLine(
title=u"Title",
description=u"Provides a title for the object.",
required=True)
description = TextLine(
title=u"Title",
description=u"Provides a description for the object.",
required=False)
class IDefinePrincipalDirective(IBasePrincipalDirective):
"""Define a new principal."""
login = TextLine(
title=u"Username/Login",
description=u"Specifies the Principal's Username/Login.",
required=True)
password = TextLine(
title=u"Password",
description=u"Specifies the Principal's Password.",
required=True)
password_manager = TextLine(
title=u"Password Manager Name",
description=(u"Name of the password manager will be used"
" for encode/check the password"),
default=u"Plain Text"
)
class IDefineUnauthenticatedPrincipalDirective(IBasePrincipalDirective):
"""Define a new unauthenticated principal."""
class IDefineUnauthenticatedGroupDirective(IBasePrincipalDirective):
"""Define the unauthenticated group."""
class IDefineAuthenticatedGroupDirective(IBasePrincipalDirective):
"""Define the authenticated group."""
class IDefineEverybodyGroupDirective(IBasePrincipalDirective):
"""Define the everybody group."""
|
Donkyhotay/MoonPy
|
zope/app/security/metadirectives.py
|
Python
|
gpl-3.0
| 4,426
|
"""TestSuite"""
import sys
import unittest
from rez.vendor.unittest2 import case, util
__unittest = True
class BaseTestSuite(unittest.TestSuite):
"""A simple test suite that doesn't provide class or module shared fixtures.
"""
def __init__(self, tests=()):
self._tests = []
self.addTests(tests)
def __repr__(self):
return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return list(self) == list(other)
def __ne__(self, other):
return not self == other
# Can't guarantee hash invariant, so flag as unhashable
__hash__ = None
def __iter__(self):
return iter(self._tests)
def countTestCases(self):
cases = 0
for test in self:
cases += test.countTestCases()
return cases
def addTest(self, test):
# sanity checks
if not hasattr(test, '__call__'):
raise TypeError("%r is not callable" % (repr(test),))
if isinstance(test, type) and issubclass(test,
(case.TestCase, TestSuite)):
raise TypeError("TestCases and TestSuites must be instantiated "
"before passing them to addTest()")
self._tests.append(test)
def addTests(self, tests):
if isinstance(tests, basestring):
raise TypeError("tests must be an iterable of tests, not a string")
for test in tests:
self.addTest(test)
def run(self, result):
for test in self:
if result.shouldStop:
break
test(result)
return result
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
for test in self:
test.debug()
class TestSuite(BaseTestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def run(self, result):
self._wrapped_run(result)
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
debug = _DebugResult()
self._wrapped_run(debug, True)
self._tearDownPreviousClass(None, debug)
self._handleModuleTearDown(debug)
################################
# private methods
def _wrapped_run(self, result, debug=False):
for test in self:
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
if hasattr(test, '_wrapped_run'):
test._wrapped_run(result, debug)
elif not debug:
test(result)
else:
test.debug()
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if result._moduleSetUpFailed:
return
if getattr(currentClass, "__unittest_skip__", False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
# test may actually be a function
# so its class will be a builtin-type
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
try:
setUpClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
currentClass._classSetupFailed = True
className = util.strclass(currentClass)
errorName = 'setUpClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
def _get_previous_module(self, result):
previousModule = None
previousClass = getattr(result, '_previousTestClass', None)
if previousClass is not None:
previousModule = previousClass.__module__
return previousModule
def _handleModuleFixture(self, test, result):
previousModule = self._get_previous_module(result)
currentModule = test.__class__.__module__
if currentModule == previousModule:
return
self._handleModuleTearDown(result)
result._moduleSetUpFailed = False
try:
module = sys.modules[currentModule]
except KeyError:
return
setUpModule = getattr(module, 'setUpModule', None)
if setUpModule is not None:
try:
setUpModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
result._moduleSetUpFailed = True
errorName = 'setUpModule (%s)' % currentModule
self._addClassOrModuleLevelException(result, e, errorName)
def _addClassOrModuleLevelException(self, result, exception, errorName):
error = _ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, case.SkipTest):
addSkip(error, str(exception))
else:
result.addError(error, sys.exc_info())
def _handleModuleTearDown(self, result):
previousModule = self._get_previous_module(result)
if previousModule is None:
return
if result._moduleSetUpFailed:
return
try:
module = sys.modules[previousModule]
except KeyError:
return
tearDownModule = getattr(module, 'tearDownModule', None)
if tearDownModule is not None:
try:
tearDownModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
errorName = 'tearDownModule (%s)' % previousModule
self._addClassOrModuleLevelException(result, e, errorName)
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if getattr(previousClass, '_classSetupFailed', False):
return
if getattr(result, '_moduleSetUpFailed', False):
return
if getattr(previousClass, "__unittest_skip__", False):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
if tearDownClass is not None:
try:
tearDownClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
className = util.strclass(previousClass)
errorName = 'tearDownClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
"Used by the TestSuite to hold previous class when running in debug."
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False
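
if __name__ == '__main__':
    # Editor's sketch, not part of the vendored module: compose two trivial
    # cases into a suite and run it against a plain unittest result.
    class _Demo(case.TestCase):
        def test_a(self):
            pass
        def test_b(self):
            pass
    demo_suite = TestSuite([_Demo('test_a'), _Demo('test_b')])
    result = unittest.TestResult()
    demo_suite.run(result)
    print("%d tests, %d run" % (demo_suite.countTestCases(), result.testsRun))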
|
cwmartin/rez
|
src/rez/vendor/unittest2/suite.py
|
Python
|
lgpl-3.0
| 9,425
|
from os.path import abspath, dirname, join, split
from glob import glob
from functools import partial
from subprocess import Popen, PIPE
import gzip
from click import echo
from psycopg2 import (connect, OperationalError)
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from natsort import natsorted
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.data_access.sql_connection import SQLConnectionHandler
get_db_file = partial(join, join(dirname(dirname(abspath(__file__))), '..',
'db'))
LAYOUT_FP = get_db_file('ag_unpatched.sql')
INITIALIZE_FP = get_db_file('initialize.sql')
POPULATE_FP = get_db_file('ag_test_patch22.sql.gz')
PATCHES_DIR = get_db_file('patches')
def _check_db_exists(db, cursor):
r"""Check if the database db exists on the postgres server
Parameters
----------
db : str
The database name
cursor : psycopg2.cursor
The cursor connected to the server
"""
cursor.execute('SELECT datname FROM pg_database')
# It's a list of tuple, so just create the tuple to check if exists
return (db,) in cursor.fetchall()
def create_database(force=False):
# Connect to the postgres server
try:
conn = connect(dbname='postgres',
user=AMGUT_CONFIG.user, password=AMGUT_CONFIG.password,
host=AMGUT_CONFIG.host, port=AMGUT_CONFIG.port)
except OperationalError as e:
raise OperationalError("Cannot connect to the server, error is %s" %
str(e))
# Set the isolation level to AUTOCOMMIT so we can execute a create database
# sql query
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
# Get the cursor
cur = conn.cursor()
db_exists = _check_db_exists(AMGUT_CONFIG.database, cur)
# Check that the database does not already exist
if db_exists and force:
return
elif db_exists:
raise ValueError("Database '{}' already present on the system"
.format(AMGUT_CONFIG.database))
# Create the database
cur.execute('CREATE DATABASE %s' % AMGUT_CONFIG.database)
cur.close()
conn.close()
def build(verbose=False):
conn = connect(user=AMGUT_CONFIG.user, password=AMGUT_CONFIG.password,
host=AMGUT_CONFIG.host, port=AMGUT_CONFIG.port,
database=AMGUT_CONFIG.database)
cur = conn.cursor()
# create the schema and set a search path
cur.execute('CREATE SCHEMA IF NOT EXISTS ag')
cur.execute('CREATE SCHEMA IF NOT EXISTS barcodes')
cur.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')
if verbose:
echo("Building SQL layout")
with open(LAYOUT_FP) as f:
# We have to skip the "create schema" line here
cur.execute('\n'.join(f.readlines()[1:]))
cur.execute('SET SEARCH_PATH TO ag, barcodes, public')
with open(INITIALIZE_FP) as f:
cur.execute(f.read())
conn.commit()
def initialize(verbose=False):
"""Initialize the database with permissions and, optionally, a new user
Parameters
----------
verbose : bool, optional
Show messages while working, default False
"""
conn = connect(user=AMGUT_CONFIG.user, password=AMGUT_CONFIG.password,
host=AMGUT_CONFIG.host, port=AMGUT_CONFIG.port,
database=AMGUT_CONFIG.database)
cur = conn.cursor()
if verbose:
echo('Granting privileges')
cur.execute("""GRANT USAGE ON schema public, ag, barcodes
TO %s""" % AMGUT_CONFIG.user)
cur.execute('GRANT CONNECT ON DATABASE %s TO %s' %
(AMGUT_CONFIG.database, AMGUT_CONFIG.user))
cur.execute('GRANT INSERT, UPDATE, DELETE, SELECT ON ALL TABLES IN SCHEMA'
' public, ag, barcodes TO %s;' % AMGUT_CONFIG.user)
cur.execute('GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public, ag, barcodes'
' TO %s;' % AMGUT_CONFIG.user)
conn.commit()
def make_settings_table():
conn = SQLConnectionHandler()
settings = AMGUT_CONFIG.get_settings()
columns = [' '.join([setting[0], 'varchar']) for setting in settings]
column_names = [setting[0] for setting in settings]
num_values = len(settings)
sql = "INSERT INTO settings ({}) VALUES ({})".format(
', '.join(column_names), ', '.join(['%s'] * num_values))
args = [str(setting[1]) for setting in settings]
with conn.get_postgres_cursor() as cur:
create_sql = ("CREATE TABLE ag.settings ({}, current_patch varchar "
"NOT NULL DEFAULT 'unpatched')")
create_sql = create_sql.format(', '.join(columns))
cur.execute(create_sql)
cur.execute(sql, args)
def populate_test_db():
with gzip.open(POPULATE_FP, 'rb') as f:
test_db = f.read()
command = ['psql', '-d', AMGUT_CONFIG.database]
proc = Popen(command, stdin=PIPE, stdout=PIPE)
proc.communicate(test_db)
def patch_db(patches_dir=PATCHES_DIR, verbose=False):
"""Patches the database schema based on the settings table
Pulls the current patch from the settings table and applies all subsequent
patches found in the patches directory.
"""
conn = SQLConnectionHandler()
current_patch = conn.execute_fetchone(
"SELECT current_patch FROM settings")[0]
current_patch_fp = join(patches_dir, current_patch)
sql_glob = join(patches_dir, '*.sql')
patch_files = natsorted(glob(sql_glob))
if current_patch == 'unpatched':
next_patch_index = 0
elif current_patch_fp not in patch_files:
raise RuntimeError("Cannot find patch file %s" % current_patch)
else:
next_patch_index = patch_files.index(current_patch_fp) + 1
patch_update_sql = "UPDATE settings SET current_patch = %s"
for patch_fp in patch_files[next_patch_index:]:
patch_filename = split(patch_fp)[-1]
with conn.get_postgres_cursor() as cur:
cur.execute('SET SEARCH_PATH TO ag, barcodes, public')
with open(patch_fp, 'U') as patch_file:
if verbose:
echo('\tApplying patch %s...' % patch_filename)
cur.execute(patch_file.read())
cur.execute(patch_update_sql, [patch_filename])
conn._connection.commit()
# Idempotent patches implemented in Python can be run here
def rebuild_test(verbose=False):
conn = connect(user=AMGUT_CONFIG.user, password=AMGUT_CONFIG.password,
host=AMGUT_CONFIG.host, port=AMGUT_CONFIG.port,
database=AMGUT_CONFIG.database)
with conn.cursor() as cur:
        cur.execute("SELECT test_environment FROM ag.settings")
        test = cur.fetchone()[0]
if test != 'true':
print "ABORTING: Not working on test database"
return
conn.close()
if verbose:
print "Dropping database %s" % AMGUT_CONFIG.database
p = Popen(['dropdb', '--if-exists', AMGUT_CONFIG.database])
retcode = p.wait()
if retcode != 0:
raise RuntimeError("Could not delete database %s: retcode %d" %
(AMGUT_CONFIG.database, retcode))
if verbose:
print "Rebuilding database"
create_database()
populate_test_db()
initialize(verbose)
if verbose:
print "Patching database"
patch_db(verbose=verbose)
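
if __name__ == '__main__':
    # Editor's sketch, not part of the original module: patch_db relies on
    # natural sort order, so numbered patch files apply in numeric rather
    # than lexicographic order. The filenames below are illustrative only.
    print(natsorted(['10_add_column.sql', '2_seed.sql', '1_schema.sql']))
    # ['1_schema.sql', '2_seed.sql', '10_add_column.sql']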
|
ElDeveloper/american-gut-web
|
amgut/lib/data_access/env_management.py
|
Python
|
bsd-3-clause
| 7,411
|
from cchloader.adapters import CchAdapter
from cchloader.models.cch_autocons import CchAutoconsSchema
from marshmallow import Schema, fields, pre_load
class A5dBaseAdapter(Schema):
""" A5D Adapter
"""
@pre_load
def fix_numbers(self, data):
for attr, field in self.fields.iteritems():
if isinstance(field, fields.Integer):
if not data.get(attr):
data[attr] = None
return data
@pre_load
def fix_season(self, data):
valid_values = [0, 1]
season = data.get('season')
if season and season.isdigit() and season in map(str, valid_values):
data['season'] = int(season)
else:
data['season'] = None
@pre_load
def fix_source(self, data):
valid_values = [1, 2, 3, 4, 5, 6]
source = data.get('source')
if source and source.isdigit() and int(source) in valid_values:
data['source'] = int(source)
else:
data['source'] = None
class A5dAdapter(A5dBaseAdapter, CchAdapter, CchAutoconsSchema):
pass
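
if __name__ == '__main__':
    # Editor's sketch, not part of the original module: the pre_load hooks
    # coerce raw string fields in place before schema validation. The row
    # below is an illustrative record, not real CCH data.
    row = {'source': '3', 'season': '9'}
    adapter = A5dBaseAdapter()
    adapter.fix_source(row)   # '3' is a valid source -> 3
    adapter.fix_season(row)   # '9' is not a valid season -> None
    print(row)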
|
Som-Energia/cchloader
|
cchloader/adapters/a5d.py
|
Python
|
gpl-3.0
| 1,099
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Product.code'
db.alter_column('products_items', 'code', self.gf('django.db.models.fields.CharField')(max_length=20, null=True))
def backwards(self, orm):
# Changing field 'Product.code'
db.alter_column('products_items', 'code', self.gf('django.db.models.fields.CharField')(default=None, max_length=20))
models = {
u'products.category': {
'Meta': {'object_name': 'Category', 'db_table': "'products_categories'"},
'cover': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complementary': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_standalone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'legacy_id': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['products.Category']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
u'products.photo': {
'Meta': {'object_name': 'Photo', 'db_table': "'products_photos'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'is_main': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'legacy_hash': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'photos'", 'to': u"orm['products.Product']"})
},
u'products.product': {
'Meta': {'object_name': 'Product', 'db_table': "'products_items'"},
'categories': ('mptt.fields.TreeManyToManyField', [], {'related_name': "'products'", 'symmetrical': 'False', 'to': u"orm['products.Category']"}),
'code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'dimensions': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'discount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'franchisee_discount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'franchisee_price': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_new': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_wholesale': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'pack_amount': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'price': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'sku': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'weight': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'wholesale_discount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'wholesale_legacy_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'wholesale_price': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'})
}
}
complete_apps = ['products']
|
Lisaveta-K/lisaveta-k.github.io
|
_site/tomat/apps/products/migrations/0004_blank_1c_code.py
|
Python
|
mit
| 5,691
|
##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
##############################################################################
# Imports
##############################################################################
from __future__ import print_function, division
import os
import itertools
import numpy as np
from mdtraj.utils import ensure_type, cast_indices, in_units_of
from mdtraj.formats.registry import _FormatRegistry
from mdtraj.utils.six import string_types
from mdtraj.utils.six.moves import xrange
__all__ = ['MDCRDTrajectoryFile', 'load_mdcrd']
##############################################################################
# Classes
##############################################################################
class _EOF(IOError):
pass
@_FormatRegistry.register_loader('.mdcrd')
@_FormatRegistry.register_loader('.crd')
def load_mdcrd(filename, top=None, stride=None, atom_indices=None, frame=None):
"""Load an AMBER mdcrd file.
Parameters
----------
filename : str
String filename of AMBER mdcrd file.
top : {str, Trajectory, Topology}
The BINPOS format does not contain topology information. Pass in either
the path to a pdb file, a trajectory, or a topology to supply this
information.
stride : int, default=None
Only read every stride-th frame
atom_indices : array_like, optional
If not none, then read only a subset of the atoms coordinates from the
file.
frame : int, optional
Use this option to load only a single frame from a trajectory on disk.
If frame is None, the default, the entire trajectory will be loaded.
If supplied, ``stride`` will be ignored.
Returns
-------
trajectory : md.Trajectory
The resulting trajectory, as an md.Trajectory object.
See Also
--------
mdtraj.MDCRDTrajectoryFile : Low level interface to MDCRD files
"""
from mdtraj.core.trajectory import _parse_topology, Trajectory
    # we make it not required in the signature, but required here. although this
    # is a little weird, it's good because this function is usually called by a
    # dispatch from load(), where top comes from **kwargs. So if it's not supplied
    # we want to give the user an informative error message
if top is None:
raise ValueError('"top" argument is required for load_mdcrd')
if not isinstance(filename, string_types):
raise TypeError('filename must be of type string for load_mdcrd. '
'you supplied %s' % type(filename))
topology = _parse_topology(top)
atom_indices = cast_indices(atom_indices)
if atom_indices is not None:
topology = topology.subset(atom_indices)
with MDCRDTrajectoryFile(filename, n_atoms=topology._numAtoms) as f:
if frame is not None:
f.seek(frame)
xyz, cell_lengths = f.read(n_frames=1, atom_indices=atom_indices)
else:
xyz, cell_lengths = f.read(stride=stride, atom_indices=atom_indices)
in_units_of(xyz, f.distance_unit, Trajectory._distance_unit, inplace=True)
if cell_lengths is not None:
in_units_of(cell_lengths, f.distance_unit, Trajectory._distance_unit, inplace=True)
# Assume that its a rectilinear box
cell_angles = 90.0 * np.ones_like(cell_lengths)
time = np.arange(len(xyz))
if frame is not None:
time += frame
elif stride is not None:
time *= stride
t = Trajectory(xyz=xyz, topology=topology, time=time)
if cell_lengths is not None:
t.unitcell_lengths = cell_lengths
t.unitcell_angles = cell_angles
return t
@_FormatRegistry.register_fileobject('.mdcrd')
@_FormatRegistry.register_fileobject('.crd')
class MDCRDTrajectoryFile(object):
"""Interface for reading and writing to an AMBER mdcrd files.
This is a file-like object, that both reading or writing depending
on the `mode` flag. It implements the context manager protocol,
so you can also use it with the python 'with' statement.
The conventional units in the mdcrd file are angstroms. The format only
supports storing the cartesian coordinates and box lengths.
Parameters
----------
filename : str
The filename to open. A path to a file on disk.
n_atoms : int
The number of atoms in the system. This is _required_ when mode == 'r'
        and irrelevant when mode == 'w'.
mode : {'r', 'w'}
The mode in which to open the file, either 'r' for read or 'w' for
write.
    has_box : {True, False, 'detect'}
        Does the mdcrd file contain box length information? This is optional
        when mode == 'r' (and irrelevant when mode == 'w'). The presence or
        absence of box information can generally be inferred from the file,
but there might be corner cases in which this is not possible,
because of limitations in the mdcrd format.
force_overwrite : bool
If opened in write mode, and a file by the name of `filename` already
exists on disk, should we overwrite it?
"""
distance_unit = 'angstroms'
def __init__(self, filename, n_atoms=None, mode='r', has_box='detect',
force_overwrite=True):
"""Open an AMBER mdcrd file for reading/writing.
"""
self._is_open = False
self._filename = filename
self._n_atoms = n_atoms
self._mode = mode
self._w_has_box = None
self._frame_index = 0
self._has_box = has_box
        # track which line we're on. this is not essential, but it's useful
        # when reporting errors to the user to say what line it occurred on.
self._line_counter = 0
if has_box not in [True, False, "detect"]:
raise ValueError('has_box must be one of [True, False, "detect"]')
if mode == 'r':
if n_atoms is None:
raise ValueError('To open a mdcrd file in mode="r", you must '
'supply the number of atoms, "n_atoms"')
if not os.path.exists(filename):
raise IOError("The file '%s' doesn't exist" % filename)
self._fh = open(filename, 'r')
self._is_open = True
self._fh.readline() # read comment
self._line_counter += 1
elif mode == 'w':
if os.path.exists(filename) and not force_overwrite:
raise IOError("The file '%s' already exists" % filename)
self._fh = open(filename, 'w')
self._is_open = True
else:
raise ValueError('mode must be one of "r" or "w". '
'you supplied "%s"' % mode)
def close(self):
"""Close the mdcrd file"""
if self._is_open:
self._fh.close()
self._is_open = False
def __del__(self):
self.close()
def __enter__(self):
"Support the context manager protocol"
return self
def __exit__(self, *exc_info):
"Support the context manager protocol"
self.close()
def read(self, n_frames=None, stride=None, atom_indices=None):
"""Read data from a mdcrd file
Parameters
----------
n_frames : int, None
The number of frames you would like to read from the file.
If None, all of the remaining frames will be loaded.
        stride : int, optional
Read only every stride-th frame.
atom_indices : array_like, optional
If not none, then read only a subset of the atoms coordinates
from the file.
Returns
-------
xyz : np.ndarray, shape=(n_frames, n_atoms, 3), dtype=np.float32
The cartesian coordinates, in angstroms
cell_lengths : {np.ndarray, None}
If the file contains unitcell lengths, they will be returned as an
            array of shape=(n_frames, 3). Otherwise, cell_lengths will be
None.
"""
if not self._mode == 'r':
raise ValueError('read() is only available when file is opened '
'in mode="r"')
if n_frames is None:
frame_counter = itertools.count()
else:
frame_counter = xrange(n_frames)
if stride is None:
stride = 1
coords, boxes = [], []
for i in frame_counter:
try:
coord, box = self._read()
if atom_indices is not None:
coord = coord[atom_indices, :]
except _EOF:
break
coords.append(coord)
boxes.append(box)
for j in range(stride - 1):
# throw away these frames
try:
self._read()
except _EOF:
break
coords = np.array(coords)
if all(b is None for b in boxes):
# if there was no box information in any frame, that's cool
return coords, None
if not all(b is not None for b in boxes):
# but if some of them had box information and others didn't
# that probably means there was a bug in the parsing.
raise IOError('Inconsistent box information. Try manually '
'setting has_box? Your mdcrd file might be '
'corrupt.')
return coords, np.array(boxes, dtype=np.float32)
def _read(self):
"Read a single frame"
i = 0
coords = np.empty(self._n_atoms*3, dtype=np.float32)
box = None
while i < self._n_atoms * 3:
line = self._fh.readline()
self._line_counter += 1
if line == '':
raise _EOF()
try:
items = [float(elem) for elem in line.split()]
assert len(items) != 0 # trigger the exception below too
except Exception:
raise IOError('mdcrd parse error on line %d of "%s". This file '
                              'does not appear to be a valid mdcrd file.' % \
(self._line_counter, self._filename))
length = len(items)
if i + length > len(coords):
raise IOError('mdcrd parse error: specified n_atoms (%d) is '
                              'likely incorrect. Incorrect buffer size '
                              'encountered.' % self._n_atoms)
coords[i:i+length] = items
i += length
if i == self._n_atoms * 3:
if self._has_box is False:
break
# peek ahead for box
here = self._fh.tell()
line = self._fh.readline()
peek = [float(elem) for elem in line.split()]
if len(peek) == 3:
box = peek
else:
if self._has_box is True:
raise IOError('Box information not found in file.')
self._fh.seek(here)
break
self._frame_index += 1
return coords.reshape(self._n_atoms, 3), box
def write(self, xyz, cell_lengths=None):
"""Write one or more frames of data to a mdcrd file
Parameters
----------
xyz : np.ndarray, shape=(n_frames, n_atoms, 3)
The cartesian coordinates of the atoms to write. By convention, the
lengths should be in units of angstroms.
cell_lengths : np.ndarray, shape=(n_frames, 3), dtype=float32, optional
The length of the periodic box in each frame, in each direction,
`a`, `b`, `c`. By convention the lengths should be in units
of angstroms.
"""
if not self._mode == 'w':
raise ValueError('write() is only available when file is opened '
'in mode="w"')
xyz = ensure_type(xyz, np.float32, 3, 'xyz', can_be_none=False,
shape=(None, None, 3), warn_on_cast=False,
add_newaxis_on_deficient_ndim=True)
cell_lengths = ensure_type(cell_lengths, np.float32, 2, 'cell_lengths',
can_be_none=True, shape=(len(xyz), 3), warn_on_cast=False,
add_newaxis_on_deficient_ndim=True)
if self._w_has_box is None:
# this is the first write()
self._n_atoms = xyz.shape[1]
self._fh.write('TITLE : Created by MDTraj with %d atoms\n' % self._n_atoms)
if cell_lengths is None:
self._w_has_box = False
else:
self._w_has_box = True
elif self._w_has_box is True:
if cell_lengths is None:
raise ValueError('This mdcrd file must contain unitcell '
'information')
elif self._w_has_box is False:
if cell_lengths is not None:
raise ValueError('This mdcrd file must not contain unitcell '
'information')
else:
raise RuntimeError()
for i in range(xyz.shape[0]):
for j, coord in enumerate(xyz[i].reshape(-1)):
lfdone = False
out = "%8.3f" % coord
if len(out) > 8:
raise ValueError('Overflow error')
self._fh.write(out)
if (j+1) % 10 == 0:
self._fh.write("\n")
lfdone = True
if not lfdone:
self._fh.write("\n")
if cell_lengths is not None:
self._fh.write("%8.3f %8.3f %8.3f\n" % tuple(cell_lengths[i]))
def seek(self, offset, whence=0):
"""Move to a new file position
Parameters
----------
offset : int
A number of frames.
whence : {0, 1, 2}
0: offset from start of file, offset should be >=0.
1: move relative to the current position, positive or negative
2: move relative to the end of file, offset should be <= 0.
Seeking beyond the end of a file is not supported
"""
if self._mode == 'r':
advance, absolute = None, None
if whence == 0 and offset >= 0:
if offset >= self._frame_index:
advance = offset - self._frame_index
else:
absolute = offset
elif whence == 1 and offset >= 0:
advance = offset
elif whence == 1 and offset < 0:
absolute = offset + self._frame_index
elif whence == 2 and offset <= 0:
raise NotImplementedError('offsets from the end are not supported yet')
else:
raise IOError('Invalid argument')
if advance is not None:
for i in range(advance):
self._read() # advance and throw away these frames
elif absolute is not None:
self._fh.close()
self._fh = open(self._filename, 'r')
self._fh.readline() # read comment
self._frame_index = 0
self._line_counter = 1
for i in range(absolute):
self._read()
else:
raise RuntimeError()
else:
raise NotImplementedError('offsets in write mode are not supported yet')
def tell(self):
"""Current file position
Returns
-------
offset : int
The current frame in the file.
"""
return int(self._frame_index)
def __len__(self):
"Number of frames in the file"
raise NotImplementedError()
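
if __name__ == '__main__':
    # Editor's sketch, not part of the original module: a round trip through
    # the file-like interface above. 'demo.mdcrd' and n_atoms=22 are
    # illustrative values, not shipped test data.
    demo_xyz = np.random.randn(3, 22, 3).astype(np.float32) * 5.0  # 3 frames
    with MDCRDTrajectoryFile('demo.mdcrd', mode='w') as fh:
        fh.write(demo_xyz)
    with MDCRDTrajectoryFile('demo.mdcrd', n_atoms=22) as fh:
        coords, box = fh.read()
    print(coords.shape)  # (3, 22, 3); box is None since no box was written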
|
marscher/mdtraj
|
MDTraj/formats/mdcrd.py
|
Python
|
lgpl-2.1
| 16,789
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_system_session_helper
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_session_helper.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_system_session_helper_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_session_helper': {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_session_helper.fortios_system(input_data, fos_instance)
expected_data = {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
}
set_method_mock.assert_called_with('system', 'session-helper', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_session_helper_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_session_helper': {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_session_helper.fortios_system(input_data, fos_instance)
expected_data = {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
}
set_method_mock.assert_called_with('system', 'session-helper', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_session_helper_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_session_helper': {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_session_helper.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'session-helper', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_session_helper_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_session_helper': {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_session_helper.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'session-helper', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_session_helper_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_session_helper': {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_session_helper.fortios_system(input_data, fos_instance)
expected_data = {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
}
set_method_mock.assert_called_with('system', 'session-helper', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_system_session_helper_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_session_helper': {
'random_attribute_not_valid': 'tag',
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_session_helper.fortios_system(input_data, fos_instance)
expected_data = {
'id': '3',
'name': 'default_name_4',
'port': '5',
'protocol': '6'
}
set_method_mock.assert_called_with('system', 'session-helper', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
|
simonwydooghe/ansible
|
test/units/modules/network/fortios/test_fortios_system_session_helper.py
|
Python
|
gpl-3.0
| 8,151
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
Grass7Algorithm.py
---------------------
Date : February 2015
Copyright : (C) 2014-2015 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
from builtins import range
__author__ = 'Victor Olaya'
__date__ = 'February 2015'
__copyright__ = '(C) 2012-2015, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import sys
import os
import uuid
import importlib
from qgis.PyQt.QtCore import QCoreApplication, QUrl
from qgis.core import (QgsRasterLayer,
QgsApplication,
QgsMapLayer,
QgsProcessingUtils,
QgsProcessing,
QgsMessageLog,
QgsVectorFileWriter,
QgsProcessingAlgorithm,
QgsProcessingParameterDefinition,
QgsProcessingException,
QgsProcessingParameterExtent,
QgsProcessingParameterEnum,
QgsProcessingParameterNumber,
QgsProcessingParameterString,
QgsProcessingParameterField,
QgsProcessingParameterPoint,
QgsProcessingParameterBoolean,
QgsProcessingParameterVectorLayer,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterMultipleLayers,
QgsProcessingParameterVectorDestination,
QgsProcessingParameterRasterDestination,
QgsProcessingParameterFileDestination,
QgsProcessingParameterFolderDestination,
QgsProcessingOutputFolder,
QgsProcessingOutputVectorLayer,
QgsProcessingOutputRasterLayer,
                       QgsProcessingOutputHtml)
from qgis.utils import iface
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.parameters import (getParameterFromString)
from .Grass7Utils import Grass7Utils
#from processing.tools import dataobjects, system
from processing.tools.system import isWindows, getTempFilename
pluginPath = os.path.normpath(os.path.join(
os.path.split(os.path.dirname(__file__))[0], os.pardir))
class Grass7Algorithm(QgsProcessingAlgorithm):
GRASS_OUTPUT_TYPE_PARAMETER = 'GRASS_OUTPUT_TYPE_PARAMETER'
GRASS_MIN_AREA_PARAMETER = 'GRASS_MIN_AREA_PARAMETER'
GRASS_SNAP_TOLERANCE_PARAMETER = 'GRASS_SNAP_TOLERANCE_PARAMETER'
GRASS_REGION_EXTENT_PARAMETER = 'GRASS_REGION_PARAMETER'
GRASS_REGION_CELLSIZE_PARAMETER = 'GRASS_REGION_CELLSIZE_PARAMETER'
GRASS_REGION_ALIGN_TO_RESOLUTION = 'GRASS_REGION_ALIGN_TO_RESOLUTION'
GRASS_RASTER_FORMAT_OPT = 'GRASS_RASTER_FORMAT_OPT'
GRASS_RASTER_FORMAT_META = 'GRASS_RASTER_FORMAT_META'
OUTPUT_TYPES = ['auto', 'point', 'line', 'area']
QGIS_OUTPUT_TYPES = {QgsProcessing.TypeVectorAnyGeometry: 'auto',
QgsProcessing.TypeVectorPoint: 'point',
QgsProcessing.TypeVectorLine: 'line',
QgsProcessing.TypeVectorPolygon: 'area'}
def __init__(self, descriptionfile):
super().__init__()
self._name = ''
self._display_name = ''
self._group = ''
self.grass7Name = ''
self.params = []
self.hardcodedStrings = []
self.inputLayers = []
self.descriptionFile = descriptionfile
# Default GRASS parameters
self.region = None
self.cellSize = None
        self.snapTolerance = None
self.outputType = None
self.minArea = None
self.alignToResolution = None
# Load parameters from a description file
self.defineCharacteristicsFromFile()
self.numExportedLayers = 0
# Do we need this anymore?
self.uniqueSuffix = str(uuid.uuid4()).replace('-', '')
# Use the ext mechanism
name = self.name().replace('.', '_')
try:
self.module = importlib.import_module(
'processing.algs.grass7.ext.{}'.format(name))
except ImportError:
self.module = None
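        # Illustrative example (hypothetical algorithm name): for an
        # algorithm named 'v.net.path' this tries to import
        # processing.algs.grass7.ext.v_net_path and falls back to None.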
def createInstance(self):
return self.__class__(self.descriptionFile)
def name(self):
return self._name
def displayName(self):
return self._display_name
def group(self):
return self._group
def icon(self):
return QgsApplication.getThemeIcon("/providerGrass.svg")
def svgIconPath(self):
return QgsApplication.iconPath("providerGrass.svg")
def tr(self, string, context=''):
if context == '':
context = self.__class__.__name__
return QCoreApplication.translate(context, string)
def helpUrl(self):
helpPath = Grass7Utils.grassHelpPath()
if helpPath == '':
return None
if os.path.exists(helpPath):
return QUrl.fromLocalFile(os.path.join(helpPath, '{}.html'.format(self.grass7Name))).toString()
else:
return helpPath + '{}.html'.format(self.grass7Name)
def initAlgorithm(self, config=None):
"""
Algorithm initialization
"""
for p in self.params:
# We use createOutput argument for automatic output creation
res = self.addParameter(p, True)
# File destinations are not automatically added as outputs
if (isinstance(p, QgsProcessingParameterFileDestination)
and p.defaultFileExtension().lower() == 'html'):
self.addOutput(QgsProcessingOutputHtml(p.name(), p.description()))
def defineCharacteristicsFromFile(self):
"""
Create algorithm parameters and outputs from a text file.
"""
with open(self.descriptionFile) as lines:
# First line of the file is the Grass algorithm name
line = lines.readline().strip('\n').strip()
self.grass7Name = line
            # Second line is the algorithm name in Processing
line = lines.readline().strip('\n').strip()
self._name = line
self._display_name = QCoreApplication.translate("GrassAlgorithm", line)
if " - " not in self._name:
self._name = self.grass7Name + " - " + self._name
self._display_name = self.grass7Name + " - " + self._display_name
self._name = self._name[:self._name.find(' ')].lower()
# Read the grass group
line = lines.readline().strip('\n').strip()
self._group = QCoreApplication.translate("GrassAlgorithm", line)
hasRasterOutput = False
hasRasterInput = False
hasVectorInput = False
vectorOutputs = False
# Then you have parameters/output definition
line = lines.readline().strip('\n').strip()
while line != '':
try:
line = line.strip('\n').strip()
if line.startswith('Hardcoded'):
self.hardcodedStrings.append(line[len('Hardcoded|'):])
parameter = getParameterFromString(line)
if parameter is not None:
self.params.append(parameter)
if isinstance(parameter, QgsProcessingParameterVectorLayer):
hasVectorInput = True
elif isinstance(parameter, QgsProcessingParameterRasterLayer):
hasRasterInput = True
elif isinstance(parameter, QgsProcessingParameterMultipleLayers):
if parameter.layerType() < 3 or parameter.layerType() == 5:
hasVectorInput = True
elif parameter.layerType() == 3:
hasRasterInput = True
elif isinstance(parameter, QgsProcessingParameterVectorDestination):
vectorOutputs = True
elif isinstance(parameter, QgsProcessingParameterRasterDestination):
hasRasterOutput = True
line = lines.readline().strip('\n').strip()
except Exception as e:
QgsMessageLog.logMessage(self.tr('Could not open GRASS GIS 7 algorithm: {0}\n{1}').format(self.descriptionFile, line), self.tr('Processing'), QgsMessageLog.CRITICAL)
raise e
param = QgsProcessingParameterExtent(
self.GRASS_REGION_EXTENT_PARAMETER,
self.tr('GRASS GIS 7 region extent'),
optional=True
)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
if hasRasterOutput or hasRasterInput:
# Add a cellsize parameter
param = QgsProcessingParameterNumber(
self.GRASS_REGION_CELLSIZE_PARAMETER,
self.tr('GRASS GIS 7 region cellsize (leave 0 for default)'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0, maxValue=sys.float_info.max + 1, defaultValue=0.0
)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
if hasRasterOutput:
# Add a createopt parameter for format export
param = QgsProcessingParameterString(
self.GRASS_RASTER_FORMAT_OPT,
self.tr('Output Rasters format options (createopt)'),
multiLine=True, optional=True
)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
# Add a metadata parameter for format export
param = QgsProcessingParameterString(
self.GRASS_RASTER_FORMAT_META,
self.tr('Output Rasters format metadata options (metaopt)'),
multiLine=True, optional=True
)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
if hasVectorInput:
param = QgsProcessingParameterNumber(self.GRASS_SNAP_TOLERANCE_PARAMETER,
self.tr('v.in.ogr snap tolerance (-1 = no snap)'),
type=QgsProcessingParameterNumber.Double,
minValue=-1.0, maxValue=sys.float_info.max + 1,
defaultValue=-1.0)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
param = QgsProcessingParameterNumber(self.GRASS_MIN_AREA_PARAMETER,
self.tr('v.in.ogr min area'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0, maxValue=sys.float_info.max + 1,
defaultValue=0.0001)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
if vectorOutputs:
param = QgsProcessingParameterEnum(self.GRASS_OUTPUT_TYPE_PARAMETER,
self.tr('v.out.ogr output type'),
self.OUTPUT_TYPES)
param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.params.append(param)
def getDefaultCellSize(self):
"""
Determine a default cell size from all the raster layers.
"""
cellsize = 0.0
layers = [l for l in self.inputLayers if isinstance(l, QgsRasterLayer)]
for layer in layers:
cellsize = max(layer.rasterUnitsPerPixelX(), cellsize)
if cellsize == 0.0:
cellsize = 100.0
return cellsize
def grabDefaultGrassParameters(self, parameters, context):
"""
Imports default GRASS parameters (EXTENT, etc) into
object attributes for faster retrieving.
"""
# GRASS region extent
self.region = self.parameterAsExtent(parameters,
self.GRASS_REGION_EXTENT_PARAMETER,
context)
# GRASS cell size
if self.parameterDefinition(self.GRASS_REGION_CELLSIZE_PARAMETER):
self.cellSize = self.parameterAsDouble(parameters,
self.GRASS_REGION_CELLSIZE_PARAMETER,
context)
# GRASS snap tolerance
self.snapTolerance = self.parameterAsDouble(parameters,
self.GRASS_SNAP_TOLERANCE_PARAMETER,
context)
# GRASS min area
self.minArea = self.parameterAsDouble(parameters,
self.GRASS_MIN_AREA_PARAMETER,
context)
# GRASS output type
self.outputType = self.parameterAsString(parameters,
self.GRASS_OUTPUT_TYPE_PARAMETER,
context)
# GRASS align to resolution
self.alignToResolution = self.parameterAsBool(parameters,
self.GRASS_REGION_ALIGN_TO_RESOLUTION,
context)
def processAlgorithm(self, parameters, context, feedback):
if isWindows():
path = Grass7Utils.grassPath()
if path == '':
raise QgsProcessingException(
self.tr('GRASS GIS 7 folder is not configured. Please '
'configure it before running GRASS GIS 7 algorithms.'))
# Create brand new commands lists
self.commands = []
self.outputCommands = []
self.exportedLayers = {}
# If GRASS session has been created outside of this algorithm then
# get the list of layers loaded in GRASS otherwise start a new
# session
existingSession = Grass7Utils.sessionRunning
if existingSession:
self.exportedLayers = Grass7Utils.getSessionLayers()
else:
Grass7Utils.startGrassSession()
# Handle default GRASS parameters
self.grabDefaultGrassParameters(parameters, context)
# Handle ext functions for inputs/command/outputs
for fName in ['Inputs', 'Command', 'Outputs']:
fullName = 'process{}'.format(fName)
if self.module and hasattr(self.module, fullName):
getattr(self.module, fullName)(self, parameters, context)
else:
getattr(self, fullName)(parameters, context)
# Run GRASS
loglines = []
loglines.append(self.tr('GRASS GIS 7 execution commands'))
for line in self.commands:
feedback.pushCommandInfo(line)
loglines.append(line)
if ProcessingConfig.getSetting(Grass7Utils.GRASS_LOG_COMMANDS):
QgsMessageLog.logMessage("\n".join(loglines), self.tr('Processing'), QgsMessageLog.INFO)
Grass7Utils.executeGrass(self.commands, feedback, self.outputCommands)
# If the session has been created outside of this algorithm, add
# the new GRASS GIS 7 layers to it otherwise finish the session
if existingSession:
Grass7Utils.addSessionLayers(self.exportedLayers)
else:
Grass7Utils.endGrassSession()
# Return outputs map
outputs = {}
for out in self.outputDefinitions():
outName = out.name()
if outName in parameters:
outputs[outName] = parameters[outName]
if isinstance(out, QgsProcessingOutputHtml):
self.convertToHtml(parameters[outName])
return outputs
def processInputs(self, parameters, context):
"""Prepare the GRASS import commands"""
inputs = [p for p in self.parameterDefinitions()
if isinstance(p, (QgsProcessingParameterVectorLayer,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterMultipleLayers))]
for param in inputs:
paramName = param.name()
            if paramName not in parameters:
continue
if isinstance(parameters[paramName], str) and len(parameters[paramName]) == 0:
continue
            # Raster inputs need to be imported into the temp GRASS DB
if isinstance(param, QgsProcessingParameterRasterLayer):
if paramName not in self.exportedLayers:
self.loadRasterLayerFromParameter(
paramName, parameters, context)
            # Vector inputs need to be imported into the temp GRASS DB
elif isinstance(param, QgsProcessingParameterVectorLayer):
if paramName not in self.exportedLayers:
self.loadVectorLayerFromParameter(
paramName, parameters, context)
# TODO: find the best replacement for ParameterTable
#if isinstance(param, ParameterTable):
# pass
# For multiple inputs, process each layer
elif isinstance(param, QgsProcessingParameterMultipleLayers):
layers = self.parameterAsLayerList(parameters, paramName, context)
for idx, layer in enumerate(layers):
layerName = '{}_{}'.format(paramName, idx)
# Add a raster layer
if layer.type() == QgsMapLayer.RasterLayer:
self.loadRasterLayer(layerName, layer)
# Add a vector layer
elif layer.type() == QgsMapLayer.VectorLayer:
self.loadVectorLayer(layerName, layer)
self.postInputs()
def postInputs(self):
"""
After layer imports, we need to update some internal parameters
"""
        # If the projection has not already been set, use the project
self.setSessionProjectionFromProject()
# Build GRASS region
if self.region.isEmpty():
self.region = QgsProcessingUtils.combineLayerExtents(self.inputLayers)
command = 'g.region n={} s={} e={} w={}'.format(
self.region.yMaximum(), self.region.yMinimum(),
self.region.xMaximum(), self.region.xMinimum()
)
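        # Example of the command assembled above (illustrative values):
        #   g.region n=4430000.0 s=4420000.0 e=609000.0 w=600000.0
        # 'res=...' and '-a' may be appended below, depending on parameters.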
# Handle cell size
if self.parameterDefinition(self.GRASS_REGION_CELLSIZE_PARAMETER):
if self.cellSize:
cellSize = self.cellSize
else:
cellSize = self.getDefaultCellSize()
command += ' res={}'.format(cellSize)
# Handle align to resolution
if self.alignToResolution:
command += ' -a'
# Add the default parameters commands
self.commands.append(command)
QgsMessageLog.logMessage('processInputs end. Commands: {}'.format(self.commands), 'Grass7', QgsMessageLog.INFO)
def processCommand(self, parameters, context, delOutputs=False):
"""
Prepare the GRASS algorithm command
:param parameters:
:param context:
:param delOutputs: do not add outputs to commands.
"""
noOutputs = [o for o in self.parameterDefinitions() if o not in self.destinationParameterDefinitions()]
command = '{} '.format(self.grass7Name)
command += '{}'.join(self.hardcodedStrings)
# Add algorithm command
for param in noOutputs:
paramName = param.name()
value = None
# Exclude default GRASS parameters
if paramName in [self.GRASS_REGION_CELLSIZE_PARAMETER,
self.GRASS_REGION_EXTENT_PARAMETER,
self.GRASS_MIN_AREA_PARAMETER,
self.GRASS_SNAP_TOLERANCE_PARAMETER,
self.GRASS_OUTPUT_TYPE_PARAMETER,
self.GRASS_REGION_ALIGN_TO_RESOLUTION,
self.GRASS_RASTER_FORMAT_OPT,
self.GRASS_RASTER_FORMAT_META]:
continue
# Raster and vector layers
if isinstance(param, (QgsProcessingParameterRasterLayer,
QgsProcessingParameterVectorLayer)):
if paramName in self.exportedLayers:
value = self.exportedLayers[paramName]
else:
value = self.parameterAsCompatibleSourceLayerPath(
parameters, paramName, context,
QgsVectorFileWriter.supportedFormatExtensions()
)
# MultipleLayers
elif isinstance(param, QgsProcessingParameterMultipleLayers):
layers = self.parameterAsLayerList(parameters, paramName, context)
values = []
for idx in range(len(layers)):
layerName = '{}_{}'.format(paramName, idx)
values.append(self.exportedLayers[layerName])
value = ','.join(values)
# For booleans, we just add the parameter name
elif isinstance(param, QgsProcessingParameterBoolean):
if self.parameterAsBool(parameters, paramName, context):
command += ' {}'.format(paramName)
# For enumeration, we need to grab the string value
elif isinstance(param, QgsProcessingParameterEnum):
idx = self.parameterAsEnum(parameters, paramName, context)
value = '"{}"'.format(param.options()[idx])
# For strings, we just translate as string
elif isinstance(param, QgsProcessingParameterString):
data = self.parameterAsString(parameters, paramName, context)
# if string is empty, we don't add it
if len(data) > 0:
value = '"{}"'.format(
self.parameterAsString(parameters, paramName, context)
)
# For fields, we just translate as string
elif isinstance(param, QgsProcessingParameterField):
value = '{}'.format(
self.parameterAsString(parameters, paramName, context)
)
# For numbers and points, we translate as a string
elif isinstance(param, (QgsProcessingParameterNumber,
QgsProcessingParameterPoint)):
value = self.parameterAsString(parameters, paramName, context)
# For everything else, we assume that it is a string
else:
value = '"{}"'.format(
self.parameterAsString(parameters, paramName, context)
)
if value:
command += ' {}={}'.format(paramName, value)
# Handle outputs
if not delOutputs:
for out in self.destinationParameterDefinitions():
outName = out.name()
# For File destination
if isinstance(out, QgsProcessingParameterFileDestination):
# for HTML reports, we need to redirect stdout
if out.defaultFileExtension().lower() == 'html':
command += ' > "{}"'.format(
self.parameterAsFileOutput(
parameters, outName, context)
)
else:
command += ' {}="{}"'.format(
outName,
self.parameterAsFileOutput(
parameters, outName, context))
# For folders destination
elif isinstance(out, QgsProcessingParameterFolderDestination):
# We need to add a unique temporary basename
uniqueBasename = outName + self.uniqueSuffix
command += ' {}={}'.format(outName, uniqueBasename)
else:
# We add an output name to make sure it is unique if the session
# uses this algorithm several times.
#value = self.parameterAsOutputLayer(parameters, outName, context)
uniqueOutputName = outName + self.uniqueSuffix
command += ' {}={}'.format(outName, uniqueOutputName)
# Add output file to exported layers, to indicate that
# they are present in GRASS
self.exportedLayers[outName] = uniqueOutputName
command += ' --overwrite'
self.commands.append(command)
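        # A fully assembled command might look like (illustrative values):
        #   v.buffer input=a1b2c3 distance=100.0 output=output0d4e5f --overwrite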
QgsMessageLog.logMessage('processCommands end. Commands: {}'.format(self.commands), 'Grass7', QgsMessageLog.INFO)
def vectorOutputType(self, parameters, context):
"""Determine vector output types for outputs"""
self.outType = 'auto'
if self.parameterDefinition(self.GRASS_OUTPUT_TYPE_PARAMETER):
typeidx = self.parameterAsEnum(parameters,
self.GRASS_OUTPUT_TYPE_PARAMETER,
context)
self.outType = ('auto' if typeidx
is None else self.OUTPUT_TYPES[typeidx])
def processOutputs(self, parameters, context):
"""Prepare the GRASS v.out.ogr commands"""
# TODO: support multiple raster formats.
# TODO: support multiple vector formats.
# Determine general vector output type
self.vectorOutputType(parameters, context)
for out in self.destinationParameterDefinitions():
outName = out.name()
if isinstance(out, QgsProcessingParameterRasterDestination):
self.exportRasterLayerFromParameter(outName, parameters, context)
elif isinstance(out, QgsProcessingParameterVectorDestination):
self.exportVectorLayerFromParameter(outName, parameters, context)
elif isinstance(out, QgsProcessingParameterFolderDestination):
self.exportRasterLayersIntoDirectory(outName, parameters, context)
def loadRasterLayerFromParameter(self, name, parameters, context, external=True, band=1):
"""
Creates a dedicated command to load a raster into
the temporary GRASS DB.
:param name: name of the parameter.
:param parameters: algorithm parameters dict.
:param context: algorithm context.
:param external: True if using r.external.
:param band: imports only specified band. None for all bands.
"""
layer = self.parameterAsRasterLayer(parameters, name, context)
self.loadRasterLayer(name, layer, external, band)
def loadRasterLayer(self, name, layer, external=True, band=1):
"""
Creates a dedicated command to load a raster into
the temporary GRASS DB.
:param name: name of the parameter.
:param layer: QgsMapLayer for the raster layer.
:param external: True if using r.external.
:param band: imports only specified band. None for all bands.
"""
self.inputLayers.append(layer)
self.setSessionProjectionFromLayer(layer)
destFilename = 'a' + os.path.basename(getTempFilename())
self.exportedLayers[name] = destFilename
command = '{0} input="{1}" {2}output="{3}" --overwrite -o'.format(
'r.external' if external else 'r.in.gdal',
os.path.normpath(layer.source()),
'band={} '.format(band) if band else '',
destFilename)
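        # e.g. (illustrative values):
        #   r.external input="/data/dem.tif" band=1 output="atmp0001" --overwrite -o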
self.commands.append(command)
def exportRasterLayerFromParameter(self, name, parameters, context, colorTable=True):
"""
Creates a dedicated command to export a raster from
temporary GRASS DB into a file via gdal.
:param name: name of the parameter.
:param parameters: Algorithm parameters dict.
:param context: Algorithm context.
:param colorTable: preserve color Table.
"""
fileName = os.path.normpath(
self.parameterAsOutputLayer(parameters, name, context))
grassName = '{}{}'.format(name, self.uniqueSuffix)
outFormat = Grass7Utils.getRasterFormatFromFilename(fileName)
createOpt = self.parameterAsString(parameters, self.GRASS_RASTER_FORMAT_OPT, context)
metaOpt = self.parameterAsString(parameters, self.GRASS_RASTER_FORMAT_META, context)
self.exportRasterLayer(grassName, fileName, colorTable, outFormat, createOpt, metaOpt)
def exportRasterLayer(self, grassName, fileName,
colorTable=True, outFormat='GTiff',
createOpt=None,
metaOpt=None):
"""
Creates a dedicated command to export a raster from
temporary GRASS DB into a file via gdal.
:param grassName: name of the raster to export.
:param fileName: file path of raster layer.
:param colorTable: preserve color Table.
:param outFormat: file format for export.
:param createOpt: creation options for format.
        :param metaOpt: metadata options for export.
"""
if not createOpt:
if outFormat in Grass7Utils.GRASS_RASTER_FORMATS_CREATEOPTS:
createOpt = Grass7Utils.GRASS_RASTER_FORMATS_CREATEOPTS[outFormat]
for cmd in [self.commands, self.outputCommands]:
# Adjust region to layer before exporting
cmd.append('g.region raster={}'.format(grassName))
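            # The export command appended below might look like (illustrative):
            #   r.out.gdal -c -m -t input="output0d4e5f" output="/tmp/out.tif"
            #   format="GTiff" createopt="COMPRESS=LZW" --overwrite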
cmd.append(
'r.out.gdal -c -m{0} input="{1}" output="{2}" format="{3}" {4}{5} --overwrite'.format(
' -t' if colorTable else '',
grassName, fileName,
outFormat,
' createopt="{}"'.format(createOpt) if createOpt else '',
' metaopt="{}"'.format(metaOpt) if metaOpt else ''
)
)
def exportRasterLayersIntoDirectory(self, name, parameters, context, colorTable=True):
"""
Creates a dedicated loop command to export rasters from
temporary GRASS DB into a directory via gdal.
:param name: name of the output directory parameter.
:param parameters: Algorithm parameters dict.
:param context: Algorithm context.
:param colorTable: preserve color Table.
"""
# Grab directory name and temporary basename
outDir = os.path.normpath(
self.parameterAsString(parameters, name, context))
basename = name + self.uniqueSuffix
# Add a loop export from the basename
for cmd in [self.commands, self.outputCommands]:
# Adjust region to layer before exporting
            # TODO: Does it work under MS-Windows or MacOS X?
cmd.append("for r in $(g.list type=rast pattern='{}*'); do".format(basename))
cmd.append(" r.out.gdal -m{0} input=${{r}} output={1}/${{r}}.tif {2}".format(
' -t' if colorTable else '', outDir,
'--overwrite -c createopt="TFW=YES,COMPRESS=LZW"'
)
)
cmd.append("done")
def loadVectorLayerFromParameter(self, name, parameters, context, external=None):
"""
Creates a dedicated command to load a vector into
the temporary GRASS DB.
:param name: name of the parameter
:param parameters: Parameters of the algorithm.
:param context: Processing context
:param external: use v.external (v.in.ogr if False).
"""
layer = self.parameterAsVectorLayer(parameters, name, context)
self.loadVectorLayer(name, layer, external)
def loadVectorLayer(self, name, layer, external=None):
"""
Creates a dedicated command to load a vector into
temporary GRASS DB.
:param name: name of the parameter
:param layer: QgsMapLayer for the vector layer.
:param external: use v.external (v.in.ogr if False).
"""
# TODO: support selections
# TODO: support multiple input formats
if external is None:
external = ProcessingConfig.getSetting(
Grass7Utils.GRASS_USE_VEXTERNAL)
self.inputLayers.append(layer)
self.setSessionProjectionFromLayer(layer)
destFilename = 'a' + os.path.basename(getTempFilename())
self.exportedLayers[name] = destFilename
command = '{0}{1}{2} input="{3}" output="{4}" --overwrite -o'.format(
'v.external' if external else 'v.in.ogr',
' min_area={}'.format(self.minArea) if not external else '',
' snap={}'.format(self.snapTolerance) if not external else '',
os.path.normpath(layer.source()),
destFilename)
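        # e.g. with v.in.ogr (illustrative values):
        #   v.in.ogr min_area=0.0001 snap=-1.0 input="/data/roads.shp" output="atmp0002" --overwrite -o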
self.commands.append(command)
def exportVectorLayerFromParameter(self, name, parameters, context):
"""
Creates a dedicated command to export a raster from
temporary GRASS DB into a file via gdal.
:param grassName: name of the parameter
:param fileName: file path of raster layer
:param colorTable: preserve color Table.
"""
fileName = os.path.normpath(
self.parameterAsOutputLayer(parameters, name, context))
# Find if there is a dataType
dataType = self.outType
if self.outType == 'auto':
parameter = self.parameterDefinition(name)
if parameter:
layerType = parameter.dataType()
if layerType in self.QGIS_OUTPUT_TYPES:
dataType = self.QGIS_OUTPUT_TYPES[layerType]
grassName = '{}{}'.format(name, self.uniqueSuffix)
self.exportVectorLayer(grassName, fileName, dataType)
def exportVectorLayer(self, grassName, fileName, dataType='auto', layer=None, nocats=False):
"""
Creates a dedicated command to export a vector from
temporary GRASS DB into a file via ogr.
        :param grassName: name of the vector to export.
        :param fileName: file path of the output vector layer.
        :param dataType: GRASS output type ('auto', 'point', 'line' or 'area').
        :param layer: optional GRASS layer to export.
        :param nocats: if True, omit the '-c' flag from the command.
"""
for cmd in [self.commands, self.outputCommands]:
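            # Assembled command example (illustrative values):
            #   v.out.ogr -c type=auto input="output0d4e5f" output="/tmp/out.shp"
            #   format=ESRI_Shapefile --overwrite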
cmd.append(
'v.out.ogr{0} type={1} {2} input="{3}" output="{4}" {5}'.format(
'' if nocats else ' -c',
dataType,
'layer={}'.format(layer) if layer else '',
grassName,
fileName,
'format=ESRI_Shapefile --overwrite'
)
)
def setSessionProjectionFromProject(self):
"""
Set the projection from the project.
        We create a PROJ4 definition which is transmitted to GRASS.
"""
if not Grass7Utils.projectionSet and iface:
proj4 = iface.mapCanvas().mapSettings().destinationCrs().toProj4()
command = 'g.proj -c proj4="{}"'.format(proj4)
self.commands.append(command)
Grass7Utils.projectionSet = True
def setSessionProjectionFromLayer(self, layer):
"""
        Set the projection from a QgsMapLayer.
        We create a PROJ4 definition which is transmitted to GRASS.
"""
if not Grass7Utils.projectionSet:
proj4 = str(layer.crs().toProj4())
command = 'g.proj -c proj4="{}"'.format(proj4)
self.commands.append(command)
Grass7Utils.projectionSet = True
def convertToHtml(self, fileName):
# Read HTML contents
lines = []
html = False
with open(fileName, 'r', encoding='utf-8') as f:
lines = f.readlines()
if len(lines) > 1 and '<html>' not in lines[0]:
# Then write into the HTML file
with open(fileName, 'w', encoding='utf-8') as f:
f.write('<html><head>')
f.write('<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /></head>')
f.write('<body><p>')
for line in lines:
                    f.write('{}<br/>'.format(line))
f.write('</p></body></html>')
def canExecute(self):
message = Grass7Utils.checkGrassIsInstalled()
return not message, message
def checkParameterValues(self, parameters, context):
if self.module:
if hasattr(self.module, 'checkParameterValuesBeforeExecuting'):
func = getattr(self.module, 'checkParameterValuesBeforeExecuting')
#return func(self, parameters, context), None
return None, func(self, parameters, context)
return super(Grass7Algorithm, self).checkParameterValues(parameters, context)
|
nirvn/QGIS
|
python/plugins/processing/algs/grass7/Grass7Algorithm.py
|
Python
|
gpl-2.0
| 38,271
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Reads stdin and writes snps in bed format to stdout
#
import sys
def main():
# Set column names
chr_col = "alternate_ids"
pos_col = "position"
snp_col = "rsid"
# Set column separator
sep = "\t"
# Set possible missing values
missing_vals = set(["", ".", "NA", "NaN"])
# Get header and header positions
header = sys.stdin.readline().rstrip().split("\t")
chr_index = header.index(chr_col)
pos_index = header.index(pos_col)
snp_index = header.index(snp_col)
# For each line convert to bed format
for line in sys.stdin:
# Get parts
parts = line.rstrip().split("\t")
chrom = parts[chr_index]
pos = int(parts[pos_index])
snp = parts[snp_index]
# Make snp name if missing
if snp in missing_vals:
snp = "{0}:{1}".format(chrom, pos)
# Print bed
outline = [chrom, pos - 1, pos, snp, ".", "."]
outline = [str(x) for x in outline]
print("\t".join(outline))
if __name__ == '__main__':
main()
|
edm1/gwas-enrichment
|
make-features/make-genomic-features/1_snp-to-bed.py
|
Python
|
mit
| 1,100
|
import sys
sys.path.append('/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7')
sys.path.append('/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages')
import socket
import sqlite3
import struct
import pcapy
import datetime
import time
import os
import optparse
import glob
from exp_description import *
import matplotlib.pyplot as plt
import scipy.stats as stats
import statsmodels.api as sm
#import numpy as np
def get_discovery_retries(startstep):
retries=[startstep]
while retries[-1] < dead/1000:
retrystep = 2**len(retries)
if retrystep < dead/1000:
retries.append(retrystep)
else:
break
return retries
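# e.g. get_discovery_retries(1) with dead=30000 (ms) yields [1, 2, 4, 8, 16]:
# the step doubles until the next value would exceed the deadline in seconds.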
def experiment_analyzer(sdratio):
    '''
    Analyze all valid runs of the experiment and plot the ECDF of the
    service discovery delays, requiring the given percentage (sdratio)
    of responders to be found.
    '''
global conn
global exp
count = 0
c = conn.cursor()
x = []
y=[]
delay = []
pkt_delay = []
time_offset = []
for n in range(4):
# print n
delay.append([])
pkt_delay.append([])
# print dead
for i in range(exp.get_run_count()):
#delay[n].append(30000)
delay[n].append(dead)
#pkt_delay[n].append(30000)
pkt_delay[n].append(dead)
#loop over run sequence
fail = 0
succeeded = 0
timedout = 0
valid_results = [0,0,0,0]
num_responders = len(exp.get_responders())
needed = min(num_responders, int(round(sdratio * num_responders) / 100 + 0.5))
for run_number in range(exp.get_run_count()):
###### SETUP LEVELS OF ALL FACTORS FOR THIS RUN ########
exp.set_run_number(run_number)
run_definition = exp.get_run_identifier()
#print "exporting run number %d with combination %s" %(run_number,run_definition)
#print "run_definition", run_definition
c.execute("SELECT RunID FROM run_ids WHERE run_identifier=?",[run_definition])
run_id = c.fetchone()[0]
fact = exp.get_current_factor_level_by_id('fact_pairs')
#print fact
run_fact = exp.get_current_factor_level_by_id('fact_replication_id')
all_nodes = exp.get_all_spec_nodes()
for node in all_nodes:
if node['real_id']== exp.get_requester()["real_id"]:
#events=run_analyzer(run_id,node,c)
#for deadline in range(30):
# resp[deadline] = resp[deadline] + get_responsiveness(run_id, node, deadline*1000)
#delay[run_number] = get_delay(run_id, node)
#res=check_packets(run_id, node, 30000)
res = check_packets(run_id, node, dead)
# checks for invalid runs and excludes the response times
actornodes = [node]
actornodes = actornodes + exp.get_responders()
fails = check_routes(run_id, actornodes)
#print res
if res == -1 or fails > 0:
fail = fail + 1
else:
index=valid_results[fact]
delse = get_delay(run_id, node, needed)
delay[fact][index] = delse
x.append(delse)
y.append(run_id)
valid_results[fact] = valid_results[fact] + 1
if delse < dead:
succeeded = succeeded + 1
else:
timedout = timedout + 1
break
sys.stdout.write("\rAnalyzed Runs: %d, Valid: %d, Succeeded: %d, Timed out: %d, Failed: %d" % (run_number+1, valid_results[0], succeeded, timedout, fail))
sys.stdout.flush()
sys.stdout.write("\n")
######## RESPONSIVENESS 1zu1 ############
#for i in range(250):
# print delay[3][i]
symbols = ['k-','k--','k-.','k:']
#for fact in range(4):
fact=0
res = []
#z=stats.norm.cdf(x)
#print z
for i in range(30000):
ok = 0
#print pkt_delay
for n in range(valid_results[fact]):
if pkt_delay[fact][n]<i:
ok = ok + 1
#print ok
res.append((ok*100/valid_results[fact])*0.01)
ecdf = sm.distributions.ECDF(x)
### Plotting starts here ###
fn_split=fn.split("_")
# print "Client: %s" % fn_split[0]
# print "Provider: %s" % fn_split[1]
# print "%d VoIP Streams Load" % int(fn_split[3])
if int(fn_split[3]) > 0:
legend_string = "%d VoIP Streams Load" % int(fn_split[3])
else:
legend_string = "No Load"
plt.figure(1)
plt.plot(ecdf.x, ecdf.y, linestyle='-', drawstyle='steps', label=legend_string)
#Checks for validity of the routes from the ExtraRunMeasurements
def check_route(run_id, node):
# c.execute("SELECT Content FROM ExtraRunMeasurements WHERE runID=? and nodeID=?", [run_id,node['real_id']] )
c.execute("SELECT Content FROM ExtraRunMeasurements WHERE runID=? and nodeID=?", [run_id,node['real_id']] )
routes = str(c.fetchone())
    if 'fail' in routes:
# print run_id,node['real_id']
# print " invalid run"
#fail = fail+1
return 1
def check_routes(run_id, actornodes):
actorsstring = ', '.join('?' * len(actornodes))
query_string = "SELECT count(NodeID) FROM ExtraRunMeasurements WHERE runID=? and Content like 'fail%%' and nodeID in (%s)" % actorsstring
query_args = [run_id]
for actor in actornodes:
query_args.append(actor['real_id'])
c.execute(query_string, query_args)
fails = c.fetchone()[0]
# if fails > 0:
# print "Run %d, Failed routes %s" % (run_id, fails)
return fails
def check_packets(run_id, node, deadline_ms):
'''
    Steps: First select the packets of this run, "sent" and "received". "sent" has to be handled
    with care, as previously sent packets can be received again and have to be filtered out;
    unfortunately these can also be packets from other runs. When a packet from another run is
    detected as sent, it can be removed from the list; but when a response to it arrives within
    the search window, this search is a false positive and must FAIL.
'''
# first get start/stop times
c.execute("SELECT CommonTime FROM Events WHERE runID=? and nodeID=? and EventType='sd_start_search'", [run_id,node['real_id']] )
#print "start_search"
rows = c.fetchone()
#print rows
if rows==None:
#print "Error, no search found in run ", run_id, node['real_id']
return -1
start_search_time = rows[0]
start = db_timestamp_to_datetime(start_search_time)
c.execute("SELECT CommonTime FROM Events WHERE runID=? and nodeID=? and EventType='sd_service_add'", [run_id,node['real_id']] )
#print "sd_service_add"
find_result = c.fetchone()
#print find_result
if find_result==None:
#print "0"
stop = start+datetime.timedelta(milliseconds=deadline_ms)
else:
stop = db_timestamp_to_datetime(find_result[0])
if stop > start+datetime.timedelta(milliseconds=deadline_ms):
stop = start+datetime.timedelta(milliseconds=deadline_ms)
c.execute("SELECT * FROM Packets WHERE RunID=? and NodeID=? and SrcNodeID=? ORDER BY CommonTime ASC", [run_id, node['real_id'],node['real_id']])
rows_send = c.fetchall()
#print rows_send
c.execute("SELECT * FROM Packets WHERE RunID=? and NodeID=? and SrcNodeID!=? ORDER BY CommonTime ASC", [run_id, node['real_id'],node['real_id']])
rows_recv = c.fetchall()
#print rows_recv
# consider only packets within the search/timedout interval
for sent in list(rows_send):
sent_time = db_timestamp_to_datetime(sent[2])
if sent_time < start or sent_time>stop:
rows_send.remove(sent)
pkt_analyzer = packet_analyzer()
#print start
#print stop
# also, consider only responses
for received in list(rows_recv):
received_time = db_timestamp_to_datetime(received[2])
pkt = received[4]
ip=pkt_analyzer.decode_ip_packet(pkt)
udp=pkt_analyzer.decode_udp_packet(ip['data'])
mdns=pkt_analyzer.decode_mdns_packet(udp['data'])
# mdns flag say, if response or not, must be response
if received_time < start or received_time > stop or mdns['flags'] & 128!=128:
#print "removing", received
rows_recv.remove(received)
# list packets by their transaction ID
# remove duplicates and out of order packets from the sent
sent_by_id = {}
for i,sent in enumerate(list(rows_send)):
pkt = sent[4]
id = socket.ntohs(struct.unpack('H',pkt[28:30])[0])
if i==0:
last_id=id-1
#print "Out of order %d %d" %( run_id, id), sent
if id==last_id+1:
last_id = id
#print "correct oder", sent
sent_by_id[id] = sent
# find responses and BAD RESPONSES
for received in rows_recv:
pkt = received[4]
id = socket.ntohs(struct.unpack('H',pkt[28:30])[0])
#print "ResponseID: %d" %id
found = 0
for id_sent,sent in sent_by_id.items():
if id==id_sent:
#print "found same IDs", id
found = 1
t_requ = db_timestamp_to_datetime(sent[2])
t_resp = db_timestamp_to_datetime(received[2])
delay = t_resp - t_requ
return delay.seconds*1000 + delay.microseconds / 1000
if found==0:
#print "Fail Runid=%s" %run_id, received
return -1
return deadline_ms
class packet_analyzer():
def __init__(self):
pass
def get_dnssd_query_response_rtt(self):
'''
'''
pass
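    # Decoding chain used by packet_tracker() below, where data is a raw
    # IP packet:
    #   ip = self.decode_ip_packet(data)
    #   udp = self.decode_udp_packet(ip['data'])
    #   mdns = self.decode_mdns_packet(udp['data'])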
def decode_mdns_packet(self, p):
d={}
d['transaction_ID']=socket.ntohs(struct.unpack('H',p[0:2])[0])
d['flags']=struct.unpack('H',p[2:4])[0]
d['n_questions']=socket.ntohs(struct.unpack('H',p[4:6])[0])
d['n_answerRRs']=socket.ntohs(struct.unpack('H',p[6:8])[0])
d['n_authRRs']=socket.ntohs(struct.unpack('H',p[8:10])[0])
d['n_addRRs']=socket.ntohs(struct.unpack('H',p[10:12])[0])
return d
def decode_udp_packet(self, p):
d={}
d['src_port']=socket.ntohs(struct.unpack('H',p[0:2])[0])
d['dst_port']=socket.ntohs(struct.unpack('H',p[2:4])[0])
d['length']=socket.ntohs(struct.unpack('H',p[4:6])[0])
d['checksum']=socket.ntohs(struct.unpack('H',p[6:8])[0])
d['data']=p[8:]
return d
def decode_ip_packet(self, s):
d={}
d['version']=(ord(s[0]) & 0xf0) >> 4
d['header_len']=ord(s[0]) & 0x0f
d['tos']=ord(s[1])
d['total_len']=socket.ntohs(struct.unpack('H',s[2:4])[0])
d['id']=socket.ntohs(struct.unpack('H',s[4:6])[0])
d['flags']=(ord(s[6]) & 0xe0) >> 5
d['fragment_offset']=socket.ntohs(struct.unpack('H',s[6:8])[0] & 0x1f)
d['ttl']=ord(s[8])
d['protocol']=ord(s[9])
d['checksum']=socket.ntohs(struct.unpack('H',s[10:12])[0])
d['source_address']=struct.unpack('i',s[12:16])[0]
d['destination_address']=struct.unpack('i',s[16:20])[0]
if d['header_len']>5:
d['options']=s[20:4*(d['header_len']-5)]
else:
d['options']=None
d['data']=s[4*d['header_len']:]
return d
def decode_eth_packet(self, p):
d={}
d['dst_mac']=0#struct.unpack('H',p[0:6])[0]
d['src_mac']=0#struct.unpack('H',p[6:12])[0]
d['type']=socket.ntohs(struct.unpack('H',p[12:14])[0])
d['data']=p[14:]
return d
def packet_tracker(self, hdr, data):
'''
scans packets for pairs with same queryID and for the first return rtt
'''
global query
global avg
global count
global max
global min
curr_hdr={}
curr_hdr['ts_s'],curr_hdr['ts_us'] = hdr.getts()
curr_hdr['len']=hdr.getlen()
ts = datetime.datetime.fromtimestamp(curr_hdr['ts_s'])
ts = ts + datetime.timedelta(microseconds=curr_hdr['ts_us'])
d3 = None
#d = self.decode_eth_packet(data)
#print d
#if d['type']==2048: #IP
d = self.decode_ip_packet(data)
if d['protocol']==17: # UDP
d2 = self.decode_udp_packet(d['data'])
if d2['dst_port']==5353:
d3 = self.decode_mdns_packet(d2['data'])
if d3==None:
print "not a mdns packet", d3
return
# if this is a query, save the id and time
if d3['flags']==0: #Query
self.queries.append({'id':d3['transaction_ID'], 'ts':ts})
else: #response
#if query[d3['transaction_ID']]==None:
# print "Invalid response, ignoring this packet"
# return
self.responses.append({'id':d3['transaction_ID'], 'ts':ts})
def load_packet_into_list(self, filename):
self.responses = []
self.queries = []
#print ("Parsing file %s" % (filename))
p = pcapy.open_offline(filename)
p.loop(0, self.packet_tracker)
#print self.queries
#print ""
#print self.responses
def find_first_rtt_between(self, start_ts, end_ts):
match = 0
for (id,query) in enumerate(self.queries):
if query['ts']>start_ts and query['ts']<end_ts:
#print query
for (id2,response) in enumerate(self.responses):
if response['ts']>start_ts and response['ts']<end_ts:
if response['id']==query['id']:
diff = response['ts']-query['ts']
#print "Found match, diff ",diff
match = match + 1
print match
counter = 0
events_merge_file_name = "merged_events.csv"
event_log_file_name = "event_log_"
def _get_subdirs(dir):
return [name for name in os.listdir(dir)
if os.path.isdir(os.path.join(dir,name))]
def _get_files(dir, mask):
return
def runcapture_dir_analyzer(dir):
if dir=="capture":
return
def parse_line(line, run, owner):
'''
    Each line consists of a timestamp, type and param.
    The timestamp is converted into a datetime value (shifted by the run's time offset).
'''
    fields = line.split(',')
    dt, _, us = fields[0].partition(".")
    dt = datetime.datetime.strptime(dt, "%Y-%m-%d %H:%M:%S")
    us = int(us.rstrip("Z"), 10)
    ret = dt + datetime.timedelta(microseconds=us) + datetime.timedelta(milliseconds=run.timediff_ms)
    return {'ts': ret, 'type': fields[1], 'param': fields[2], 'origin': owner}
#gt("2008-08-12T12:20:30.656234Z")
#datetime.datetime(2008, 8, 12, 12, 20, 30, 656234)
def db_timestamp_to_datetime(db_timestamp):
dt, _, us= db_timestamp.partition(".")
dt= datetime.datetime.strptime(dt, "%Y-%m-%d %H:%M:%S")
us= int(us.rstrip("Z"), 10)
return dt + datetime.timedelta(microseconds=us)
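# e.g. db_timestamp_to_datetime("2008-08-12 12:20:30.656234Z")
#   -> datetime.datetime(2008, 8, 12, 12, 20, 30, 656234)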
def get_delay(run_id, node, needed):
''' gets the search --> add delay from the event list
'''
y=[]
c.execute("SELECT CommonTime FROM Events WHERE runID=? and nodeID=? and EventType='sd_start_search'", [run_id,node['real_id']] )
start_search_time = c.fetchone()[0]
c.execute("SELECT CommonTime FROM Events WHERE runID=? and nodeID=? and EventType='sd_service_add'", [run_id,node['real_id']] )
find_result = c.fetchall()
if find_result==None:
return dead
elif len(find_result) < needed:
return dead
else:
stop_search_time = find_result[needed-1][0]
start = db_timestamp_to_datetime(start_search_time)
stop = db_timestamp_to_datetime(stop_search_time)
delay = stop-start
#print delay.seconds*1000 + delay.microseconds / 1000
delsec = delay.seconds*1000 + delay.microseconds / 1000
#print "response"
#print delsec
#print "run id"
#print run_id
#plt.xlabel('Deadline in ms',{'fontsize':'x-large'})
#plt.ylabel('Responsiveness',{'fontsize':'x-large'})
return delsec
def print_plot():
plt.xlabel('Deadline in ms',{'fontsize':'x-large'})
plt.ylabel('Responsiveness',{'fontsize':'x-large'})
#plt.legend(('no load', '26 VoIP', '53 VoIP', '80 VoIP'),
# 'right', shadow=True)
plt.grid(True)
plt.legend(loc = "lower right")
plt.xlim([0,dead])
plt.hold(True)
# plt.set_xticklabels(plt.get_xticks()/1000)
# savename = fn + "_" + str(sdratio) + ".pdf"
savename = "plot.pdf"
plt.savefig(savename, dpi=600)
if __name__ == '__main__':
global conn
global cfg_search_fail_value
global csv_file
global dead
global fn
cfg_search_fail_value = -1000
# Option parser
parser = optparse.OptionParser(
description='Analyzer for done Service Discovery Experiments.',
prog=os.path.basename(sys.argv[0]),
version='%s 0.0.1' % os.path.basename(sys.argv[0]),
)
# multi = False
csv_file = "/tmp/results.csv"
parser.add_option('-d', '--database', action='append', default = [], dest='database', help='the database file')
parser.add_option('-l', metavar='deadline', type='int', dest='deadline', help='the deadline')
parser.add_option('-x', metavar='exp_file', dest='exp_file', help='the abstract experiment description')
parser.add_option('-o', metavar='csv_file', dest='csv_file', help='the file to which the results are written')
# parser.add_option('-m', action='store_true', dest='multi', help='analyze a multiple instances experiment')
parser.add_option('-r' ,metavar='ratio', type='int', dest='sdratio', help='percentage of service instances needed to be found', default = 100)
options, arguments = parser.parse_args()
if options.csv_file!=None:
csv_file = options.csv_file
if options.database == []:
print "Database file is needed"
exit()
# else:
# for db in options.database:
# print db
# exit()
if options.deadline == None:
print "Deadline is needed"
exit()
else:
dead = options.deadline
# print "dead",dead
#print "Manu"
for database in options.database:
fn = str(database.split('.')[0].split('/')[-1])
print "Database %s" % fn
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT expXML FROM ExperimentInfo")
row=c.fetchone()
        if row==None:
            print "no XML description in database file"
            exit()
fd = open('/tmp/exp_xml','w')
fd.write(row[0])
fd.close()
if options.exp_file != None:
exp = experiment_description(options.exp_file)
else:
exp = experiment_description('/tmp/exp_xml')
# print exp.platform_specs.actor_map
#print exp.get_requester()
experiment_analyzer(options.sdratio)
print_plot()
|
adittrich/excovery
|
master/Analyze_graph.py
|
Python
|
mit
| 16,509
|
from __future__ import division
# These functions have their own module in order to be compiled with the right
# __future__ flag (and be tested alongside the 2.x legacy division operator).
def truediv_usecase(x, y):
return x / y
def itruediv_usecase(x, y):
x /= y
return x
|
stefanseefeld/numba
|
numba/tests/true_div_usecase.py
|
Python
|
bsd-2-clause
| 289
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""Interface documentation.
Define the interfaces that are implemented by various buildbot classes.
"""
# E0211: Method has no argument
# E0213: Method should have "self" as first argument
# pylint: disable-msg=E0211,E0213
from zope.interface import Interface, Attribute
# exceptions that can be raised while trying to start a build
class NoSlaveError(Exception):
pass
class BuilderInUseError(Exception):
pass
class BuildSlaveTooOldError(Exception):
pass
class LatentBuildSlaveFailedToSubstantiate(Exception):
pass
class IChangeSource(Interface):
"""
Service which feeds Change objects to the changemaster. When files or
directories are changed in version control, this object should represent
the changes as a change dictionary and call::
self.master.addChange(who=.., rev=.., ..)
See 'Writing Change Sources' in the manual for more information.
"""
master = Attribute('master',
'Pointer to BuildMaster, automatically set when started.')
def describe():
"""Return a string which briefly describes this source."""
class ISourceStamp(Interface):
"""
@cvar branch: branch from which source was drawn
@type branch: string or None
@cvar revision: revision of the source, or None to use CHANGES
@type revision: varies depending on VC
@cvar patch: patch applied to the source, or None if no patch
    @type patch: None or tuple (level, diff)
@cvar changes: the source step should check out the latest revision
in the given changes
@type changes: tuple of L{buildbot.changes.changes.Change} instances,
all of which are on the same branch
@cvar project: project this source code represents
@type project: string
@cvar repository: repository from which source was drawn
@type repository: string
"""
def canBeMergedWith(self, other):
"""
Can this SourceStamp be merged with OTHER?
"""
def mergeWith(self, others):
"""Generate a SourceStamp for the merger of me and all the other
SourceStamps. This is called by a Build when it starts, to figure
out what its sourceStamp should be."""
def getAbsoluteSourceStamp(self, got_revision):
"""Get a new SourceStamp object reflecting the actual revision found
by a Source step."""
def getText(self):
"""Returns a list of strings to describe the stamp. These are
intended to be displayed in a narrow column. If more space is
available, the caller should join them together with spaces before
presenting them to the user."""
class IEmailSender(Interface):
"""I know how to send email, and can be used by other parts of the
Buildbot to contact developers."""
pass
class IEmailLookup(Interface):
def getAddress(user):
"""Turn a User-name string into a valid email address. Either return
a string (with an @ in it), None (to indicate that the user cannot
be reached by email), or a Deferred which will fire with the same."""
class IStatus(Interface):
"""I am an object, obtainable from the buildmaster, which can provide
status information."""
def getTitle():
"""Return the name of the project that this Buildbot is working
for."""
def getTitleURL():
"""Return the URL of this Buildbot's project."""
def getBuildbotURL():
"""Return the URL of the top-most Buildbot status page, or None if
this Buildbot does not provide a web status page."""
def getURLForThing(thing):
"""Return the URL of a page which provides information on 'thing',
which should be an object that implements one of the status
interfaces defined in L{buildbot.interfaces}. Returns None if no
suitable page is available (or if no Waterfall is running)."""
def getChangeSources():
"""Return a list of IChangeSource objects."""
def getChange(number):
"""Return an IChange object."""
def getSchedulers():
"""Return a list of ISchedulerStatus objects for all
currently-registered Schedulers."""
def getBuilderNames(categories=None):
"""Return a list of the names of all current Builders."""
def getBuilder(name):
"""Return the IBuilderStatus object for a given named Builder. Raises
KeyError if there is no Builder by that name."""
def getSlaveNames():
"""Return a list of buildslave names, suitable for passing to
getSlave()."""
def getSlave(name):
"""Return the ISlaveStatus object for a given named buildslave."""
def getBuildSets():
"""
Return a list of un-completed build sets.
@returns: list of L{IBuildSetStatus} implementations, via Deferred.
"""
def generateFinishedBuilds(builders=[], branches=[],
num_builds=None, finished_before=None,
max_search=200):
"""Return a generator that will produce IBuildStatus objects each
time you invoke its .next() method, starting with the most recent
finished build and working backwards.
@param builders: this is a list of Builder names, and the generator
will only produce builds that ran on the given
Builders. If the list is empty, produce builds from
all Builders.
@param branches: this is a list of branch names, and the generator
will only produce builds that used the given
branches. If the list is empty, produce builds from
all branches.
@param num_builds: the generator will stop after providing this many
builds. The default of None means to produce as
many builds as possible.
@type finished_before: int: a timestamp, seconds since the epoch
@param finished_before: if provided, do not produce any builds that
finished after the given timestamp.
@type max_search: int
@param max_search: this method may have to examine a lot of builds
to find some that match the search parameters,
especially if there aren't any matching builds.
This argument imposes a hard limit on the number
of builds that will be examined within any given
Builder.
"""
def subscribe(receiver):
"""Register an IStatusReceiver to receive new status events. The
receiver will immediately be sent a set of 'builderAdded' messages
for all current builders. It will receive further 'builderAdded' and
'builderRemoved' messages as the config file is reloaded and builders
come and go. It will also receive 'buildsetSubmitted' messages for
all outstanding BuildSets (and each new BuildSet that gets
submitted). No additional messages will be sent unless the receiver
asks for them by calling .subscribe on the IBuilderStatus objects
which accompany the addedBuilder message."""
def unsubscribe(receiver):
"""Unregister an IStatusReceiver. No further status messgaes will be
delivered."""
class IBuildSetStatus(Interface):
"""I represent a set of Builds, each run on a separate Builder but all
using the same source tree."""
def getReason():
pass
def getID():
"""Return the BuildSet's ID string, if any. The 'try' feature uses a
random string as a BuildSetID to relate submitted jobs with the
resulting BuildSet."""
def getResponsibleUsers():
pass # not implemented
def getInterestedUsers():
pass # not implemented
def getBuilderNames():
"""Return a list of the names of all Builders on which this set will
do builds.
@returns: list of names via Deferred"""
def isFinished():
pass
def waitUntilFinished():
"""Return a Deferred that fires (with this IBuildSetStatus object)
when all builds have finished."""
def getResults():
"""Return SUCCESS/FAILURE, or None if the buildset is not finished
yet"""
class IBuildRequestStatus(Interface):
"""I represent a request to build a particular set of source code on a
particular Builder. These requests may be merged by the time they are
finally turned into a Build."""
def getSourceStamp():
"""
Get a SourceStamp object which can be used to re-create the source tree
that this build used. This method will return an absolute SourceStamp
if possible, and its results may change as the build progresses.
Specifically, a "HEAD" build may later be more accurately specified by
an absolute SourceStamp with the specific revision information.
This method will return None if the source information is no longer
available.
@returns: SourceStamp via Deferred
"""
def getBuilds():
"""Return a list of IBuildStatus objects for each Build that has been
started in an attempt to satisfy this BuildRequest."""
def subscribe(observer):
"""Register a callable that will be invoked (with a single
IBuildStatus object) for each Build that is created to satisfy this
request. There may be multiple Builds created in an attempt to handle
the request: they may be interrupted by the user or abandoned due to
a lost slave. The last Build (the one which actually gets to run to
completion) is said to 'satisfy' the BuildRequest. The observer will
be called once for each of these Builds, both old and new."""
def unsubscribe(observer):
"""Unregister the callable that was registered with subscribe()."""
def getSubmitTime():
"""Return the time when this request was submitted. Returns a
Deferred."""
class ISlaveStatus(Interface):
def getName():
"""Return the name of the build slave."""
def getAdmin():
"""Return a string with the slave admin's contact data."""
def getHost():
"""Return a string with the slave host info."""
def isConnected():
"""Return True if the slave is currently online, False if not."""
def lastMessageReceived():
"""Return a timestamp (seconds since epoch) indicating when the most
recent message was received from the buildslave."""
class ISchedulerStatus(Interface):
def getName():
"""Return the name of this Scheduler (a string)."""
def getPendingBuildsets():
"""Return an IBuildSet for all BuildSets that are pending. These
BuildSets are waiting for their tree-stable-timers to expire."""
# TODO: this is not implemented anywhere
class IBuilderStatus(Interface):
def getName():
"""Return the name of this Builder (a string)."""
def getCategory():
"""Return the category of this builder (a string)."""
def getDescription():
"""Return the description of this builder (a string)."""
def getState():
# TODO: this isn't nearly as meaningful as it used to be
"""Return a tuple (state, builds) for this Builder. 'state' is the
so-called 'big-status', indicating overall status (as opposed to
which step is currently running). It is a string, one of 'offline',
'idle', or 'building'. 'builds' is a list of IBuildStatus objects
(possibly empty) representing the currently active builds."""
def getSlaves():
"""Return a list of ISlaveStatus objects for the buildslaves that are
used by this builder."""
def getPendingBuildRequestStatuses():
"""
Get a L{IBuildRequestStatus} implementations for all unclaimed build
requests.
@returns: list of objects via Deferred
"""
def getCurrentBuilds():
"""Return a list containing an IBuildStatus object for each build
currently in progress."""
# again, we could probably provide an object for 'waiting' and
# 'interlocked' too, but things like the Change list might still be
# subject to change
def getLastFinishedBuild():
"""Return the IBuildStatus object representing the last finished
build, which may be None if the builder has not yet finished any
builds."""
def getBuild(number):
"""Return an IBuildStatus object for a historical build. Each build
is numbered (starting at 0 when the Builder is first added),
getBuild(n) will retrieve the Nth such build. getBuild(-n) will
retrieve a recent build, with -1 being the most recent build
started. If the Builder is idle, this will be the same as
getLastFinishedBuild(). If the Builder is active, it will be an
unfinished build. This method will return None if the build is no
longer available. Older builds are likely to have less information
stored: Logs are the first to go, then Steps."""
def getEvent(number):
"""Return an IStatusEvent object for a recent Event. Builders
connecting and disconnecting are events, as are ping attempts.
getEvent(-1) will return the most recent event. Events are numbered,
but it probably doesn't make sense to ever do getEvent(+n)."""
def generateFinishedBuilds(branches=[],
num_builds=None,
max_buildnum=None, finished_before=None,
max_search=200,
):
"""Return a generator that will produce IBuildStatus objects each
time you invoke its .next() method, starting with the most recent
finished build, then the previous build, and so on back to the oldest
build available.
@param branches: this is a list of branch names, and the generator
will only produce builds that involve the given
branches. If the list is empty, the generator will
produce all builds regardless of what branch they
used.
@param num_builds: if provided, the generator will stop after
providing this many builds. The default of None
means to produce as many builds as possible.
@param max_buildnum: if provided, the generator will start by
providing the build with this number, or the
highest-numbered preceding build (i.e. the
generator will not produce any build numbered
*higher* than max_buildnum). The default of None
means to start with the most recent finished
build. -1 means the same as None. -2 means to
start with the next-most-recent completed build,
etc.
@type finished_before: int: a timestamp, seconds since the epoch
@param finished_before: if provided, do not produce any builds that
finished after the given timestamp.
@type max_search: int
@param max_search: this method may have to examine a lot of builds
to find some that match the search parameters,
especially if there aren't any matching builds.
This argument imposes a hard limit on the number
of builds that will be examined.
"""
def subscribe(receiver):
"""Register an IStatusReceiver to receive new status events. The
receiver will be given builderChangedState, buildStarted, and
buildFinished messages."""
def unsubscribe(receiver):
"""Unregister an IStatusReceiver. No further status messgaes will be
delivered."""
class IEventSource(Interface):
def eventGenerator(branches=[], categories=[], committers=[], minTime=0):
"""This function creates a generator which will yield all of this
object's status events, starting with the most recent and progressing
backwards in time. These events provide the IStatusEvent interface.
At the moment they are all instances of buildbot.status.builder.Event
or buildbot.status.builder.BuildStepStatus.
@param branches: a list of branch names. The generator should only
return events that are associated with these branches. If the list is
empty, events for all branches should be returned (i.e. an empty list
means 'accept all' rather than 'accept none').
@param categories: a list of category names. The generator
should only return events that are categorized within the
given category. If the list is empty, events for all
categories should be returned.
@param committers: a list of committers. The generator should only
return events caused by one of the listed committers. If the list is
empty or None, events from every committer should be returned.
@param minTime: a timestamp. Do not generate events occurring prior to
this timestamp.
"""
class IBuildStatus(Interface):
"""I represent the status of a single Build/BuildRequest. It could be
in-progress or finished."""
def getBuilder():
"""
Return the BuilderStatus that owns this build.
@rtype: implementor of L{IBuilderStatus}
"""
def isFinished():
"""Return a boolean. True means the build has finished, False means
it is still running."""
def waitUntilFinished():
"""Return a Deferred that will fire when the build finishes. If the
build has already finished, this deferred will fire right away. The
callback is given this IBuildStatus instance as an argument."""
def getReason():
"""Return a string that indicates why the build was run. 'changes',
'forced', and 'periodic' are the most likely values. 'try' will be
added in the future."""
def getSourceStamps():
"""Return a list of SourceStamp objects which can be used to re-create
the source tree that this build used.
This method will return None if the source information is no longer
available."""
# TODO: it should be possible to expire the patch but still remember
# that the build was r123+something.
def getChanges():
"""Return a list of Change objects which represent which source
changes went into the build."""
def getRevisions():
"""Returns a string representing the list of revisions that led to
the build, rendered from each Change.revision"""
def getResponsibleUsers():
"""Return a list of Users who are to blame for the changes that went
into this build. If anything breaks (at least anything that wasn't
already broken), blame them. Specifically, this is the set of users
who were responsible for the Changes that went into this build. Each
User is a string, corresponding to their name as known by the VC
repository."""
def getInterestedUsers():
"""Return a list of Users who will want to know about the results of
this build but who did not actually make the Changes that went into it
(build sheriffs, code-domain owners)."""
def getNumber():
"""Within each builder, each Build has a number. Return it."""
def getPreviousBuild():
"""Convenience method. Returns None if the previous build is
unavailable."""
def getSteps():
"""Return a list of IBuildStepStatus objects. For invariant builds
(those which always use the same set of Steps), this should always
return the complete list, however some of the steps may not have
started yet (step.getTimes()[0] will be None). For variant builds,
this may not be complete (asking again later may give you more of
them)."""
def getTimes():
"""Returns a tuple of (start, end). 'start' and 'end' are the times
(seconds since the epoch) when the Build started and finished. If
the build is still running, 'end' will be None."""
# while the build is running, the following methods make sense.
# Afterwards they return None
def getETA():
"""Returns the number of seconds from now in which the build is
expected to finish, or None if we can't make a guess. This guess will
be refined over time."""
def getCurrentStep():
"""Return an IBuildStepStatus object representing the currently
active step."""
# Once you know the build has finished, the following methods are legal.
# Before the build has finished, they all return None.
def getSlavename():
"""Return the name of the buildslave which handled this build."""
def getText():
"""Returns a list of strings to describe the build. These are
intended to be displayed in a narrow column. If more space is
available, the caller should join them together with spaces before
presenting them to the user."""
def getResults():
"""Return a constant describing the results of the build: one of the
constants in buildbot.status.builder: SUCCESS, WARNINGS,
FAILURE, SKIPPED or EXCEPTION."""
def getLogs():
"""Return a list of logs that describe the build as a whole. Some
steps will contribute their logs, while others are less important
and will only be accessible through the IBuildStepStatus objects.
Each log is an object which implements the IStatusLog interface."""
def getTestResults():
"""Return a dictionary that maps test-name tuples to ITestResult
objects. This may return an empty or partially-filled dictionary
until the build has completed."""
# subscription interface
def subscribe(receiver, updateInterval=None):
"""Register an IStatusReceiver to receive new status events. The
receiver will be given stepStarted and stepFinished messages. If
'updateInterval' is non-None, buildETAUpdate messages will be sent
every 'updateInterval' seconds."""
def unsubscribe(receiver):
"""Unregister an IStatusReceiver. No further status messgaes will be
delivered."""
class ITestResult(Interface):
"""I describe the results of a single unit test."""
def getName():
"""Returns a tuple of strings which make up the test name. Tests may
be arranged in a hierarchy, so looking for common prefixes may be
useful."""
def getResults():
"""Returns a constant describing the results of the test: SUCCESS,
WARNINGS, FAILURE."""
def getText():
"""Returns a list of short strings which describe the results of the
test in slightly more detail. Suggested components include
'failure', 'error', 'passed', 'timeout'."""
def getLogs():
# in flux, it may be possible to provide more structured information
# like python Failure instances
"""Returns a dictionary of test logs. The keys are strings like
'stdout', 'log', 'exceptions'. The values are strings."""
class IBuildStepStatus(Interface):
"""I hold status for a single BuildStep."""
def getName():
"""Returns a short string with the name of this step. This string
may have spaces in it."""
def getBuild():
"""Returns the IBuildStatus object which contains this step."""
def getTimes():
"""Returns a tuple of (start, end). 'start' and 'end' are the times
(seconds since the epoch) when the Step started and finished. If the
step has not yet started, 'start' will be None. If the step is still
running, 'end' will be None."""
def getExpectations():
"""Returns a list of tuples (name, current, target). Each tuple
describes a single axis along which the step's progress can be
measured. 'name' is a string which describes the axis itself, like
'filesCompiled' or 'tests run' or 'bytes of output'. 'current' is a
number with the progress made so far, while 'target' is the value
that we expect (based upon past experience) to get to when the build
is finished.
'current' will change over time until the step is finished. It is
'None' until the step starts. When the build is finished, 'current'
may or may not equal 'target' (which is merely the expectation based
upon previous builds)."""
def getURLs():
"""Returns a dictionary of URLs. Each key is a link name (a short
string, like 'results' or 'coverage'), and each value is a URL. These
links will be displayed along with the LogFiles.
"""
def getLogs():
"""Returns a list of IStatusLog objects. If the step has not yet
finished, this list may be incomplete (asking again later may give
you more of them)."""
def isFinished():
"""Return a boolean. True means the step has finished, False means it
is still running."""
def waitUntilFinished():
"""Return a Deferred that will fire when the step finishes. If the
step has already finished, this deferred will fire right away. The
callback is given this IBuildStepStatus instance as an argument."""
# while the step is running, the following methods make sense.
# Afterwards they return None
def getETA():
"""Returns the number of seconds from now in which the step is
expected to finish, or None if we can't make a guess. This guess will
be refined over time."""
# Once you know the step has finished, the following methods are legal.
# Before the step has finished, they all return None.
def getText():
"""Returns a list of strings which describe the step. These are
intended to be displayed in a narrow column. If more space is
available, the caller should join them together with spaces before
presenting them to the user."""
def getResults():
"""Return a tuple describing the results of the step: (result,
strings). 'result' is one of the constants in
buildbot.status.builder: SUCCESS, WARNINGS, FAILURE, or SKIPPED.
'strings' is an optional list of strings that the step wants to
append to the overall build's results. These strings are usually
more terse than the ones returned by getText(): in particular,
successful Steps do not usually contribute any text to the overall
build."""
# subscription interface
def subscribe(receiver, updateInterval=10):
"""Register an IStatusReceiver to receive new status events. The
receiver will be given logStarted and logFinished messages. It will
also be given a stepETAUpdate message every 'updateInterval' seconds."""
def unsubscribe(receiver):
"""Unregister an IStatusReceiver. No further status messgaes will be
delivered."""
class IStatusEvent(Interface):
"""I represent a Builder Event, something non-Build related that can
happen to a Builder."""
def getTimes():
"""Returns a tuple of (start, end) like IBuildStepStatus, but end==0
indicates that this is a 'point event', which has no duration.
SlaveConnect/Disconnect are point events. Ping is not: it starts
when requested and ends when the response (positive or negative) is
returned"""
def getText():
"""Returns a list of strings which describe the event. These are
intended to be displayed in a narrow column. If more space is
available, the caller should join them together with spaces before
presenting them to the user."""
LOG_CHANNEL_STDOUT = 0
LOG_CHANNEL_STDERR = 1
LOG_CHANNEL_HEADER = 2
class IStatusLog(Interface):
"""I represent a single Log, which is a growing list of text items that
contains some kind of output for a single BuildStep. I might be finished,
in which case this list has stopped growing.
Each Log has a name, usually something boring like 'log' or 'output'.
These names are not guaranteed to be unique, however they are usually
chosen to be useful within the scope of a single step (i.e. the Compile
step might produce both 'log' and 'warnings'). The name may also have
spaces. If you want something more globally meaningful, at least within a
given Build, try::
'%s.%s' % (log.getStep().getName(), log.getName())
The Log can be presented as plain text, or it can be accessed as a list
of items, each of which has a channel indicator (header, stdout, stderr)
and a text chunk. An HTML display might represent the interleaved
channels with different styles, while a straight download-the-text
interface would just want to retrieve a big string.
The 'header' channel is used by ShellCommands to prepend a note about
which command is about to be run ('running command FOO in directory
DIR'), and append another note giving the exit code of the process.
Logs can be streaming: if the Log has not yet finished, you can
subscribe to receive new chunks as they are added.
A ShellCommand will have a Log associated with it that gathers stdout
and stderr. Logs may also be created by parsing command output or
through other synthetic means (grepping for all the warnings in a
compile log, or listing all the test cases that are going to be run).
Such synthetic Logs are usually finished as soon as they are created."""
def getName():
"""Returns a short string with the name of this log, probably 'log'.
"""
def getStep():
"""Returns the IBuildStepStatus which owns this log."""
# TODO: can there be non-Step logs?
def isFinished():
"""Return a boolean. True means the log has finished and is closed,
False means it is still open and new chunks may be added to it."""
def waitUntilFinished():
"""Return a Deferred that will fire when the log is closed. If the
log has already finished, this deferred will fire right away. The
callback is given this IStatusLog instance as an argument."""
def subscribe(receiver, catchup):
"""Register an IStatusReceiver to receive chunks (with logChunk) as
data is added to the Log. If you use this, you will also want to use
waitUntilFinished to find out when the listener can be retired.
Subscribing to a closed Log is a no-op.
If 'catchup' is True, the receiver will immediately be sent a series
of logChunk messages to bring it up to date with the partially-filled
log. This allows a status client to join a Log already in progress
without missing any data. If the Log has already finished, it is too
late to catch up: just do getText() instead.
If the Log is very large, the receiver will be called many times with
a lot of data. There is no way to throttle this data. If the receiver
is planning on sending the data on to somewhere else, over a narrow
connection, you can get a throttleable subscription by using
C{subscribeConsumer} instead."""
def unsubscribe(receiver):
"""Remove a receiver previously registered with subscribe(). Attempts
to remove a receiver which was not previously registered is a no-op.
"""
def subscribeConsumer(consumer):
"""Register an L{IStatusLogConsumer} to receive all chunks of the
logfile, including all the old entries and any that will arrive in
the future. The consumer will first have their C{registerProducer}
method invoked with a reference to an object that can be told
C{pauseProducing}, C{resumeProducing}, and C{stopProducing}. Then the
consumer's C{writeChunk} method will be called repeatedly with each
(channel, text) tuple in the log, starting with the very first. The
consumer will be notified with C{finish} when the log has been
exhausted (which can only happen when the log is finished). Note that
a small amount of data could be written via C{writeChunk} even after
C{pauseProducing} has been called.
To unsubscribe the consumer, use C{producer.stopProducing}."""
# once the log has finished, the following methods make sense. They can
# be called earlier, but they will only return the contents of the log up
# to the point at which they were called. You will lose items that are
# added later. Use C{subscribe} or C{subscribeConsumer} to avoid missing
# anything.
def hasContents():
"""Returns True if the LogFile still has contents available. Returns
False for logs that have been pruned. Clients should test this before
offering to show the contents of any log."""
def getText():
"""Return one big string with the contents of the Log. This merges
all non-header chunks together."""
def readlines(channel=LOG_CHANNEL_STDOUT):
"""Read lines from one channel of the logfile. This returns an
iterator that will provide single lines of text (including the
trailing newline).
"""
def getTextWithHeaders():
"""Return one big string with the contents of the Log. This merges
all chunks (including headers) together."""
def getChunks():
"""Generate a list of (channel, text) tuples. 'channel' is a number,
0 for stdout, 1 for stderr, 2 for header. (note that stderr is merged
into stdout if PTYs are in use)."""
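# Hedged sketch: splitting a finished log into stdout and stderr text with
# getChunks() and the channel constants defined earlier in this module.
def _example_split_log(status_log):
    stdout_parts, stderr_parts = [], []
    if not status_log.hasContents():
        return '', ''
    for channel, text in status_log.getChunks():
        if channel == LOG_CHANNEL_STDOUT:
            stdout_parts.append(text)
        elif channel == LOG_CHANNEL_STDERR:
            stderr_parts.append(text)
    return ''.join(stdout_parts), ''.join(stderr_parts)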
class IStatusLogConsumer(Interface):
"""I am an object which can be passed to IStatusLog.subscribeConsumer().
I represent a target for writing the contents of an IStatusLog. This
differs from a regular IStatusReceiver in that it can pause the producer.
This makes it more suitable for use in streaming data over network
sockets, such as an HTTP request. Note that the consumer can only pause
the producer until it has caught up with all the old data. After that
point, C{pauseProducing} is ignored and all new output from the log is
sent directoy to the consumer."""
def registerProducer(producer, streaming):
"""A producer is being hooked up to this consumer. The consumer only
has to handle a single producer. It should send .pauseProducing and
.resumeProducing messages to the producer when it wants to stop or
resume the flow of data. 'streaming' will be set to True because the
producer is always a PushProducer.
"""
def unregisterProducer():
"""The previously-registered producer has been removed. No further
pauseProducing or resumeProducing calls should be made. The consumer
should delete its reference to the Producer so it can be released."""
def writeChunk(chunk):
"""A chunk (i.e. a tuple of (channel, text)) is being written to the
consumer."""
def finish():
"""The log has finished sending chunks to the consumer."""
class IStatusReceiver(Interface):
"""I am an object which can receive build status updates. I may be
subscribed to an IStatus, an IBuilderStatus, or an IBuildStatus."""
def buildsetSubmitted(buildset):
"""A new BuildSet has been submitted to the buildmaster.
@type buildset: implementor of L{IBuildSetStatus}
"""
def requestSubmitted(request):
"""A new BuildRequest has been submitted to the buildmaster.
@type request: implementor of L{IBuildRequestStatus}
"""
def requestCancelled(builder, request):
"""A BuildRequest has been cancelled on the given Builder.
@type builder: L{buildbot.status.builder.BuilderStatus}
@type request: implementor of L{IBuildRequestStatus}
"""
def builderAdded(builderName, builder):
"""
A new Builder has just been added. This method may return an
IStatusReceiver (probably 'self') which will be subscribed to receive
builderChangedState and buildStarted/Finished events.
@type builderName: string
@type builder: L{buildbot.status.builder.BuilderStatus}
@rtype: implementor of L{IStatusReceiver}
"""
def builderChangedState(builderName, state):
"""Builder 'builderName' has changed state. The possible values for
'state' are 'offline', 'idle', and 'building'."""
def buildStarted(builderName, build):
"""Builder 'builderName' has just started a build. The build is an
object which implements IBuildStatus, and can be queried for more
information.
This method may return an IStatusReceiver (it could even return
'self'). If it does so, stepStarted and stepFinished methods will be
invoked on the object for the steps of this one build. This is a
convenient way to subscribe to all build steps without missing any.
This receiver will automatically be unsubscribed when the build
finishes.
It can also return a tuple of (IStatusReceiver, interval), in which
case buildETAUpdate messages are sent every 'interval' seconds, in
addition to the stepStarted and stepFinished messages."""
def buildETAUpdate(build, ETA):
"""This is a periodic update on the progress this Build has made
towards completion."""
def changeAdded(change):
"""A new Change was added to the ChangeMaster. By the time this event
is received, all schedulers have already received the change."""
def stepStarted(build, step):
"""A step has just started. 'step' is the IBuildStepStatus which
represents the step: it can be queried for more information.
This method may return an IStatusReceiver (it could even return
'self'). If it does so, logStarted and logFinished methods will be
invoked on the object for logs created by this one step. This
receiver will be automatically unsubscribed when the step finishes.
Alternatively, the method may return a tuple of an IStatusReceiver
and an integer named 'updateInterval'. In addition to
logStarted/logFinished messages, it will also receive stepETAUpdate
messages about every updateInterval seconds."""
def stepTextChanged(build, step, text):
"""The text for a step has been updated.
This is called when calling setText() on the step status, and
hands in the text list."""
def stepText2Changed(build, step, text2):
"""The text2 for a step has been updated.
This is called when calling setText2() on the step status, and
hands in text2 list."""
def stepETAUpdate(build, step, ETA, expectations):
"""This is a periodic update on the progress this Step has made
towards completion. It gets an ETA (in seconds from the present) of
when the step ought to be complete, and a list of expectation tuples
(as returned by IBuildStepStatus.getExpectations) with more detailed
information."""
def logStarted(build, step, log):
"""A new Log has been started, probably because a step has just
started running a shell command. 'log' is the IStatusLog object
which can be queried for more information.
This method may return an IStatusReceiver (such as 'self'), in which
case the target's logChunk method will be invoked as text is added to
the logfile. This receiver will automatically be unsubscribed when the
log finishes."""
def logChunk(build, step, log, channel, text):
"""Some text has been added to this log. 'channel' is one of
LOG_CHANNEL_STDOUT, LOG_CHANNEL_STDERR, or LOG_CHANNEL_HEADER, as
defined in IStatusLog.getChunks."""
def logFinished(build, step, log):
"""A Log has been closed."""
def stepFinished(build, step, results):
"""A step has just finished. 'results' is the result tuple described
in IBuildStepStatus.getResults."""
def buildFinished(builderName, build, results):
"""
A build has just finished. 'results' is the result tuple described
in L{IBuildStatus.getResults}.
@type builderName: string
@type build: L{buildbot.status.build.BuildStatus}
@type results: tuple
"""
def builderRemoved(builderName):
"""The Builder has been removed."""
def slaveConnected(slaveName):
"""The slave has connected."""
def slaveDisconnected(slaveName):
"""The slave has disconnected."""
def checkConfig(otherStatusReceivers):
"""Verify that there are no other status receivers which conflict with
the current one.
@type otherStatusReceivers: A list of L{IStatusReceiver} objects which
will contain self.
"""
class IControl(Interface):
def addChange(change):
"""Add a change to the change queue, for analysis by schedulers."""
def getBuilder(name):
"""Retrieve the IBuilderControl object for the given Builder."""
class IBuilderControl(Interface):
def submitBuildRequest(ss, reason, props=None):
"""Create a BuildRequest, which will eventually cause a build of the
given SourceStamp to be run on this builder. This returns a
BuildRequestStatus object via a Deferred, which can be used to keep
track of the builds that are performed."""
def rebuildBuild(buildStatus, reason="<rebuild, no reason given>"):
"""Rebuild something we've already built before. This submits a
BuildRequest to our Builder using the same SourceStamp as the earlier
build. This has no effect (but may eventually raise an exception) if
this Build has not yet finished."""
def getPendingBuildRequestControls():
"""
Get a list of L{IBuildRequestControl} objects for this Builder.
Each one corresponds to an unclaimed build request.
@returns: list of objects via Deferred
"""
def getBuild(number):
"""Attempt to return an IBuildControl object for the given build.
Returns None if no such object is available. This will only work for
the build that is currently in progress: once the build finishes,
there is nothing to control anymore."""
def ping():
"""Attempt to contact the slave and see if it is still alive. This
returns a Deferred which fires with either True (the slave is still
alive) or False (the slave did not respond). As a side effect, adds an
event to this builder's column in the waterfall display containing the
results of the ping. Note that this may not fail for a long time; it is
implemented in terms of the timeout on the underlying TCP connection."""
# TODO: this ought to live in ISlaveControl, maybe with disconnect()
# or something. However the event that is emitted is most useful in
# the Builder column, so it kinda fits here too.
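# Hedged sketch: forcing a build through IBuilderControl. The SourceStamp
# construction is illustrative; buildbot.sourcestamp.SourceStamp with
# branch/revision keywords is assumed here.
def _example_force_build(builder_control):
    from buildbot.sourcestamp import SourceStamp
    ss = SourceStamp(branch='trunk', revision=None)  # None => latest

    d = builder_control.submitBuildRequest(ss, 'forced by example sketch')

    def _submitted(build_request_status):
        def _started(build_status):
            print 'build #%d started' % build_status.getNumber()
        build_request_status.subscribe(_started)
        return build_request_status

    return d.addCallback(_submitted)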
class IBuildRequestControl(Interface):
def subscribe(observer):
"""Register a callable that will be invoked (with a single
IBuildControl object) for each Build that is created to satisfy this
request. There may be multiple Builds created in an attempt to handle
the request: they may be interrupted by the user or abandoned due to
a lost slave. The last Build (the one which actually gets to run to
completion) is said to 'satisfy' the BuildRequest. The observer will
be called once for each of these Builds, both old and new."""
def unsubscribe(observer):
"""Unregister the callable that was registered with subscribe()."""
def cancel():
"""Remove the build from the pending queue. Has no effect if the
build has already been started."""
class IBuildControl(Interface):
def getStatus():
"""Return an IBuildStatus object for the Build that I control."""
def stopBuild(reason="<no reason given>"):
"""Halt the build. This has no effect if the build has already
finished."""
class ILogFile(Interface):
"""This is the internal interface to a LogFile, used by the BuildStep to
write data into the log.
"""
def addStdout(data):
pass
def addStderr(data):
pass
def addHeader(data):
pass
def finish():
"""The process that is feeding the log file has finished, and no
further data will be added. This closes the logfile."""
class ILogObserver(Interface):
"""Objects which provide this interface can be used in a BuildStep to
watch the output of a LogFile and parse it incrementally.
"""
# internal methods
def setStep(step):
pass
def setLog(log):
pass
# methods called by the LogFile
def logChunk(build, step, log, channel, text):
pass
class IBuildSlave(Interface):
# this is a marker interface for the BuildSlave class
pass
class ILatentBuildSlave(IBuildSlave):
"""A build slave that is not always running, but can run when requested.
"""
substantiated = Attribute('Substantiated',
'Whether the latent build slave is currently '
'substantiated with a real instance.')
def substantiate():
"""Request that the slave substantiate with a real instance.
Returns a deferred that will callback when a real instance has
attached."""
# there is an insubstantiate too, but that is not used externally ATM.
def buildStarted(sb):
"""Inform the latent build slave that a build has started.
@param sb: a L{LatentSlaveBuilder}. The sb is the one for whom the
build started.
"""
def buildFinished(sb):
"""Inform the latent build slave that a build has finished.
@param sb: a L{LatentSlaveBuilder}. The sb is the one for whom the
build finished.
"""
class IRenderable(Interface):
"""An object that can be interpolated with properties from a build.
"""
def getRenderingFor(iprops):
"""Return a deferred that fires with interpolation with the given properties
@param iprops: the L{IProperties} provider supplying the properties.
"""
class IProperties(Interface):
"""
An object providing access to build properties
"""
def getProperty(name, default=None):
"""Get the named property, returning the default if the property does
not exist.
@param name: property name
@type name: string
@param default: default value (default: @code{None})
@returns: property value
"""
def hasProperty(name):
"""Return true if the named property exists.
@param name: property name
@type name: string
@returns: boolean
"""
def has_key(name):
"""Deprecated name for L{hasProperty}."""
def setProperty(name, value, source, runtime=False):
"""Set the given property, overwriting any existing value. The source
describes the source of the value for human interpretation.
@param name: property name
@type name: string
@param value: property value
@type value: JSON-able value
@param source: property source
@type source: string
@param runtime: (optional) whether this property was set during the
build's runtime: usually left at its default value
@type runtime: boolean
"""
def getProperties():
"""Get the L{buildbot.process.properties.Properties} instance storing
these properties. Note that the interface for this class is not
stable, so where possible the other methods of this interface should be
used.
@returns: L{buildbot.process.properties.Properties} instance
"""
def getBuild():
"""Get the L{buildbot.process.build.Build} instance for the current
build. Note that this object is not available after the build is
complete, at which point this method will return None.
Try to avoid using this method, as the API of L{Build} instances is not
well-defined.
@returns L{buildbot.process.build.Build} instance
"""
def render(value):
"""Render @code{value} as an L{IRenderable}. This essentially coerces
@code{value} to an L{IRenderable} and calls its @L{getRenderingFor}
method.
@name value: value to render
@returns: rendered value
"""
class IScheduler(Interface):
pass
class ITriggerableScheduler(Interface):
"""
A scheduler that can be triggered by buildsteps.
"""
def trigger(sourcestamps, set_props=None):
"""Trigger a build with the given source stamp and properties.
"""
class IBuildStepFactory(Interface):
def buildStep():
"""
"""
|
rossburton/yocto-autobuilder
|
lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/interfaces.py
|
Python
|
gpl-2.0
| 50,356
|
__productname__ = 'vew'
__version__ = '0.0.1'
__copyright__ = "Copyright (c) 2015 Shawn Axsom"
__author__ = "Shawn Axsom"
__author_email__ = "shawn.axsom@gmail.com"
__description__ = "Git Viewer"
__url__ = "https://github.com/axs221/vew"
__license__ = "Licensed under the MIT License"
|
axs221/vew
|
src/__init__.py
|
Python
|
gpl-2.0
| 285
|
"""
Peg.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to work around the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.geometry.creation import extrude
from fabmetheus_utilities.geometry.creation import lineation
from fabmetheus_utilities.geometry.creation import solid
from fabmetheus_utilities.geometry.geometry_utilities import evaluate
from fabmetheus_utilities.geometry.solids import cylinder
from fabmetheus_utilities.vector3 import Vector3
import math
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/02/05 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def addPegOutput(bevel, endZ, outputs, radius, start, topOverBottom, xmlElement):
'Add beveled cylinder to outputs given bevel, endZ, radius and start.'
height = abs(start.z - endZ)
bevelStartRatio = max(1.0 - bevel / height, 0.5)
oneMinusBevelStartRatio = 1.0 - bevelStartRatio
trunkEndZ = bevelStartRatio * endZ + oneMinusBevelStartRatio * start.z
trunkTopOverBottom = bevelStartRatio * topOverBottom + oneMinusBevelStartRatio
sides = evaluate.getSidesMinimumThreeBasedOnPrecision(max(radius.real, radius.imag), xmlElement)
cylinder.addCylinderOutputByEndStart(trunkEndZ, radius, outputs, sides, start, trunkTopOverBottom)
capRadius = radius * trunkTopOverBottom
capStart = bevelStartRatio * Vector3(start.x, start.y, endZ) + oneMinusBevelStartRatio * start
radiusMaximum = max(radius.real, radius.imag)
endRadiusMaximum = radiusMaximum * topOverBottom - bevel
trunkRadiusMaximum = radiusMaximum * trunkTopOverBottom
capTopOverBottom = endRadiusMaximum / trunkRadiusMaximum
cylinder.addCylinderOutputByEndStart(endZ, capRadius, outputs, sides, capStart, capTopOverBottom)
def getGeometryOutput(derivation, xmlElement):
'Get vector3 vertexes from attribute dictionary.'
if derivation == None:
derivation = PegDerivation(xmlElement)
positives = []
radius = complex(derivation.radius, derivation.radius)
addPegOutput(derivation.bevel, derivation.endZ, positives, radius, derivation.start, derivation.topOverBottom, xmlElement)
return extrude.getGeometryOutputByNegativesPositives([], positives, xmlElement)
def getGeometryOutputByArguments(arguments, xmlElement):
'Get vector3 vertexes from attribute dictionary by arguments.'
evaluate.setAttributeDictionaryByArguments(['radius', 'endZ', 'start'], arguments, xmlElement)
return getGeometryOutput(None, xmlElement)
def getNewDerivation(xmlElement):
'Get new derivation.'
return PegDerivation(xmlElement)
def getTopAddBiconicOutput(bottomRadians, height, outputs, radius, sides, start, tipRadius, topRadians):
'Get top and add biconic cylinder to outputs.'
radiusMaximum = max(radius.real, radius.imag)
topRadiusMaximum = radiusMaximum - height * math.tan(bottomRadians)
trunkEndZ = start.z + height
trunkTopOverBottom = topRadiusMaximum / radiusMaximum
topRadiusComplex = trunkTopOverBottom * radius
cylinder.addCylinderOutputByEndStart(trunkEndZ, radius, outputs, sides, start, trunkTopOverBottom)
tipOverTop = tipRadius / topRadiusMaximum
if tipOverTop >= 1.0:
return trunkEndZ
capStart = Vector3(start.x, start.y, trunkEndZ)
capEndZ = trunkEndZ + (topRadiusMaximum - tipRadius) / math.tan(topRadians)
cylinder.addCylinderOutputByEndStart(capEndZ, topRadiusComplex, outputs, sides, capStart, tipOverTop)
return capEndZ
def processXMLElement(xmlElement):
'Process the xml element.'
solid.processXMLElementByGeometry(getGeometryOutput(None, xmlElement), xmlElement)
class PegDerivation:
'Class to hold peg variables.'
def __init__(self, xmlElement):
'Set defaults.'
self.endZ = evaluate.getEvaluatedFloat(10.0, 'endZ', xmlElement)
self.start = evaluate.getVector3ByPrefix(Vector3(), 'start', xmlElement)
self.radius = lineation.getFloatByPrefixBeginEnd('radius', 'diameter', 2.0, xmlElement)
self.topOverBottom = evaluate.getEvaluatedFloat(0.8, 'topOverBottom', xmlElement)
self.xmlElement = xmlElement
# Set derived variables.
self.bevelOverRadius = evaluate.getEvaluatedFloat(0.25, 'bevelOverRadius', xmlElement)
self.bevel = self.bevelOverRadius * self.radius
self.bevel = evaluate.getEvaluatedFloat(self.bevel, 'bevel', xmlElement)
self.clearanceOverRadius = evaluate.getEvaluatedFloat(0.0, 'clearanceOverRadius', xmlElement)
self.clearance = self.clearanceOverRadius * self.radius
self.clearance = evaluate.getEvaluatedFloat(self.clearance, 'clearance', xmlElement)
def __repr__(self):
'Get the string representation of this PegDerivation.'
return str(self.__dict__)
|
natetrue/ReplicatorG
|
skein_engines/skeinforge-40/fabmetheus_utilities/geometry/creation/peg.py
|
Python
|
gpl-2.0
| 4,748
|
"""
# Name: Tier 1 Rapid Benefit Indicator Assessment - Social Equity
# Purpose: Calculate the social equity of site benefits
# Author: Justin Bousquin
#
# Version Notes:
# Developed in ArcGIS 10.3
#0.1.0 converted from .pyt
"""
###########IMPORTS###########
import os
import time
import traceback

import arcpy
from decimal import Decimal
arcpy.env.parallelProcessingFactor = "100%" #use all available resources
arcpy.env.overwriteOutput = True #overwrite existing files
##########USER INPUTS##########
#sovi, sovi_field, sovi_High, buff_dist, outTbl
sovi = ""#vulnerability Feature dataset
sovi_field = ""#field in feature dataset e.g. "vulnerability"
sovi_High = ""#list of values from field to consider highly vulnerable
buff_dist = ""#Buffer Distance e.g. "1 Miles"
outTbl = ""#output file
###############################
###########FUNCTIONS###########
def message(string, severity = 0):
"""Generic message
Purpose: prints string message in py or pyt.
"""
print(string)
if severity == 1:
arcpy.AddWarning(string)
else:
arcpy.AddMessage(string)
def exec_time(start, task):
"""Global Timer
Purpose: Returns the time since the last function assignment,
and a task message.
Notes: used during testing to compare efficiency of each step
"""
end = time.clock()
comp_time = time.strftime("%H:%M:%S", time.gmtime(end-start))
message("Run time for " + task + ": " + str(comp_time))
start = time.clock()
return start
def dec(x):
"""decimal.Decimal"""
return Decimal(x)
def get_ext(FC):
"""get extension"""
ext = arcpy.Describe(FC).extension
if len(ext) > 0:
ext = "." + ext
return ext
def del_exists(item):
""" Delete if exists
Purpose: if a file exists it is deleted and noted in a message.
"""
if arcpy.Exists(item):
try:
arcpy.Delete_management(item)
message("'{}' already exists and will be replaced.".format(item))
except:
message("'{}' exists but could not be deleted.".format(item))
def field_exists(table, field):
"""Check if field exists in table
Notes: return true/false
"""
fieldList = [f.name for f in arcpy.ListFields(table)]
return True if field in fieldList else False
def find_ID(table):
"""return an ID field where orig_ID > ORIG_FID > OID@
"""
if field_exists(table, "orig_ID"):
return "orig_ID"
elif field_exists(table, "ORIG_FID"):
return "ORIG_FID"
else:
return arcpy.Describe(table).OIDFieldName
def fieldName(name):
"""return acceptable field name from string
"""
Fname = name[0:8] # Correct length <9
for char in ['.', ' ', ',', '!', '@', '#', '$', '%', '^', '&', '*']:
if char in Fname:
Fname = Fname.replace(char, "_")
return Fname
def unique_values(table, field):
"""Unique Values
Purpose: returns a sorted list of unique values
Notes: used to find unique field values in table column
"""
with arcpy.da.SearchCursor(table, [field]) as cursor:
return sorted({row[0] for row in cursor if row[0]})
def checkSpatialReference(match_dataset, in_dataset, output=None):
"""Check Spatial Reference
Purpose: Checks that in_dataset spatial reference name matches
match_dataset and re-projects if not.
Inputs: \n match_dataset(Feature Class/Feature Layer/Feature Dataset):
The dataset with the spatial reference that will be matched.
in_dataset (Feature Class/Feature Layer/Feature Dataset):
The dataset that will be projected if it does not match.
output: \n Path, filename and extension for projected in_dataset
Defaults to match_dataset location.
Return: \n Either the original FC or the projected 'output' is returned.
"""
matchSR = arcpy.Describe(match_dataset).spatialReference
otherSR = arcpy.Describe(in_dataset).spatialReference
if matchSR.name != otherSR.name:
message("'{}' Spatial reference does not match.".format(in_dataset))
try:
if output is None:
# Output defaults to match_dataset location
path = os.path.dirname(match_dataset) + os.sep
ext = get_ext(match_dataset)
out_name = os.path.splitext(os.path.basename(in_dataset))[0]
output = path + out_name + "_prj" + ext
del_exists(output) # delete if output exists
# Project (doesn't work on Raster)
arcpy.Project_management(in_dataset, output, matchSR)
message("File was re-projected and saved as:\n" + output)
return output
except:
message("Warning: spatial reference could not be updated.", 1)
return in_dataset
else:
return in_dataset
def selectStr_by_list(field, lst):
"""Selection Query String from list
Purpose: return a string for a where clause from a list of field values
"""
exp = ''
for item in lst:
if type(item) in [str, unicode]: # string
exp += "{} = '{}' OR ".format(field, item)
elif type(item) == float:
decP = len(repr(item).split(".")[1]) # decimal places
if decP >= 15:
exp += 'ROUND({},{}) = {} OR '.format(field, decP, repr(item))
else:
exp += '{} = {} OR '.format(field, repr(item))
elif type(item) in [int, long]: # numeric
exp += '"{}" = {} OR '.format(field, item)
else:
message("'{}' in list, unknown type '{}'".format(item, type(item)))
return (exp[:-4])
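# Usage sketch (illustrative): selectStr_by_list("TYPE", ["high", "med"])
# returns "TYPE = 'high' OR TYPE = 'med'", ready to use as a where clause
# in SelectLayerByAttribute_management.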
def simple_buffer(outTbl, tempName, bufferDist):
""" Create buffer using tempName"""
path = os.path.dirname(outTbl) + os.sep
buf = path + tempName + get_ext(outTbl) # Set temp file name
del_exists(buf)
arcpy.Buffer_analysis(outTbl, buf, bufferDist)
return buf
def percent_cover(poly, bufPoly, units="SQUAREMETERS"):
"""Percent Cover
Purpose:"""
arcpy.MakeFeatureLayer_management(poly, "polyLyr")
lst = []
orderLst = []
# ADD handle for when no overlap?
# Check for "orig_ID" then "ORIG_FID" then use OID@
field = find_ID(bufPoly)
with arcpy.da.SearchCursor(bufPoly, ["SHAPE@", field]) as cursor:
for row in cursor:
totalArea = dec(row[0].getArea("PLANAR", units))
match = "INTERSECT" # default
arcpy.SelectLayerByLocation_management("polyLyr", match, row[0])
lyrLst = []
with arcpy.da.SearchCursor("polyLyr", ["SHAPE@"]) as cursor2:
for row2 in cursor2:
p = 4 # dimension = polygon
interPoly = row2[0].intersect(row[0], p)
interArea = dec(interPoly.getArea("PLANAR", units))
lyrLst.append((interArea/totalArea)*100)
lst.append(sum(lyrLst))
orderLst.append(row[1])
arcpy.Delete_management("polyLyr")
# Sort by ID field
orderLst, lst = (list(x) for x in zip(*sorted(zip(orderLst, lst))))
return lst
def lst_to_AddField_lst(table, field_lst, list_lst, type_lst):
"""Lists to ADD Field
Purpose: add new fields to a table and populate each from a parallel list.
Notes: Table, list of new fields, list of listes of field values,
list of field datatypes.
"""
if len(field_lst) != len(list_lst) or len(field_lst) != len(type_lst):
message("ERROR: lists aren't the same length!")
# "" defaults to "DOUBLE"
type_lst = ["Double" if x == "" else x for x in type_lst]
for i, field in enumerate(field_lst):
# Add fields
arcpy.AddField_management(table, field, type_lst[i])
# Add values
lst_to_field(table, field, list_lst[i])
def lst_to_field(table, field, lst):
"""Add List to Field
Purpose: write values from a list into an existing field, row by row.
Notes: 1 field at a time
Example: lst_to_field(featureClass, "fieldName", lst)
"""
if len(lst) == 0:
message("No values to add to '{}'.".format(field))
elif field_exists(table, field):
with arcpy.da.UpdateCursor(table, [field]) as cursor:
# For row in cursor:
for i, row in enumerate(cursor):
row[0] = lst[i]
cursor.updateRow(row)
else:
message("{} field not found in {}".format(field, table))
########SOCIAL EQUITY#########
def socEq_MODULE(PARAMS):
"""Social Equity of Benefits"""
mod_str = "Social Equity of Benefits analysis"
message(mod_str + "...")
sovi = PARAMS[0]
field, SoVI_High = PARAMS[1], PARAMS[2]
bufferDist = PARAMS[3]
outTbl = PARAMS[4]
message("Checking input variables...")
sovi = checkSpatialReference(outTbl, sovi) # check projection
message("Input variables OK")
# Buffer sites by specified distance
buf = simple_buffer(outTbl, "sovi_buffer", bufferDist)
# List all the unique values in the specified field
arcpy.MakeFeatureLayer_management(sovi, "lyr")
full_fieldLst = unique_values("lyr", field)
# Add field for SoVI_High
name = "Vul_High"
f_type = "DOUBLE"
if not field_exists(outTbl, name):
arcpy.AddField_management(outTbl, name, f_type,
"", "", "", "", "", "", "")
else:
message("'{}' values overwritten in table:\n{}".format(name, outTbl))
# Populate new field
sel = "NEW_SELECTION"
wClause = selectStr_by_list(field, SoVI_High)
arcpy.SelectLayerByAttribute_management("lyr", sel, wClause)
pct_lst = percent_cover("lyr", buf)
lst_to_field(outTbl, name, pct_lst)
# Add fields for the rest of the possible values if 6 or less
fieldLst = [x for x in full_fieldLst if x not in SoVI_High]
message("There are {} unique values for '{}'".format(len(fieldLst), field))
if len(fieldLst) < 6:
message("Creating new fields for each...")
# Add fields for each unique in field
for val in fieldLst:
name = fieldName("sv_" + str(val))
if not field_exists(outTbl, name):
arcpy.AddField_management(outTbl, name, f_type, "", "", "",
val, "", "", "")
else: # field already existed
message("'{}' values overwritten in table:\n{}".format(name,
outTbl))
wClause = selectStr_by_list(field, [val])
arcpy.SelectLayerByAttribute_management("lyr", sel, wClause)
pct_lst = percent_cover("lyr", buf)
lst_to_field(outTbl, name, pct_lst)
else:
message("This is too many values to create unique fields for each, " +
"just calculating {} coverage".format(SoVI_High))
arcpy.Delete_management(buf)
arcpy.Delete_management("lyr")
message(mod_str + " complete")
##############################
###########EXECUTE############
try:
start = time.clock()
soc_PARAMS = [sovi, sovi_field, sovi_High, buff_dist, outTbl]
socEq_MODULE(soc_PARAMS)
start = exec_time(start, "Social equity assessment")
except Exception:
message("Error occured during assessment.", 1)
traceback.print_exc()
|
jbousquin/Rapid-Benefit-Indicators-Tools
|
py_Spatial/py_standaloneScripts/Social_Equity.py
|
Python
|
mit
| 11,257
|
from subprocess import call
import distutils.util
from ConfigParser import ConfigParser
from assetjet import __version__
def build_installer(appName, filename, installerName):
# read the Inno Setup location from the config file
cfg = ConfigParser()
cfg.readfp(open('ftpserver.cfg'))
innoSetupLoc = cfg.get('Deploy', 'innoSetupLoc')
# Flag to only allow 64bit versions to be installed on 64bit systems
if distutils.util.get_platform() in ['win-amd64']:
architecturesAllowed = 'x64'
else:
architecturesAllowed = ''
# Compile it
print('compiling inno setup..')
call('{0} \
"/dAppName={1}" \
"/dVersion={2}" \
"/dArchitecturesAllowed={3}" \
"/dOutputBaseFilename={4}" \
inno_installer.iss'.format(innoSetupLoc,
appName,
__version__,
architecturesAllowed,
installerName)
)
if __name__ == '__main__':
from main import appName, filename, installerName
build_installer(appName, filename, installerName)
|
peterbrook/assetjet
|
deploy/build_inno_setup.py
|
Python
|
gpl-3.0
| 1,191
|
from event.utils import upload_path
def upload_path_barcodes(instance, filename):
return upload_path('barcodes/', filename, instance.id)
|
brickfiestastem/brickfiesta
|
afol/utils.py
|
Python
|
agpl-3.0
| 143
|
#
# Copyright 2013 Geodelic
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
from sqlobject import SQLObject, StringCol, ForeignKey, IntCol, DatabaseIndex
from time import time
CNAME = 'CNAME'
MASTER = 'MASTER'
NS = 'NS'
SOA = 'SOA'
class domain(SQLObject):
class sqlmeta:
table = 'domains'
name = StringCol(length=255, notNone=True)
master = StringCol(length=128, default=None)
last_check = IntCol(default=None)
type = StringCol(length=6, notNone=True)
notified_serial = IntCol(default=None)
account = StringCol(length=40, default=None)
nameIndex = DatabaseIndex(name)
class record(SQLObject):
class sqlmeta:
table = 'records'
domain = ForeignKey('domain', cascade=True)
name = StringCol(length=255)
type = StringCol(length=6)
content = StringCol(length=255)
ttl = IntCol(default=120)
prio = IntCol(default=None)
change_date = IntCol()
nameIndex = DatabaseIndex(name)
contentIndex = DatabaseIndex(content)
def update(self, **kwargs):
kwargs['change_date'] = int(time())
return self.set(**kwargs)
_updated = False
@classmethod
def updated(cls, updated=None):
if updated and not cls._updated:
cls._updated = True
return cls._updated
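# Hedged usage sketch (names are illustrative): update() stamps change_date
# automatically, e.g.
# rec = record.selectBy(name='www.example.com', type='A').getOne()
# rec.update(content='192.0.2.10', ttl=300)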
class supermaster(SQLObject):
class sqlmeta:
table = 'supermasters'
ip = StringCol(length=25, notNone=True)
nameserver = StringCol(length=255, notNone=True)
account = StringCol(length=40)
|
dlobue/powerupdater
|
powerupdater/pdnsmodels.py
|
Python
|
apache-2.0
| 2,022
|
#!/usr/bin/python
import subprocess
#print "Updating"
#command = ["git", "fetch", "origin"]
#subprocess.check_output(command)
#command = ["git", "pull", "origin", "master"]
#subprocess.check_output(command)
#pull from git
print "Starting wezbot"
subprocess.call(["python", "/home/pi/wezbot/code/wezbot.py"])
|
krishnaz/wezbot
|
code/updater.py
|
Python
|
gpl-2.0
| 311
|
#!/usr/bin/env python
#
# Copyright 2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from __future__ import division
from __future__ import unicode_literals
from gnuradio import gr
from gnuradio import audio
from gnuradio import blocks
from gnuradio import vocoder
def build_graph():
tb = gr.top_block()
src = audio.source(8000)
src_scale = blocks.multiply_const_ff(32767)
f2s = blocks.float_to_short()
enc = vocoder.g723_40_encode_sb()
dec = vocoder.g723_40_decode_bs()
s2f = blocks.short_to_float()
sink_scale = blocks.multiply_const_ff(1.0 / 32767.)
sink = audio.sink(8000)
tb.connect(src, src_scale, f2s, enc, dec, s2f, sink_scale, sink)
return tb
if __name__ == '__main__':
tb = build_graph()
tb.start()
input('Press Enter to exit: ')
tb.stop()
tb.wait()
|
michaelld/gnuradio
|
gr-vocoder/examples/g723_40_audio_loopback.py
|
Python
|
gpl-3.0
| 1,547
|
import structlog
from flask import Blueprint, request
from conditional.models.models import UserLog
from conditional.util.ldap import ldap_is_eboard
from conditional.util.ldap import ldap_is_rtp
from conditional.util.ldap import ldap_get_member
from conditional.util.flask import render_template
logger = structlog.get_logger()
log_bp = Blueprint('log_bp', __name__)
@log_bp.route('/logs')
def display_logs():
log = logger.new(request=request)
log.info('Display Logs')
username = request.headers.get('x-webauth-user')
account = ldap_get_member(username)
log.info(account.displayName)
if not ldap_is_eboard(account) and not ldap_is_rtp(account):
return "must be rtp or eboard", 403
logs = UserLog.query.all()
return render_template(request, "logs.html", logs=logs, username=username)
|
RamZallan/conditional
|
conditional/blueprints/logs.py
|
Python
|
mit
| 834
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sqlalchemy as sa
from sqlalchemy.ext import declarative
Base = declarative.declarative_base()
class Job(Base):
__tablename__ = "job"
id = sa.Column(sa.Binary(255), primary_key=True)
funcname = sa.Column(sa.Binary(255))
arg = sa.Column(sa.LargeBinary)
|
SpamapS/gearstore
|
gearstore/store/sqla_models.py
|
Python
|
apache-2.0
| 888
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest

from TestCase import TestCase
from WidgetTest import WidgetTest
from MenuTest import MenuTest
from SplitContainerTest import SplitContainerTest
from WindowTest import WindowTest
from ListContainerTest import ListContainerTest
from EventSignalCombinerTest import EventSignalCombinerTest
from FrameTest import FrameTest
from NameGadgetTest import NameGadgetTest
from LinearContainerTest import LinearContainerTest
from NodeGadgetTest import NodeGadgetTest
from GadgetTest import GadgetTest
from TabbedContainerTest import TabbedContainerTest
from NodeGraphTest import NodeGraphTest
from WidgetSignalTest import WidgetSignalTest
from EventLoopTest import EventLoopTest
from SplinePlugGadgetTest import SplinePlugGadgetTest
from TextWidgetTest import TextWidgetTest
from BoolWidgetTest import BoolWidgetTest
from ImageTest import ImageTest
from ButtonTest import ButtonTest
from CollapsibleTest import CollapsibleTest
from ImageGadgetTest import ImageGadgetTest
from StandardNodeGadgetTest import StandardNodeGadgetTest
from ColorSwatchTest import ColorSwatchTest
from VariantTest import VariantTest
from GridContainerTest import GridContainerTest
from NoduleTest import NoduleTest
from ProgressBarTest import ProgressBarTest
from ContainerWidgetTest import ContainerWidgetTest
from SelectionMenuTest import SelectionMenuTest
from StandardStyleTest import StandardStyleTest
from CompoundParameterValueWidgetTest import CompoundParameterValueWidgetTest
from EditorWidgetTest import EditorWidgetTest
from NumericSliderTest import NumericSliderTest
from RenderableGadgetTest import RenderableGadgetTest
from PlugValueWidgetTest import PlugValueWidgetTest
from PathListingWidgetTest import PathListingWidgetTest
from MultiLineTextWidgetTest import MultiLineTextWidgetTest
from LabelTest import LabelTest
from ScrolledContainerTest import ScrolledContainerTest
from ParameterValueWidgetTest import ParameterValueWidgetTest
from NodeEditorTest import NodeEditorTest
from ScriptWindowTest import ScriptWindowTest
from CompoundPlugValueWidgetTest import CompoundPlugValueWidgetTest
from CompoundEditorTest import CompoundEditorTest
from MultiSelectionMenuTest import MultiSelectionMenuTest
from StandardGraphLayoutTest import StandardGraphLayoutTest
from StandardNodeUITest import StandardNodeUITest
from ViewTest import ViewTest
from SliderTest import SliderTest
from NumericPlugValueWidgetTest import NumericPlugValueWidgetTest
from CompoundNumericPlugValueWidgetTest import CompoundNumericPlugValueWidgetTest
from NameLabelTest import NameLabelTest
from NameWidgetTest import NameWidgetTest
from GLWidgetTest import GLWidgetTest
from BookmarksTest import BookmarksTest
from SectionedCompoundDataPlugValueWidgetTest import SectionedCompoundDataPlugValueWidgetTest
from PlaybackTest import PlaybackTest
from SpacerGadgetTest import SpacerGadgetTest
from BoxUITest import BoxUITest
from ConnectionGadgetTest import ConnectionGadgetTest
from MessageWidgetTest import MessageWidgetTest
from ModuleTest import ModuleTest
from PlugLayoutTest import PlugLayoutTest
from ViewportGadgetTest import ViewportGadgetTest
from VectorDataWidgetTest import VectorDataWidgetTest
if __name__ == "__main__":
unittest.main()
|
davidsminor/gaffer
|
python/GafferUITest/__init__.py
|
Python
|
bsd-3-clause
| 5,081
|
"""Structure-oriented XBlocks."""
from xblock.core import XBlock
from xblock.fragment import Fragment
class ExtraViewsMixin(object):
    '''
    A mixin that redirects any attribute lookup ending in `_view` to
    `view()` unless that view is implemented explicitly, so e.g.
    `student_view` and `studio_view` both resolve to `view()`.

    This lets us exercise other views on structural elements: given a
    `<vertical_demo>` wrapping a few blocks under test, we can render
    `student_view`, `studio_view` (when developing for edx-platform),
    and any other `*_view` without defining each one.
    '''
def __getattr__(self, key):
if key.endswith('_view'):
return self.view
raise AttributeError(key)
class Sequence(XBlock, ExtraViewsMixin):
"""
XBlock that models edx-platform style sequentials.
WARNING: This is an experimental module, subject to future change or removal.
"""
has_children = True
def view(self, context=None):
"""Provide default student view."""
frag = Fragment()
child_frags = self.runtime.render_children(self, context=context)
frag.add_frags_resources(child_frags)
frag.add_content(self.runtime.render_template("sequence.html", children=child_frags))
frag.add_css_url('http://code.jquery.com/ui/1.9.2/themes/base/jquery-ui.css')
frag.add_javascript_url('http://ajax.googleapis.com/ajax/libs/jqueryui/1.9.2/jquery-ui.min.js')
        # Turn this block's children into jQuery UI tabs.
frag.add_javascript("""
function Sequence(runtime, element) {
$(element).children('.tabs').tabs();
};
""")
frag.initialize_js('Sequence')
return frag
class VerticalBlock(XBlock, ExtraViewsMixin):
"""A simple container."""
has_children = True
def view(self, context=None):
"""Provide default student view."""
result = Fragment()
child_frags = self.runtime.render_children(self, context=context)
result.add_frags_resources(child_frags)
result.add_css("""
.vertical {
border: solid 1px #888; padding: 3px;
}
""")
result.add_content(self.runtime.render_template("vertical.html", children=child_frags))
return result
class SidebarBlock(XBlock, ExtraViewsMixin):
"""A slightly-different vertical."""
has_children = True
def view(self, context=None):
"""Provide default student view."""
result = Fragment()
child_frags = self.runtime.render_children(self, context=context)
result.add_frags_resources(child_frags)
result.add_css("""
.sidebar {
border: solid 1px #888;
padding: 10px;
background: #ccc;
}
""")
html = []
html.append(u"<div class='sidebar'>")
for child in child_frags:
html.append(child.body_html())
html.append(u"</div>")
result.add_content("".join(html))
return result
|
Lyla-Fischer/xblock-sdk
|
sample_xblocks/basic/structure.py
|
Python
|
agpl-3.0
| 2,972
|
import datetime
import pickle
import os
from twisted.python import log
from twisted.enterprise import adbapi
#import twistedpg
from . import lookup
from .configuration import configuration
#dbpool = adbapi.ConnectionPool("twistedpg", "host=borch.frikanalen.no port=5433 user=postgres password=SECRET dbname=frikanalen") # fyll inn!
def date_to_cache_filename(date):
    """Return a schedule-pickle filename based on a datetime.

    Not tested.
    """
    return lookup.cache_path(os.path.join(
        configuration.schedule_cache_root,
        "plan%4i%02i%02i.pickle" % (date.year, date.month, date.day)))
def get_schedule_by_date(date):
    """Fetch a schedule from the pickle cache by date, or return None
    if no cache file exists for that date.

    Not tested (properly).
    """
    fn = date_to_cache_filename(date)
    try:
        with open(fn, "rb") as f:
            return pickle.load(f)
    except IOError:
        return None
if __name__ == "__main__":
    from twisted.internet import reactor
    import pprint
    date = datetime.date.today()
    #date = datetime.date(year=2011, month=1, day=1)
    # NOTE: cache_schedule() is neither defined nor imported in this module,
    # so this block raises NameError unless it is supplied elsewhere.
    cache_schedule(date, 14).addCallback(
        lambda x: pprint.pprint(get_schedule_by_date(date)[0])
    ).addCallback(lambda x: reactor.stop())
reactor.run()
#print date_to_cache_filename(datetime.date.today())
|
Frikanalen/mltplayout
|
src/vision/pgsched.py
|
Python
|
gpl-3.0
| 1,242
|
# Natural Language Toolkit: Parsers
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Steven Bird <stevenbird1@gmail.com>
# Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
#
"""
NLTK Parsers
Classes and interfaces for producing tree structures that represent
the internal organization of a text. This task is known as "parsing"
the text, and the resulting tree structures are called the text's
"parses". Typically, the text is a single sentence, and the tree
structure represents the syntactic structure of the sentence.
However, parsers can also be used in other domains. For example,
parsers can be used to derive the morphological structure of the
morphemes that make up a word, or to derive the discourse structure
for a set of utterances.
Sometimes, a single piece of text can be represented by more than one
tree structure. Texts represented by more than one tree structure are
called "ambiguous" texts. Note that there are actually two ways in
which a text can be ambiguous:
- The text has multiple correct parses.
- There is not enough information to decide which of several
candidate parses is correct.
However, the parser module does *not* distinguish these two types of
ambiguity.
The parse module defines ``ParserI``, a standard interface for parsing
texts; and two simple implementations of that interface,
``ShiftReduceParser`` and ``RecursiveDescentParser``. It also contains
sub-modules for specialized kinds of parsing:

 - ``nltk.parse.chart`` defines chart parsing, which uses dynamic
   programming to efficiently parse texts.
 - ``nltk.parse.pchart`` defines probabilistic chart parsing, which
   associates a probability with each parse.
"""
from nltk.parse.api import ParserI
from nltk.parse.chart import (ChartParser, SteppingChartParser, TopDownChartParser,
BottomUpChartParser, BottomUpLeftCornerChartParser,
LeftCornerChartParser)
from nltk.parse.featurechart import (FeatureChartParser, FeatureTopDownChartParser,
FeatureBottomUpChartParser,
FeatureBottomUpLeftCornerChartParser)
from nltk.parse.earleychart import (IncrementalChartParser, EarleyChartParser,
IncrementalTopDownChartParser,
IncrementalBottomUpChartParser,
IncrementalBottomUpLeftCornerChartParser,
IncrementalLeftCornerChartParser,
FeatureIncrementalChartParser,
FeatureEarleyChartParser,
FeatureIncrementalTopDownChartParser,
FeatureIncrementalBottomUpChartParser,
FeatureIncrementalBottomUpLeftCornerChartParser)
from nltk.parse.pchart import (BottomUpProbabilisticChartParser, InsideChartParser,
RandomChartParser, UnsortedChartParser,
LongestChartParser)
from nltk.parse.recursivedescent import (RecursiveDescentParser,
SteppingRecursiveDescentParser)
from nltk.parse.shiftreduce import (ShiftReduceParser, SteppingShiftReduceParser)
from nltk.parse.util import load_parser, TestGrammar, extract_test_sentences
from nltk.parse.viterbi import ViterbiParser
from nltk.parse.dependencygraph import DependencyGraph
from nltk.parse.projectivedependencyparser import (ProjectiveDependencyParser,
ProbabilisticProjectiveDependencyParser)
from nltk.parse.nonprojectivedependencyparser import (NonprojectiveDependencyParser,
NaiveBayesDependencyScorer,
ProbabilisticNonprojectiveParser)
from nltk.parse.malt import MaltParser
from nltk.parse.evaluate import DependencyEvaluator
from nltk.parse.transitionparser import TransitionParser
from nltk.parse.bllip import BllipParser
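
if __name__ == '__main__':
    # A minimal usage sketch (editor's addition, not part of the original
    # module), assuming NLTK 3's CFG.fromstring API: parse a toy sentence
    # with the chart parser exported above.
    from nltk import CFG
    grammar = CFG.fromstring("""
        S -> NP VP
        NP -> 'the' N
        VP -> V
        N -> 'dog'
        V -> 'barks'
    """)
    for tree in ChartParser(grammar).parse(['the', 'dog', 'barks']):
        print(tree)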
|
adazey/Muzez
|
libs/nltk/parse/__init__.py
|
Python
|
gpl-3.0
| 4,269
|
# -*- coding: utf-8 -*-
from setuptools import setup
import os
here = os.path.abspath(os.path.dirname(__file__))
DESCRIPTION = open(os.path.join(here, 'DESCRIPTION')).read()
version = '0.0.1'
setup(name='ssp',
version=version,
description="System Service Processor communication library",
long_description=DESCRIPTION,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Systems Administration',
],
author='Yury Konovalov',
author_email='YKonovalov@gmail.com',
url='https://github.com/mdcic/ssp',
license='GPLv3+',
packages=[
'ssp', 'ssp.remote', 'ssp.chassis',
'ssp.chassis.common', 'ssp.chassis.dell',
'ssp.chassis.ibm', 'ssp.chassis.wbem',
'ssp.chassis.ipmi'
],
scripts=['tools/ssp-chassis-scanner'],
include_package_data=True,
install_requires=[
'pywbem',
'paramiko'
]
)
|
mdcic/ssp
|
setup.py
|
Python
|
gpl-3.0
| 1,270
|
"""
Interfaces and classes for GenericFunction.
Needs to be subclassed before being used.
@todo: Change GenericFunction to my enum thing
"""
import abc
from ..valueabc.interface_type import InterfaceType
from . import support
_NOT_IMPLEMENTED = lambda self, *args, **kwargs: NotImplemented
class GenericFunctionInterface(InterfaceType):
"""
Operator (non-instanced, function-like class).
In .invoke(): if no invocation was possible, and you want to attempt default,
then raise GenericsNoDispatch
"""
interface = abc.abstractproperty(_NOT_IMPLEMENTED) # type: InterfaceType
invoke = abc.abstractproperty(_NOT_IMPLEMENTED) # type: Callable[[AnyArgs], Any]
#exception = abc.abstractproperty(_NOT_IMPLEMENTED) # type: Exception
#message = abc.abstractmethod(_NOT_IMPLEMENTED) # type: Callable[[AnyArgs], Any]
default = None # type: Optional[Callable[[AnyArgs], Any]]
# Mixin
def __call__(self, subject, *args, **kwargs):
"""
Strategy function, when a generic function is called.
Complication: two distinct error conditions are possible:
(error #1): subject fails to meet interface
(error #2): subject met interface, but invoke failed
"""
        if isinstance(subject, self.interface):
            # dispatch
            return self.invoke(subject, *args, **kwargs)
else:
# try default
default = getattr(self, 'default', None)
if callable(default):
return default(subject, *args, **kwargs)
else:
raise support.GenericsInterfaceFailed(str.format(
"'subject' does not satisfy generics interface "
"'{interface_name}', and no default exists.",
interface_name=support.get_name(self.interface)
))
class GenericFunction(GenericFunctionInterface):
    def __call__(self, subject, *args, **kwargs):
        """
        Complication: two distinct error conditions are possible:
            (error #1): subject fails to meet interface
            (error #2): subject met interface, but invoke failed
        """
        if not isinstance(subject, self.interface):
            default = getattr(self, 'default', None)
            if callable(default):
                return default(subject, *args, **kwargs)
            raise support.GenericsInterfaceFailed(str.format(
                "'subject' does not satisfy generics interface "
                "'{interface_name}', and no default exists.",
                interface_name=support.get_name(self.interface)
            ))
        return self.invoke(subject, *args, **kwargs)
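
# A hypothetical subclass sketch (editor's addition): `StrUpper` and its
# interface are illustrative, not part of this package. It shows the
# intended pattern (set `interface`, implement `invoke`, and optionally
# `default`); it is left as a comment because InterfaceType's metaclass
# requirements are defined outside this module.
#
#     class StrUpper(GenericFunction):
#         interface = str
#         def invoke(self, subject):
#             return subject.upper()
#         def default(self, subject, *args, **kwargs):
#             return str(subject).upper()
#
#     StrUpper()('abc')  # -> 'ABC'
#     StrUpper()(42)     # -> '42', via default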
|
OaklandPeters/pyinterfaces
|
pyinterfaces/generics/generic_function.py
|
Python
|
mit
| 2,964
|
class Tree(object):
'''An object representing a tree path for a given commit'''
ROOT = None
    def __init__(self, repo, ref, path=ROOT, size=None):
self.repo = repo
self.ref = ref
self.path = path
self.size = size
@property
def trees(self):
return self.repo._trees(self.ref, self.path)
@property
def features(self):
return self.repo.features(self.ref, self.path)
@property
def featuretype(self):
return self.repo.featuretype(self.ref, self.path)
@property
def children(self):
return self.repo.children(self.ref, self.path)
@property
def count(self):
return self.repo.count(self.ref, self.path)
def exportshp(self, shapefile):
'''exports this tree to the specified shapefile'''
self.repo.exportshp(self.ref, self.path, shapefile)
    def __str__(self):
        # self.path may be Tree.ROOT (None) for the repository root
        return self.ref + ":" + (self.path or "")
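
# A hypothetical usage sketch (editor's addition); `Repository` and the
# 'master' ref are illustrative, not part of this module:
#
#     repo = Repository('/path/to/repo')
#     tree = Tree(repo, 'master', 'parks')
#     print(tree)                  # -> "master:parks"
#     tree.exportshp('parks.shp')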
|
roscoeZA/GeoGigSync
|
src/geogigpy/tree.py
|
Python
|
cc0-1.0
| 1,018
|