| code (string, 2–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
#
# Copyright 2007 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base class for implementing API proxy stubs."""
import logging
import random
import threading
from google.appengine.api import apiproxy_rpc
from google.appengine.api import request_info
from google.appengine.runtime import apiproxy_errors
MAX_REQUEST_SIZE = 1 << 20
REQ_SIZE_EXCEEDS_LIMIT_MSG_TEMPLATE = ('The request to API call %s.%s() was too'
' large.')
logging.getLogger('google.appengine.api.stubs').setLevel(logging.INFO)
class APIProxyStub(object):
"""Base class for implementing API proxy stub classes.
To implement an API proxy stub:
- Extend this class.
- Override `__init__` to pass in the appropriate default service name.
- Implement service methods as `_Dynamic_<method>(request, response)`.
"""
_ACCEPTS_REQUEST_ID = False
THREADSAFE = False
def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE,
request_data=None):
"""Constructor.
Args:
service_name: Service name expected for all calls.
max_request_size: `int`. Maximum allowable size of the incoming request.
An `apiproxy_errors.RequestTooLargeError` will be raised if the inbound
request exceeds this size. Default is 1 MB. Subclasses can override it.
request_data: A `request_info.RequestInfo` instance used to look up
state associated with the request that generated an API call.
"""
self.__service_name = service_name
self.__max_request_size = max_request_size
self.request_data = request_data or request_info._local_request_info
self._mutex = threading.RLock()
self.__error = None
self.__error_dict = {}
def CreateRPC(self):
"""Creates RPC object instance.
Returns:
An instance of RPC.
"""
return apiproxy_rpc.RPC(stub=self)
def CheckRequest(self, service, call, request):
"""Check if a request meet some common restrictions.
Args:
service: Must be the name provided to `service_name` of the constructor.
call: A string representing the RPC to make.
request: A protocol buffer of the type corresponding to `call`.
"""
assert service == self.__service_name, ('Expected "%s" service name, '
'was "%s"' % (self.__service_name,
service))
if request.ByteSize() > self.__max_request_size:
raise apiproxy_errors.RequestTooLargeError(
REQ_SIZE_EXCEEDS_LIMIT_MSG_TEMPLATE % (service, call))
messages = []
assert request.IsInitialized(messages), messages
def MakeSyncCall(self, service, call, request, response, request_id=None):
"""The main RPC entry point.
Args:
service: Must be the name provided to `service_name` of the constructor.
call: A string representing the RPC to make. Must be among the
underlying service's methods and implemented by `_Dynamic_<call>`.
request: A protocol buffer of the type corresponding to `call`.
response: A protocol buffer of the type corresponding to `call`.
request_id: A unique string identifying the request associated with the
API call.
"""
self.CheckRequest(service, call, request)
exception_type, frequency = self.__error_dict.get(call, (None, None))
if exception_type and frequency:
if random.random() <= frequency:
raise exception_type
if self.__error:
if random.random() <= self.__error_rate:
raise self.__error
method = getattr(self, '_Dynamic_' + call)
if self._ACCEPTS_REQUEST_ID:
method(request, response, request_id)
else:
method(request, response)
def SetError(self, error, method=None, error_rate=1):
"""Set an error condition that may be raised when calls made to stub.
If a method is specified, the error will only apply to that call.
The error rate is applied to the method specified or all calls if
method is not set.
Args:
error: An instance of `apiproxy_errors.Error` or `None` for no error.
method: A string representing the method that the error will affect.
error_rate: A number in `[0, 1]` giving the probability of raising the
error; defaults to `1`.
"""
assert error is None or isinstance(error, apiproxy_errors.Error)
if method and error:
self.__error_dict[method] = error, error_rate
else:
self.__error_rate = error_rate
self.__error = error
def Synchronized(method):
"""Decorator to acquire a mutex around an `APIProxyStub` method.
Args:
method: An unbound method of `APIProxyStub` or a subclass.
Returns:
The `method`, altered such that it acquires `self._mutex` throughout its
execution.
"""
def WrappedMethod(self, *args, **kwargs):
with self._mutex:
return method(self, *args, **kwargs)
return WrappedMethod
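# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of the subclassing contract described in the class
# docstring, plus error injection via SetError(). `EchoServiceStub`, the
# 'echo' service, and the request/response protobufs are hypothetical.
#
# class EchoServiceStub(APIProxyStub):
#     def __init__(self):
#         super(EchoServiceStub, self).__init__('echo')
#
#     def _Dynamic_Echo(self, request, response):
#         response.payload = request.payload
#
# stub = EchoServiceStub()
# stub.SetError(apiproxy_errors.Error('flaky'), method='Echo', error_rate=0.5)
# stub.MakeSyncCall('echo', 'Echo', request, response)  # fails ~50% of calls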
| GoogleCloudPlatform/appengine-python-standard | src/google/appengine/api/apiproxy_stub.py | Python | apache-2.0 | 5,483 |
"""The StarLine component."""
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .account import StarlineAccount
from .const import (
DOMAIN,
PLATFORMS,
SERVICE_UPDATE_STATE,
SERVICE_SET_SCAN_INTERVAL,
CONF_SCAN_INTERVAL,
DEFAULT_SCAN_INTERVAL,
)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured StarLine."""
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up the StarLine device from a config entry."""
account = StarlineAccount(hass, config_entry)
await account.update()
if not account.api.available:
raise ConfigEntryNotReady
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
hass.data[DOMAIN][config_entry.entry_id] = account
device_registry = await hass.helpers.device_registry.async_get_registry()
for device in account.api.devices.values():
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id, **account.device_info(device)
)
for domain in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, domain)
)
async def async_set_scan_interval(call):
"""Service for set scan interval."""
options = dict(config_entry.options)
options[CONF_SCAN_INTERVAL] = call.data[CONF_SCAN_INTERVAL]
hass.config_entries.async_update_entry(entry=config_entry, options=options)
hass.services.async_register(DOMAIN, SERVICE_UPDATE_STATE, account.update)
hass.services.async_register(
DOMAIN,
SERVICE_SET_SCAN_INTERVAL,
async_set_scan_interval,
schema=vol.Schema(
{
vol.Required(CONF_SCAN_INTERVAL): vol.All(
vol.Coerce(int), vol.Range(min=10)
)
}
),
)
config_entry.add_update_listener(async_options_updated)
await async_options_updated(hass, config_entry)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
for domain in PLATFORMS:
await hass.config_entries.async_forward_entry_unload(config_entry, domain)
account: StarlineAccount = hass.data[DOMAIN][config_entry.entry_id]
account.unload()
return True
async def async_options_updated(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Triggered by config entry options updates."""
account: StarlineAccount = hass.data[DOMAIN][config_entry.entry_id]
scan_interval = config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
account.set_update_interval(scan_interval)
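# --- Illustrative note (not part of the original component) ---
# A minimal sketch of how the SERVICE_SET_SCAN_INTERVAL schema above behaves;
# the literal key "scan_interval" stands in for CONF_SCAN_INTERVAL and is an
# assumption:
#
# import voluptuous as vol
# schema = vol.Schema(
#     {vol.Required("scan_interval"): vol.All(vol.Coerce(int), vol.Range(min=10))}
# )
# schema({"scan_interval": "30"})  # -> {"scan_interval": 30} (coerced to int)
# schema({"scan_interval": 5})     # raises vol.MultipleInvalid (below min=10)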
| joopert/home-assistant | homeassistant/components/starline/__init__.py | Python | apache-2.0 | 2,876 |
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from models import Host
from models import InventoryJob
from models import InventoryJobHistory
from constants import JobStatus
from handlers.loader import get_inventory_handler_class
from context import InventoryContext
from utils import create_log_directory
from multi_process import WorkUnit
import traceback
import sys
class InventoryWorkUnit(WorkUnit):
def __init__(self, host_id, job_id):
WorkUnit.__init__(self)
self.host_id = host_id
self.job_id = job_id
def get_unique_key(self):
return self.host_id
def start(self, db_session, logger, process_name):
host = None
inventory_job = None
try:
inventory_job = db_session.query(InventoryJob).filter(InventoryJob.id == self.job_id).first()
if inventory_job is None:
logger.error('Unable to retrieve inventory job: %s' % self.job_id)
return
host_id = inventory_job.host_id
host = db_session.query(Host).filter(Host.id == host_id).first()
if host is None:
logger.error('Unable to retrieve host: %s' % host_id)
return
ctx = InventoryContext(db_session, host, inventory_job)
handler_class = get_inventory_handler_class(ctx)
if handler_class is None:
logger.error('Unable to get handler for %s, inventory job %s', host.software_platform, self.job_id)
return
inventory_job.set_status(JobStatus.IN_PROGRESS)
inventory_job.session_log = create_log_directory(host.connection_param[0].host_or_ip, inventory_job.id)
db_session.commit()
handler = handler_class()
handler.execute(ctx)
if ctx.success:
self.archive_inventory_job(db_session, inventory_job, JobStatus.COMPLETED)
else:
# removes the host object as host.packages may have been modified.
db_session.expunge(host)
self.archive_inventory_job(db_session, inventory_job, JobStatus.FAILED)
# Reset the pending retrieval flag
inventory_job.request_update = False
db_session.commit()
except Exception:
try:
self.log_exception(logger, host)
self.archive_inventory_job(db_session, inventory_job, JobStatus.FAILED, trace=sys.exc_info())
# Reset the pending retrieval flag
inventory_job.request_update = False
db_session.commit()
except Exception:
self.log_exception(logger, host)
finally:
db_session.close()
def log_exception(self, logger, host):
logger.exception('InventoryManager hit exception - hostname = %s, inventory job = %s',
host.hostname if host is not None else 'Unknown', self.job_id)
def archive_inventory_job(self, db_session, inventory_job, job_status, trace=None):
inventory_job.set_status(job_status)
hist = InventoryJobHistory()
hist.host_id = inventory_job.host_id
hist.set_status(job_status)
hist.session_log = inventory_job.session_log
if trace is not None:
hist.trace = traceback.format_exc()
db_session.add(hist)
| smjurcak/csm | csmserver/work_units/inventory_work_unit.py | Python | apache-2.0 | 4,795 |
'''
Menu for Community Scripts
Author: Christoph Stoettner
Mail: christoph.stoettner@stoeps.de
Documentation: http://scripting101.stoeps.de
Version: 5.0.1
Date: 09/19/2015
License: Apache 2.0
History: Changed by Jan Alderlieste
'''
import sys
import os
import ibmcnx.functions
import ibmcnx.menu.MenuClass
import java
from java.lang import String
from java.util import HashSet
from java.util import HashMap
# Only load commands if not initialized directly (call from menu)
# if __name__ == "__main__":
# execfile( "ibmcnx/loadCnxApps.py" )
global globdict
globdict = globals()
def docDocumentation():
print '###########################################################'
print '# #'
print '# Not implemented in the menu! #'
print '# #'
print '# call with: #'
print '# wsadmin.sh -lang jython -f ibmcnx/doc/Documentation.py #'
print '# #'
print '###########################################################'
# execfile( 'ibmcnx/doc/Documentation.py', globdict )
global globdict
globdict = globals()
doc = ibmcnx.menu.MenuClass.cnxMenu()
doc.AddItem('Show JVM Heap Sizes (ibmcnx/doc/JVMHeap.py)',
ibmcnx.functions.docJVMHeap)
doc.AddItem('Show JVM Settings (ibmcnx/doc/JVMSettings.py)',
ibmcnx.functions.docJVMSettings)
doc.AddItem('Show JVM Trace Settings (ibmcnx/doc/traceSettings.py)',
ibmcnx.functions.doctracesettings)
doc.AddItem('Show SystemOut/Err Log Sizes (ibmcnx/doc/LogFiles.py)',
ibmcnx.functions.docLogFiles)
doc.AddItem('Show all used ports (ibmcnx/doc/Ports.py)',
ibmcnx.functions.docPorts)
doc.AddItem('Show all used variables (ibmcnx/doc/Variables.py)',
ibmcnx.functions.docVariables)
doc.AddItem('Show all j2ee roles of inst. applications (ibmcnx/doc/j2eeroles.py)',
ibmcnx.functions.docj2eeroles)
doc.AddItem('Show all datasources and parameters (ibmcnx/doc/DataSources.py)',
ibmcnx.functions.docdatasources)
doc.AddItem('Show users with employee.extended role (ibmcnx/doc/ProfRoleID.py)',
ibmcnx.functions.docroleid)
doc.AddItem('Show inactive user profiles (ibmcnx/doc/ProfilesInactive.py)',
ibmcnx.functions.docinactiveprof)
doc.AddItem(
'Create a file with all documentation (ibmcnx/doc/Documentation.py)', docDocumentation)
doc.AddItem('Back to Main Menu (ibmcnx/menu/cnxmenu.py)',
ibmcnx.functions.cnxBackToMainMenu)
doc.AddItem("Exit", ibmcnx.functions.bye)
state_doc = 'True'
menutitle = "HCL Connections Documentation"
while state_doc == 'True':
count = len(doc.menuitems)
doc.Show(menutitle)
###########################
# # Robust error handling ##
# # only accept int ##
###########################
## Wait for valid input in while...not ###
is_valid_doc = 0
while not is_valid_doc:
try:
inputstring = '\tEnter your choice [1-' + str(count) + ']: '
n = int(raw_input(inputstring))
if n <= count and n > 0:
is_valid_doc = 1 # set it to 1 to validate input and to terminate the while..not loop
else:
print ("'%s' is not a valid menu option.") % n
except ValueError, e:
print ("'%s' is not a valid integer." % e.args[0].split(": ")[1])
# n = input( "your choice> " )
doc.Do(n - 1)
| stoeps13/ibmcnx2 | ibmcnx/menu/docs.py | Python | apache-2.0 | 3,638 |
import sys
import logging
logger = logging.getLogger(__name__)
def configure_logging():
root = logging.getLogger()
root.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s %(name)12s %(levelname)7s - %(message)s')
handler.setFormatter(formatter)
root.addHandler(handler)
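# --- Illustrative usage (not part of the original module) ---
# A minimal sketch: call configure_logging() once at startup; module-level
# loggers such as `logger` above then emit to stdout via the root handler.
if __name__ == '__main__':
    configure_logging()
    logger.debug('logging configured')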
| edouard-lopez/parlr | config.py | Python | apache-2.0 | 394 |
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2015,2016,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the update network command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestUpdateNetwork(TestBrokerCommand):
def test_100_update(self):
command = ["update", "network", "--network", "excx-net",
"--network_environment", "excx",
"--building", "ut", "--type", "dmz-net",
"--side", "b", "--comments", "New network comments"]
self.noouttest(command)
def test_110_verify(self):
command = ["show", "network", "--network", "excx-net",
"--network_environment", "excx"]
out = self.commandtest(command)
self.matchoutput(out, "Comments: New network comments", command)
self.matchoutput(out, "Sysloc: ut.ny.na", command)
self.matchoutput(out, "Network Type: dmz-net", command)
self.matchoutput(out, "Side: b", command)
def test_120_update_rename(self):
command = ["update", "network", "--network", "netsvcmap",
"--rename_to", "rename-test", "--comments", "New comment"]
self.noouttest(command)
def test_121_update_rename_verify(self):
command = ["show", "network", "--network", "rename-test"]
out = self.commandtest(command)
self.matchoutput(out, "Network: rename-test", command)
self.matchoutput(out, "Comments: New comment", command)
def test_122_update_rename_existing(self):
net = self.net["np06bals03_v103"]
command = ["update", "network", "--network", "rename-test",
"--rename_to", "np06bals03_v103"]
out, err = self.successtest(command)
self.matchoutput(err, "WARNING: Network name {} is already used for address {}/{}."
.format("np06bals03_v103", net.ip, net.prefixlen), command)
command = ["update", "network", "--ip", net.ip, "--rename_to", "netsvcmap"]
self.noouttest(command)
def test_200_update_utdmz1(self):
net = self.net["ut_dmz1"]
command = ["update_network",
"--ip=%s" % net.ip,
"--network_compartment="]
self.noouttest(command)
def test_201_verify_utdmz1(self):
command = ["search", "network", "--network_compartment", "perimeter.ut"]
self.noouttest(command)
# There should be a test_constraint_network.py one day...
def test_900_delinuse(self):
net = self.net["unknown0"]
command = ["del", "network", "--ip", net.ip]
out = self.badrequesttest(command)
self.matchoutput(out, "Network %s [%s] is still in use" %
(net.name, net), command)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestUpdateNetwork)
unittest.TextTestRunner(verbosity=2).run(suite)
| quattor/aquilon | tests/broker/test_update_network.py | Python | apache-2.0 | 3,647 |
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import alembic
from oslo_serialization import jsonutils
import sqlalchemy as sa
from nailgun.db import db
from nailgun.db import dropdb
from nailgun.db.migration import ALEMBIC_CONFIG
from nailgun.db.migration import make_alembic_config_from_extension
from nailgun.extensions.consts import extensions_migration_buffer_table_name
from nailgun.test import base
from volume_manager.extension import VolumeManagerExtension
_core_test_revision = '1e50a4903910'
_extension_test_revision = '086cde3de7cf'
def setup_module():
dropdb()
# Run core migration in order to create buffer table
alembic.command.upgrade(ALEMBIC_CONFIG, _core_test_revision)
prepare()
# Run extension migrations
ext_alembic_config = make_alembic_config_from_extension(
VolumeManagerExtension)
alembic.command.upgrade(ext_alembic_config, _extension_test_revision)
def prepare():
meta = base.reflect_db_metadata()
# Fill in migration table with data
db.execute(
meta.tables[extensions_migration_buffer_table_name].insert(),
[{'extension_name': 'volume_manager',
'data': jsonutils.dumps({'node_id': 1, 'volumes': [{'volume': 1}]})},
{'extension_name': 'volume_manager',
'data': jsonutils.dumps({'node_id': 2, 'volumes': [{'volume': 2}]})},
{'extension_name': 'some_different_extension',
'data': 'some_data'}])
db.commit()
class TestVolumeManagerExtensionAddVolumesTable(base.BaseAlembicMigrationTest):
@classmethod
def setUpClass(cls):
setup_module()
def test_add_volumes_table(self):
result = db.execute(
sa.select([
self.meta.tables['volume_manager_node_volumes'].c.node_id,
self.meta.tables['volume_manager_node_volumes'].c.volumes]))
records = list(result)
node_ids = [r[0] for r in records]
self.assertItemsEqual(node_ids, [1, 2])
volumes = [jsonutils.loads(r[1]) for r in records]
self.assertItemsEqual(
[[{'volume': 1}], [{'volume': 2}]],
volumes)
result = db.execute(
sa.select([
self.meta.tables[
extensions_migration_buffer_table_name].c.extension_name,
self.meta.tables[
extensions_migration_buffer_table_name].c.data]))
self.assertEqual(
list(result),
[('some_different_extension', 'some_data')])
| gitfred/fuel-extension-volume-manager | volume_manager/tests/test_migration_volume_manager_extension_001_add_volumes_table.py | Python | apache-2.0 | 3,073 |
#!/usr/bin/env python
# encoding: utf-8
"""
Author: Isabel Restrepo
August 12, 2012
Compute the rigid transformation between two point clouds using feature correspondences
"""
import os
import sys
import glob
import time
from optparse import OptionParser
from xml.etree.ElementTree import ElementTree
from vpcl_adaptor import *
from boxm2_utils import *
parser = OptionParser()
parser.add_option("--srcRoot", action="store", type="string", dest="src_scene_root", help="root folder, this is where the .ply input and output files should reside")
parser.add_option("--tgtRoot", action="store", type="string", dest="tgt_scene_root", help="root folder, this is where the .ply input and output files should reside")
parser.add_option("--basenameIn", action="store", type="string", dest="basename_in", help="basename of .ply file")
parser.add_option("-r", "--radius", action="store", type="int", dest="radius", help="radius (multiple of resolution)");
parser.add_option("-p", "--percent", action="store", type="int", dest="percentile", help="data percentile");
parser.add_option("-d", "--descriptor", action="store", type="string", dest="descriptor_type", help="name of the descriptor i.e FPFH");
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="verbose - if false std is redirected to a logfile");
(opts, args) = parser.parse_args()
print opts
print args
#path to where all scenes are
src_scene_root=opts.src_scene_root;
tgt_scene_root=opts.tgt_scene_root;
radius = opts.radius; #gets multiplied by the resolution of the scene
percentile = opts.percentile;
descriptor_type = opts.descriptor_type;
verbose=opts.verbose;
if not verbose:
vpcl_batch.set_stdout("./logs/log_" + descriptor_type + 'percetile' + str(percentile) +'.log')
src_fname = src_scene_root + "/" + opts.basename_in + "_" + str(percentile) + ".ply"
src_features_dir = src_scene_root + "/" + descriptor_type + "_" + str(radius);
src_features_fname = src_features_dir + "/descriptors_" + str(percentile) + ".pcd";
tgt_fname = tgt_scene_root + "/" + opts.basename_in + "_" + str(percentile) + ".ply"
tgt_features_dir = tgt_scene_root + "/" + descriptor_type + "_" + str(radius);
tgt_features_fname = tgt_features_dir + "/descriptors_" + str(percentile) + ".pcd";
tform_cloud_fname = tgt_features_dir + "/tform_cloud_" + str(percentile) + ".pcd";
tform_fname = tgt_features_dir + "/transformation_" + str(percentile) + ".txt";
if verbose :
print src_fname, src_features_fname
print tgt_fname, tgt_features_fname, tform_cloud_fname, tform_fname
compute_rigid_transformation(src_fname, tgt_fname, src_features_fname, tgt_features_fname, tform_cloud_fname, tform_fname, descriptor_type);
if not verbose:
vpcl_batch.reset_stdout();
print "Done"
sys.exit(0)
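# --- Illustrative invocation (not part of the original script) ---
# All paths and values below are assumptions, shown only to document the
# expected command line:
#
# python compute_rigid_transform.py \
#     --srcRoot /data/src_scene --tgtRoot /data/tgt_scene \
#     --basenameIn cloud --radius 4 --percent 99 --descriptor FPFH -v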
| mirestrepo/voxels-at-lems | registration_eval/unused/compute_rigid_transform.py | Python | bsd-2-clause | 2,803 |
# -*- coding: ISO-8859-15 -*-
from core.Uusipuu import UusipuuModule
import random, time
class Module(UusipuuModule):
def startup(self):
if 'memo' not in self.config:
self.config['memo'] = {}
def privmsg(self, user, target, msg):
if target != self.channel:
return
pieces = msg.strip().split(' ', 1)
if len(pieces) != 2:
return
cmd = pieces[0].strip()
params = pieces[1].strip()
if cmd == '??':
self.meta_show(user, params)
elif cmd == '?!':
self.meta_searchkey(user, params.strip())
elif cmd == '?#':
self.meta_searchvalue(user, params.strip())
def cmd_memo(self, user, target, params):
pieces = params.strip().split(' ', 1)
if len(pieces) != 2:
self.chanmsg('Insufficient parameters')
return
cmd = pieces[0].strip()
params = pieces[1].strip()
if cmd == 'add':
self.meta_addmemo(user, params)
elif cmd in ['del', 'delete', 'remove']:
self.meta_delmemo(user, params)
elif cmd == 'show':
self.meta_show(user, params)
elif cmd == 'info':
self.meta_info(user, params)
elif cmd in ['search', 'searchkey', 'sk']:
self.meta_searchkey(user, params.strip())
elif cmd in ['searchvalue', 'sv']:
self.meta_searchvalue(user, params.strip())
def meta_show(self, user, key):
self.do_show(user, key)
def meta_info(self, user, key):
self.do_show(user, key)
self.do_info(user, key)
def meta_searchkey(self, user, key):
nick = user.split('!', 1)[0]
keys = [x for x in self.config['memo'] if x.count(key)]
if not keys:
self.chanmsg('No keys found matching "%s"' % (key))
return
self.do_show(user, random.choice(keys))
def meta_searchvalue(self, user, value):
nick = user.split('!', 1)[0]
keys = [x for x in self.config['memo'] \
if self.config['memo'][x]['value'].count(value)]
if not keys:
self.chanmsg('No values found matching "%s"' % (value))
return
self.do_show(user, random.choice(keys))
def do_show(self, user, key):
nick = user.split('!', 1)[0]
if key not in self.config['memo']:
self.chanmsg('Entry not found (%s)' % key)
return
self.chanmsg('%s: %s' % (key, str(self.config['memo'][key]['value'])))
def do_info(self, user, key):
if key not in self.config['memo']:
return
self.chanmsg('%s created by %s [%s]' % (key,
self.config['memo'][key]['user'],
time.ctime(self.config['memo'][key]['added'])))
def meta_addmemo(self, user, params):
nick = user.split('!', 1)[0]
pieces = params.strip().split(' ', 1)
if len(pieces) < 2:
self.chanmsg('Insufficient parameters')
return
key, value = pieces[0].strip(), pieces[1].strip()
if key in self.config['memo']:
self.chanmsg('%s: An entry by that name already exists' % nick)
return
self.config['memo'][key] = {
'value': value,
'user': user,
'added': int(time.time()),
}
self.save()
self.chanmsg('Memo entry "%s" successfully added' % (str(key)))
def meta_delmemo(self, user, params):
nick = user.split('!', 1)[0]
pieces = params.strip().split(' ', 1)
key = pieces[0].strip()
if key not in self.config['memo']:
self.chanmsg('Entry not found (%s)' % key)
return
del self.config['memo'][key]
self.save()
self.chanmsg('Memo entry "%s" successfully removed' % (key))
# vim: set et sw=4:
| desaster/uusipuu | modules/memo.py | Python | bsd-2-clause | 3,945 |
#from django.conf import settings
NOTICE_FROM_ANONYMOUS = 1
| anscii/django-modelnotice | modelnotice/default_settings.py | Python | bsd-2-clause | 60 |
'''A convenient class for parsing HTML pages.'''
from __future__ import unicode_literals
from HTMLParser import HTMLParser
import logging
import re
from RSSvk.core import Error
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class HTMLPageParser(HTMLParser):
'''A convenient class for parsing HTML pages.'''
tag_name_regex = '[a-zA-Z][-.a-zA-Z0-9:_]*'
'''A regular expression for tag name.'''
attribute_name_regex = tag_name_regex
'''A regular expression for attribute name.'''
tag_attrs_regex = re.sub(r'\s*', '', r'''
(?:\s+
''' + attribute_name_regex + r'''
(?:\s*=\s*
(?:
'[^']*'
|"[^"]*"
|[^'"/>\s]+
)
)?
)*
''')
'''A regular expression for tag attributes.'''
script_regex = re.compile('<script' + tag_attrs_regex + '>.*?</script>', re.DOTALL | re.IGNORECASE)
'''A regular expression for matching scripts.'''
__invalid_tag_attr_spacing_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
# Two attributes without a space between them
\s+ # whitespace before attribute name
''' + attribute_name_regex + r''' # attribute name
\s*=\s* # value indicator
(?:
'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
)
)
([^\s>]) # Do not include / to make the preparation replacement for __invalid_tag_attr_regex
''', re.VERBOSE)
'''
A regular expression for matching a common error in specifying tag
attributes.
'''
__invalid_tag_attr_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
)
\s+(?:
# Invalid characters instead of an attribute
[^\sa-zA-Z/>]\S*
|
# Sole slash
/\s
|
# Invalid characters starting from slash instead of an attribute
/[^>\s]+
)
''', re.VERBOSE)
'''
A regular expression for matching HTML errors like:
<a class="app photo"/app2322149_58238998?from_id=2381857&loc=addneighbour onclick="return cur.needLoginBox()">
'''
__empty_tags = 'area|base|basefont|br|col|frame|hr|img|input|link|meta|param'
'''A list of all HTML empty tags.'''
__misopened_tag_regex = re.compile(r'<(' + __empty_tags + tag_attrs_regex + r')\s*>', re.IGNORECASE)
'''A regular expression for matching opened tags that should be closed.'''
__tag_stack = None
'''A stack of currently opened HTML tags.'''
__cur_data = None
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
def __init__(self):
HTMLParser.__init__(self)
def handle_charref(self, name):
'''Handles a character reference of the form &#ref;.'''
self.__accumulate_data('&#' + name + ';')
def handle_data(self, data):
'''Handles data.'''
self.__accumulate_data(data)
def handle_endtag(self, tag_name):
'''Handles end of a tag.'''
self.__handle_data_if_exists()
if self.__get_cur_tag()['name'] == tag_name:
self.__close_tag(self.__tag_stack.pop())
else:
for tag_id in xrange(len(self.__tag_stack) - 1, -1, -1):
if self.__tag_stack[tag_id]['name'] == tag_name:
for tag in reversed(self.__tag_stack[tag_id + 1:]):
self.__close_tag(tag, forced = True)
self.__tag_stack.pop()
self.__close_tag(self.__tag_stack.pop())
break
else:
LOG.debug('Dropping excess end tag "%s"...', tag_name)
def handle_entityref(self, name):
'''Handles a general entity reference of the form &name;.'''
self.__accumulate_data('&' + name + ';')
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
LOG.debug('%s', data)
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
LOG.debug('<%s %s%s>', tag['name'], attrs, '/' if empty else '')
tag['new_tag_handler'] = self.handle_root
tag['data_handler'] = self.handle_root_data
tag['end_tag_handler'] = self.handle_root_end
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
LOG.debug('</%s>', tag['name'])
def handle_startendtag(self, tag, attrs):
'''Handles start of an XHTML-style empty tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, True)
def handle_starttag(self, tag, attrs):
'''Handles start of a tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, False)
def reset(self):
'''Resets the parser.'''
HTMLParser.reset(self)
self.__tag_stack = [{
# Add fake root tag
'name': None,
'new_tag_handler': self.handle_root,
'data_handler': self.handle_root_data,
'end_tag_handler': self.handle_root_end,
}]
def parse(self, html):
'''Parses the specified HTML page.'''
html = self.__fix_html(html)
self.reset()
try:
# Run the parser
self.feed(html)
self.close()
finally:
# Close all unclosed tags
for tag in self.__tag_stack[1:]:
self.__close_tag(tag, True)
def __accumulate_data(self, data):
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
if self.__cur_data is None:
self.__cur_data = data
else:
self.__cur_data += data
def __close_tag(self, tag, forced = False):
'''Closes the given tag; also used to force closing of unclosed tags.'''
if forced:
LOG.debug('Force closing of unclosed tag "%s".', tag['name'])
else:
LOG.debug('Tag %s closed.', tag)
if 'end_tag_handler' in tag:
tag['end_tag_handler'](tag)
LOG.debug('Current tag: %s.', self.__get_cur_tag())
def __fix_html(self, html):
'''Fixes various things that may confuse the Python's HTML parser.'''
html = self.script_regex.sub('', html)
loop_replacements = (
lambda html: self.__invalid_tag_attr_spacing_regex.subn(r'\1 \2', html),
lambda html: self.__invalid_tag_attr_regex.subn(r'\1 ', html),
)
for loop_replacement in loop_replacements:
for i in xrange(0, 1000):
html, changed = loop_replacement(html)
if not changed:
break
else:
raise Error('Too many errors in the HTML or infinite loop.')
html = self.__misopened_tag_regex.sub(r'<\1 />', html)
return html
def __get_cur_tag(self):
'''Returns currently opened tag.'''
return self.__tag_stack[-1]
def __handle_data_if_exists(self):
'''Handles accumulated data (if exists).'''
data = self.__cur_data
if data is None:
return
self.__cur_data = None
tag = self.__get_cur_tag()
handler = tag.get('data_handler')
if handler is not None:
LOG.debug('Data "%s" in "%s" with handler %s.',
data, tag['name'], handler.func_name)
handler(tag, data)
def __handle_start_tag(self, tag_name, attrs, empty):
'''Handles start of any tag.'''
tag = { 'name': tag_name }
handler = self.__get_cur_tag().get('new_tag_handler')
if handler is not None:
attrs = self.__parse_attrs(attrs)
LOG.debug('Start tag: %s %s with handler %s.',
tag, attrs, handler.func_name)
handler(tag, attrs, empty)
if not empty:
self.__tag_stack.append(tag)
def __parse_attrs(self, attrs_tuple):
'''Converts tag attributes from a tuple to a dictionary.'''
attrs = {}
for attr, value in attrs_tuple:
attrs[attr.lower()] = value
return attrs
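# --- Illustrative subclass sketch (not part of the original module) ---
# `LinkDumper` is hypothetical; it overrides handle_root() to print the href
# of top-level <a> tags while delegating to the base class so that child
# tag handlers stay wired up.
class LinkDumper(HTMLPageParser):
    def handle_root(self, tag, attrs, empty):
        if tag['name'] == 'a' and 'href' in attrs:
            print(attrs['href'])
        HTMLPageParser.handle_root(self, tag, attrs, empty)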
| Densvin/RSSVK | vkfeed/tools/html_parser.py | Python | bsd-2-clause | 8,733 |
# -*- coding: utf-8 -*-
class Company(object):
def __init__(self, name=None, code=None, phone=None, digit=None):
# Company's name
self.name = name
# Codename
self.code = code
# The digit of the invoice number
if digit is None:
digit = []
self.digit = digit
# Phone number of the service center
self.phone = phone
def __repr__(self):
return '[%s] %s (%s)' % (
self.code,
self.name,
self.phone
)
class Track(object):
def __init__(self, time=None, location=None, status=None,
phone1=None, phone2=None):
# Time
self.time = time
# Location
self.location = location
# Status
self.status = status
# Phone number 1
self.phone1 = phone1
# Phone number 2
self.phone2 = phone2
def __repr__(self):
return '[%s] %s - %s / %s / %s' % (
self.time,
self.status,
self.location,
self.phone1,
self.phone2
)
class Tracker(object):
def __init__(self):
self._tracks = []
@property
def tracks(self):
return self._tracks
def add_track(self, new_track):
if not isinstance(new_track, Track):
raise TypeError('The new_track must be Track!')
self._tracks.append(new_track)
def track_by_status(self, status):
"""
Find the tracking information matching the status
:param str status: The status to find the tracking information
:return: The tracking information matching the status
"""
tracks = list(filter(lambda x: x.status == status, self._tracks))
if len(tracks) > 0:
return tracks[-1]
raise LookupError("Can't find the track by status %s" % status)
def __iter__(self):
return iter(self._tracks)
class Parcel(object):
def __init__(self, sender=None, receiver=None, invoice_number=None,
address=None, note=None):
# The sender's name
self.sender = sender
# The receiver's name
self.receiver = receiver
# Invoice number
self.invoice_number = invoice_number
# The receiver's address
self.address = address
# Note for the parcel
self.note = note
def __repr__(self):
return '[%s] From: %s, To: %s, %s' % (
self.invoice_number,
self.sender,
self.receiver,
self.note
)
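# --- Illustrative usage (not part of the original module) ---
# A short sketch of the models above; all values are made up:
if __name__ == '__main__':
    tracker = Tracker()
    tracker.add_track(Track(time='2017-01-01 10:00', location='Seoul',
                            status='Delivered'))
    print(tracker.track_by_status('Delivered'))
    # -> [2017-01-01 10:00] Delivered - Seoul / None / None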
| iBluemind/armatis | armatis/models.py | Python | bsd-2-clause | 2,596 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 14, 2011
@author:Isabel Restrepo
A script to run (fast) k-means on J sets of random subsamples
"""
import os;
import dbrec3d_batch
import multiprocessing
import Queue
import time
import random
import optparse
import sys
from numpy import log, ceil
from xml.etree.ElementTree import ElementTree
import glob
#time.sleep(30);
class dbvalue:
def __init__(self, index, type):
self.id = index # unsigned integer
self.type = type # string
class bof_job():
def __init__(self, cm_i_file, CM_set, max_it, fm_i_file):
self.cm_i_file = cm_i_file;
self.CM_set = CM_set;
self.max_it = max_it;
self.fm_i_file = fm_i_file;
def execute_bof_jobs(jobs, num_procs=4):
work_queue=multiprocessing.Queue();
result_queue=multiprocessing.Queue();
for job in jobs:
work_queue.put(job)
for i in range(num_procs):
worker= bof_worker(work_queue,result_queue)
worker.start();
print("worker with name ",worker.name," started!")
# collect the results off the queue
#important: having a result queue makes the execute_jobs wait for all jobs in the queue before exiting
# results = []
# while len(results) < len(jobs):
# result = result_queue.get()
# results.append(result)
#
# return results
class bof_worker(multiprocessing.Process):
def __init__(self,work_queue,result_queue):
# base class initialization
multiprocessing.Process.__init__(self)
# job management stuff
self.work_queue = work_queue
self.result_queue = result_queue
self.kill_received = False
def run(self):
while not self.kill_received:
# get a task
try:
job = self.work_queue.get_nowait()
except Queue.Empty:
break
start_time = time.time();
dbrec3d_batch.set_stdout('logs/log_' + str(os.getpid())+ ".txt");
dbrec3d_batch.init_process("bofKMeansOnVectorProcess");
dbrec3d_batch.set_input_string(0, job.cm_i_file);
dbrec3d_batch.set_input_from_db(1, job.CM_set);
dbrec3d_batch.set_input_unsigned(2, job.max_it);
dbrec3d_batch.set_input_string(3, job.fm_i_file);
dbrec3d_batch.run_process();
dbrec3d_batch.clear();
dbrec3d_batch.reset_stdout();
print ("Runing time for worker:", self.name)
print(time.time() - start_time);
#output exit code in this case
#important: having a result queue makes the execute_jobs wait for all jobs in the queue before exiting
#self.result_queue.put(0);
#*******************The Main Algorithm ************************#
if __name__=="__main__":
dbrec3d_batch.register_processes();
dbrec3d_batch.register_datatypes();
#Parse inputs
parser = optparse.OptionParser(description='bof Statistics Pass 0');
parser.add_option('--init_k_means_dir', action="store", dest="init_k_means_dir");
parser.add_option('--num_cores', action="store", dest="num_cores", type="int", default=4);
parser.add_option('--max_it', action="store", dest="max_it", type="int", default=100);
options, args = parser.parse_args()
init_k_means_dir = options.init_k_means_dir; #path where all CM_i means are saved and where the ouput FM_i will be written to
num_cores = options.num_cores;
max_it = options.max_it;
if not os.path.isdir(init_k_means_dir +"/"):
print "Invalid init_k_means Dir"
sys.exit(-1);
CM_path = init_k_means_dir + "/CM";
if not os.path.isdir(CM_path +"/"):
print "Invalid CM Dir"
sys.exit(-1);
CM_files = glob.glob1(CM_path, 'CM*');
FM_path = init_k_means_dir + "/FM";
if not os.path.isdir(FM_path +"/"):
os.mkdir(FM_path +"/");
start_time = time.time();
#Combine all CM_i means into one set CM to be passed for k-means
mean_file_sfx = CM_path + "/CM_" ;
dbrec3d_batch.init_process("bofCombineMeansProcess");
dbrec3d_batch.set_input_string(0, mean_file_sfx);
dbrec3d_batch.run_process();
(id, type) = dbrec3d_batch.commit_output(0);
CM_set= dbvalue(id, type);
#Begin multiprocessing
job_list=[];
#Enqueue jobs
for CM_file in CM_files:
cm_file = CM_path + "/" + CM_file;
fm_file = FM_path + "/FM" + CM_file.strip('CM');
current_job = bof_job(cm_file, CM_set, max_it, fm_file);
job_list.append(current_job);
execute_bof_jobs(job_list, num_cores);
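# --- Illustrative invocation (not part of the original script) ---
# The directory below is an assumption; it must contain a CM/ subfolder with
# CM* mean files (an FM/ subfolder is created for the output):
#
# python k_means_on_CM_means.py --init_k_means_dir /data/init_k_means \
#     --num_cores 8 --max_it 100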
| mirestrepo/voxels-at-lems | dbrec3d/bof/pca/learn_codebook/refined_init_k_means/k_means_on_CM_means.py | Python | bsd-2-clause | 4,760 |
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('textbox15.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with textbox(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_textbox('E9', 'This is some text',
{'align': {'horizontal': 'center'}})
workbook.close()
self.assertExcelEqual()
| jmcnamara/XlsxWriter | xlsxwriter/test/comparison/test_textbox15.py | Python | bsd-2-clause | 900 |
import sys
if '' not in sys.path:
sys.path.append('')
import time
import unittest
from pyactors.logs import file_logger
from pyactors.exceptions import EmptyInboxException
from tests import ForkedGreActor as TestActor
from multiprocessing import Manager
class ForkedGreenletActorTest(unittest.TestCase):
def test_run(self):
''' test_forked_green_actors.test_run
'''
test_name = 'test_forked_gen_actors.test_run'
logger = file_logger(test_name, filename='logs/%s.log' % test_name)
actor = TestActor()
actor.start()
while actor.processing:
time.sleep(0.1)
actor.stop()
result = []
while True:
try:
result.append(actor.inbox.get())
except EmptyInboxException:
break
self.assertEqual(len(result), 10)
self.assertEqual(actor.processing, False)
self.assertEqual(actor.waiting, False)
if __name__ == '__main__':
unittest.main()
| snakeego/pyactors | tests/test_forked_green_actors.py | Python | bsd-2-clause | 1,024 |
#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
# ACCESS_KEY = ''
# MESSAGE_ID = ''
try:
ACCESS_KEY
except NameError:
print('You need to set an ACCESS_KEY constant in this file')
sys.exit(1)
try:
MESSAGE_ID
except NameError:
print('You need to set a MESSAGE_ID constant in this file')
sys.exit(1)
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the VoiceMessage object for the specified MESSAGE_ID.
vmsg = client.voice_message(MESSAGE_ID)
# Print the object information.
print('\nThe following information was returned as a VoiceMessage object:\n')
print(' id : %s' % vmsg.id)
print(' href : %s' % vmsg.href)
print(' originator : %s' % vmsg.originator)
print(' body : %s' % vmsg.body)
print(' reference : %s' % vmsg.reference)
print(' language : %s' % vmsg.language)
print(' voice : %s' % vmsg.voice)
print(' repeat : %s' % vmsg.repeat)
print(' ifMachine : %s' % vmsg.ifMachine)
print(' scheduledDatetime : %s' % vmsg.scheduledDatetime)
print(' createdDatetime : %s' % vmsg.createdDatetime)
print(' recipients : %s\n' % vmsg.recipients)
except messagebird.client.ErrorException as e:
print('\nAn error occurred while requesting a VoiceMessage object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| messagebird/python-rest-api | examples/voice_message.py | Python | bsd-2-clause | 1,642 |
with open('README.txt') as f:
long_description = f.read()
from distutils.core import setup
setup(
name = "nomit",
packages = ["nomit"],
version = "1.0",
description = "Process Monit HTTP/XML",
author = "Markus Juenemann",
author_email = "markus@juenemann.net",
url = "https://github.com/mjuenema/nomit",
download_url = "https://github.com/mjuenema/nomit/tarball/1.0",
keywords = ["xml", "Monit", "MMonit"],
classifiers = [
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Monitoring",
],
long_description = long_description
)
| mjuenema/nomit | setup.py | Python | bsd-2-clause | 902 |
# Copyright 2018 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from lino.core.roles import UserRole
class NotesUser(UserRole):
pass
class NotesStaff(NotesUser):
pass
| lino-framework/xl | lino_xl/lib/notes/roles.py | Python | bsd-2-clause | 228 |
from __future__ import absolute_import
__author__ = 'noe'
from pyemma._base.model import SampledModel
from pyemma.msm.models.msm import MSM
from pyemma.util.types import is_iterable
class SampledMSM(MSM, SampledModel):
def __init__(self, samples, ref=None, conf=0.95):
r""" Constructs a sampled MSM
Parameters
----------
samples : list of MSM
Sampled MSM objects
ref : EstimatedMSM
Single-point estimator, e.g. containing a maximum likelihood or mean MSM
conf : float, optional, default=0.95
Confidence interval. Use 0.95 for two sigma (95.4%) or 0.997 for three sigma (99.7%).
"""
# validate input
assert is_iterable(samples), 'samples must be a list of MSM objects, but is not.'
assert isinstance(samples[0], MSM), 'samples must be a list of MSM objects, but is not.'
# construct superclass 1
SampledModel.__init__(self, samples, conf=conf)
# construct superclass 2
if ref is None:
Pref = self.sample_mean('P')
MSM.__init__(self, Pref, dt_model=samples[0].dt_model, neig=samples[0].neig, ncv=samples[0].ncv)
else:
MSM.__init__(self, ref.Pref, pi=ref.pi, reversible=ref.reversible, dt_model=ref.dt_model,
neig=ref.neig, ncv=ref.ncv)
# TODO: maybe rename to parametrize in order to avoid confusion with set_params that has a different behavior?
def set_model_params(self, samples=None, conf=0.95,
P=None, pi=None, reversible=None, dt_model='1 step', neig=None):
"""
Parameters
----------
samples : list of MSM objects
sampled MSMs
conf : float, optional, default=0.95
Confidence interval. Use 0.95 for two sigma (95.4%) or 0.997 for three sigma (99.7%).
"""
# set model parameters of superclass
SampledModel.set_model_params(self, samples=samples, conf=conf)
MSM.set_model_params(self, P=P, pi=pi, reversible=reversible, dt_model=dt_model, neig=neig)
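# --- Illustrative construction sketch (not part of the original module) ---
# Assumes `transition_matrices` is an iterable of row-stochastic numpy arrays;
# MSM and SampledMSM are the classes defined above:
#
# samples = [MSM(P) for P in transition_matrices]
# smsm = SampledMSM(samples, conf=0.95)
# P_mean = smsm.sample_mean('P')  # posterior mean transition matrix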
#
# class SampledEstimatedMSM(EstimatedMSM, SampledModel):
#
# def __init__(self, samples, ref, Pref='mle', conf=0.95):
# r""" Constructs a sampled MSM
#
# Parameters
# ----------
# samples : list of MSM
# Sampled MSM objects
# ref : EstimatedMSM
# Single-point estimator, e.g. containing a maximum likelihood or mean MSM
# conf : float, optional, default=0.68
# Confidence interval. By default one-sigma (68.3%) is used. Use 95.4% for two sigma or 99.7% for three sigma.
#
# """
# # construct superclass 1
# SampledModel.__init__(self, samples, conf=conf)
# # use reference or mean MSM.
# if ref is None:
# Pref = self.sample_mean('P')
# else:
# Pref = ref.P
# # construct superclass 2
# EstimatedMSM.__init__(self, ref.discrete_trajectories_full, ref.timestep, ref.lagtime, ref.connectivity,
# ref.active_set, ref.connected_sets, ref.count_matrix_full, ref.count_matrix_active, Pref)
# def _do_sample_eigendecomposition(self, k, ncv=None):
# """Conducts the eigenvalue decompositions for all sampled matrices.
#
# Stores k eigenvalues, left and right eigenvectors for all sampled matrices
#
# Parameters
# ----------
# k : int
# The number of eigenvalues / eigenvectors to be kept
# ncv : int (optional)
# Relevant for eigenvalue decomposition of reversible transition matrices.
# ncv is the number of Lanczos vectors generated, `ncv` must be greater than k;
# it is recommended that ncv > 2*k
#
# """
# from msmtools.analysis import rdl_decomposition
# from pyemma.util import linalg
#
# # left eigenvectors
# self.sample_Ls = np.empty((self._nsample), dtype=object)
# # eigenvalues
# self.sample_eigenvalues = np.empty((self._nsample), dtype=object)
# # right eigenvectors
# self.sample_Rs = np.empty((self._nsample), dtype=object)
# # eigenvector assignments
# self.sample_eig_assignments = np.empty((self._nsample), dtype=object)
#
# for i in range(self._nsample):
# if self._reversible:
# R, D, L = rdl_decomposition(self.sample_Ps[i], k=k, norm='reversible', ncv=ncv)
# # everything must be real-valued
# R = R.real
# D = D.real
# L = L.real
# else:
# R, D, L = rdl_decomposition(self.sample_Ps[i], k=k, norm='standard', ncv=ncv)
# # assign ordered
# I = linalg.match_eigenvectors(self.eigenvectors_right(), R,
# w_ref=self.stationary_distribution, w=self.sample_mus[i])
# self.sample_Ls[i] = L[I,:]
# self.sample_eigenvalues[i] = np.diag(D)[I]
# self.sample_Rs[i] = R[:,I]
#
# def _ensure_sample_eigendecomposition(self, k=None, ncv=None):
# """Ensures that eigendecomposition has been performed with at least k eigenpairs
#
# k : int
# number of eigenpairs needed. This setting is mandatory for sparse transition matrices
# (if you set sparse=True in the initialization). For dense matrices, k will be ignored
# as all eigenvalues and eigenvectors will be computed and stored.
# ncv : int (optional)
# Relevant for eigenvalue decomposition of reversible transition matrices.
# ncv is the number of Lanczos vectors generated, `ncv` must be greater than k;
# it is recommended that ncv > 2*k
#
# """
# # check input?
# if self._sparse:
# if k is None:
# raise ValueError(
# 'You have requested sparse=True, then the number of eigenvalues neig must also be set.')
# else:
# # override setting - we anyway have to compute all eigenvalues, so we'll also store them.
# k = self._nstates
# # ensure that eigenvalue decomposition with k components is done.
# try:
# m = len(self.sample_eigenvalues[0]) # this will raise and exception if self._eigenvalues doesn't exist yet.
# if m < k:
# # not enough eigenpairs present - recompute:
# self._do_sample_eigendecomposition(k, ncv=ncv)
# except:
# # no eigendecomposition yet - compute:
# self._do_sample_eigendecomposition(k, ncv=ncv)
#
# @property
# def stationary_distribution_mean(self):
# """Sample mean for the stationary distribution on the active set.
#
# See also
# --------
# MSM.stationary_distribution
#
# """
# return np.mean(self.sample_mus, axis=0)
#
# @property
# def stationary_distribution_std(self):
# """Sample standard deviation for the stationary distribution on the active set.
#
# See also
# --------
# MSM.stationary_distribution
#
# """
# return np.std(self.sample_mus, axis=0)
#
# @property
# def stationary_distribution_conf(self):
# """Sample confidence interval for the stationary distribution on the active set.
#
# See also
# --------
# MSM.stationary_distribution
#
# """
# return stat.confidence_interval(self.sample_mus, alpha=self._confidence)
#
# def eigenvalues_mean(self, k=None, ncv=None):
# """Sample mean for the eigenvalues.
#
# See also
# --------
# MSM.eigenvalues
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.mean(self.sample_eigenvalues, axis=0)
#
# def eigenvalues_std(self, k=None, ncv=None):
# """Sample standard deviation for the eigenvalues.
#
# See also
# --------
# MSM.eigenvalues
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.std(self.sample_eigenvalues, axis=0)
#
# def eigenvalues_conf(self, k=None, ncv=None):
# """Sample confidence interval for the eigenvalues.
#
# See also
# --------
# MSM.eigenvalues
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return stat.confidence_interval(self.sample_eigenvalues, alpha=self._confidence)
#
# def eigenvectors_left_mean(self, k=None, ncv=None):
# """Sample mean for the left eigenvectors.
#
# See also
# --------
# MSM.eigenvectors_left
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.mean(self.sample_Ls, axis=0)
#
# def eigenvectors_left_std(self, k=None, ncv=None):
# """Sample standard deviation for the left eigenvectors.
#
# See also
# --------
# MSM.eigenvectors_left
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.std(self.sample_Ls, axis=0)
#
# def eigenvectors_left_conf(self, k=None, ncv=None):
# """Sample confidence interval for the left eigenvectors.
#
# See also
# --------
# MSM.eigenvectors_left
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return stat.confidence_interval(self.sample_Ls, alpha=self._confidence)
#
#
# # def eigenvectors_right_mean(self, k=None, ncv=None):
# # """Sample mean for the right eigenvectors.
# #
# # See also
# # --------
# # MSM.eigenvectors_right
# #
# # """
# # self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# # return np.mean(self.sample_Rs, axis=0)
# #
# # def eigenvectors_right_std(self, k=None, ncv=None):
# # """Sample standard deviation for the right eigenvectors.
# #
# # See also
# # --------
# # MSM.eigenvectors_right
# #
# # """
# # self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# # return np.std(self.sample_Rs, axis=0)
# #
# # def eigenvectors_right_conf(self, k=None, ncv=None):
# # """Sample confidence interval for the right eigenvectors.
# #
# # See also
# # --------
# # MSM.eigenvectors_right
# #
# # """
# # self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# # return stat.confidence_interval_arr(self.sample_Rs, alpha=self._confidence)
#
# def _sample_timescales(self):
# """Compute sample timescales from the sample eigenvalues"""
# res = np.empty((self._nsample), dtype=np.object)
# for i in range(self._nsample):
# res[i] = -self._lag / np.log(np.abs(self.sample_eigenvalues[i][1:]))
# return res
#
# def timescales_mean(self, k=None, ncv=None):
# """Sample mean for the timescales.
#
# See also
# --------
# MSM.timescales
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.mean(self._sample_timescales(), axis=0)
#
# def timescales_std(self, k=None, ncv=None):
# """Sample standard deviation for the timescales.
#
# See also
# --------
# MSM.timescales
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return np.std(self._sample_timescales(), axis=0)
#
# def timescales_conf(self, k=None, ncv=None):
# """Sample confidence interval for the timescales.
#
# See also
# --------
# MSM.timescales
#
# """
# self._ensure_sample_eigendecomposition(k=k, ncv=ncv)
# return stat.confidence_interval(self._sample_timescales(), alpha=self._confidence)
#
#
# def _sample_mfpt(self, A, B):
# """Compute sample timescales from the sample eigenvalues"""
# res = np.zeros((self._nsample))
# for i in range(self._nsample):
# res[i] = self._mfpt(self.sample_Ps[i], A, B, mu=self.sample_mus[i])
# return res
#
# def mfpt_mean(self, A, B):
# """Sample mean for the A->B mean first passage time.
#
# See also
# --------
# MSM.mfpt
#
# """
# return np.mean(self._sample_mfpt(A,B), axis=0)
#
# def mfpt_std(self, A, B):
# """Sample standard deviation for the A->B mean first passage time.
#
# See also
# --------
# MSM.mfpt
#
# """
# return np.std(self._sample_mfpt(A,B), axis=0)
#
# def mfpt_conf(self, A, B):
# """Sample confidence interval for the A->B mean first passage time.
#
# See also
# --------
# MSM.mfpt
#
# """
# return stat.confidence_interval(self._sample_mfpt(A,B), alpha=self._confidence)
#
# def _sample_committor_forward(self, A, B):
# """Compute sample timescales from the sample eigenvalues"""
# res = np.empty((self._nsample), dtype=np.object)
# for i in range(self._nsample):
# res[i] = self._committor_forward(self.sample_Ps[i], A, B)
# return res
#
# def committor_forward_mean(self, A, B):
# """Sample mean for the A->B forward committor.
#
# See also
# --------
# MSM.committor_forward
#
# """
# return np.mean(self._sample_committor_forward(A,B), axis=0)
#
# def committor_forward_std(self, A, B):
# """Sample standard deviation for the A->B forward committor.
#
# See also
# --------
# MSM.committor_forward
#
# """
# return np.std(self._sample_committor_forward(A,B), axis=0)
#
# def committor_forward_conf(self, A, B):
# """Sample confidence interval for the A->B forward committor.
#
# See also
# --------
# MSM.committor_forward
#
# """
# return stat.confidence_interval(self._sample_committor_forward(A,B), alpha=self._confidence)
#
#
# def _sample_committor_backward(self, A, B):
# """Compute sample timescales from the sample eigenvalues"""
# res = np.empty((self._nsample), dtype=np.object)
# for i in range(self._nsample):
# res[i] = self._committor_backward(self.sample_Ps[i], A, B, mu=self.sample_mus[i])
# return res
#
# def committor_backward_mean(self, A, B):
# """Sample mean for the A->B backward committor.
#
# See also
# --------
# MSM.committor_backward
#
# """
# return np.mean(self._sample_committor_backward(A,B), axis=0)
#
# def committor_backward_std(self, A, B):
# """Sample standard deviation for the A->B backward committor.
#
# See also
# --------
# MSM.committor_backward
#
# """
# return np.std(self._sample_committor_backward(A,B), axis=0)
#
# def committor_backward_conf(self, A, B):
# """Sample confidence interval for the A->B backward committor.
#
# See also
# --------
# MSM.committor_backward
#
# """
# return stat.confidence_interval(self._sample_committor_backward(A,B), alpha=self._confidence)
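# --- Illustrative sketch (not from the original module): every sampled-MSM
# statistic above follows the same pattern -- evaluate an observable on each
# sampled transition matrix, then summarize with a mean/std/percentile interval.
# numpy-only toy version; the function names below are hypothetical.
import numpy as np
def sample_timescales(sample_eigenvalues, lag):
    """Implied timescales t_i = -lag / ln|lambda_i|, one array per sample."""
    return np.array([-lag / np.log(np.abs(ev[1:])) for ev in sample_eigenvalues])
def confidence_interval(samples, alpha=0.95):
    """Symmetric percentile interval over the sample axis."""
    lower = np.percentile(samples, 100.0 * (1.0 - alpha) / 2.0, axis=0)
    upper = np.percentile(samples, 100.0 * (1.0 + alpha) / 2.0, axis=0)
    return lower, upper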
|
trendelkampschroer/PyEMMA
|
pyemma/msm/models/msm_sampled.py
|
Python
|
bsd-2-clause
| 15,417
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
u"""
OMD Livestatus dynamic inventory script
=======================================
If running as an OMD site user, i.e. if ${OMD_ROOT} is set, we try to
connect to the Livestatus socket at the default location
${OMD_ROOT}/tmp/run/live
Alternatively, the path to the Livestatus socket can be set from the
environment via
export OMD_LIVESTATUS_SOCKET=/omd/sites/mysite/tmp/run/live
or on the command-line with --socket.
Inspired by the DigitalOcean inventory script:
https://github.com/ansible/ansible/blob/devel/contrib/inventory/digital_ocean.py
:author: Andreas Härpfer <andreas.haerpfer@consol.de>
"""
from __future__ import print_function
__version__ = '0.2'
import datetime
import os
import sys
import optparse # Legacy ... 2.6 still out there
import socket
import subprocess
try:
import json
except ImportError:
import simplejson as json
try:
maketrans = str.maketrans # Python 3
except AttributeError:
from string import maketrans # Python 2
class OMDLivestatusInventory(object):
#: default socket path
_def_socket_path = u'/tmp/run/live'
#: Livestatus query string
_def_host_query = (u'GET hosts\n'
'Columns: address name alias groups host_custom_variables\n'
'OutputFormat: json\n')
#: string of bad characters in host or group names
_bad_chars = u'.,;:[]/ '
#: replacement char for bad chars
_replacement_char = u'_'
def __init__(self, location=None, method='socket', by_ip=False):
self.data = {}
self.inventory = {}
self.method = method
#: translation table for sanitizing group names
#
# See the following to find out why this can't be a class variable:
# http://stackoverflow.com/questions/13905741/accessing-class-variables-from-a-list-comprehension-in-the-class-definition
# This version only works for byte strings but not for unicode :-(
#self._trans_table = maketrans(
# self._bad_chars, self._replacement_char * len(_bad_chars))
# Unicode version; see also:
# http://stackoverflow.com/questions/1324067/how-do-i-get-str-translate-to-work-with-unicode-strings
self._trans_table = dict((ord(char), self._replacement_char)
for char in self._bad_chars)
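        # e.g. u'web servers.prod'.translate(self._trans_table)
        # -> u'web_servers_prod' (every char from _bad_chars becomes '_')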
if not location:
if 'OMD_LIVESTATUS_SOCKET' in os.environ:
self.location = os.environ['OMD_LIVESTATUS_SOCKET']
elif 'OMD_ROOT' in os.environ:
self.location = (os.environ['OMD_ROOT']
+ OMDLivestatusInventory._def_socket_path)
else:
raise EnvironmentError(
'Unable to determine location of Livestatus socket.')
else:
self.location = location
self.load_from_omd()
if by_ip:
self.build_inventory_by_ip()
else:
self.build_inventory_by_name()
def load_from_omd(self):
"""Read host data from livestatus socket.
Populates self.data['hosts'].
"""
self.data['hosts'] = []
if self.method == 'ssh':
answer = json.loads(self._read_from_ssh())
else:
answer = json.loads(self._read_from_socket())
for host in answer:
self.data['hosts'].append(
dict(zip((u'ip', u'name', u'alias', u'groups', u'custom_vars'),
host)))
def _read_from_socket(self):
"""Read data from local Livestatus socket."""
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.location)
s.send(OMDLivestatusInventory._def_host_query.encode('utf-8'))
s.shutdown(socket.SHUT_WR)
        # A single recv() may return only part of a large response, so
        # read until the server closes the connection.
        chunks = []
        while True:
            chunk = s.recv(65536)
            if not chunk:
                break
            chunks.append(chunk)
        s.close()
        return b''.join(chunks).decode('utf-8')
def _read_from_ssh(self):
"""Read data from remote Livestatus socket via SSH.
Assumes non-interactive (e.g. via ssh-agent) access to the
remote host. The `unixcat` command (part of Livestatus) has to
be available via $PATH at the remote end.
"""
l = self.location.split(':', 1)
l.append('.' + OMDLivestatusInventory._def_socket_path)
host, path = l[0], l[1]
cmd = ['ssh', host,
'-o', 'BatchMode=yes',
'-o', 'ConnectTimeout=10',
'unixcat {0}'.format(path)]
p = subprocess.Popen(cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate(
input=OMDLivestatusInventory._def_host_query.encode('utf-8'))
if p.returncode:
raise RuntimeError(err)
return out.decode('utf-8')
def build_inventory_by_ip(self):
"""Create Ansible inventory by IP address instead of by name.
        Caveat: contrary to hostnames, IP addresses are not guaranteed
        to be unique in OMD! Since there is only one set of hostvars
        for a given IP, duplicate IPs might mean that you are losing
        data.
When creating static inventory output we issue a warning for
duplicate IPs. For the default JSON output this warning is
suppressed since Ansible discards any output on STDERR.
Group names are sanitized to not contain characters that Ansible
can't digest. In particular group names in Ansible must not
contain blanks!
"""
inventory = {}
hostvars = {}
for host in self.data['hosts']:
for group in host['groups'] or [u'_NOGROUP']:
sanitized_group = group.translate(self._trans_table)
if sanitized_group in inventory:
inventory[sanitized_group].append(host['ip'])
else:
inventory[sanitized_group] = [host['ip']]
            # Detect duplicate IPs in inventory. Keep first occurrence
# in hostvars instead of overwriting with later data.
ip = host['ip']
if ip not in hostvars:
hostvars[ip] = {
'omd_name': host['name'],
'omd_alias': host['alias'],
'omd_custom_vars': host['custom_vars'],
}
#else:
# # duplicate IP
# pass
self.inventory = inventory
self.inventory['_meta'] = {
'hostvars': hostvars
}
def build_inventory_by_name(self):
"""Create Ansible inventory by OMD name.
Group names are sanitized to not contain characters that Ansible
can't digest. In particular group names in Ansible must not
contain blanks!
"""
inventory = {}
hostvars = {}
for host in self.data['hosts']:
for group in host['groups'] or [u'_NOGROUP']:
sanitized_group = group.translate(self._trans_table)
if sanitized_group in inventory:
inventory[sanitized_group].append(host['name'])
else:
inventory[sanitized_group] = [host['name']]
hostvars[host['name']] = {
'ansible_host': host['ip'],
'omd_alias': host['alias'],
'omd_custom_vars': host['custom_vars'],
}
self.inventory = inventory
self.inventory['_meta'] = {
'hostvars': hostvars
}
def list(self, indent=None, sort_keys=False):
"""Return full inventory data as JSON."""
return json.dumps(self.inventory, indent=indent, sort_keys=sort_keys)
def host(self, name, indent=None, sort_keys=False):
"""Return hostvars for a single host as JSON."""
if name in self.inventory['_meta']['hostvars']:
            return json.dumps(
                self.inventory['_meta']['hostvars'][name],
                indent=indent,
                sort_keys=sort_keys
            )
        else:
            return "{}"
def static(self):
"""Return data in static inventory format."""
out = []
out.append('# File created: {}'.format(datetime.datetime.now()))
for group in [k for k in self.inventory.keys() if k != '_meta']:
out.append('\n[{0}]'.format(group))
for host in self.inventory[group]:
                host_vars = self.inventory['_meta']['hostvars'][host]
                pairs = []
                for varname in host_vars.keys():
                    pairs.append('{0}="{1}"'.format(varname, host_vars[varname]))
                out.append('{0}\t{1}'.format(host, ' '.join(pairs)))
return '\n'.join(out)
def _save_method(option, opt_str, value, parser):
parser.values.method = opt_str.lstrip('-')
parser.values.location = value
def parse_arguments():
"""Parse command line arguments."""
parser = optparse.OptionParser(version='%prog {0}'.format(__version__))
parser.set_defaults(method='socket')
output_group = optparse.OptionGroup(parser, 'Output formats')
output_group.add_option(
'--list', action='store_true', dest='list', default=False,
help='Return full Ansible inventory as JSON (default action).')
output_group.add_option(
'--host', type='string', dest='host', default=None,
help='Return Ansible hostvars for HOST as JSON.')
output_group.add_option(
'--static', action='store_true', dest='static', default=False,
help='Print inventory in static file format to stdout.')
output_group.add_option(
'--by-ip', action='store_true', dest='by_ip', default=False,
help='Create inventory by IP (instead of the default by name).')
parser.add_option_group(output_group)
connect_group = optparse.OptionGroup(parser, 'Connection options')
connect_group.add_option(
'--socket', type='string', dest='location', default=None,
action='callback', callback=_save_method,
help=('Set path to Livestatus socket. If omitted, try to use '
'$OMD_LIVESTATUS_SOCKET or $OMD_ROOT/tmp/run/live.'
))
connect_group.add_option(
'--ssh', type='string', dest='location', default=None,
action='callback', callback=_save_method,
help=('Connect to Livestatus socket via SSH. LOCATION has the '
'form [user@]host[:path], the default path is ./tmp/run/live.'
))
parser.add_option_group(connect_group)
opts, args = parser.parse_args()
# Make `list` the default action.
if not opts.host:
opts.list = True
return opts, args
if __name__ == '__main__':
opts, args = parse_arguments()
inv = OMDLivestatusInventory(opts.location,
method=opts.method,
by_ip=opts.by_ip)
if opts.static:
print(inv.static())
elif opts.list:
print(inv.list(indent=4, sort_keys=True))
elif opts.host:
print(inv.host(opts.host, indent=4, sort_keys=True))
else:
print('Missing command.')
sys.exit(1)
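# --- Usage sketch (not part of the original file; site and host names are
# placeholders):
#   export OMD_LIVESTATUS_SOCKET=/omd/sites/mysite/tmp/run/live
#   ./omd_livestatus.py --list          # full inventory as JSON
#   ./omd_livestatus.py --host web01    # hostvars for a single host
#   ./omd_livestatus.py --static        # static inventory format on stdout
#   ./omd_livestatus.py --ssh ansible@monitor01 --list
#   ansible -i omd_livestatus.py all -m ping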
|
ahaerpfer/ansible-inventory-omd-livestatus
|
omd_livestatus.py
|
Python
|
bsd-2-clause
| 11,214
|
from __future__ import annotations
from os import getenv
import gc
import sys
from unittest import TestCase, main
from unittest import skip as skip
from unittest import skipIf as skipIf
import logging
from progressivis import Scheduler, log_level
from progressivis.storage import init_temp_dir_if, cleanup_temp_dir
import numpy as np
from typing import Any, Type, Optional
_ = skip # shut-up pylint
__ = skipIf
class ProgressiveTest(TestCase):
CRITICAL = logging.CRITICAL
ERROR = logging.ERROR
WARNING = logging.WARNING
INFO = logging.INFO
DEBUG = logging.DEBUG
NOTSET = logging.NOTSET
levels = {
"CRITICAL": logging.CRITICAL,
"ERROR": logging.ERROR,
"WARNING": logging.WARNING,
"INFO": logging.INFO,
"DEBUG": logging.DEBUG,
"NOTSET": logging.NOTSET,
}
def __init__(self, *args: Any) -> None:
super(ProgressiveTest, self).__init__(*args)
self._output: bool = False
self._scheduler: Optional[Scheduler] = None
self._temp_dir_flag: bool = False
level: Any = getenv("LOGLEVEL")
if level in ProgressiveTest.levels:
level = ProgressiveTest.levels[level]
if level:
print(f"Logger level {level} for {self}", file=sys.stderr)
self.log(int(level))
@staticmethod
def terse(x: Any) -> None:
_ = x
print(".", end="", file=sys.stderr)
@staticmethod
async def _stop(scheduler: Scheduler, run_number: int) -> None:
await scheduler.stop()
def setUp(self) -> None:
np.random.seed(42)
def tearDown(self) -> None:
# print('Logger level for %s back to ERROR' % self, file=sys.stderr)
# self.log()
gc.collect()
logger = logging.getLogger()
logger.setLevel(logging.NOTSET)
while logger.hasHandlers():
logger.removeHandler(logger.handlers[0])
@classmethod
    def cleanup(cls) -> None:
cleanup_temp_dir()
@classmethod
def setUpClass(cls: Type[ProgressiveTest]) -> None:
cleanup_temp_dir()
init_temp_dir_if()
@classmethod
def tearDownClass(cls: Type[ProgressiveTest]) -> None:
cleanup_temp_dir()
def scheduler(self, clean: bool = False) -> Scheduler:
if self._scheduler is None or clean:
self._scheduler = Scheduler()
return self._scheduler
@staticmethod
def log(level: int = logging.NOTSET, package: str = "progressivis") -> None:
log_level(level, package=package)
@staticmethod
def main() -> None:
main()
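# --- Minimal sketch (not part of the original file): what a test case built
# on ProgressiveTest typically looks like; the scheduler() helper caches a
# Scheduler until clean=True is passed.
class ExampleSchedulerTest(ProgressiveTest):
    def test_scheduler_is_cached(self) -> None:
        s = self.scheduler()
        self.assertIs(s, self.scheduler())  # the same instance is reused
        self.assertIsNot(s, self.scheduler(clean=True))  # clean=True makes a new one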
|
jdfekete/progressivis
|
tests/__init__.py
|
Python
|
bsd-2-clause
| 2,610
|
"""Leetcode 100. Same Tree
Easy
URL: https://leetcode.com/problems/same-tree/
Given two binary trees, write a function to check if they are the same or not.
Two binary trees are considered the same if they are structurally identical and
the nodes have the same value.
Example 1:
Input: 1 1
/ \ / \
2 3 2 3
[1,2,3], [1,2,3]
Output: true
Example 2:
Input: 1 1
/ \
2 2
[1,2], [1,null,2]
Output: false
Example 3:
Input: 1 1
/ \ / \
2 1 1 2
[1,2,1], [1,1,2]
Output: false
"""
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, val):
self.val = val
self.left = None
self.right = None
class SolutionPreorderRecur(object):
def isSameTree(self, p, q):
"""
:type p: TreeNode
:type q: TreeNode
:rtype: bool
Apply recursive preorder traversal to check same tree.
Time complexity: O(n).
Space complexity: O(n).
"""
        # Check if neither root exists.
if not p and not q:
return True
        # Check if just one of the roots exists.
if not p or not q:
return False
# If both exist, check their values are the same.
if p.val != q.val:
return False
# Recursively check left/right subtrees.
return (self.isSameTree(p.left, q.left) and
self.isSameTree(p.right, q.right))
class SolutionPreorderIter(object):
def isSameTree(self, p, q):
"""
:type p: TreeNode
:type q: TreeNode
:rtype: bool
Apply iterative preorder traversal to check same tree.
Time complexity: O(n).
Space complexity: O(n).
"""
stack = [(p, q)]
while stack:
cur_p, cur_q = stack.pop()
            # If neither node exists, continue, since there
            # may be other node pairs left to check.
if not cur_p and not cur_q:
continue
            # Check if just one of the nodes exists.
if not cur_p or not cur_q:
return False
# If both exist, check their values are the same.
if cur_p.val != cur_q.val:
return False
# Add root's right and then left to stack, since stack is FILO.
stack.append((cur_p.right, cur_q.right))
stack.append((cur_p.left, cur_q.left))
return True
def main():
# Input: 1 1
# / \ / \
# 2 3 2 3
# [1,2,3], [1,2,3]
# Output: true
p = TreeNode(1)
p.left = TreeNode(2)
p.right = TreeNode(3)
q = TreeNode(1)
q.left = TreeNode(2)
q.right = TreeNode(3)
    print(SolutionPreorderRecur().isSameTree(p, q))
    print(SolutionPreorderIter().isSameTree(p, q))
# Input: 1 1
# / \
# 2 2
# [1,2], [1,null,2]
# Output: false
p = TreeNode(1)
p.left = TreeNode(2)
q = TreeNode(1)
q.right = TreeNode(2)
    print(SolutionPreorderRecur().isSameTree(p, q))
    print(SolutionPreorderIter().isSameTree(p, q))
# Input: 1 1
# / \ / \
# 2 1 1 2
# [1,2,1], [1,1,2]
# Output: false
p = TreeNode(1)
p.left = TreeNode(2)
p.right = TreeNode(1)
q = TreeNode(1)
q.left = TreeNode(1)
q.right = TreeNode(2)
    print(SolutionPreorderRecur().isSameTree(p, q))
    print(SolutionPreorderIter().isSameTree(p, q))
# Input: [10,5,15], [10,5,null,null,15]
p = TreeNode(10)
p.left = TreeNode(5)
p.right = TreeNode(15)
q = TreeNode(10)
q.left = TreeNode(5)
q.left.right = TreeNode(15)
    print(SolutionPreorderRecur().isSameTree(p, q))
    print(SolutionPreorderIter().isSameTree(p, q))
if __name__ == '__main__':
main()
|
bowen0701/algorithms_data_structures
|
lc0100_same_tree.py
|
Python
|
bsd-2-clause
| 4,044
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
methods = ['method_name_1','method_name_2']
|
ganeshgore/myremolab
|
server/src/test/unit/voodoo/gen/loader/APItype1.py
|
Python
|
bsd-2-clause
| 422
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp import error
class MetaObserver(object):
"""This is a simple facility for exposing internal SNMP Engine
working details to pysnmp applications. These details are
basically local scope variables at a fixed point of execution.
Two modes of operations are offered:
1. Consumer: app can request an execution point context by execution point ID.
2. Provider: app can register its callback function (and context) to be invoked
once execution reaches specified point. All local scope variables
will be passed to the callback as in #1.
    It's important to realize that the execution context is only
    guaranteed to exist for functions that are at the same or deeper
    level of invocation relative to the specified execution point.
"""
def __init__(self):
self.__observers = {}
self.__contexts = {}
self.__execpoints = {}
def registerObserver(self, cbFun, *execpoints, **kwargs):
if cbFun in self.__contexts:
raise error.PySnmpError('duplicate observer %s' % cbFun)
else:
self.__contexts[cbFun] = kwargs.get('cbCtx')
for execpoint in execpoints:
if execpoint not in self.__observers:
self.__observers[execpoint] = []
self.__observers[execpoint].append(cbFun)
def unregisterObserver(self, cbFun=None):
if cbFun is None:
self.__observers.clear()
self.__contexts.clear()
else:
for execpoint in dict(self.__observers):
if cbFun in self.__observers[execpoint]:
self.__observers[execpoint].remove(cbFun)
if not self.__observers[execpoint]:
del self.__observers[execpoint]
def storeExecutionContext(self, snmpEngine, execpoint, variables):
self.__execpoints[execpoint] = variables
if execpoint in self.__observers:
for cbFun in self.__observers[execpoint]:
cbFun(snmpEngine, execpoint, variables, self.__contexts[cbFun])
def clearExecutionContext(self, snmpEngine, *execpoints):
if execpoints:
for execpoint in execpoints:
del self.__execpoints[execpoint]
else:
self.__execpoints.clear()
def getExecutionContext(self, execpoint):
return self.__execpoints[execpoint]
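if __name__ == '__main__':
    # --- Self-contained sketch (not part of the original file): exercising
    # MetaObserver directly, with a stand-in snmpEngine (None) and an
    # arbitrary execution point ID.
    def _cb(snmpEngine, execpoint, variables, cbCtx):
        print('observed %s: %r (ctx=%r)' % (execpoint, variables, cbCtx))
    observer = MetaObserver()
    observer.registerObserver(_cb, 'demo.execpoint', cbCtx='demo-context')
    observer.storeExecutionContext(None, 'demo.execpoint', {'pdu': 'GET'})
    print(observer.getExecutionContext('demo.execpoint'))
    observer.clearExecutionContext(None, 'demo.execpoint')
    observer.unregisterObserver(_cb)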
|
etingof/pysnmp
|
pysnmp/entity/observer.py
|
Python
|
bsd-2-clause
| 2,572
|
import logging
from .. import exceptions
from ..plan import COMPLETE, Plan
from ..status import NotSubmittedStatus, NotUpdatedStatus
from . import build
import difflib
import json
logger = logging.getLogger(__name__)
def diff_dictionaries(old_dict, new_dict):
"""Diffs two single dimension dictionaries
Returns the number of changes and an unordered list
expressing the common entries and changes.
Args:
old_dict(dict): old dictionary
new_dict(dict): new dictionary
    Returns:
        list: [changes, output] where changes (int) is the number of
            changed records and output is a list of entries of the form
            [str(<change type>), <key>, <value>],
            where <change type> is +, - or <space>.
    """
old_set = set(old_dict)
new_set = set(new_dict)
added_set = new_set - old_set
removed_set = old_set - new_set
common_set = old_set & new_set
changes = 0
output = []
for key in added_set:
changes += 1
output.append(["+", key, new_dict[key]])
for key in removed_set:
changes += 1
output.append(["-", key, old_dict[key]])
for key in common_set:
if str(old_dict[key]) != str(new_dict[key]):
changes += 1
output.append(["-", key, old_dict[key]])
output.append(["+", key, new_dict[key]])
else:
output.append([" ", key, new_dict[key]])
return [changes, output]
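# Example (output order is unordered, per the docstring):
#   diff_dictionaries({'a': 1, 'b': 2}, {'b': 3, 'c': 4})
#   -> [3, [['+', 'c', 4], ['-', 'a', 1], ['-', 'b', 2], ['+', 'b', 3]]]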
def print_diff_parameters(parameter_diff):
"""Handles the printing of differences in parameters.
Args:
parameter_diff (list): A list dictionaries detailing the differences
between two parameters returned by
:func:`stacker.actions.diff.diff_dictionaries`
"""
print """--- Old Parameters
+++ New Parameters
******************"""
for line in parameter_diff:
print "%s%s = %s" % (line[0], line[1], line[2])
def diff_parameters(old_params, new_params):
"""Compares the old vs. new parameters and prints a "diff"
If there are no changes, we print nothing.
Args:
        old_params(dict): old parameters
new_params(dict): new parameters
Returns:
list: A list of differences
"""
[changes, diff] = diff_dictionaries(old_params, new_params)
if changes == 0:
return []
return diff
def print_stack_changes(stack_name, new_stack, old_stack, new_params,
old_params):
"""Prints out the paramters (if changed) and stack diff"""
from_file = "old_%s" % (stack_name,)
to_file = "new_%s" % (stack_name,)
lines = difflib.context_diff(
old_stack, new_stack,
fromfile=from_file, tofile=to_file)
template_changes = list(lines)
if not template_changes:
print "*** No changes to template ***"
else:
param_diffs = diff_parameters(old_params, new_params)
print_diff_parameters(param_diffs)
print "".join(template_changes)
class Action(build.Action):
""" Responsible for diff'ing CF stacks in AWS and on disk
Generates the build plan based on stack dependencies (these dependencies
are determined automatically based on references to output values from
other stacks).
The plan is then used to pull the current CloudFormation template from
AWS and compare it to the generated templated based on the current
config.
"""
def _normalize_json(self, template):
"""Normalizes our template for diffing
Args:
template(str): json string representing the template
Returns:
list: json representation of the parameters
"""
obj = json.loads(template)
json_str = json.dumps(obj, sort_keys=True, indent=4)
result = []
lines = json_str.split("\n")
for line in lines:
result.append(line + "\n")
return result
def _print_new_stack(self, stack, parameters):
"""Prints out the parameters & stack contents of a new stack"""
print "New template parameters:"
for param in sorted(parameters,
key=lambda param: param['ParameterKey']):
print "%s = %s" % (param['ParameterKey'], param['ParameterValue'])
print "\nNew template contents:"
print "".join(stack)
def _diff_stack(self, stack, **kwargs):
"""Handles the diffing a stack in CloudFormation vs our config"""
if not build.should_submit(stack):
return NotSubmittedStatus()
if not build.should_update(stack):
return NotUpdatedStatus()
# get the current stack template & params from AWS
try:
[old_template, old_params] = self.provider.get_stack_info(
stack.fqn)
except exceptions.StackDoesNotExist:
old_template = None
old_params = {}
stack.resolve_variables(self.context, self.provider)
# generate our own template & params
new_template = stack.blueprint.rendered
parameters = self.build_parameters(stack)
new_params = dict()
for p in parameters:
new_params[p['ParameterKey']] = p['ParameterValue']
new_stack = self._normalize_json(new_template)
print "============== Stack: %s ==============" % (stack.name,)
# If this is a completely new template dump our params & stack
if not old_template:
self._print_new_stack(new_stack, parameters)
else:
# Diff our old & new stack/parameters
old_stack = self._normalize_json(old_template)
print_stack_changes(stack.name, new_stack, old_stack, new_params,
old_params)
return COMPLETE
def _generate_plan(self):
plan = Plan(description="Diff stacks")
stacks = self.context.get_stacks_dict()
dependencies = self._get_dependencies()
for stack_name in self.get_stack_execution_order(dependencies):
plan.add(
stacks[stack_name],
run_func=self._diff_stack,
requires=dependencies.get(stack_name),
)
return plan
def run(self, *args, **kwargs):
plan = self._generate_plan()
debug_plan = self._generate_plan()
debug_plan.outline(logging.DEBUG)
logger.info("Diffing stacks: %s", ", ".join(plan.keys()))
plan.execute()
"""Don't ever do anything for pre_run or post_run"""
def pre_run(self, *args, **kwargs):
pass
def post_run(self, *args, **kwargs):
pass
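# --- Standalone sketch (not part of the original file): the core of the
# template diff above is "normalize the JSON, then difflib.context_diff".
# The toy templates below are made up; the helper is never called by stacker.
def _demo_template_diff():
    def normalize(template):
        obj = json.loads(template)
        return [line + "\n" for line in
                json.dumps(obj, sort_keys=True, indent=4).split("\n")]
    old = '{"Resources": {"Bucket": {"Type": "AWS::S3::Bucket"}}}'
    new = ('{"Resources": {"Bucket": {"Type": "AWS::S3::Bucket"},'
           ' "Queue": {"Type": "AWS::SQS::Queue"}}}')
    print("".join(difflib.context_diff(normalize(old), normalize(new),
                                       fromfile='old_stack',
                                       tofile='new_stack')))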
|
mhahn/stacker
|
stacker/actions/diff.py
|
Python
|
bsd-2-clause
| 6,533
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2017, Thierry Lemeunier <thierry at lemeunier dot net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .BaseWindow import BaseWindow
class TitledOnBorderWindow(BaseWindow):
"""
A window with a border and a title on border. Subclass of BaseWindow.
"""
def __init__(self, parent, h, w, y, x, title, modal=False, menu=False, colourT=False, colourD=False):
"""Create base window"""
BaseWindow.__init__(self, parent, h, w, y, x, modal=modal, menu=menu)
self.title = title
self.colourT = colourT
self.colourD = colourD
self._create()
def redraw(self):
"""See mother class"""
self._create()
BaseWindow.redraw(self)
def _create(self):
self.window.attrset(self.colourD)
self.window.border()
self.window.addstr(0, 2, '[ ' + self.title + ' ]', self.colourT)
self.window.refresh()
self.window.attrset(0)
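# --- Usage sketch (not part of the original file; assumes curses has been
# initialised and `parent` is an existing BaseWindow whose colour attributes
# were prepared by the client):
# import curses
# win = TitledOnBorderWindow(parent, 12, 40, 2, 4, 'Secrets',
#                            colourT=curses.A_BOLD, colourD=curses.A_NORMAL)
# win.redraw()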
|
thethythy/Mnemopwd
|
mnemopwd/client/uilayer/uicomponents/TitledOnBorderWindow.py
|
Python
|
bsd-2-clause
| 2,237
|
default_app_config = 'user_deletion.apps.UserDeletionConfig'
|
incuna/django-user-deletion
|
user_deletion/__init__.py
|
Python
|
bsd-2-clause
| 61
|
from django.conf.urls import url
from .views import simple_password
urlpatterns = [
url(r'^$', view=simple_password, name="simple_auth_password"),
]
|
bennylope/django-simple-auth
|
simple_auth/urls.py
|
Python
|
bsd-2-clause
| 156
|
#!/usr/bin/env python3
# Software License Agreement (BSD License)
#
# Copyright (c) 2020, UFACTORY, Inc.
# All rights reserved.
#
# Author: Vinman <vinman.wen@ufactory.cc> <vinman.cub@gmail.com>
import re
import time
import math
import threading
try:
from multiprocessing.pool import ThreadPool
except:
ThreadPool = None
try:
import asyncio
except:
asyncio = None
from .events import Events
from ..core.config.x_config import XCONF
from ..core.comm import SerialPort, SocketPort
from ..core.wrapper import UxbusCmdSer, UxbusCmdTcp
from ..core.utils.log import logger, pretty_print
from ..core.utils import convert
from ..core.config.x_code import ControllerWarn, ControllerError, ControllerErrorCodeMap, ControllerWarnCodeMap
from .utils import xarm_is_connected, compare_time, compare_version, xarm_is_not_simulation_mode, filter_invaild_number, xarm_is_pause, xarm_wait_until_cmdnum_lt_max
from .code import APIState
from ..tools.threads import ThreadManage
from ..version import __version__
controller_error_keys = ControllerErrorCodeMap.keys()
controller_warn_keys = ControllerWarnCodeMap.keys()
print('SDK_VERSION: {}'.format(__version__))
class Base(Events):
def __init__(self, port=None, is_radian=False, do_not_open=False, **kwargs):
if kwargs.get('init', False):
super(Base, self).__init__()
self._port = port
self._debug = kwargs.get('debug', False)
self._baudrate = kwargs.get('baudrate', XCONF.SerialConf.SERIAL_BAUD)
self._timeout = kwargs.get('timeout', None)
self._filters = kwargs.get('filters', None)
self._enable_heartbeat = kwargs.get('enable_heartbeat', False)
self._enable_report = kwargs.get('enable_report', True)
self._report_type = kwargs.get('report_type', 'rich')
self._forbid_uds = kwargs.get('forbid_uds', False)
self._check_tcp_limit = kwargs.get('check_tcp_limit', False)
self._check_joint_limit = kwargs.get('check_joint_limit', True)
self._check_cmdnum_limit = kwargs.get('check_cmdnum_limit', True)
self._check_simulation_mode = kwargs.get('check_simulation_mode', True)
self._max_cmd_num = kwargs.get('max_cmdnum', 512)
if not isinstance(self._max_cmd_num, int):
self._max_cmd_num = 512
self._max_cmd_num = min(XCONF.MAX_CMD_NUM, self._max_cmd_num)
self._check_robot_sn = kwargs.get('check_robot_sn', False)
self._check_is_ready = kwargs.get('check_is_ready', True)
self._check_is_pause = kwargs.get('check_is_pause', True)
self._timed_comm = kwargs.get('timed_comm', True)
self._timed_comm_interval = kwargs.get('timed_comm_interval', 30)
self._timed_comm_t = None
self._timed_comm_t_alive = False
self._max_callback_thread_count = kwargs.get('max_callback_thread_count', 0)
self._asyncio_loop = None
self._asyncio_loop_alive = False
self._asyncio_loop_thread = None
self._pool = None
self._thread_manage = ThreadManage()
self._rewrite_modbus_baudrate_method = kwargs.get('rewrite_modbus_baudrate_method', True)
self._min_tcp_speed, self._max_tcp_speed = 0.1, 1000 # mm/s
self._min_tcp_acc, self._max_tcp_acc = 1.0, 50000 # mm/s^2
self._tcp_jerk = 1000 # mm/s^3
self._min_joint_speed, self._max_joint_speed = 0.01, 4.0 # rad/s
self._min_joint_acc, self._max_joint_acc = 0.01, 20.0 # rad/s^2
self._joint_jerk = 20.0 # rad/s^3
self._rot_jerk = 2.3
self._max_rot_acc = 2.7
self._stream_type = 'serial'
self._stream = None
self.arm_cmd = None
self._stream_report = None
self._report_thread = None
self._only_report_err_warn_changed = True
self._last_position = [201.5, 0, 140.5, 3.1415926, 0, 0] # [x(mm), y(mm), z(mm), roll(rad), pitch(rad), yaw(rad)]
self._last_angles = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] # [servo_1(rad), servo_2(rad), servo_3(rad), servo_4(rad), servo_5(rad), servo_6(rad), servo_7(rad)]
self._last_tcp_speed = 100 # mm/s, rad/s
self._last_tcp_acc = 2000 # mm/s^2, rad/s^2
self._last_joint_speed = 0.3490658503988659 # 20 °/s
self._last_joint_acc = 8.726646259971648 # 500 °/s^2
self._mvtime = 0
self._version = None
self._robot_sn = None
self._control_box_sn = None
self._position = [201.5, 0, 140.5, 3.1415926, 0, 0]
self._pose_aa = [201.5, 0, 140.5, 3.1415926, 0, 0]
self._angles = [0] * 7
self._position_offset = [0] * 6
self._world_offset = [0] * 6
self._state = 4
self._mode = 0
        self._joints_torque = [0, 0, 0, 0, 0, 0, 0]  # joint torques
        self._tcp_load = [0, [0, 0, 0]]  # payload [weight, centre of gravity], i.e. [weight, [x, y, z]]
        self._collision_sensitivity = 0  # collision sensitivity
        self._teach_sensitivity = 0  # teach sensitivity
self._error_code = 0
self._warn_code = 0
self._servo_codes = [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0]]
self._cmd_num = 0
self._arm_type = XCONF.Robot.Type.XARM7_X4
self._arm_axis = XCONF.Robot.Axis.XARM7
axis = kwargs.get('axis', self._arm_axis)
if axis in [5, 6, 7]:
self._arm_axis = axis
arm_type = kwargs.get('type', self._arm_type)
if arm_type in [3, 5, 6, 7, 8]:
self._arm_type = arm_type
self._arm_master_id = 0
self._arm_slave_id = 0
self._arm_motor_tid = 0
self._arm_motor_fid = 0
self._arm_motor_brake_states = [-1, -1, -1, -1, -1, -1, -1, -1] # [motor-1-brake-state, ..., motor-7-brake, reserved]
self._arm_motor_enable_states = [-1, -1, -1, -1, -1, -1, -1, -1] # [motor-1-enable-state, ..., motor-7-enable, reserved]
self._gravity_direction = [0, 0, -1]
self._is_ready = False
self._is_sync = False
self._is_first_report = True
self._first_report_over = False
self._default_is_radian = is_radian
self._sleep_finish_time = time.time()
self._is_old_protocol = False
        self._major_version_number = 0  # firmware major version number
        self._minor_version_number = 0  # firmware minor version number
        self._revision_version_number = 0  # firmware revision version number
self._temperatures = [0, 0, 0, 0, 0, 0, 0]
self._voltages = [0, 0, 0, 0, 0, 0, 0]
self._currents = [0, 0, 0, 0, 0, 0, 0]
self._is_set_move = False
self._pause_cond = threading.Condition()
self._pause_lock = threading.Lock()
self._pause_cnts = 0
self._realtime_tcp_speed = 0
self._realtime_joint_speeds = [0, 0, 0, 0, 0, 0, 0]
self._count = -1
self._last_report_time = time.time()
self._max_report_interval = 0
self._cgpio_reset_enable = 0
self._tgpio_reset_enable = 0
self._cgpio_states = [0, 0, 256, 65533, 0, 65280, 0, 0, 0.0, 0.0, [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0]]
self._iden_progress = 0
self._ignore_error = False
self._ignore_state = False
self.modbus_baud = -1
self.gripper_is_enabled = False
self.gripper_speed = 0
self.gripper_version_numbers = [-1, -1, -1]
self.bio_gripper_is_enabled = False
self.bio_gripper_speed = 0
self.bio_gripper_error_code = 0
self.robotiq_is_activated = False
self._cmd_timeout = XCONF.UxbusConf.SET_TIMEOUT / 1000
self._is_collision_detection = 1
self._collision_tool_type = 0
self._collision_tool_params = [0, 0, 0, 0, 0, 0]
self._is_simulation_robot = False
self._last_update_err_time = 0
self._last_update_state_time = 0
self._last_update_cmdnum_time = 0
self._arm_type_is_1300 = False
self._control_box_type_is_1300 = False
self.linear_track_baud = -1
self.linear_track_speed = 1
self.linear_track_is_enabled = False
self._ft_ext_force = [0, 0, 0, 0, 0, 0]
self._ft_raw_force = [0, 0, 0, 0, 0, 0]
self._has_motion_cmd = False
self._need_sync = False
if not do_not_open:
self.connect()
def _init(self):
self._last_position = [201.5, 0, 140.5, 3.1415926, 0, 0] # [x(mm), y(mm), z(mm), roll(rad), pitch(rad), yaw(rad)]
self._last_angles = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] # [servo_1(rad), servo_2(rad), servo_3(rad), servo_4(rad), servo_5(rad), servo_6(rad), servo_7(rad)]
self._last_tcp_speed = 100 # mm/s, rad/s
self._last_tcp_acc = 2000 # mm/s^2, rad/s^2
self._last_joint_speed = 0.3490658503988659 # 20 °/s
self._last_joint_acc = 8.726646259971648 # 500 °/s^2
self._mvtime = 0
self._version = None
self._robot_sn = None
self._control_box_sn = None
self._position = [201.5, 0, 140.5, 3.1415926, 0, 0]
self._pose_aa = [201.5, 0, 140.5, 3.1415926, 0, 0]
self._angles = [0] * 7
self._position_offset = [0] * 6
self._world_offset = [0] * 6
self._state = 4
self._mode = 0
        self._joints_torque = [0, 0, 0, 0, 0, 0, 0]  # joint torques
        self._tcp_load = [0, [0, 0, 0]]  # payload [weight, centre of gravity], i.e. [weight, [x, y, z]]
        self._collision_sensitivity = 0  # collision sensitivity
        self._teach_sensitivity = 0  # teach sensitivity
self._error_code = 0
self._warn_code = 0
self._servo_codes = [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0]]
self._cmd_num = 0
self._arm_master_id = 0
self._arm_slave_id = 0
self._arm_motor_tid = 0
self._arm_motor_fid = 0
self._arm_motor_brake_states = [-1, -1, -1, -1, -1, -1, -1,
-1] # [motor-1-brake-state, ..., motor-7-brake, reserved]
self._arm_motor_enable_states = [-1, -1, -1, -1, -1, -1, -1,
-1] # [motor-1-enable-state, ..., motor-7-enable, reserved]
self._gravity_direction = [0, 0, -1]
self._is_ready = False
self._is_sync = False
self._is_first_report = True
self._first_report_over = False
self._sleep_finish_time = time.time()
self._is_old_protocol = False
        self._major_version_number = 0  # firmware major version number
        self._minor_version_number = 0  # firmware minor version number
        self._revision_version_number = 0  # firmware revision version number
self._temperatures = [0, 0, 0, 0, 0, 0, 0]
self._voltages = [0, 0, 0, 0, 0, 0, 0]
self._currents = [0, 0, 0, 0, 0, 0, 0]
self._is_set_move = False
self._pause_cond = threading.Condition()
self._pause_lock = threading.Lock()
self._pause_cnts = 0
self._realtime_tcp_speed = 0
self._realtime_joint_speeds = [0, 0, 0, 0, 0, 0, 0]
self._count = -1
self._last_report_time = time.time()
self._max_report_interval = 0
self._cgpio_reset_enable = 0
self._tgpio_reset_enable = 0
self._cgpio_states = [0, 0, 256, 65533, 0, 65280, 0, 0, 0.0, 0.0, [0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
self._iden_progress = 0
self._ignore_error = False
self._ignore_state = False
self.modbus_baud = -1
self.gripper_is_enabled = False
self.gripper_speed = 0
self.gripper_version_numbers = [-1, -1, -1]
self.bio_gripper_is_enabled = False
self.bio_gripper_speed = 0
self.bio_gripper_error_code = 0
self.robotiq_is_activated = False
self._cmd_timeout = XCONF.UxbusConf.SET_TIMEOUT / 1000
self._is_collision_detection = 1
self._collision_tool_type = 0
self._collision_tool_params = [0, 0, 0, 0, 0, 0]
self._is_simulation_robot = False
self._last_update_err_time = 0
self._last_update_state_time = 0
self._last_update_cmdnum_time = 0
self._arm_type_is_1300 = False
self._control_box_type_is_1300 = False
self.linear_track_baud = -1
self.linear_track_speed = 1
self.linear_track_is_enabled = False
self._ft_ext_force = [0, 0, 0, 0, 0, 0]
self._ft_raw_force = [0, 0, 0, 0, 0, 0]
self._has_motion_cmd = False
self._need_sync = False
@staticmethod
def log_api_info(msg, *args, code=0, **kwargs):
if code == 0:
logger.info(msg, *args, **kwargs)
else:
logger.error(msg, *args, **kwargs)
def _check_version(self, is_first=False):
if is_first:
self._version = None
self._robot_sn = None
self._control_box_sn = None
try:
if not self._version:
self.get_version()
if is_first:
fail_cnt = 0
while not self._version and fail_cnt < 100:
code, _ = self.get_version()
fail_cnt += 1 if code != 0 else 0
if code != 0 or not self._version:
time.sleep(0.1)
if not self._version and fail_cnt >= 100:
logger.error('failed to get version')
return -2
if self._version and isinstance(self._version, str):
pattern = re.compile(
r'.*(\d+),(\d+),(\S+),(\S+),.*[vV](\d+)\.(\d+)\.(\d+)')
m = re.match(pattern, self._version)
if m:
(xarm_axis, xarm_type, xarm_sn, ac_version,
major_version_number,
minor_version_number,
revision_version_number) = m.groups()
self._arm_axis = int(xarm_axis)
self._arm_type = int(xarm_type)
self._major_version_number = int(major_version_number)
self._minor_version_number = int(minor_version_number)
self._revision_version_number = int(revision_version_number)
self._robot_sn = xarm_sn
self._control_box_sn = ac_version.strip()
self._arm_type_is_1300 = int(xarm_sn[2:6]) >= 1300 if xarm_sn[2:6].isdigit() else False
self._control_box_type_is_1300 = int(ac_version[2:6]) >= 1300 if ac_version[2:6].isdigit() else False
else:
pattern = re.compile(r'.*[vV](\d+)\.(\d+)\.(\d+)')
m = re.match(pattern, self._version)
if m:
(self._major_version_number,
self._minor_version_number,
self._revision_version_number) = map(int, m.groups())
else:
version_date = '-'.join(self._version.split('-')[-3:])
self._is_old_protocol = compare_time('2019-02-01', version_date)
if self._is_old_protocol:
self._major_version_number = 0
self._minor_version_number = 0
self._revision_version_number = 1
else:
self._major_version_number = 0
self._minor_version_number = 1
self._revision_version_number = 0
if is_first:
if self._check_robot_sn:
count = 2
self.get_robot_sn()
while not self._robot_sn and count and self.warn_code == 0:
self.get_robot_sn()
self.get_err_warn_code()
if not self._robot_sn and self.warn_code == 0 and count:
time.sleep(0.1)
count -= 1
if self.warn_code != 0:
self.clean_warn()
print('FIRMWARE_VERSION: v{}, PROTOCOL: {}, DETAIL: {}'.format(
'{}.{}.{}'.format(self._major_version_number, self._minor_version_number, self._revision_version_number),
'V0' if self._is_old_protocol else 'V1', self._version
))
return 0
except Exception as e:
print('compare_time: {}, {}'.format(self._version, e))
return -1
@property
def realtime_tcp_speed(self):
return self._realtime_tcp_speed
@property
def realtime_joint_speeds(self):
return [speed if self._default_is_radian else math.degrees(speed) for speed in self._realtime_joint_speeds]
@property
def version_number(self):
return self._major_version_number, self._minor_version_number, self._revision_version_number
@property
def connected(self):
return self._stream and self._stream.connected
@property
def ready(self):
return self._is_ready
@property
def default_is_radian(self):
return self._default_is_radian
@property
def is_simulation_robot(self):
return self._is_simulation_robot
def check_is_simulation_robot(self):
return self._check_simulation_mode and self.is_simulation_robot
# return self._check_simulation_mode and self.mode != 4
@property
def version(self):
if not self._version:
self.get_version()
return self._version
# return 'v' + '.'.join(map(str, self.version_number))
@property
def sn(self):
return self._robot_sn
@property
def control_box_sn(self):
return self._control_box_sn
@property
def position(self):
if not self._enable_report:
self.get_position()
return [math.degrees(self._position[i]) if 2 < i < 6 and not self._default_is_radian
else self._position[i] for i in range(len(self._position))]
@property
def position_aa(self):
if not self._enable_report:
self.get_position_aa()
return [math.degrees(self._pose_aa[i]) if 2 < i < 6 and not self._default_is_radian
else self._pose_aa[i] for i in range(len(self._pose_aa))]
@property
def tcp_jerk(self):
return self._tcp_jerk
@property
def tcp_speed_limit(self):
return [self._min_tcp_speed, self._max_tcp_speed]
@property
def tcp_acc_limit(self):
return [self._min_tcp_acc, self._max_tcp_acc]
@property
def last_used_position(self):
return [math.degrees(self._last_position[i]) if 2 < i < 6 and not self._default_is_radian
else self._last_position[i] for i in range(len(self._last_position))]
@property
def last_used_tcp_speed(self):
return self._last_tcp_speed
@property
def last_used_tcp_acc(self):
return self._last_tcp_acc
@property
def angles(self):
if not self._enable_report:
self.get_servo_angle()
return [angle if self._default_is_radian else math.degrees(angle) for angle in self._angles]
@property
def joint_jerk(self):
return self._joint_jerk if self._default_is_radian else math.degrees(self._joint_jerk)
@property
def joint_speed_limit(self):
limit = [self._min_joint_speed, self._max_joint_speed]
if not self._default_is_radian:
limit = [math.degrees(i) for i in limit]
return limit
@property
def joint_acc_limit(self):
limit = [self._min_joint_acc, self._max_joint_acc]
if not self._default_is_radian:
limit = [math.degrees(i) for i in limit]
return limit
@property
def last_used_angles(self):
return [angle if self._default_is_radian else math.degrees(angle) for angle in self._last_angles]
@property
def last_used_joint_speed(self):
return self._last_joint_speed if self._default_is_radian else math.degrees(self._last_joint_speed)
@property
def last_used_joint_acc(self):
return self._last_joint_acc if self._default_is_radian else math.degrees(self._last_joint_acc)
@property
def position_offset(self):
return [math.degrees(self._position_offset[i]) if 2 < i < 6 and not self._default_is_radian
else self._position_offset[i] for i in range(len(self._position_offset))]
@property
def world_offset(self):
return [math.degrees(self._world_offset[i]) if 2 < i < 6 and not self._default_is_radian
else self._world_offset[i] for i in range(len(self._world_offset))]
@property
def state(self):
if not self._enable_report:
self.get_state()
return self._state
@property
def mode(self):
return self._mode
@property
def joints_torque(self):
return self._joints_torque
@property
def tcp_load(self):
return self._tcp_load
@property
def collision_sensitivity(self):
return self._collision_sensitivity
@property
def teach_sensitivity(self):
return self._teach_sensitivity
@property
def motor_brake_states(self):
return self._arm_motor_brake_states
@property
def motor_enable_states(self):
return self._arm_motor_enable_states
@property
def temperatures(self):
return self._temperatures
@property
def error_code(self):
if not self._enable_report:
self.get_err_warn_code()
return self._error_code
@property
def warn_code(self):
if not self._enable_report:
self.get_err_warn_code()
return self._warn_code
@property
def has_error(self):
return self.error_code != 0
@property
def has_warn(self):
return self.warn_code != 0
@property
def has_err_warn(self):
return self.has_error or self._warn_code != 0 or (self.arm_cmd and self.arm_cmd.has_err_warn)
@property
def cmd_num(self):
if not self._enable_report:
self.get_cmdnum()
return self._cmd_num
@property
def device_type(self):
return self._arm_type
@property
def axis(self):
return self._arm_axis
@property
def master_id(self):
return self._arm_master_id
@property
def slave_id(self):
return self._arm_slave_id
@property
def motor_tid(self):
return self._arm_motor_tid
@property
def motor_fid(self):
return self._arm_motor_fid
@property
def gravity_direction(self):
return self._gravity_direction
@property
def gpio_reset_config(self):
return [self._cgpio_reset_enable, self._tgpio_reset_enable]
@property
def count(self):
return self._count
@property
def servo_codes(self):
return self._servo_codes
@property
def is_stop(self):
return self.state in [4, 5]
@property
def voltages(self):
return self._voltages
@property
def currents(self):
return self._currents
@property
def cgpio_states(self):
return self._cgpio_states
@property
def self_collision_params(self):
return [self._is_collision_detection, self._collision_tool_type, self._collision_tool_params]
@property
def ft_ext_force(self):
return self._ft_ext_force
@property
def ft_raw_force(self):
return self._ft_raw_force
def version_is_ge(self, major, minor=0, revision=0):
if self._version is None:
self._check_version()
return self._major_version_number > major or (
self._major_version_number == major and self._minor_version_number > minor) or (
self._major_version_number == major and self._minor_version_number == minor and
self._revision_version_number >= revision)
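        # e.g. version_is_ge(1, 5, 20) is True for firmware v1.6.0 or v1.5.20
        # and False for v1.5.19.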
def check_is_pause(self):
if self._check_is_pause:
if self.state == 3 and self._enable_report:
with self._pause_cond:
with self._pause_lock:
self._pause_cnts += 1
self._pause_cond.wait()
with self._pause_lock:
self._pause_cnts -= 1
@property
def state_is_ready(self):
if self._check_is_ready and not self.version_is_ge(1, 5, 20):
return self.ready
else:
return True
def _timed_comm_thread(self):
self._timed_comm_t_alive = True
cnt = 0
while self.connected and self._timed_comm_t_alive:
if self.arm_cmd and time.time() - self.arm_cmd.last_comm_time > self._timed_comm_interval:
try:
if cnt == 0:
code, _ = self.get_cmdnum()
elif cnt == 1:
code, _ = self.get_state()
else:
code, _ = self.get_err_warn_code()
cnt = (cnt + 1) % 3
except:
pass
time.sleep(0.5)
def _clean_thread(self):
self._thread_manage.join(1)
if self._pool:
try:
self._pool.close()
self._pool.join()
except:
pass
def connect(self, port=None, baudrate=None, timeout=None, axis=None, arm_type=None):
if self.connected:
return
if axis in [5, 6, 7]:
self._arm_axis = axis
if arm_type in [3, 5, 6, 7]:
self._arm_type = arm_type
self._is_ready = True
self._port = port if port is not None else self._port
self._baudrate = baudrate if baudrate is not None else self._baudrate
self._timeout = timeout if timeout is not None else self._timeout
if not self._port:
            raise Exception('cannot connect to port/ip {}'.format(self._port))
if self._timed_comm_t is not None:
try:
self._timed_comm_t_alive = False
self._timed_comm_t.join()
self._timed_comm_t = None
except:
pass
self._is_first_report = True
self._first_report_over = False
self._init()
if isinstance(self._port, (str, bytes)):
if self._port == 'localhost' or re.match(
r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
self._port):
self._stream = SocketPort(self._port, XCONF.SocketConf.TCP_CONTROL_PORT,
heartbeat=self._enable_heartbeat,
buffer_size=XCONF.SocketConf.TCP_CONTROL_BUF_SIZE, forbid_uds=self._forbid_uds)
if not self.connected:
raise Exception('connect socket failed')
self._report_error_warn_changed_callback()
self.arm_cmd = UxbusCmdTcp(self._stream)
self._stream_type = 'socket'
try:
if self._timed_comm:
self._timed_comm_t = threading.Thread(target=self._timed_comm_thread, daemon=True)
self._timed_comm_t.start()
except:
pass
self._stream_report = None
try:
self._connect_report()
except:
self._stream_report = None
if self._check_version(is_first=True) < 0:
self.disconnect()
raise Exception('failed to check version, close')
self.arm_cmd.set_debug(self._debug)
if self._max_callback_thread_count < 0 and asyncio is not None:
self._asyncio_loop = asyncio.new_event_loop()
self._asyncio_loop_thread = threading.Thread(target=self._run_asyncio_loop, daemon=True)
self._thread_manage.append(self._asyncio_loop_thread)
self._asyncio_loop_thread.start()
elif self._max_callback_thread_count > 0 and ThreadPool is not None:
self._pool = ThreadPool(self._max_callback_thread_count)
if self._stream.connected and self._enable_report:
self._report_thread = threading.Thread(target=self._report_thread_handle, daemon=True)
self._report_thread.start()
self._thread_manage.append(self._report_thread)
self._report_connect_changed_callback()
else:
self._stream = SerialPort(self._port)
if not self.connected:
                raise Exception('connect serial failed')
self._report_error_warn_changed_callback()
self.arm_cmd = UxbusCmdSer(self._stream)
self._stream_type = 'serial'
if self._max_callback_thread_count < 0 and asyncio is not None:
self._asyncio_loop = asyncio.new_event_loop()
self._asyncio_loop_thread = threading.Thread(target=self._run_asyncio_loop, daemon=True)
self._thread_manage.append(self._asyncio_loop_thread)
self._asyncio_loop_thread.start()
elif self._max_callback_thread_count > 0 and ThreadPool is not None:
self._pool = ThreadPool(self._max_callback_thread_count)
if self._enable_report:
self._report_thread = threading.Thread(target=self._auto_get_report_thread, daemon=True)
self._report_thread.start()
self._report_connect_changed_callback(True, True)
self._thread_manage.append(self._report_thread)
else:
self._report_connect_changed_callback(True, False)
self._check_version(is_first=True)
self.arm_cmd.set_debug(self._debug)
self.set_timeout(self._cmd_timeout)
if self._rewrite_modbus_baudrate_method:
setattr(self.arm_cmd, 'set_modbus_baudrate_old', self.arm_cmd.set_modbus_baudrate)
setattr(self.arm_cmd, 'set_modbus_baudrate', self._core_set_modbus_baudrate)
if asyncio:
def _run_asyncio_loop(self):
@asyncio.coroutine
def _asyncio_loop():
logger.debug('asyncio thread start ...')
while self.connected:
yield from asyncio.sleep(0.001)
logger.debug('asyncio thread exit ...')
try:
asyncio.set_event_loop(self._asyncio_loop)
self._asyncio_loop_alive = True
self._asyncio_loop.run_until_complete(_asyncio_loop())
except Exception as e:
pass
self._asyncio_loop_alive = False
@staticmethod
@asyncio.coroutine
def _async_run_callback(callback, msg):
yield from callback(msg)
def _run_callback(self, callback, msg, name='', enable_callback_thread=True):
try:
if self._asyncio_loop_alive and enable_callback_thread:
coroutine = self._async_run_callback(callback, msg)
asyncio.run_coroutine_threadsafe(coroutine, self._asyncio_loop)
elif self._pool is not None and enable_callback_thread:
self._pool.apply_async(callback, args=(msg,))
else:
callback(msg)
except Exception as e:
logger.error('run {} callback exception: {}'.format(name, e))
def _core_set_modbus_baudrate(self, baudrate, use_old=False):
"""
此函数是用于覆盖core.set_modbus_baudrate方法,主要用于兼容旧代码
新代码建议直接使用set_tgpio_modbus_baudrate此接口
:param baudrate:
:param use_old:
为True时调用原来的core.set_modbus_baudrate方法
为False时使用新的set_tgpio_modbus_baudrate
:return [code, ...]
"""
if not use_old:
ret = self.set_tgpio_modbus_baudrate(baudrate)
return [ret, self.modbus_baud]
else:
return self.arm_cmd.set_modbus_baudrate_old(baudrate)
def disconnect(self):
try:
self._stream.close()
except:
pass
if self._stream_report:
try:
self._stream_report.close()
except:
pass
self._is_ready = False
try:
self._stream.join()
except:
pass
if self._stream_report:
try:
self._stream_report.join()
except:
pass
self._report_connect_changed_callback(False, False)
with self._pause_cond:
            self._pause_cond.notify_all()
self._clean_thread()
def set_timeout(self, timeout):
self._cmd_timeout = timeout
if self.arm_cmd is not None:
self._cmd_timeout = self.arm_cmd.set_timeout(self._cmd_timeout)
return self._cmd_timeout
def _connect_report(self):
if self._enable_report:
if self._stream_report:
try:
self._stream_report.close()
except:
pass
time.sleep(2)
if self._report_type == 'real':
self._stream_report = SocketPort(
self._port, XCONF.SocketConf.TCP_REPORT_REAL_PORT,
buffer_size=1024 if not self._is_old_protocol else 87,
forbid_uds=self._forbid_uds)
elif self._report_type == 'normal':
self._stream_report = SocketPort(
self._port, XCONF.SocketConf.TCP_REPORT_NORM_PORT,
buffer_size=XCONF.SocketConf.TCP_REPORT_NORMAL_BUF_SIZE if not self._is_old_protocol else 87,
forbid_uds=self._forbid_uds)
else:
self._stream_report = SocketPort(
self._port, XCONF.SocketConf.TCP_REPORT_RICH_PORT,
buffer_size=1024 if not self._is_old_protocol else 187,
forbid_uds=self._forbid_uds)
def __report_callback(self, report_id, item, name=''):
if report_id in self._report_callbacks.keys():
for callback in self._report_callbacks[report_id]:
self._run_callback(callback, item, name=name)
def _report_connect_changed_callback(self, main_connected=None, report_connected=None):
if self.REPORT_CONNECT_CHANGED_ID in self._report_callbacks.keys():
for callback in self._report_callbacks[self.REPORT_CONNECT_CHANGED_ID]:
self._run_callback(callback, {
'connected': self._stream and self._stream.connected if main_connected is None else main_connected,
'reported': self._stream_report and self._stream_report.connected if report_connected is None else report_connected,
}, name='connect_changed')
def _report_state_changed_callback(self):
if self._ignore_state:
return
self.__report_callback(self.REPORT_STATE_CHANGED_ID, {'state': self._state}, name='state_changed')
def _report_mode_changed_callback(self):
self.__report_callback(self.REPORT_MODE_CHANGED_ID, {'mode': self._mode}, name='mode_changed')
def _report_mtable_mtbrake_changed_callback(self):
self.__report_callback(self.REPORT_MTABLE_MTBRAKE_CHANGED_ID, {
'mtable': [bool(i) for i in self._arm_motor_enable_states],
'mtbrake': [bool(i) for i in self._arm_motor_brake_states]
}, name='(mtable/mtbrake)_changed')
def _report_error_warn_changed_callback(self):
if self._ignore_error:
return
self.__report_callback(self.REPORT_ERROR_WARN_CHANGED_ID, {
'warn_code': self._warn_code,
'error_code': self._error_code,
}, name='(error/warn)_changed')
def _report_cmdnum_changed_callback(self):
self.__report_callback(self.REPORT_CMDNUM_CHANGED_ID, {
'cmdnum': self._cmd_num
}, name='cmdnum_changed')
def _report_temperature_changed_callback(self):
self.__report_callback(self.REPORT_TEMPERATURE_CHANGED_ID, {
'temperatures': self.temperatures
}, name='temperature_changed')
def _report_count_changed_callback(self):
self.__report_callback(self.REPORT_COUNT_CHANGED_ID, {'count': self._count}, name='count_changed')
def _report_iden_progress_changed_callback(self):
self.__report_callback(self.REPORT_IDEN_PROGRESS_CHANGED_ID, {'progress': self._iden_progress}, name='iden_progress_changed')
def _report_location_callback(self):
if self.REPORT_LOCATION_ID in self._report_callbacks.keys():
for item in self._report_callbacks[self.REPORT_LOCATION_ID]:
callback = item['callback']
ret = {}
if item['cartesian']:
ret['cartesian'] = self.position.copy()
if item['joints']:
ret['joints'] = self.angles.copy()
self._run_callback(callback, ret, name='location')
def _report_callback(self):
if self.REPORT_ID in self._report_callbacks.keys():
for item in self._report_callbacks[self.REPORT_ID]:
callback = item['callback']
ret = {}
if item['cartesian']:
ret['cartesian'] = self.position.copy()
if item['joints']:
ret['joints'] = self.angles.copy()
if item['error_code']:
ret['error_code'] = self._error_code
if item['warn_code']:
ret['warn_code'] = self._warn_code
if item['state']:
ret['state'] = self._state
if item['mtable']:
mtable = [bool(i) for i in self._arm_motor_enable_states]
ret['mtable'] = mtable.copy()
if item['mtbrake']:
mtbrake = [bool(i) for i in self._arm_motor_brake_states]
ret['mtbrake'] = mtbrake.copy()
if item['cmdnum']:
ret['cmdnum'] = self._cmd_num
self._run_callback(callback, ret, name='report')
def _report_thread_handle(self):
main_socket_connected = self._stream and self._stream.connected
report_socket_connected = self._stream_report and self._stream_report.connected
while self.connected:
try:
if not self._stream_report or not self._stream_report.connected:
self.get_err_warn_code()
if report_socket_connected:
report_socket_connected = False
self._report_connect_changed_callback(main_socket_connected, report_socket_connected)
self._connect_report()
continue
if not report_socket_connected:
report_socket_connected = True
self._report_connect_changed_callback(main_socket_connected, report_socket_connected)
recv_data = self._stream_report.read(1)
if recv_data != -1:
size = convert.bytes_to_u32(recv_data)
if self._is_old_protocol and size > 256:
self._is_old_protocol = False
self._handle_report_data(recv_data)
else:
if self.connected:
code, err_warn = self.get_err_warn_code()
if code == -1 or code == 3:
break
if not self.connected:
break
elif not self._stream_report or not self._stream_report.connected:
self._connect_report()
except Exception as e:
logger.error(e)
if self.connected:
code, err_warn = self.get_err_warn_code()
if code == -1 or code == 3:
break
if not self.connected:
break
if not self._stream_report or not self._stream_report.connected:
self._connect_report()
time.sleep(0.001)
self.disconnect()
def _handle_report_data(self, data):
def __handle_report_normal_old(rx_data):
# print('length:', convert.bytes_to_u32(rx_data[0:4]))
state, mtbrake, mtable, error_code, warn_code = rx_data[4:9]
angles = convert.bytes_to_fp32s(rx_data[9:7 * 4 + 9], 7)
pose = convert.bytes_to_fp32s(rx_data[37:6 * 4 + 37], 6)
cmd_num = convert.bytes_to_u16(rx_data[61:63])
pose_offset = convert.bytes_to_fp32s(rx_data[63:6 * 4 + 63], 6)
if error_code != self._error_code or warn_code != self._warn_code:
if error_code != self._error_code:
self._error_code = error_code
if self._error_code != 0:
pretty_print('Error, code: {}'.format(self._error_code), color='red')
else:
                        pretty_print('Error has been cleared', color='blue')
if warn_code != self._warn_code:
self._warn_code = warn_code
if self._warn_code != 0:
pretty_print('Warn, code: {}'.format(self._warn_code), color='yellow')
else:
                        pretty_print('Warning has been cleared', color='blue')
self._report_error_warn_changed_callback()
logger.info('OnReport -> err={}, warn={}, state={}, cmdnum={}, mtbrake={}, mtable={}'.format(
error_code, warn_code, state, cmd_num, mtbrake, mtable
))
elif not self._only_report_err_warn_changed:
self._report_error_warn_changed_callback()
if cmd_num != self._cmd_num:
self._cmd_num = cmd_num
self._report_cmdnum_changed_callback()
if state != self._state:
self._state = state
self._report_state_changed_callback()
mtbrake = [mtbrake & 0x01, mtbrake >> 1 & 0x01, mtbrake >> 2 & 0x01, mtbrake >> 3 & 0x01,
mtbrake >> 4 & 0x01, mtbrake >> 5 & 0x01, mtbrake >> 6 & 0x01, mtbrake >> 7 & 0x01]
mtable = [mtable & 0x01, mtable >> 1 & 0x01, mtable >> 2 & 0x01, mtable >> 3 & 0x01,
mtable >> 4 & 0x01, mtable >> 5 & 0x01, mtable >> 6 & 0x01, mtable >> 7 & 0x01]
if mtbrake != self._arm_motor_brake_states or mtable != self._arm_motor_enable_states:
self._arm_motor_enable_states = mtable
self._arm_motor_brake_states = mtbrake
self._report_mtable_mtbrake_changed_callback()
if not self._is_first_report:
if state in [4, 5] or not all([bool(item[0] & item[1]) for item in zip(mtbrake, mtable)][:self.axis]):
# if self._is_ready:
# pretty_print('[report], xArm is not ready to move', color='red')
self._is_ready = False
else:
# if not self._is_ready:
# pretty_print('[report], xArm is ready to move', color='green')
self._is_ready = True
else:
self._is_ready = False
self._is_first_report = False
if not self._is_ready:
self._sleep_finish_time = 0
reset_tgpio_params = False
reset_linear_track_params = False
if 0 < error_code <= 17:
reset_tgpio_params = True
reset_linear_track_params = True
elif error_code in [19, 28]:
reset_tgpio_params = True
elif error_code == 111:
reset_linear_track_params = True
if reset_tgpio_params:
self.modbus_baud = -1
self.robotiq_is_activated = False
self.gripper_is_enabled = False
self.bio_gripper_is_enabled = False
self.bio_gripper_speed = 0
                self.gripper_speed = 0
self.gripper_version_numbers = [-1, -1, -1]
if reset_linear_track_params:
self.linear_track_baud = -1
self.linear_track_is_enabled = False
self.linear_track_speed = 1
# if error_code in [1, 10, 11, 12, 13, 14, 15, 16, 17, 19, 28]:
# self.modbus_baud = -1
# self.robotiq_is_activated = False
# self.gripper_is_enabled = False
# self.bio_gripper_is_enabled = False
# self.bio_gripper_speed = 0
# self.gripper_is_enabled = False
# self.gripper_speed = 0
# self.gripper_version_numbers = [-1, -1, -1]
# self.linear_track_is_enabled = False
# self.linear_track_speed = 0
self._error_code = error_code
self._warn_code = warn_code
self.arm_cmd.has_err_warn = error_code != 0 or warn_code != 0
_state = self._state
self._state = state
if self.state != 3 and (_state == 3 or self._pause_cnts > 0):
with self._pause_cond:
self._pause_cond.notifyAll()
self._cmd_num = cmd_num
self._arm_motor_brake_states = mtbrake
self._arm_motor_enable_states = mtable
update_time = time.time()
self._last_update_cmdnum_time = update_time
self._last_update_state_time = update_time
self._last_update_err_time = update_time
for i in range(len(pose)):
pose[i] = filter_invaild_number(pose[i], 3 if i < 3 else 6, default=self._position[i])
for i in range(len(angles)):
angles[i] = filter_invaild_number(angles[i], 6, default=self._angles[i])
for i in range(len(pose_offset)):
pose_offset[i] = filter_invaild_number(pose_offset[i], 3 if i < 3 else 6, default=self._position_offset[i])
if not (0 < self._error_code <= 17):
self._position = pose
if not (0 < self._error_code <= 17):
self._angles = angles
if not (0 < self._error_code <= 17):
self._position_offset = pose_offset
self._report_location_callback()
self._report_callback()
if not self._is_sync and self._error_code == 0 and self._state not in [4, 5]:
self._sync()
self._is_sync = True
def __handle_report_rich_old(rx_data):
report_time = time.time()
interval = report_time - self._last_report_time
self._max_report_interval = max(self._max_report_interval, interval)
self._last_report_time = report_time
            __handle_report_normal_old(rx_data)
(self._arm_type,
arm_axis,
self._arm_master_id,
self._arm_slave_id,
self._arm_motor_tid,
self._arm_motor_fid) = rx_data[87:93]
if 7 >= arm_axis >= 5:
self._arm_axis = arm_axis
if self._arm_type == 5:
self._arm_axis = 5
elif self._arm_type == 6:
self._arm_axis = 6
elif self._arm_type == 3:
self._arm_axis = 7
ver_msg = rx_data[93:122]
# self._version = str(ver_msg, 'utf-8')
trs_msg = convert.bytes_to_fp32s(rx_data[123:143], 5)
# trs_msg = [i[0] for i in trs_msg]
(self._tcp_jerk,
self._min_tcp_acc,
self._max_tcp_acc,
self._min_tcp_speed,
self._max_tcp_speed) = trs_msg
# print('tcp_jerk: {}, min_acc: {}, max_acc: {}, min_speed: {}, max_speed: {}'.format(
# self._tcp_jerk, self._min_tcp_acc, self._max_tcp_acc, self._min_tcp_speed, self._max_tcp_speed
# ))
p2p_msg = convert.bytes_to_fp32s(rx_data[143:163], 5)
# p2p_msg = [i[0] for i in p2p_msg]
(self._joint_jerk,
self._min_joint_acc,
self._max_joint_acc,
self._min_joint_speed,
self._max_joint_speed) = p2p_msg
# print('joint_jerk: {}, min_acc: {}, max_acc: {}, min_speed: {}, max_speed: {}'.format(
# self._joint_jerk, self._min_joint_acc, self._max_joint_acc,
# self._min_joint_speed, self._max_joint_speed
# ))
rot_msg = convert.bytes_to_fp32s(rx_data[163:171], 2)
# rot_msg = [i[0] for i in rot_msg]
self._rot_jerk, self._max_rot_acc = rot_msg
# print('rot_jerk: {}, mac_acc: {}'.format(self._rot_jerk, self._max_rot_acc))
sv3_msg = convert.bytes_to_u16s(rx_data[171:187], 8)
self._first_report_over = True
def __handle_report_real(rx_data):
state, mode = rx_data[4] & 0x0F, rx_data[4] >> 4
cmd_num = convert.bytes_to_u16(rx_data[5:7])
angles = convert.bytes_to_fp32s(rx_data[7:7 * 4 + 7], 7)
pose = convert.bytes_to_fp32s(rx_data[35:6 * 4 + 35], 6)
torque = convert.bytes_to_fp32s(rx_data[59:7 * 4 + 59], 7)
if cmd_num != self._cmd_num:
self._cmd_num = cmd_num
self._report_cmdnum_changed_callback()
if state != self._state:
self._state = state
self._report_state_changed_callback()
if state in [4, 5]:
self._is_ready = False
else:
self._is_ready = True
if mode != self._mode:
self._mode = mode
self._report_mode_changed_callback()
for i in range(len(pose)):
pose[i] = filter_invaild_number(pose[i], 3 if i < 3 else 6, default=self._position[i])
for i in range(len(angles)):
angles[i] = filter_invaild_number(angles[i], 6, default=self._angles[i])
if not (0 < self._error_code <= 17):
self._position = pose
if not (0 < self._error_code <= 17):
self._angles = angles
self._joints_torque = torque
self._report_location_callback()
self._report_callback()
if not self._is_sync and self._state not in [4, 5]:
self._sync()
self._is_sync = True
length = len(rx_data)
if length >= 135:
# FT_SENSOR
self._ft_ext_force = convert.bytes_to_fp32s(rx_data[87:111], 6)
self._ft_raw_force = convert.bytes_to_fp32s(rx_data[111:135], 6)
def __handle_report_normal(rx_data):
# print('length:', convert.bytes_to_u32(rx_data[0:4]), len(rx_data))
state, mode = rx_data[4] & 0x0F, rx_data[4] >> 4
# if state != self._state or mode != self._mode:
# print('mode: {}, state={}, time={}'.format(mode, state, time.time()))
cmd_num = convert.bytes_to_u16(rx_data[5:7])
angles = convert.bytes_to_fp32s(rx_data[7:7 * 4 + 7], 7)
pose = convert.bytes_to_fp32s(rx_data[35:6 * 4 + 35], 6)
torque = convert.bytes_to_fp32s(rx_data[59:7 * 4 + 59], 7)
mtbrake, mtable, error_code, warn_code = rx_data[87:91]
pose_offset = convert.bytes_to_fp32s(rx_data[91:6 * 4 + 91], 6)
tcp_load = convert.bytes_to_fp32s(rx_data[115:4 * 4 + 115], 4)
collis_sens, teach_sens = rx_data[131:133]
# if (collis_sens not in list(range(6)) or teach_sens not in list(range(6))) \
# and ((error_code != 0 and error_code not in controller_error_keys) or (warn_code != 0 and warn_code not in controller_warn_keys)):
# self._stream_report.close()
# logger.warn('ReportDataException: data={}'.format(rx_data))
# return
length = convert.bytes_to_u32(rx_data[0:4])
data_len = len(rx_data)
if (length != data_len and (length != 233 or data_len != 245)) or collis_sens not in list(range(6)) or teach_sens not in list(range(6)) \
or mode not in list(range(12)) or state not in list(range(10)):
self._stream_report.close()
logger.warn('ReportDataException: length={}, data_len={}, '
'state={}, mode={}, collis_sens={}, teach_sens={}, '
'error_code={}, warn_code={}'.format(
length, data_len,
state, mode, collis_sens, teach_sens, error_code, warn_code
))
return
self._gravity_direction = convert.bytes_to_fp32s(rx_data[133:3*4 + 133], 3)
reset_tgpio_params = False
reset_linear_track_params = False
if 0 < error_code <= 17:
reset_tgpio_params = True
reset_linear_track_params = True
elif error_code in [19, 28]:
reset_tgpio_params = True
elif error_code == 111:
reset_linear_track_params = True
if reset_tgpio_params:
self.modbus_baud = -1
self.robotiq_is_activated = False
self.gripper_is_enabled = False
self.bio_gripper_is_enabled = False
self.bio_gripper_speed = 0
                self.gripper_speed = 0
self.gripper_version_numbers = [-1, -1, -1]
if reset_linear_track_params:
self.linear_track_baud = -1
self.linear_track_is_enabled = False
self.linear_track_speed = 0
# if error_code in [1, 10, 11, 12, 13, 14, 15, 16, 17, 19, 28]:
# self.modbus_baud = -1
# self.robotiq_is_activated = False
# self.gripper_is_enabled = False
# self.bio_gripper_is_enabled = False
# self.bio_gripper_speed = -1
# self.gripper_speed = -1
# self.gripper_version_numbers = [-1, -1, -1]
# self.linear_track_is_enabled = False
# self.linear_track_speed = -1
# print('torque: {}'.format(torque))
# print('tcp_load: {}'.format(tcp_load))
# print('collis_sens: {}, teach_sens: {}'.format(collis_sens, teach_sens))
if error_code != self._error_code or warn_code != self._warn_code:
if error_code != self._error_code:
self._error_code = error_code
if self._error_code != 0:
pretty_print('ControllerError, code: {}'.format(self._error_code), color='red')
else:
                        pretty_print('ControllerError has been cleared', color='blue')
if warn_code != self._warn_code:
self._warn_code = warn_code
if self._warn_code != 0:
pretty_print('ControllerWarning, code: {}'.format(self._warn_code), color='yellow')
else:
                        pretty_print('ControllerWarning has been cleared', color='blue')
self._report_error_warn_changed_callback()
logger.info('OnReport -> err={}, warn={}, state={}, cmdnum={}, mtbrake={}, mtable={}, mode={}'.format(
error_code, warn_code, state, cmd_num, mtbrake, mtable, mode
))
elif not self._only_report_err_warn_changed:
self._report_error_warn_changed_callback()
if cmd_num != self._cmd_num:
self._cmd_num = cmd_num
self._report_cmdnum_changed_callback()
if state != self._state:
if not self._has_motion_cmd and self._state in [0, 1] and state not in [0, 1]:
self._need_sync = True
if self._state in [0, 1] and state not in [0, 1]:
self._has_motion_cmd = False
# print('old_state: {}, new_state: {}, has_motion_cmd={}, need_sync: {}'.format(self._state, state, self._has_motion_cmd, self._need_sync))
self._state = state
self._report_state_changed_callback()
if mode != self._mode:
self._mode = mode
self._report_mode_changed_callback()
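            # Expand the 8-bit brake/enable bitmasks into per-joint lists
            # (bit i of each byte corresponds to joint i+1).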
mtbrake = [mtbrake & 0x01, mtbrake >> 1 & 0x01, mtbrake >> 2 & 0x01, mtbrake >> 3 & 0x01,
mtbrake >> 4 & 0x01, mtbrake >> 5 & 0x01, mtbrake >> 6 & 0x01, mtbrake >> 7 & 0x01]
mtable = [mtable & 0x01, mtable >> 1 & 0x01, mtable >> 2 & 0x01, mtable >> 3 & 0x01,
mtable >> 4 & 0x01, mtable >> 5 & 0x01, mtable >> 6 & 0x01, mtable >> 7 & 0x01]
if mtbrake != self._arm_motor_brake_states or mtable != self._arm_motor_enable_states:
self._arm_motor_enable_states = mtable
self._arm_motor_brake_states = mtbrake
self._report_mtable_mtbrake_changed_callback()
if not self._is_first_report:
if state in [4, 5] or not all([bool(item[0] & item[1]) for item in zip(mtbrake, mtable)][:self.axis]):
# if self._is_ready:
# pretty_print('[report], xArm is not ready to move', color='red')
self._is_ready = False
else:
# if not self._is_ready:
# pretty_print('[report], xArm is ready to move', color='green')
self._is_ready = True
else:
self._is_ready = False
self._is_first_report = False
if not self._is_ready:
self._sleep_finish_time = 0
self._error_code = error_code
self._warn_code = warn_code
self.arm_cmd.has_err_warn = error_code != 0 or warn_code != 0
_state = self._state
self._state = state
if self.state != 3 and (_state == 3 or self._pause_cnts > 0):
with self._pause_cond:
self._pause_cond.notifyAll()
self._mode = mode
self._cmd_num = cmd_num
update_time = time.time()
self._last_update_cmdnum_time = update_time
self._last_update_state_time = update_time
self._last_update_err_time = update_time
self._arm_motor_brake_states = mtbrake
self._arm_motor_enable_states = mtable
self._joints_torque = torque
if compare_version(self.version_number, (0, 2, 0)):
self._tcp_load = [float('{:.3f}'.format(tcp_load[0])), [float('{:.3f}'.format(i)) for i in tcp_load[1:]]]
else:
self._tcp_load = [float('{:.3f}'.format(tcp_load[0])), [float('{:.3f}'.format(i * 1000)) for i in tcp_load[1:]]]
self._collision_sensitivity = collis_sens
self._teach_sensitivity = teach_sens
for i in range(len(pose)):
pose[i] = filter_invaild_number(pose[i], 3 if i < 3 else 6, default=self._position[i])
for i in range(len(angles)):
angles[i] = filter_invaild_number(angles[i], 6, default=self._angles[i])
for i in range(len(pose_offset)):
pose_offset[i] = filter_invaild_number(pose_offset[i], 3 if i < 3 else 6, default=self._position_offset[i])
if not (0 < self._error_code <= 17):
self._position = pose
if not (0 < self._error_code <= 17):
self._angles = angles
if not (0 < self._error_code <= 17):
self._position_offset = pose_offset
self._report_location_callback()
self._report_callback()
if not self._is_sync and self._error_code == 0 and self._state not in [4, 5]:
self._sync()
self._is_sync = True
elif self._need_sync:
self._need_sync = False
# self._sync()
def __handle_report_rich(rx_data):
report_time = time.time()
interval = report_time - self._last_report_time
self._max_report_interval = max(self._max_report_interval, interval)
self._last_report_time = report_time
# print('interval={}, max_interval={}'.format(interval, self._max_report_interval))
__handle_report_normal(rx_data)
(self._arm_type,
arm_axis,
self._arm_master_id,
self._arm_slave_id,
self._arm_motor_tid,
self._arm_motor_fid) = rx_data[145:151]
if 7 >= arm_axis >= 5:
self._arm_axis = arm_axis
# self._version = str(rx_data[151:180], 'utf-8')
trs_msg = convert.bytes_to_fp32s(rx_data[181:201], 5)
# trs_msg = [i[0] for i in trs_msg]
(self._tcp_jerk,
self._min_tcp_acc,
self._max_tcp_acc,
self._min_tcp_speed,
self._max_tcp_speed) = trs_msg
# print('tcp_jerk: {}, min_acc: {}, max_acc: {}, min_speed: {}, max_speed: {}'.format(
# self._tcp_jerk, self._min_tcp_acc, self._max_tcp_acc, self._min_tcp_speed, self._max_tcp_speed
# ))
p2p_msg = convert.bytes_to_fp32s(rx_data[201:221], 5)
# p2p_msg = [i[0] for i in p2p_msg]
(self._joint_jerk,
self._min_joint_acc,
self._max_joint_acc,
self._min_joint_speed,
self._max_joint_speed) = p2p_msg
# print('joint_jerk: {}, min_acc: {}, max_acc: {}, min_speed: {}, max_speed: {}'.format(
# self._joint_jerk, self._min_joint_acc, self._max_joint_acc,
# self._min_joint_speed, self._max_joint_speed
# ))
rot_msg = convert.bytes_to_fp32s(rx_data[221:229], 2)
# rot_msg = [i[0] for i in rot_msg]
self._rot_jerk, self._max_rot_acc = rot_msg
# print('rot_jerk: {}, mac_acc: {}'.format(self._rot_jerk, self._max_rot_acc))
servo_codes = [val for val in rx_data[229:245]]
for i in range(self.axis):
if self._servo_codes[i][0] != servo_codes[i * 2] or self._servo_codes[i][1] != servo_codes[i * 2 + 1]:
print('servo_error_code, servo_id={}, status={}, code={}'.format(i + 1, servo_codes[i * 2], servo_codes[i * 2 + 1]))
self._servo_codes[i][0] = servo_codes[i * 2]
self._servo_codes[i][1] = servo_codes[i * 2 + 1]
self._first_report_over = True
# length = convert.bytes_to_u32(rx_data[0:4])
length = len(rx_data)
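            # Optional tail sections: newer firmware appends extra blocks,
            # gated below by the total frame length (temperatures, realtime
            # speeds, counter, world offset, GPIO reset flags, simulation/
            # collision/voltage/current/CGPIO state, FT sensor data,
            # identification progress, axis-angle pose).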
if length >= 252:
temperatures = list(map(int, rx_data[245:252]))
if temperatures != self.temperatures:
self._temperatures = temperatures
self._report_temperature_changed_callback()
if length >= 284:
speeds = convert.bytes_to_fp32s(rx_data[252:8 * 4 + 252], 8)
self._realtime_tcp_speed = speeds[0]
self._realtime_joint_speeds = speeds[1:]
# print(speeds[0], speeds[1:])
if length >= 288:
count = convert.bytes_to_u32(rx_data[284:288])
# print(count, rx_data[284:288])
if self._count != -1 and count != self._count:
self._count = count
self._report_count_changed_callback()
self._count = count
if length >= 312:
world_offset = convert.bytes_to_fp32s(rx_data[288:6 * 4 + 288], 6)
for i in range(len(world_offset)):
if i < 3:
world_offset[i] = float('{:.3f}'.format(world_offset[i]))
else:
world_offset[i] = float('{:.6f}'.format(world_offset[i]))
if math.inf not in world_offset and -math.inf not in world_offset and not (10 <= self._error_code <= 17):
self._world_offset = world_offset
if length >= 314:
self._cgpio_reset_enable, self._tgpio_reset_enable = rx_data[312:314]
if length >= 417:
self._is_simulation_robot = bool(rx_data[314])
self._is_collision_detection, self._collision_tool_type = rx_data[315:317]
self._collision_tool_params = convert.bytes_to_fp32s(rx_data[317:341], 6)
voltages = convert.bytes_to_u16s(rx_data[341:355], 7)
voltages = list(map(lambda x: x / 100, voltages))
self._voltages = voltages
currents = convert.bytes_to_fp32s(rx_data[355:383], 7)
self._currents = currents
cgpio_states = []
cgpio_states.extend(rx_data[383:385])
cgpio_states.extend(convert.bytes_to_u16s(rx_data[385:401], 8))
cgpio_states[6:10] = list(map(lambda x: x / 4095.0 * 10.0, cgpio_states[6:10]))
cgpio_states.append(list(map(int, rx_data[401:409])))
cgpio_states.append(list(map(int, rx_data[409:417])))
if self._control_box_type_is_1300 and length >= 433:
cgpio_states[-2].extend(list(map(int, rx_data[417:425])))
cgpio_states[-1].extend(list(map(int, rx_data[425:433])))
self._cgpio_states = cgpio_states
if length >= 481:
# FT_SENSOR
self._ft_ext_force = convert.bytes_to_fp32s(rx_data[433:457], 6)
self._ft_raw_force = convert.bytes_to_fp32s(rx_data[457:481], 6)
if length >= 482:
iden_progress = rx_data[481]
if iden_progress != self._iden_progress:
self._iden_progress = iden_progress
self._report_iden_progress_changed_callback()
if length >= 494:
pose_aa = convert.bytes_to_fp32s(rx_data[482:494], 3)
for i in range(len(pose_aa)):
pose_aa[i] = filter_invaild_number(pose_aa[i], 6, default=self._pose_aa[i])
self._pose_aa = self._position[:3] + pose_aa
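        # Dispatch the frame to the matching parser: 'real' reports carry a
        # trimmed payload, 'rich' reports extend the normal payload with arm
        # configuration and servo data, and the *_old variants cover the
        # legacy protocol.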
try:
if self._report_type == 'real':
__handle_report_real(data)
elif self._report_type == 'rich':
if self._is_old_protocol:
__handle_report_rich_old(data)
else:
__handle_report_rich(data)
else:
if self._is_old_protocol:
__handle_report_normal_old(data)
else:
__handle_report_normal(data)
except Exception as e:
logger.error(e)
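    # Polling fallback used when no dedicated report socket is available:
    # periodically query cmdnum/state/error/angles/position over the main
    # connection and fire the same change callbacks the report thread would.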
def _auto_get_report_thread(self):
logger.debug('get report thread start')
while self.connected:
try:
cmd_num = self._cmd_num
state = self._state
error_code = self._error_code
warn_code = self._warn_code
self.get_cmdnum()
time.sleep(0.01)
self.get_state()
time.sleep(0.01)
self.get_err_warn_code()
time.sleep(0.01)
self.get_servo_angle()
time.sleep(0.01)
self.get_position()
if self.state != 3 and (state == 3 or self._pause_cnts > 0):
with self._pause_cond:
self._pause_cond.notifyAll()
if cmd_num != self._cmd_num:
self._report_cmdnum_changed_callback()
if state != self._state:
self._report_state_changed_callback()
if state in [4, 5]:
# if self._is_ready:
# pretty_print('[report], xArm is not ready to move', color='red')
self._sleep_finish_time = 0
self._is_ready = False
else:
# if not self._is_ready:
# pretty_print('[report], xArm is ready to move', color='green')
self._is_ready = True
if error_code != self._error_code or warn_code != self._warn_code:
self._report_error_warn_changed_callback()
elif not self._only_report_err_warn_changed and (self._error_code != 0 or self._warn_code != 0):
self._report_error_warn_changed_callback()
self._report_location_callback()
self._report_callback()
if self._cmd_num >= self._max_cmd_num:
time.sleep(1)
self._first_report_over = True
time.sleep(0.1)
except:
pass
self.disconnect()
logger.debug('get report thread stopped')
def _sync_tcp(self, index=None):
if not self._stream_report or not self._stream_report.connected:
self.get_position()
self.get_servo_angle()
self._last_angles = self._angles.copy()
if index is None:
self._last_position = self._position.copy()
elif isinstance(index, int) and 0 <= index < 6:
self._last_position[index] = self._position[index]
# print('=============sync_tcp: index={}'.format(index))
def _sync_joints(self, index=None):
if not self._stream_report or not self._stream_report.connected:
self.get_position()
self.get_servo_angle()
self._last_position = self._position.copy()
if index is None:
self._last_angles = self._angles.copy()
elif isinstance(index, int) and 0 <= index < 7:
self._last_angles[index] = self._angles[index]
# print('=============sync_joint: index={}'.format(index))
def _sync(self):
if not self._stream_report or not self._stream_report.connected:
self.get_position()
self.get_servo_angle()
self._last_position = self._position.copy()
self._last_angles = self._angles.copy()
# print('=============sync_all')
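    # kwargs use gcode-style single-letter keys: X/Y/Z/A/B/C update the
    # cached TCP pose, I..O the cached joint angles, F/Q the TCP speed and
    # acceleration, F2/Q2 the joint equivalents, T the move time, and
    # LIMIT_VELO/LIMIT_ACC the TCP speed/acceleration limits.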
def _set_params(self, **kwargs):
is_radian = kwargs.get('is_radian', self._default_is_radian)
if 'X' in kwargs and isinstance(kwargs['X'], (int, float)):
self._last_position[0] = kwargs.get('X')
if 'Y' in kwargs and isinstance(kwargs['Y'], (int, float)):
self._last_position[1] = kwargs.get('Y')
if 'Z' in kwargs and isinstance(kwargs['Z'], (int, float)):
self._last_position[2] = kwargs.get('Z')
if 'A' in kwargs and isinstance(kwargs['A'], (int, float)):
self._last_position[3] = kwargs.get('A') if is_radian else math.radians(kwargs.get('A'))
if 'B' in kwargs and isinstance(kwargs['B'], (int, float)):
self._last_position[4] = kwargs.get('B') if is_radian else math.radians(kwargs.get('B'))
if 'C' in kwargs and isinstance(kwargs['C'], (int, float)):
self._last_position[5] = kwargs.get('C') if is_radian else math.radians(kwargs.get('C'))
# if 'R' in kwargs and isinstance(kwargs['R'], (int, float)):
# self._last_position[6] = kwargs.get('R')
if 'I' in kwargs and isinstance(kwargs['I'], (int, float)):
self._last_angles[0] = kwargs.get('I') if is_radian else math.radians(kwargs.get('I'))
if 'J' in kwargs and isinstance(kwargs['J'], (int, float)):
self._last_angles[1] = kwargs.get('J') if is_radian else math.radians(kwargs.get('J'))
if 'K' in kwargs and isinstance(kwargs['K'], (int, float)):
self._last_angles[2] = kwargs.get('K') if is_radian else math.radians(kwargs.get('K'))
if 'L' in kwargs and isinstance(kwargs['L'], (int, float)):
self._last_angles[3] = kwargs.get('L') if is_radian else math.radians(kwargs.get('L'))
if 'M' in kwargs and isinstance(kwargs['M'], (int, float)):
self._last_angles[4] = kwargs.get('M') if is_radian else math.radians(kwargs.get('M'))
if 'N' in kwargs and isinstance(kwargs['N'], (int, float)):
self._last_angles[5] = kwargs.get('N') if is_radian else math.radians(kwargs.get('N'))
if 'O' in kwargs and isinstance(kwargs['O'], (int, float)):
self._last_angles[6] = kwargs.get('O') if is_radian else math.radians(kwargs.get('O'))
if 'F' in kwargs and isinstance(kwargs['F'], (int, float)):
self._last_tcp_speed = kwargs.get('F')
self._last_tcp_speed = min(max(self._last_tcp_speed, self._min_tcp_speed), self._max_tcp_speed)
if 'Q' in kwargs and isinstance(kwargs['Q'], (int, float)):
self._last_tcp_acc = kwargs.get('Q')
self._last_tcp_acc = min(max(self._last_tcp_acc, self._min_tcp_acc), self._max_tcp_acc)
if 'F2' in kwargs and isinstance(kwargs['F2'], (int, float)):
self._last_joint_speed = kwargs.get('F2')
if not is_radian:
self._last_joint_speed = math.radians(self._last_joint_speed)
self._last_joint_speed = min(max(self._last_joint_speed, self._min_joint_speed), self._max_joint_speed)
if 'Q2' in kwargs and isinstance(kwargs['Q2'], (int, float)):
self._last_joint_acc = kwargs.get('Q2')
if not is_radian:
self._last_joint_acc = math.radians(self._last_joint_acc)
self._last_joint_acc = min(max(self._last_joint_acc, self._min_joint_acc), self._max_joint_acc)
if 'T' in kwargs and isinstance(kwargs['T'], (int, float)):
self._mvtime = kwargs.get('T')
if 'LIMIT_VELO' in kwargs and isinstance(kwargs['LIMIT_VELO'], (list, tuple)) \
and len(kwargs['LIMIT_VELO']) == 2 and isinstance(kwargs['LIMIT_VELO'][0], (int, float)) \
and isinstance(kwargs['LIMIT_VELO'][1], (int, float)) \
and kwargs['LIMIT_VELO'][0] <= kwargs['LIMIT_VELO'][1]:
self._min_tcp_speed, self._max_tcp_speed = kwargs.get('LIMIT_VELO')
if 'LIMIT_ACC' in kwargs and isinstance(kwargs['LIMIT_ACC'], (list, tuple)) \
and len(kwargs['LIMIT_ACC']) == 2 and isinstance(kwargs['LIMIT_ACC'][0], (int, float)) \
and isinstance(kwargs['LIMIT_ACC'][1], (int, float)) \
and kwargs['LIMIT_ACC'][0] <= kwargs['LIMIT_ACC'][1]:
self._min_tcp_acc, self._max_tcp_acc = kwargs.get('LIMIT_ACC')
def _get_params(self, is_radian=None):
is_radian = self._default_is_radian if is_radian is None else is_radian
if is_radian:
return {
'lastPosition': self._last_position,
'lastAngles': self._last_angles,
'mvvelo': self._last_tcp_speed,
'mvacc': self._last_tcp_acc,
'tcpJerk': self._tcp_jerk,
'jointJerk': self._joint_jerk,
'angle_mvvelo': self._last_joint_speed,
'angle_mvacc': self._last_joint_acc,
'mvtime': self._mvtime,
'LIMIT_VELO': [self._min_tcp_speed, self._max_tcp_speed],
'LIMIT_ACC': [self._min_tcp_acc, self._max_tcp_acc],
'LIMIT_ANGLE_VELO': [self._min_joint_speed, self._max_joint_speed],
'LIMIT_ANGLE_ACC': [self._min_joint_acc, self._max_joint_acc],
}
else:
return {
'lastPosition': [math.degrees(self._last_position[i]) if 2 < i < 6 else self._last_position[i] for i in range(len(self._last_position))],
'lastAngles': [math.degrees(angle) for angle in self._last_angles],
'mvvelo': round(self._last_tcp_speed),
'mvacc': round(self._last_tcp_acc),
'tcpJerk': round(self._tcp_jerk),
'jointJerk': round(math.degrees(self._joint_jerk)),
'angle_mvvelo': round(math.degrees(self._last_joint_speed)),
'angle_mvacc': round(math.degrees(self._last_joint_acc)),
'mvtime': self._mvtime,
'LIMIT_VELO': list(map(round, [self._min_tcp_speed, self._max_tcp_speed])),
'LIMIT_ACC': list(map(round, [self._min_tcp_acc, self._max_tcp_acc])),
'LIMIT_ANGLE_VELO': list(map(round, [math.degrees(self._min_joint_speed), math.degrees(self._max_joint_speed)])),
'LIMIT_ANGLE_ACC': list(map(round, [math.degrees(self._min_joint_acc), math.degrees(self._max_joint_acc)])),
}
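    # Normalize controller return codes: for motion commands, 0/WAR_CODE only
    # count as success while the controller state is ready; for other
    # commands, error-, warning- and state-not-ready codes are collapsed to 0
    # so callers only see hard failures.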
def _check_code(self, code, is_move_cmd=False):
if is_move_cmd:
if code in [0, XCONF.UxbusState.WAR_CODE]:
if self.arm_cmd.state_is_ready:
return 0
else:
return XCONF.UxbusState.STATE_NOT_READY
else:
return code
# return 0 if code in [0, XCONF.UxbusState.WAR_CODE] and self.arm_cmd.state_is_ready else XCONF.UxbusState.STATE_NOT_READY if not self.arm_cmd.state_is_ready else code
else:
return 0 if code in [0, XCONF.UxbusState.ERR_CODE, XCONF.UxbusState.WAR_CODE, XCONF.UxbusState.STATE_NOT_READY] else code
def _check_mode_is_correct(self, mode, timeout=1):
if self._enable_report and self._stream_type == 'socket':
cnt = int(10 * timeout)
while cnt > 0 and self.mode != mode:
time.sleep(0.1)
cnt -= 1
if self.mode != mode:
return False
return True
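    # Throttle command submission: once the firmware queue is more than half
    # full, refresh the counter, then block in 50 ms steps while it stays at
    # the limit, bailing out on disconnect, error, not-ready or stop.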
def wait_until_cmdnum_lt_max(self):
if not self._check_cmdnum_limit or self._stream_type != 'socket' or not self._enable_report:
return
# if time.time() - self._last_report_time > 0.4:
# self.get_cmdnum()
if self._max_cmd_num / 2 < self.cmd_num < self._max_cmd_num:
self.get_cmdnum()
while self.cmd_num >= self._max_cmd_num:
if not self.connected:
return APIState.NOT_CONNECTED
elif self.has_error:
return APIState.HAS_ERROR
elif not self.state_is_ready:
return APIState.NOT_READY
elif self.is_stop:
return APIState.EMERGENCY_STOP
time.sleep(0.05)
@xarm_is_connected(_type='get')
def get_version(self):
ret = self.arm_cmd.get_version()
ret[0] = self._check_code(ret[0])
if ret[0] == 0:
version = ''.join(list(map(chr, ret[1:])))
self._version = version[:version.find('\0')]
return ret[0], self._version
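    # The SN payload contains the robot SN and the control-box SN as two
    # NUL-terminated strings; characters [2:6] of each SN encode a numeric
    # hardware revision, with values >= 1300 marking the 1300-series boards.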
@xarm_is_connected(_type='get')
def get_robot_sn(self):
ret = self.arm_cmd.get_robot_sn()
ret[0] = self._check_code(ret[0])
if ret[0] == 0:
robot_sn = ''.join(list(map(chr, ret[1:])))
split_inx = robot_sn.find('\0')
self._robot_sn = robot_sn[:split_inx]
control_box_sn = robot_sn[split_inx+1:]
self._control_box_sn = control_box_sn[:control_box_sn.find('\0')].strip()
self._arm_type_is_1300 = int(self._robot_sn[2:6]) >= 1300 if self._robot_sn[2:6].isdigit() else False
self._control_box_type_is_1300 = int(self._control_box_sn[2:6]) >= 1300 if self._control_box_sn[2:6].isdigit() else False
return ret[0], self._robot_sn
@xarm_is_connected(_type='get')
def check_verification(self):
ret = self.arm_cmd.check_verification()
ret[0] = self._check_code(ret[0])
return ret[0], ret[1]
@xarm_is_connected(_type='get')
def get_position(self, is_radian=None):
is_radian = self._default_is_radian if is_radian is None else is_radian
ret = self.arm_cmd.get_tcp_pose()
ret[0] = self._check_code(ret[0])
if ret[0] == 0 and len(ret) > 6:
self._position = [filter_invaild_number(ret[i], 6, default=self._position[i-1]) for i in range(1, 7)]
return ret[0], [float(
'{:.6f}'.format(math.degrees(self._position[i]) if 2 < i < 6 and not is_radian else self._position[i])) for
i in range(len(self._position))]
@xarm_is_connected(_type='get')
def get_servo_angle(self, servo_id=None, is_radian=None):
is_radian = self._default_is_radian if is_radian is None else is_radian
ret = self.arm_cmd.get_joint_pos()
ret[0] = self._check_code(ret[0])
if ret[0] == 0 and len(ret) > 7:
self._angles = [filter_invaild_number(ret[i], 6, default=self._angles[i-1]) for i in range(1, 8)]
if servo_id is None or servo_id == 8 or len(self._angles) < servo_id:
return ret[0], list(
map(lambda x: float('{:.6f}'.format(x if is_radian else math.degrees(x))), self._angles))
else:
return ret[0], float(
'{:.6f}'.format(self._angles[servo_id - 1] if is_radian else math.degrees(self._angles[servo_id - 1])))
@xarm_is_connected(_type='get')
def get_position_aa(self, is_radian=None):
is_radian = self._default_is_radian if is_radian is None else is_radian
ret = self.arm_cmd.get_position_aa()
ret[0] = self._check_code(ret[0])
if ret[0] == 0 and len(ret) > 6:
self._pose_aa = [filter_invaild_number(ret[i], 6, default=self._pose_aa[i - 1]) for i in range(1, 7)]
return ret[0], [float(
'{:.6f}'.format(math.degrees(self._pose_aa[i]) if 2 < i < 6 and not is_radian else self._pose_aa[i]))
for i in range(len(self._pose_aa))]
@xarm_is_connected(_type='get')
def get_pose_offset(self, pose1, pose2, orient_type_in=0, orient_type_out=0, is_radian=None):
is_radian = self._default_is_radian if is_radian is None else is_radian
_pose1 = [pose1[i] if i <= 2 or is_radian else math.radians(pose1[i]) for i in range(6)]
_pose2 = [pose2[i] if i <= 2 or is_radian else math.radians(pose2[i]) for i in range(6)]
ret = self.arm_cmd.get_pose_offset(_pose1, _pose2, orient_type_in, orient_type_out)
ret[0] = self._check_code(ret[0])
if ret[0] == 0 and len(ret) > 6:
pose = [float('{:.6f}'.format(ret[i] if i <= 3 or is_radian else math.degrees(ret[i]))) for i in
range(1, 7)]
return ret[0], pose
return ret[0], ret[1:7]
def get_is_moving(self):
self.get_state()
return self._state == 1
@xarm_is_connected(_type='get')
def get_state(self):
ret = self.arm_cmd.get_state()
ret[0] = self._check_code(ret[0])
if ret[0] == 0:
# if ret[1] != self._state:
# self._state = ret[1]
# self._report_state_changed_callback()
self._state = ret[1]
self._last_update_state_time = time.time()
return ret[0], ret[1] if ret[0] == 0 else self._state
@xarm_is_connected(_type='set')
def set_state(self, state=0):
_state = self._state
ret = self.arm_cmd.set_state(state)
ret[0] = self._check_code(ret[0])
if state == 4 and ret[0] == 0:
# self._last_position[:6] = self.position
# self._last_angles = self.angles
self._sleep_finish_time = 0
# self._is_sync = False
self.get_state()
if _state != self._state:
self._report_state_changed_callback()
if self.state != 3 and (_state == 3 or self._pause_cnts > 0):
with self._pause_cond:
self._pause_cond.notifyAll()
if self._state in [4, 5]:
self._sleep_finish_time = 0
if self._is_ready:
pretty_print('[set_state], xArm is not ready to move', color='red')
self._is_ready = False
else:
if not self._is_ready:
pretty_print('[set_state], xArm is ready to move', color='green')
self._is_ready = True
self.log_api_info('API -> set_state({}) -> code={}, state={}'.format(state, ret[0], self._state), code=ret[0])
return ret[0]
@xarm_is_connected(_type='set')
def set_mode(self, mode=0):
ret = self.arm_cmd.set_mode(mode)
ret[0] = self._check_code(ret[0])
self.log_api_info('API -> set_mode({}) -> code={}'.format(mode, ret[0]), code=ret[0])
return ret[0]
@xarm_is_connected(_type='get')
def get_cmdnum(self):
ret = self.arm_cmd.get_cmdnum()
ret[0] = self._check_code(ret[0])
if ret[0] == 0:
if ret[1] != self._cmd_num:
self._report_cmdnum_changed_callback()
self._cmd_num = ret[1]
self._last_update_cmdnum_time = time.time()
return ret[0], self._cmd_num
@xarm_is_connected(_type='get')
def get_err_warn_code(self, show=False, lang='en'):
ret = self.arm_cmd.get_err_code()
lang = lang if lang == 'cn' else 'en'
ret[0] = self._check_code(ret[0])
if ret[0] == 0:
# if ret[1] != self._error_code or ret[2] != self._warn_code:
# self._error_code, self._warn_code = ret[1:3]
# self._report_error_warn_changed_callback()
self._error_code, self._warn_code = ret[1:3]
self._last_update_err_time = time.time()
if show:
pretty_print('************* {}, {}: {} **************'.format(
'获取控制器错误警告码' if lang == 'cn' else 'GetErrorWarnCode',
'状态' if lang == 'cn' else 'Status',
ret[0]), color='light_blue')
controller_error = ControllerError(self._error_code, status=0)
controller_warn = ControllerWarn(self._warn_code, status=0)
pretty_print('* {}: {}, {}: {}'.format(
'错误码' if lang == 'cn' else 'ErrorCode',
controller_error.code,
'信息' if lang == 'cn' else 'Info',
controller_error.title[lang]),
color='red' if self._error_code != 0 else 'white')
pretty_print('* {}: {}, {}: {}'.format(
'警告码' if lang == 'cn' else 'WarnCode',
controller_warn.code,
'信息' if lang == 'cn' else 'Info',
controller_warn.title[lang]),
color='yellow' if self._warn_code != 0 else 'white')
pretty_print('*' * 50, color='light_blue')
return ret[0], ret[1:3] if ret[0] == 0 else [self._error_code, self._warn_code]
@xarm_is_connected(_type='set')
def clean_error(self):
ret = self.arm_cmd.clean_err()
self.get_state()
if self._state in [4, 5]:
self._sleep_finish_time = 0
if self._is_ready:
pretty_print('[clean_error], xArm is not ready to move', color='red')
self._is_ready = False
else:
if not self._is_ready:
pretty_print('[clean_error], xArm is ready to move', color='green')
self._is_ready = True
self.log_api_info('API -> clean_error -> code={}'.format(ret[0]), code=ret[0])
return ret[0]
@xarm_is_connected(_type='set')
def clean_warn(self):
ret = self.arm_cmd.clean_war()
self.log_api_info('API -> clean_warn -> code={}'.format(ret[0]), code=ret[0])
return ret[0]
@xarm_is_connected(_type='set')
@xarm_is_not_simulation_mode(ret=0)
def motion_enable(self, enable=True, servo_id=None):
assert servo_id is None or (isinstance(servo_id, int) and 1 <= servo_id <= 8)
if servo_id is None or servo_id == 8:
ret = self.arm_cmd.motion_en(8, int(enable))
else:
ret = self.arm_cmd.motion_en(servo_id, int(enable))
ret[0] = self._check_code(ret[0])
if ret[0] == 0:
self._is_ready = bool(enable)
self.get_state()
if self._state in [4, 5]:
self._sleep_finish_time = 0
if self._is_ready:
pretty_print('[motion_enable], xArm is not ready to move', color='red')
self._is_ready = False
else:
if not self._is_ready:
pretty_print('[motion_enable], xArm is ready to move', color='green')
self._is_ready = True
self.log_api_info('API -> motion_enable -> code={}'.format(ret[0]), code=ret[0])
return ret[0]
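    # Block until the current motion finishes: poll state (refreshing via the
    # main socket when report data is stale), return immediately on error or
    # in velocity modes (4/5), honour emergency stop, and require several
    # consecutive non-moving samples before declaring the move complete.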
def wait_move(self, timeout=None):
if timeout is not None:
            expired = time.time() + timeout + (self._sleep_finish_time - time.time() if self._sleep_finish_time > time.time() else 0)
else:
expired = 0
count = 0
_, state = self.get_state()
max_cnt = 4 if _ == 0 and state == 1 else 10
while timeout is None or time.time() < expired:
if not self.connected:
self.log_api_info('wait_move, xarm is disconnect', code=APIState.NOT_CONNECTED)
return APIState.NOT_CONNECTED
if time.time() - self._last_report_time > 0.4:
self.get_state()
self.get_err_warn_code()
if self.error_code != 0:
self.log_api_info('wait_move, xarm has error, error={}'.format(self.error_code), code=APIState.HAS_ERROR)
return APIState.HAS_ERROR
# no wait in velocity mode
if self.mode in [4, 5]:
return 0
if self.is_stop:
_, state = self.get_state()
if _ != 0 or state not in [4, 5]:
time.sleep(0.02)
continue
self._sleep_finish_time = 0
self.log_api_info('wait_move, xarm is stop, state={}'.format(self.state), code=APIState.EMERGENCY_STOP)
return APIState.EMERGENCY_STOP
if time.time() < self._sleep_finish_time or self.state == 3:
time.sleep(0.02)
count = 0
continue
if self.state != 1:
count += 1
if count >= max_cnt:
_, state = self.get_state()
self.get_err_warn_code()
if _ == 0 and state != 1:
return 0
else:
count = 0
# return 0
# if count % 4 == 0:
# self.get_state()
# self.get_err_warn_code()
else:
count = 0
time.sleep(0.05)
return APIState.WAIT_FINISH_TIMEOUT
@xarm_is_connected(_type='set')
def _check_modbus_code(self, ret, length=2, only_check_code=False, host_id=XCONF.TGPIO_HOST_ID):
code = ret[0]
if self._check_code(code) == 0:
if not only_check_code:
if len(ret) < length:
return APIState.MODBUS_ERR_LENG
if ret[1] != host_id:
return APIState.HOST_ID_ERR
if code != 0:
if host_id == XCONF.TGPIO_HOST_ID:
if self.error_code != 19 and self.error_code != 28:
self.get_err_warn_code()
if self.error_code != 19 and self.error_code != 28:
code = 0
else:
if self.error_code != 100 + host_id:
self.get_err_warn_code()
if self.error_code != 100 + host_id:
code = 0
return code
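    # Switch the modbus baudrate of the tool GPIO (or linear-track) host:
    # write the new baud index to the config register (0x1A0B), soft-reboot
    # the device, then clear the communication error the reboot provokes
    # (19/28 for tool GPIO, 100+host_id for other hosts).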
@xarm_is_connected(_type='set')
def checkset_modbus_baud(self, baudrate, check=True, host_id=XCONF.TGPIO_HOST_ID):
if check and ((host_id == XCONF.TGPIO_HOST_ID and self.modbus_baud == baudrate) or (host_id == XCONF.LINEER_TRACK_HOST_ID and self.linear_track_baud == baudrate)):
return 0
if baudrate not in self.arm_cmd.BAUDRATES:
return APIState.MODBUS_BAUD_NOT_SUPPORT
ret, cur_baud_inx = self._get_modbus_baudrate_inx(host_id=host_id)
if ret == 0:
baud_inx = self.arm_cmd.BAUDRATES.index(baudrate)
if cur_baud_inx != baud_inx:
try:
self._ignore_error = True
self._ignore_state = True if self.state not in [4, 5] else False
state = self.state
# self.arm_cmd.tgpio_addr_w16(XCONF.ServoConf.MODBUS_BAUDRATE, baud_inx)
self.arm_cmd.tgpio_addr_w16(0x1A0B, baud_inx, bid=host_id)
time.sleep(0.3)
self.arm_cmd.tgpio_addr_w16(XCONF.ServoConf.SOFT_REBOOT, 1, bid=host_id)
if host_id == XCONF.TGPIO_HOST_ID:
if self.error_code != 19 and self.error_code != 28:
self.get_err_warn_code()
if self.error_code == 19 or self.error_code == 28:
self.clean_error()
if self._ignore_state:
self.set_state(state if state >= 3 else 0)
time.sleep(1)
else:
if self.error_code != 100 + host_id:
self.get_err_warn_code()
if self.error_code == 100 + host_id:
self.clean_error()
if self._ignore_state:
self.set_state(state if state >= 3 else 0)
time.sleep(1)
except Exception as e:
self._ignore_error = False
self._ignore_state = False
logger.error('checkset_modbus_baud error: {}'.format(e))
return APIState.API_EXCEPTION
self._ignore_error = False
self._ignore_state = False
ret, cur_baud_inx = self._get_modbus_baudrate_inx(host_id=host_id)
self.log_api_info('API -> checkset_modbus_baud -> code={}, baud_inx={}'.format(ret, cur_baud_inx), code=ret)
# if ret == 0 and cur_baud_inx < len(self.arm_cmd.BAUDRATES):
# self.modbus_baud = self.arm_cmd.BAUDRATES[cur_baud_inx]
if host_id == XCONF.TGPIO_HOST_ID:
return 0 if self.modbus_baud == baudrate else APIState.MODBUS_BAUD_NOT_CORRECT
elif host_id == XCONF.LINEER_TRACK_HOST_ID:
return 0 if self.linear_track_baud == baudrate else APIState.MODBUS_BAUD_NOT_CORRECT
else:
if ret == 0 and 0 <= cur_baud_inx < len(self.arm_cmd.BAUDRATES):
return 0 if self.arm_cmd.BAUDRATES[cur_baud_inx] == baudrate else APIState.MODBUS_BAUD_NOT_CORRECT
return APIState.MODBUS_BAUD_NOT_CORRECT
@xarm_is_connected(_type='get')
def _get_modbus_baudrate_inx(self, host_id=XCONF.TGPIO_HOST_ID):
ret = self.arm_cmd.tgpio_addr_r16(XCONF.ServoConf.MODBUS_BAUDRATE & 0x0FFF, bid=host_id)
if ret[0] in [XCONF.UxbusState.ERR_CODE, XCONF.UxbusState.WAR_CODE]:
if host_id == XCONF.TGPIO_HOST_ID:
if self.error_code != 19 and self.error_code != 28:
self.get_err_warn_code()
if self.error_code != 19 and self.error_code != 28:
ret[0] = 0
else:
if self.error_code != 100 + host_id:
self.get_err_warn_code()
if self.error_code != 100 + host_id:
ret[0] = 0
if ret[0] == 0 and 0 <= ret[1] < len(self.arm_cmd.BAUDRATES):
if host_id == XCONF.TGPIO_HOST_ID:
self.modbus_baud = self.arm_cmd.BAUDRATES[ret[1]]
elif host_id == XCONF.LINEER_TRACK_HOST_ID:
self.linear_track_baud = self.arm_cmd.BAUDRATES[ret[1]]
return ret[0], ret[1]
@xarm_is_connected(_type='set')
def set_tgpio_modbus_timeout(self, timeout):
ret = self.arm_cmd.set_modbus_timeout(timeout)
self.log_api_info('API -> set_tgpio_modbus_timeout -> code={}'.format(ret[0]), code=ret[0])
return ret[0]
@xarm_is_connected(_type='set')
def set_tgpio_modbus_baudrate(self, baud):
code = self.checkset_modbus_baud(baud, check=False)
self.log_api_info('API -> set_tgpio_modbus_baudrate -> code={}'.format(code), code=code)
return code
@xarm_is_connected(_type='get')
def get_tgpio_modbus_baudrate(self):
code, baud_inx = self._get_modbus_baudrate_inx()
# if code == 0 and baud_inx < len(self.arm_cmd.BAUDRATES):
# self.modbus_baud = self.arm_cmd.BAUDRATES[baud_inx]
return code, self.modbus_baud
def getset_tgpio_modbus_data(self, datas, min_res_len=0, ignore_log=False):
if not self.connected:
return APIState.NOT_CONNECTED, []
ret = self.arm_cmd.tgpio_set_modbus(datas, len(datas))
ret[0] = self._check_modbus_code(ret, min_res_len + 2)
if not ignore_log:
self.log_api_info('API -> getset_tgpio_modbus_data -> code={}, response={}'.format(ret[0], ret[2:]), code=ret[0])
return ret[0], ret[2:]
@xarm_is_connected(_type='set')
def set_simulation_robot(self, on_off):
ret = self.arm_cmd.set_simulation_robot(on_off)
ret[0] = self._check_code(ret[0])
self.log_api_info('API -> set_simulation_robot({}) -> code={}'.format(on_off, ret[0]), code=ret[0])
return ret[0]
@xarm_is_connected(_type='set')
@xarm_is_pause(_type='set')
@xarm_wait_until_cmdnum_lt_max(only_wait=False)
def set_tcp_load(self, weight, center_of_gravity):
if compare_version(self.version_number, (0, 2, 0)):
_center_of_gravity = center_of_gravity
else:
_center_of_gravity = [item / 1000.0 for item in center_of_gravity]
ret = self.arm_cmd.set_tcp_load(weight, _center_of_gravity)
self.log_api_info('API -> set_tcp_load -> code={}, weight={}, center={}'.format(ret[0], weight, _center_of_gravity), code=ret[0])
return ret[0]
| xArm-Developer/xArm-Python-SDK | xarm/x3/base.py | Python | bsd-3-clause | 102,034 |
"""
A socket wrapper that uses Event IO.
"""
import socket
import event
import time
import logging
import errno
import traceback
import os
from collections import deque
# TODO: Use new io objects from 2.6
# 26 July 10 - I looked into this and a potential problem with io.StringIO is
# that it assumes all text is unicode. Without a full test and probably lots
# of code updated elsewhere, the older StringIO is probably the better choice
# to fix the bug @AW
# https://agora.lighthouseapp.com/projects/47111/tickets/628-odd-amqp-error
from cStringIO import StringIO
class EventSocket(object):
"""
A socket wrapper which uses libevent.
"""
def __init__( self, family=socket.AF_INET, type=socket.SOCK_STREAM, \
protocol=socket.IPPROTO_IP, read_cb=None, accept_cb=None, \
close_cb=None, error_cb=None, output_empty_cb=None, sock=None, \
debug=False, logger=None, max_read_buffer=0, **kwargs):
"""
    Initialize the socket. If no read_cb is defined, the socket will only be
    used for writing. If this socket will be used for accepting new connections,
set read_cb here and it will be passed to new sockets. You can also set
accept_cb and be notified with an EventSocket object on accept(). The
error_cb will be called if there are any errors on the socket. The args
to it will be this socket, an error message, and an optional exception.
The close_cb will be called when this socket closes, with this socket as
its argument. If needed, you can wrap an existing socket by setting the
sock argument to a socket object.
"""
self._debug = debug
self._logger = logger
if self._debug and not self._logger:
print 'WARNING: to debug EventSocket, must provide a logger'
self._debug = False
    # The various events we may or may not schedule
self._read_event = None
self._write_event = None
self._accept_event = None
self._connect_event = None
self._pending_read_cb_event = None
# Cache the peername so we can include it in logs even if the socket
# is closed. Note that connect() and bind() have to be the ones to do
# that work.
self._peername = 'unknown'
if sock:
self._sock = sock
try:
self._peername = "%s:%d"%self._sock.getpeername()
# Like connect(), only initialize these if the socket is already connected.
self._read_event = event.read( self._sock, self._protected_cb, self._read_cb )
self._write_event = event.write( self._sock, self._protected_cb, self._write_cb )
except socket.error, e:
# unconnected
pass
else:
self._sock = socket.socket(family, type, protocol)
# wholesale binding of stuff we don't need to alter or intercept
self.listen = self._sock.listen
self.setsockopt = self._sock.setsockopt
self.fileno = self._sock.fileno
self.getpeername = self._sock.getpeername
self.getsockname = self._sock.getsockname
self.getsockopt = self._sock.getsockopt
self.setblocking = self._sock.setblocking # is this correct?
self.settimeout = self._sock.settimeout
self.gettimeout = self._sock.gettimeout
self.shutdown = self._sock.shutdown
self._max_read_buffer = max_read_buffer
#self._write_buf = []
self._write_buf = deque()
#self._read_buf = StringIO()
self._read_buf = bytearray()
self._parent_accept_cb = accept_cb
self._parent_read_cb = read_cb
self._parent_error_cb = error_cb
self._parent_close_cb = close_cb
self._parent_output_empty_cb = output_empty_cb
# This is the pending global error message. It's sort of a hack, but it's
# used for __protected_cb in much the same way as errno. This prevents
# having to pass an error message around, when the best way to do that is
# via kwargs that the event lib is itself trying to interpret and won't
# allow to pass to __protected_cb.
self._error_msg = None
self._closed = False
self._inactive_event = None
self.set_inactive_timeout( 0 )
@property
def closed(self):
'''
Return whether this socket is closed.
'''
return self._closed
def close(self):
"""
Close the socket.
"""
# if self._debug:
# self._logger.debug(\
# "closing connection %s to %s"%(self._sock.getsockname(), self._peername) )
# Unload all our events
if self._read_event:
self._read_event.delete()
self._read_event = None
if self._accept_event:
self._accept_event.delete()
self._accept_event = None
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
if self._write_event:
self._write_event.delete()
self._write_event = None
if self._connect_event:
self._connect_event.delete()
self._connect_event = None
if self._sock:
self._sock.close()
self._sock = None
# Flush any pending data to the read callbacks as appropriate. Do this
# manually as there is a chance for the following race condition to occur:
# pending data read by cb
# callback reads 1.1 messages, re-buffers .1 msg back
# callback disconnects from socket based on message, calling close()
# we get back to this code and find there's still data in the input buffer
# and the read cb hasn't been cleared. ruh roh.
#if self._parent_read_cb and self._read_buf.tell()>0:
if self._parent_read_cb and len(self._read_buf)>0:
cb = self._parent_read_cb
self._parent_read_cb = None
self._error_msg = "error processing remaining socket input buffer"
self._protected_cb( cb, self )
# Only mark as closed after socket is really closed, we've flushed buffered
# input, and we're calling back to close handlers.
self._closed = True
if self._parent_close_cb:
self._parent_close_cb( self )
if self._pending_read_cb_event:
self._pending_read_cb_event.delete()
self._pending_read_cb_event = None
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
# Delete references to callbacks to help garbage collection
self._parent_accept_cb = None
self._parent_read_cb = None
self._parent_error_cb = None
self._parent_close_cb = None
self._parent_output_empty_cb = None
# Clear buffers
self._write_buf = None
self._read_buf = None
def accept(self):
"""
No-op as we no longer perform blocking accept calls.
"""
pass
def _set_read_cb(self, cb):
"""
    Set the read callback. If there's data in the input buffer, immediately
    set up a call.
"""
self._parent_read_cb = cb
#if self._read_buf.tell()>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
if len(self._read_buf)>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
self._pending_read_cb_event = \
event.timeout( 0, self._protected_cb, self._parent_read_timer_cb )
# Allow someone to change the various callbacks.
read_cb = property( fset=_set_read_cb )
accept_cb = property( fset=lambda self,func: setattr(self, '_parent_accept_cb', func ) )
close_cb = property( fset=lambda self,func: setattr(self, '_parent_close_cb', func ) )
error_cb = property( fset=lambda self,func: setattr(self, '_parent_error_cb', func ) )
output_empty_cb = property( fset=lambda self,func: setattr(self, '_parent_output_empty_cb',func) )
def bind(self, *args):
"""
Bind the socket.
"""
if self._debug:
self._logger.debug( "binding to %s", str(args) )
self._sock.bind( *args )
self._peername = "%s:%d"%self.getsockname()
self._accept_event = event.read( self, self._protected_cb, self._accept_cb )
def connect(self, *args, **kwargs):
'''
Connect to the socket. If currently non-blocking, will return immediately
and call close_cb when the timeout is reached. If timeout_at is a float,
will wait until that time and then call the close_cb. Otherwise, it will
set timeout_at as time()+timeout, where timeout is a float argument or the
current timeout value of the socket. The check interval for successful
connection on a non-blocking socket is 100ms.
IMPORTANT: If you want the socket to timeout at all in non-blocking mode,
    you *must* pass in either a relative timeout in seconds, or an absolute
value in timeout_at. Otherwise, the socket will forever try to connect.
Passes *args on to socket.connect_ex, and **kwargs are used for local
control of `timeout` and `timeout_at`.
'''
timeout_at = kwargs.get('timeout_at')
timeout = kwargs.get('timeout')
if not isinstance(timeout_at, float):
if not isinstance(timeout,(int,long,float)):
timeout = self._sock.gettimeout()
if timeout>0:
timeout_at = time.time()+timeout
self._connect_cb(timeout_at, *args, immediate_raise=True)
def _connect_cb(self, timeout_at, *args, **kwargs):
'''
Local support for synch and asynch connect. Required because
`event.timeout` doesn't support kwargs. They are spec'd though so that
we can branch how exceptions are handled.
'''
err = self._sock.connect_ex( *args )
if not err:
self._peername = "%s:%d"%self._sock.getpeername()
self._read_event = event.read( self._sock, self._protected_cb, self._read_cb )
self._write_event = event.write( self._sock, self._protected_cb, self._write_cb )
if self._connect_event:
self._connect_event.delete()
self._connect_event = None
elif err in (errno.EINPROGRESS,errno.EALREADY):
# Only track timeout if we're about to re-schedule. Should only receive
# these on a non-blocking socket.
if isinstance(timeout_at,float) and time.time()>timeout_at:
self._error_msg = 'timeout connecting to %s'%str(args)
self.close()
return
if self._connect_event:
self._connect_event.delete()
      # Checking every 100ms seems to be a reasonable frequency. If requested,
      # this too can be made configurable.
self._connect_event = event.timeout(0.1, self._connect_cb,
timeout_at, *args)
else:
if self._connect_event:
self._connect_event.delete()
self._error_msg = os.strerror(err)
serr = socket.error( err, self._error_msg )
if kwargs.get('immediate_raise'):
raise serr
else:
self._handle_error( serr )
def set_inactive_timeout(self, t):
"""
    Set the inactivity timeout. If t is None or 0, there is no inactivity timeout.
If t>0 then socket will automatically close if there has been no activity
after t seconds (float supported). Will raise TypeError if <t> is invalid.
"""
if t==None or t==0:
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
self._inactive_timeout = 0
elif isinstance(t,(int,long,float)):
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = event.timeout( t, self._inactive_cb )
self._inactive_timeout = t
else:
raise TypeError( "invalid timeout %s"%(str(t)) )
### Private support methods
def _handle_error(self, exc):
'''
Gracefully handle errors.
'''
if self._parent_error_cb:
if self._error_msg!=None:
self._parent_error_cb( self, self._error_msg, exc )
else:
self._parent_error_cb( self, "unknown error", exc )
else:
if self._error_msg!=None:
msg = "unhandled error %s"%(self._error_msg)
else:
msg = "unhandled unknown error"
if self._logger:
self._logger.error( msg, exc_info=True )
else:
traceback.print_exc()
def _protected_cb(self, cb, *args, **kwargs):
"""
Wrap any callback from libevent so that we can be sure that exceptions are
handled and errors forwarded to error_cb.
"""
rval = None
try:
rval = cb(*args, **kwargs)
except Exception, e:
self._handle_error( e )
self._error_msg = None
return rval
def _accept_cb(self):
"""
Accept callback from libevent.
"""
self._error_msg = "error accepting new socket"
(conn, addr) = self._sock.accept()
if self._debug:
self._logger.debug("accepted connection from %s"%(str(addr)))
evsock = EventSocket( read_cb=self._parent_read_cb,
error_cb=self._parent_error_cb,
close_cb=self._parent_close_cb, sock=conn,
debug=self._debug, logger=self._logger,
max_read_buffer=self._max_read_buffer )
if self._parent_accept_cb:
# 31 march 09 aaron - We can't call accept callback asynchronously in the
# event that the socket is quickly opened and closed. What happens is
# that a read event gets scheduled before __parent_accept_cb is run, and
# since the socket is closed, it calls the __parent_close_cb. If the
# socket has not been correctly initialized though, we may encounter
# errors if the close_cb is expected to be changed during the accept
# callback. This is arguably an application-level problem, but handling
# that situation entirely asynchronously would be a giant PITA and prone
# to bugs. We'll avoid that.
self._protected_cb( self._parent_accept_cb, evsock )
# Still reschedule event even if there was an error.
return True
def _read_cb(self):
"""
Read callback from libevent.
"""
# We should be able to use recv_into for speed and efficiency, but sadly
# this was broken after 2.6.1 http://bugs.python.org/issue7827
self._error_msg = "error reading from socket"
data = self._sock.recv( self.getsockopt(socket.SOL_SOCKET,socket.SO_RCVBUF) )
if len(data)>0:
if self._debug:
self._logger.debug( "read %d bytes from %s"%(len(data), self._peername) )
# 23 Feb 09 aaron - There are cases where the client will have started
# pushing data right away, and there's a chance that async handling of
# accept will cause data to be read before the callback function has been
# set. I prefer to ignore data if no read callback defined, but it's
      # better to just limit the overall size of the input buffer than to use
# a synchronous callback to __parent_accept_cb.
# TODO: So what is the best way of handling this problem, and if sticking
# with a max input buffer size, what's the correct algorithm? Maybe better
# approach is to raise a notice to a callback and let the callback decide
# what to do.
self._flag_activity()
self._read_buf.extend( data )
if self._max_read_buffer and len(self._read_buf) > self._max_read_buffer:
if self._debug:
self._logger.debug( "buffer for %s overflowed!"%(self._peername) )
# Clear the input buffer so that the callback flush code isn't called in close
self._read_buf = bytearray()
self.close()
return None
# Callback asynchronously so that priority is given to libevent to
# allocate time slices.
if self._parent_read_cb!=None and self._pending_read_cb_event==None:
self._pending_read_cb_event = \
event.timeout( 0, self._protected_cb, self._parent_read_timer_cb )
else:
self.close()
return None
return True
def _parent_read_timer_cb(self):
"""
Callback when we want the parent to read buffered data.
"""
# Shouldn't need to check closed state because all events should be
# cancelled, but there seems to be a case where that can happen so deal
# with it gracefully. Possibly a bug or edge case in libevent when tons
# of events are in play as this only happened during extreme testing.
if not self._closed:
self._error_msg = "error processing socket input buffer"
# allow for __close_cb and __read_cb to do their thing.
self._pending_read_cb_event = None
# Catch edge case where this could have been cleared after _read_cb
if self._parent_read_cb:
self._parent_read_cb( self )
# never reschedule
return None
def _write_cb(self):
"""
Write callback from libevent.
"""
self._error_msg = "error writing socket output buffer"
# If no data, don't reschedule
if len(self._write_buf)==0:
return None
# 7 April 09 aaron - Changed this algorithm so that we continually send
# data from the buffer until the socket didn't accept all of it, then
# break. This should be a bit faster.
if self._debug:
total_sent = 0
total_len = sum( map(len,self._write_buf) )
while len(self._write_buf)>0:
cur = self._write_buf.popleft()
# Catch all env errors since that should catch OSError, IOError and
# socket.error.
try:
bytes_sent = self._sock.send( cur )
      except EnvironmentError as e:
# For now this seems to be the only error that isn't fatal. It seems
# to be used only for nonblocking sockets and implies that it can't
# buffer any more data right now.
if e.errno==errno.EAGAIN:
self._write_buf.appendleft( cur )
if self._debug:
self._logger.debug( '"%s" raised, waiting to flush to %s', e, self._peername )
break
else:
raise
if self._debug:
total_sent += bytes_sent
if bytes_sent < len(cur):
# keep the first entry and set to all remaining bytes.
self._write_buf.appendleft( cur[bytes_sent:] )
break
if self._debug:
self._logger.debug( "wrote %d/%d bytes to %s", total_sent,total_len,self._peername )
# also flag activity here? might not be necessary, but in some cases the
# timeout could still be small enough to trigger between accesses to the
# socket output.
self._flag_activity()
if len(self._write_buf)>0:
return True
    if self._parent_output_empty_cb is not None:
self._parent_output_empty_cb( self )
return None
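  # A standalone sketch of the partial-send pattern implemented above
  # (the names are illustrative, not part of this class's API):
  #
  #   import errno
  #   def drain(sock, write_buf):          # write_buf: collections.deque
  #       while write_buf:
  #           cur = write_buf.popleft()
  #           try:
  #               sent = sock.send(cur)
  #           except EnvironmentError as e:
  #               if e.errno == errno.EAGAIN:
  #                   write_buf.appendleft(cur)      # kernel buffer full
  #                   return True                    # data still pending
  #               raise
  #           if sent < len(cur):
  #               write_buf.appendleft(cur[sent:])   # keep the unsent tail
  #               return True
  #       return False                               # fully drained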
def _inactive_cb(self):
"""
Timeout when a socket has been inactive for a long time.
"""
self._error_msg = "error closing inactive socket"
self.close()
def _flag_activity(self):
"""
Flag that this socket is active.
"""
    # is there a better way of resetting a timer?
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = event.timeout( self._inactive_timeout, self._protected_cb, self._inactive_cb )
def write(self, data):
"""
Write some data. Will raise socket.error if connection is closed.
"""
if self._closed:
raise socket.error('write error: socket is closed')
# Always append the data to the write buffer, even if we're not connected
# yet.
self._write_buf.append( data )
# 21 July 09 aaron - I'm not sure if this has a significant benefit, but in
# trying to improve throughput I confirmed that this doesn't break anything
# and keeping the event queue cleaner is certainly good.
if self._write_event and not self._write_event.pending():
self._write_event.add()
if self._debug > 1:
self._logger.debug("buffered %d bytes (%d total) to %s",
len(data), sum(map(len,self._write_buf)), self._peername )
# Flag activity here so we don't timeout in case that event is ready to
# fire and we're just now writing.
self._flag_activity()
def read(self):
"""
Return the current read buffer. Will return a bytearray object.
"""
if self._closed:
raise socket.error('read error: socket is closed')
rval = self._read_buf
self._read_buf = bytearray()
return rval
def buffer(self, s):
    '''
    Re-buffer some data. A bytearray is assigned directly as the current
    input buffer; anything else is appended to the current buffer. Assumes
    that re-buffering happens in the same cycle as the read() call, as
    anything other than that would be nearly impossible to handle inside an
    application.
    '''
if isinstance(s, bytearray):
self._read_buf = s
else:
self._read_buf.extend( s )
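# A minimal usage sketch (not part of this module's API): a line-oriented
# read callback that consumes complete lines and re-buffers any partial
# tail via buffer(), assuming a libevent dispatch loop is running.
def _example_read_cb(evsock):
  data = evsock.read()                  # fetch and clear the input buffer
  if b'\n' not in data:
    evsock.buffer(data)                 # incomplete message: re-buffer it
    return
  msg, rest = bytes(data).split(b'\n', 1)
  evsock.buffer(bytearray(rest))        # keep anything after the newline
  evsock.write(msg + b'\n')             # echo the complete message back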
|
agoragames/py-eventsocket
|
eventsocket.py
|
Python
|
bsd-3-clause
| 20,384
|
def test_basic_editor(scratch_tree):
sess = scratch_tree.edit('/')
assert sess.id == ''
assert sess.path == '/'
assert sess.record is not None
assert sess['_model'] == 'page'
assert sess['title'] == 'Index'
assert sess['body'] == 'Hello World!'
sess['body'] = 'A new body'
sess.commit()
assert sess.closed
with open(sess.get_fs_path()) as f:
assert f.read().splitlines() == [
'_model: page',
'---',
'title: Index',
'---',
'body: A new body'
]
def test_create_alt(scratch_tree, scratch_pad):
sess = scratch_tree.edit('/', alt='de')
assert sess.id == ''
assert sess.path == '/'
assert sess.record is not None
assert sess['_model'] == 'page'
assert sess['title'] == 'Index'
assert sess['body'] == 'Hello World!'
sess['body'] = 'Hallo Welt!'
sess.commit()
assert sess.closed
# When we use the editor to change this, we only want the fields that
# changed compared to the base to be included.
with open(sess.get_fs_path(alt='de')) as f:
assert f.read().splitlines() == [
'body: Hallo Welt!'
]
scratch_pad.cache.flush()
item = scratch_pad.get('/', alt='de')
assert item['_slug'] == ''
assert item['title'] == 'Index'
assert item['body'].source == 'Hallo Welt!'
assert item['_model'] == 'page'
|
bameda/lektor
|
tests/test_editor.py
|
Python
|
bsd-3-clause
| 1,424
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 06:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('news', '0003_auto_20170228_2249'),
]
operations = [
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('city', models.CharField(default='Testville', max_length=200)),
('state', models.CharField(default='Montigania', max_length=200)),
],
),
migrations.AddField(
model_name='newspaper',
name='next_paper',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='news.Newspaper'),
),
migrations.AddField(
model_name='newspaper',
name='prev_paper',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='news.Newspaper'),
),
migrations.AlterField(
model_name='newspaper',
name='date_ended',
field=models.DateField(blank=True, null=True, verbose_name='date ended'),
),
migrations.AlterUniqueTogether(
name='location',
unique_together=set([('city', 'state')]),
),
migrations.AddField(
model_name='newspaper',
name='location',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='news.Location'),
),
]
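# A hedged sketch of the model state this migration produces, reconstructed
# from the operations above (not copied from the app's models.py):
#
#   class Location(models.Model):
#       city = models.CharField(default='Testville', max_length=200)
#       state = models.CharField(default='Montigania', max_length=200)
#
#       class Meta:
#           unique_together = {('city', 'state')}
#
#   class Newspaper(models.Model):
#       ...  # fields from earlier migrations
#       date_ended = models.DateField('date ended', blank=True, null=True)
#       location = models.ForeignKey(Location, default=1,
#                                    on_delete=models.CASCADE)
#       next_paper = models.ForeignKey('self', default=None, related_name='+',
#                                      on_delete=models.CASCADE)
#       prev_paper = models.ForeignKey('self', default=None, related_name='+',
#                                      on_delete=models.CASCADE)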
|
GeorgiaTechDHLab/TOME
|
news/migrations/0004_auto_20170307_0605.py
|
Python
|
bsd-3-clause
| 1,746
|
__author__="Joao"
|
joaofrancese/heavy-destruction
|
Panda/src/objects/__init__.py
|
Python
|
bsd-3-clause
| 17
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(
template_name='pages/home.html'), name="home"),
url(r'^about/$',
TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin
#url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("TestYourProject.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
url(r'^api-auth/', include(
'rest_framework.urls', namespace='rest_framework')),
url(r'^api/', include('core.api', namespace='api')),
url(r'^rest-auth/', include('rest_auth.urls')),
url(r'^rest-auth/registration', include('rest_auth.registration.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', 'django.views.defaults.bad_request'),
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
]
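    # Note: the string view references above were removed in Django 1.10;
    # a hedged sketch of the callable-based equivalent for newer versions:
    #
    #   from django.views import defaults as default_views
    #   urlpatterns += [
    #       url(r'^400/$', default_views.bad_request,
    #           kwargs={'exception': Exception('Bad Request!')}),
    #       url(r'^403/$', default_views.permission_denied,
    #           kwargs={'exception': Exception('Permission Denied')}),
    #       url(r'^404/$', default_views.page_not_found,
    #           kwargs={'exception': Exception('Page not Found')}),
    #       url(r'^500/$', default_views.server_error),
    #   ]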
|
LABETE/TestYourProject
|
config/urls.py
|
Python
|
bsd-3-clause
| 1,501
|
def extractDlscanlationsCom(item):
'''
Parser for 'dlscanlations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
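# A hedged illustration of the item shape this parser expects (keys taken
# from the lookups above, the values invented):
#
#   extractDlscanlationsCom({'title': 'PRC Chapter 12', 'tags': ['PRC']})
#
# parses vol/chp from the title and emits a 'translated' release message;
# titles containing "preview" (or with neither vol nor chp) return None,
# and items with no matching tag fall through to False.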
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractDlscanlationsCom.py
|
Python
|
bsd-3-clause
| 547
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
class WSPayForm(forms.Form):
ShopID = forms.CharField(widget=forms.HiddenInput)
ShoppingCartID = forms.CharField(widget=forms.HiddenInput)
TotalAmount = forms.CharField(widget=forms.HiddenInput)
Signature = forms.CharField(widget=forms.HiddenInput)
ReturnURL = forms.CharField(widget=forms.HiddenInput)
CancelURL = forms.CharField(widget=forms.HiddenInput)
ReturnErrorURL = forms.CharField(widget=forms.HiddenInput)
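# A minimal usage sketch (the values are placeholders, not a real WSPay
# configuration): the form renders as hidden inputs inside a POST to the
# WSPay gateway.
def _example_wspay_form():
    return WSPayForm(initial={
        'ShopID': 'MYSHOP',
        'ShoppingCartID': '42',
        'TotalAmount': '100,00',
        'Signature': 'computed-request-signature',
        'ReturnURL': 'https://example.com/wspay/return/',
        'CancelURL': 'https://example.com/wspay/cancel/',
        'ReturnErrorURL': 'https://example.com/wspay/error/',
    })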
|
dinoperovic/django-shop-wspay
|
shop_wspay/forms.py
|
Python
|
bsd-3-clause
| 536
|
import operator
import numpy as np
from pandas._libs import index as libindex
from pandas import compat
from pandas.compat.numpy import function as nv
from pandas.core.dtypes.generic import ABCCategorical, ABCSeries
from pandas.core.dtypes.dtypes import CategoricalDtype
from pandas.core.dtypes.common import (
is_categorical_dtype,
_ensure_platform_int,
is_list_like,
is_interval_dtype,
is_scalar)
from pandas.core.dtypes.missing import array_equivalent, isna
from pandas.core.algorithms import take_1d
from pandas.util._decorators import Appender, cache_readonly
from pandas.core.config import get_option
from pandas.core.indexes.base import Index, _index_shared_docs
from pandas.core import accessor
import pandas.core.common as com
import pandas.core.missing as missing
import pandas.core.indexes.base as ibase
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(dict(target_klass='CategoricalIndex'))
class CategoricalIndex(Index, accessor.PandasDelegate):
"""
Immutable Index implementing an ordered, sliceable set. CategoricalIndex
represents a sparsely populated Index with an underlying Categorical.
Parameters
----------
data : array-like or Categorical, (1-dimensional)
categories : optional, array-like
categories for the CategoricalIndex
ordered : boolean,
designating if the categories are ordered
copy : bool
Make a copy of input ndarray
name : object
Name to be stored in the index
Attributes
----------
codes
categories
ordered
Methods
-------
rename_categories
reorder_categories
add_categories
remove_categories
remove_unused_categories
set_categories
as_ordered
as_unordered
map
See Also
--------
Categorical, Index
"""
_typ = 'categoricalindex'
_engine_type = libindex.Int64Engine
_attributes = ['name']
def __new__(cls, data=None, categories=None, ordered=None, dtype=None,
copy=False, name=None, fastpath=False):
if fastpath:
return cls._simple_new(data, name=name, dtype=dtype)
if name is None and hasattr(data, 'name'):
name = data.name
if isinstance(data, ABCCategorical):
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
elif isinstance(data, CategoricalIndex):
data = data._data
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
else:
# don't allow scalars
# if data is None, then categories must be provided
if is_scalar(data):
if data is not None or categories is None:
cls._scalar_data_error(data)
data = []
data = cls._create_categorical(cls, data, categories, ordered,
dtype)
if copy:
data = data.copy()
return cls._simple_new(data, name=name)
def _create_from_codes(self, codes, categories=None, ordered=None,
name=None):
"""
*this is an internal non-public method*
create the correct categorical from codes
Parameters
----------
codes : new codes
categories : optional categories, defaults to existing
ordered : optional ordered attribute, defaults to existing
name : optional name attribute, defaults to existing
Returns
-------
CategoricalIndex
"""
from pandas.core.arrays import Categorical
if categories is None:
categories = self.categories
if ordered is None:
ordered = self.ordered
if name is None:
name = self.name
        cat = Categorical.from_codes(codes, categories=categories,
                                     ordered=ordered)
return CategoricalIndex(cat, name=name)
@staticmethod
def _create_categorical(self, data, categories=None, ordered=None,
dtype=None):
"""
*this is an internal non-public method*
create the correct categorical from data and the properties
Parameters
----------
data : data for new Categorical
categories : optional categories, defaults to existing
ordered : optional ordered attribute, defaults to existing
dtype : CategoricalDtype, defaults to existing
Returns
-------
Categorical
"""
if (isinstance(data, (ABCSeries, type(self))) and
is_categorical_dtype(data)):
data = data.values
if not isinstance(data, ABCCategorical):
if ordered is None and dtype is None:
ordered = False
from pandas.core.arrays import Categorical
data = Categorical(data, categories=categories, ordered=ordered,
dtype=dtype)
else:
if categories is not None:
data = data.set_categories(categories, ordered=ordered)
elif ordered is not None and ordered != data.ordered:
data = data.set_ordered(ordered)
if isinstance(dtype, CategoricalDtype):
# we want to silently ignore dtype='category'
data = data._set_dtype(dtype)
return data
@classmethod
def _simple_new(cls, values, name=None, categories=None, ordered=None,
dtype=None, **kwargs):
result = object.__new__(cls)
values = cls._create_categorical(cls, values, categories, ordered,
dtype=dtype)
result._data = values
result.name = name
for k, v in compat.iteritems(kwargs):
setattr(result, k, v)
result._reset_identity()
return result
@Appender(_index_shared_docs['_shallow_copy'])
def _shallow_copy(self, values=None, categories=None, ordered=None,
dtype=None, **kwargs):
# categories and ordered can't be part of attributes,
# as these are properties
# we want to reuse self.dtype if possible, i.e. neither are
# overridden.
if dtype is not None and (categories is not None or
ordered is not None):
raise TypeError("Cannot specify both `dtype` and `categories` "
"or `ordered`")
if categories is None and ordered is None:
dtype = self.dtype if dtype is None else dtype
return super(CategoricalIndex, self)._shallow_copy(
values=values, dtype=dtype, **kwargs)
if categories is None:
categories = self.categories
if ordered is None:
ordered = self.ordered
return super(CategoricalIndex, self)._shallow_copy(
values=values, categories=categories,
ordered=ordered, **kwargs)
def _is_dtype_compat(self, other):
"""
*this is an internal non-public method*
provide a comparison between the dtype of self and other (coercing if
needed)
Raises
------
TypeError if the dtypes are not compatible
"""
if is_categorical_dtype(other):
if isinstance(other, CategoricalIndex):
other = other._values
if not other.is_dtype_equal(self):
raise TypeError("categories must match existing categories "
"when appending")
else:
values = other
if not is_list_like(values):
values = [values]
other = CategoricalIndex(self._create_categorical(
self, other, categories=self.categories, ordered=self.ordered))
if not other.isin(values).all():
raise TypeError("cannot append a non-category item to a "
"CategoricalIndex")
return other
def equals(self, other):
"""
        Determines if two CategoricalIndex objects contain the same elements.
"""
if self.is_(other):
return True
if not isinstance(other, Index):
return False
try:
other = self._is_dtype_compat(other)
return array_equivalent(self._data, other)
except (TypeError, ValueError):
pass
return False
@property
def _formatter_func(self):
return self.categories._formatter_func
def _format_attrs(self):
"""
Return a list of tuples of the (attr,formatted_value)
"""
max_categories = (10 if get_option("display.max_categories") == 0 else
get_option("display.max_categories"))
attrs = [
('categories',
ibase.default_pprint(self.categories,
max_seq_items=max_categories)),
('ordered', self.ordered)]
if self.name is not None:
attrs.append(('name', ibase.default_pprint(self.name)))
attrs.append(('dtype', "'%s'" % self.dtype.name))
max_seq_items = get_option('display.max_seq_items') or len(self)
if len(self) > max_seq_items:
attrs.append(('length', len(self)))
return attrs
@property
def inferred_type(self):
return 'categorical'
@property
def values(self):
""" return the underlying data, which is a Categorical """
return self._data
@property
def itemsize(self):
# Size of the items in categories, not codes.
return self.values.itemsize
def get_values(self):
""" return the underlying data as an ndarray """
return self._data.get_values()
def tolist(self):
return self._data.tolist()
@property
def codes(self):
return self._data.codes
@property
def categories(self):
return self._data.categories
@property
def ordered(self):
return self._data.ordered
def _reverse_indexer(self):
return self._data._reverse_indexer()
@Appender(_index_shared_docs['__contains__'] % _index_doc_kwargs)
def __contains__(self, key):
hash(key)
if isna(key): # if key is a NaN, check if any NaN is in self.
return self.hasnans
# is key in self.categories? Then get its location.
# If not (i.e. KeyError), it logically can't be in self either
try:
loc = self.categories.get_loc(key)
except KeyError:
return False
# loc is the location of key in self.categories, but also the value
# for key in self.codes and in self._engine. key may be in categories,
# but still not in self, check this. Example:
# 'b' in CategoricalIndex(['a'], categories=['a', 'b']) # False
if is_scalar(loc):
return loc in self._engine
else:
# if self.categories is IntervalIndex, loc is an array
# check if any scalar of the array is in self._engine
return any(loc_ in self._engine for loc_ in loc)
@Appender(_index_shared_docs['contains'] % _index_doc_kwargs)
def contains(self, key):
hash(key)
return key in self
def __array__(self, dtype=None):
""" the array interface, return my values """
return np.array(self._data, dtype=dtype)
@Appender(_index_shared_docs['astype'])
def astype(self, dtype, copy=True):
if is_interval_dtype(dtype):
from pandas import IntervalIndex
return IntervalIndex(np.array(self))
elif is_categorical_dtype(dtype):
# GH 18630
dtype = self.dtype.update_dtype(dtype)
if dtype == self.dtype:
return self.copy() if copy else self
return super(CategoricalIndex, self).astype(dtype=dtype, copy=copy)
@cache_readonly
def _isnan(self):
""" return if each value is nan"""
return self._data.codes == -1
@Appender(ibase._index_shared_docs['fillna'])
def fillna(self, value, downcast=None):
self._assert_can_do_op(value)
return CategoricalIndex(self._data.fillna(value), name=self.name)
def argsort(self, *args, **kwargs):
return self.values.argsort(*args, **kwargs)
@cache_readonly
def _engine(self):
# we are going to look things up with the codes themselves
return self._engine_type(lambda: self.codes.astype('i8'), len(self))
# introspection
@cache_readonly
def is_unique(self):
return self._engine.is_unique
@property
def is_monotonic_increasing(self):
return self._engine.is_monotonic_increasing
@property
def is_monotonic_decreasing(self):
return self._engine.is_monotonic_decreasing
@Appender(_index_shared_docs['index_unique'] % _index_doc_kwargs)
def unique(self, level=None):
if level is not None:
self._validate_index_level(level)
result = self.values.unique()
# CategoricalIndex._shallow_copy keeps original categories
# and ordered if not otherwise specified
return self._shallow_copy(result, categories=result.categories,
ordered=result.ordered)
@Appender(Index.duplicated.__doc__)
def duplicated(self, keep='first'):
from pandas._libs.hashtable import duplicated_int64
codes = self.codes.astype('i8')
return duplicated_int64(codes, keep)
def _to_safe_for_reshape(self):
""" convert to object if we are a categorical """
return self.astype('object')
def get_loc(self, key, method=None):
"""
Get integer location, slice or boolean mask for requested label.
Parameters
----------
key : label
method : {None}
* default: exact matches only.
Returns
-------
loc : int if unique index, slice if monotonic index, else mask
        Examples
        --------
        >>> unique_index = pd.CategoricalIndex(list('abc'))
        >>> unique_index.get_loc('b')
        1
        >>> monotonic_index = pd.CategoricalIndex(list('abbc'))
        >>> monotonic_index.get_loc('b')
        slice(1, 3, None)
        >>> non_monotonic_index = pd.CategoricalIndex(list('abcb'))
>>> non_monotonic_index.get_loc('b')
array([False, True, False, True], dtype=bool)
"""
codes = self.categories.get_loc(key)
if (codes == -1):
raise KeyError(key)
return self._engine.get_loc(codes)
def get_value(self, series, key):
"""
Fast lookup of value from 1-dimensional ndarray. Only use this if you
know what you're doing
"""
try:
k = com._values_from_object(key)
k = self._convert_scalar_indexer(k, kind='getitem')
indexer = self.get_loc(k)
return series.iloc[indexer]
except (KeyError, TypeError):
pass
        # we might be a positional indexer
return super(CategoricalIndex, self).get_value(series, key)
def _can_reindex(self, indexer):
""" always allow reindexing """
pass
@Appender(_index_shared_docs['where'])
def where(self, cond, other=None):
if other is None:
other = self._na_value
values = np.where(cond, self.values, other)
from pandas.core.arrays import Categorical
cat = Categorical(values,
categories=self.categories,
ordered=self.ordered)
return self._shallow_copy(cat, **self._get_attributes_dict())
def reindex(self, target, method=None, level=None, limit=None,
tolerance=None):
"""
Create index with target's values (move/add/delete values as necessary)
Returns
-------
new_index : pd.Index
Resulting index
indexer : np.ndarray or None
Indices of output values in original index
"""
if method is not None:
raise NotImplementedError("argument method is not implemented for "
"CategoricalIndex.reindex")
if level is not None:
raise NotImplementedError("argument level is not implemented for "
"CategoricalIndex.reindex")
if limit is not None:
raise NotImplementedError("argument limit is not implemented for "
"CategoricalIndex.reindex")
target = ibase._ensure_index(target)
if not is_categorical_dtype(target) and not target.is_unique:
raise ValueError("cannot reindex with a non-unique indexer")
indexer, missing = self.get_indexer_non_unique(np.array(target))
if len(self.codes):
new_target = self.take(indexer)
else:
new_target = target
# filling in missing if needed
if len(missing):
cats = self.categories.get_indexer(target)
if (cats == -1).any():
# coerce to a regular index here!
result = Index(np.array(self), name=self.name)
new_target, indexer, _ = result._reindex_non_unique(
np.array(target))
else:
codes = new_target.codes.copy()
codes[indexer == -1] = cats[missing]
new_target = self._create_from_codes(codes)
# we always want to return an Index type here
# to be consistent with .reindex for other index types (e.g. they don't
# coerce based on the actual values, only on the dtype)
# unless we had an initial Categorical to begin with
# in which case we are going to conform to the passed Categorical
new_target = np.asarray(new_target)
if is_categorical_dtype(target):
new_target = target._shallow_copy(new_target, name=self.name)
else:
new_target = Index(new_target, name=self.name)
return new_target, indexer
def _reindex_non_unique(self, target):
""" reindex from a non-unique; which CategoricalIndex's are almost
always
"""
new_target, indexer = self.reindex(target)
new_indexer = None
check = indexer == -1
if check.any():
new_indexer = np.arange(len(self.take(indexer)))
new_indexer[check] = -1
cats = self.categories.get_indexer(target)
if not (cats == -1).any():
# .reindex returns normal Index. Revert to CategoricalIndex if
# all targets are included in my categories
new_target = self._shallow_copy(new_target)
return new_target, indexer, new_indexer
@Appender(_index_shared_docs['get_indexer'] % _index_doc_kwargs)
def get_indexer(self, target, method=None, limit=None, tolerance=None):
from pandas.core.arrays.categorical import _recode_for_categories
method = missing.clean_reindex_fill_method(method)
target = ibase._ensure_index(target)
if self.is_unique and self.equals(target):
return np.arange(len(self), dtype='intp')
if method == 'pad' or method == 'backfill':
raise NotImplementedError("method='pad' and method='backfill' not "
"implemented yet for CategoricalIndex")
elif method == 'nearest':
raise NotImplementedError("method='nearest' not implemented yet "
'for CategoricalIndex')
if (isinstance(target, CategoricalIndex) and
self.values.is_dtype_equal(target)):
if self.values.equals(target.values):
# we have the same codes
codes = target.codes
else:
codes = _recode_for_categories(target.codes,
target.categories,
self.values.categories)
else:
if isinstance(target, CategoricalIndex):
code_indexer = self.categories.get_indexer(target.categories)
codes = take_1d(code_indexer, target.codes, fill_value=-1)
else:
codes = self.categories.get_indexer(target)
indexer, _ = self._engine.get_indexer_non_unique(codes)
return _ensure_platform_int(indexer)
@Appender(_index_shared_docs['get_indexer_non_unique'] % _index_doc_kwargs)
def get_indexer_non_unique(self, target):
target = ibase._ensure_index(target)
if isinstance(target, CategoricalIndex):
# Indexing on codes is more efficient if categories are the same:
if target.categories is self.categories:
target = target.codes
indexer, missing = self._engine.get_indexer_non_unique(target)
return _ensure_platform_int(indexer), missing
target = target.values
codes = self.categories.get_indexer(target)
indexer, missing = self._engine.get_indexer_non_unique(codes)
return _ensure_platform_int(indexer), missing
@Appender(_index_shared_docs['_convert_scalar_indexer'])
def _convert_scalar_indexer(self, key, kind=None):
if self.categories._defer_to_indexing:
return self.categories._convert_scalar_indexer(key, kind=kind)
return super(CategoricalIndex, self)._convert_scalar_indexer(
key, kind=kind)
@Appender(_index_shared_docs['_convert_list_indexer'])
def _convert_list_indexer(self, keyarr, kind=None):
# Return our indexer or raise if all of the values are not included in
# the categories
if self.categories._defer_to_indexing:
indexer = self.categories._convert_list_indexer(keyarr, kind=kind)
return Index(self.codes).get_indexer_for(indexer)
indexer = self.categories.get_indexer(np.asarray(keyarr))
if (indexer == -1).any():
raise KeyError(
"a list-indexer must only "
"include values that are "
"in the categories")
return self.get_indexer(keyarr)
@Appender(_index_shared_docs['_convert_arr_indexer'])
def _convert_arr_indexer(self, keyarr):
keyarr = com._asarray_tuplesafe(keyarr)
if self.categories._defer_to_indexing:
return keyarr
return self._shallow_copy(keyarr)
@Appender(_index_shared_docs['_convert_index_indexer'])
def _convert_index_indexer(self, keyarr):
return self._shallow_copy(keyarr)
@Appender(_index_shared_docs['take'] % _index_doc_kwargs)
def take(self, indices, axis=0, allow_fill=True,
fill_value=None, **kwargs):
nv.validate_take(tuple(), kwargs)
indices = _ensure_platform_int(indices)
taken = self._assert_take_fillable(self.codes, indices,
allow_fill=allow_fill,
fill_value=fill_value,
na_value=-1)
return self._create_from_codes(taken)
def is_dtype_equal(self, other):
return self._data.is_dtype_equal(other)
take_nd = take
def map(self, mapper):
"""
Map values using input correspondence (a dict, Series, or function).
Maps the values (their categories, not the codes) of the index to new
categories. If the mapping correspondence is one-to-one the result is a
:class:`~pandas.CategoricalIndex` which has the same order property as
the original, otherwise an :class:`~pandas.Index` is returned.
If a `dict` or :class:`~pandas.Series` is used any unmapped category is
mapped to `NaN`. Note that if this happens an :class:`~pandas.Index`
will be returned.
Parameters
----------
mapper : function, dict, or Series
Mapping correspondence.
Returns
-------
pandas.CategoricalIndex or pandas.Index
Mapped index.
See Also
--------
Index.map : Apply a mapping correspondence on an
:class:`~pandas.Index`.
Series.map : Apply a mapping correspondence on a
:class:`~pandas.Series`.
Series.apply : Apply more complex functions on a
:class:`~pandas.Series`.
Examples
--------
>>> idx = pd.CategoricalIndex(['a', 'b', 'c'])
>>> idx
CategoricalIndex(['a', 'b', 'c'], categories=['a', 'b', 'c'],
ordered=False, dtype='category')
>>> idx.map(lambda x: x.upper())
CategoricalIndex(['A', 'B', 'C'], categories=['A', 'B', 'C'],
ordered=False, dtype='category')
>>> idx.map({'a': 'first', 'b': 'second', 'c': 'third'})
CategoricalIndex(['first', 'second', 'third'], categories=['first',
'second', 'third'], ordered=False, dtype='category')
If the mapping is one-to-one the ordering of the categories is
preserved:
>>> idx = pd.CategoricalIndex(['a', 'b', 'c'], ordered=True)
>>> idx
CategoricalIndex(['a', 'b', 'c'], categories=['a', 'b', 'c'],
ordered=True, dtype='category')
>>> idx.map({'a': 3, 'b': 2, 'c': 1})
CategoricalIndex([3, 2, 1], categories=[3, 2, 1], ordered=True,
dtype='category')
If the mapping is not one-to-one an :class:`~pandas.Index` is returned:
>>> idx.map({'a': 'first', 'b': 'second', 'c': 'first'})
Index(['first', 'second', 'first'], dtype='object')
If a `dict` is used, all unmapped categories are mapped to `NaN` and
the result is an :class:`~pandas.Index`:
>>> idx.map({'a': 'first', 'b': 'second'})
Index(['first', 'second', nan], dtype='object')
"""
return self._shallow_copy_with_infer(self.values.map(mapper))
def delete(self, loc):
"""
Make new Index with passed location(-s) deleted
Returns
-------
new_index : Index
"""
return self._create_from_codes(np.delete(self.codes, loc))
def insert(self, loc, item):
"""
Make new Index inserting new item at location. Follows
Python list.append semantics for negative values
Parameters
----------
loc : int
item : object
Returns
-------
new_index : Index
Raises
------
        TypeError if the item is not in the categories
"""
code = self.categories.get_indexer([item])
if (code == -1) and not (is_scalar(item) and isna(item)):
raise TypeError("cannot insert an item into a CategoricalIndex "
"that is not already an existing category")
codes = self.codes
codes = np.concatenate((codes[:loc], code, codes[loc:]))
return self._create_from_codes(codes)
def _concat(self, to_concat, name):
# if calling index is category, don't check dtype of others
return CategoricalIndex._concat_same_dtype(self, to_concat, name)
def _concat_same_dtype(self, to_concat, name):
"""
        Concatenate to_concat, whose elements have the same class.
        Raises TypeError if any element is not in the categories.
"""
to_concat = [self._is_dtype_compat(c) for c in to_concat]
codes = np.concatenate([c.codes for c in to_concat])
result = self._create_from_codes(codes, name=name)
        # _create_from_codes defaults name to self.name when it is None;
        # force the name that was actually passed (which may be None).
        result.name = name
return result
def _codes_for_groupby(self, sort, observed):
""" Return a Categorical adjusted for groupby """
return self.values._codes_for_groupby(sort, observed)
@classmethod
def _add_comparison_methods(cls):
""" add in comparison methods """
def _make_compare(op):
opname = '__{op}__'.format(op=op.__name__)
def _evaluate_compare(self, other):
# if we have a Categorical type, then must have the same
# categories
if isinstance(other, CategoricalIndex):
other = other._values
elif isinstance(other, Index):
other = self._create_categorical(
self, other._values, categories=self.categories,
ordered=self.ordered)
if isinstance(other, (ABCCategorical, np.ndarray,
ABCSeries)):
if len(self.values) != len(other):
raise ValueError("Lengths must match to compare")
if isinstance(other, ABCCategorical):
if not self.values.is_dtype_equal(other):
raise TypeError("categorical index comparisons must "
"have the same categories and ordered "
"attributes")
result = op(self.values, other)
if isinstance(result, ABCSeries):
# Dispatch to pd.Categorical returned NotImplemented
# and we got a Series back; down-cast to ndarray
result = result.values
return result
return compat.set_function_name(_evaluate_compare, opname, cls)
cls.__eq__ = _make_compare(operator.eq)
cls.__ne__ = _make_compare(operator.ne)
cls.__lt__ = _make_compare(operator.lt)
cls.__gt__ = _make_compare(operator.gt)
cls.__le__ = _make_compare(operator.le)
cls.__ge__ = _make_compare(operator.ge)
def _delegate_method(self, name, *args, **kwargs):
""" method delegation to the ._values """
method = getattr(self._values, name)
if 'inplace' in kwargs:
raise ValueError("cannot use inplace with CategoricalIndex")
res = method(*args, **kwargs)
if is_scalar(res):
return res
return CategoricalIndex(res, name=self.name)
@classmethod
def _add_accessors(cls):
""" add in Categorical accessor methods """
from pandas.core.arrays import Categorical
CategoricalIndex._add_delegate_accessors(
delegate=Categorical, accessors=["rename_categories",
"reorder_categories",
"add_categories",
"remove_categories",
"remove_unused_categories",
"set_categories",
"as_ordered", "as_unordered",
"min", "max"],
typ='method', overwrite=True)
CategoricalIndex._add_numeric_methods_add_sub_disabled()
CategoricalIndex._add_numeric_methods_disabled()
CategoricalIndex._add_logical_methods_disabled()
CategoricalIndex._add_comparison_methods()
CategoricalIndex._add_accessors()
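# A short demonstration sketch (not part of the pandas API) of behaviours
# implemented above: slice-based get_loc on a monotonic index, containment
# checked against the codes rather than the categories, and insert()
# rejecting values outside the categories.
def _categorical_index_demo():
    ci = CategoricalIndex(list('abbc'), categories=list('abcd'))
    assert ci.get_loc('b') == slice(1, 3, None)
    # 'd' is a category but appears nowhere in the index:
    assert 'd' not in ci
    try:
        ci.insert(0, 'z')  # 'z' is not an existing category
    except TypeError:
        pass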
|
louispotok/pandas
|
pandas/core/indexes/category.py
|
Python
|
bsd-3-clause
| 31,573
|
from BinPy.Gates import *
class PowerSource:
"""
Models a Power Source from which various connectors can tap by connecting to it.
taps: The list of all connectors connected to this power source.
connect(): Takes in one or more connectors as input and connects them to the power source.
disconnect(): Takes in one or more connectors as input and disconnects them from the power source.
"""
def __init__(self):
self.taps = []
def connect(self, *connectors):
"""Takes in one or more connectors as an input and taps to the power source."""
for connector in connectors:
if not isinstance(connector, Connector):
raise Exception("Error: Input given is not a connector")
else:
if len(connector.connections['output']) != 0:
raise Exception(
"ERROR: The connector is already an output of some other object")
self.taps.append(connector)
connector.state = 1
connector.tap(self, 'output')
connector.trigger()
def disconnect(self, *connectors):
"""
Takes in one or more connectors as an input and disconnects them from the power source.
A floating connector has a value of None.
A message is printed if a specified connector is not already tapping from this source.
"""
for connector in connectors:
if isinstance(connector, Connector):
try:
self.taps.remove(connector)
connector.state = None
connector.connections['output'].remove(self)
connector.trigger()
                except ValueError:
print (
"The specified connector is not tapped to this power source")
else:
raise Exception("Error: Input given is not a connector")
|
coder006/BinPy
|
BinPy/tools/powersource.py
|
Python
|
bsd-3-clause
| 1,962
|
''' Simple test for apachelogs '''
import unittest
from apachelogs import ApacheLogFile
class apachelogs_test(unittest.TestCase):
def test_foo(self):
log = ApacheLogFile('test.log')
        line = next(iter(log))
        self.assertEqual(line.ip, '127.0.0.1')
        self.assertEqual(line.ident, '-')
        self.assertEqual(line.http_user, 'frank')
        self.assertEqual(line.time, '5/Oct/2000:13:55:36 -0700')
        self.assertEqual(line.request_line, 'GET /apache_pb.gif?foo=bar&baz=zip HTTP/1.0')
        self.assertEqual(line.http_response_code, '200')
        self.assertEqual(line.http_response_size, '2326')
        self.assertEqual(line.referrer, 'http://www.example.com/start.html')
        self.assertEqual(line.user_agent, 'Mozilla/4.08 [en] (Win98; I ;Nav)')
log.close()
def setUp(self):
pass
if __name__ == '__main__':
unittest.main()
|
bkjones/loghetti
|
test/apachelogs_test.py
|
Python
|
bsd-3-clause
| 930
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# global default settings for physical simulation
# ---------------------------------------------------------------------
# system parameters
d1 = 0.08
l2 = 0.19
d2 = 0.08
l3 = 0.19
initial_state = [0, 0, 0, 0, 0]
# animation parameters
dia = 0.1
car_radius = dia/2
wheel = dia/4
|
cklb/PyMoskito
|
pymoskito/examples/car/settings.py
|
Python
|
bsd-3-clause
| 379
|
from django.contrib import messages
from django.http import Http404, HttpResponse
from django.shortcuts import render
from corehq.apps.app_manager.dbaccessors import get_app
from corehq.apps.app_manager.decorators import require_deploy_apps, \
require_can_edit_apps
from corehq.apps.app_manager.xform import XForm
from corehq.util.view_utils import set_file_download
from dimagi.utils.logging import notify_exception
from dimagi.utils.subprocess_timeout import ProcessTimedOut
@require_can_edit_apps
def multimedia_list_download(request, domain, app_id):
app = get_app(domain, app_id)
include_audio = request.GET.get("audio", True)
include_images = request.GET.get("images", True)
strip_jr = request.GET.get("strip_jr", True)
filelist = []
for m in app.get_modules():
for f in m.get_forms():
parsed = XForm(f.source)
parsed.validate()
if include_images:
filelist.extend(parsed.image_references)
if include_audio:
filelist.extend(parsed.audio_references)
if strip_jr:
filelist = [s.replace("jr://file/", "") for s in filelist if s]
response = HttpResponse()
set_file_download(response, 'list.txt')
response.write("\n".join(sorted(set(filelist))))
return response
@require_deploy_apps
def multimedia_ajax(request, domain, app_id, template='app_manager/v1/partials/multimedia_ajax.html'):
app = get_app(domain, app_id)
if app.get_doc_type() == 'Application':
try:
multimedia_state = app.check_media_state()
except ProcessTimedOut:
notify_exception(request)
messages.warning(request, (
"We were unable to check if your forms had errors. "
"Refresh the page and we will try again."
))
multimedia_state = {
'has_media': False,
'has_form_errors': True,
'has_missing_refs': False,
}
context = {
'multimedia_state': multimedia_state,
'domain': domain,
'app': app,
}
return render(request, template, context)
else:
raise Http404()
|
qedsoftware/commcare-hq
|
corehq/apps/app_manager/views/multimedia.py
|
Python
|
bsd-3-clause
| 2,218
|
''' Provide functions to embed Bokeh models (e.g., plots, widgets, layouts)
in various different ways.
There are a number of different combinations of options when embedding
Bokeh plots. The data for the plot can be contained in the document,
or on a Bokeh server, or in a sidecar JavaScript file. Likewise, BokehJS
may be inlined in the document, or loaded from CDN or a Bokeh server.
The functions in ``bokeh.embed`` provide functionality to embed in all
these different cases.
'''
from __future__ import absolute_import
import re
import uuid
from warnings import warn
from .templates import (
AUTOLOAD_JS, AUTOLOAD_TAG, FILE,
NOTEBOOK_DIV, PLOT_DIV, DOC_JS, SCRIPT_TAG
)
from .util.string import encode_utf8
from .plot_object import PlotObject, _ModelInDocument
from ._json_encoder import serialize_json
from .resources import DEFAULT_SERVER_HTTP_URL
from .client import DEFAULT_SESSION_ID
from .document import Document
from collections import Sequence
from six import string_types
def _wrap_in_function(code):
# indent and wrap Bokeh function def around
code = "\n".join([" " + line for line in code.split("\n")])
return 'Bokeh.$(function() {\n%s\n});' % code
def components(plot_objects, resources=None, wrap_script=True, wrap_plot_info=True):
'''
Return HTML components to embed a Bokeh plot. The data for the plot is
stored directly in the returned HTML.
An example can be found in examples/embed/embed_multiple.py
.. note::
The returned components assume that BokehJS resources are
**already loaded**.
Args:
plot_objects (PlotObject|list|dict|tuple) :
A single PlotObject, a list/tuple of PlotObjects, or a dictionary of keys and PlotObjects.
resources :
Deprecated argument
wrap_script (boolean, optional) :
If True, the returned javascript is wrapped in a script tag.
(default: True)
wrap_plot_info (boolean, optional) : If True, returns ``<div>`` strings.
Otherwise, return dicts that can be used to build your own divs.
(default: True)
If False, the returned dictionary contains the following information:
.. code-block:: python
{
'modelid': 'The model ID, used with Document.get_model_by_id',
'elementid': 'The css identifier the BokehJS will look for to target the plot',
'docid': 'Used by Bokeh to find the doc embedded in the returned script',
}
Returns:
UTF-8 encoded *(script, div[s])* or *(raw_script, plot_info[s])*
Examples:
With default wrapping parameter values:
.. code-block:: python
components(plot)
# => (script, plot_div)
components((plot1, plot2))
# => (script, (plot1_div, plot2_div))
components({"Plot 1": plot1, "Plot 2": plot2})
# => (script, {"Plot 1": plot1_div, "Plot 2": plot2_div})
Examples:
With wrapping parameters set to ``False``:
.. code-block:: python
components(plot, wrap_script=False, wrap_plot_info=False)
# => (javascript, plot_dict)
components((plot1, plot2), wrap_script=False, wrap_plot_info=False)
# => (javascript, (plot1_dict, plot2_dict))
components({"Plot 1": plot1, "Plot 2": plot2}, wrap_script=False, wrap_plot_info=False)
# => (javascript, {"Plot 1": plot1_dict, "Plot 2": plot2_dict})
'''
if resources is not None:
warn('Because the ``resources`` argument is no longer needed, '
'it is deprecated and no longer has any effect',
DeprecationWarning, stacklevel=2)
# 1) Convert single items and dicts into list
was_single_object = isinstance(plot_objects, PlotObject) or isinstance(plot_objects, Document)
# converts single to list
plot_objects = _check_plot_objects(plot_objects, allow_dict=True)
# now convert dict to list, saving keys in the same order
plot_object_keys = None
if isinstance(plot_objects, dict):
plot_object_keys = plot_objects.keys()
values = []
# don't just use .values() to ensure we are in the same order as key list
for k in plot_object_keys:
values.append(plot_objects[k])
plot_objects = values
# 2) Do our rendering
with _ModelInDocument(plot_objects):
(docs_json, render_items) = _standalone_docs_json_and_render_items(plot_objects)
custom_models = _extract_custom_models(plot_objects)
script = _script_for_render_items(docs_json, render_items, custom_models=custom_models,
websocket_url=None, wrap_script=wrap_script)
script = encode_utf8(script)
if wrap_plot_info:
results = list(_div_for_render_item(item) for item in render_items)
else:
results = render_items
# 3) convert back to the input shape
if was_single_object:
return script, results[0]
elif plot_object_keys is not None:
result = {}
for (key, value) in zip(plot_object_keys, results):
result[key] = value
return script, result
else:
return script, tuple(results)
def _escape_code(code):
""" Escape JS/CS source code, so that it can be embbeded in a JS string.
This is based on https://github.com/joliss/js-string-escape.
"""
def escape(match):
ch = match.group(0)
if ch == '"' or ch == "'" or ch == '\\':
return '\\' + ch
elif ch == '\n':
return '\\n'
elif ch == '\r':
return '\\r'
elif ch == '\u2028':
return '\\u2028'
elif ch == '\u2029':
return '\\u2029'
return re.sub(u"""['"\\\n\r\u2028\u2029]""", escape, code)
def _extract_custom_models(plot_objects):
custom_models = {}
def extract_from_model(model):
for r in model.references():
impl = getattr(r.__class__, "__implementation__", None)
if impl is not None:
name = r.__class__.__name__
impl = "['%s', {}]" % _escape_code(impl)
custom_models[name] = impl
for o in plot_objects:
if isinstance(o, Document):
for r in o.roots:
extract_from_model(r)
else:
extract_from_model(o)
return custom_models
def notebook_div(plot_object):
''' Return HTML for a div that will display a Bokeh plot in an
IPython Notebook
The data for the plot is stored directly in the returned HTML.
Args:
plot_object (PlotObject) : Bokeh object to render
Returns:
UTF-8 encoded HTML text for a ``<div>``
.. note::
Assumes :func:`~bokeh.util.notebook.load_notebook` or the equivalent
has already been executed.
'''
plot_object = _check_one_plot_object(plot_object)
with _ModelInDocument(plot_object):
(docs_json, render_items) = _standalone_docs_json_and_render_items([plot_object])
custom_models = _extract_custom_models([plot_object])
script = _script_for_render_items(docs_json, render_items,
custom_models=custom_models,
websocket_url=None)
item = render_items[0]
div = _div_for_render_item(item)
html = NOTEBOOK_DIV.render(
plot_script = script,
plot_div = div,
)
return encode_utf8(html)
def _use_widgets(plot_objects):
from .models.widgets import Widget
for o in plot_objects:
if isinstance(o, Document):
if _use_widgets(o.roots):
return True
else:
if any(isinstance(model, Widget) for model in o.references()):
return True
return False
def file_html(plot_objects,
resources,
title,
js_resources=None,
css_resources=None,
template=FILE,
template_variables={}):
'''Return an HTML document that embeds Bokeh PlotObject or Document objects.
The data for the plot is stored directly in the returned HTML.
This is an alias for standalone_html_page_for_models() which
supports customizing the JS/CSS resources independently and
customizing the jinja2 template.
Args:
plot_objects (PlotObject or Document or list) : Bokeh object or objects to render
typically a PlotObject or Document
resources (Resources) : a resource configuration for BokehJS assets
title (str) : a title for the HTML document ``<title>`` tags
template (Template, optional) : HTML document template (default: FILE)
A Jinja2 Template, see bokeh.templates.FILE for the required
template parameters
template_variables (dict, optional) : variables to be used in the Jinja2
template. If used, the following variable names will be overwritten:
title, js_resources, css_resources, plot_script, plot_div
Returns:
UTF-8 encoded HTML
'''
plot_objects = _check_plot_objects(plot_objects)
with _ModelInDocument(plot_objects):
(docs_json, render_items) = _standalone_docs_json_and_render_items(plot_objects)
custom_models = _extract_custom_models(plot_objects)
return _html_page_for_render_items(resources, docs_json, render_items, title,
custom_models=custom_models, websocket_url=None,
js_resources=js_resources, css_resources=css_resources,
template=template, template_variables=template_variables,
use_widgets=_use_widgets(plot_objects))
# TODO rename this "standalone"?
def autoload_static(plot_object, resources, script_path):
''' Return JavaScript code and a script tag that can be used to embed
Bokeh Plots.
The data for the plot is stored directly in the returned JavaScript code.
Args:
plot_object (PlotObject or Document) :
resources (Resources) :
script_path (str) :
Returns:
(js, tag) :
JavaScript code to be saved at ``script_path`` and a ``<script>``
tag to load it
Raises:
ValueError
'''
if resources.mode == 'inline':
raise ValueError("autoload_static() requires non-inline resources")
# TODO why is this?
if resources.dev:
raise ValueError("autoload_static() only works with non-dev resources")
plot_object = _check_one_plot_object(plot_object)
with _ModelInDocument(plot_object):
(docs_json, render_items) = _standalone_docs_json_and_render_items([plot_object])
item = render_items[0]
model_id = ""
if 'modelid' in item:
model_id = item['modelid']
doc_id = ""
if 'docid' in item:
doc_id = item['docid']
js = AUTOLOAD_JS.render(
docs_json = serialize_json(docs_json),
# TODO we should load all the JS files, but the code
# in AUTOLOAD_JS isn't smart enough to deal with it.
js_url = resources.js_files[0],
css_files = resources.css_files,
elementid = item['elementid'],
websocket_url = None
)
tag = AUTOLOAD_TAG.render(
src_path = script_path,
elementid = item['elementid'],
modelid = model_id,
docid = doc_id,
loglevel = resources.log_level
)
return encode_utf8(js), encode_utf8(tag)
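# A hedged usage sketch for autoload_static (the plot object, resource
# choice and paths are illustrative):
#
#   from bokeh.resources import CDN
#   js, tag = autoload_static(plot, CDN, "static/plot.js")
#   with open("static/plot.js", "w") as f:
#       f.write(js)
#   # embed `tag` in any page that should display the plot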
def autoload_server(plot_object, app_path="/", session_id=DEFAULT_SESSION_ID, url="default", loglevel="info"):
''' Return a script tag that can be used to embed Bokeh Plots from
a Bokeh Server.
The data for the plot is stored on the Bokeh Server.
Args:
plot_object (PlotObject) : the object to render from the session, or None for entire document
app_path (str, optional) : the server path to the app we want to load
session_id (str, optional) : server session ID
url (str, optional) : server root URL (where static resources live, not where a specific app lives)
loglevel (str, optional) : "trace", "debug", "info", "warn", "error", "fatal"
Returns:
tag :
a ``<script>`` tag that will execute an autoload script
loaded from the Bokeh Server
'''
if url == "default":
url = DEFAULT_SERVER_HTTP_URL
elementid = str(uuid.uuid4())
# empty model_id means render the entire doc from session_id
model_id = ""
if plot_object is not None:
model_id = plot_object._id
if not url.endswith("/"):
url = url + "/"
if not app_path.endswith("/"):
app_path = app_path + "/"
if app_path.startswith("/"):
app_path = app_path[1:]
src_path = url + app_path + "autoload.js" + "?bokeh-autoload-element=" + elementid
tag = AUTOLOAD_TAG.render(
src_path = src_path,
elementid = elementid,
modelid = model_id,
sessionid = session_id,
loglevel = loglevel
)
return encode_utf8(tag)
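# A hedged usage sketch for autoload_server: render a tag that loads the
# entire session document from a locally running Bokeh server (the URL and
# app path are illustrative):
#
#   tag = autoload_server(None, app_path="/myapp",
#                         url="http://localhost:5006")
#   # include `tag` in an HTML page served alongside the Bokeh server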
def _script_for_render_items(docs_json, render_items, websocket_url,
custom_models, wrap_script=True):
# this avoids emitting the "register custom models" code at all
# just to register an empty set
if (custom_models is not None) and len(custom_models) == 0:
custom_models = None
plot_js = _wrap_in_function(
DOC_JS.render(
custom_models=custom_models,
websocket_url=websocket_url,
docs_json=serialize_json(docs_json),
render_items=serialize_json(render_items)
)
)
if wrap_script:
return SCRIPT_TAG.render(js_code=plot_js)
else:
return plot_js
def _html_page_for_render_items(resources, docs_json, render_items, title, websocket_url,
custom_models, js_resources=None, css_resources=None,
template=FILE, template_variables={}, use_widgets=True):
if resources:
if js_resources:
warn('Both resources and js_resources provided. resources will override js_resources.')
if css_resources:
warn('Both resources and css_resources provided. resources will override css_resources.')
js_resources = resources
css_resources = resources
bokeh_js = ''
if js_resources:
if not css_resources:
warn('No Bokeh CSS Resources provided to template. If required you will need to provide them manually.')
js_resources = js_resources.use_widgets(use_widgets)
bokeh_js = js_resources.render_js()
bokeh_css = ''
if css_resources:
if not js_resources:
warn('No Bokeh JS Resources provided to template. If required you will need to provide them manually.')
css_resources = css_resources.use_widgets(use_widgets)
bokeh_css = css_resources.render_css()
script = _script_for_render_items(docs_json, render_items, websocket_url, custom_models)
template_variables_full = template_variables.copy()
template_variables_full.update(dict(
title = title,
bokeh_js = bokeh_js,
bokeh_css = bokeh_css,
plot_script = script,
plot_div = "\n".join(_div_for_render_item(item) for item in render_items)
))
html = template.render(template_variables_full)
return encode_utf8(html)
def _check_plot_objects(plot_objects, allow_dict=False):
input_type_valid = False
# Check for single item
if isinstance(plot_objects, (PlotObject, Document)):
plot_objects = [plot_objects]
# Check for sequence
if isinstance(plot_objects, Sequence) and all(isinstance(x, (PlotObject, Document)) for x in plot_objects):
input_type_valid = True
if allow_dict:
if isinstance(plot_objects, dict) and \
all(isinstance(x, string_types) for x in plot_objects.keys()) and \
all(isinstance(x, (PlotObject, Document)) for x in plot_objects.values()):
input_type_valid = True
if not input_type_valid:
if allow_dict:
raise ValueError(
'Input must be a PlotObject, a Document, a Sequence of PlotObjects and Document, or a dictionary from string to PlotObject and Document'
)
else:
raise ValueError('Input must be a PlotObject, a Document, or a Sequence of PlotObjects and Document')
return plot_objects
def _check_one_plot_object(plot_object):
plot_objects = _check_plot_objects(plot_object)
if len(plot_objects) != 1:
raise ValueError("Input must be exactly one PlotObject or Document")
return plot_objects[0]
def _div_for_render_item(item):
return PLOT_DIV.render(elementid=item['elementid'])
def _standalone_docs_json_and_render_items(plot_objects):
plot_objects = _check_plot_objects(plot_objects)
render_items = []
docs_by_id = {}
for p in plot_objects:
modelid = None
if isinstance(p, Document):
doc = p
else:
if p.document is None:
raise ValueError("To render a PlotObject as HTML it must be part of a Document")
doc = p.document
modelid = p._id
docid = None
for key in docs_by_id:
if docs_by_id[key] == doc:
docid = key
if docid is None:
docid = str(uuid.uuid4())
docs_by_id[docid] = doc
elementid = str(uuid.uuid4())
render_items.append({
'docid' : docid,
'elementid' : elementid,
# if modelid is None, that means the entire document
'modelid' : modelid
})
docs_json = {}
for k, v in docs_by_id.items():
docs_json[k] = v.to_json()
return (docs_json, render_items)
# TODO this is a theory about what file_html() "should" be,
# with a more explicit name similar to the server names below,
# and without the jinja2 entanglement. Thus this encapsulates that
# we use jinja2 and encapsulates the exact template variables we require.
# Anyway, we should deprecate file_html or else drop this version,
# most likely.
def standalone_html_page_for_models(plot_objects, resources, title):
''' Return an HTML document that renders zero or more Bokeh documents or models.
The document for each model will be embedded directly in the HTML, so the
resulting HTML file is standalone (does not require a server). Depending
on the provided resources, the HTML file may be completely self-contained
or may have to load JS and CSS from different files.
Args:
plot_objects (PlotObject or Document) : Bokeh object to render
typically a PlotObject or a Document
resources (Resources) : a resource configuration for BokehJS assets
title (str) : a title for the HTML document ``<title>`` tags
Returns:
UTF-8 encoded HTML
'''
return file_html(plot_objects, resources, title)
def server_html_page_for_models(session_id, model_ids, resources, title, websocket_url):
render_items = []
for modelid in model_ids:
if modelid is None:
raise ValueError("None found in list of model_ids")
elementid = str(uuid.uuid4())
render_items.append({
'sessionid' : session_id,
'elementid' : elementid,
'modelid' : modelid
})
return _html_page_for_render_items(resources, {}, render_items, title,
websocket_url=websocket_url, custom_models=None)
def server_html_page_for_session(session_id, resources, title, websocket_url):
elementid = str(uuid.uuid4())
render_items = [{
'sessionid' : session_id,
'elementid' : elementid
# no 'modelid' implies the entire session document
}]
return _html_page_for_render_items(resources, {}, render_items, title,
websocket_url=websocket_url, custom_models=None)
|
gpfreitas/bokeh
|
bokeh/embed.py
|
Python
|
bsd-3-clause
| 20,330
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 30, transform = "Quantization", sigma = 0.0, exog_count = 20, ar_order = 0);
|
antoinecarme/pyaf
|
tests/artificial/transf_Quantization/trend_PolyTrend/cycle_30/ar_/test_artificial_1024_Quantization_PolyTrend_30__20.py
|
Python
|
bsd-3-clause
| 269
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..brainsuite import Hemisplit
def test_Hemisplit_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
inputHemisphereLabelFile=dict(argstr='-l %s',
mandatory=True,
),
inputSurfaceFile=dict(argstr='-i %s',
mandatory=True,
),
outputLeftHemisphere=dict(argstr='--left %s',
genfile=True,
),
outputLeftPialHemisphere=dict(argstr='-pl %s',
genfile=True,
),
outputRightHemisphere=dict(argstr='--right %s',
genfile=True,
),
outputRightPialHemisphere=dict(argstr='-pr %s',
genfile=True,
),
pialSurfaceFile=dict(argstr='-p %s',
),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
timer=dict(argstr='--timer',
),
verbosity=dict(argstr='-v %d',
),
)
inputs = Hemisplit.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_Hemisplit_outputs():
output_map = dict(outputLeftHemisphere=dict(),
outputLeftPialHemisphere=dict(),
outputRightHemisphere=dict(),
outputRightPialHemisphere=dict(),
)
outputs = Hemisplit.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
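# Illustrative sketch (hypothetical file names; requires nipype and the
# BrainSuite tools on PATH) of instantiating the interface whose trait
# metadata is asserted above:
#
#   hs = Hemisplit(inputSurfaceFile='brain.dfs',
#                  inputHemisphereLabelFile='hemi.label.dfs')
#   print(hs.cmdline)  # the argstr pieces assemble into the shell command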
|
mick-d/nipype
|
nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py
|
Python
|
bsd-3-clause
| 1,610
|
"""
Phil scope of options for scaling.
"""
from __future__ import annotations
import iotbx.phil
phil_scope = iotbx.phil.parse(
"""
anomalous = False
.type = bool
.help = "Separate anomalous pairs in scaling and error model optimisation."
.expert_level=0
overwrite_existing_models = False
.type = bool
.help = "If True, create new scaling models for all datasets"
.expert_level = 0
reflection_selection {
method = *quasi_random intensity_ranges use_all random
.type = choice
.help = "Method to use when choosing a reflection subset for scaling model"
"minimisation."
"The quasi_random option randomly selects reflections groups"
"within a dataset, and also selects groups which have good"
"connectedness across datasets for multi-dataset cases. The random"
"option selects reflection groups randomly for both single"
"and multi dataset scaling, so for a single dataset"
"quasi_random == random."
"The intensity_ranges option uses the E2_range, Isigma_range and"
"d_range options to the subset of reflections"
"The use_all option uses all suitable reflections, which may be"
"slow for large datasets."
random {
multi_dataset {
Isigma_cutoff = 1.0
.type = float
.help = "Minimum average I/sigma of reflection groups to use when"
"selecting random reflections for minimisation."
}
min_groups = 2000
.type = int
.help = "The minimum number of symmetry groups to use during"
"minimisation."
.expert_level=1
min_reflections = 50000
.type = int
.help = "The minimum number of reflections to use during minimisation."
.expert_level=1
}
best_unit_cell = None
.type = unit_cell
.help = "Best unit cell value, to use when performing resolution cutting"
"and merging statistics. If None, the median cell will be used."
E2_range = 0.8, 5.0
.type = floats(size=2)
.help = "Minimum and maximum normalised E^2 value to used to select a"
"subset of reflections for minimisation."
.expert_level = 1
Isigma_range = -5.0, 0.0
.type = floats(size=2)
.help = "Minimum and maximum I/sigma values used to select a subset of"
"reflections for minimisation. A value of 0.0 for the maximum"
"indicates that no upper limit should be applied."
.expert_level = 1
d_range = None
.type = floats(size=2)
.help = "Minimum and maximum d-values used to select a subset of"
"reflections for minimisation."
.expert_level = 1
min_partiality = 0.95
.type = float
.help = "Minimum partiality to use when selecting reflections to use"
"to determine the scaling model and error model."
.expert_level = 2
intensity_choice = profile sum *combine
.alias = intensity
.type = choice
.help = "Option to choose from profile fitted or summation intensities, or
an optimised combination of profile/sum."
.expert_level = 1
combine.Imid = None
.type = floats
.help = "A list of values to try for the midpoint, for profile/sum combination
calculation: the value with the lowest Rmeas will be chosen.
0 and 1 are special values that can be supplied to include profile
and sum respectively in the comparison."
.expert_level = 2
combine.joint_analysis = True
.type = bool
.help = "Option of whether to do intensity combination optimisation
separately (i.e. different Imid per dataset) or jointly for
multiple datasets."
.expert_level = 2
}
weighting {
weighting_scheme = *invvar
.type = choice
.help = "Weighting scheme used during Ih calculation. Weighting schemes
other than invvar and unity may trigger iterative reweighting
during minimisation, which may be unstable for certain minimisation
engines (LBFGS)."
.expert_level = 2
error_model {
include scope dials.algorithms.scaling.error_model.error_model.phil_scope
}
}
cut_data {
d_min = None
.type = float
.help = "Option to apply a high resolution cutoff for the dataset (i.e.
the chosen reflections have d > d_min)."
.expert_level = 1
d_max = None
.type = float
.help = "Option to apply a low resolution cutoff for the dataset (i.e.
the chosen reflections have d < d_max)."
.expert_level = 1
partiality_cutoff = 0.4
.type = float
.help = "Value below which reflections are removed from the dataset due
to low partiality."
.expert_level = 1
min_isigi = -5
.type = float
.help = "Value below which reflections are removed from the dataset due"
"to low I/sigI in either profile or summation intensity estimates"
.expert_level = 1
}
scaling_options {
check_consistent_indexing = False
.type = bool
.help = "If True, run dials.cosym on all data in the data preparation"
"step, to ensure consistent indexing."
target_cycle = True
.type = bool
.help = "Option to turn of initial round of targeted scaling
if some datasets are already scaled."
.expert_level = 2
only_target = False
.type = bool
.help = "Option to only do targeted scaling if some datasets
are already scaled."
.expert_level = 2
only_save_targeted = True
.type = bool
.help = "If only_target is true, this option to change whether the dataset
that is being scaled will be saved on its own, or combined with the
already scaled dataset."
.expert_level = 2
target_model = None
.type = path
.help = "Path to cif file to use to calculate target intensities for
scaling."
.expert_level = 2
target_mtz = None
.type = path
.help = "Path to merged mtz file to use as a target for scaling."
.expert_level = 2
nproc = 1
.type = int(value_min=1)
.help = "Number of blocks to divide the data into for minimisation.
This also sets the number of processes to use if the option is
available."
.expert_level = 2
use_free_set = False
.type = bool
.help = "Option to use a free set during scaling to check for overbiasing.
This free set is used to calculate an RMSD, which is shown alongside
the 'working' RMSD during refinement, but is not currently used
to terminate refinement or make any choices on the model."
.expert_level = 2
free_set_percentage = 10.0
.type = float
.help = "Percentage of symmetry equivalent groups to use for the free set,
if use_free_set is True."
.expert_level = 2
free_set_offset = 0
.type = int
.help = "Offset for choosing unique groups for the free set from the whole
set of unique groups."
.expert_level = 2
full_matrix = True
.type = bool
.help = "Option to turn off GN/LM refinement round used to determine
error estimates on scale factors."
.expert_level = 2
outlier_rejection = *standard simple
.type = choice
.help = "Choice of outlier rejection routine. Standard may take a
significant amount of time to run for large datasets or high
multiplicities, whereas simple should be quick for these datasets."
.expert_level = 1
outlier_zmax = 6.0
.type = float(value_min=3.0)
.help = "Cutoff z-score value for identifying outliers based on their
normalised deviation within the group of equivalent reflections"
.expert_level = 1
emax = 10
.type = float(value_min = 0)
.help = "Reject reflections with normalised intensities E^2 > emax^2"
.expert_level = 2
}
"""
)
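# A minimal consumption sketch (assumes the usual libtbx.phil workflow of
# fetch()-ing user overrides into the master scope, then extract()-ing):
#
#   user_phil = iotbx.phil.parse("anomalous=True\ncut_data.d_min=1.5")
#   params = phil_scope.fetch(source=user_phil).extract()
#   assert params.anomalous is True
#   assert params.cut_data.d_min == 1.5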
|
dials/dials
|
algorithms/scaling/scaling_options.py
|
Python
|
bsd-3-clause
| 8,202
|
import unittest
import pythran
import os.path
#pythran export a((float,(int,uintp),str list) list list)
#pythran export a(str)
#pythran export a( (str,str), int, intp list list)
#pythran export a( float set )
#pythran export a( bool:str dict )
#pythran export a( float )
#pythran export a( int8[] )
#pythran export a( int8[][] order (F))
#pythran export a( byte )
#pythran export a0( uint8 )
#pythran export a1( int16 )
#pythran export a2( uint16 )
#pythran export a3( int32 )
#pythran export a4( uint32 )
#pythran export a5( int64 )
#pythran export a6( uint64 )
#pythran export a7( float32 )
#pythran export a8( float64 )
#pythran export a9( complex64 )
#pythran export a10( complex128 )
#pythran export a( int8 set )
#pythran export b( int8 set? )
#pythran export a( uint8 list)
#pythran export a( int16 [], slice)
#pythran export a( uint16 [][] order(C))
#pythran export a( uint16 [::][])
#pythran export a( uint16 [:,:,:])
#pythran export a( uint16 [:,::,:])
#pythran export a( uint16 [,,,,])
#pythran export a( (int32, ( uint32 , int64 ) ) )
#pythran export a( uint64:float32 dict )
#pythran export a( float64, complex64, complex128 )
class TestSpecParser(unittest.TestCase):
def test_parser(self):
real_path = os.path.splitext(os.path.realpath(__file__))[0]+".py"
with open(real_path) as fd:
print(pythran.spec_parser(fd.read()))
def test_invalid_specs0(self):
code = '#pythran export foo()\ndef foo(n): return n'
with self.assertRaises(pythran.syntax.PythranSyntaxError):
pythran.compile_pythrancode("dumber", code)
def test_invalid_specs1(self):
code = '#pythran export boo(int)\ndef boo(): pass'
with self.assertRaises(pythran.syntax.PythranSyntaxError):
pythran.compile_pythrancode("dumber", code)
def test_invalid_specs2(self):
code = '#pythran export bar(int)\ndef foo(): pass'
with self.assertRaises(pythran.syntax.PythranSyntaxError):
pythran.compile_pythrancode("dumber", code)
def test_invalid_specs3(self):
code = '#pythran export bar(int, int?, int)\ndef bar(x, y=1, z=1): pass'
with self.assertRaises(pythran.syntax.PythranSyntaxError):
pythran.compile_pythrancode("dumber", code)
def test_multiline_spec0(self):
code = '''
#pythran export foo(
# )
def foo(): return
'''
self.assertTrue(pythran.spec_parser(code))
def test_multiline_spec1(self):
code = '''
#pythran export foo(int
#, int
# )
def foo(i,j): return
'''
self.assertTrue(pythran.spec_parser(code))
def test_multiline_spec2(self):
code = '''
# pythran export foo(int,
# float
#, int
# )
def foo(i,j,k): return
'''
self.assertTrue(pythran.spec_parser(code))
def test_crappy_spec0(self):
code = '''
# pythran export soo(int) this is an int test
def soo(i): return
'''
self.assertTrue(pythran.spec_parser(code))
def test_crappy_spec1(self):
code = '''
# pythran export poo(int)
#this is a pythran export test
def poo(i): return
'''
self.assertTrue(pythran.spec_parser(code))
def test_middle_spec0(self):
code = '''
def too(i): return
# pythran export too(int)
#this is a pythran export test
def bar(i): return
'''
self.assertTrue(pythran.spec_parser(code))
def test_middle_spec1(self):
code = '''
def zoo(i): return
#this is a pythran export test
# pythran export zoo(int)
#this is an export test
# pythran export zoo(str)
def bar(i): return
'''
self.assertEqual(len(pythran.spec_parser(code).functions), 1)
self.assertEqual(len(pythran.spec_parser(code).functions['zoo']), 2)
def test_var_export0(self):
code = '''
# pythran export coo
coo = 1
'''
self.assertTrue(pythran.spec_parser(code))
|
pombredanne/pythran
|
pythran/tests/test_spec_parser.py
|
Python
|
bsd-3-clause
| 3,994
|
import pytest
from pontoon.test import factories
@pytest.fixture
def admin():
"""Admin - a superuser"""
return factories.UserFactory.create(username="admin", is_superuser=True,)
@pytest.fixture
def client_superuser(client, admin):
"""Provides a client with a logged in superuser. """
client.force_login(admin)
return client
@pytest.fixture
def user_a():
return factories.UserFactory(username="user_a")
@pytest.fixture
def user_b():
return factories.UserFactory(username="user_b")
@pytest.fixture
def user_c():
return factories.UserFactory(username="user_c")
@pytest.fixture
def member(client, user_a):
"""Provides a `LoggedInMember` with the attributes `user` and `client`
the `client` is authenticated
"""
class LoggedInMember(object):
def __init__(self, user, client):
client.force_login(user)
self.client = client
self.user = user
return LoggedInMember(user_a, client)
@pytest.fixture
def locale_a():
return factories.LocaleFactory(code="kg", name="Klingon",)
@pytest.fixture
def google_translate_locale(locale_a):
"""Set the Google Cloud Translation API locale code for locale_a"""
locale_a.google_translate_code = "bg"
locale_a.save()
return locale_a
@pytest.fixture
def ms_locale(locale_a):
"""Set the Microsoft API locale code for locale_a"""
locale_a.ms_translator_code = "gb"
locale_a.save()
return locale_a
@pytest.fixture
def locale_b():
return factories.LocaleFactory(code="gs", name="Geonosian",)
@pytest.fixture
def project_a():
return factories.ProjectFactory(
slug="project_a", name="Project A", repositories=[],
)
@pytest.fixture
def project_b():
return factories.ProjectFactory(slug="project_b", name="Project B")
@pytest.fixture
def system_project_a():
return factories.ProjectFactory(
slug="system_project_a",
name="System Project A",
repositories=[],
system_project=True,
)
@pytest.fixture
def resource_a(project_a):
return factories.ResourceFactory(
project=project_a, path="resource_a.po", format="po"
)
@pytest.fixture
def resource_b(project_b):
return factories.ResourceFactory(
project=project_b, path="resource_b.po", format="po"
)
@pytest.fixture
def entity_a(resource_a):
return factories.EntityFactory(resource=resource_a, string="entity a")
@pytest.fixture
def entity_b(resource_b):
return factories.EntityFactory(resource=resource_b, string="entity b")
@pytest.fixture
def project_locale_a(project_a, locale_a):
return factories.ProjectLocaleFactory(project=project_a, locale=locale_a,)
@pytest.fixture
def translation_a(locale_a, project_locale_a, entity_a, user_a):
"""Return a translation.
Note that we require the `project_locale_a` fixture because a
valid ProjectLocale is needed in order to query Translations.
"""
translation_a = factories.TranslationFactory(
entity=entity_a,
locale=locale_a,
user=user_a,
string="Translation for entity_a",
)
translation_a.locale.refresh_from_db()
translation_a.entity.resource.project.refresh_from_db()
return translation_a
@pytest.fixture
def tag_a(resource_a, project_a, locale_a):
# Tags require a TranslatedResource to work.
factories.TranslatedResourceFactory.create(resource=resource_a, locale=locale_a)
tag = factories.TagFactory.create(slug="tag", name="Tag", project=project_a,)
tag.resources.add(resource_a)
return tag
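# Hypothetical test module showing how these fixtures compose through
# pytest's dependency injection (names as defined above):
#
#   def test_translation_links_entity(translation_a, entity_a):
#       assert translation_a.entity == entity_a
#       assert translation_a.string == "Translation for entity_a"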
|
jotes/pontoon
|
pontoon/test/fixtures/base.py
|
Python
|
bsd-3-clause
| 3,576
|
from __future__ import unicode_literals
import json
import requests
import six
from datetime import datetime
from six.moves.urllib.parse import parse_qs
from xml.etree.ElementTree import Element, SubElement, tostring
from xml.parsers.expat import ExpatError
from .auth import OAuth2Credentials
from .exceptions import (
XeroBadRequest,
XeroExceptionUnknown,
XeroForbidden,
XeroInternalError,
XeroNotAvailable,
XeroNotFound,
XeroNotImplemented,
XeroRateLimitExceeded,
XeroTenantIdNotSet,
XeroUnauthorized,
)
from .utils import isplural, json_load_object_hook, singular
class BaseManager(object):
DECORATED_METHODS = (
"get",
"save",
"filter",
"all",
"put",
"delete",
"get_history",
"put_history",
"get_attachments",
"get_attachment_data",
"put_attachment_data",
)
OBJECT_DECORATED_METHODS = {
"Invoices": ["email", "online_invoice"],
}
DATETIME_FIELDS = (
"UpdatedDateUTC",
"Updated",
"FullyPaidOnDate",
"DateTimeUTC",
"CreatedDateUTC",
"JournalDate",
)
DATE_FIELDS = (
"DueDate",
"Date",
"PaymentDate",
"StartDate",
"EndDate",
"PeriodLockDate",
"DateOfBirth",
"OpeningBalanceDate",
"PaymentDueDate",
"ReportingDate",
"DeliveryDate",
"ExpectedArrivalDate",
)
BOOLEAN_FIELDS = (
"IsSupplier",
"IsCustomer",
"IsDemoCompany",
"PaysTax",
"IsAuthorisedToApproveTimesheets",
"IsAuthorisedToApproveLeave",
"HasHELPDebt",
"AustralianResidentForTaxPurposes",
"TaxFreeThresholdClaimed",
"HasSFSSDebt",
"EligibleToReceiveLeaveLoading",
"IsExemptFromTax",
"IsExemptFromSuper",
"SentToContact",
"IsSubscriber",
"HasAttachments",
"ShowOnCashBasisReports",
"IncludeInEmails",
"SentToContact",
"CanApplyToRevenue",
"CanApplyToLiabilities",
"CanApplyToExpenses",
"CanApplyToEquity",
"CanApplyToAssets",
"IsReconciled",
"EnablePaymentsToAccount",
"ShowInExpenseClaims",
"DiscountEnteredAsPercent",
"IsPurchased",
"IsSold",
"IsTrackedAsInventory",
)
DECIMAL_FIELDS = (
"Hours",
"NumberOfUnit",
)
INTEGER_FIELDS = (
"FinancialYearEndDay",
"FinancialYearEndMonth",
)
NO_SEND_FIELDS = (
"UpdatedDateUTC",
"HasValidationErrors",
"IsDiscounted",
"DateString",
"HasErrors",
"DueDateString",
"HasAccount",
)
OPERATOR_MAPPINGS = {
"gt": ">",
"lt": "<",
"lte": "<=",
"gte": ">=",
"ne": "!=",
}
def __init__(self):
pass
def dict_to_xml(self, root_elm, data):
for key in data.keys():
# Xero will complain if we send back these fields.
if key in self.NO_SEND_FIELDS:
continue
sub_data = data[key]
elm = SubElement(root_elm, key)
# Key references a dict. Unroll the dict
# as its own XML node with subnodes
if isinstance(sub_data, dict):
self.dict_to_xml(elm, sub_data)
# Key references a list/tuple
elif isinstance(sub_data, list) or isinstance(sub_data, tuple):
# key name is a plural. This means each item
# in the list needs to be wrapped in an XML
# node that is a singular version of the list name.
if isplural(key):
for d in sub_data:
self.dict_to_xml(SubElement(elm, singular(key)), d)
# key name isn't a plural. Just insert the content
# as an XML node with subnodes
else:
for d in sub_data:
self.dict_to_xml(elm, d)
# Normal element - just insert the data.
else:
if key in self.BOOLEAN_FIELDS:
val = "true" if sub_data else "false"
elif key in self.DATE_FIELDS:
val = sub_data.strftime("%Y-%m-%dT%H:%M:%S")
else:
val = six.text_type(sub_data)
elm.text = val
return root_elm
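# Illustrative sketch (hypothetical data; in practice BaseManager is
# subclassed with `name`/`singular` set) of the plural unrolling above:
#
#   dict_to_xml(Element("Invoice"),
#               {"Contact": {"Name": "ACME"},
#                "LineItems": [{"Description": "Widget"}]})
#
# yields (modulo key order):
#   <Invoice><Contact><Name>ACME</Name></Contact>
#   <LineItems><LineItem><Description>Widget</Description></LineItem></LineItems></Invoice>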
def _prepare_data_for_save(self, data):
if isinstance(data, list) or isinstance(data, tuple):
root_elm = Element(self.name)
for d in data:
sub_elm = SubElement(root_elm, self.singular)
self.dict_to_xml(sub_elm, d)
else:
root_elm = self.dict_to_xml(Element(self.singular), data)
# In python3 this seems to return a bytestring
return six.u(tostring(root_elm))
def _parse_api_response(self, response, resource_name):
data = json.loads(response.text, object_hook=json_load_object_hook)
assert data["Status"] == "OK", (
"Expected the API to say OK but received %s" % data["Status"]
)
try:
return data[resource_name]
except KeyError:
return data
def _get_data(self, func):
""" This is the decorator for our DECORATED_METHODS.
Each of the decorated methods must return:
uri, params, method, body, headers, singleobject
"""
def wrapper(*args, **kwargs):
timeout = kwargs.pop("timeout", None)
uri, params, method, body, headers, singleobject = func(*args, **kwargs)
if headers is None:
headers = {}
headers["Content-Type"] = "application/xml"
if isinstance(self.credentials, OAuth2Credentials):
if self.credentials.tenant_id:
headers["Xero-tenant-id"] = self.credentials.tenant_id
else:
raise XeroTenantIdNotSet
# Use the JSON API by default, but remember we might request a PDF (application/pdf)
# so don't force the Accept header.
if "Accept" not in headers:
headers["Accept"] = "application/json"
# Set a user-agent so Xero knows the traffic is coming from pyxero
# or individual user/partner
headers["User-Agent"] = self.user_agent
response = getattr(requests, method)(
uri,
data=body,
headers=headers,
auth=self.credentials.oauth,
params=params,
timeout=timeout,
)
if response.status_code == 200:
# If we haven't got XML or JSON, assume we're being returned a
# binary file
if not response.headers["content-type"].startswith("application/json"):
return response.content
return self._parse_api_response(response, self.name)
elif response.status_code == 204:
return response.content
elif response.status_code == 400:
try:
raise XeroBadRequest(response)
except (ValueError, ExpatError):
raise XeroExceptionUnknown(
response, msg="Unable to parse Xero API response"
)
elif response.status_code == 401:
raise XeroUnauthorized(response)
elif response.status_code == 403:
raise XeroForbidden(response)
elif response.status_code == 404:
raise XeroNotFound(response)
elif response.status_code == 429:
limit_reason = response.headers.get("X-Rate-Limit-Problem") or "unknown"
payload = {"oauth_problem": ["rate limit exceeded: " + limit_reason],
"oauth_problem_advice": ["please wait before retrying the xero api, "
"the limit exceeded is: " + limit_reason]}
raise XeroRateLimitExceeded(response, payload)
elif response.status_code == 500:
raise XeroInternalError(response)
elif response.status_code == 501:
raise XeroNotImplemented(response)
elif response.status_code == 503:
# Two 503 responses are possible. Rate limit errors
# return encoded content; offline errors don't.
# If you parse the response text and there's nothing
# encoded, it must be a not-available error.
payload = parse_qs(response.text)
if payload:
raise XeroRateLimitExceeded(response, payload)
else:
raise XeroNotAvailable(response)
else:
raise XeroExceptionUnknown(response)
return wrapper
def _get(self, id, headers=None, params=None):
uri = "/".join([self.base_url, self.name, id])
uri_params = self.extra_params.copy()
uri_params.update(params if params else {})
return uri, uri_params, "get", None, headers, True
def _get_history(self, id):
uri = "/".join([self.base_url, self.name, id, "history"]) + "/"
return uri, {}, "get", None, None, False
def _get_attachments(self, id):
"""Retrieve a list of attachments associated with this Xero object."""
uri = "/".join([self.base_url, self.name, id, "Attachments"]) + "/"
return uri, {}, "get", None, None, False
def _get_attachment_data(self, id, filename):
"""
Retrieve the contents of a specific attachment (identified by filename).
"""
uri = "/".join([self.base_url, self.name, id, "Attachments", filename])
return uri, {}, "get", None, None, False
def get_attachment(self, id, filename, file):
"""
Retrieve the contents of a specific attachment (identified by filename).
Writes data to file object, returns length of data written.
"""
data = self.get_attachment_data(id, filename)
file.write(data)
return len(data)
def _email(self, id):
uri = "/".join([self.base_url, self.name, id, "Email"])
return uri, {}, "post", None, None, True
def _online_invoice(self, id):
uri = "/".join([self.base_url, self.name, id, "OnlineInvoice"])
return uri, {}, "get", None, None, True
def save_or_put(self, data, method="post", headers=None, summarize_errors=True):
uri = "/".join([self.base_url, self.name])
body = self._prepare_data_for_save(data)
params = self.extra_params.copy()
if not summarize_errors:
params["summarizeErrors"] = "false"
return uri, params, method, body, headers, False
def _save(self, data):
return self.save_or_put(data, method="post")
def _put(self, data, summarize_errors=True):
return self.save_or_put(data, method="put", summarize_errors=summarize_errors)
def _delete(self, id):
uri = "/".join([self.base_url, self.name, id])
return uri, {}, "delete", None, None, False
def _put_history_data(self, id, details):
"""Add a history note to the Xero object."""
uri = "/".join([self.base_url, self.name, id, "history"])
details_data = {"Details": details}
root_elm = Element("HistoryRecord")
self.dict_to_xml(root_elm, details_data)
data = six.u(tostring(root_elm))
return uri, {}, "put", data, None, False
def _put_history(self, id, details):
"""Upload a history note to the Xero object."""
return self._put_history_data(id, details)
def _put_attachment_data(
self, id, filename, data, content_type, include_online=False
):
"""Upload an attachment to the Xero object."""
uri = "/".join([self.base_url, self.name, id, "Attachments", filename])
params = {"IncludeOnline": "true"} if include_online else {}
headers = {"Content-Type": content_type, "Content-Length": str(len(data))}
return uri, params, "put", data, headers, False
def put_attachment(self, id, filename, file, content_type, include_online=False):
"""Upload an attachment to the Xero object (from file object)."""
return self.put_attachment_data(
id, filename, file.read(), content_type, include_online=include_online
)
def prepare_filtering_date(self, val):
if isinstance(val, datetime):
val = val.strftime("%a, %d %b %Y %H:%M:%S GMT")
else:
val = '"%s"' % val
return {"If-Modified-Since": val}
def _filter(self, **kwargs):
params = self.extra_params.copy()
headers = None
uri = "/".join([self.base_url, self.name])
if kwargs:
if "since" in kwargs:
val = kwargs["since"]
headers = self.prepare_filtering_date(val)
del kwargs["since"]
# Accept IDs parameter for Invoices and Contacts endpoints
if "IDs" in kwargs:
params["IDs"] = ",".join(kwargs["IDs"])
del kwargs["IDs"]
def get_filter_params(key, value):
last_key = key.split("_")[-1]
if last_key.endswith("ID"):
return 'Guid("%s")' % six.text_type(value)
if key in self.BOOLEAN_FIELDS:
return "true" if value else "false"
elif key in self.DATE_FIELDS:
return "DateTime(%s,%s,%s)" % (value.year, value.month, value.day)
elif key in self.DATETIME_FIELDS:
return value.isoformat()
else:
return '"%s"' % six.text_type(value)
def generate_param(key, value):
parts = key.split("__")
field = key.replace("_", ".")
fmt = "%s==%s"
if len(parts) == 2:
# support filters:
# Name__contains=John becomes Name.contains("John")
if parts[1] in ["contains", "startswith", "endswith"]:
field = parts[0]
fmt = "".join(["%s.", parts[1], "(%s)"])
elif parts[1] in ["tolower", "toupper"]:
field = parts[0]
fmt = "".join(["%s.", parts[1], "()==%s"])
elif parts[1] in self.OPERATOR_MAPPINGS:
field = parts[0]
key = field
fmt = "%s" + self.OPERATOR_MAPPINGS[parts[1]] + "%s"
elif parts[1] in ["isnull"]:
sign = "=" if value else "!"
return "%s%s=null" % (parts[0], sign)
field = field.replace("_", ".")
return fmt % (field, get_filter_params(key, value))
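# Worked examples of the mapping above (values follow get_filter_params,
# so non-ID scalars come back quoted):
#   generate_param("Name__contains", "John") -> 'Name.contains("John")'
#   generate_param("AmountDue__gt", 10)      -> 'AmountDue>"10"'
#   generate_param("ContactID", guid)        -> 'ContactID==Guid("...")'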
# Move any known parameter names to the query string
KNOWN_PARAMETERS = ["order", "offset", "page", "includeArchived"]
for param in KNOWN_PARAMETERS:
if param in kwargs:
params[param] = kwargs.pop(param)
filter_params = []
if "raw" in kwargs:
raw = kwargs.pop("raw")
filter_params.append(raw)
# Treat any remaining arguments as filter predicates
# Xero will break if you search without a check for null in the first position:
# http://developer.xero.com/documentation/getting-started/http-requests-and-responses/#title3
sortedkwargs = sorted(
six.iteritems(kwargs), key=lambda item: -1 if "isnull" in item[0] else 0
)
for key, value in sortedkwargs:
filter_params.append(generate_param(key, value))
if filter_params:
params["where"] = "&&".join(filter_params)
return uri, params, "get", None, headers, False
def _all(self):
uri = "/".join([self.base_url, self.name])
return uri, {}, "get", None, None, False
|
freakboy3742/pyxero
|
xero/basemanager.py
|
Python
|
bsd-3-clause
| 16,413
|
from jsonapi_requests import base
from jsonapi_requests.orm import registry
class OrmApi:
def __init__(self, api):
self.type_registry = registry.TypeRegistry()
self.api = api
@classmethod
def config(cls, *args, **kwargs):
return cls(base.Api.config(*args, **kwargs))
def endpoint(self, path):
return self.api.endpoint(path)
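# A hedged configuration sketch (hypothetical URL and credentials),
# mirroring the underlying jsonapi_requests.base.Api.config call:
#
#   api = OrmApi.config({
#       'API_ROOT': 'https://example.com/api/2.0',
#       'AUTH': ('login', 'password'),
#   })
#   articles = api.endpoint('articles')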
|
socialwifi/jsonapi-requests
|
jsonapi_requests/orm/api.py
|
Python
|
bsd-3-clause
| 376
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from appconf import AppConf
class BlogConf(AppConf):
DISQUS_SHORTNAME = 'django-staticgen'
POST_IDENTIFIER = 'current_post'
class Meta:
prefix = 'blog'
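# With django-appconf, these defaults surface as prefixed Django settings
# that projects may override (sketch):
#
#   from django.conf import settings
#   settings.BLOG_DISQUS_SHORTNAME  # 'django-staticgen' unless overridden
#   settings.BLOG_POST_IDENTIFIER   # 'current_post' unless overridden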
|
mishbahr/staticgen-demo
|
staticgen_demo/blog/conf.py
|
Python
|
bsd-3-clause
| 347
|
"""All pytest-django fixtures"""
from __future__ import with_statement
import os
import warnings
import pytest
from . import live_server_helper
from .db_reuse import (monkey_patch_creation_for_db_reuse,
monkey_patch_creation_for_db_suffix)
from .django_compat import is_django_unittest
from .lazy_django import get_django_version, skip_if_no_django
__all__ = ['_django_db_setup', 'db', 'transactional_db', 'admin_user',
'django_user_model', 'django_username_field',
'client', 'admin_client', 'rf', 'settings', 'live_server',
'_live_server_helper']
# ############### Internal Fixtures ################
@pytest.fixture(scope='session')
def _django_db_setup(request,
_django_test_environment,
_django_cursor_wrapper):
"""Session-wide database setup, internal to pytest-django"""
skip_if_no_django()
from .compat import setup_databases, teardown_databases
# xdist
if hasattr(request.config, 'slaveinput'):
db_suffix = request.config.slaveinput['slaveid']
else:
db_suffix = None
monkey_patch_creation_for_db_suffix(db_suffix)
_handle_south()
if request.config.getvalue('nomigrations'):
_disable_native_migrations()
db_args = {}
with _django_cursor_wrapper:
if (request.config.getvalue('reuse_db') and
not request.config.getvalue('create_db')):
if get_django_version() >= (1, 8):
db_args['keepdb'] = True
else:
monkey_patch_creation_for_db_reuse()
# Create the database
db_cfg = setup_databases(verbosity=pytest.config.option.verbose,
interactive=False, **db_args)
def teardown_database():
with _django_cursor_wrapper:
teardown_databases(db_cfg)
if not request.config.getvalue('reuse_db'):
request.addfinalizer(teardown_database)
def _django_db_fixture_helper(transactional, request, _django_cursor_wrapper):
if is_django_unittest(request):
return
if not transactional and 'live_server' in request.funcargnames:
# Do nothing, we get called with transactional=True, too.
return
django_case = None
_django_cursor_wrapper.enable()
request.addfinalizer(_django_cursor_wrapper.disable)
if transactional:
from django import get_version
if get_version() >= '1.5':
from django.test import TransactionTestCase as django_case
else:
# Django before 1.5 flushed the DB during setUp.
# Use pytest-django's old behavior with it.
def flushdb():
"""Flush the database and close database connections"""
# Django does this by default *before* each test
# instead of after.
from django.db import connections
from django.core.management import call_command
for db in connections:
call_command('flush', interactive=False, database=db,
verbosity=pytest.config.option.verbose)
for conn in connections.all():
conn.close()
request.addfinalizer(flushdb)
else:
from django.test import TestCase as django_case
if django_case:
case = django_case(methodName='__init__')
case._pre_setup()
request.addfinalizer(case._post_teardown)
def _handle_south():
from django.conf import settings
# NOTE: Django 1.7 does not have `management._commands` anymore, which
# is used by South's `patch_for_test_db_setup` and the code below.
if 'south' not in settings.INSTALLED_APPS or get_django_version() > (1, 7):
return
from django.core import management
try:
# if `south` >= 0.7.1 we can use the test helper
from south.management.commands import patch_for_test_db_setup
except ImportError:
# if `south` < 0.7.1 make sure its migrations are disabled
management.get_commands()
management._commands['syncdb'] = 'django.core'
else:
# Monkey-patch south.hacks.django_1_0.SkipFlushCommand to load
# initial data.
# Ref: http://south.aeracode.org/ticket/1395#comment:3
import south.hacks.django_1_0
from django.core.management.commands.flush import (
Command as FlushCommand)
class SkipFlushCommand(FlushCommand):
def handle_noargs(self, **options):
# Reinstall the initial_data fixture.
from django.core.management import call_command
# `load_initial_data` was introduced with Django 1.5.
load_initial_data = options.get('load_initial_data', None)
if load_initial_data or load_initial_data is None:
# Reinstall the initial_data fixture.
call_command('loaddata', 'initial_data', **options)
# no-op to avoid calling flush
return
south.hacks.django_1_0.SkipFlushCommand = SkipFlushCommand
patch_for_test_db_setup()
def _disable_native_migrations():
from django.conf import settings
from .migrations import DisableMigrations
settings.MIGRATION_MODULES = DisableMigrations()
# ############### User visible fixtures ################
@pytest.fixture(scope='function')
def db(request, _django_db_setup, _django_cursor_wrapper):
"""Require a django test database
This database will be setup with the default fixtures and will have
the transaction management disabled. At the end of the test the outer
transaction that wraps the test itself will be rolled back to undo any
changes to the database (in case the backend supports transactions).
This is more limited than the ``transactional_db`` resource but
faster.
If both this and ``transactional_db`` are requested then the
database setup will behave as only ``transactional_db`` was
requested.
"""
if 'transactional_db' in request.funcargnames \
or 'live_server' in request.funcargnames:
request.getfuncargvalue('transactional_db')
else:
_django_db_fixture_helper(False, request, _django_cursor_wrapper)
@pytest.fixture(scope='function')
def transactional_db(request, _django_db_setup, _django_cursor_wrapper):
"""Require a django test database with transaction support
This will re-initialise the django database for each test and is
thus slower than the normal ``db`` fixture.
If you want to use the database with transactions you must request
this resource. If both this and ``db`` are requested then the
database setup will behave as only ``transactional_db`` was
requested.
"""
_django_db_fixture_helper(True, request, _django_cursor_wrapper)
@pytest.fixture()
def client():
"""A Django test client instance."""
skip_if_no_django()
from django.test.client import Client
return Client()
@pytest.fixture()
def django_user_model(db):
"""The class of Django's user model."""
try:
from django.contrib.auth import get_user_model
except ImportError:
assert get_django_version() < (1, 5)
from django.contrib.auth.models import User as UserModel
else:
UserModel = get_user_model()
return UserModel
@pytest.fixture()
def django_username_field(django_user_model):
"""The fieldname for the username used with Django's user model."""
try:
return django_user_model.USERNAME_FIELD
except AttributeError:
assert get_django_version() < (1, 5)
return 'username'
@pytest.fixture()
def admin_user(db, django_user_model, django_username_field):
"""A Django admin user.
This uses an existing user with username "admin", or creates a new one with
password "password".
"""
UserModel = django_user_model
username_field = django_username_field
try:
user = UserModel._default_manager.get(**{username_field: 'admin'})
except UserModel.DoesNotExist:
extra_fields = {}
if username_field != 'username':
extra_fields[username_field] = 'admin'
user = UserModel._default_manager.create_superuser(
'admin', 'admin@example.com', 'password', **extra_fields)
return user
@pytest.fixture()
def admin_client(db, admin_user):
"""A Django test client logged in as an admin user."""
from django.test.client import Client
client = Client()
client.login(username=admin_user.username, password='password')
return client
@pytest.fixture()
def rf():
"""RequestFactory instance"""
skip_if_no_django()
from django.test.client import RequestFactory
return RequestFactory()
class MonkeyPatchWrapper(object):
def __init__(self, monkeypatch, wrapped_object):
super(MonkeyPatchWrapper, self).__setattr__('monkeypatch', monkeypatch)
super(MonkeyPatchWrapper, self).__setattr__('wrapped_object',
wrapped_object)
def __getattr__(self, attr):
return getattr(self.wrapped_object, attr)
def __setattr__(self, attr, value):
self.monkeypatch.setattr(self.wrapped_object, attr, value,
raising=False)
def __delattr__(self, attr):
self.monkeypatch.delattr(self.wrapped_object, attr)
@pytest.fixture()
def settings(monkeypatch):
"""A Django settings object which restores changes after the testrun"""
skip_if_no_django()
from django.conf import settings as django_settings
return MonkeyPatchWrapper(monkeypatch, django_settings)
@pytest.fixture(scope='session')
def live_server(request):
"""Run a live Django server in the background during tests
The address the server is started from is taken from the
--liveserver command line option or, if this is not provided, from
the DJANGO_LIVE_TEST_SERVER_ADDRESS environment variable. If
neither is provided ``localhost:8081,8100-8200`` is used. See the
Django documentation for its full syntax.
NOTE: If the live server needs database access to handle a request
your test will have to request database access. Furthermore
when the tests want to see data added by the live-server (or
the other way around) transactional database access will be
needed as data inside a transaction is not shared between
the live server and test code.
Static assets will be served for all versions of Django, except for
Django >= 1.7 when ``django.contrib.staticfiles`` is not installed.
"""
skip_if_no_django()
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_LIVE_TEST_SERVER_ADDRESS')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if addr:
warnings.warn('Please use DJANGO_LIVE_TEST_SERVER_ADDRESS'
' instead of DJANGO_TEST_LIVE_SERVER_ADDRESS.',
DeprecationWarning)
if not addr:
addr = 'localhost:8081,8100-8200'
server = live_server_helper.LiveServer(addr)
request.addfinalizer(server.stop)
return server
@pytest.fixture(autouse=True, scope='function')
def _live_server_helper(request):
"""Helper to make live_server work, internal to pytest-django.
This helper will dynamically request the transactional_db fixture
for a test which uses the live_server fixture. This allows the
server and test to access the database without having to mark
this explicitly which is handy since it is usually required and
matches the Django behaviour.
The separate helper is required since live_server can not request
transactional_db directly, as it is session-scoped instead of
function-scoped.
"""
if 'live_server' in request.funcargnames:
request.getfuncargvalue('transactional_db')
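# Hypothetical test module illustrating the fixtures above (the live_server
# fixture implicitly pulls in transactional_db via _live_server_helper):
#
#   def test_admin_sees_admin_site(admin_client):
#       assert admin_client.get('/admin/').status_code == 200
#
#   def test_homepage_is_up(live_server):
#       import requests
#       assert requests.get(live_server.url).status_code == 200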
|
tomviner/pytest-django
|
pytest_django/fixtures.py
|
Python
|
bsd-3-clause
| 12,082
|
"""empty message
Revision ID: 424f18f4c1df
Revises: 106e3631fe9
Create Date: 2015-06-23 11:31:08.548661
"""
# revision identifiers, used by Alembic.
revision = '424f18f4c1df'
down_revision = '106e3631fe9'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import ENUM
providers_list = ENUM('facebook', 'twitter', 'truenth', name='providers',
create_type=False)
def upgrade():
### commands auto generated by Alembic - please adjust! ###
providers_list.create(op.get_bind(), checkfirst=False)
op.create_table('auth_providers',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('provider', providers_list, nullable=True),
sa.Column('provider_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('auth_providers')
providers_list.drop(op.get_bind(), checkfirst=False)
### end Alembic commands ###
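# Applying this revision from the command line (sketch; assumes the
# project's alembic.ini points at the target database):
#
#   alembic upgrade 424f18f4c1df
#   alembic downgrade 106e3631fe9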
|
uwcirg/true_nth_usa_portal
|
portal/migrations/versions/424f18f4c1df_.py
|
Python
|
bsd-3-clause
| 1,316
|
"""
JupyterHub Spawner to spawn user notebooks on a Kubernetes cluster.
This module exports `KubeSpawner` class, which is the actual spawner
implementation that should be used by JupyterHub.
"""
import asyncio
import os
import signal
import string
import sys
import warnings
from functools import partial
from functools import wraps
from urllib.parse import urlparse
import escapism
from jinja2 import BaseLoader
from jinja2 import Environment
from jupyterhub.spawner import Spawner
from jupyterhub.traitlets import Command
from jupyterhub.utils import exponential_backoff
from kubernetes_asyncio import client
from kubernetes_asyncio.client.rest import ApiException
from slugify import slugify
from tornado import gen
from traitlets import Bool
from traitlets import default
from traitlets import Dict
from traitlets import Integer
from traitlets import List
from traitlets import observe
from traitlets import Unicode
from traitlets import Union
from traitlets import validate
from .clients import load_config
from .clients import shared_client
from .objects import make_namespace
from .objects import make_owner_reference
from .objects import make_pod
from .objects import make_pvc
from .objects import make_secret
from .objects import make_service
from .reflector import ResourceReflector
from .traitlets import Callable
class PodReflector(ResourceReflector):
"""
PodReflector is merely a configured ResourceReflector. It exposes
the pods property, which is simply mapping to self.resources where the
ResourceReflector keeps an updated list of the resource defined by
the `kind` field and the `list_method_name` field.
"""
kind = "pods"
# The default component label can be overridden by specifying the component_label property
labels = {
'component': 'singleuser-server',
}
@property
def pods(self):
"""
A dictionary of pods for the namespace as returned by the Kubernetes
API. The dictionary keys are the pod ids and the values are
dictionaries of the actual pod resource values.
ref: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#pod-v1-core
"""
return self.resources
class EventReflector(ResourceReflector):
"""
EventsReflector is merely a configured ResourceReflector. It
exposes the events property, which simply maps to self.resources, where
the ResourceReflector keeps an updated list of the resource
defined by the `kind` field and the `list_method_name` field.
"""
kind = "events"
@property
def events(self):
"""
Returns list of dictionaries representing the k8s
events within the namespace, sorted by the latest event.
ref: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#event-v1-core
"""
# NOTE:
# - self.resources is a dictionary with keys mapping unique ids of
# Kubernetes Event resources, updated by ResourceReflector.
# self.resources builds up with incoming k8s events, but can also
# suddenly refresh itself entirely. We should not assume a call to
# this dictionary's values will result in a consistently ordered list,
# so we sort it to get it somewhat more structured.
# - We either seem to get only event['lastTimestamp'] or
# event['eventTime']; both fields serve the same role, but the former
# is a low-resolution timestamp and the latter is a higher-resolution
# timestamp.
return sorted(
self.resources.values(),
key=lambda event: event["lastTimestamp"] or event["eventTime"],
)
class MockObject(object):
pass
class KubeSpawner(Spawner):
"""
A JupyterHub spawner that spawns pods in a Kubernetes cluster. Each server
spawned by a user will have its own KubeSpawner instance.
"""
reflectors = {
"pods": None,
"events": None,
}
# Characters as defined by safe for DNS
# Note: '-' is not in safe_chars, as it is being used as escape character
safe_chars = set(string.ascii_lowercase + string.digits)
@property
def pod_reflector(self):
"""
A convenience alias to the class variable reflectors['pods'].
"""
return self.__class__.reflectors['pods']
@property
def event_reflector(self):
"""
A convenience alias to the class variable reflectors['events'] if the
spawner instance has events_enabled.
"""
if self.events_enabled:
return self.__class__.reflectors['events']
def __init__(self, *args, **kwargs):
_mock = kwargs.pop('_mock', False)
super().__init__(*args, **kwargs)
if _mock:
# runs during test execution only
if 'user' not in kwargs:
user = MockObject()
user.name = 'mock_name'
user.id = 'mock_id'
user.url = 'mock_url'
self.user = user
if 'hub' not in kwargs:
hub = MockObject()
hub.public_host = 'mock_public_host'
hub.url = 'mock_url'
hub.base_url = 'mock_base_url'
hub.api_url = 'mock_api_url'
self.hub = hub
# We have to set the namespace (if user namespaces are enabled)
# before we start the reflectors, so this must run before
# watcher start in normal execution. We still want to get the
# namespace right for test, though, so we need self.user to have
# been set in order to do that.
# By now, all the traitlets have been set, so we can use them to
# compute other attributes
if self.enable_user_namespaces:
self.namespace = self._expand_user_properties(self.user_namespace_template)
self.log.info("Using user namespace: {}".format(self.namespace))
self.pod_name = self._expand_user_properties(self.pod_name_template)
self.dns_name = self.dns_name_template.format(
namespace=self.namespace, name=self.pod_name
)
self.secret_name = self._expand_user_properties(self.secret_name_template)
self.pvc_name = self._expand_user_properties(self.pvc_name_template)
if self.working_dir:
self.working_dir = self._expand_user_properties(self.working_dir)
if self.port == 0:
# Our default port is 8888
self.port = 8888
# The attribute needs to exist, even though it is unset to start with
self._start_future = None
load_config(host=self.k8s_api_host, ssl_ca_cert=self.k8s_api_ssl_ca_cert)
self.api = shared_client("CoreV1Api")
self._start_watching_pods()
if self.events_enabled:
self._start_watching_events()
def _await_pod_reflector(method):
"""Decorator to wait for pod reflector to load
Apply to methods which require the pod reflector
to have completed its first load of pods.
"""
@wraps(method)
async def async_method(self, *args, **kwargs):
if not self.pod_reflector.first_load_future.done():
await self.pod_reflector.first_load_future
return await method(self, *args, **kwargs)
return async_method
def _await_event_reflector(method):
"""Decorator to wait for event reflector to load
Apply to methods which require the event reflector
to have completed its first load of events.
"""
@wraps(method)
async def async_method(self, *args, **kwargs):
if (
self.events_enabled
and not self.event_reflector.first_load_future.done()
):
await self.event_reflector.first_load_future
return await method(self, *args, **kwargs)
return async_method
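# Illustrative application of the decorators above (sketch): any coroutine
# that inspects pod state can be gated on the reflector's first load, e.g.
#
#   @_await_pod_reflector
#   async def poll(self):
#       ...  # safe to read self.pod_reflector.pods here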
k8s_api_ssl_ca_cert = Unicode(
"",
config=True,
help="""
Location (absolute filepath) for CA certs of the k8s API server.
Typically this is unnecessary, CA certs are picked up by
config.load_incluster_config() or config.load_kube_config.
In rare non-standard cases, such as using custom intermediate CA
for your cluster, you may need to mount root CA's elsewhere in
your Pod/Container and point this variable to that filepath
""",
)
k8s_api_host = Unicode(
"",
config=True,
help="""
Full host name of the k8s API server ("https://hostname:port").
Typically this is unnecessary, the hostname is picked up by
config.load_incluster_config() or config.load_kube_config.
""",
)
k8s_api_threadpool_workers = Integer(
config=True,
help="""
DEPRECATED in KubeSpawner 3.0.0.
No longer has any effect, as there is no threadpool anymore.
""",
)
k8s_api_request_timeout = Integer(
3,
config=True,
help="""
API request timeout (in seconds) for all k8s API calls.
This is the total amount of time a request might take before the connection
is killed. This includes connection time and reading the response.
NOTE: This is currently only implemented for creation and deletion of pods,
and creation of PVCs.
""",
)
k8s_api_request_retry_timeout = Integer(
30,
config=True,
help="""
Total timeout, including retry timeout, for kubernetes API calls
When a k8s API request connection times out, we retry it while backing
off exponentially. This lets you configure the total amount of time
we will spend trying an API request - including retries - before
giving up.
""",
)
events_enabled = Bool(
True,
config=True,
help="""
Enable event-watching for progress-reports to the user spawn page.
Disable if these events are not desirable
or to save some performance cost.
""",
)
enable_user_namespaces = Bool(
False,
config=True,
help="""
Cause each user to be spawned into an individual namespace.
This comes with some caveats. The Hub must run with significantly
more privilege (must have ClusterRoles analogous to its usual Roles)
and can therefore do heinous things to the entire cluster.
It will also make the Reflectors aware of pods and events across
all namespaces. This will have performance implications, although
using labels to restrict resource selection helps somewhat.
If you use this, consider cleaning up the user namespace in your
post_stop_hook.
""",
)
user_namespace_template = Unicode(
"{hubnamespace}-{username}",
config=True,
help="""
Template to use to form the namespace of user's pods (only if
enable_user_namespaces is True).
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
""",
)
namespace = Unicode(
config=True,
help="""
Kubernetes namespace to spawn user pods in.
Assuming that you are not running with enable_user_namespaces
turned on, if running inside a kubernetes cluster with service
accounts enabled, defaults to the current namespace, and if not,
defaults to `default`.
If you are running with enable_user_namespaces, this parameter
is ignored in favor of the `user_namespace_template` template
resolved with the hub namespace and the user name, with the
caveat that if the hub namespace is `default` the user
namespace will have the prefix `user` rather than `default`.
""",
)
@default('namespace')
def _namespace_default(self):
"""
Set namespace default to current namespace if running in a k8s cluster
If not in a k8s cluster with service accounts enabled, default to
`default`
"""
ns_path = '/var/run/secrets/kubernetes.io/serviceaccount/namespace'
if os.path.exists(ns_path):
with open(ns_path) as f:
return f.read().strip()
return 'default'
ip = Unicode(
'0.0.0.0',
config=True,
help="""
The IP address (or hostname) the single-user server should listen on.
We override this from the parent so we can set a more sane default for
the Kubernetes setup.
""",
)
cmd = Command(
None,
allow_none=True,
minlen=0,
config=True,
help="""
The command used to start the single-user server.
Either
- a string containing a single command or path to a startup script
- a list of the command and arguments
- `None` (default) to use the Docker image's `CMD`
If `cmd` is set, it will be augmented with `spawner.get_args()`. This will override the `CMD` specified in the Docker image.
""",
)
# FIXME: Don't override 'default_value' ("") or 'allow_none' (False) (Breaking change)
working_dir = Unicode(
None,
allow_none=True,
config=True,
help="""
The working directory where the Notebook server will be started inside the container.
Defaults to `None` so the working directory will be the one defined in the Dockerfile.
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
""",
)
# FIXME: Don't override 'default_value' ("") or 'allow_none' (False) (Breaking change)
service_account = Unicode(
None,
allow_none=True,
config=True,
help="""
The service account to be mounted in the spawned user pod.
The token of the service account is NOT mounted by default.
This makes sure that we don't accidentally give access to the whole
kubernetes API to the users in the spawned pods.
Set automount_service_account_token to True to mount it.
This `serviceaccount` must already exist in the namespace the user pod is being spawned in.
""",
)
automount_service_account_token = Bool(
None,
allow_none=True,
config=True,
help="""
Whether to mount the service account token in the spawned user pod.
The default value is None, which mounts the token if the service account is explicitly set,
but doesn't mount it if not.
WARNING: Be careful with this configuration! Make sure the service account being mounted
has the minimal permissions needed, and nothing more. When misconfigured, this can easily
give arbitrary users root over your entire cluster.
""",
)
dns_name_template = Unicode(
"{name}.{namespace}.svc.cluster.local",
config=True,
help="""
Template to use to form the dns name for the pod.
""",
)
pod_name_template = Unicode(
'jupyter-{username}--{servername}',
config=True,
help="""
Template to use to form the name of user's pods.
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
Trailing `-` characters are stripped for safe handling of empty server names (user default servers).
This must be unique within the namespace the pods are being spawned
in, so if you are running multiple jupyterhubs spawning in the
same namespace, consider setting this to be something more unique.
.. versionchanged:: 0.12
`--` delimiter added to the template,
where it was implicitly added to the `servername` field before.
Additionally, `username--servername` delimiter was `-` instead of `--`,
allowing collisions in certain circumstances.
""",
)
pod_connect_ip = Unicode(
config=True,
help="""
The IP address (or hostname) of user's pods which KubeSpawner connects to.
If you do not specify the value, KubeSpawner will use the pod IP.
e.g. 'jupyter-{username}--{servername}.notebooks.jupyterhub.svc.cluster.local',
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
Trailing `-` characters in each domain level are stripped for safe handling of empty server names (user default servers).
This must be unique within the namespace the pods are being spawned
in, so if you are running multiple jupyterhubs spawning in the
same namespace, consider setting this to be something more unique.
""",
)
storage_pvc_ensure = Bool(
False,
config=True,
help="""
Ensure that a PVC exists for each user before spawning.
Set to true to create a PVC named with `pvc_name_template` if it does
not exist for the user when their pod is spawning.
""",
)
delete_pvc = Bool(
True,
config=True,
help="""Delete PVCs when deleting Spawners.
When a Spawner is deleted (not just stopped),
delete its associated PVC.
This occurs when a named server is deleted,
or when the user itself is deleted for the default Spawner.
Requires JupyterHub 1.4.1 for Spawner.delete_forever support.
.. versionadded: 0.17
""",
)
pvc_name_template = Unicode(
'claim-{username}--{servername}',
config=True,
help="""
Template to use to form the name of user's pvc.
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
Trailing `-` characters are stripped for safe handling of empty server names (user default servers).
This must be unique within the namespace the PVCs are being created
in, so if you are running multiple jupyterhubs spawning in the
same namespace, consider setting this to be something more unique.
.. versionchanged:: 0.12
`--` delimiter added to the template,
where it was implicitly added to the `servername` field before.
Additionally, `username--servername` delimiter was `-` instead of `--`,
allowing collisions in certain circumstances.
""",
)
component_label = Unicode(
'singleuser-server',
config=True,
help="""
The component label used to tag the user pods. This can be used to override
the spawner behavior when dealing with multiple hub instances in the same
namespace. Usually helpful for CI workflows.
""",
)
secret_name_template = Unicode(
'jupyter-{username}{servername}',
config=True,
help="""
Template to use to form the name of user's secret.
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
This must be unique within the namespace the secrets are being created
in, so if you are running multiple jupyterhubs spawning in the
same namespace, consider setting this to be something more unique.
""",
)
secret_mount_path = Unicode(
"/etc/jupyterhub/ssl/",
allow_none=False,
config=True,
help="""
Location to mount the spawned pod's certificates needed for internal_ssl functionality.
""",
)
# FIXME: Don't override 'default_value' ("") or 'allow_none' (False) (Breaking change)
hub_connect_ip = Unicode(
allow_none=True,
config=True,
help="""DEPRECATED. Use c.JupyterHub.hub_connect_ip""",
)
hub_connect_port = Integer(
config=True, help="""DEPRECATED. Use c.JupyterHub.hub_connect_url"""
)
@observe('hub_connect_ip', 'hub_connect_port')
def _deprecated_changed(self, change):
warnings.warn(
"""
KubeSpawner.{0} is deprecated with JupyterHub >= 0.8.
Use JupyterHub.{0}
""".format(
change.name
),
DeprecationWarning,
)
setattr(self.hub, change.name.split('_', 1)[1], change.new)
common_labels = Dict(
{
'app': 'jupyterhub',
'heritage': 'jupyterhub',
},
config=True,
help="""
Kubernetes labels that both spawned singleuser server pods and created
user PVCs will get.
Note that these are only set when the Pods and PVCs are created, not
later when this setting is updated.
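For example, to add a hypothetical team label to everything KubeSpawner
creates (the `team` key and its value are placeholders, not a Kubernetes
convention)::
    c.KubeSpawner.common_labels = {
        'app': 'jupyterhub',
        'heritage': 'jupyterhub',
        'team': 'data-science',
    }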
""",
)
extra_labels = Dict(
config=True,
help="""
Extra kubernetes labels to set on the spawned single-user pods, as well
as on the pods' associated k8s Service and k8s Secret if internal_ssl is
enabled.
The keys and values specified here would be set as labels on the spawned single-user
kubernetes pods. The keys and values must both be strings that match the kubernetes
label key / value constraints.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/>`__
for more info on what labels are and why you might want to use them!
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
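For example, a sketch that tags pods with a hypothetical per-user label
(the label key is a placeholder)::
    c.KubeSpawner.extra_labels = {
        'hub.example.com/owner': '{username}',
    }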
""",
)
extra_annotations = Dict(
config=True,
help="""
Extra Kubernetes annotations to set on the spawned single-user pods, as
well as on the pods' associated k8s Service and k8s Secret if
internal_ssl is enabled.
The keys and values specified here are added as annotations on the spawned single-user
kubernetes pods. The keys and values must both be strings.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/>`__
for more info on what annotations are and why you might want to use them!
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
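For example, a sketch with a hypothetical annotation key::
    c.KubeSpawner.extra_annotations = {
        'example.com/spawned-for': '{username}',
    }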
""",
)
image = Unicode(
'jupyterhub/singleuser:latest',
config=True,
help="""
Docker image to use for spawning user's containers.
Defaults to `jupyterhub/singleuser:latest`
Name of the image + a tag, same as would be used with
a `docker pull` command. If tag is set to `latest`, kubernetes will
check the registry each time a new user is spawned to see if there
is a newer image available. If available, new image will be pulled.
Note that this could cause long delays when spawning, especially
if the image is large. If you do not specify a tag, whatever version
of the image is first pulled on the node will be used, thus possibly
leading to inconsistent images on different nodes. For all these
reasons, it is recommended to specify a specific immutable tag
for the image.
If your image is very large, you might need to increase the timeout
for starting the single user container from the default. You can
set this with::
c.KubeSpawner.start_timeout = 60 * 5 # Up to 5 minutes
""",
)
image_pull_policy = Unicode(
'IfNotPresent',
config=True,
help="""
The image pull policy of the docker container specified in
`image`.
Defaults to `IfNotPresent` which causes the Kubelet to NOT pull the image
specified in KubeSpawner.image if it already exists, except if the tag
is `:latest`. For more information on image pull policy,
refer to `the Kubernetes documentation <https://kubernetes.io/docs/concepts/containers/images/>`__.
This configuration is primarily used in development if you are
actively changing the `image` and would like to pull the image
whenever a user container is spawned.
""",
)
image_pull_secrets = Union(
trait_types=[
List(),
Unicode(),
],
config=True,
help="""
A list of references to Kubernetes Secret resources with credentials to
pull images from image registries. This list can either have strings in
it or objects with the string value nested under a name field.
Passing a single string is still supported, but deprecated as of
KubeSpawner 0.14.0.
See `the Kubernetes documentation
<https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod>`__
for more information on when and why this might need to be set, and what
it should be set to.
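For example, assuming a Kubernetes Secret named `my-registry-secret` already
exists in the namespace (the name is a placeholder), either form works::
    c.KubeSpawner.image_pull_secrets = ['my-registry-secret']
    # or, equivalently:
    c.KubeSpawner.image_pull_secrets = [{'name': 'my-registry-secret'}]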
""",
)
@validate('image_pull_secrets')
def _validate_image_pull_secrets(self, proposal):
if isinstance(proposal['value'], str):
warnings.warn(
"""Passing KubeSpawner.image_pull_secrets string values is
deprecated since KubeSpawner 0.14.0. The recommended
configuration is now a list of either strings or dictionary
objects, where the string referencing the Kubernetes Secret name
is nested under the dictionary's name key.""",
DeprecationWarning,
)
return [{"name": proposal['value']}]
return proposal['value']
node_selector = Dict(
config=True,
help="""
Dictionary of node selector labels used to match the nodes where the pods will be launched.
Default is None, which means the pods can be launched on any available node.
For example to match the Nodes that have a label of `disktype: ssd` use::
c.KubeSpawner.node_selector = {'disktype': 'ssd'}
""",
)
uid = Union(
trait_types=[
Integer(),
Callable(),
],
default_value=None,
allow_none=True,
config=True,
help="""
The UID to run the single-user server containers as.
This UID should ideally map to a user that already exists in the container
image being used. Running as root is discouraged.
Instead of an integer, this could also be a callable that takes as one
parameter the current spawner instance and returns an integer. The callable
will be called asynchronously if it returns a future. Note that
the interface of the spawner class is not deemed stable across versions,
so using this functionality might cause your JupyterHub or kubespawner
upgrades to break.
If set to `None`, the user specified with the `USER` directive in the
container metadata is used.
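A minimal sketch of the callable form, assuming a hypothetical policy of
running admins under a dedicated UID (both UIDs are placeholders)::
    def assign_uid(spawner):
        # hypothetical policy; both UIDs are placeholders
        return 1100 if spawner.user.admin else 1000
    c.KubeSpawner.uid = assign_uid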
""",
)
gid = Union(
trait_types=[
Integer(),
Callable(),
],
default_value=None,
allow_none=True,
config=True,
help="""
The GID to run the single-user server containers as.
This GID should ideally map to a group that already exists in the container
image being used. Running as root is discouraged.
Instead of an integer, this could also be a callable that takes as one
parameter the current spawner instance and returns an integer. The callable
will be called asynchronously if it returns a future. Note that
the interface of the spawner class is not deemed stable across versions,
so using this functionality might cause your JupyterHub or kubespawner
upgrades to break.
If set to `None`, the group of the user specified with the `USER` directive
in the container metadata is used.
""",
)
fs_gid = Union(
trait_types=[
Integer(),
Callable(),
],
default_value=None,
allow_none=True,
config=True,
help="""
The GID of the group that should own any volumes that are created & mounted.
A special supplemental group that applies primarily to the volumes mounted
in the single-user server. In volumes from supported providers, the following
things happen:
1. The owning GID will be this GID
2. The setgid bit is set (new files created in the volume will be owned by
this GID)
3. The permission bits are OR’d with rw-rw----
The single-user server will also be run with this gid as part of its supplemental
groups.
Instead of an integer, this could also be a callable that takes as one
parameter the current spawner instance and returns an integer. The callable will
be called asynchronously if it returns a future, rather than an int. Note that
the interface of the spawner class is not deemed stable across versions,
so using this functionality might cause your JupyterHub or kubespawner
upgrades to break.
You'll *have* to set this if you are using auto-provisioned volumes with most
cloud providers. See `fsGroup <https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podsecuritycontext-v1-core>`__
for more details.
""",
)
supplemental_gids = Union(
trait_types=[
List(),
Callable(),
],
config=True,
help="""
A list of GIDs that should be set as additional supplemental groups to the
user that the container runs as.
Instead of a list of integers, this could also be a callable that takes as one
parameter the current spawner instance and returns a list of integers. The
callable will be called asynchronously if it returns a future, rather than
a list. Note that the interface of the spawner class is not deemed stable
across versions, so using this functionality might cause your JupyterHub
or kubespawner upgrades to break.
You may have to set this if you are deploying to an environment with RBAC/SCC
enforced and pods run with a 'restricted' SCC which results in the image being
run as an assigned user ID. The supplemental group IDs would need to include
the corresponding group ID of the user ID the image normally would run as. The
image must setup all directories/files any application needs access to, as group
writable.
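For example, to add the (placeholder) group IDs 100 and 1001 to every user
container::
    c.KubeSpawner.supplemental_gids = [100, 1001]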
""",
)
privileged = Bool(
False,
config=True,
help="""
Whether to run the pod with a privileged security context.
""",
)
allow_privilege_escalation = Bool(
False,
allow_none=True,
config=True,
help="""
Controls whether a process can gain more privileges than its parent process.
When set to False (the default), the primary user-visible effect is that
setuid binaries (like sudo) will no longer work.
When set to None, the defaults for the cluster are respected.
This bool directly controls whether the no_new_privs flag gets set on the container.
AllowPrivilegeEscalation is always true when the container is:
1) run as Privileged, OR 2) has CAP_SYS_ADMIN.
""",
)
container_security_context = Union(
trait_types=[
Dict(),
Callable(),
],
config=True,
help="""
A Kubernetes security context for the container. Note that all
configuration options within here should be camelCased.
What is configured here has the highest priority, so the alternative
configuration `uid`, `gid`, `privileged`, and
`allow_privilege_escalation` will be overridden by this.
Rely on `the Kubernetes reference
<https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#securitycontext-v1-core>`__
for details on allowed configuration.
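A minimal sketch that drops all Linux capabilities (illustrative only, not a
vetted security policy)::
    c.KubeSpawner.container_security_context = {
        'capabilities': {'drop': ['ALL']},
    }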
""",
)
pod_security_context = Union(
trait_types=[
Dict(),
Callable(),
],
config=True,
help="""
A Kubernetes security context for the pod. Note that all configuration
options within here should be camelCased.
What is configured here has higher priority than `fs_gid` and
`supplemental_gids`, but lower priority than what is set in the
`container_security_context`.
Note that anything configured on the Pod level will influence all
containers, including init containers and sidecar containers.
Rely on `the Kubernetes reference
<https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podsecuritycontext-v1-core>`__
for details on allowed configuration.
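For example, a sketch setting a pod-level fsGroup (the GID is a placeholder;
note the camelCase key)::
    c.KubeSpawner.pod_security_context = {
        'fsGroup': 100,
    }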
""",
)
modify_pod_hook = Callable(
None,
allow_none=True,
config=True,
help="""
Callable to augment the Pod object before launching.
Expects a callable that takes two parameters:
1. The spawner object that is doing the spawning
2. The Pod object that is to be launched
You should modify the Pod object and return it.
This can be a coroutine if necessary. When set to `None`, no augmenting is done.
This is very useful if you want to modify the pod being launched dynamically.
Note that the spawner object can change between versions of KubeSpawner and JupyterHub,
so be careful relying on this!
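A minimal sketch, assuming you only want to tweak one field on the generated
`V1Pod` (the field chosen here is purely illustrative)::
    def modify_pod_hook(spawner, pod):
        pod.spec.dns_policy = 'ClusterFirstWithHostNet'
        return pod
    c.KubeSpawner.modify_pod_hook = modify_pod_hook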
""",
)
volumes = List(
config=True,
help="""
List of Kubernetes Volume specifications that will be mounted in the user pod.
This list will be directly added under `volumes` in the kubernetes pod spec,
so you should use the same structure. Each item in the list must have the
following two keys:
- `name`
Name that'll be later used in the `volume_mounts` config to mount this
volume at a specific path.
- `<name-of-a-supported-volume-type>` (such as `hostPath`, `persistentVolumeClaim`,
etc)
The key name determines the type of volume to mount, and the value should
be an object specifying the various options available for that kind of
volume.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/storage/volumes>`__
for more information on the various kinds of volumes available and their options.
Your kubernetes cluster must already be configured to support the volume types you want to use.
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
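For example, a sketch backing each user with a PVC (the volume name and claim
name template are placeholders; the claim must already exist or be ensured
via `storage_pvc_ensure`)::
    c.KubeSpawner.volumes = [{
        'name': 'user-data',
        'persistentVolumeClaim': {'claimName': 'claim-{username}'},
    }]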
""",
)
volume_mounts = List(
config=True,
help="""
List of paths on which to mount volumes in the user notebook's pod.
This list will be added to the values of the `volumeMounts` key under the user's
container in the kubernetes pod spec, so you should use the same structure as that.
Each item in the list should be a dictionary with at least these two keys:
- `mountPath` The path on the container in which we want to mount the volume.
- `name` The name of the volume we want to mount, as specified in the `volumes` config.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/storage/volumes>`__
for more information on how the `volumeMount` item works.
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
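For example, mounting the hypothetical `user-data` volume from the `volumes`
example above at a common notebook home directory::
    c.KubeSpawner.volume_mounts = [{
        'name': 'user-data',
        'mountPath': '/home/jovyan',
    }]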
""",
)
# FIXME: Don't override 'default_value' ("") or 'allow_none' (False) (Breaking change)
storage_capacity = Unicode(
None,
config=True,
allow_none=True,
help="""
The amount of storage space to request from the volume that the pvc will
mount to. This amount will be the amount of storage space the user has
to work with on their notebook. If left blank, the kubespawner will not
create a pvc for the pod.
This will be added to the `resources: requests: storage:` in the k8s pod spec.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistentvolumeclaims>`__
for more information on how storage works.
Quantities can be represented externally as unadorned integers, or as fixed-point
integers with one of these SI suffixes (`E, P, T, G, M, K, m`) or their power-of-two
equivalents (`Ei, Pi, Ti, Gi, Mi, Ki`). For example, the following represent roughly
the same value: `128974848`, `129e6`, `129M`, `123Mi`.
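For example, to request 10 gibibytes of storage per user::
    c.KubeSpawner.storage_capacity = '10Gi'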
""",
)
storage_extra_labels = Dict(
config=True,
help="""
Extra kubernetes labels to set on the user PVCs.
The keys and values specified here would be set as labels on the PVCs
created by kubespawner for the user. Note that these are only set
when the PVC is created, not later when this setting is updated.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/>`__
for more info on what labels are and why you might want to use them!
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
""",
)
# FIXME: Don't override 'default_value' ("") or 'allow_none' (False) (Breaking change)
storage_class = Unicode(
None,
config=True,
allow_none=True,
help="""
The storage class that the pvc will use.
This will be added to the `annotations: volume.beta.kubernetes.io/storage-class:`
in the pvc metadata.
This will determine what type of volume the pvc will request to use. If one exists
that matches the criteria of the StorageClass, the pvc will mount to that. Otherwise,
b/c it has a storage class, k8s will dynamically spawn a pv for the pvc to bind to
and a machine in the cluster for the pv to bind to.
Note that an empty string is a valid value and is always interpreted to be
requesting a pv with no class.
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/storage/storage-classes/>`__
for more information on how StorageClasses work.
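For example, assuming your cluster defines a StorageClass named `standard`
(the name is a placeholder)::
    c.KubeSpawner.storage_class = 'standard'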
""",
)
storage_access_modes = List(
["ReadWriteOnce"],
config=True,
help="""
List of access modes the user has for the pvc.
The access modes are:
- `ReadWriteOnce` : the volume can be mounted as read-write by a single node
- `ReadOnlyMany` : the volume can be mounted read-only by many nodes
- `ReadWriteMany` : the volume can be mounted as read-write by many nodes
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes>`__
for more information on how access modes work.
""",
)
storage_selector = Dict(
config=True,
help="""
The dictionary Selector labels used to match a PersistentVolumeClaim to
a PersistentVolume.
Default is None and means it will match based only on other storage criteria.
For example, to match volumes that have a label of `content: jupyter`, use::
c.KubeSpawner.storage_selector = {'matchLabels':{'content': 'jupyter'}}
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
""",
)
lifecycle_hooks = Dict(
config=True,
help="""
Kubernetes lifecycle hooks to set on the spawned single-user pods.
The keys are the hook names; Kubernetes supports only two, `postStart` and `preStop`.
The values are the hook handlers, which the Kubernetes management system executes when the hook is called.
Below is a sample copied from
`the Kubernetes documentation <https://kubernetes.io/docs/tasks/configure-pod-container/attach-handler-lifecycle-event/>`__::
c.KubeSpawner.lifecycle_hooks = {
"postStart": {
"exec": {
"command": ["/bin/sh", "-c", "echo Hello from the postStart handler > /usr/share/message"]
}
},
"preStop": {
"exec": {
"command": ["/usr/sbin/nginx", "-s", "quit"]
}
}
}
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/>`__
for more info on what lifecycle hooks are and why you might want to use them!
""",
)
init_containers = List(
config=True,
help="""
List of initialization containers belonging to the pod.
This list will be directly added under `initContainers` in the kubernetes pod spec,
so you should use the same structure. Each item in the list must follow the
`V1Container specification <https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#container-v1-core>`__.
One use is disabling access to the metadata service from the single-user
notebook server, with the configuration below::
c.KubeSpawner.init_containers = [{
"name": "init-iptables",
"image": "<image with iptables installed>",
"command": ["iptables", "-A", "OUTPUT", "-p", "tcp", "--dport", "80", "-d", "169.254.169.254", "-j", "DROP"],
"securityContext": {
"capabilities": {
"add": ["NET_ADMIN"]
}
}
}]
See `the Kubernetes documentation <https://kubernetes.io/docs/concepts/workloads/pods/init-containers/>`__
for more info on what init containers are and why you might want to use them!
To use this feature, the Kubernetes version must be greater than 1.6.
""",
)
extra_container_config = Dict(
config=True,
help="""
Extra configuration (e.g. ``envFrom``) for notebook container which is not covered by other attributes.
This dict will be directly merged into the notebook server's `container` spec,
so you should use the same structure. Each key in the dict must be a field
of the `V1Container specification <https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#container-v1-core>`__.
One use is setting ``envFrom`` on the notebook container, with the configuration below::
c.KubeSpawner.extra_container_config = {
"envFrom": [{
"configMapRef": {
"name": "special-config"
}
}]
}
The key could be either a camelCase word (used by Kubernetes yaml, e.g.
``envFrom``) or a snake_case word (used by Kubernetes Python client,
e.g. ``env_from``).
""",
)
extra_pod_config = Dict(
config=True,
help="""
Extra configuration for the pod which is not covered by other attributes.
This dict will be directly merged into the pod spec, so you should use the same structure.
Each key in the dict is a field of the pod configuration,
which follows the spec at https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podspec-v1-core
One use is setting restartPolicy and dnsPolicy, with the configuration below::
c.KubeSpawner.extra_pod_config = {
"restartPolicy": "OnFailure",
"dns_policy": "ClusterFirstWithHostNet"
}
The `key` could be either a camelCase word (used by Kubernetes yaml,
e.g. `restartPolicy`) or a snake_case word (used by Kubernetes Python
client, e.g. `dns_policy`).
""",
)
extra_containers = List(
config=True,
help="""
List of containers belonging to the pod, in addition to the container generated for the notebook server.
This list will be directly appended under `containers` in the kubernetes pod spec,
so you should use the same structure. Each item in the list is a container configuration
which follows the spec at https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#container-v1-core
One use is setting up a crontab in a sidecar container to clean sensitive data, with the configuration below::
c.KubeSpawner.extra_containers = [{
"name": "crontab",
"image": "supercronic",
"command": ["/usr/local/bin/supercronic", "/etc/crontab"]
}]
`{username}`, `{userid}`, `{servername}`, `{hubnamespace}`,
`{unescaped_username}`, and `{unescaped_servername}` will be expanded if
found within strings of this configuration. The username and servername
come escaped to follow the [DNS label
standard](https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names).
""",
)
# FIXME: Don't override 'default_value' ("") or 'allow_none' (False) (Breaking change)
scheduler_name = Unicode(
None,
allow_none=True,
config=True,
help="""
Set the pod's scheduler explicitly by name. See `the Kubernetes documentation <https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podspec-v1-core>`__
for more information.
""",
)
tolerations = List(
config=True,
help="""
List of tolerations that are to be assigned to the pod so that the pod can be
scheduled on a node with the corresponding taints. See the official Kubernetes documentation for additional details:
https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/
Pass this field an array of "Toleration" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#toleration-v1-core
Example::
[
{
'key': 'key',
'operator': 'Equal',
'value': 'value',
'effect': 'NoSchedule'
},
{
'key': 'key',
'operator': 'Exists',
'effect': 'NoSchedule'
}
]
""",
)
node_affinity_preferred = List(
config=True,
help="""
Affinities describe where pods prefer or require to be scheduled, they
may prefer or require a node to have a certain label or be in proximity
/ remoteness to another pod. To learn more visit
https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
Pass this field an array of "PreferredSchedulingTerm" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#preferredschedulingterm-v1-core
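A sketch preferring nodes that carry a hypothetical
`hub.jupyter.org/node-purpose: user` label (the label key and value are
placeholders)::
    c.KubeSpawner.node_affinity_preferred = [{
        'weight': 100,
        'preference': {
            'matchExpressions': [{
                'key': 'hub.jupyter.org/node-purpose',
                'operator': 'In',
                'values': ['user'],
            }],
        },
    }]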
""",
)
node_affinity_required = List(
config=True,
help="""
Affinities describe where pods prefer or require to be scheduled, they
may prefer or require a node to have a certain label or be in proximity
/ remoteness to another pod. To learn more visit
https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
Pass this field an array of "NodeSelectorTerm" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#nodeselectorterm-v1-core
""",
)
pod_affinity_preferred = List(
config=True,
help="""
Affinities describe where pods prefer or require to be scheduled, they
may prefer or require a node to have a certain label or be in proximity
/ remoteness to another pod. To learn more visit
https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
Pass this field an array of "WeightedPodAffinityTerm" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#weightedpodaffinityterm-v1-core
""",
)
pod_affinity_required = List(
config=True,
help="""
Affinities describe where pods prefer or require to be scheduled, they
may prefer or require a node to have a certain label or be in proximity
/ remoteness to another pod. To learn more visit
https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
Pass this field an array of "PodAffinityTerm" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podaffinityterm-v1-core
""",
)
pod_anti_affinity_preferred = List(
config=True,
help="""
Affinities describe where pods prefer or require to be scheduled, they
may prefer or require a node to have a certain label or be in proximity
/ remoteness to another pod. To learn more visit
https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
Pass this field an array of "WeightedPodAffinityTerm" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#weightedpodaffinityterm-v1-core
""",
)
pod_anti_affinity_required = List(
config=True,
help="""
Affinities describe where pods prefer or require to be scheduled, they
may prefer or require a node to have a certain label or be in proximity
/ remoteness to another pod. To learn more visit
https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
Pass this field an array of "PodAffinityTerm" objects.*
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#podaffinityterm-v1-core
""",
)
extra_resource_guarantees = Dict(
config=True,
help="""
The dictionary used to request arbitrary resources.
Default is None and means no additional resources are requested.
For example, to request 1 Nvidia GPU::
c.KubeSpawner.extra_resource_guarantees = {"nvidia.com/gpu": "1"}
""",
)
extra_resource_limits = Dict(
config=True,
help="""
The dictionary used to limit arbitrary resources.
Default is None and means no additional resources are limited.
For example, to add a limit of 3 Nvidia GPUs::
c.KubeSpawner.extra_resource_limits = {"nvidia.com/gpu": "3"}
""",
)
delete_stopped_pods = Bool(
True,
config=True,
help="""
Whether to delete pods that have stopped themselves.
Set to False to leave stopped pods in the completed state,
allowing for easier debugging of why they may have stopped.
""",
)
profile_form_template = Unicode(
"""
<style>
/* The profile description should not be bold, even though it is inside the <label> tag */
#kubespawner-profiles-list label p {
font-weight: normal;
}
</style>
<div class='form-group' id='kubespawner-profiles-list'>
{% for profile in profile_list %}
<label for='profile-item-{{ profile.slug }}' class='form-control input-group'>
<div class='col-md-1'>
<input type='radio' name='profile' id='profile-item-{{ profile.slug }}' value='{{ profile.slug }}' {% if profile.default %}checked{% endif %} />
</div>
<div class='col-md-11'>
<strong>{{ profile.display_name }}</strong>
{% if profile.description %}
<p>{{ profile.description }}</p>
{% endif %}
</div>
</label>
{% endfor %}
</div>
""",
config=True,
help="""
Jinja2 template for constructing profile list shown to user.
Used when `profile_list` is set.
The contents of `profile_list` are passed in to the template.
This should be used to construct the contents of an HTML form. When
posted, this form is expected to have an item with name `profile` and
the value the index of the profile in `profile_list`.
""",
)
profile_list = Union(
trait_types=[List(trait=Dict()), Callable()],
config=True,
help="""
List of profiles to offer for selection by the user.
Signature is: `List(Dict())`, where each item is a dictionary that has the following keys:
- `display_name`: the human readable display name (should be HTML safe)
- `slug`: the machine readable slug to identify the profile
(missing slugs are generated from display_name)
- `description`: Optional description of this profile displayed to the user.
- `kubespawner_override`: a dictionary with overrides to apply to the KubeSpawner
settings. Each value can be either the final value to change or a callable that
take the `KubeSpawner` instance as parameter and return the final value.
- `default`: (optional Bool) True if this is the default selected option
Example::
c.KubeSpawner.profile_list = [
{
'display_name': 'Training Env - Python',
'slug': 'training-python',
'default': True,
'kubespawner_override': {
'image': 'training/python:label',
'cpu_limit': 1,
'mem_limit': '512M',
}
}, {
'display_name': 'Training Env - Datascience',
'slug': 'training-datascience',
'kubespawner_override': {
'image': 'training/datascience:label',
'cpu_limit': 4,
'mem_limit': '8G',
}
}, {
'display_name': 'DataScience - Small instance',
'slug': 'datascience-small',
'kubespawner_override': {
'image': 'datascience/small:label',
'cpu_limit': 10,
'mem_limit': '16G',
}
}, {
'display_name': 'DataScience - Medium instance',
'slug': 'datascience-medium',
'kubespawner_override': {
'image': 'datascience/medium:label',
'cpu_limit': 48,
'mem_limit': '96G',
}
}, {
'display_name': 'DataScience - Medium instance (GPUx2)',
'slug': 'datascience-gpu2x',
'kubespawner_override': {
'image': 'datascience/medium:label',
'cpu_limit': 48,
'mem_limit': '96G',
'extra_resource_guarantees': {"nvidia.com/gpu": "2"},
}
}
]
Instead of a list of dictionaries, this could also be a callable that takes as one
parameter the current spawner instance and returns a list of dictionaries. The
callable will be called asynchronously if it returns a future, rather than
a list. Note that the interface of the spawner class is not deemed stable
across versions, so using this functionality might cause your JupyterHub
or kubespawner upgrades to break.
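A minimal sketch of the callable form, assuming a hypothetical policy of
offering an extra profile only to admins::
    def build_profile_list(spawner):
        profiles = [
            {'display_name': 'Default', 'slug': 'default', 'kubespawner_override': {}},
        ]
        if spawner.user.admin:  # hypothetical policy
            profiles.append({
                'display_name': 'Large',
                'slug': 'large',
                'kubespawner_override': {'mem_limit': '16G'},
            })
        return profiles
    c.KubeSpawner.profile_list = build_profile_list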
""",
)
priority_class_name = Unicode(
config=True,
help="""
The priority class that the pods will use.
See https://kubernetes.io/docs/concepts/configuration/pod-priority-preemption for
more information on how pod priority works.
""",
)
delete_grace_period = Integer(
1,
config=True,
help="""
Time in seconds for the pod to be in `terminating` state before it is forcefully killed.
Increase this if you need more time to execute a `preStop` lifecycle hook.
See https://kubernetes.io/docs/concepts/workloads/pods/pod/#termination-of-pods for
more information on how pod termination works.
Defaults to `1`.
""",
)
# deprecate redundant and inconsistent singleuser_ and user_ prefixes:
_deprecated_traits_09 = [
"singleuser_working_dir",
"singleuser_service_account",
"singleuser_extra_labels",
"singleuser_extra_annotations",
"singleuser_image_spec",
"singleuser_image_pull_policy",
"singleuser_image_pull_secrets",
"singleuser_node_selector",
"singleuser_uid",
"singleuser_fs_gid",
"singleuser_supplemental_gids",
"singleuser_privileged",
"singleuser_allow_privilege_escalation" "singleuser_lifecycle_hooks",
"singleuser_extra_pod_config",
"singleuser_init_containers",
"singleuser_extra_container_config",
"singleuser_extra_containers",
"user_storage_class",
"user_storage_pvc_ensure",
"user_storage_capacity",
"user_storage_extra_labels",
"user_storage_access_modes",
]
# other general deprecations:
_deprecated_traits = {
'image_spec': ('image', '0.10'),
}
# add the bulk deprecations from 0.9
for _deprecated_name in _deprecated_traits_09:
_new_name = _deprecated_name.split('_', 1)[1]
_deprecated_traits[_deprecated_name] = (_new_name, '0.9')
@validate('config')
def _handle_deprecated_config(self, proposal):
config = proposal.value
if 'KubeSpawner' not in config:
# nothing to check
return config
for _deprecated_name, (_new_name, version) in self._deprecated_traits.items():
# for any `singleuser_name` deprecate in favor of `name`
if _deprecated_name not in config.KubeSpawner:
# nothing to do
continue
# remove deprecated value from config
_deprecated_value = config.KubeSpawner.pop(_deprecated_name)
self.log.warning(
"KubeSpawner.%s is deprecated in %s. Use KubeSpawner.%s instead",
_deprecated_name,
version,
_new_name,
)
if _new_name in config.KubeSpawner:
# *both* config values found,
# ignore deprecated config and warn about the collision
_new_value = config.KubeSpawner[_new_name]
# ignore deprecated config in favor of non-deprecated config
self.log.warning(
"Ignoring deprecated config KubeSpawner.%s = %r "
" in favor of KubeSpawner.%s = %r",
_deprecated_name,
_deprecated_value,
_new_name,
_new_value,
)
else:
# move deprecated config to its new home
config.KubeSpawner[_new_name] = _deprecated_value
return config
# define properties for deprecated names
# so we can propagate their values to the new traits.
# most deprecations should be handled via config above,
# but in case these are set at runtime, e.g. by subclasses
# or hooks, hook this up.
# The signature-order of these is funny
# because the property methods are created with
# functools.partial(f, name) so name is passed as the first arg
# before self.
def _get_deprecated(name, new_name, version, self):
# warn about the deprecated name
self.log.warning(
"KubeSpawner.%s is deprecated in %s. Use KubeSpawner.%s",
name,
version,
new_name,
)
return getattr(self, new_name)
def _set_deprecated(name, new_name, version, self, value):
# warn about the deprecated name
self.log.warning(
"KubeSpawner.%s is deprecated in %s. Use KubeSpawner.%s",
name,
version,
new_name,
)
return setattr(self, new_name, value)
for _deprecated_name, (_new_name, _version) in _deprecated_traits.items():
exec(
"""{0} = property(
partial(_get_deprecated, '{0}', '{1}', '{2}'),
partial(_set_deprecated, '{0}', '{1}', '{2}'),
)
""".format(
_deprecated_name,
_new_name,
_version,
)
)
del _deprecated_name
def _expand_user_properties(self, template):
# Make sure username and servername match the restrictions for DNS labels
# Note: '-' is not in safe_chars, as it is being used as escape character
safe_chars = set(string.ascii_lowercase + string.digits)
raw_servername = self.name or ''
safe_servername = escapism.escape(
raw_servername, safe=safe_chars, escape_char='-'
).lower()
hub_namespace = self._namespace_default()
if hub_namespace == "default":
hub_namespace = "user"
legacy_escaped_username = ''.join(
[s if s in safe_chars else '-' for s in self.user.name.lower()]
)
safe_username = escapism.escape(
self.user.name, safe=safe_chars, escape_char='-'
).lower()
rendered = template.format(
userid=self.user.id,
username=safe_username,
unescaped_username=self.user.name,
legacy_escape_username=legacy_escaped_username,
servername=safe_servername,
unescaped_servername=raw_servername,
hubnamespace=hub_namespace,
)
# strip trailing - delimiter in case of empty servername.
# k8s object names cannot have trailing -
return rendered.rstrip("-")
def _expand_all(self, src):
if isinstance(src, list):
return [self._expand_all(i) for i in src]
elif isinstance(src, dict):
return {k: self._expand_all(v) for k, v in src.items()}
elif isinstance(src, str):
return self._expand_user_properties(src)
else:
return src
def _build_common_labels(self, extra_labels):
# Default set of labels, picked up from
# https://github.com/helm/helm-www/blob/HEAD/content/en/docs/chart_best_practices/labels.md
labels = {
'hub.jupyter.org/username': escapism.escape(
self.user.name, safe=self.safe_chars, escape_char='-'
).lower()
}
labels.update(extra_labels)
labels.update(self.common_labels)
return labels
def _build_pod_labels(self, extra_labels):
labels = self._build_common_labels(extra_labels)
labels.update(
{
'component': self.component_label,
'hub.jupyter.org/servername': self.name,
}
)
return labels
def _build_common_annotations(self, extra_annotations):
# Annotations don't need to be escaped
annotations = {'hub.jupyter.org/username': self.user.name}
if self.name:
annotations['hub.jupyter.org/servername'] = self.name
annotations.update(extra_annotations)
return annotations
# specify default ssl alt names
@default("ssl_alt_names")
def _default_ssl_alt_names(self):
return [
f"DNS:{self.dns_name}",
f"DNS:{self.pod_name}",
f"DNS:{self.pod_name}.{self.namespace}",
f"DNS:{self.pod_name}.{self.namespace}.svc",
]
@default("ssl_alt_names_include_local")
def _default_ssl_alt_names_include_local(self):
return False
get_pod_url = Callable(
default_value=None,
allow_none=True,
config=True,
help="""Callable to retrieve pod url
Called with (spawner, pod)
Must not be async
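A minimal sketch that mirrors the default non-ssl behaviour (pod IP + port)::
    def get_pod_url(spawner, pod):
        return 'http://{}:{}'.format(pod['status']['podIP'], spawner.port)
    c.KubeSpawner.get_pod_url = get_pod_url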
""",
)
def _get_pod_url(self, pod):
"""Return the pod url
Default: use pod.status.pod_ip (dns_name if ssl is enabled)
"""
if self.get_pod_url:
# custom get_pod_url hook
return self.get_pod_url(self, pod)
if getattr(self, "internal_ssl", False):
proto = "https"
hostname = self.dns_name
else:
proto = "http"
hostname = pod["status"]["podIP"]
if self.pod_connect_ip:
hostname = ".".join(
[
s.rstrip("-")
for s in self._expand_user_properties(self.pod_connect_ip).split(
"."
)
]
)
return "{}://{}:{}".format(
proto,
hostname,
self.port,
)
async def get_pod_manifest(self):
"""
Make a pod manifest that will spawn current user's notebook pod.
"""
if callable(self.uid):
uid = await gen.maybe_future(self.uid(self))
else:
uid = self.uid
if callable(self.gid):
gid = await gen.maybe_future(self.gid(self))
else:
gid = self.gid
if callable(self.fs_gid):
fs_gid = await gen.maybe_future(self.fs_gid(self))
else:
fs_gid = self.fs_gid
if callable(self.supplemental_gids):
supplemental_gids = await gen.maybe_future(self.supplemental_gids(self))
else:
supplemental_gids = self.supplemental_gids
if callable(self.container_security_context):
csc = await gen.maybe_future(self.container_security_context(self))
else:
csc = self.container_security_context
if callable(self.pod_security_context):
psc = await gen.maybe_future(self.pod_security_context(self))
else:
psc = self.pod_security_context
args = self.get_args()
real_cmd = None
if self.cmd:
real_cmd = self.cmd + args
elif args:
self.log.warning(
f"Ignoring arguments when using implicit command from image: {args}."
" Set KubeSpawner.cmd explicitly to support passing cli arguments."
)
labels = self._build_pod_labels(self._expand_all(self.extra_labels))
annotations = self._build_common_annotations(
self._expand_all(self.extra_annotations)
)
return make_pod(
name=self.pod_name,
cmd=real_cmd,
port=self.port,
image=self.image,
image_pull_policy=self.image_pull_policy,
image_pull_secrets=self.image_pull_secrets,
node_selector=self.node_selector,
uid=uid,
gid=gid,
fs_gid=fs_gid,
supplemental_gids=supplemental_gids,
privileged=self.privileged,
allow_privilege_escalation=self.allow_privilege_escalation,
container_security_context=csc,
pod_security_context=psc,
env=self.get_env(),
volumes=self._expand_all(self.volumes),
volume_mounts=self._expand_all(self.volume_mounts),
working_dir=self.working_dir,
labels=labels,
annotations=annotations,
cpu_limit=self.cpu_limit,
cpu_guarantee=self.cpu_guarantee,
mem_limit=self.mem_limit,
mem_guarantee=self.mem_guarantee,
extra_resource_limits=self.extra_resource_limits,
extra_resource_guarantees=self.extra_resource_guarantees,
lifecycle_hooks=self.lifecycle_hooks,
init_containers=self._expand_all(self.init_containers),
service_account=self._expand_all(self.service_account),
automount_service_account_token=self.automount_service_account_token,
extra_container_config=self.extra_container_config,
extra_pod_config=self._expand_all(self.extra_pod_config),
extra_containers=self._expand_all(self.extra_containers),
scheduler_name=self.scheduler_name,
tolerations=self.tolerations,
node_affinity_preferred=self.node_affinity_preferred,
node_affinity_required=self.node_affinity_required,
pod_affinity_preferred=self.pod_affinity_preferred,
pod_affinity_required=self.pod_affinity_required,
pod_anti_affinity_preferred=self.pod_anti_affinity_preferred,
pod_anti_affinity_required=self.pod_anti_affinity_required,
priority_class_name=self.priority_class_name,
ssl_secret_name=self.secret_name if self.internal_ssl else None,
ssl_secret_mount_path=self.secret_mount_path,
logger=self.log,
)
def get_secret_manifest(self, owner_reference):
"""
Make a secret manifest that contains the ssl certificates.
"""
labels = self._build_common_labels(self._expand_all(self.extra_labels))
annotations = self._build_common_annotations(
self._expand_all(self.extra_annotations)
)
return make_secret(
name=self.secret_name,
username=self.user.name,
cert_paths=self.cert_paths,
hub_ca=self.internal_trust_bundles['hub-ca'],
owner_references=[owner_reference],
labels=labels,
annotations=annotations,
)
def get_service_manifest(self, owner_reference):
"""
Make a service manifest for dns.
"""
labels = self._build_common_labels(self._expand_all(self.extra_labels))
annotations = self._build_common_annotations(
self._expand_all(self.extra_annotations)
)
# TODO: validate that the service name
return make_service(
name=self.pod_name,
port=self.port,
servername=self.name,
owner_references=[owner_reference],
labels=labels,
annotations=annotations,
)
def get_pvc_manifest(self):
"""
Make a pvc manifest that will spawn current user's pvc.
"""
labels = self._build_common_labels(self._expand_all(self.storage_extra_labels))
labels.update({'component': 'singleuser-storage'})
annotations = self._build_common_annotations({})
storage_selector = self._expand_all(self.storage_selector)
return make_pvc(
name=self.pvc_name,
storage_class=self.storage_class,
access_modes=self.storage_access_modes,
selector=storage_selector,
storage=self.storage_capacity,
labels=labels,
annotations=annotations,
)
def is_pod_running(self, pod):
"""
Check if the given pod is running
pod must be a dictionary representing a Pod kubernetes API object.
"""
# FIXME: Validate if this is really the best way
is_running = (
pod is not None
and pod["status"]["phase"] == 'Running'
and pod["status"]["podIP"] is not None
and "deletionTimestamp" not in pod["metadata"]
and all([cs["ready"] for cs in pod["status"]["containerStatuses"]])
)
return is_running
def pod_has_uid(self, pod):
"""
Check if the given pod exists and has a UID
pod must be a dictionary representing a Pod kubernetes API object.
"""
return bool(
pod and pod.get("metadata") and pod["metadata"].get("uid") is not None
)
def get_state(self):
"""
Save state required to reinstate this user's pod from scratch
We save the `pod_name`, even though we could easily compute it,
because JupyterHub requires you save *some* state! Otherwise
it assumes your server is dead. This works around that.
It's also useful for cases when the `pod_template` changes between
restarts - this keeps the old pods around.
"""
state = super().get_state()
state['pod_name'] = self.pod_name
return state
def get_env(self):
"""Return the environment dict to use for the Spawner.
See also: jupyterhub.Spawner.get_env
"""
env = super(KubeSpawner, self).get_env()
# JUPYTER_IMAGE_SPEC is deprecated; set both for backward compatibility
env['JUPYTER_IMAGE_SPEC'] = self.image
env['JUPYTER_IMAGE'] = self.image
return env
def load_state(self, state):
"""
Load state from storage required to reinstate this user's pod
Since this runs after `__init__`, this will override the generated `pod_name`
if there's one we have saved in state. These are the same in most cases,
but if the `pod_template` has changed in between restarts, it will no longer
be the case. This allows us to continue serving from the old pods with
the old names.
"""
if 'pod_name' in state:
self.pod_name = state['pod_name']
@_await_pod_reflector
async def poll(self):
"""
Check if the pod is still running.
Uses the same interface as subprocess.Popen.poll(): if the pod is
still running, returns None. If the pod has exited, return the
exit code if we can determine it, or 1 if it has exited but we
don't know how. These are the return values JupyterHub expects.
Note that a clean exit will have an exit code of zero, so it is
necessary to check that the returned value is None, rather than
just Falsy, to determine that the pod is still running.
"""
ref_key = "{}/{}".format(self.namespace, self.pod_name)
pod = self.pod_reflector.pods.get(ref_key, None)
if pod is not None:
if pod["status"]["phase"] == 'Pending':
return None
ctr_stat = pod["status"].get("containerStatuses")
if ctr_stat is None: # No status, no container (we hope)
# This seems to happen when a pod is idle-culled.
return 1
for c in ctr_stat:
# return exit code if notebook container has terminated
if c["name"] == 'notebook':
if "terminated" in c["state"]:
# call self.stop to delete the pod
if self.delete_stopped_pods:
await self.stop(now=True)
return c["state"]["terminated"]["exitCode"]
break
# pod running. Check and update server url if it changed!
# only do this if fully running, not just starting up
# and there's a stored url in self.server to check against
if self.is_pod_running(pod) and self.server:
def _normalize_url(url):
"""Normalize url to be comparable
- parse with urlparse
- Ensures port is always defined
"""
url = urlparse(url)
if url.port is None:
if url.scheme.lower() == "https":
url = url._replace(netloc=f"{url.hostname}:443")
elif url.scheme.lower() == "http":
url = url._replace(netloc=f"{url.hostname}:80")
return url
pod_url = _normalize_url(self._get_pod_url(pod))
server_url = _normalize_url(self.server.url)
# netloc: only compare hostname:port, ignore path
if server_url.netloc != pod_url.netloc:
self.log.warning(
f"Pod {ref_key} url changed! {server_url.netloc} -> {pod_url.netloc}"
)
self.server.ip = pod_url.hostname
self.server.port = pod_url.port
self.db.commit()
# None means pod is running or starting up
return None
# pod doesn't exist or has been deleted
return 1
@property
def events(self):
"""Filter event-reflector to just this pods events
Returns list of all events that match our pod_name
since our ._last_event (if defined).
._last_event is set at the beginning of .start().
"""
if not self.event_reflector:
return []
events = []
for event in self.event_reflector.events:
if event["involvedObject"]["name"] != self.pod_name:
# only consider events for my pod name
continue
if self._last_event and event["metadata"]["uid"] == self._last_event:
# saw last_event marker, ignore any previous events
# and only consider future events
# only include events *after* our _last_event marker
events = []
else:
events.append(event)
return events
async def progress(self):
"""
This function reports back the progress of spawning a pod until
self._start_future has fired.
It works with events parsed by the python kubernetes client;
this is the specification of events that is relevant to understand:
ref: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.20/#event-v1-core
"""
if not self.events_enabled:
return
self.log.debug('progress generator: %s', self.pod_name)
start_future = self._start_future
progress = 0
next_event = 0
break_while_loop = False
while True:
# This logic avoids a race condition. self._start() will be invoked by
# self.start() and almost directly set self._start_future. But,
progress() will be invoked via self.start(), so what happens first?
# Due to this, the logic below is to avoid making an assumption that
# self._start_future was set before this function was called.
if start_future is None and self._start_future:
start_future = self._start_future
# Ensure we capture all events by inspecting events a final time
# after the start_future signal has fired, we could have been in
# .sleep() and missed something.
if start_future and start_future.done():
break_while_loop = True
events = self.events
len_events = len(events)
if next_event < len_events:
for i in range(next_event, len_events):
event = events[i]
# move the progress bar.
# Since we don't know how many events we will get,
# asymptotically approach 90% completion with each event.
# each event gets 33% closer to 90%:
# 30 50 63 72 78 82 84 86 87 88 88 89
progress += (90 - progress) / 3
yield {
'progress': int(progress),
'raw_event': event,
'message': "%s [%s] %s"
% (
event["lastTimestamp"] or event["eventTime"],
event["type"],
event["message"],
),
}
next_event = len_events
if break_while_loop:
break
await asyncio.sleep(1)
def _start_reflector(
self,
kind=None,
reflector_class=ResourceReflector,
replace=False,
**kwargs,
):
"""Start a shared reflector on the KubeSpawner class
kind: key for the reflector (e.g. 'pod' or 'events')
reflector_class: Reflector class to be instantiated
kwargs: extra keyword-args to be relayed to ReflectorClass
If replace=False and the pod reflector is already running,
do nothing.
If replace=True, a running pod reflector will be stopped
and a new one started (for recovering from possible errors).
"""
key = kind
ReflectorClass = reflector_class
def on_reflector_failure():
self.log.critical(
"%s reflector failed, halting Hub.",
key.title(),
)
sys.exit(1)
previous_reflector = self.__class__.reflectors.get(key)
if replace or not previous_reflector:
self.__class__.reflectors[key] = ReflectorClass(
parent=self,
namespace=self.namespace,
on_failure=on_reflector_failure,
**kwargs,
)
asyncio.ensure_future(self.__class__.reflectors[key].start())
if replace and previous_reflector:
# we replaced the reflector, stop the old one
asyncio.ensure_future(previous_reflector.stop())
# return the current reflector
return self.__class__.reflectors[key]
def _start_watching_events(self, replace=False):
"""Start the events reflector
If replace=False and the event reflector is already running,
do nothing.
If replace=True, a running pod reflector will be stopped
and a new one started (for recovering from possible errors).
"""
return self._start_reflector(
kind="events",
reflector_class=EventReflector,
fields={"involvedObject.kind": "Pod"},
omit_namespace=self.enable_user_namespaces,
replace=replace,
)
def _start_watching_pods(self, replace=False):
"""Start the pod reflector
If replace=False and the pod reflector is already running,
do nothing.
If replace=True, a running pod reflector will be stopped
and a new one started (for recovering from possible errors).
"""
pod_reflector_class = PodReflector
pod_reflector_class.labels.update({"component": self.component_label})
return self._start_reflector(
"pods",
pod_reflector_class,
omit_namespace=self.enable_user_namespaces,
replace=replace,
)
def start(self):
"""Thin wrapper around self._start
so we can hold onto a reference for the Future
start returns, which we can use to terminate
.progress()
"""
self._start_future = asyncio.ensure_future(self._start())
return self._start_future
_last_event = None
async def _make_create_pod_request(self, pod, request_timeout):
"""
Make an HTTP request to create the given pod
Designed to be used with exponential_backoff, so returns
True / False on success / failure
"""
try:
self.log.info(
f"Attempting to create pod {pod.metadata.name}, with timeout {request_timeout}"
)
await asyncio.wait_for(
self.api.create_namespaced_pod(
self.namespace,
pod,
),
request_timeout,
)
return True
except asyncio.TimeoutError:
# Just try again
return False
except ApiException as e:
pod_name = pod.metadata.name
if e.status != 409:
# We only want to handle 409 conflict errors
self.log.exception("Failed for %s", pod.to_str())
raise
self.log.info(f'Found existing pod {pod_name}, attempting to kill')
# TODO: this should show up in events
await self.stop(True)
self.log.info(
f'Killed pod {pod_name}, will try starting singleuser pod again'
)
# We tell exponential_backoff to retry
return False
async def _make_create_pvc_request(self, pvc, request_timeout):
# Try to create the pvc. If it succeeds we are good. If it
# returns a 409 indicating it already exists we are good. If
# it returns a 403, indicating a potential quota issue, we need
# to see if the pvc already exists before we decide to raise the
# error for quota being exceeded. This is because quota is
# checked before determining if the PVC needed to be
# created.
pvc_name = pvc.metadata.name
try:
self.log.info(
f"Attempting to create pvc {pvc.metadata.name}, with timeout {request_timeout}"
)
await asyncio.wait_for(
self.api.create_namespaced_persistent_volume_claim(
namespace=self.namespace,
body=pvc,
),
request_timeout,
)
return True
except asyncio.TimeoutError:
# Just try again
return False
except ApiException as e:
if e.status == 409:
self.log.info(
"PVC " + pvc_name + " already exists, so did not create new pvc."
)
return True
elif e.status == 403:
t, v, tb = sys.exc_info()
try:
await self.api.read_namespaced_persistent_volume_claim(
name=pvc_name,
namespace=self.namespace,
)
except ApiException as e:
raise v.with_traceback(tb)
self.log.info(
"PVC "
+ pvc_name
+ " already exists; possibly the quota has been reached."
)
return True
else:
raise
async def _ensure_not_exists(self, kind, name):
"""Ensure a resource does not exist
Request deletion and wait for it to be gone
Designed to be used with exponential_backoff, so returns
True when the resource no longer exists, False otherwise
"""
delete = getattr(self.api, "delete_namespaced_{}".format(kind))
read = getattr(self.api, "read_namespaced_{}".format(kind))
# first, attempt to delete the resource
try:
self.log.info(f"Deleting {kind}/{name}")
await asyncio.wait_for(
delete(namespace=self.namespace, name=name),
self.k8s_api_request_timeout,
)
except asyncio.TimeoutError:
# Just try again
return False
except ApiException as e:
if e.status == 404:
self.log.info(f"{kind}/{name} is gone")
# no such resource, delete successful
return True
self.log.exception("Error deleting {kind}/{name}: {e}")
return False
try:
self.log.info(f"Checking for {kind}/{name}")
await asyncio.wait_for(
read(namespace=self.namespace, name=name), self.k8s_api_request_timeout
)
except asyncio.TimeoutError:
# Just try again
return False
except ApiException as e:
if e.status == 404:
self.log.info(f"{kind}/{name} is gone")
return True
self.log.exception("Error reading {kind}/{name}: {e}")
return False
# if we got here, resource still exists, try again
return False
async def _make_create_resource_request(self, kind, manifest):
"""Make an HTTP request to create the given resource
Designed to be used with exponential_backoff, so returns
True / False on success / failure
"""
create = getattr(self.api, f"create_namespaced_{kind}")
self.log.info(f"Attempting to create {kind} {manifest.metadata.name}")
try:
await asyncio.wait_for(
create(self.namespace, manifest), self.k8s_api_request_timeout
)
except asyncio.TimeoutError:
# Just try again
return False
except ApiException as e:
name = manifest.metadata.name
if e.status == 409:
self.log.info(f'Found existing {kind} {name}')
return True
# We only want to handle 409 conflict errors
self.log.exception("Failed to create %s", manifest.to_str())
raise
else:
return True
async def _start(self):
"""Start the user's pod"""
# load user options (including profile)
await self.load_user_options()
# If we have user_namespaces enabled, create the namespace.
# It's fine if it already exists.
if self.enable_user_namespaces:
await self._ensure_namespace()
# record latest event so we don't include old
# events from previous pods in self.events
# track by order and name instead of uid
# so we get events like deletion of a previously stale
# pod if it's part of this spawn process
events = self.events
if events:
self._last_event = events[-1]["metadata"]["uid"]
if self.storage_pvc_ensure:
pvc = self.get_pvc_manifest()
# If there's a timeout, just let it propagate
await exponential_backoff(
partial(
self._make_create_pvc_request, pvc, self.k8s_api_request_timeout
),
f'Could not create PVC {self.pvc_name}',
# Each req should be given k8s_api_request_timeout seconds.
timeout=self.k8s_api_request_retry_timeout,
)
# If we run into a 409 Conflict error, it means a pod with the
# same name already exists. We stop it, wait for it to stop, and
# try again. We try 4 times, and if it still fails we give up.
pod = await self.get_pod_manifest()
if self.modify_pod_hook:
pod = await gen.maybe_future(self.modify_pod_hook(self, pod))
ref_key = "{}/{}".format(self.namespace, self.pod_name)
# If there's a timeout, just let it propagate
await exponential_backoff(
partial(self._make_create_pod_request, pod, self.k8s_api_request_timeout),
f'Could not create pod {ref_key}',
timeout=self.k8s_api_request_retry_timeout,
)
if self.internal_ssl:
try:
# wait for pod to have uid,
# required for creating owner reference
await exponential_backoff(
lambda: self.pod_has_uid(
self.pod_reflector.pods.get(ref_key, None)
),
f"pod/{ref_key} does not have a uid!",
)
pod = self.pod_reflector.pods[ref_key]
owner_reference = make_owner_reference(
self.pod_name, pod["metadata"]["uid"]
)
# internal ssl, create secret object
secret_manifest = self.get_secret_manifest(owner_reference)
await exponential_backoff(
partial(
self._ensure_not_exists, "secret", secret_manifest.metadata.name
),
f"Failed to delete secret {secret_manifest.metadata.name}",
)
await exponential_backoff(
partial(
self._make_create_resource_request, "secret", secret_manifest
),
f"Failed to create secret {secret_manifest.metadata.name}",
)
service_manifest = self.get_service_manifest(owner_reference)
await exponential_backoff(
partial(
self._ensure_not_exists,
"service",
service_manifest.metadata.name,
),
f"Failed to delete service {service_manifest.metadata.name}",
)
await exponential_backoff(
partial(
self._make_create_resource_request, "service", service_manifest
),
f"Failed to create service {service_manifest.metadata.name}",
)
except Exception:
# cleanup on failure and re-raise
await self.stop(True)
raise
# we need a timeout here even though start itself has a timeout
# in order for this coroutine to finish at some point.
# using the same start_timeout here
# essentially ensures that this timeout should never propagate up
# because the handler will have stopped waiting after
# start_timeout, starting from a slightly earlier point.
try:
await exponential_backoff(
lambda: self.is_pod_running(self.pod_reflector.pods.get(ref_key, None)),
'pod %s did not start in %s seconds!' % (ref_key, self.start_timeout),
timeout=self.start_timeout,
)
except TimeoutError:
if ref_key not in self.pod_reflector.pods:
# if pod never showed up at all,
# restart the pod reflector which may have become disconnected.
self.log.error(
"Pod %s never showed up in reflector, restarting pod reflector",
ref_key,
)
self.log.error("Pods: {}".format(self.pod_reflector.pods))
self._start_watching_pods(replace=True)
raise
pod = self.pod_reflector.pods[ref_key]
self.pod_id = pod["metadata"]["uid"]
if self.event_reflector:
self.log.debug(
'pod %s events before launch: %s',
ref_key,
"\n".join(
[
"%s [%s] %s"
% (
event["lastTimestamp"] or event["eventTime"],
event["type"],
event["message"],
)
for event in self.events
]
),
)
return self._get_pod_url(pod)
async def _make_delete_pod_request(
self, pod_name, delete_options, grace_seconds, request_timeout
):
"""
Make an HTTP request to delete the given pod
Designed to be used with exponential_backoff, so returns
True / False on success / failure
"""
ref_key = "{}/{}".format(self.namespace, pod_name)
self.log.info("Deleting pod %s", ref_key)
try:
await asyncio.wait_for(
self.api.delete_namespaced_pod(
name=pod_name,
namespace=self.namespace,
body=delete_options,
grace_period_seconds=grace_seconds,
),
request_timeout,
)
return True
except asyncio.TimeoutError:
return False
except ApiException as e:
if e.status == 404:
self.log.warning(
"No pod %s to delete. Assuming already deleted.",
ref_key,
)
# If there isn't already a pod, that's ok too!
return True
else:
raise
async def _make_delete_pvc_request(self, pvc_name, request_timeout):
"""
Make an HTTP request to delete the given PVC
Designed to be used with exponential_backoff, so returns
True / False on success / failure
"""
self.log.info("Deleting pvc %s", pvc_name)
try:
await asyncio.wait_for(
self.api.delete_namespaced_persistent_volume_claim(
name=pvc_name,
namespace=self.namespace,
),
request_timeout,
)
return True
except asyncio.TimeoutError:
return False
except ApiException as e:
if e.status == 404:
self.log.warning(
"No pvc %s to delete. Assuming already deleted.",
pvc_name,
)
# If there isn't a PVC to delete, that's ok too!
return True
else:
raise
@_await_pod_reflector
async def stop(self, now=False):
delete_options = client.V1DeleteOptions()
if now:
grace_seconds = 0
else:
grace_seconds = self.delete_grace_period
delete_options.grace_period_seconds = grace_seconds
ref_key = "{}/{}".format(self.namespace, self.pod_name)
await exponential_backoff(
partial(
self._make_delete_pod_request,
self.pod_name,
delete_options,
grace_seconds,
self.k8s_api_request_timeout,
),
f'Could not delete pod {ref_key}',
timeout=self.k8s_api_request_retry_timeout,
)
try:
await exponential_backoff(
lambda: self.pod_reflector.pods.get(ref_key, None) is None,
'pod %s did not disappear in %s seconds!'
% (ref_key, self.start_timeout),
timeout=self.start_timeout,
)
except TimeoutError:
self.log.error(
"Pod %s did not disappear, restarting pod reflector", ref_key
)
self._start_watching_pods(replace=True)
raise
@default('env_keep')
def _env_keep_default(self):
return []
_profile_list = None
def _render_options_form(self, profile_list):
self._profile_list = self._init_profile_list(profile_list)
profile_form_template = Environment(loader=BaseLoader).from_string(
self.profile_form_template
)
return profile_form_template.render(profile_list=self._profile_list)
async def _render_options_form_dynamically(self, current_spawner):
profile_list = await gen.maybe_future(self.profile_list(current_spawner))
profile_list = self._init_profile_list(profile_list)
return self._render_options_form(profile_list)
@default('options_form')
def _options_form_default(self):
"""
Build the form template according to the `profile_list` setting.
Returns:
'' when no `profile_list` has been defined
The rendered template (using jinja2) when `profile_list` is defined.
"""
if not self.profile_list:
return ''
if callable(self.profile_list):
return self._render_options_form_dynamically
else:
return self._render_options_form(self.profile_list)
@default('options_from_form')
def _options_from_form_default(self):
return self._options_from_form
def _options_from_form(self, formdata):
"""get the option selected by the user on the form
This only constructs the user_options dict,
it should not actually load any options.
That is done later in `.load_user_options()`
Args:
formdata: user selection returned by the form
To access to the value, you can use the `get` accessor and the name of the html element,
for example::
formdata.get('profile',[0])
to get the value of the form named "profile", as defined in `form_template`::
<select class="form-control" name="profile"...>
</select>
Returns:
user_options (dict): the selected profile in the user_options form,
e.g. ``{"profile": "cpus-8"}``
"""
return {'profile': formdata.get('profile', [None])[0]}
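    # Illustrative example (hypothetical form submission): each form field
    # arrives as a list of values, so
    #
    #     self._options_from_form({'profile': ['cpus-8']})
    #
    # returns {'profile': 'cpus-8'}, which load_user_options() consumes later.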
async def _load_profile(self, slug):
"""Load a profile by name
Called by load_user_options
"""
# find the profile
default_profile = self._profile_list[0]
for profile in self._profile_list:
if profile.get('default', False):
# explicit default, not the first
default_profile = profile
if profile['slug'] == slug:
break
else:
if slug:
# name specified, but not found
raise ValueError(
"No such profile: %s. Options include: %s"
% (slug, ', '.join(p['slug'] for p in self._profile_list))
)
else:
# no name specified, use the default
profile = default_profile
self.log.debug(
"Applying KubeSpawner override for profile '%s'", profile['display_name']
)
kubespawner_override = profile.get('kubespawner_override', {})
for k, v in kubespawner_override.items():
if callable(v):
v = v(self)
self.log.debug(
".. overriding KubeSpawner value %s=%s (callable result)", k, v
)
else:
self.log.debug(".. overriding KubeSpawner value %s=%s", k, v)
setattr(self, k, v)
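    # A sketch of a profile entry understood by _load_profile (field values
    # are illustrative): plain values in kubespawner_override are set directly
    # on the spawner, while callables receive the spawner and their return
    # value is applied instead.
    #
    #     {
    #         'display_name': 'Large CPU server',
    #         'slug': 'cpus-8',
    #         'default': True,
    #         'kubespawner_override': {
    #             'cpu_limit': 8,
    #             'image': lambda spawner: f'hub/user-{spawner.user.name}:latest',
    #         },
    #     }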
# set of recognised user option keys
# used for warning about ignoring unrecognised options
_user_option_keys = {
'profile',
}
def _init_profile_list(self, profile_list):
# generate missing slug fields from display_name
for profile in profile_list:
if 'slug' not in profile:
profile['slug'] = slugify(profile['display_name'])
return profile_list
async def load_user_options(self):
"""Load user options from self.user_options dict
This can be set via POST to the API or via options_from_form
        The only user option supported by default is 'profile'.
Override in subclasses to support other options.
"""
if self._profile_list is None:
if callable(self.profile_list):
profile_list = await gen.maybe_future(self.profile_list(self))
else:
profile_list = self.profile_list
self._profile_list = self._init_profile_list(profile_list)
selected_profile = self.user_options.get('profile', None)
if self._profile_list:
await self._load_profile(selected_profile)
elif selected_profile:
self.log.warning(
"Profile %r requested, but profiles are not enabled", selected_profile
)
# help debugging by logging any option fields that are not recognized
option_keys = set(self.user_options)
unrecognized_keys = option_keys.difference(self._user_option_keys)
if unrecognized_keys:
self.log.warning(
"Ignoring unrecognized KubeSpawner user_options: %s",
", ".join(map(str, sorted(unrecognized_keys))),
)
async def _ensure_namespace(self):
ns = make_namespace(self.namespace)
api = self.api
try:
await asyncio.wait_for(
api.create_namespace(ns),
self.k8s_api_request_timeout,
)
except ApiException as e:
if e.status != 409:
# It's fine if it already exists
self.log.exception("Failed to create namespace %s", self.namespace)
raise
async def delete_forever(self):
"""Called when a user is deleted.
This can do things like request removal of resources such as persistent storage.
Only called on stopped spawners, and is likely the last action ever taken for the user.
Called on each spawner after deletion,
i.e. on named server deletion (not just stop),
and on the default Spawner when the user is being deleted.
Requires JupyterHub 1.4.1+
        .. versionadded:: 0.17
"""
log_name = self.user.name
if self.name:
log_name = f"{log_name}/{self.name}"
if not self.delete_pvc:
self.log.info(f"Not deleting pvc for {log_name}: {self.pvc_name}")
return
if self.name and '{servername}' not in self.pvc_name_template:
# named server has the same PVC as the default server
# don't delete the default server's PVC!
self.log.info(
f"Not deleting shared pvc for named server {log_name}: {self.pvc_name}"
)
return
await exponential_backoff(
partial(
self._make_delete_pvc_request,
self.pvc_name,
self.k8s_api_request_timeout,
),
f'Could not delete pvc {self.pvc_name}',
timeout=self.k8s_api_request_retry_timeout,
)
|
jupyterhub/kubespawner
|
kubespawner/spawner.py
|
Python
|
bsd-3-clause
| 109,371
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.config import ConfigValidationError
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from txircd.utils import trimStringToByteLength
from zope.interface import implementer
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
@implementer(IPlugin, IModuleData, ICommand)
class AwayCommand(ModuleData, Command):
name = "AwayCommand"
core = True
def userCommands(self) -> List[Tuple[str, int, Command]]:
return [ ("AWAY", 1, self) ]
def actions(self) -> List[Tuple[str, int, Callable]]:
return [ ("commandextra-PRIVMSG", 10, self.notifyAway),
("commandextra-NOTICE", 10, self.notifyAway),
("extrawhois", 10, self.addWhois),
("buildisupport", 1, self.buildISupport),
("usermetadataupdate", 10, self.sendAwayNotice) ]
def verifyConfig(self, config: Dict[str, Any]) -> None:
if "away_length" in config:
if not isinstance(config["away_length"], int) or config["away_length"] < 0:
raise ConfigValidationError("away_length", "invalid number")
elif config["away_length"] > 200:
config["away_length"] = 200
self.ircd.logConfigValidationWarning("away_length", "value is too large", 200)
def notifyAway(self, user: "IRCUser", data: Dict[Any, Any]) -> None:
if "targetusers" not in data:
return
for u in data["targetusers"].keys():
if u.metadataKeyExists("away"):
user.sendMessage(irc.RPL_AWAY, u.nick, u.metadataValue("away"))
def addWhois(self, user: "IRCUser", targetUser: "IRCUser") -> None:
if targetUser.metadataKeyExists("away"):
user.sendMessage(irc.RPL_AWAY, targetUser.nick, targetUser.metadataValue("away"))
def buildISupport(self, data: Dict[str, Union[str, int]]) -> None:
data["AWAYLEN"] = self.ircd.config.get("away_length", 200)
def sendAwayNotice(self, user: "IRCUser", key: str, oldValue: str, value: str, fromServer: Optional["IRCServer"]) -> None:
if key == "away":
if value:
user.sendMessage(irc.RPL_NOWAWAY, "You have been marked as being away")
else:
user.sendMessage(irc.RPL_UNAWAY, "You are no longer marked as being away")
def parseParams(self, user: "IRCUser", params: List[str], prefix: str, tags: Dict[str, Optional[str]]) -> Optional[Dict[Any, Any]]:
if not params:
return {}
message = " ".join(params)
message = trimStringToByteLength(message, self.ircd.config.get("away_length", 200))
return {
"message": message
}
def execute(self, user: "IRCUser", data: Dict[Any, Any]) -> bool:
if "message" in data and data["message"]:
user.setMetadata("away", data["message"])
else:
user.setMetadata("away", None)
return True
awayCommand = AwayCommand()
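# A minimal config sketch for this module (the key comes from verifyConfig
# above; the value is illustrative, and anything above 200 is clamped to 200):
#
#     away_length: 160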
|
Heufneutje/txircd
|
txircd/modules/rfc/cmd_away.py
|
Python
|
bsd-3-clause
| 2,756
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("AdaBoostClassifier" , "FourClass_100" , "db2")
|
antoinecarme/sklearn2sql_heroku
|
tests/classification/FourClass_100/ws_FourClass_100_AdaBoostClassifier_db2_code_gen.py
|
Python
|
bsd-3-clause
| 144
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'spedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package.
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Auquel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.choix import Choix
from primaires.interpreteur.editeur.description import Description
from primaires.interpreteur.editeur.entier import Entier
from primaires.interpreteur.editeur.flag import Flag
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.selection import Selection
from primaires.interpreteur.editeur.uniligne import Uniligne
from primaires.scripting.editeurs.edt_script import EdtScript
from secondaires.magie.constantes import ELEMENTS
from .edt_difficulte import EdtDifficulte
from .supprimer import NSupprimer
class EdtSpedit(Presentation):
"""Classe définissant l'éditeur de sort 'spedit'.
"""
nom = "spedit"
def __init__(self, personnage, sort):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, sort)
if personnage and sort:
self.construire(sort)
def __getnewargs__(self):
return (None, None)
def construire(self, sort):
"""Construction de l'éditeur"""
        # Name
nom = self.ajouter_choix("nom", "n", Uniligne, sort, "nom")
nom.parent = self
nom.prompt = "Nom du sort (sans article) : "
nom.apercu = "{objet.nom}"
nom.aide_courte = \
"Entrez le |ent|nom|ff| du sort ou |cmd|/|ff| pour revenir " \
"à la fenêtre parente.\n\nNom actuel : |bc|{objet.nom}|ff|"
# Description
description = self.ajouter_choix("description", "d", Description, \
sort)
description.parent = self
description.apercu = "{objet.description.paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description du sort {}".format(sort.cle).ljust(76) + \
"|ff||\n" + self.opts.separateur
        # Tribute points
tribut = self.ajouter_choix("points de tribut", "tr", Entier, sort,
"points_tribut", 1)
tribut.parent = self
tribut.prompt = "Points de tribut nécessaire pour apprendre le sort : "
tribut.apercu = "{objet.points_tribut}"
tribut.aide_courte = \
"Entrez le |ent|nombre de points de tribut|ff| nécessaires "\
"pour apprendre le sort\nou |cmd|/|ff| pour revenir à la " \
"fenêtre parente.\n\nPoints de tribut actuels : " \
"|bc|{objet.points_tribut}|ff|"
        # Elements
elements = self.ajouter_choix("eléments", "e", Selection, sort,
"elements", ELEMENTS)
elements.parent = self
elements.apercu = "{objet.str_elements}"
elements.aide_courte = \
"Entrez un |ent|élément|ff| pour l'ajouter " \
"ou le retirer\nou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\n" \
"Éléments existants : |cmd|" + "|ff|, |cmd|".join(
ELEMENTS) + "\n" \
"Éléments actuels : |bc|{objet.str_elements}|ff|"
        # Spell type
types = ["destruction", "alteration", "invocation", "illusion"]
type = self.ajouter_choix("type de sort", "s", Choix, sort,
"type", types)
type.parent = self
type.prompt = "Type de sort : "
type.apercu = "{objet.type}"
type.aide_courte = \
"Entrez le |ent|type|ff| du sort ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\nTypes disponibles : |cmd|" \
"{}|ff|.\n\nType actuel : |bc|{{objet.type}}|ff|".format(
"|ff|, |cmd|".join(types))
        # Target
types = ["aucune", "personnage", "objet", "salle"]
cible = self.ajouter_choix("type de cible", "c", Choix, sort,
"type_cible", types)
cible.parent = self
cible.prompt = "Type de cible : "
cible.apercu = "{objet.type_cible}"
cible.aide_courte = \
"Entrez le |ent|type de cible|ff| du sort ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\nTypes disponibles : |cmd|" \
"{}|ff|.\n\nType actuel : |bc|{{objet.type_cible}}|ff|".format(
"|ff|, |cmd|".join(types))
# Stats
stats = self.ajouter_choix("stats", "st", Selection, sort,
"stats", ("agilite", "intelligence", "sensibilite"))
stats.parent = self
stats.apercu = "{objet.str_stats}"
stats.aide_courte = \
"Entrez une |ent|stat|ff| pour l'ajouter " \
"ou la retirer\nou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\n" \
"stats actuelles : |bc|{objet.str_stats}|ff|"
        # Difficulty
difficulte = self.ajouter_choix("difficulté", "i", Entier, sort,
"difficulte", 0, 100)
difficulte.parent = self
difficulte.prompt = "Difficulté d'apprentissage : "
difficulte.apercu = "{objet.difficulte}"
difficulte.aide_courte = \
"Paramétrez la |ent|difficulté|ff| d'apprentissage du sort " \
"entre |cmd|0|ff| et |cmd|100|ff| ou entrez\n|cmd|/|ff| pour " \
"revenir à la fenêtre parente. |cmd|100|ff| signifie que le sort " \
"ne peut pas\nêtre appris par la pratique.\n\n" \
"Difficulté actuelle : |bc|{objet.difficulte}|ff|"
        # Cost
cout = self.ajouter_choix("coût", "o", Entier, sort, "cout")
cout.parent = self
cout.prompt = "Coùt en mana : "
cout.apercu = "{objet.cout}"
cout.aide_courte = \
"Entrez la |ent|quantité|ff| d'énergie magique nécessaire pour " \
"lancer ce sort ou |cmd|/|ff| pour\nrevenir à la fenêtre " \
"parente.\n\n" \
"Coût : |bc|{objet.cout}|ff|"
        # Duration
duree = self.ajouter_choix("durée de concentration", "u", Entier, sort,
"duree", 1)
duree.parent = self
duree.prompt = "Durée de concentration : "
duree.apercu = "{objet.duree}"
duree.aide_courte = \
"Entrez la |ent|durée|ff| de concentration du sort, en " \
"secondes, ou |cmd|/|ff| pour revenir à\nla fenêtre parente. La " \
"durée diminue automatiquement quand la maîtrise du sort\n" \
"augmente ; la valeur entrée correspond au temps maximum.\n\n" \
"Durée actuelle : |bc|{objet.duree}|ff|"
        # Offensive
offensif = self.ajouter_choix("offensif", "of", Flag, sort,
"offensif")
offensif.parent = self
# Distance
distance = self.ajouter_choix("distance", "t", Flag, sort,
"distance")
distance.parent = self
# Script
scripts = self.ajouter_choix("scripts", "sc", EdtScript,
sort.script)
scripts.parent = self
        # Deletion
suppression = self.ajouter_choix("supprimer", "sup", NSupprimer, \
sort)
suppression.parent = self
suppression.aide_courte = "Souhaitez-vous réellement supprimer " \
"le sort {} ?".format(sort.nom)
suppression.action = "magie.supprimer_sort"
suppression.confirme = "Le sort {} a bien été supprimé.".format(
sort.nom)
|
vlegoff/tsunami
|
src/secondaires/magie/editeurs/spedit/__init__.py
|
Python
|
bsd-3-clause
| 9,326
|
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decorators for Chromium port of Rietveld."""
import mimetypes
import sha
from google.appengine.api import memcache
from django.http import HttpResponseForbidden
from . import decorators as deco
from . import models_chromium
from . import responses
def binary_required(func):
"""Decorator that processes the content argument.
Attributes set on the request:
content: a Content entity.
"""
@deco.patch_required
def binary_wrapper(request, content_type, *args, **kwds):
if content_type == "0":
content_key = request.patch.content_key
elif content_type == "1":
content_key = request.patch.patched_content_key
if not content_key or not content_key.get().data:
# The file was not modified. It was likely moved without modification.
# Return the original file.
content_key = request.patch.content_key
else:
# Other values are erroneous so request.content won't be set.
return responses.HttpTextResponse(
'Invalid content type: %s, expected 0 or 1' % content_type,
status=404)
request.mime_type = mimetypes.guess_type(request.patch.filename)[0]
request.content = content_key.get()
return func(request, *args, **kwds)
return binary_wrapper
def key_required(func):
"""Decorator that insists that you are using a specific key."""
@deco.require_methods('POST')
def key_wrapper(request, *args, **kwds):
key = request.POST.get('password')
if request.user or not key:
      return HttpResponseForbidden('You must be an admin for this function')
value = memcache.get('key_required')
if not value:
obj = models_chromium.Key.query().get()
if not obj:
# Create a dummy value so it can be edited from the datastore admin.
obj = models_chromium.Key(hash='invalid hash')
obj.put()
value = obj.hash
memcache.add('key_required', value, 60)
if sha.new(key).hexdigest() != value:
      return HttpResponseForbidden('You must be an admin for this function')
return func(request, *args, **kwds)
return key_wrapper
|
nicko96/Chrome-Infra
|
appengine/chromium_rietveld/codereview/decorators_chromium.py
|
Python
|
bsd-3-clause
| 2,669
|
import unittest
import doctest
from pprint import pprint
from interlude import interact
import lxml.etree as etree
optionflags = doctest.NORMALIZE_WHITESPACE | \
doctest.ELLIPSIS | \
doctest.REPORT_ONLY_FIRST_FAILURE #| \
# doctest.REPORT_NDIFF
TESTFILES = [
'api.rst',
'script.rst',
]
def fxml(xml):
et = etree.fromstring(xml)
return etree.tostring(et, pretty_print=True)
def pxml(xml):
print fxml(xml)
def test_suite():
return unittest.TestSuite([
doctest.DocFileSuite(
filename,
optionflags=optionflags,
globs={'interact': interact,
'pprint': pprint,
'pxml': pxml},
) for filename in TESTFILES
])
if __name__ == '__main__': # pragma NO COVERAGE
unittest.main(defaultTest='test_suite') # pragma NO COVERAGE
|
bluedynamics/vdexcsv
|
src/vdexcsv/tests.py
|
Python
|
bsd-3-clause
| 886
|
#coding:utf8
from __future__ import absolute_import, unicode_literals
import msgpack
from .base import BaseSerializer
class MSGPackSerializer(BaseSerializer):
def dumps(self, value):
return msgpack.dumps(value)
def loads(self, value):
return msgpack.loads(value, encoding="utf-8")
|
glumu/django-redis-cluster
|
django_redis_cluster/serializers/msgpack.py
|
Python
|
bsd-3-clause
| 311
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'resources/about_dialog.ui'
#
# Created: Sat Apr 8 00:53:02 2017
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_AboutDialog(object):
def setupUi(self, AboutDialog):
AboutDialog.setObjectName("AboutDialog")
AboutDialog.setWindowModality(QtCore.Qt.ApplicationModal)
AboutDialog.resize(400, 150)
AboutDialog.setMinimumSize(QtCore.QSize(360, 110))
AboutDialog.setMaximumSize(QtCore.QSize(400, 150))
AboutDialog.setFocusPolicy(QtCore.Qt.NoFocus)
AboutDialog.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
AboutDialog.setModal(True)
self.gridLayout = QtGui.QGridLayout(AboutDialog)
self.gridLayout.setObjectName("gridLayout")
self.label = QtGui.QLabel(AboutDialog)
font = QtGui.QFont()
font.setPointSize(14)
font.setWeight(75)
font.setBold(True)
self.label.setFont(font)
self.label.setTextFormat(QtCore.Qt.AutoText)
self.label.setAlignment(QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.label_2 = QtGui.QLabel(AboutDialog)
self.label_2.setAlignment(QtCore.Qt.AlignBottom|QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 2, 0, 1, 1)
self.label_3 = QtGui.QLabel(AboutDialog)
font = QtGui.QFont()
font.setUnderline(True)
self.label_3.setFont(font)
self.label_3.setCursor(QtCore.Qt.PointingHandCursor)
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 1, 0, 1, 1)
self.retranslateUi(AboutDialog)
QtCore.QMetaObject.connectSlotsByName(AboutDialog)
def retranslateUi(self, AboutDialog):
AboutDialog.setWindowTitle(QtGui.QApplication.translate("AboutDialog", "About", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("AboutDialog", "Open Funktion Gen\n"
"Wavetable Editor", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("AboutDialog", "Copyright 2017 Tim Savage", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("AboutDialog", "https://github.com/timsavage/funktion-generator", None, QtGui.QApplication.UnicodeUTF8))
|
timsavage/funktion-generator
|
utils/wave-editor/wave_editor/_ui/about_dialog.py
|
Python
|
bsd-3-clause
| 2,661
|
"""The WaveBlocks Project
Compute the transformation to the eigen basis for wavefunction.
@author: R. Bourquin
@copyright: Copyright (C) 2012, 2016 R. Bourquin
@license: Modified BSD License
"""
from WaveBlocksND import BlockFactory
from WaveBlocksND import WaveFunction
from WaveBlocksND import BasisTransformationWF
def transform_wavefunction_to_eigen(iomin, iomout, blockidin=0, blockidout=0):
"""Compute the transformation to the eigenbasis for a wavefunction.
Save the result back to a file.
    :param iomin: An :py:class:`IOManager` instance providing the simulation data.
    :param iomout: An :py:class:`IOManager` instance for saving the transformed data.
:param blockidin: The data block from which the values are read. Default is `0`.
:param blockidout: The data block to which the values are written. Default is `0`.
"""
parameters = iomin.load_parameters()
# Number of time steps we saved
timesteps = iomin.load_wavefunction_timegrid(blockid=blockidin)
nrtimesteps = timesteps.shape[0]
iomout.add_wavefunction(parameters, timeslots=nrtimesteps, blockid=blockidout)
# The grid on the domain
grid = BlockFactory().create_grid(parameters)
# The potential used
Potential = BlockFactory().create_potential(parameters)
# Basis transformator
BT = BasisTransformationWF(Potential)
BT.set_grid(grid)
# And two empty wavefunctions
WF = WaveFunction(parameters)
WF.set_grid(grid)
# Iterate over all timesteps
for i, step in enumerate(timesteps):
print(" Compute eigentransform at timestep %d" % step)
# Retrieve simulation data
values = iomin.load_wavefunction(timestep=step, blockid=blockidin)
values = [values[j, ...] for j in range(parameters["ncomponents"])]
WF.set_values(values)
# Project wavefunction values to eigenbasis
BT.transform_to_eigen(WF)
# Save the transformed values
iomout.save_wavefunction(WF.get_values(), timestep=step, blockid=blockidout)
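# A minimal usage sketch (assuming two IOManager instances opened elsewhere):
#
#     transform_wavefunction_to_eigen(iomin, iomout, blockidin=0, blockidout=0)
#
# This reads every saved timestep from `iomin`, projects the wavefunction onto
# the eigenbasis of the configured potential, and writes the result to `iomout`.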
|
WaveBlocks/WaveBlocksND
|
WaveBlocksND/Interface/EigentransformWavefunction.py
|
Python
|
bsd-3-clause
| 2,041
|
from django.conf.urls import url
from .views import BaseView
urlpatterns = [
url(r"^$", BaseView.as_view(), name="base-view-2"),
]
|
nephila/djangocms-apphook-setup
|
tests/sample_app_2/urls.py
|
Python
|
bsd-3-clause
| 137
|
from datetime import date, timedelta, datetime
import time
def get_first_day(dt, d_years=0, d_months=0):
# d_years, d_months are "deltas" to apply to dt
y, m = dt.year + d_years, dt.month + d_months
a, m = divmod(m-1, 12)
return date(y+a, m+1, 1)
def get_last_day(dt):
return get_first_day(dt, 0, 1) + timedelta(-1)
def str_to_date(value):
"""
Convert string to datatime object
"""
if not value:
return value
if value.__class__.__name__ in ['date']:
return value
return datetime.strptime(value, "%Y-%m-%d").date()
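if __name__ == '__main__':
    # Illustrative usage of the helpers above (dates are example values):
    print(get_first_day(date(2024, 2, 15)))        # 2024-02-01
    print(get_first_day(date(2024, 2, 15), 0, 1))  # 2024-03-01 (next month)
    print(get_last_day(date(2024, 2, 15)))         # 2024-02-29 (leap year)
    print(str_to_date("2024-02-15"))               # 2024-02-15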
|
russelmahmud/mess-account
|
core/utils.py
|
Python
|
bsd-3-clause
| 581
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models import CMSPlugin, Page, get_plugin_media_path
from os.path import basename
class Picture(CMSPlugin):
"""
A Picture with or without a link
"""
CENTER = "center"
LEFT = "left"
RIGHT = "right"
FLOAT_CHOICES = ((CENTER, _("center")),
(LEFT, _("left")),
(RIGHT, _("right")),
)
image = models.ImageField(_("image"), upload_to=get_plugin_media_path)
url = models.CharField(_("link"), max_length=255, blank=True, null=True, help_text=_("if present image will be clickable"))
page_link = models.ForeignKey(Page, verbose_name=_("page"), null=True, blank=True, help_text=_("if present image will be clickable"))
alt = models.CharField(_("alternate text"), max_length=255, blank=True, null=True, help_text=_("textual description of the image"))
longdesc = models.CharField(_("long description"), max_length=255, blank=True, null=True, help_text=_("additional description of the image"))
float = models.CharField(_("side"), max_length=10, blank=True, null=True, choices=FLOAT_CHOICES)
class Meta:
db_table = 'cmsplugin_picture'
def __unicode__(self):
if self.alt:
return self.alt[:40]
elif self.image:
# added if, because it raised attribute error when file wasn't defined
try:
return u"%s" % basename(self.image.path)
except:
pass
return "<empty>"
|
pbs/django-cms
|
cms/plugins/picture/models.py
|
Python
|
bsd-3-clause
| 1,579
|
from __future__ import print_function
from glyphNameFormatter.tools import camelCase
doNotProcessAsLigatureRanges = [
(0xfc5e, 0xfc63),
(0xfe70, 0xfe74),
#(0xfc5e, 0xfc61),
(0xfcf2, 0xfcf4),
(0xfe76, 0xfe80),
]
def process(self):
# Specifically: do not add suffixes to these ligatures,
# they're really arabic marks
for a, b in doNotProcessAsLigatureRanges:
if a <= self.uniNumber <= b:
self.replace('TAIL FRAGMENT', "kashida Fina")
self.replace('INITIAL FORM', "init")
self.replace('MEDIAL FORM', "medi")
self.replace('FINAL FORM', "fina")
self.replace('ISOLATED FORM', "isol")
self.replace('WITH SUPERSCRIPT', "")
self.replace('WITH', "")
self.replace("LIGATURE", "")
self.replace("ARABIC", "")
self.replace("SYMBOL", "")
self.replace("LETTER", "")
self.lower()
self.camelCase()
return True
return False
if __name__ == "__main__":
from glyphNameFormatter import GlyphName
print("\ndoNotProcessAsLigatureRanges", doNotProcessAsLigatureRanges)
odd = 0xfe76
for a, b in doNotProcessAsLigatureRanges:
for u in range(a,b+1):
try:
g = GlyphName(uniNumber=u)
n = g.getName()
print(hex(u), n, g.uniName)
except:
import traceback
traceback.print_exc()
|
LettError/glyphNameFormatter
|
Lib/glyphNameFormatter/rangeProcessors/helper_arabic_ligature_exceptions.py
|
Python
|
bsd-3-clause
| 1,516
|
import datetime
import factory
from django.db.models import signals
from elections.models import (
Election,
ModerationHistory,
ElectionType,
ElectedRole,
ModerationStatus,
ModerationStatuses,
)
from organisations.tests.factories import (
OrganisationFactory,
OrganisationDivisionFactory,
DivisionGeographyFactory,
)
class ElectionTypeFactory(factory.django.DjangoModelFactory):
class Meta:
model = ElectionType
django_get_or_create = ("election_type",)
name = "Local elections"
election_type = "local"
# default_voting_system
class ElectedRoleFactory(factory.django.DjangoModelFactory):
class Meta:
model = ElectedRole
django_get_or_create = ("election_type",)
election_type = factory.SubFactory(ElectionTypeFactory)
organisation = factory.SubFactory(OrganisationFactory)
elected_title = "Councillor"
elected_role_name = "Councillor"
@factory.django.mute_signals(signals.post_save)
class ElectionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Election
django_get_or_create = ("election_id",)
@classmethod
def _get_manager(cls, model_class):
return model_class.private_objects
election_id = factory.Sequence(lambda n: "local.place-name-%d.2017-03-23" % n)
election_title = factory.Sequence(lambda n: "Election %d" % n)
election_type = factory.SubFactory(ElectionTypeFactory)
poll_open_date = "2017-03-23"
organisation = factory.SubFactory(OrganisationFactory)
elected_role = factory.SubFactory(ElectedRoleFactory)
division = factory.SubFactory(OrganisationDivisionFactory)
division_geography = factory.SubFactory(DivisionGeographyFactory)
organisation_geography = None
seats_contested = 1
seats_total = 1
group = factory.SubFactory(
"elections.tests.factories.ElectionFactory",
election_id="local.2017-03-23",
group=None,
group_type="election",
)
group_type = None
class ModerationStatusFactory(factory.django.DjangoModelFactory):
class Meta:
model = ModerationStatus
django_get_or_create = ("short_label",)
short_label = ModerationStatuses.approved.value
long_label = "long label"
class ModerationHistoryFactory(factory.django.DjangoModelFactory):
class Meta:
model = ModerationHistory
election = factory.SubFactory(ElectionFactory)
status = factory.SubFactory(ModerationStatusFactory)
    # Evaluate now() per instance rather than once at class definition time
    created = factory.LazyFunction(datetime.datetime.now)
    modified = factory.LazyFunction(datetime.datetime.now)
class ElectionWithStatusFactory(ElectionFactory):
moderation_status = factory.RelatedFactory(
ModerationHistoryFactory,
"election",
status__short_label=ModerationStatuses.approved.value,
)
def related_status(status):
return factory.RelatedFactory(
ModerationHistoryFactory,
"election",
status__short_label=ModerationStatuses(status.capitalize()).value,
)
|
DemocracyClub/EveryElection
|
every_election/apps/elections/tests/factories.py
|
Python
|
bsd-3-clause
| 2,998
|
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
import commonware.log
import waffle
from users.models import UserProfile
from .models import Access
from .oauth import OAuthServer
log = commonware.log.getLogger('z.api')
class RestOAuthMiddleware(object):
"""
This is based on https://github.com/amrox/django-tastypie-two-legged-oauth
with permission.
"""
def process_request(self, request):
# Do not process the request if the flag is off.
if not waffle.switch_is_active('drf'):
return
path_ = request.get_full_path()
try:
_, lang, platform, api, rest = path_.split('/', 4)
except ValueError:
return
# For now we only want these to apply to the API.
if not api.lower() == 'api':
return
if not settings.SITE_URL:
raise ValueError('SITE_URL is not specified')
# Set up authed_from attribute.
if not hasattr(request, 'authed_from'):
request.authed_from = []
auth_header_value = request.META.get('HTTP_AUTHORIZATION')
if (not auth_header_value and
'oauth_token' not in request.META['QUERY_STRING']):
self.user = AnonymousUser()
log.info('No HTTP_AUTHORIZATION header')
return
        # Build the authorization header for OAuth verification.
        auth_header = {'Authorization': auth_header_value}
method = getattr(request, 'signed_method', request.method)
oauth = OAuthServer()
# Only 2-legged OAuth scenario.
log.info('Trying 2 legged OAuth')
try:
valid, oauth_request = oauth.verify_request(
request.build_absolute_uri(),
method, headers=auth_header,
require_resource_owner=False)
except ValueError:
log.error('ValueError on verifying_request', exc_info=True)
return
if not valid:
log.error(u'Cannot find APIAccess token with that key: %s'
% oauth_request._params[u'oauth_consumer_key'])
return
uid = Access.objects.filter(
key=oauth_request.client_key).values_list(
'user_id', flat=True)[0]
if not uid:
log.error(u'Cannot find Access with that key: %s'
% oauth_request.client_key)
return
request.user = UserProfile.objects.get(pk=uid)
# But you cannot have one of these roles.
denied_groups = set(['Admins'])
roles = set(request.user.groups.values_list('name', flat=True))
if roles and roles.intersection(denied_groups):
log.info(u'Attempt to use API with denied role, user: %s'
% request.user.pk)
# Set request attributes back to None.
request.user = None
return
if request.user:
request.authed_from.append('RestOAuth')
log.info('Successful OAuth with user: %s' % request.user)
|
mdaif/olympia
|
apps/api/middleware.py
|
Python
|
bsd-3-clause
| 3,064
|
from setuptools import setup, find_packages
setup(
name = 'django-dzenlog',
version = __import__('django_dzenlog').__version__,
    description = '''Django Dzenlog is a set of models and templates, which can be '''
                  '''used to create blogs with different kinds of media.''',
    long_description = '''
Django Dzenlog is a set of models and templates, which can be
used to create blogs with different kinds of media.
Dzenlog relies on a newer Django feature -- model inheritance --
so you can derive your own models from dzenlog's models and
add the actual content.
This is a very effective way to reuse code, because dzenlog
takes care of all the publishing options; all you need
to do is describe the details specific to your particular blog.
For example, you can create a blog with two post types: textual
posts and links to internet resources. In that case, all you need
is to define two models: `TextPost` and `LinkPost`. Each of these
models should be derived from `django_dzenlog.models.GeneralPost`.
Features
========
* Simple way to add new types of posts.
* All post types can be aggregated in one feed.
* Separate feed for each post type.
* Example projects, which use most features of this application.
* Tagging support.
''',
keywords = 'django apps blogging',
license = 'New BSD License',
author = 'Alexander Artemenko',
author_email = 'svetlyak.40wt@gmail.com',
url = 'http://github.com/svetlyak40wt/django-dzenlog/',
install_requires = [],
extras_require = {
'tagging': ['tagging>=0.3-pre'],
},
dependency_links = ['http://pypi.aartemenko.com', ],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages = find_packages(exclude=['example*']),
package_data = {
'templates': ['*.html'],
},
include_package_data = True,
zip_safe = False,
)
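# A sketch of the reuse pattern described in long_description above (the
# field definitions are illustrative; only GeneralPost comes from this
# package):
#
#     from django.db import models
#     from django_dzenlog.models import GeneralPost
#
#     class TextPost(GeneralPost):
#         body = models.TextField()
#
#     class LinkPost(GeneralPost):
#         url = models.URLField()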
|
svetlyak40wt/django-dzenlog
|
setup.py
|
Python
|
bsd-3-clause
| 2,152
|
class WorkflowRegistry(object):
def __init__(self):
self.workflows = {}
self.class_index = {}
def add(self, name, cls):
self.workflows[id(cls)] = self.workflows.get(id(cls), set())
self.workflows[id(cls)].add(name)
self.class_index[id(cls)] = cls
def get_class_fields(self, model):
return self.workflows[id(model)]
workflow_registry = WorkflowRegistry()
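if __name__ == '__main__':
    # Minimal usage sketch (the class below is hypothetical): register a
    # workflow field name against a class, then look up its field names.
    class ExampleModel(object):
        pass
    workflow_registry.add("status", ExampleModel)
    print(workflow_registry.get_class_fields(ExampleModel))  # {'status'}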
|
javrasya/django-river
|
river/core/workflowregistry.py
|
Python
|
bsd-3-clause
| 418
|
from sympy import (
Abs, Chi, Ci, CosineTransform, Dict, Ei, Eq, FallingFactorial, FiniteSet,
Float, FourierTransform, Function, Integral, Interval,
InverseCosineTransform, InverseFourierTransform,
InverseLaplaceTransform, InverseMellinTransform, InverseSineTransform,
Lambda, LaplaceTransform, Limit, Matrix, Max, MellinTransform, Min,
Order, Piecewise, Poly, ring, field, ZZ, Product, Range, Rational,
RisingFactorial, RootOf, RootSum, S, Shi, Si, SineTransform, Subs,
Sum, Symbol, TransformationSet, Tuple, Union, arg, asin,
assoc_laguerre, assoc_legendre, binomial, catalan, ceiling,
chebyshevt, chebyshevu, conjugate, cot, coth, diff, dirichlet_eta,
exp, expint, factorial, factorial2, floor, gamma, gegenbauer, hermite,
hyper, im, im, jacobi, laguerre, legendre, lerchphi, log, lowergamma,
meijerg, oo, polar_lift, polylog, re, re, root, sin, sqrt, symbols,
uppergamma, zeta, subfactorial)
from sympy.abc import mu, tau
from sympy.printing.latex import latex
from sympy.utilities.pytest import XFAIL, raises
from sympy.functions import DiracDelta, Heaviside, KroneckerDelta, LeviCivita
from sympy.logic import Implies
from sympy.logic.boolalg import And, Or
from sympy.core.trace import Tr
x, y, z, t, a, b = symbols('x y z t a b')
k, n = symbols('k n', integer=True)
def test_printmethod():
class R(Abs):
def _latex(self, printer):
return "foo(%s)" % printer._print(self.args[0])
assert latex(R(x)) == "foo(x)"
class R(Abs):
def _latex(self, printer):
return "foo"
assert latex(R(x)) == "foo"
def test_latex_basic():
assert latex(1 + x) == "x + 1"
assert latex(x**2) == "x^{2}"
assert latex(x**(1 + x)) == "x^{x + 1}"
assert latex(x**3 + x + 1 + x**2) == "x^{3} + x^{2} + x + 1"
assert latex(2*x*y) == "2 x y"
assert latex(2*x*y, mul_symbol='dot') == r"2 \cdot x \cdot y"
assert latex(sqrt(x)) == r"\sqrt{x}"
assert latex(x**Rational(1, 3)) == r"\sqrt[3]{x}"
assert latex(sqrt(x)**3) == r"x^{\frac{3}{2}}"
assert latex(sqrt(x), itex=True) == r"\sqrt{x}"
assert latex(x**Rational(1, 3), itex=True) == r"\root{3}{x}"
assert latex(sqrt(x)**3, itex=True) == r"x^{\frac{3}{2}}"
assert latex(x**Rational(3, 4)) == r"x^{\frac{3}{4}}"
assert latex(x**Rational(3, 4), fold_frac_powers=True) == "x^{3/4}"
assert latex((x + 1)**Rational(3, 4)) == \
r"\left(x + 1\right)^{\frac{3}{4}}"
assert latex((x + 1)**Rational(3, 4), fold_frac_powers=True) == \
r"\left(x + 1\right)^{3/4}"
assert latex(1.5e20*x) == r"1.5 \times 10^{20} x"
assert latex(1.5e20*x, mul_symbol='dot') == r"1.5 \cdot 10^{20} \cdot x"
assert latex(1/sin(x)) == r"\frac{1}{\sin{\left (x \right )}}"
assert latex(sin(x)**-1) == r"\frac{1}{\sin{\left (x \right )}}"
assert latex(sin(x)**Rational(3, 2)) == \
r"\sin^{\frac{3}{2}}{\left (x \right )}"
assert latex(sin(x)**Rational(3, 2), fold_frac_powers=True) == \
r"\sin^{3/2}{\left (x \right )}"
assert latex(~x) == r"\neg x"
assert latex(x & y) == r"x \wedge y"
assert latex(x & y & z) == r"x \wedge y \wedge z"
assert latex(x | y) == r"x \vee y"
assert latex(x | y | z) == r"x \vee y \vee z"
assert latex((x & y) | z) == r"z \vee \left(x \wedge y\right)"
assert latex(Implies(x, y)) == r"x \Rightarrow y"
assert latex(~(x >> ~y)) == r"\neg (x \Rightarrow \neg y)"
assert latex(~x, symbol_names={x: "x_i"}) == r"\neg x_i"
assert latex(x & y, symbol_names={x: "x_i", y: "y_i"}) == \
r"x_i \wedge y_i"
assert latex(x & y & z, symbol_names={x: "x_i", y: "y_i", z: "z_i"}) == \
r"x_i \wedge y_i \wedge z_i"
assert latex(x | y, symbol_names={x: "x_i", y: "y_i"}) == r"x_i \vee y_i"
assert latex(x | y | z, symbol_names={x: "x_i", y: "y_i", z: "z_i"}) == \
r"x_i \vee y_i \vee z_i"
assert latex((x & y) | z, symbol_names={x: "x_i", y: "y_i", z: "z_i"}) == \
r"z_i \vee \left(x_i \wedge y_i\right)"
assert latex(Implies(x, y), symbol_names={x: "x_i", y: "y_i"}) == \
r"x_i \Rightarrow y_i"
def test_latex_Float():
assert latex(Float(1.0e100)) == r"1.0 \times 10^{100}"
assert latex(Float(1.0e-100)) == r"1.0 \times 10^{-100}"
assert latex(Float(1.0e-100), mul_symbol="dot") == r"1.0 \cdot 10^{-100}"
assert latex(1.0*oo) == r"\infty"
assert latex(-1.0*oo) == r"- \infty"
def test_latex_symbols():
Gamma, lmbda, rho = symbols('Gamma, lambda, rho')
mass, volume = symbols('mass, volume')
assert latex(Gamma + lmbda) == r"\Gamma + \lambda"
assert latex(Gamma * lmbda) == r"\Gamma \lambda"
assert latex(Symbol('q21')) == r"q_{21}"
assert latex(Symbol('epsilon0')) == r"\epsilon_{0}"
assert latex(Symbol('91')) == r"91"
assert latex(Symbol('alpha_new')) == r"\alpha_{new}"
assert latex(Symbol('C^orig')) == r"C^{orig}"
@XFAIL
def test_latex_symbols_failing():
rho, mass, volume = symbols('rho, mass, volume')
assert latex(
volume * rho == mass) == r"\rho \mathrm{volume} = \mathrm{mass}"
assert latex(volume / mass * rho == 1) == r"\rho \mathrm{volume} {\mathrm{mass}}^{(-1)} = 1"
assert latex(mass**3 * volume**3) == r"{\mathrm{mass}}^{3} \cdot {\mathrm{volume}}^{3}"
def test_latex_functions():
assert latex(exp(x)) == "e^{x}"
assert latex(exp(1) + exp(2)) == "e + e^{2}"
f = Function('f')
assert latex(f(x)) == '\\operatorname{f}{\\left (x \\right )}'
beta = Function('beta')
assert latex(beta(x)) == r"\beta{\left (x \right )}"
assert latex(sin(x)) == r"\sin{\left (x \right )}"
assert latex(sin(x), fold_func_brackets=True) == r"\sin {x}"
assert latex(sin(2*x**2), fold_func_brackets=True) == \
r"\sin {2 x^{2}}"
assert latex(sin(x**2), fold_func_brackets=True) == \
r"\sin {x^{2}}"
assert latex(asin(x)**2) == r"\operatorname{asin}^{2}{\left (x \right )}"
assert latex(asin(x)**2, inv_trig_style="full") == \
r"\arcsin^{2}{\left (x \right )}"
assert latex(asin(x)**2, inv_trig_style="power") == \
r"\sin^{-1}{\left (x \right )}^{2}"
assert latex(asin(x**2), inv_trig_style="power",
fold_func_brackets=True) == \
r"\sin^{-1} {x^{2}}"
assert latex(factorial(k)) == r"k!"
assert latex(factorial(-k)) == r"\left(- k\right)!"
assert latex(subfactorial(k)) == r"!k"
assert latex(subfactorial(-k)) == r"!\left(- k\right)"
assert latex(factorial2(k)) == r"k!!"
assert latex(factorial2(-k)) == r"\left(- k\right)!!"
assert latex(binomial(2, k)) == r"{\binom{2}{k}}"
assert latex(
FallingFactorial(3, k)) == r"{\left(3\right)}_{\left(k\right)}"
assert latex(RisingFactorial(3, k)) == r"{\left(3\right)}^{\left(k\right)}"
assert latex(floor(x)) == r"\lfloor{x}\rfloor"
assert latex(ceiling(x)) == r"\lceil{x}\rceil"
assert latex(Min(x, 2, x**3)) == r"\min\left(2, x, x^{3}\right)"
assert latex(Min(x, y)**2) == r"\min\left(x, y\right)^{2}"
assert latex(Max(x, 2, x**3)) == r"\max\left(2, x, x^{3}\right)"
assert latex(Max(x, y)**2) == r"\max\left(x, y\right)^{2}"
assert latex(Abs(x)) == r"\lvert{x}\rvert"
assert latex(re(x)) == r"\Re{x}"
assert latex(re(x + y)) == r"\Re{x} + \Re{y}"
assert latex(im(x)) == r"\Im{x}"
assert latex(conjugate(x)) == r"\overline{x}"
assert latex(gamma(x)) == r"\Gamma\left(x\right)"
assert latex(Order(x)) == r"\mathcal{O}\left(x\right)"
assert latex(lowergamma(x, y)) == r'\gamma\left(x, y\right)'
assert latex(uppergamma(x, y)) == r'\Gamma\left(x, y\right)'
assert latex(cot(x)) == r'\cot{\left (x \right )}'
assert latex(coth(x)) == r'\coth{\left (x \right )}'
assert latex(re(x)) == r'\Re{x}'
assert latex(im(x)) == r'\Im{x}'
assert latex(root(x, y)) == r'x^{\frac{1}{y}}'
assert latex(arg(x)) == r'\arg{\left (x \right )}'
assert latex(zeta(x)) == r'\zeta\left(x\right)'
assert latex(zeta(x)) == r"\zeta\left(x\right)"
assert latex(zeta(x)**2) == r"\zeta^{2}\left(x\right)"
assert latex(zeta(x, y)) == r"\zeta\left(x, y\right)"
assert latex(zeta(x, y)**2) == r"\zeta^{2}\left(x, y\right)"
assert latex(dirichlet_eta(x)) == r"\eta\left(x\right)"
assert latex(dirichlet_eta(x)**2) == r"\eta^{2}\left(x\right)"
assert latex(polylog(x, y)) == r"\operatorname{Li}_{x}\left(y\right)"
assert latex(
polylog(x, y)**2) == r"\operatorname{Li}_{x}^{2}\left(y\right)"
assert latex(lerchphi(x, y, n)) == r"\Phi\left(x, y, n\right)"
assert latex(lerchphi(x, y, n)**2) == r"\Phi^{2}\left(x, y, n\right)"
assert latex(Ei(x)) == r'\operatorname{Ei}{\left (x \right )}'
assert latex(Ei(x)**2) == r'\operatorname{Ei}^{2}{\left (x \right )}'
assert latex(expint(x, y)**2) == r'\operatorname{E}_{x}^{2}\left(y\right)'
assert latex(Shi(x)**2) == r'\operatorname{Shi}^{2}{\left (x \right )}'
assert latex(Si(x)**2) == r'\operatorname{Si}^{2}{\left (x \right )}'
assert latex(Ci(x)**2) == r'\operatorname{Ci}^{2}{\left (x \right )}'
assert latex(Chi(x)**2) == r'\operatorname{Chi}^{2}{\left (x \right )}'
assert latex(
jacobi(n, a, b, x)) == r'P_{n}^{\left(a,b\right)}\left(x\right)'
assert latex(jacobi(n, a, b, x)**2) == r'\left(P_{n}^{\left(a,b\right)}\left(x\right)\right)^{2}'
assert latex(
gegenbauer(n, a, x)) == r'C_{n}^{\left(a\right)}\left(x\right)'
assert latex(gegenbauer(n, a, x)**2) == r'\left(C_{n}^{\left(a\right)}\left(x\right)\right)^{2}'
assert latex(chebyshevt(n, x)) == r'T_{n}\left(x\right)'
assert latex(
chebyshevt(n, x)**2) == r'\left(T_{n}\left(x\right)\right)^{2}'
assert latex(chebyshevu(n, x)) == r'U_{n}\left(x\right)'
assert latex(
chebyshevu(n, x)**2) == r'\left(U_{n}\left(x\right)\right)^{2}'
assert latex(legendre(n, x)) == r'P_{n}\left(x\right)'
assert latex(legendre(n, x)**2) == r'\left(P_{n}\left(x\right)\right)^{2}'
assert latex(
assoc_legendre(n, a, x)) == r'P_{n}^{\left(a\right)}\left(x\right)'
assert latex(assoc_legendre(n, a, x)**2) == r'\left(P_{n}^{\left(a\right)}\left(x\right)\right)^{2}'
assert latex(laguerre(n, x)) == r'L_{n}\left(x\right)'
assert latex(laguerre(n, x)**2) == r'\left(L_{n}\left(x\right)\right)^{2}'
assert latex(
assoc_laguerre(n, a, x)) == r'L_{n}^{\left(a\right)}\left(x\right)'
assert latex(assoc_laguerre(n, a, x)**2) == r'\left(L_{n}^{\left(a\right)}\left(x\right)\right)^{2}'
assert latex(hermite(n, x)) == r'H_{n}\left(x\right)'
assert latex(hermite(n, x)**2) == r'\left(H_{n}\left(x\right)\right)^{2}'
# Test latex printing of function names with "_"
assert latex(
polar_lift(0)) == r"\operatorname{polar\_lift}{\left (0 \right )}"
assert latex(polar_lift(
0)**3) == r"\operatorname{polar\_lift}^{3}{\left (0 \right )}"
def test_hyper_printing():
from sympy import pi
from sympy.abc import x, z
assert latex(meijerg(Tuple(pi, pi, x), Tuple(1),
(0, 1), Tuple(1, 2, 3/pi), z)) == \
r'{G_{4, 5}^{2, 3}\left(\begin{matrix} \pi, \pi, x & 1 \\0, 1 & 1, 2, \frac{3}{\pi} \end{matrix} \middle| {z} \right)}'
assert latex(meijerg(Tuple(), Tuple(1), (0,), Tuple(), z)) == \
r'{G_{1, 1}^{1, 0}\left(\begin{matrix} & 1 \\0 & \end{matrix} \middle| {z} \right)}'
assert latex(hyper((x, 2), (3,), z)) == \
r'{{}_{2}F_{1}\left(\begin{matrix} x, 2 ' \
r'\\ 3 \end{matrix}\middle| {z} \right)}'
assert latex(hyper(Tuple(), Tuple(1), z)) == \
r'{{}_{0}F_{1}\left(\begin{matrix} ' \
r'\\ 1 \end{matrix}\middle| {z} \right)}'
def test_latex_bessel():
from sympy.functions.special.bessel import (besselj, bessely, besseli,
besselk, hankel1, hankel2, jn, yn)
from sympy.abc import z
assert latex(besselj(n, z**2)**k) == r'J^{k}_{n}\left(z^{2}\right)'
assert latex(bessely(n, z)) == r'Y_{n}\left(z\right)'
assert latex(besseli(n, z)) == r'I_{n}\left(z\right)'
assert latex(besselk(n, z)) == r'K_{n}\left(z\right)'
assert latex(hankel1(n, z**2)**2) == \
r'\left(H^{(1)}_{n}\left(z^{2}\right)\right)^{2}'
assert latex(hankel2(n, z)) == r'H^{(2)}_{n}\left(z\right)'
assert latex(jn(n, z)) == r'j_{n}\left(z\right)'
assert latex(yn(n, z)) == r'y_{n}\left(z\right)'
def test_latex_fresnel():
from sympy.functions.special.error_functions import (fresnels, fresnelc)
from sympy.abc import z
assert latex(fresnels(z)) == r'S\left(z\right)'
assert latex(fresnelc(z)) == r'C\left(z\right)'
assert latex(fresnels(z)**2) == r'S^{2}\left(z\right)'
assert latex(fresnelc(z)**2) == r'C^{2}\left(z\right)'
def test_latex_brackets():
assert latex((-1)**x) == r"\left(-1\right)^{x}"
def test_latex_derivatives():
assert latex(diff(x**3, x, evaluate=False)) == \
r"\frac{\partial}{\partial x} x^{3}"
assert latex(diff(sin(x) + x**2, x, evaluate=False)) == \
r"\frac{\partial}{\partial x}\left(x^{2} + \sin{\left (x \right )}\right)"
def test_latex_subs():
assert latex(Subs(x*y, (
x, y), (1, 2))) == r'\left. x y \right|_{\substack{ x=1\\ y=2 }}'
def test_latex_integrals():
assert latex(Integral(log(x), x)) == r"\int \log{\left (x \right )}\, dx"
assert latex(Integral(x**2, (x, 0, 1))) == r"\int_{0}^{1} x^{2}\, dx"
assert latex(Integral(x**2, (x, 10, 20))) == r"\int_{10}^{20} x^{2}\, dx"
assert latex(Integral(
y*x**2, (x, 0, 1), y)) == r"\int\int_{0}^{1} x^{2} y\, dx\, dy"
assert latex(Integral(y*x**2, (x, 0, 1), y), mode='equation*') \
== r"\begin{equation*}\int\int\limits_{0}^{1} x^{2} y\, dx\, dy\end{equation*}"
assert latex(Integral(y*x**2, (x, 0, 1), y), mode='equation*', itex=True) \
== r"$$\int\int_{0}^{1} x^{2} y\, dx\, dy$$"
assert latex(Integral(x, (x, 0))) == r"\int^{0} x\, dx"
assert latex(Integral(x*y, x, y)) == r"\iint x y\, dx\, dy"
assert latex(Integral(x*y*z, x, y, z)) == r"\iiint x y z\, dx\, dy\, dz"
assert latex(Integral(x*y*z*t, x, y, z, t)) == \
r"\iiiint t x y z\, dx\, dy\, dz\, dt"
assert latex(Integral(x, x, x, x, x, x, x)) == \
r"\int\int\int\int\int\int x\, dx\, dx\, dx\, dx\, dx\, dx"
assert latex(Integral(x, x, y, (z, 0, 1))) == \
r"\int_{0}^{1}\int\int x\, dx\, dy\, dz"
def test_latex_sets():
for s in (FiniteSet, frozenset, set):
assert latex(s([x*y, x**2])) == r"\left\{x^{2}, x y\right\}"
assert latex(s(range(1, 6))) == r"\left\{1, 2, 3, 4, 5\right\}"
assert latex(s(range(1, 13))) == \
r"\left\{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12\right\}"
def test_latex_Range():
assert latex(Range(1, 51)) == \
r'\left\{1, 2, \ldots, 50\right\}'
assert latex(Range(1, 4)) == r'\left\{1, 2, 3\right\}'
def test_latex_intervals():
a = Symbol('a', real=True)
assert latex(Interval(0, 0)) == r"\left\{0\right\}"
assert latex(Interval(0, a)) == r"\left[0, a\right]"
assert latex(Interval(0, a, False, False)) == r"\left[0, a\right]"
assert latex(Interval(0, a, True, False)) == r"\left(0, a\right]"
assert latex(Interval(0, a, False, True)) == r"\left[0, a\right)"
assert latex(Interval(0, a, True, True)) == r"\left(0, a\right)"
def test_latex_emptyset():
assert latex(S.EmptySet) == r"\emptyset"
def test_latex_union():
assert latex(Union(Interval(0, 1), Interval(2, 3))) == \
r"\left[0, 1\right] \cup \left[2, 3\right]"
assert latex(Union(Interval(1, 1), Interval(2, 2), Interval(3, 4))) == \
r"\left\{1, 2\right\} \cup \left[3, 4\right]"
def test_latex_productset():
line = Interval(0, 1)
bigline = Interval(0, 10)
fset = FiniteSet(1, 2, 3)
assert latex(line**2) == r"%s^2" % latex(line)
assert latex(line * bigline * fset) == r"%s \times %s \times %s" % (
latex(line), latex(bigline), latex(fset))
def test_latex_Naturals():
assert latex(S.Naturals) == r"\mathbb{N}"
assert latex(S.Integers) == r"\mathbb{Z}"
def test_latex_TransformationSet():
x = Symbol('x')
assert latex(TransformationSet(Lambda(x, x**2), S.Naturals)) == \
r"\left\{x^{2}\; |\; x \in \mathbb{N}\right\}"
def test_latex_sum():
assert latex(Sum(x*y**2, (x, -2, 2), (y, -5, 5))) == \
r"\sum_{\substack{-2 \leq x \leq 2\\-5 \leq y \leq 5}} x y^{2}"
assert latex(Sum(x**2, (x, -2, 2))) == \
r"\sum_{x=-2}^{2} x^{2}"
assert latex(Sum(x**2 + y, (x, -2, 2))) == \
r"\sum_{x=-2}^{2} \left(x^{2} + y\right)"
def test_latex_product():
assert latex(Product(x*y**2, (x, -2, 2), (y, -5, 5))) == \
r"\prod_{\substack{-2 \leq x \leq 2\\-5 \leq y \leq 5}} x y^{2}"
assert latex(Product(x**2, (x, -2, 2))) == \
r"\prod_{x=-2}^{2} x^{2}"
assert latex(Product(x**2 + y, (x, -2, 2))) == \
r"\prod_{x=-2}^{2} \left(x^{2} + y\right)"
def test_latex_limits():
assert latex(Limit(x, x, oo)) == r"\lim_{x \to \infty} x"
def test_issue469():
beta = Symbol(r'\beta')
y = beta + x
assert latex(y) in [r'\beta + x', r'x + \beta']
beta = Symbol(r'beta')
y = beta + x
assert latex(y) in [r'\beta + x', r'x + \beta']
def test_latex():
assert latex((2*tau)**Rational(7, 2)) == "8 \\sqrt{2} \\tau^{\\frac{7}{2}}"
assert latex((2*mu)**Rational(7, 2), mode='equation*') == \
"\\begin{equation*}8 \\sqrt{2} \\mu^{\\frac{7}{2}}\\end{equation*}"
assert latex((2*mu)**Rational(7, 2), mode='equation', itex=True) == \
"$$8 \\sqrt{2} \\mu^{\\frac{7}{2}}$$"
assert latex([2/x, y]) == "\\begin{bmatrix}\\frac{2}{x}, & y\\end{bmatrix}"
def test_latex_dict():
d = {Rational(1): 1, x**2: 2, x: 3, x**3: 4}
assert latex(d) == '\\begin{Bmatrix}1 : 1, & x : 3, & x^{2} : 2, & x^{3} : 4\\end{Bmatrix}'
D = Dict(d)
assert latex(D) == '\\begin{Bmatrix}1 : 1, & x : 3, & x^{2} : 2, & x^{3} : 4\\end{Bmatrix}'
def test_latex_rational():
#tests issue 874
assert latex(-Rational(1, 2)) == "- \\frac{1}{2}"
assert latex(Rational(-1, 2)) == "- \\frac{1}{2}"
assert latex(Rational(1, -2)) == "- \\frac{1}{2}"
assert latex(-Rational(-1, 2)) == "\\frac{1}{2}"
assert latex(-Rational(1, 2)*x) == "- \\frac{1}{2} x"
assert latex(-Rational(1, 2)*x + Rational(-2, 3)*y) in [
"- \\frac{1}{2} x - \\frac{2}{3} y",
"- \\frac{2}{3} y - \\frac{1}{2} x",
]
def test_latex_inverse():
#tests issue 1030
assert latex(1/x) == "\\frac{1}{x}"
assert latex(1/(x + y)) in ["\\frac{1}{x + y}", "\\frac{1}{y + x}"]
def test_latex_DiracDelta():
assert latex(DiracDelta(x)) == r"\delta\left(x\right)"
assert latex(DiracDelta(x)**2) == r"\left(\delta\left(x\right)\right)^{2}"
assert latex(DiracDelta(x, 0)) == r"\delta\left(x\right)"
assert latex(DiracDelta(x, 5)) == \
r"\delta^{\left( 5 \right)}\left( x \right)"
assert latex(DiracDelta(x, 5)**2) == \
r"\left(\delta^{\left( 5 \right)}\left( x \right)\right)^{2}"
def test_latex_Heaviside():
assert latex(Heaviside(x)) == r"\theta\left(x\right)"
assert latex(Heaviside(x)**2) == r"\left(\theta\left(x\right)\right)^{2}"
def test_latex_KroneckerDelta():
assert latex(KroneckerDelta(x, y)) == r"\delta_{x y}"
assert latex(KroneckerDelta(x, y)**2) == r"\left(\delta_{x y}\right)^{2}"
assert latex(KroneckerDelta(x, y + 1)) == r"\delta_{x, y + 1}"
# issue 3479
assert latex(KroneckerDelta(x + 1, y)) == r"\delta_{y, x + 1}"
def test_latex_LeviCivita():
assert latex(LeviCivita(x, y, z)) == r"\varepsilon_{x y z}"
assert latex(LeviCivita(x, y, z)**2) == r"\left(\varepsilon_{x y z}\right)^{2}"
assert latex(LeviCivita(x, y, z + 1)) == r"\varepsilon_{x, y, z + 1}"
assert latex(LeviCivita(x, y + 1, z)) == r"\varepsilon_{x, y + 1, z}"
assert latex(LeviCivita(x + 1, y, z)) == r"\varepsilon_{x + 1, y, z}"
def test_mode():
expr = x + y
assert latex(expr) == 'x + y'
assert latex(expr, mode='plain') == 'x + y'
assert latex(expr, mode='inline') == '$x + y$'
assert latex(
expr, mode='equation*') == '\\begin{equation*}x + y\\end{equation*}'
assert latex(
expr, mode='equation') == '\\begin{equation}x + y\\end{equation}'
def test_latex_Piecewise():
p = Piecewise((x, x < 1), (x**2, True))
assert latex(p) == "\\begin{cases} x & \\text{for}\: x < 1 \\\\x^{2} &" \
" \\text{otherwise} \\end{cases}"
assert latex(p, itex=True) == "\\begin{cases} x & \\text{for}\: x \\lt 1 \\\\x^{2} &" \
" \\text{otherwise} \\end{cases}"
p = Piecewise((x, x < 0), (0, x >= 0))
assert latex(p) == "\\begin{cases} x & \\text{for}\\: x < 0 \\\\0 &" \
" \\text{for}\\: x \\geq 0 \\end{cases}"
A, B = symbols("A B", commutative=False)
p = Piecewise((A**2, Eq(A, B)), (A*B, True))
s = r"\begin{cases} A^{2} & \text{for}\: A = B \\A B & \text{otherwise} \end{cases}"
assert latex(p) == s
assert latex(A*p) == r"A %s" % s
assert latex(p*A) == r"\left(%s\right) A" % s
def test_latex_Matrix():
M = Matrix([[1 + x, y], [y, x - 1]])
assert latex(M) == '\\left[\\begin{smallmatrix}x + 1 & y\\\\y & x - 1' \
'\\end{smallmatrix}\\right]'
settings = {'mat_str': 'bmatrix'}
assert latex(M, **settings) == '\\left[\\begin{bmatrix}x + 1 & y\\\\y &' \
' x - 1\\end{bmatrix}\\right]'
settings['mat_delim'] = None
assert latex(M, **settings) == '\\begin{bmatrix}x + 1 & y\\\\y & x - 1' \
'\\end{bmatrix}'
assert latex(M) == '\\left[\\begin{smallmatrix}x + 1 & y\\\\y & x - 1' \
'\\end{smallmatrix}\\right]'
def test_latex_mul_symbol():
assert latex(4*4**x, mul_symbol='times') == "4 \\times 4^{x}"
assert latex(4*4**x, mul_symbol='dot') == "4 \\cdot 4^{x}"
    assert latex(4*4**x, mul_symbol='ldot') == r"4 \,.\, 4^{x}"
assert latex(4*x, mul_symbol='times') == "4 \\times x"
assert latex(4*x, mul_symbol='dot') == "4 \\cdot x"
    assert latex(4*x, mul_symbol='ldot') == r"4 \,.\, x"
def test_latex_issue1282():
y = 4*4**log(2)
assert latex(y) == '4 \\times 4^{\\log{\\left (2 \\right )}}'
assert latex(1/y) == '\\frac{1}{4 \\times 4^{\\log{\\left (2 \\right )}}}'
def test_latex_issue1477():
assert latex(Symbol("beta_13_2")) == r"\beta_{13 2}"
assert latex(Symbol("beta_132_20")) == r"\beta_{132 20}"
assert latex(Symbol("beta_13")) == r"\beta_{13}"
assert latex(Symbol("x_a_b")) == r"x_{a b}"
assert latex(Symbol("x_1_2_3")) == r"x_{1 2 3}"
assert latex(Symbol("x_a_b1")) == r"x_{a b1}"
assert latex(Symbol("x_a_1")) == r"x_{a 1}"
assert latex(Symbol("x_1_a")) == r"x_{1 a}"
assert latex(Symbol("x_1^aa")) == r"x^{aa}_{1}"
assert latex(Symbol("x_1__aa")) == r"x^{aa}_{1}"
assert latex(Symbol("x_11^a")) == r"x^{a}_{11}"
assert latex(Symbol("x_11__a")) == r"x^{a}_{11}"
assert latex(Symbol("x_a_a_a_a")) == r"x_{a a a a}"
assert latex(Symbol("x_a_a^a^a")) == r"x^{a a}_{a a}"
assert latex(Symbol("x_a_a__a__a")) == r"x^{a a}_{a a}"
assert latex(Symbol("alpha_11")) == r"\alpha_{11}"
assert latex(Symbol("alpha_11_11")) == r"\alpha_{11 11}"
assert latex(Symbol("alpha_alpha")) == r"\alpha_{\alpha}"
assert latex(Symbol("alpha^aleph")) == r"\alpha^{\aleph}"
assert latex(Symbol("alpha__aleph")) == r"\alpha^{\aleph}"
def test_latex_pow_fraction():
x = Symbol('x')
# Testing exp
assert 'e^{-x}' in latex(exp(-x)/2).replace(' ', '') # Remove Whitespace
# Testing just e^{-x} in case future changes alter behavior of muls or fracs
# In particular current output is \frac{1}{2}e^{- x} but perhaps this will
# change to \frac{e^{-x}}{2}
# Testing general, non-exp, power
assert '3^{-x}' in latex(3**-x/2).replace(' ', '')
def test_noncommutative():
A, B, C = symbols('A,B,C', commutative=False)
assert latex(A*B*C**-1) == "A B C^{-1}"
assert latex(C**-1*A*B) == "C^{-1} A B"
assert latex(A*C**-1*B) == "A C^{-1} B"
def test_latex_order():
expr = x**3 + x**2*y + 3*x*y**3 + y**4
assert latex(expr, order='lex') == "x^{3} + x^{2} y + 3 x y^{3} + y^{4}"
assert latex(
expr, order='rev-lex') == "y^{4} + 3 x y^{3} + x^{2} y + x^{3}"
def test_latex_Lambda():
assert latex(Lambda(x, x + 1)) == \
r"\Lambda {\left (x, x + 1 \right )}"
assert latex(Lambda((x, y), x + 1)) == \
r"\Lambda {\left (\begin{pmatrix}x, & y\end{pmatrix}, x + 1 \right )}"
def test_latex_PolyElement():
    Ruv, u, v = ring("u,v", ZZ)
    Rxyz, x, y, z = ring("x,y,z", Ruv.to_domain())
assert latex(x - x) == r"0"
assert latex(x - 1) == r"x - 1"
assert latex(x + 1) == r"x + 1"
assert latex((u**2 + 3*u*v + 1)*x**2*y + u + 1) == r"\left({u}^{2} + 3 u v + 1\right) {x}^{2} y + u + 1"
assert latex((u**2 + 3*u*v + 1)*x**2*y + (u + 1)*x) == r"\left({u}^{2} + 3 u v + 1\right) {x}^{2} y + \left(u + 1\right) x"
assert latex((u**2 + 3*u*v + 1)*x**2*y + (u + 1)*x + 1) == r"\left({u}^{2} + 3 u v + 1\right) {x}^{2} y + \left(u + 1\right) x + 1"
assert latex((-u**2 + 3*u*v - 1)*x**2*y - (u + 1)*x - 1) == r"-\left({u}^{2} - 3 u v + 1\right) {x}^{2} y - \left(u + 1\right) x - 1"
def test_latex_FracElement():
    Fuv, u, v = field("u,v", ZZ)
    Fxyzt, x, y, z, t = field("x,y,z,t", Fuv.to_domain())
assert latex(x - x) == r"0"
assert latex(x - 1) == r"x - 1"
assert latex(x + 1) == r"x + 1"
assert latex(x/z) == r"\frac{x}{z}"
assert latex(x*y/z) == r"\frac{x y}{z}"
assert latex(x/(z*t)) == r"\frac{x}{z t}"
assert latex(x*y/(z*t)) == r"\frac{x y}{z t}"
assert latex((x - 1)/y) == r"\frac{x - 1}{y}"
assert latex((x + 1)/y) == r"\frac{x + 1}{y}"
assert latex((-x - 1)/y) == r"\frac{-x - 1}{y}"
assert latex((x + 1)/(y*z)) == r"\frac{x + 1}{y z}"
assert latex(-y/(x + 1)) == r"\frac{-y}{x + 1}"
assert latex(y*z/(x + 1)) == r"\frac{y z}{x + 1}"
assert latex(((u + 1)*x*y + 1)/((v - 1)*z - 1)) == r"\frac{\left(u + 1\right) x y + 1}{\left(v - 1\right) z - 1}"
assert latex(((u + 1)*x*y + 1)/((v - 1)*z - t*u*v - 1)) == r"\frac{\left(u + 1\right) x y + 1}{\left(v - 1\right) z - u v t - 1}"
def test_latex_Poly():
assert latex(Poly(x**2 + 2 * x, x)) == \
r"\operatorname{Poly}{\left( x^{2} + 2 x, x, domain=\mathbb{Z} \right)}"
assert latex(Poly(x/y, x)) == \
r"\operatorname{Poly}{\left( \frac{x}{y}, x, domain=\mathbb{Z}\left(y\right) \right)}"
assert latex(Poly(2.0*x + y)) == \
r"\operatorname{Poly}{\left( 2.0 x + 1.0 y, x, y, domain=\mathbb{R} \right)}"
def test_latex_RootOf():
assert latex(RootOf(x**5 + x + 3, 0)) == \
r"\operatorname{RootOf} {\left(x^{5} + x + 3, 0\right)}"
def test_latex_RootSum():
assert latex(RootSum(x**5 + x + 3, sin)) == \
r"\operatorname{RootSum} {\left(x^{5} + x + 3, \Lambda {\left (x, \sin{\left (x \right )} \right )}\right)}"
def test_settings():
raises(TypeError, lambda: latex(x*y, method="garbage"))
def test_latex_numbers():
assert latex(catalan(n)) == r"C_{n}"
def test_lamda():
assert latex(Symbol('lamda')) == r"\lambda"
assert latex(Symbol('Lamda')) == r"\Lambda"
def test_custom_symbol_names():
x = Symbol('x')
y = Symbol('y')
assert latex(x) == "x"
assert latex(x, symbol_names={x: "x_i"}) == "x_i"
assert latex(x + y, symbol_names={x: "x_i"}) == "x_i + y"
assert latex(x**2, symbol_names={x: "x_i"}) == "x_i^{2}"
assert latex(x + y, symbol_names={x: "x_i", y: "y_j"}) == "x_i + y_j"
def test_matAdd():
from sympy import MatrixSymbol
from sympy.printing.latex import LatexPrinter
C = MatrixSymbol('C', 5, 5)
B = MatrixSymbol('B', 5, 5)
l = LatexPrinter()
assert l._print_MatAdd(C - 2*B) in ['-2 B + C', 'C -2 B']
assert l._print_MatAdd(C + 2*B) in ['2 B + C', 'C + 2 B']
assert l._print_MatAdd(B - 2*C) in ['B -2 C', '-2 C + B']
assert l._print_MatAdd(B + 2*C) in ['B + 2 C', '2 C + B']
def test_matMul():
from sympy import MatrixSymbol
from sympy.printing.latex import LatexPrinter
A = MatrixSymbol('A', 5, 5)
B = MatrixSymbol('B', 5, 5)
x = Symbol('x')
l = LatexPrinter()
assert l._print_MatMul(2*A) == '2 A'
assert l._print_MatMul(2*x*A) == '2 x A'
assert l._print_MatMul(-2*A) == '-2 A'
assert l._print_MatMul(1.5*A) == '1.5 A'
assert l._print_MatMul(sqrt(2)*A) == r'\sqrt{2} A'
assert l._print_MatMul(-sqrt(2)*A) == r'- \sqrt{2} A'
assert l._print_MatMul(2*sqrt(2)*x*A) == r'2 \sqrt{2} x A'
assert l._print_MatMul(-2*A*(A + 2*B)) in [r'-2 A \left(A + 2 B\right)',
r'-2 A \left(2 B + A\right)']
def test_latex_MatrixSlice():
from sympy.matrices.expressions import MatrixSymbol
assert latex(MatrixSymbol('X', 10, 10)[:5, 1:9:2]) == \
r'X\left[:5, 1:9:2\right]'
assert latex(MatrixSymbol('X', 10, 10)[5, :5:2]) == \
r'X\left[5, :5:2\right]'
def test_latex_RandomDomain():
from sympy.stats import Normal, Die, Exponential, pspace, where
X = Normal('x1', 0, 1)
assert latex(where(X > 0)) == "Domain: 0 < x_{1}"
D = Die('d1', 6)
assert latex(where(D > 4)) == r"Domain: d_{1} = 5 \vee d_{1} = 6"
A = Exponential('a', 1)
B = Exponential('b', 1)
    assert latex(
        pspace(Tuple(A, B)).domain) == r"Domain: 0 \leq a \wedge 0 \leq b"
def test_PrettyPoly():
from sympy.polys.domains import QQ
F = QQ.frac_field(x, y)
R = QQ[x, y]
assert latex(F.convert(x/(x + y))) == latex(x/(x + y))
assert latex(R.convert(x + y)) == latex(x + y)
def test_integral_transforms():
x = Symbol("x")
k = Symbol("k")
f = Function("f")
a = Symbol("a")
b = Symbol("b")
assert latex(MellinTransform(f(x), x, k)) == r"\mathcal{M}_{x}\left[\operatorname{f}{\left (x \right )}\right]\left(k\right)"
assert latex(InverseMellinTransform(f(k), k, x, a, b)) == r"\mathcal{M}^{-1}_{k}\left[\operatorname{f}{\left (k \right )}\right]\left(x\right)"
assert latex(LaplaceTransform(f(x), x, k)) == r"\mathcal{L}_{x}\left[\operatorname{f}{\left (x \right )}\right]\left(k\right)"
assert latex(InverseLaplaceTransform(f(k), k, x, (a, b))) == r"\mathcal{L}^{-1}_{k}\left[\operatorname{f}{\left (k \right )}\right]\left(x\right)"
assert latex(FourierTransform(f(x), x, k)) == r"\mathcal{F}_{x}\left[\operatorname{f}{\left (x \right )}\right]\left(k\right)"
assert latex(InverseFourierTransform(f(k), k, x)) == r"\mathcal{F}^{-1}_{k}\left[\operatorname{f}{\left (k \right )}\right]\left(x\right)"
assert latex(CosineTransform(f(x), x, k)) == r"\mathcal{COS}_{x}\left[\operatorname{f}{\left (x \right )}\right]\left(k\right)"
assert latex(InverseCosineTransform(f(k), k, x)) == r"\mathcal{COS}^{-1}_{k}\left[\operatorname{f}{\left (k \right )}\right]\left(x\right)"
assert latex(SineTransform(f(x), x, k)) == r"\mathcal{SIN}_{x}\left[\operatorname{f}{\left (x \right )}\right]\left(k\right)"
assert latex(InverseSineTransform(f(k), k, x)) == r"\mathcal{SIN}^{-1}_{k}\left[\operatorname{f}{\left (k \right )}\right]\left(x\right)"
def test_PolynomialRing():
from sympy.polys.domains import QQ
assert latex(QQ[x, y]) == r"\mathbb{Q}\left[x, y\right]"
assert latex(QQ.poly_ring(x, y, order="ilex")) == \
r"S_<^{-1}\mathbb{Q}\left[x, y\right]"
def test_categories():
from sympy.categories import (Object, IdentityMorphism,
NamedMorphism, Category, Diagram, DiagramGrid)
A1 = Object("A1")
A2 = Object("A2")
A3 = Object("A3")
f1 = NamedMorphism(A1, A2, "f1")
f2 = NamedMorphism(A2, A3, "f2")
id_A1 = IdentityMorphism(A1)
K1 = Category("K1")
assert latex(A1) == "A_{1}"
assert latex(f1) == "f_{1}:A_{1}\\rightarrow A_{2}"
assert latex(id_A1) == "id:A_{1}\\rightarrow A_{1}"
assert latex(f2*f1) == "f_{2}\\circ f_{1}:A_{1}\\rightarrow A_{3}"
    assert latex(K1) == r"\mathbf{K_{1}}"
d = Diagram()
    assert latex(d) == r"\emptyset"
d = Diagram({f1: "unique", f2: S.EmptySet})
assert latex(d) == "\\begin{Bmatrix}f_{2}\\circ f_{1}:A_{1}" \
"\\rightarrow A_{3} : \\emptyset, & id:A_{1}\\rightarrow " \
"A_{1} : \\emptyset, & id:A_{2}\\rightarrow A_{2} : " \
"\\emptyset, & id:A_{3}\\rightarrow A_{3} : \\emptyset, " \
"& f_{1}:A_{1}\\rightarrow A_{2} : \\left\\{unique\\right\\}, " \
"& f_{2}:A_{2}\\rightarrow A_{3} : \\emptyset\\end{Bmatrix}"
d = Diagram({f1: "unique", f2: S.EmptySet}, {f2 * f1: "unique"})
assert latex(d) == "\\begin{Bmatrix}f_{2}\\circ f_{1}:A_{1}" \
"\\rightarrow A_{3} : \\emptyset, & id:A_{1}\\rightarrow " \
"A_{1} : \\emptyset, & id:A_{2}\\rightarrow A_{2} : " \
"\\emptyset, & id:A_{3}\\rightarrow A_{3} : \\emptyset, " \
"& f_{1}:A_{1}\\rightarrow A_{2} : \\left\\{unique\\right\\}," \
" & f_{2}:A_{2}\\rightarrow A_{3} : \\emptyset\\end{Bmatrix}" \
"\\Longrightarrow \\begin{Bmatrix}f_{2}\\circ f_{1}:A_{1}" \
"\\rightarrow A_{3} : \\left\\{unique\\right\\}\\end{Bmatrix}"
# A linear diagram.
A = Object("A")
B = Object("B")
C = Object("C")
f = NamedMorphism(A, B, "f")
g = NamedMorphism(B, C, "g")
d = Diagram([f, g])
grid = DiagramGrid(d)
assert latex(grid) == "\\begin{array}{cc}\n" \
"A & B \\\\\n" \
" & C \n" \
"\\end{array}\n"
def test_Modules():
from sympy.polys.domains import QQ
from sympy import homomorphism
R = QQ[x, y]
F = R.free_module(2)
M = F.submodule([x, y], [1, x**2])
assert latex(F) == r"{\mathbb{Q}\left[x, y\right]}^{2}"
assert latex(M) == \
r"\left< {\left[ {x},{y} \right]},{\left[ {1},{x^{2}} \right]} \right>"
I = R.ideal(x**2, y)
assert latex(I) == r"\left< {x^{2}},{y} \right>"
Q = F / M
assert latex(Q) == r"\frac{{\mathbb{Q}\left[x, y\right]}^{2}}{\left< {\left[ {x},{y} \right]},{\left[ {1},{x^{2}} \right]} \right>}"
assert latex(Q.submodule([1, x**3/2], [2, y])) == \
r"\left< {{\left[ {1},{\frac{1}{2} x^{3}} \right]} + {\left< {\left[ {x},{y} \right]},{\left[ {1},{x^{2}} \right]} \right>}},{{\left[ {2},{y} \right]} + {\left< {\left[ {x},{y} \right]},{\left[ {1},{x^{2}} \right]} \right>}} \right>"
h = homomorphism(QQ[x].free_module(2), QQ[x].free_module(2), [0, 0])
assert latex(h) == r"{\left[\begin{smallmatrix}0 & 0\\0 & 0\end{smallmatrix}\right]} : {{\mathbb{Q}\left[x\right]}^{2}} \to {{\mathbb{Q}\left[x\right]}^{2}}"
def test_QuotientRing():
from sympy.polys.domains import QQ
R = QQ[x]/[x**2 + 1]
assert latex(
R) == r"\frac{\mathbb{Q}\left[x\right]}{\left< {x^{2} + 1} \right>}"
assert latex(R.one) == r"{1} + {\left< {x^{2} + 1} \right>}"
def test_Tr():
#TODO: Handle indices
A, B = symbols('A B', commutative=False)
t = Tr(A*B)
assert latex(t) == r'\mbox{Tr}\left(A B\right)'
def test_Adjoint():
from sympy.matrices import MatrixSymbol, Adjoint, Inverse, Transpose
X = MatrixSymbol('X', 2, 2)
Y = MatrixSymbol('Y', 2, 2)
assert latex(Adjoint(X)) == r'X^\dag'
assert latex(Adjoint(X + Y)) == r'\left(X + Y\right)^\dag'
assert latex(Adjoint(X) + Adjoint(Y)) == r'X^\dag + Y^\dag'
assert latex(Adjoint(X*Y)) == r'\left(X Y\right)^\dag'
assert latex(Adjoint(Y)*Adjoint(X)) == r'Y^\dag X^\dag'
assert latex(Adjoint(X**2)) == r'\left(X^{2}\right)^\dag'
assert latex(Adjoint(X)**2) == r'\left(X^\dag\right)^{2}'
assert latex(Adjoint(Inverse(X))) == r'\left(X^{-1}\right)^\dag'
assert latex(Inverse(Adjoint(X))) == r'\left(X^\dag\right)^{-1}'
assert latex(Adjoint(Transpose(X))) == r'\left(X^T\right)^\dag'
assert latex(Transpose(Adjoint(X))) == r'\left(X^\dag\right)^T'
def test_Hadamard():
from sympy.matrices import MatrixSymbol, HadamardProduct
X = MatrixSymbol('X', 2, 2)
Y = MatrixSymbol('Y', 2, 2)
assert latex(HadamardProduct(X, Y*Y)) == r'X \circ \left(Y Y\right)'
assert latex(HadamardProduct(X, Y)*Y) == r'\left(X \circ Y\right) Y'
def test_boolean_args_order():
syms = symbols('a:f')
expr = And(*syms)
assert latex(expr) == 'a \\wedge b \\wedge c \\wedge d \\wedge e \\wedge f'
expr = Or(*syms)
assert latex(expr) == 'a \\vee b \\vee c \\vee d \\vee e \\vee f'
|
amitjamadagni/sympy
|
sympy/printing/tests/test_latex.py
|
Python
|
bsd-3-clause
| 36,468
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.core import mail
from django.core.urlresolvers import reverse, reverse_lazy
from django.test import TestCase, override_settings
from django.utils.translation import ugettext_lazy as _
from taggit.models import Tag
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailadmin.site_summary import PagesSummaryItem
from wagtail.wagtailadmin.utils import send_mail, user_has_any_page_permission
from wagtail.wagtailcore.models import Page, Site
class TestHome(TestCase, WagtailTestUtils):
def setUp(self):
# Login
self.login()
def test_simple(self):
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Welcome to the Test Site Wagtail CMS")
def test_admin_menu(self):
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
# check that media attached to menu items is correctly pulled in
self.assertContains(
response,
'<script type="text/javascript" src="/static/wagtailadmin/js/explorer-menu.js"></script>'
)
# check that custom menu items (including classname / attrs parameters) are pulled in
self.assertContains(
response,
'<a href="http://www.tomroyal.com/teaandkittens/" class="icon icon-kitten" data-fluffy="yes">Kittens!</a>'
)
# check that is_shown is respected on menu items
response = self.client.get(reverse('wagtailadmin_home') + '?hide-kittens=true')
self.assertNotContains(
response,
'<a href="http://www.tomroyal.com/teaandkittens/" class="icon icon-kitten" data-fluffy="yes">Kittens!</a>'
)
def test_never_cache_header(self):
        # This tests that wagtailadmin's global cache settings have been applied correctly
response = self.client.get(reverse('wagtailadmin_home'))
self.assertIn('private', response['Cache-Control'])
self.assertIn('no-cache', response['Cache-Control'])
self.assertIn('no-store', response['Cache-Control'])
self.assertIn('max-age=0', response['Cache-Control'])
def test_nonascii_email(self):
# Test that non-ASCII email addresses don't break the admin; previously these would
# cause a failure when generating Gravatar URLs
get_user_model().objects.create_superuser(username='snowman', email='☃@thenorthpole.com', password='password')
# Login
self.assertTrue(self.client.login(username='snowman', password='password'))
response = self.client.get(reverse('wagtailadmin_home'))
self.assertEqual(response.status_code, 200)
class TestPagesSummary(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get_request(self):
"""
Get a Django WSGI request that has been passed through middleware etc.
"""
return self.client.get('/admin/').wsgi_request
def test_page_summary_single_site(self):
request = self.get_request()
root_page = request.site.root_page
link = '<a href="{}">'.format(reverse('wagtailadmin_explore', args=[root_page.pk]))
page_summary = PagesSummaryItem(request)
self.assertIn(link, page_summary.render())
def test_page_summary_multiple_sites(self):
Site.objects.create(
hostname='example.com',
root_page=Page.objects.get(pk=1))
request = self.get_request()
link = '<a href="{}">'.format(reverse('wagtailadmin_explore_root'))
page_summary = PagesSummaryItem(request)
self.assertIn(link, page_summary.render())
def test_page_summary_zero_sites(self):
Site.objects.all().delete()
request = self.get_request()
link = '<a href="{}">'.format(reverse('wagtailadmin_explore_root'))
page_summary = PagesSummaryItem(request)
self.assertIn(link, page_summary.render())
class TestEditorHooks(TestCase, WagtailTestUtils):
def setUp(self):
self.homepage = Page.objects.get(id=2)
self.login()
def test_editor_css_hooks_on_add(self):
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.homepage.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<link rel="stylesheet" href="/path/to/my/custom.css">')
def test_editor_js_hooks_on_add(self):
response = self.client.get(reverse('wagtailadmin_pages:add', args=('tests', 'simplepage', self.homepage.id)))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<script src="/path/to/my/custom.js"></script>')
def test_editor_css_hooks_on_edit(self):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.homepage.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<link rel="stylesheet" href="/path/to/my/custom.css">')
def test_editor_js_hooks_on_edit(self):
response = self.client.get(reverse('wagtailadmin_pages:edit', args=(self.homepage.id, )))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<script src="/path/to/my/custom.js"></script>')
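# Editor's sketch, not part of the original module: the assets asserted above
# are injected by hook registrations living in the test app, roughly of this
# shape. 'insert_editor_css' and 'insert_editor_js' are real wagtail hook
# names; the asset paths are the illustrative ones used in the assertions.
# Kept commented out so importing this module does not register extra hooks.
#
# from django.utils.html import format_html
# from wagtail.wagtailcore import hooks
#
# @hooks.register('insert_editor_css')
# def editor_css():
#     return format_html('<link rel="stylesheet" href="/path/to/my/custom.css">')
#
# @hooks.register('insert_editor_js')
# def editor_js():
#     return format_html('<script src="/path/to/my/custom.js"></script>')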
class TestSendMail(TestCase):
def test_send_email(self):
send_mail("Test subject", "Test content", ["nobody@email.com"], "test@email.com")
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "test@email.com")
@override_settings(WAGTAILADMIN_NOTIFICATION_FROM_EMAIL='anothertest@email.com')
def test_send_fallback_to_wagtailadmin_notification_from_email_setting(self):
send_mail("Test subject", "Test content", ["nobody@email.com"])
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "anothertest@email.com")
@override_settings(DEFAULT_FROM_EMAIL='yetanothertest@email.com')
def test_send_fallback_to_default_from_email_setting(self):
send_mail("Test subject", "Test content", ["nobody@email.com"])
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "yetanothertest@email.com")
def test_send_default_from_email(self):
send_mail("Test subject", "Test content", ["nobody@email.com"])
# Check that the email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test subject")
self.assertEqual(mail.outbox[0].body, "Test content")
self.assertEqual(mail.outbox[0].to, ["nobody@email.com"])
self.assertEqual(mail.outbox[0].from_email, "webmaster@localhost")
class TestTagsAutocomplete(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
Tag.objects.create(name="Test", slug="test")
def test_tags_autocomplete(self):
response = self.client.get(reverse('wagtailadmin_tag_autocomplete'), {
'term': 'test'
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data, ['Test'])
def test_tags_autocomplete_partial_match(self):
response = self.client.get(reverse('wagtailadmin_tag_autocomplete'), {
'term': 'te'
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data, ['Test'])
def test_tags_autocomplete_different_term(self):
response = self.client.get(reverse('wagtailadmin_tag_autocomplete'), {
'term': 'hello'
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data, [])
def test_tags_autocomplete_no_term(self):
response = self.client.get(reverse('wagtailadmin_tag_autocomplete'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data, [])
class TestMenuItem(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
response = self.client.get(reverse('wagtailadmin_home'))
self.request = response.wsgi_request
def test_menuitem_reverse_lazy_url_pass(self):
menuitem = MenuItem(_('Test'), reverse_lazy('wagtailadmin_home'))
self.assertEqual(menuitem.is_active(self.request), True)
class TestUserPassesTestPermissionDecorator(TestCase):
"""
Test for custom user_passes_test permission decorators.
testapp_bob_only_zone is a view configured to only grant access to users with a first_name of Bob
"""
def test_user_passes_test(self):
# create and log in as a user called Bob
get_user_model().objects.create_superuser(first_name='Bob', last_name='Mortimer', username='test', email='test@email.com', password='password')
self.assertTrue(self.client.login(username='test', password='password'))
response = self.client.get(reverse('testapp_bob_only_zone'))
self.assertEqual(response.status_code, 200)
def test_user_fails_test(self):
# create and log in as a user not called Bob
get_user_model().objects.create_superuser(first_name='Vic', last_name='Reeves', username='test', email='test@email.com', password='password')
self.assertTrue(self.client.login(username='test', password='password'))
response = self.client.get(reverse('testapp_bob_only_zone'))
self.assertRedirects(response, reverse('wagtailadmin_home'))
class TestUserHasAnyPagePermission(TestCase):
def test_superuser(self):
user = get_user_model().objects.create_superuser(
username='superuser', email='admin@example.com', password='p')
self.assertTrue(user_has_any_page_permission(user))
def test_inactive_superuser(self):
user = get_user_model().objects.create_superuser(
username='superuser', email='admin@example.com', password='p')
user.is_active = False
self.assertFalse(user_has_any_page_permission(user))
def test_editor(self):
user = get_user_model().objects.create_user(
username='editor', email='ed@example.com', password='p')
editors = Group.objects.get(name='Editors')
user.groups.add(editors)
self.assertTrue(user_has_any_page_permission(user))
def test_moderator(self):
user = get_user_model().objects.create_user(
username='moderator', email='mod@example.com', password='p')
editors = Group.objects.get(name='Moderators')
user.groups.add(editors)
self.assertTrue(user_has_any_page_permission(user))
def test_no_permissions(self):
user = get_user_model().objects.create_user(
username='pleb', email='pleb@example.com', password='p')
user.user_permissions.add(
Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
)
self.assertFalse(user_has_any_page_permission(user))
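# Editor's sketch, not wagtail's actual implementation: the behaviour pinned
# down by TestUserHasAnyPagePermission, expressed as pseudologic over real
# wagtailcore models.
#
# from wagtail.wagtailcore.models import GroupPagePermission
#
# def user_has_any_page_permission_sketch(user):
#     if not user.is_active:
#         return False
#     if user.is_superuser:
#         return True
#     return GroupPagePermission.objects.filter(
#         group__in=user.groups.all()).exists()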
|
Toshakins/wagtail
|
wagtail/wagtailadmin/tests/tests.py
|
Python
|
bsd-3-clause
| 12,417
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.core.cache import cache
from django.db import models
from django.db.models.signals import post_save
try:
from django.db.transaction import atomic
except ImportError:
from django.db.transaction import commit_on_success as atomic
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
from .exceptions import MessagingPermissionDenied
from .managers import ConversationManager, ParticipationManager
from .settings import (PRIVATE_CONVERSATION_MEMBER_COUNT,
CONVERSATION_CACHE_KEY_PATTERN)
from .signals import message_sent
from .utils import is_date_greater
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
@python_2_unicode_compatible
class Participation(models.Model):
conversation = models.ForeignKey('Conversation',
related_name='participations')
user = models.ForeignKey(AUTH_USER_MODEL, related_name='participations')
# messages in conversation seen at
read_at = models.DateTimeField(null=True, blank=True, db_index=True)
# replied to conversation at
replied_at = models.DateTimeField(null=True, blank=True, db_index=True)
# deleted conversation at
deleted_at = models.DateTimeField(null=True, blank=True, db_index=True)
objects = ParticipationManager()
class Meta:
ordering = ['conversation']
unique_together = ('conversation', 'user')
def __str__(self):
return "{0} - {1}".format(self.user.username, self.conversation)
@property
def is_deleted(self):
return self.deleted_at is not None
@property
def is_read(self):
return self.read_at is not None
def read_conversation(self):
"""Mark the conversation as read by the participant who requested."""
self.read_at = now()
self.save()
def revoke(self):
"""Sets the deleted_at field of the participation to the time when the
member in question left the conversation or was kicked out of it."""
if self.conversation.is_private:
# can't leave one-on-one conversations
return
self.deleted_at = now()
self.save()
def reinstate(self):
"""Clears the deleted_at field of the participation, meaning the user
re-joined the conversation."""
self.deleted_at = None
self.save()
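# Editor's sketch, illustrative only (`participation` is hypothetical):
# revoke()/reinstate() merely toggle deleted_at, so leaving a group
# conversation is soft and reversible, and a no-op for private ones.
#
# participation.revoke()        # user leaves (or is removed)
# assert participation.is_deleted
# participation.reinstate()     # someone re-adds the user
# assert not participation.is_deleted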
@python_2_unicode_compatible
class Conversation(models.Model):
latest_message = models.ForeignKey('Message',
related_name='conversation_of_latest',
null=True,
blank=True)
creator = models.ForeignKey(AUTH_USER_MODEL,
related_name='created_conversation')
objects = ConversationManager()
class Meta:
ordering = ['latest_message']
def __str__(self):
return "{0} - {1}".format(self.pk, self.latest_message)
@property
def active_participations(self):
"""Returns a queryset of active participations, meaning that revoked
        participations (when a user leaves a conversation) won't be included."""
return self.participations.filter(deleted_at__isnull=True)
def add_participants(self, participants):
"""Adds participants to an existing conversation.
:param participants: A QuerySet or list of user objects, who will be
added to the conversation as participants."""
for user in participants:
participation, created = Participation.objects.get_or_create(
conversation=self,
user=user
)
if not created and participation.is_deleted:
# participation already exists and it was marked as deleted, so
# the user most likely left the conversation, but someone
# re-added him/her
participation.reinstate()
def remove_participants(self, participants):
"""Removes participants from an existing conversation.
:param participants: A QuerySet or list of user objects, whose
participations will be revoked."""
for user in participants:
participation = self.participations.get(user=user)
participation.revoke()
def is_read_by(self, participant):
participation = self.participations.get(user=participant)
return participation.is_read
@property
def participants(self):
"""Returns a list of user objects participating in this conversation"""
return [p.user for p in self.active_participations.all()]
@property
def participant_names(self):
"""Returns a list of usernames who participate in this conversation."""
return list(self.active_participations.values_list('user__username',
flat=True))
def has_participant(self, user):
"""Returns whether this user participates in this conversation.
:param user: A User object (request.user probably)"""
return self.active_participations.filter(user=user).exists()
@property
def is_private(self):
"""Returns whether the conversation is private or not.
If there are more than PRIVATE_CONVERSATION_MEMBER_COUNT (2)
participants in the conversation, it is not private."""
return (self.participations.count() ==
PRIVATE_CONVERSATION_MEMBER_COUNT)
@classmethod
def start(cls, creator, participants):
"""Starts a new conversation between the specified participants.
:param creator: A User object (request.user probably)
:param participants: A QuerySet or list of user objects, who will be
added to the conversation as participants."""
conversation = cls.objects.create(creator=creator)
conversation.add_participants(participants)
return conversation
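# Editor's sketch, illustrative only (alice/bob/carol are hypothetical
# users): starting a conversation and growing it past the private limit.
#
# convo = Conversation.start(creator=alice, participants=[alice, bob])
# convo.is_private          # True: PRIVATE_CONVERSATION_MEMBER_COUNT members
# convo.add_participants([carol])
# convo.is_private          # False: now a group conversation
# convo.participant_names   # e.g. ['alice', 'bob', 'carol']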
@python_2_unicode_compatible
class Message(models.Model):
body = models.TextField()
parent = models.ForeignKey('self',
related_name='next_messages',
blank=True,
null=True)
sender = models.ForeignKey(AUTH_USER_MODEL, related_name='messages')
sent_at = models.DateTimeField(auto_now_add=True, db_index=True)
conversation = models.ForeignKey('Conversation', related_name='messages')
class Meta:
ordering = ['-sent_at', '-id']
def __str__(self):
return "{0} - {1}".format(self.sender.username, self.sent_at)
@classmethod
def __send_to_conversation(cls, body, sender, conversation,
new_participants=None):
"""Internally used by both send_to_conversation and __send_to_users
methods. Refactored as a separate method to avoid nesting the atomic
decorator when __send_to_users needs to call __send_to_conversation."""
new_participants = list(new_participants) if new_participants else []
# check whether the sender is participating in the conversation or not
        # without this, arbitrary users could send messages into conversations
# which they're not even part of
if not conversation.has_participant(sender):
msg = "{0} not participating".format(sender.username)
raise MessagingPermissionDenied(msg)
if new_participants and conversation.is_private:
# this conversation can NOT be extended to include additional
# participants, instead a new conversation has to be started which
# will include all the participants, but not the history of the
# private conversation
recipients = conversation.participants + new_participants
return cls.__send_to_users(body, sender, recipients)
# this was already a group conversation, so just add the new
# participants to it
conversation.add_participants(new_participants)
message = cls.objects.create(body=body,
parent=conversation.latest_message,
sender=sender,
conversation=conversation)
# update latest message of conversation
conversation.latest_message = message
conversation.save()
p_sender = sender.participations.get(conversation=conversation)
p_recipients = conversation.active_participations.exclude(user=sender)
# mark conversation as not read for all participants except the sender
p_recipients.update(read_at=None)
if not any(is_date_greater(pr.replied_at, p_sender.read_at)
for pr in p_recipients):
            # if the sender's read_at time is greater than all the other
            # participants' replied_at times, it means the sender already read
            # all the messages the others sent, so update the sender's read_at
            # value again, to reflect that the sender read its own (just now
            # sent) message.
fields = dict(replied_at=now(), read_at=now())
else:
            # if the sender's read_at time is less than any of the other
            # participants' replied_at times, it means the sender hasn't yet
            # read the other repliers' messages, so do not touch the sender's
            # read_at time.
            # this also means that if the sender replies to the conversation,
            # it doesn't imply that he/she also read the latest message sent
            # before his/her message
fields = dict(replied_at=now())
conversation.participations.filter(user=sender).update(**fields)
return message
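    # Editor's note: is_date_greater (imported from .utils) is assumed to
    # treat None as "never", i.e. roughly:
    #
    #     def is_date_greater(date, compared_to):
    #         return bool(date) and (compared_to is None or date > compared_to)
    #
    # which is what keeps the replied_at/read_at bookkeeping above safe for
    # participants who have never read or replied yet.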
@classmethod
def __send_to_users(cls, body, sender, recipients):
"""Internally used by both send_to_users and __send_to_conversation
methods. Refactored as a separate method to avoid nesting the atomic
decorator when __send_to_conversation needs to call __send_to_users."""
participants = list(recipients)
if not participants:
raise MessagingPermissionDenied("No recipients specified.")
# if sender is the only participant, deny message sending
if sender in participants and len(participants) == 1:
raise MessagingPermissionDenied("No self-messaging allowed.")
participants.append(sender)
conversations = Conversation.objects.for_participants(participants)
if not conversations.exists():
# no conversation exists between the specified participants, so
# create a new one
conversation = Conversation.start(creator=sender,
participants=participants)
else:
# a conversation exists between the specified participants, so send
# the message to that conversation
(conversation,) = conversations
return cls.__send_to_conversation(body, sender, conversation)
@classmethod
@atomic
def send_to_conversation(cls, body, sender, conversation,
new_participants=None):
"""Sends a message to a specific conversation.
The transaction is atomic, so if anything fails during message sending,
nothing will be committed.
:param body: Body of the new message
:param sender: A User object (request.user probably)
:param conversation: Conversation instance
:param new_participants: Optional, if specified it should be a Queryset
or list of user objects, who will be added to
the existing conversation as new participants.
"""
return cls.__send_to_conversation(body,
sender,
conversation,
new_participants)
@classmethod
@atomic
def send_to_users(cls, body, sender, recipients):
"""Sends a message to a list of users.
The transaction is atomic, so if anything fails during message sending,
nothing will be committed.
:param body: Body of the new message
:param sender: A User object (request.user probably)
:param recipients: Queryset or list of user objects who will receive
the message."""
return cls.__send_to_users(body, sender, recipients)
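# Editor's sketch, illustrative only (alice/bob/carol hypothetical): the two
# public, transactional entry points defined above.
#
# msg = Message.send_to_users("hi both", sender=alice, recipients=[bob, carol])
# reply = Message.send_to_conversation("hello", sender=bob,
#                                      conversation=msg.conversation)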
def clear_conversation_cache(sender, instance, **kwargs):
"""When a message is sent, the cached conversation (all of it's messages)
shall be invalidated."""
key = CONVERSATION_CACHE_KEY_PATTERN.format(instance.conversation.pk)
cache.delete(key)
def fire_message_sent_signal(sender, instance, created, **kwargs):
if created:
message_sent.send(sender=sender, instance=instance)
post_save.connect(clear_conversation_cache,
sender=Message,
dispatch_uid="clear_conversation_cache")
post_save.connect(fire_message_sent_signal,
sender=Message,
dispatch_uid="fire_message_sent_signal")
|
integricho/django-talkalot
|
talkalot/models.py
|
Python
|
bsd-3-clause
| 13,435
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.db import transaction
from django.db.models import Count
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render, get_object_or_404
from rapidsms.models import Contact
from groups.models import Group
from groups.forms import GroupForm, ContactForm
@login_required
def list_groups(request):
groups = Group.objects.annotate(count=Count('contacts')).order_by('name')
return render(request, 'groups/groups/list.html', {
'groups': groups,
})
@login_required
@transaction.commit_on_success
def create_edit_group(request, group_id=None):
group = None
if group_id:
group = get_object_or_404(Group, pk=group_id)
if not group.is_editable:
return HttpResponseForbidden('Access denied')
if request.method == 'POST':
form = GroupForm(request.POST, instance=group)
if form.is_valid():
form.save()
messages.info(request, 'Group saved successfully')
return HttpResponseRedirect(reverse('list-groups'))
else:
form = GroupForm(instance=group)
return render(request, 'groups/groups/create_edit.html', {
'form': form,
'group': group,
})
@login_required
@transaction.commit_on_success
def delete_group(request, group_id):
group = get_object_or_404(Group, pk=group_id)
if not group.is_editable:
return HttpResponseForbidden('Access denied')
if request.method == 'POST':
group.delete()
messages.info(request, 'Group successfully deleted')
return HttpResponseRedirect(reverse('list-groups'))
return render(request, 'groups/groups/delete.html', {
'group': group,
})
@login_required
def list_contacts(request):
contacts = Contact.objects.all().order_by('name')
return render(request, 'groups/contacts/list.html', {
'contacts': contacts,
})
@login_required
@transaction.commit_on_success
def create_edit_contact(request, contact_id=None):
contact = None
if contact_id:
contact = get_object_or_404(Contact, pk=contact_id)
if request.method == 'POST':
form = ContactForm(request.POST, instance=contact)
if form.is_valid():
form.save()
messages.info(request, 'Contact saved successfully')
return HttpResponseRedirect(reverse('list-contacts'))
else:
form = ContactForm(instance=contact)
return render(request, 'groups/contacts/create_edit.html', {
'form': form,
'contact': contact,
})
@login_required
@transaction.commit_on_success
def delete_contact(request, contact_id):
contact = get_object_or_404(Contact, pk=contact_id)
if request.method == 'POST':
contact.delete()
messages.info(request, 'Contact successfully deleted')
return HttpResponseRedirect(reverse('list-contacts'))
return render(request, 'groups/contacts/delete.html', {
'contact': contact,
})
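# Editor's sketch, not the app's actual urls.py: a URLconf supplying the
# names these views reverse() ('list-groups', 'list-contacts'); the group
# and contact edit/delete routes follow the same pattern.
#
# from django.conf.urls import url
# from groups import views
#
# urlpatterns = [
#     url(r'^groups/$', views.list_groups, name='list-groups'),
#     url(r'^groups/(?P<group_id>\d+)/delete/$', views.delete_group,
#         name='delete-group'),
#     url(r'^contacts/$', views.list_contacts, name='list-contacts'),
# ]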
|
caktus/rapidsms-groups
|
groups/views.py
|
Python
|
bsd-3-clause
| 3,191
|
def extractQuillofkarnikaWordpressCom(item):
'''
Parser for 'quillofkarnika.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Top Giants: Rebirth of the Black-Bellied Wife', 'Top Giants: Rebirth of the Black-Bellied Wife', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
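# Editor's sketch, illustrative only: the minimal shape of a feed item this
# parser expects.
#
# item = {'title': 'PRC Chapter 12', 'tags': ['PRC']}
# extractQuillofkarnikaWordpressCom(item)
# # -> a release message built via buildReleaseMessageWithType, or
# #    None (unparseable/preview title) / False (no matching tag)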
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractQuillofkarnikaWordpressCom.py
|
Python
|
bsd-3-clause
| 708
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask_script import Manager, Shell, Server
from flask_script.commands import Clean, ShowUrls
from flask_migrate import MigrateCommand
from inspectors.app import create_app
from inspectors.settings import DevConfig, ProdConfig
from inspectors.database import db
app = create_app()
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
manager = Manager(app)
def _make_context():
"""Return context dict for a shell session so you can access
    app and db.
"""
return {'app': app, 'db': db}
@manager.command
def test():
"""Run the tests."""
import pytest
exit_code = pytest.main([TEST_PATH, '--verbose'])
return exit_code
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
manager.add_command("urls", ShowUrls())
manager.add_command("clean", Clean())
if __name__ == '__main__':
manager.run()
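# Editor's note, illustrative invocations of the commands registered above:
#
#   python manage.py server   # run the Flask dev server
#   python manage.py shell    # shell preloaded with {'app': app, 'db': db}
#   python manage.py test     # run pytest against ./tests
#   python manage.py db       # flask-migrate subcommands
#   python manage.py urls     # list registered routes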
|
codeforamerica/mdc-inspectors
|
manage.py
|
Python
|
bsd-3-clause
| 1,026
|
from __future__ import division, print_function, absolute_import
from io import StringIO
import tempfile
import numpy as np
from numpy.testing import assert_equal, \
assert_array_almost_equal_nulp
from scipy.sparse import coo_matrix, csc_matrix, rand
from scipy.io import hb_read, hb_write
SIMPLE = """\
No Title |No Key
9 4 1 4
RUA 100 100 10 0
(26I3) (26I3) (3E23.15)
1 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3
3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3
3 3 3 3 3 3 3 4 4 4 6 6 6 6 6 6 6 6 6 6 6 8 9 9 9 9
9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 11
37 71 89 18 30 45 70 19 25 52
2.971243799687726e-01 3.662366682877375e-01 4.786962174699534e-01
6.490068647991184e-01 6.617490424831662e-02 8.870370343191623e-01
4.196478590163001e-01 5.649603072111251e-01 9.934423887087086e-01
6.912334991524289e-01
"""
SIMPLE_MATRIX = coo_matrix(
((0.297124379969, 0.366236668288, 0.47869621747, 0.649006864799,
0.0661749042483, 0.887037034319, 0.419647859016,
0.564960307211, 0.993442388709, 0.691233499152,),
(np.array([[36, 70, 88, 17, 29, 44, 69, 18, 24, 51],
[0, 4, 58, 61, 61, 72, 72, 73, 99, 99]]))))
def assert_csc_almost_equal(r, l):
r = csc_matrix(r)
l = csc_matrix(l)
assert_equal(r.indptr, l.indptr)
assert_equal(r.indices, l.indices)
assert_array_almost_equal_nulp(r.data, l.data, 10000)
class TestHBReader(object):
def test_simple(self):
m = hb_read(StringIO(SIMPLE))
assert_csc_almost_equal(m, SIMPLE_MATRIX)
class TestHBReadWrite(object):
def check_save_load(self, value):
with tempfile.NamedTemporaryFile(mode='w+t') as file:
hb_write(file, value)
file.file.seek(0)
value_loaded = hb_read(file)
assert_csc_almost_equal(value, value_loaded)
def test_simple(self):
random_matrix = rand(10, 100, 0.1)
for matrix_format in ('coo', 'csc', 'csr', 'bsr', 'dia', 'dok', 'lil'):
matrix = random_matrix.asformat(matrix_format, copy=False)
self.check_save_load(matrix)
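# Editor's sketch, a standalone mirror of check_save_load above: a
# Harwell-Boeing write/read round trip.
#
# import tempfile
# from scipy.io import hb_read, hb_write
# from scipy.sparse import rand
#
# m = rand(10, 100, 0.1).tocsc()
# with tempfile.NamedTemporaryFile(mode='w+t') as fp:
#     hb_write(fp, m)
#     fp.file.seek(0)
#     m_roundtripped = hb_read(fp)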
|
arokem/scipy
|
scipy/io/harwell_boeing/tests/test_hb.py
|
Python
|
bsd-3-clause
| 2,366
|
import json
from os import linesep
from urllib2 import Request, urlopen
from string import capwords
from django.conf import settings
from django.core.mail import send_mail
from django.contrib.auth.decorators import login_required
from django.contrib.messages.views import SuccessMessageMixin
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.http import urlencode
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView
from django.core.urlresolvers import reverse, reverse_lazy
# Vocabulary Basic Views
from cvservices.models import ControlledVocabularyRequest, Unit
class DefaultVocabularyListView(ListView):
vocabulary = None
vocabulary_verbose = None
vocabulary_def = None
def __init__(self, **kwargs):
self.vocabulary = kwargs['vocabulary']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
self.vocabulary_def = kwargs['vocabulary_def']
super(DefaultVocabularyListView, self).__init__(**kwargs)
def get_context_data(self, **kwargs):
context = super(DefaultVocabularyListView, self).get_context_data(**kwargs)
context['vocabulary_verbose'] = self.vocabulary_verbose
context['create_url'] = self.vocabulary + '_form'
context['detail_url'] = self.vocabulary + '_detail'
context['vocabulary_def'] = self.vocabulary_def
context['vocabulary'] = self.vocabulary
return context
def get_queryset(self):
queryset = super(DefaultVocabularyListView, self).get_queryset()
queryset = queryset.filter(vocabulary_status=self.model.CURRENT)
return queryset
class DefaultVocabularyDetailView(DetailView):
vocabulary = None
vocabulary_verbose = None
exclude = ['name', 'definition', 'vocabulary_id', 'controlledvocabulary_ptr', 'vocabulary_status', 'previous_version']
slug_field = 'term'
def __init__(self, **kwargs):
super(DefaultVocabularyDetailView, self).__init__(**kwargs)
self.vocabulary = kwargs['vocabulary']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
def get_context_data(self, **kwargs):
context = super(DefaultVocabularyDetailView, self).get_context_data(**kwargs)
context['fields'] = tuple((capwords(field.verbose_name), field.value_to_string(self.get_object())) for field in self.model._meta.fields if field.name not in self.exclude)
context['vocabulary_verbose'] = self.vocabulary_verbose
context['vocabulary'] = self.vocabulary
context['create_url'] = self.vocabulary + '_form'
context['detail_url'] = self.vocabulary + '_detail'
return context
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
if u'pk' in self.kwargs:
queryset = queryset.filter(pk=self.kwargs['pk'])
else:
queryset = queryset.filter(vocabulary_status=self.model.CURRENT)
return super(DefaultVocabularyDetailView, self).get_object(queryset)
# Request Basic Views
class DefaultRequestListView(ListView):
request = None
vocabulary = None
request_verbose = None
vocabulary_verbose = None
@method_decorator(login_required(login_url=reverse_lazy('login')))
def dispatch(self, *args, **kwargs):
return super(DefaultRequestListView, self).dispatch(*args, **kwargs)
def __init__(self, **kwargs):
super(DefaultRequestListView, self).__init__(**kwargs)
self.request_verbose = kwargs['request_verbose']
self.vocabulary = kwargs['vocabulary']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
self.request = kwargs['request']
self.queryset = self.get_queryset().exclude(status=self.model.ARCHIVED)
def get_context_data(self, **kwargs):
context = super(DefaultRequestListView, self).get_context_data(**kwargs)
context['request'] = self.request
context['request_verbose'] = self.request_verbose
context['vocabulary'] = self.vocabulary
context['vocabulary_verbose'] = self.vocabulary_verbose
context['update_url'] = self.vocabulary + '_update_form'
return context
class DefaultRequestUpdateView(SuccessMessageMixin, UpdateView):
request_name = None
vocabulary = None
vocabulary_model = None
request_verbose = None
accept_button = 'request_accept'
reject_button = 'request_reject'
success_message = 'The request has been updated.'
exclude = ['request_id', 'status', 'date_submitted', 'date_status_changed',
'request_for', 'original_request', 'submitter_name', 'submitter_email']
read_only = []
@method_decorator(login_required(login_url=reverse_lazy('login')))
def dispatch(self, *args, **kwargs):
return super(DefaultRequestUpdateView, self).dispatch(*args, **kwargs)
def __init__(self, **kwargs):
super(DefaultRequestUpdateView, self).__init__(**kwargs)
self.request_name = kwargs['request_name']
self.vocabulary = kwargs['vocabulary']
self.vocabulary_model = kwargs['vocabulary_model']
self.request_verbose = kwargs['request_verbose']
self.success_url = reverse(self.request_name)
self.fields = [field.name for field in self.model._meta.fields if field.name not in self.exclude]
def get_context_data(self, **kwargs):
context = super(DefaultRequestUpdateView, self).get_context_data(**kwargs)
        context['all_disabled'] = self.object.status != ControlledVocabularyRequest.PENDING
context['read_only'] = self.read_only
context['request_name'] = self.request_name
context['request_verbose'] = self.request_verbose
context['update_url'] = self.vocabulary + '_update_form'
context['vocabulary'] = self.vocabulary
context['vocabulary_detail_url'] = self.vocabulary + '_detail'
context['success_view'] = 'request_success'
return context
def post(self, request, *args, **kwargs):
object = self.model.objects.get(pk=kwargs['pk'])
request.POST._mutable = True
for field in self.read_only:
request.POST[field] = unicode(object.__getattribute__(field))
return super(DefaultRequestUpdateView, self).post(request, *args, **kwargs)
def form_valid(self, form):
email_subject = 'ODM2 Controlled Vocabularies - Submission Update'
if self.accept_button in self.request.POST:
email_message = ''.join([form.instance.submitter_name, ', your submission "',
form.instance.name, '" for the ', self.vocabulary,
' vocabulary was accepted.', linesep, linesep,
"To see an updated list of terms go to ", self.request.build_absolute_uri(reverse(self.vocabulary))])
send_mail(email_subject, email_message, settings.EMAIL_SENDER, [form.instance.submitter_email])
return self.accept_request(form)
elif self.reject_button in self.request.POST:
email_message = ''.join([form.instance.submitter_name, ', your submission "',
form.instance.name, '" for the ', self.vocabulary,
' vocabulary was rejected.'])
send_mail(email_subject, email_message, settings.EMAIL_SENDER, [form.instance.submitter_email])
return self.reject_request(form)
def accept_request(self, form):
vocabulary = self.vocabulary_model()
is_editing_term = form.instance.request_for is not None
vocabulary_fields = [term_field.name for term_field in vocabulary._meta.fields]
request_fields = [request_field.name for request_field in form.instance._meta.fields]
for field in vocabulary_fields:
if field in request_fields:
vocabulary.__setattr__(field, form.instance.__getattribute__(field))
if is_editing_term:
vocabulary.previous_version = form.instance.request_for
form.instance.request_for.vocabulary_status = self.vocabulary_model.ARCHIVED
form.instance.request_for.save()
vocabulary.vocabulary_status = self.vocabulary_model.CURRENT
vocabulary.save()
revised_request = self.save_revised_request(form, ControlledVocabularyRequest.ACCEPTED)
revised_request.request_for = vocabulary
return super(DefaultRequestUpdateView, self).form_valid(form)
def reject_request(self, form):
self.save_revised_request(form, ControlledVocabularyRequest.REJECTED)
return super(DefaultRequestUpdateView, self).form_valid(form)
def save_revised_request(self, form, status):
current_time = timezone.now()
old_instance = self.model.objects.get(pk=form.instance.pk)
old_instance.status = ControlledVocabularyRequest.ARCHIVED
old_instance.date_status_changed = current_time
old_instance.save()
form.instance.pk = None
form.instance.request_id = None
form.instance.date_status_changed = current_time
form.instance.original_request = old_instance
form.instance.status = status
form.instance.save()
return form.instance
class DefaultRequestCreateView(SuccessMessageMixin, CreateView):
request_name = None
vocabulary = None
request_verbose = None
vocabulary_model = None
vocabulary_verbose = None
recaptcha_key = settings.RECAPTCHA_KEY
success_message = 'Your request has been made successfully.'
exclude = ['request_id', 'status', 'date_submitted', 'date_status_changed', 'request_for', 'request_notes', 'original_request']
submitter_fields = ['submitter_name', 'submitter_email', 'request_reason']
def __init__(self, **kwargs):
super(DefaultRequestCreateView, self).__init__(**kwargs)
self.request_name = kwargs['request_name']
self.vocabulary = kwargs['vocabulary']
self.request_verbose = kwargs['request_verbose']
self.vocabulary_model = kwargs['vocabulary_model']
self.vocabulary_verbose = kwargs['vocabulary_verbose']
self.success_url = reverse(self.vocabulary)
self.fields = [field.name for field in self.model._meta.fields if field.name not in self.exclude]
def get_context_data(self, **kwargs):
context = super(DefaultRequestCreateView, self).get_context_data(**kwargs)
context['request_name'] = self.request_name
context['request_verbose'] = self.request_verbose
context['vocabulary_verbose'] = self.vocabulary_verbose
context['vocabulary'] = self.vocabulary
context['submitter_fields'] = self.submitter_fields
context['recaptcha_user_key'] = settings.RECAPTCHA_USER_KEY
return context
def get_initial(self):
if 'vocabulary_id' not in self.kwargs:
return {}
initial_data = {}
term = self.vocabulary_model.objects.get(pk=self.kwargs['vocabulary_id'])
fields = [concept_field.name for concept_field in term._meta.fields]
for field in fields:
initial_data[field] = term.__getattribute__(field)
return initial_data
def is_captcha_valid(self, form):
url = settings.RECAPTCHA_VERIFY_URL
captcha_response = form.data.get('g-recaptcha-response')
if not captcha_response:
form.add_error(None, 'You are not human!!')
return False
params = urlencode({
'secret': self.recaptcha_key,
'response': captcha_response,
})
request = Request(url=url, data=params, headers={
'Content-type': 'application/x-www-form-urlencoded',
'User-agent': 'reCAPTCHA Python'
})
response = urlopen(request)
return_values = json.loads(response.read())
return return_values["success"]
def form_valid(self, form):
if not self.is_captcha_valid(form):
return super(DefaultRequestCreateView, self).form_invalid(form)
if 'vocabulary_id' in self.kwargs:
form.instance.request_for_id = self.kwargs['vocabulary_id']
self.send_confirmation_email(form)
return super(DefaultRequestCreateView, self).form_valid(form)
def send_confirmation_email(self, form):
action = 'creation of a new ' if 'term' not in self.kwargs else 'update of a '
submitter_email_subject = 'ODM2 Controlled Vocabularies Submission'
submitter_email_message = ''.join(['Thank you for your submission to ODM2 Controlled Vocabularies.', linesep, linesep,
'Vocabulary: ', self.vocabulary_verbose, linesep,
'Term: ', form.cleaned_data['term'], linesep,
'Definition: ', form.cleaned_data['definition'], linesep,
'Notes: ', form.cleaned_data['note'], linesep,
'Reason given for request: ', form.cleaned_data['request_reason'],
])
admins_email_subject = 'New request for an ODM2 Controlled Vocabulary Term'
admins_email_message = ''.join(['User ', form.instance.submitter_name, ' (', form.instance.submitter_email, ')',
' made a request for the ', action, self.vocabulary_verbose, ' vocabulary term.', linesep, linesep,
'Term: ', form.cleaned_data['term'], linesep,
'Definition: ', form.cleaned_data['definition'], linesep,
'Reason given for request: ', form.cleaned_data['request_reason'], linesep, linesep,
'To review this submission go to ', self.request.build_absolute_uri(reverse('requests_list'))])
send_mail(admins_email_subject, admins_email_message, settings.EMAIL_SENDER, settings.EMAIL_RECIPIENTS)
send_mail(submitter_email_subject, submitter_email_message, settings.EMAIL_SENDER, [form.instance.submitter_email])
class UnitsListView(ListView):
model = Unit
template_name = 'cvinterface/units/list.html'
class UnitsDetailView(DetailView):
model = Unit
template_name = ''
exclude = ['unit_id']
slug_field = 'term'
|
ODM2/ODM2ControlledVocabularies
|
src/odm2cvs/cvinterface/views/base_views.py
|
Python
|
bsd-3-clause
| 14,543
|
import gzip
import json
import os
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.paginator import EmptyPage
from django.core.paginator import PageNotAnInteger
from django.core.paginator import Paginator
from django.db.models import Avg
from django.db.models import Count
from django.db.models import Max
from django.db.models import Min
from django.db.models import Q
from django.db.models import Sum
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.shortcuts import render
from django.template import RequestContext
from django.utils.text import slugify
from django.views.generic import DeleteView
from individuals.forms import IndividualForm, ComparisonForm, GroupForm, BrowserForm
from individuals.models import Individual, Group
# PopulateMongoVariants and Find_Medical_Conditions_and_Medicines are assumed to
# live in individuals.tasks as well, since they are invoked further below.
from individuals.tasks import (AnnotateVariants, Find_Medical_Conditions_and_Medicines,
                               PopulateMongoVariants, PopulateVariants, VerifyVCF)
from variants.models import Variant
def response_mimetype(request):
if "application/json" in request.META['HTTP_ACCEPT']:
return "application/json"
else:
return "text/plain"
class JSONResponse(HttpResponse):
"""JSON response class."""
    def __init__(self, obj='', json_opts=None, mimetype="application/json", *args, **kwargs):
        # Avoid a shared mutable default for json_opts.
        content = json.dumps(obj, **(json_opts or {}))
        super(JSONResponse, self).__init__(content, mimetype, *args, **kwargs)
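# A minimal usage sketch (the payload shape mirrors what create() below returns):
#
#   data = {'files': [{'name': 'sample.vcf', 'size': 1024}]}
#   response = JSONResponse(data, mimetype=response_mimetype(request))
#   response['Content-Disposition'] = 'inline; filename=files.json'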
def create(request):
if request.method == 'POST':
form = IndividualForm(request.POST, request.FILES)
if form.is_valid():
if request.user.is_authenticated:
individual = Individual.objects.create(user=request.user, status='new')
else:
individual = Individual.objects.create(user=None, status='new')
individual.vcf_file= request.FILES.get('file')
print('file')
print(request.FILES.get('file'))
filename = individual.vcf_file.name.split('.')
new_filename = []
for tag in filename:
new_filename.append(slugify(tag))
individual.vcf_file.name = ".".join(new_filename)
print('filename ', filename)
#get name from inside vcf file
individual.name= str(os.path.splitext(individual.vcf_file.name)[0]).replace('.vcf','').replace('.gz','').replace('.rar','').replace('.zip','').replace('._',' ').replace('.',' ')
# individual.shared_with_groups = form.cleaned_data['shared_with_groups']
individual.shared_with_groups.set(form.cleaned_data['shared_with_groups'])
individual.save()
f = individual.vcf_file
#fix permissions
#os.chmod("%s/genomes/%s/" % (settings.BASE_DIR, individual.user), 0777)
#if request.user.is_authenticated:
# os.chmod("%s/genomes/%s/%s" % (settings.BASE_DIR, slugify(individual.user), individual.id), 0o777)
#else:
# os.chmod("%s/genomes/public/%s" % (settings.BASE_DIR, individual.id), 0o777)
# AnnotateVariants.delay(individual.id)
# VerifyVCF.delay(individual.id)
data = {'files': [{'deleteType': 'DELETE', 'name': individual.name, 'url': '', 'thumbnailUrl': '', 'type': 'image/png', 'deleteUrl': '', 'size': f.size}]}
response = JSONResponse(data, mimetype=response_mimetype(request))
response['Content-Disposition'] = 'inline; filename=files.json'
return response
else:
print(form.errors)
else:
form = IndividualForm()
return render(request, 'individuals/create.html', {'form':form})
@login_required
def edit(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
if request.method == 'POST':
form = IndividualForm(request.POST, instance=individual)
if form.is_valid():
form.save()
return redirect('dashboard')
# form = IndividualForm(request.POST, request.FILES)
# if form.is_valid():
# individual = form.save(commit=False)
# individual.user = request.user
# individual.save()
# return redirect('dashboard')
else:
form = IndividualForm(instance=individual)
return render(request, 'individuals/individual_form.html', {'form':form})
class IndividualDeleteView(DeleteView):
model = Individual
def delete(self, request, *args, **kwargs):
"""
This does not actually delete the file, only the database record. But
that is easy to implement.
"""
self.object = self.get_object()
individual_id = self.object.id
if self.object.user:
username = self.object.user.username
else:
username = 'public'
#delete files
if self.object.vcf_file:
self.object.vcf_file.delete()
# if self.object.strs_file:
# self.object.strs_file.delete()
# if self.object.cnvs_file:
# self.object.cnvs_file.delete()
os.system('rm -rf %s/genomes/%s/%s' % (settings.BASE_DIR, username, individual_id))
self.object.delete()
# response = JSONResponse(True, {}, response_mimetype(self.request))
# response['Content-Disposition'] = 'inline; filename=files.json'
# return response
        messages.add_message(request, messages.INFO, "Individual deleted successfully!")
        return redirect('individuals_list')
def view(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
variant_list = Variant.objects.filter(individual=individual)
# snpeff = SnpeffAnnotation.objects.filter(individual=individual)
individual.n_variants = variant_list.count()
individual.novel_variants = variant_list.filter(variant_id = '.').count()
individual.summary = []
#get calculated values from database
summary_item = {
'type': 'Total SNVs',
'total': variant_list.values('genotype').count(),
'discrete': variant_list.values('genotype').annotate(total=Count('genotype'))
}
individual.summary.append(summary_item)
summary_item = {
'type': 'Total Gene-associated SNVs',
'total': variant_list.values('gene').exclude(gene="").count(),
'discrete': variant_list.exclude(gene="").values('genotype').annotate(total=Count('genotype'))
}
individual.summary.append(summary_item)
individual.snp_eff = variant_list.values('snpeff_effect').annotate(Count('snpeff_effect')).order_by('snpeff_effect')
# print 'individual.snp_eff', individual.snp_eff
# variant_list.values('snpeff__effect').annotate(Count('snpeff__effect')).order_by('snpeff__effect')
#
individual.functional_class = variant_list.values('snpeff_func_class').annotate(Count('snpeff_func_class')).order_by('snpeff_func_class')
individual.impact_variants = variant_list.values('snpeff_impact').annotate(Count('snpeff_impact')).order_by('snpeff_impact')
individual.filter_variants = variant_list.values('filter').annotate(Count('filter')).order_by('filter')
individual.quality = variant_list.aggregate(Avg('qual'), Max('qual'), Min('qual'))
individual.read_depth = variant_list.aggregate(Avg('read_depth'), Max('read_depth'), Min('read_depth'))
individual.clinvar_clnsig = variant_list.values('clinvar_clnsig').annotate(total=Count('clinvar_clnsig'))
individual.chromossome = variant_list.values('chr').annotate(total=Count('chr')).order_by('chr')
# variants_with_snpid = variant_list.values('variant_id').exclude(variant_id=".")
#print variants_with_snpid
# fields = Variant._meta.get_all_field_names()
    paginator = Paginator(variant_list, 25)  # Show 25 variants per page
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
try:
variants = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
variants = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
variants = paginator.page(paginator.num_pages)
#'fields':fields
return render(request, 'individuals/view.html', {'individual': individual, 'variants':variants})
@login_required
def browse(request, individual_id):
query_string = request.META['QUERY_STRING']
individual = get_object_or_404(Individual, pk=individual_id)
query = {}
# DEFAULT_SORT = 'pk'
# sort_key = request.GET.get('sort', DEFAULT_SORT)
# tags = ['genotype', 'snpeffannotation__effect']#, 'func_class', 'impact', 'cln_omim', 'chr'
# for tag in tags:
# criteria = request.GET.get(tag, '')
# if criteria:
# query[tag] = criteria
if request.method == 'GET':
form = BrowserForm(request.GET)
if form.is_valid():
print('form is valid')
#chr
chr = request.GET.get('chr', '')
if chr != '':
query['chr'] = chr
#pos
pos = request.GET.get('pos', '')
if pos != '':
query['pos'] = pos
effect = request.GET.get('effect', '')
if effect != '':
print('effect', effect)
query['snpeff_effect'] = effect
#snp_id
# snp_id = request.GET.get('snp_id', '')
# if snp_id != '':
# query['variant_id'] = snp_id
# snp_list = request.GET.get('snp_list', '')
# snp_list = snp_list.split('\r\n')
# if snp_list[0] != u'':
# query['variant_id__in'] = snp_list
# snp_eff = request.GET.getlist('effect')
# if len(snp_eff) > 0:
# query['snp_eff__in'] = snp_eff
# func_class = request.GET.getlist('func_class')
# if len(func_class) > 0:
# query['snp_eff_functional_class__in'] = func_class
# gene = request.GET.get('gene', '')
# if gene != '':
# query['gene_name'] = gene
# gene_list = request.GET.get('gene_list', '')
# gene_list = gene_list.split('\r\n')
# if gene_list[0] != u'':
# query['gene_name__in'] = gene_list
# cln = request.GET.get('cln_omim', '')
# print 'clnomim', cln
# if cln == 'on':
# query['cln_omim'] != ''
variants = Variant.objects.filter(individual=individual, **query)
# snpeff_annotations = SnpeffAnnotation.objects.filter(variant__in=variants)
# #b.entry_set.filter(headline__contains='Lennon')
# print 'snpeff_annotations', len(snpeff_annotations)
# for variant in variants:
# print variant.entry_set.all()
# variant.snpeff=
else:
form = BrowserForm(request.GET)
variants = Variant.objects.filter(individual=individual, **query)
#Pagination
    paginator = Paginator(variants, 25)  # Show 25 variants per page
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
try:
variants = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
variants = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
variants = paginator.page(paginator.num_pages)
return render(request, 'variants/variants.html', {'individual': individual, 'variants':variants, 'form':form, 'query_string':query_string})
@login_required
def list(request):
if request.method == 'POST':
individuals = request.POST.getlist('individuals')
print(individuals)
individuals = [int(x) for x in individuals]
print(individuals)
if request.POST['selectionField'] == "Show":
for individual_id in individuals:
individual = get_object_or_404(Individual, pk=individual_id)
individual.is_featured = True
individual.save()
if request.POST['selectionField'] == "Hide":
for individual_id in individuals:
individual = get_object_or_404(Individual, pk=individual_id)
individual.is_featured = False
individual.save()
if request.POST['selectionField'] == "Delete":
for individual_id in individuals:
individual = get_object_or_404(Individual, pk=individual_id)
individual_id = individual.id
username = individual.user.username
#delete files
if individual.vcf_file:
individual.vcf_file.delete()
# if individual.strs_file:
# individual.strs_file.delete()
# if individual.cnvs_file:
# individual.cnvs_file.delete()
os.system('rm -rf %s/genomes/%s/%s' % (settings.BASE_DIR, username, individual_id))
individual.delete()
#os.system('rm -rf mendelmd14/site_media/media/genomes/%s/%s' % (username, individual_id))
if request.POST['selectionField'] == "Populate":
for individual_id in individuals:
individual = get_object_or_404(Individual, pk=individual_id)
PopulateVariants.delay(individual.id)
if request.POST['selectionField'] == "Annotate":
for individual_id in individuals:
individual = get_object_or_404(Individual, pk=individual_id)
AnnotateVariants.delay(individual.id)
if request.POST['selectionField'] == "Find_Medical_Conditions_and_Medicines":
for individual_id in individuals:
individual = get_object_or_404(Individual, pk=individual_id)
Find_Medical_Conditions_and_Medicines.delay(individual.id)
args = []
# groups = Groups.objects.filter(user=request.user, shared_with_users=).order_by("-id")
args.append(Q(user=request.user) | Q(shared_with_users=request.user) | Q(shared_with_groups__members=request.user))
if request.user.is_staff:
individuals = Individual.objects.all()
else:
individuals = Individual.objects.filter(*args).order_by("-id")
ind_featured = Individual.objects.filter(is_featured= True).order_by("id")
# paginator = Paginator(individuals, 25) # Show 25 contacts per page
# try:
# page = int(request.GET.get('page', '1'))
# except ValueError:
# page = 1
# try:
# individuals = paginator.page(page)
# except PageNotAnInteger:
# # If page is not an integer, deliver first page.
# individuals = paginator.page(1)
# except EmptyPage:
# # If page is out of range (e.g. 9999), deliver last page of results.
# individuals = paginator.page(paginator.num_pages)
groups = Group.objects.all()
# individuals = Individual.objects.annotate(number_of_variants=Count('variant'))
return render(request, 'individuals/list.html', {'individuals': individuals, 'groups':groups, 'ind_featured':ind_featured})
@login_required
def annotate(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
individual.status = 'new'
individual.n_lines = 0
VerifyVCF.delay(individual.id)
individual.save()
messages.add_message(request, messages.INFO, "Your individual is being annotated.")
return redirect('dashboard')
@login_required
def populate(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
PopulateVariants.delay(individual.id)
messages.add_message(request, messages.INFO, "Your individual is being populated.")
return redirect('dashboard')
@login_required
def populate_mongo(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
PopulateMongoVariants.delay(individual.id)
    messages.add_message(request, messages.INFO, "Your individual is being inserted into MongoDB.")
return redirect('individuals_list')
def download(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
filepath = os.path.dirname(str(individual.vcf_file.name))
filename = os.path.basename(str(individual.vcf_file.name))
path = ''
# os.chmod("%s/genomes/%s/%s" % (settings.MEDIA_ROOT, individual.user, individual.id), 0777)
# if filename.endswith('vcf.zip'):
# basename = filename.split('.vcf.zip')[0]
# elif filename.endswith('.zip'):
# basename = filename.split('.zip')[0]
# else:
# basename = filename.split('.vcf')[0]
#print basename
#print path
#print filepath
fullpath = '%s/%s' % (filepath, filename)
    if filename.endswith('.gz'):
        vcffile = gzip.open(fullpath, 'rb')
    else:
        vcffile = open(fullpath, 'rb')
content = vcffile.read()
vcffile.close()
response = HttpResponse(content, content_type='text/plain')
response['Content-Disposition'] = 'attachment; filename=%s' % filename
    # Use len(content): for .gz uploads the decompressed content differs
    # from the on-disk file size.
    response['Content-Length'] = len(content)
return response
def download_annotated(request, individual_id):
individual = get_object_or_404(Individual, pk=individual_id)
filepath = os.path.dirname(str(individual.vcf_file.name))
filename = os.path.basename(str(individual.vcf_file.name))
# path = settings.MEDIA_ROOT
# if filename.endswith('vcf.zip'):
# basename = filename.split('.vcf.zip')[0]
# else:
basename = filename.split('.vcf')[0]
fullpath = '%s/annotation.final.vcf.zip' % (filepath)
vcffile = open(fullpath, 'rb')
response = HttpResponse(vcffile, content_type='application/x-zip-compressed')
# # response['Content-Encoding'] = 'gzip'
response['Content-Disposition'] = 'attachment; filename=%s.annotated.mendelmd.vcf.zip' % basename
response['Content-Length'] = os.path.getsize(fullpath)
return response
@login_required
def create_group(request):
if request.method == 'POST':
form = GroupForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('individuals_list')
else:
form = GroupForm()
return render(request, 'groups/create_group.html', {'form': form})
@login_required
def view_group(request, group_id):
group = get_object_or_404(Group, pk=group_id)
return render(request, 'groups/view_group.html', {'group': group})
class GroupDeleteView(DeleteView):
model = Group
def delete(self, request, *args, **kwargs):
"""
This does not actually delete the file, only the database record. But
that is easy to implement.
"""
self.object = self.get_object()
#username = self.object.user.username
self.object.delete()
        messages.add_message(request, messages.INFO, "Group deleted successfully!")
return redirect('individuals_list')
def comparison(request):
query = {}
summary = {}
variants = []
query_string = request.META['QUERY_STRING']
if request.method == 'GET':
form = ComparisonForm(request.user, request.GET, request.FILES)
if form.is_valid():
individual_one_id = request.GET.get('individual_one', '')
individual_two_id = request.GET.get('individual_two', '')
read_depth = request.GET.get('read_depth', '')
if read_depth != '':
query['read_depth__gte'] = float(read_depth)
if individual_one_id != '' and individual_two_id != '':
variants_ind_one = Variant.objects.filter(individual__id=individual_one_id, **query).values('chr', 'pos', 'genotype')
variants_ind_two = Variant.objects.filter(individual__id=individual_two_id, **query).values('chr', 'pos', 'genotype')
print('Got Variants from Both!')
genotypes_in_common = 0
genotypes_not_in_common = 0
ind_one = {}
ind_two = {}
summary['variants_ind_one'] = variants_ind_one.count()
for variant in variants_ind_one:
id = '%s-%s' % (variant['chr'], variant['pos'])
if id in ind_one:
ind_one[id].append(variant['genotype'])
else:
ind_one[id] = []
ind_one[id].append(variant['genotype'])
summary['variants_ind_two'] = variants_ind_two.count()
for variant in variants_ind_two:
id = '%s-%s' % (variant['chr'], variant['pos'])
if id in ind_two:
ind_two[id].append(variant['genotype'])
else:
ind_two[id] = []
ind_two[id].append(variant['genotype'])
print('Finished creating indexes')
for pos in ind_one:
if pos in ind_two:
for genotype in ind_one[pos]:
if genotype in ind_two[pos]:
genotypes_in_common += 1
# variant ={}
# variant['chr'] = item.split('-')[0]
# variant['pos'] = item.split('-')[1]
# variant['genotype'] = ind_two[item]
# variants.append(variant)
else:
genotypes_not_in_common += 1
#
print('genotypes in common: %s' % genotypes_in_common)
summary['genotypes_in_common'] = genotypes_in_common
summary['genotypes_not_in_common'] = genotypes_not_in_common
summary['total_variants'] = genotypes_in_common + genotypes_not_in_common
summary['percent_ind_one'] = round((float(genotypes_in_common)/summary['variants_ind_one'])*100, 2)
summary['percent_ind_two'] = round((float(genotypes_in_common)/summary['variants_ind_two'])*100, 2)
print(summary)
else:
form = ComparisonForm(request.user)
return render(request, 'individuals/comparison.html', {'form':form, 'summary':summary, 'query_string':query_string})
|
raonyguimaraes/mendelmd
|
individuals/views.py
|
Python
|
bsd-3-clause
| 23,385
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Topological sort """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
__all__ = ["DagError", "assert_dag", "topological_sort"]
class DagError(Exception):
""" Dag Exception """
def __init__(self, node, parent, result):
""" DagError Init """
Exception.__init__(self)
self.node = node
self.parent = parent
self.result = result
def __str__(self):
""" String Representation """
return "Circular dependency error: Starting from '%s', node '%s' depends on '%s', complete path %s" \
% (self.node, self.parent, self.node, self.result)
def assert_dag(data):
"""
Check if data is a dag
>>> assert_dag({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'g', 'c' )})
>>> assert_dag({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'e', 'c' )})
Traceback (most recent call last):
...
DagError: Circular dependency error: Starting from 'e', node 'e' depends on 'e', complete path []
"""
for node, _ in data.items():
_topological_sort(data, node, node, True)
def topological_sort(data, heads):
"""
    Topological sort.
    data should be a dictionary describing a dag, like this:
    {
        'a' : ( 'b', 'c', 'd' ),
        'b' : ( 'e', 'c' )
    }
    heads are the tops of the dag; the result will include all specified heads and their deps.
    This function returns a list. The head will be the last element.
    Warning: this sort always finds a solution, even if data is not a dag!
    If a depends on b and b depends on a, the solution is [ a, b ].
    This is fine in our case but could be a problem in other situations.
    (If in doubt, try using the result and see whether it works!)
>>> topological_sort({
... 'head' : ['telepathe', 'opennao-tools', 'naoqi'],
... 'toolchain' : [],
... 'python-pc' : ['toolchain'],
... 'telepathe' : ['naoqi'],
... 'qt-pc' : ['toolchain'],
... 'opennao-tools': ['toolchain'],
... 'naoqi' : ['qt-pc', 'python-pc', 'streamer', 'toolchain']}, 'head' )
['toolchain', 'qt-pc', 'python-pc', 'streamer', 'naoqi', 'telepathe', 'opennao-tools', 'head']
>>> topological_sort({
... 'a' : ( 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' )}, 'a')
['e', 'c', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'e', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'e' : ( 'g', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'b' ),
... 'b' : ( 'a' ),
... }, 'a')
['b', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'q' : ( 'u', 'i' ),
... 'i' : ( 'y', 'o' ),
... 'e' : ( 'g', 'c' )}, 'a')
['g', 'c', 'e', 'b', 'd', 'a']
>>> topological_sort({
... 'a' : ( 'g', 'b', 'c', 'd' ),
... 'b' : ( 'e', 'c' ),
... 'q' : ( 'u', 'i' ),
... 'i' : ( 'y', 'o' ),
... 'e' : ( 'g', 'c' )}, [ 'a', 'q' ])
['g', 'c', 'e', 'b', 'd', 'a', 'u', 'y', 'o', 'i', 'q']
"""
    if isinstance(heads, list):
        data = dict(data)  # copy so the caller's mapping is not mutated
        data['internalfakehead'] = heads
        head = 'internalfakehead'
result = _topological_sort(data, head, head)
return [x for x in result if x != 'internalfakehead']
head = heads
return _topological_sort(data, head, head)
def _topological_sort(data, head, top_node, raise_exception=False, result=None, visited=None):
""" Internal function """
if not result:
result = []
if not visited:
visited = []
deps = data.get(head, list())
if head in visited:
if head == top_node and raise_exception:
raise DagError(head, head, result)
return result
visited.append(head)
for i in deps:
try:
result.index(i)
except ValueError:
# the item does not exist
result = _topological_sort(data, i, top_node, raise_exception, result, visited)
result.append(head)
return result
if __name__ == "__main__":
import doctest
doctest.testmod()
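# A minimal usage sketch: validate the graph with assert_dag(), then order it
# with topological_sort(). For deps = {'app': ['lib'], 'lib': ['base'], 'base': []}:
#   assert_dag(deps)               # raises DagError on a cycle
#   topological_sort(deps, 'app')  # -> ['base', 'lib', 'app']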
|
aldebaran/qibuild
|
python/qisys/sort.py
|
Python
|
bsd-3-clause
| 4,651
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import copy
import logging
import time
from collections import OrderedDict
from contextlib import contextmanager
import six
from django.utils.functional import cached_property
from .client import get_es_client
logger = logging.getLogger('elasticindex')
class ElasticQuerySet(object):
def __init__(self, model_cls, body=None, **kwargs):
self.model_cls = model_cls
self.body = body or {"query": {"match_all": {}}}
self.kwargs = kwargs or {}
self.latest_total_count = None
self.latest_raw_result = None
self.query_finished = False
def __len__(self):
return len(self.result_list)
def __iter__(self):
return iter(self.result_list)
def __bool__(self):
return bool(self.result_list)
def __getitem__(self, k):
"""
Retrieves an item or slice from the set of results.
"""
if not isinstance(k, (slice,) + six.integer_types):
raise TypeError
assert ((not isinstance(k, slice) and (k >= 0)) or
(isinstance(k, slice) and (k.start is None or k.start >= 0) and
(k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
if self.query_finished:
return self.result_list[k]
if isinstance(k, slice):
qs = self
offset = 0
if k.start is not None:
offset = int(k.start)
qs = qs.offset(offset)
if k.stop is not None:
limit = int(k.stop) - offset
qs = qs.limit(limit)
return list(qs)[::k.step] if k.step else qs
qs = self.limit(1).offset(k)
return list(qs)[0]
def _clone(self):
"""
:rtype: ElasticQuerySet
"""
qs = self.__class__(
self.model_cls, copy.deepcopy(self.body),
**copy.deepcopy(self.kwargs))
return qs
@cached_property
def result_list(self):
self.query_finished = True
return list(self.get_result())
def get_result(self):
"""
        Execute the Elasticsearch search as-is.
:rtype: generator
"""
with self.log_query():
result = self.es_client.search(
index=self.model_cls.INDEX,
doc_type=self.model_cls.DOC_TYPE,
body=self.body, **self.kwargs)
self.latest_total_count = result['hits']['total']
self.latest_raw_result = result
for hit in result['hits']['hits']:
yield self.model_cls(hit)
@cached_property
def es_client(self):
"""
:rtype: Elasticsearch
"""
return get_es_client()
def get_by_id(self, id):
"""
        Fetch a single document by its Elasticsearch ID.
        :param id: Elasticsearch document id
        :return: a model_cls instance; raises model_cls.DoesNotExist if not found
"""
result = self.es_client.get(
self.model_cls.INDEX, id, doc_type=self.model_cls.DOC_TYPE)
self.latest_raw_result = result
if not result['found']:
raise self.model_cls.DoesNotExist(id)
return self.model_cls(result)
def delete_by_id(self, id, **kwargs):
"""
        Delete a single document by its Elasticsearch ID.
:param id: elasticsearch document id
"""
result = self.es_client.delete(
self.model_cls.INDEX, self.model_cls.DOC_TYPE, id, **kwargs)
self.latest_raw_result = result
return result
def all(self):
"""
:rtype: ElasticQuerySet
"""
return self._clone()
def limit(self, limit):
"""
:rtype: ElasticQuerySet
"""
o = self._clone()
if limit is None:
if 'size' in o.body:
del o.body['size']
else:
o.body['size'] = limit
return o
def offset(self, offset):
"""
:rtype: ElasticQuerySet
"""
o = self._clone()
if offset is None:
if 'from' in o.body:
del o.body['from']
else:
o.body['from'] = offset
return o
def query(self, filter_query_dict):
"""
:param filter_query_dict:
- {"match": {"product_id": 192}}
- {"match_all": {}} # default
- {"multi_match": {
"query": query_word,
"fields": [
"upc", "title^3", "description", "authors",
"publishers", "tags", "keywords"]
}}
- {"bool": {
"must": [
{"match": {"is_used": True}},
{"range": {"stock": {"gt": 0}}}
]}}
:rtype: ElasticQuerySet
"""
o = self._clone()
o.body['query'] = filter_query_dict
return o
def set_body(self, body_dict):
"""
replace query body
"""
o = self._clone()
o.body = body_dict
return o
def get(self, filter_query_dict):
"""
        Fetch a single document.
        If multiple documents match, no error is raised; one is silently returned.
"""
qs = self.query(filter_query_dict).limit(1)
if not qs:
raise self.model_cls.DoesNotExist(filter_query_dict)
return qs[0]
def count(self):
"""
        Get the number of matching documents.
"""
if self.query_finished:
return len(self.result_list)
body = self.body.copy()
if 'sort' in body:
del body['sort']
with self.log_query(label='count', body=body):
result = self.es_client.count(
index=self.model_cls.INDEX,
doc_type=self.model_cls.DOC_TYPE,
body=body, **self.kwargs
)
self.latest_raw_result = result
return result['count']
def order_by(self, order_query_list):
"""
        Attach the sort parameter.
:type order_query_list: list, dict, string
- "mz_score"
- {"mz_score": "desc"}
"""
o = self._clone()
o.body['sort'] = order_query_list
return o
@property
def log_query(self):
"""
        Context manager that logs queries.
        Setting the 'elasticsearch' or 'elasticsearch.trace' loggers to DEBUG
        level produces more detail (the full results are logged).
"""
@contextmanager
def _context(label='', body=None):
start_time = time.time()
yield
elapsed_time = time.time() - start_time
logger.debug('{}time:{}ms, body:{}'.format(
'{}: '.format(label) if label else '',
                int(elapsed_time * 1000), body or self.body))
return _context
def bulk(self, body):
return self.es_client.bulk(
body, index=self.model_cls.INDEX,
doc_type=self.model_cls.DOC_TYPE)
class ElasticDocumentManager(object):
"""
    Subclassing as ``class ElasticDocumentManager(ElasticQuerySet)`` would also
    work, but instance variables could get polluted, so this behaves like a
    class property instead, building a fresh queryset on every access.
"""
def __init__(self, model_cls, body=None, **kwargs):
self.model_cls = model_cls
self.kwargs = kwargs
def __get__(self, cls, owner):
return ElasticQuerySet(self.model_cls)
class ElasticIndexManager(object):
def __init__(self, model_cls):
self.model_cls = model_cls
@cached_property
def mappings_properties(self):
return OrderedDict(
[
(f_name, f.mapping)
for f_name, f
in self.model_cls._cached_fields().items()
])
@cached_property
def mappings(self):
"""
        A dict usable directly as the index "mappings" specification.
"""
return {
self.model_cls.DOC_TYPE: {
"properties": self.mappings_properties
}
}
def delete(self):
"""
        Delete the index.
"""
es = get_es_client()
es.indices.delete(self.model_cls.INDEX, ignore=[404, ])
@cached_property
def create_body_params(self):
body = {"mappings": self.mappings}
index_setting = getattr(self.model_cls, 'INDEX_SETTINGS', None)
if index_setting:
body["settings"] = index_setting
return body
def create(self):
"""
        Create the index.
"""
es = get_es_client()
es.indices.create(
self.model_cls.INDEX, self.create_body_params)
def exists(self):
"""
        Return whether the index exists.
"""
es = get_es_client()
return es.indices.exists(self.model_cls.INDEX)
class ElasticDocumentMeta(type):
def __new__(mcs, name, bases, attrs):
c = super(ElasticDocumentMeta, mcs).__new__(
mcs, name, bases, attrs)
c.objects = ElasticDocumentManager(c)
c.index = ElasticIndexManager(c)
return c
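# A minimal usage sketch (hypothetical document class; the INDEX/DOC_TYPE names
# are illustrative, and real models would declare fields understood by
# _cached_fields()):
#
#   class ProductDocument(six.with_metaclass(ElasticDocumentMeta, object)):
#       INDEX = 'products'
#       DOC_TYPE = 'product'
#
#   ProductDocument.objects.query({'match': {'title': 'python'}}).limit(10)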
|
ytyng/django-elasticindex
|
elasticindex/managers.py
|
Python
|
bsd-3-clause
| 9,274
|
def table_exists(curs, table_name=''):
'''
If table_name is a schema qualified table name and it exists it is returned,
else if it is not schema qualified and the table name exists in the search
path then that schema qualified table name is returned, else None.
'''
curs.execute('SELECT pgpartitioner.table_exists(%s)', (table_name,))
return curs.fetchone()
def get_column_type(curs, table_name, column_name):
'''
If column_name exists on table_name it's SQL type is returned. Else an
exception is raised.
'''
curs.execute('SELECT pgpartitioner.get_column_type(%s, %s);', (table_name, column_name))
return curs.fetchone()[0]
def get_constraint_defs(curs, table_name, fkeys=True):
'''
Returns a list of constraint definition fragments suitable for use
in SQL create table or alter table statements. fkeys are not included if
fkeys is false
'''
curs.execute('SELECT * FROM pgpartitioner.get_table_constraint_defs(%s, %s);', (table_name, fkeys))
return [res[0] for res in curs.fetchall()]
def get_index_defs(curs, table_name):
'''
Returns a list of 2-tuples consisting of each index creation def statement
for any non-primary key or unique indexes on the given table and the
index name.
'''
curs.execute('SELECT * FROM pgpartitioner.get_table_index_defs(%s);', (table_name,))
return [res[0] for res in curs.fetchall()]
def table_attributes(curs, table_name):
'''
Returns a tuple of the given table's attributes
'''
curs.execute('SELECT * FROM pgpartitioner.get_table_attributes(%s);', (table_name,))
atts = tuple([res[0] for res in curs.fetchall()])
return atts
def normalize_date(curs, date_str, fmt, units='month', diff='0 months'):
'''
Takes a valid date string in any format and formats it according to fmt.
'''
normalize_date_sql = \
'''
SELECT to_char(date_trunc(%s, %s::timestamp + %s), %s);
'''
curs.execute(normalize_date_sql, (units, date_str, diff, fmt))
return curs.fetchone()[0]
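# A minimal usage sketch (assumes an open DB-API cursor and the pgpartitioner
# schema installed):
#
#   normalize_date(curs, '2021-02-17', 'YYYY-MM-DD')
#   # -> '2021-02-01' (truncated to the start of the month)
#   normalize_date(curs, '2021-02-17', 'YYYY-MM-DD', diff='1 month')
#   # -> '2021-03-01' (shifted forward one month, then truncated)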
|
mage2k/pg_partitioner
|
pg_partitioner/sql_util.py
|
Python
|
bsd-3-clause
| 2,074
|
class A:
def foo(self):
print('A.foo()')
class B(A):
def foo(self):
print('B.foo()')
class C(A):
def foo(self):
print('C.foo()')
class D(B, C):
def foo(self):
print('D.foo()')
x = D()
print(D.__mro__) # (D, B, C, A, object)
x.foo() # D.foo()
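# A short follow-up sketch: with cooperative super() calls, the same MRO also
# drives the chain of implicit calls.
class E(B, C):
    def foo(self):
        print('E.foo()')
        super().foo()  # the next class after E in E's MRO is B

E().foo()  # E.foo(), then B.foo() (B.foo() does not call super(), so C and A are skipped)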
|
s3rvac/talks
|
2017-03-07-Introduction-to-Python/examples/23-inheritance.py
|
Python
|
bsd-3-clause
| 303
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gcloud dataflow jobs cancel command.
"""
from googlecloudsdk.api_lib.dataflow import job_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from surface import dataflow as commands
from googlecloudsdk.third_party.apitools.base.py import exceptions
class Cancel(base.Command):
"""Cancels all jobs that match the command line arguments.
"""
@staticmethod
def Args(parser):
"""Register flags for this command."""
job_utils.ArgsForJobRefs(parser, nargs='+')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: all the arguments that were provided to this command invocation.
Returns:
      None. The outcome of each cancellation attempt is reported via log
      messages in _CancelJob.
"""
for job_ref in job_utils.ExtractJobRefs(self.context, args):
self._CancelJob(job_ref)
return None
def _CancelJob(self, job_ref):
"""Cancels a job.
Args:
job_ref: resources.Resource, The reference to the job to cancel.
"""
apitools_client = self.context[commands.DATAFLOW_APITOOLS_CLIENT_KEY]
dataflow_messages = self.context[commands.DATAFLOW_MESSAGES_MODULE_KEY]
request = dataflow_messages.DataflowProjectsJobsUpdateRequest(
projectId=job_ref.projectId,
jobId=job_ref.jobId,
# We don't need to send the full job, because only the state can be
# updated, and the other fields are ignored.
job=dataflow_messages.Job(
requestedState=(dataflow_messages.Job.RequestedStateValueValuesEnum
.JOB_STATE_CANCELLED)))
try:
apitools_client.projects_jobs.Update(request)
log.status.Print('Cancelled job [{0}]'.format(job_ref.jobId))
except exceptions.HttpError as unused_error:
log.err.Print('Failed to cancel job [{0}]'.format(job_ref.jobId))
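# Invoked from the CLI with one or more job refs (per nargs='+' above), e.g.:
#   gcloud dataflow jobs cancel JOB_ID_1 JOB_ID_2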
|
flgiordano/netcash
|
+/google-cloud-sdk/lib/surface/dataflow/jobs/cancel.py
|
Python
|
bsd-3-clause
| 2,531
|
from currencies.models import Currency
def currencies(request):
currencies = Currency.objects.active()
if not request.session.get('currency'):
try:
currency = Currency.objects.get(is_default__exact=True)
except Currency.DoesNotExist:
currency = None
request.session['currency'] = currency
return {
'CURRENCIES': currencies,
'CURRENCY': request.session['currency']
}
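# A minimal registration sketch (assumed settings.py fragment; the exact
# TEMPLATES layout depends on the project):
#
#   TEMPLATES[0]['OPTIONS']['context_processors'] += [
#       'currencies.context_processors.currencies',
#   ]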
|
barseghyanartur/django-currencies
|
currencies/context_processors.py
|
Python
|
bsd-3-clause
| 450
|
"""
Using dates with timeseries models
"""
import statsmodels.api as sm
import numpy as np
import pandas
# Getting started
# ---------------
data = sm.datasets.sunspots.load()
# Right now an annual date series must be datetimes at the end of the year.
from datetime import datetime
dates = sm.tsa.datetools.dates_from_range('1700', length=len(data.endog))
# Using Pandas
# ------------
# Make a pandas Series (formerly TimeSeries) or DataFrame
endog = pandas.Series(data.endog, index=dates)
# and instantiate the model
ar_model = sm.tsa.AR(endog, freq='A')
pandas_ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
# Let's do some out-of-sample prediction
pred = pandas_ar_res.predict(start='2005', end='2015')
print(pred)
# Using explicit dates
# --------------------
ar_model = sm.tsa.AR(data.endog, dates=dates, freq='A')
ar_res = ar_model.fit(maxlag=9, method='mle', disp=-1)
pred = ar_res.predict(start='2005', end='2015')
print(pred)
# This just returns a regular array, but since the model has date information
# attached, you can get the prediction dates in a roundabout way.
print(ar_res._data.predict_dates)
# This attribute only exists if predict has been called. It holds the dates
# associated with the last call to predict.
#..TODO: should this be attached to the results instance?
|
pprett/statsmodels
|
examples/tsa/ex_dates.py
|
Python
|
bsd-3-clause
| 1,296
|
import inspect
import os
from abc import ABC, ABCMeta
from dataclasses import dataclass
from datetime import datetime, tzinfo
from types import FunctionType, WrapperDescriptorType
from typing import (
Any,
Callable,
Dict,
Generic,
Iterable,
List,
Optional,
Type,
TypeVar,
Union,
cast,
)
from uuid import UUID, uuid4
from eventsourcing.utils import get_method_name, get_topic, resolve_topic
# noinspection SpellCheckingInspection
TZINFO: tzinfo = resolve_topic(os.getenv("TZINFO_TOPIC", "datetime:timezone.utc"))
# noinspection PyTypeChecker
TAggregate = TypeVar("TAggregate", bound="Aggregate")
class MetaDomainEvent(ABCMeta):
def __new__(mcs, name: str, bases: tuple, cls_dict: dict) -> "MetaDomainEvent":
event_cls = ABCMeta.__new__(mcs, name, bases, cls_dict)
event_cls = dataclass(frozen=True)(event_cls) # type: ignore
return event_cls
def __init__(cls, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
class DomainEvent(ABC, metaclass=MetaDomainEvent):
# noinspection PyUnresolvedReferences
"""
Base class for domain events, such as aggregate :class:`AggregateEvent`
and aggregate :class:`Snapshot`.
Constructor arguments:
:param UUID originator_id: ID of originating aggregate.
:param int originator_version: version of originating aggregate.
:param datetime timestamp: date-time of the event
"""
originator_id: UUID
originator_version: int
timestamp: datetime
class AggregateEvent(DomainEvent, Generic[TAggregate]):
# noinspection PyUnresolvedReferences
"""
Base class for aggregate events. Subclasses will model
decisions made by the domain model aggregates.
Constructor arguments:
:param UUID originator_id: ID of originating aggregate.
:param int originator_version: version of originating aggregate.
:param datetime timestamp: date-time of the event
"""
def mutate(self, obj: Optional[TAggregate]) -> Optional[TAggregate]:
"""
Changes the state of the aggregate
according to domain event attributes.
"""
# Check event is next in its sequence.
# Use counting to follow the sequence.
# assert isinstance(obj, Aggregate), (type(obj), self)
assert obj is not None
next_version = obj.version + 1
if self.originator_version != next_version:
raise VersionError(self.originator_version, next_version)
if self.apply(obj) is not None: # type: ignore
raise TypeError(
f"Unexpected value returned from "
f"{type(self).apply.__qualname__}(). Values "
f"returned from 'apply' methods are discarded."
)
# Update the aggregate version.
obj.version = self.originator_version
# Update the modified time.
obj.modified_on = self.timestamp
return obj
# noinspection PyShadowingNames
def apply(self, aggregate: TAggregate) -> None:
"""
Applies the domain event to the aggregate.
"""
class AggregateCreated(AggregateEvent["Aggregate"]):
# noinspection PyUnresolvedReferences
"""
Domain event for when aggregate is created.
Constructor arguments:
:param UUID originator_id: ID of originating aggregate.
:param int originator_version: version of originating aggregate.
:param datetime timestamp: date-time of the event
:param str originator_topic: topic for the aggregate class
"""
originator_topic: str
def mutate(self, obj: Optional[TAggregate]) -> TAggregate:
"""
Constructs aggregate instance defined
by domain event object attributes.
"""
assert obj is None
# Copy the event attributes.
kwargs = self.__dict__.copy()
# Resolve originator topic.
aggregate_class: Type[TAggregate] = resolve_topic(
kwargs.pop("originator_topic")
)
# Construct and return aggregate object.
agg: TAggregate = aggregate_class.__new__(aggregate_class)
# Separate the base class keywords arguments.
base_kwargs = {
"id": kwargs.pop("originator_id"),
"version": kwargs.pop("originator_version"),
"timestamp": kwargs.pop("timestamp"),
}
# Call the base class init method.
Aggregate.__base_init__(agg, **base_kwargs)
# Call the aggregate class init method.
# noinspection PyTypeChecker
init_method = agg.__init__ # type: ignore
# Provide the id, if the init method expects it.
if aggregate_class._init_mentions_id:
kwargs["id"] = base_kwargs["id"]
# noinspection PyArgumentList
init_method(**kwargs)
return agg
class CommandMethodDecorator:
def __init__(self, arg: Union[Callable, str, Type[AggregateEvent]]):
self.is_name_inferred_from_method = False
self.given_event_cls: Optional[Type[AggregateEvent]] = None
self.event_cls_name: Optional[str] = None
self.is_property_setter = False
self.property_setter_arg_name: Optional[str] = None
self.is_decorating_a_property = False
self.decorated_property: Optional[property] = None
self.original_method: Optional[FunctionType] = None
# Initialising an instance.
if isinstance(arg, str):
# Decorator used with an explicit name.
self.initialise_from_explicit_name(event_cls_name=arg)
elif isinstance(arg, type) and issubclass(arg, AggregateEvent):
self.initialise_from_event_cls(event_cls=arg)
elif isinstance(arg, FunctionType):
# Decorator used without explicit name.
self.initialise_from_decorated_method(original_method=arg)
elif isinstance(arg, property):
method_name = arg.fset.__name__
raise TypeError(
f"@event on {method_name}() property setter requires event class name"
)
elif isinstance(arg, staticmethod):
raise TypeError(
f"{arg.__func__.__name__}() staticmethod can't be "
f"used to update aggregate state"
)
elif isinstance(arg, classmethod):
# noinspection SpellCheckingInspection
raise TypeError(
f"{arg.__func__.__name__}() classmethod can't be "
f"used to update aggregate state"
)
else:
raise TypeError(f"Unsupported usage: {type(arg)} is not a str or function")
def initialise_from_decorated_method(self, original_method: FunctionType) -> None:
self.original_method = original_method
original_method_name = original_method.__name__
if original_method_name != "__init__":
self.is_name_inferred_from_method = True
self.event_cls_name = "".join(
[s.capitalize() for s in original_method_name.split("_")]
)
_check_no_variable_params(self.original_method)
def initialise_from_event_cls(self, event_cls: Type[AggregateEvent]) -> None:
self.given_event_cls = event_cls
def initialise_from_explicit_name(self, event_cls_name: str) -> None:
if event_cls_name == "":
raise ValueError("Can't use empty string as name of event class")
self.event_cls_name = event_cls_name
def __call__(self, *args: Any, **kwargs: Any) -> Any:
# Calling an instance.
# noinspection SpellCheckingInspection
if self.original_method is None:
# Decorator doesn't yet know what method is being decorated,
# so decorator must have been specified with an explicit
# event name or class, so we're still initialising...
assert len(kwargs) == 0, "Unsupported usage"
assert len(args) == 1, "Unsupported usage"
arg = args[0]
# assert isinstance(args[0], FunctionType), args[0]
if isinstance(arg, FunctionType):
# Decorating a function.
self.original_method = arg
_check_no_variable_params(self.original_method)
elif isinstance(arg, property):
# Decorating a property.
self.is_decorating_a_property = True
self.decorated_property = arg
if arg.fset is None:
assert arg.fget is not None
method_name = arg.fget.__name__
raise TypeError(
f"@event can't decorate {method_name}() property getter"
)
assert isinstance(arg.fset, FunctionType)
self.original_method = arg.fset
assert self.original_method
setter_arg_names = list(inspect.signature(arg.fset).parameters)
assert len(setter_arg_names) == 2
self.property_setter_arg_name = setter_arg_names[1]
_check_no_variable_params(self.original_method)
else:
raise ValueError(
f"Unsupported usage: {type(arg)} is not a str or a FunctionType"
)
if self.given_event_cls:
if self.given_event_cls in original_methods:
name = self.given_event_cls.__name__
raise TypeError(
f"{name} event class used in more than one decorator"
)
# Set decorated event apply() method on given event class.
if "apply" in self.given_event_cls.__dict__:
name = self.given_event_cls.__name__
raise TypeError(f"{name} event class has unexpected apply() method")
# self.given_event_cls.apply = DecoratedEvent.apply # type: ignore
setattr( # noqa: B010
self.given_event_cls, "apply", DecoratedEvent.apply
)
# Register the decorated method under the given event class.
original_methods[self.given_event_cls] = self.original_method
return self
else:
# Initialised decorator was called directly, presumably by
# a decorating property that has this decorator as its fset.
# So trigger an event.
assert self.is_property_setter
assert self.property_setter_arg_name
assert len(args) == 2
assert len(kwargs) == 0
assert isinstance(args[0], Aggregate)
aggregate_instance = args[0]
bound = BoundCommandMethodDecorator(self, aggregate_instance)
property_setter_arg_value = args[1]
kwargs = {self.property_setter_arg_name: property_setter_arg_value}
bound.trigger(**kwargs)
def __get__(
self, instance: Optional[TAggregate], owner: "MetaAggregate"
) -> Union["BoundCommandMethodDecorator", "UnboundCommandMethodDecorator"]:
if self.is_decorating_a_property:
assert self.decorated_property
return self.decorated_property.__get__(instance, owner)
else:
if instance is None:
return UnboundCommandMethodDecorator(self)
else:
return BoundCommandMethodDecorator(self, instance)
def __set__(self, instance: TAggregate, value: Any) -> None:
assert self.is_decorating_a_property
# Set decorated property.
b = BoundCommandMethodDecorator(self, instance)
assert self.property_setter_arg_name
kwargs = {self.property_setter_arg_name: value}
b.trigger(**kwargs)
def event(
arg: Optional[Union[FunctionType, str, Type[AggregateEvent]]] = None
) -> CommandMethodDecorator:
"""
Can be used to decorate an aggregate method so that when the
method is called an event is triggered. The body of the method
will be used to apply the event to the aggregate, both when the
event is triggered and when the aggregate is reconstructed from
stored events.
.. code-block:: python
class MyAggregate(Aggregate):
@event("NameChanged")
def set_name(self, name: str):
self.name = name
...is equivalent to...
.. code-block:: python
class MyAggregate(Aggregate):
def set_name(self, name: str):
self.trigger_event(self.NameChanged, name=name)
class NameChanged(Aggregate.Event):
name: str
def apply(self, aggregate):
aggregate.name = self.name
In the example above, the event "NameChanged" is defined automatically
by inspecting the signature of the `set_name()` method. If it is
preferred to declare the event class explicitly, for example to define
upcasting of old events, the event class itself can be mentioned in the
event decorator rather than just providing the name of the event as a
string.
.. code-block:: python
class MyAggregate(Aggregate):
class NameChanged(Aggregate.Event):
name: str
@event(NameChanged)
def set_name(self, name: str):
                self.name = name
"""
if arg is None:
return event # type: ignore
else:
return CommandMethodDecorator(arg)
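# `triggers` is simply an alias for the @event decorator.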
triggers = event
class UnboundCommandMethodDecorator:
"""
    Wraps a CommandMethodDecorator instance when the attribute is accessed
    on an aggregate class.
"""
# noinspection PyShadowingNames
def __init__(self, event_decorator: CommandMethodDecorator):
"""
:param CommandMethodDecorator event_decorator:
"""
self.event_decorator = event_decorator
assert event_decorator.original_method
self.__qualname__ = event_decorator.original_method.__qualname__
self.__name__ = event_decorator.original_method.__name__
class BoundCommandMethodDecorator:
"""
    Wraps a CommandMethodDecorator instance when the attribute is accessed
    on an aggregate instance, so that the decorated method can be triggered.
"""
# noinspection PyShadowingNames
def __init__(self, event_decorator: CommandMethodDecorator, aggregate: TAggregate):
"""
:param CommandMethodDecorator event_decorator:
:param Aggregate aggregate:
"""
assert event_decorator.original_method
self.event_decorator = event_decorator
self.__qualname__ = event_decorator.original_method.__qualname__
self.__name__ = event_decorator.original_method.__name__
self.aggregate = aggregate
def trigger(self, *args: Any, **kwargs: Any) -> None:
assert isinstance(self.event_decorator, CommandMethodDecorator) # for PyCharm
assert self.event_decorator.original_method
kwargs = _coerce_args_to_kwargs(
self.event_decorator.original_method, args, kwargs
)
if self.event_decorator.given_event_cls:
event_cls = self.event_decorator.given_event_cls
else:
assert self.event_decorator.event_cls_name
event_cls = getattr(self.aggregate, self.event_decorator.event_cls_name)
self.aggregate.trigger_event(event_cls, **kwargs)
def __call__(self, *args: Any, **kwargs: Any) -> None:
self.trigger(*args, **kwargs)
original_methods: Dict[MetaDomainEvent, FunctionType] = {}
class DecoratedEvent(AggregateEvent):
# noinspection PyShadowingNames
def apply(self, aggregate: TAggregate) -> None:
"""
Applies event to aggregate by calling
method decorated by @event.
"""
event_obj_dict = dict(self.__dict__)
event_obj_dict.pop("originator_id")
event_obj_dict.pop("originator_version")
event_obj_dict.pop("timestamp")
original_method = original_methods[type(self)]
method_signature = inspect.signature(original_method)
# args = []
# for name, param in method_signature.parameters.items():
for name in method_signature.parameters:
if name == "self":
continue
# if param.kind == param.POSITIONAL_ONLY:
# args.append(event_obj_dict.pop(name))
# original_method(aggregate, *args, **event_obj_dict)
returned_value = original_method(aggregate, **event_obj_dict)
if returned_value is not None:
raise TypeError(
f"Unexpected value returned from "
f"{original_method.__qualname__}(). Values "
f"returned from 'apply' methods are discarded."
)
TDomainEvent = TypeVar("TDomainEvent", bound=DomainEvent)
TAggregateEvent = TypeVar("TAggregateEvent", bound=AggregateEvent)
TAggregateCreated = TypeVar("TAggregateCreated", bound=AggregateCreated)
def _check_no_variable_params(
method: Union[FunctionType, WrapperDescriptorType]
) -> None:
assert isinstance(method, (FunctionType, WrapperDescriptorType)), type(method)
for param in inspect.signature(method).parameters.values():
if param.kind is param.VAR_POSITIONAL:
raise TypeError("variable positional parameters not supported")
# Todo: Support VAR_POSITIONAL?
# annotations["__star_args__"] = "typing.Any"
elif param.kind is param.VAR_KEYWORD:
# Todo: Support VAR_KEYWORD?
# annotations["__star_kwargs__"] = "typing.Any"
raise TypeError("variable keyword parameters not supported")
def _coerce_args_to_kwargs(
method: Union[FunctionType, WrapperDescriptorType],
args: Iterable[Any],
kwargs: Dict[str, Any],
expects_id: bool = False,
) -> Dict[str, Any]:
assert isinstance(method, (FunctionType, WrapperDescriptorType))
method_signature = inspect.signature(method)
copy_kwargs = dict(kwargs)
args = tuple(args)
positional_names = []
keyword_defaults = {}
required_positional = []
required_keyword_only = []
if expects_id:
positional_names.append("id")
required_positional.append("id")
for name, param in method_signature.parameters.items():
if name == "self":
continue
# elif param.kind in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD):
if param.kind is param.KEYWORD_ONLY:
required_keyword_only.append(name)
if param.kind is param.POSITIONAL_OR_KEYWORD:
positional_names.append(name)
if param.default == param.empty:
required_positional.append(name)
if param.default != param.empty:
keyword_defaults[name] = param.default
# if not required_keyword_only and not positional_names:
# if args or kwargs:
# raise TypeError(f"{method.__name__}() takes no args")
for name in kwargs:
if name not in required_keyword_only and name not in positional_names:
raise TypeError(
f"{get_method_name(method)}() got an unexpected "
f"keyword argument '{name}'"
)
counter = 0
len_args = len(args)
if len_args > len(positional_names):
msg = (
f"{get_method_name(method)}() takes {len(positional_names) + 1} "
f"positional argument{'' if len(positional_names) + 1 == 1 else 's'} "
f"but {len_args + 1} were given"
)
raise TypeError(msg)
required_positional_not_in_kwargs = [
n for n in required_positional if n not in kwargs
]
num_missing = len(required_positional_not_in_kwargs) - len_args
if num_missing > 0:
missing_names = [
f"'{name}'" for name in required_positional_not_in_kwargs[len_args:]
]
msg = (
f"{get_method_name(method)}() missing {num_missing} required positional "
f"argument{'' if num_missing == 1 else 's'}: "
)
raise_missing_names_type_error(missing_names, msg)
for name in positional_names:
if counter + 1 > len_args:
break
if name not in kwargs:
copy_kwargs[name] = args[counter]
counter += 1
else:
raise TypeError(
f"{get_method_name(method)}() got multiple values for argument '{name}'"
)
missing_keyword_only_arguments = []
for name in required_keyword_only:
if name not in kwargs:
missing_keyword_only_arguments.append(name)
if missing_keyword_only_arguments:
missing_names = [f"'{name}'" for name in missing_keyword_only_arguments]
msg = (
f"{get_method_name(method)}() missing {len(missing_names)} "
f"required keyword-only argument"
f"{'' if len(missing_names) == 1 else 's'}: "
)
raise_missing_names_type_error(missing_names, msg)
for name, value in keyword_defaults.items():
if name not in copy_kwargs:
copy_kwargs[name] = value
return copy_kwargs
def raise_missing_names_type_error(missing_names: List[str], msg: str) -> None:
msg += missing_names[0]
if len(missing_names) == 2:
msg += f" and {missing_names[1]}"
elif len(missing_names) > 2:
msg += ", " + ", ".join(missing_names[1:-1])
msg += f", and {missing_names[-1]}"
raise TypeError(msg)
class MetaAggregate(ABCMeta):
_annotations_mention_id = False
_init_mentions_id = False
INITIAL_VERSION = 1
def __new__(mcs, *args: Any, **kwargs: Any) -> "MetaAggregate":
try:
args[2]["__annotations__"].pop("id")
except KeyError:
pass
else:
args[2]["_annotations_mention_id"] = True
cls = ABCMeta.__new__(mcs, *args)
cls = dataclass(eq=False, repr=False)(cls)
return cast(MetaAggregate, cls)
def __init__(
cls,
*args: Any,
created_event_name: Optional[str] = None,
) -> None:
super().__init__(*args)
# Prepare created event class.
created_event_classes = {}
try:
created_event_class = cls.__dict__["_created_event_class"]
if created_event_name:
raise TypeError(
"Can't use both '_created_event_class' and 'created_event_name'"
)
except KeyError:
created_event_class = None
if isinstance(cls.__dict__["__init__"], CommandMethodDecorator):
init_decorator: CommandMethodDecorator = cls.__dict__["__init__"]
init_method = init_decorator.original_method
if created_event_name:
raise TypeError(
"Can't use both 'created_event_name' and __init__ @event decorator"
)
elif created_event_class:
raise TypeError(
"Can't use both '_created_event_class' and __init__ @event "
"decorator"
)
elif init_decorator.event_cls_name:
created_event_name = init_decorator.event_cls_name
elif init_decorator.given_event_cls:
created_event_class = init_decorator.given_event_cls
else:
raise TypeError(
"Neither name nor class given to __init__ @event decorator"
)
cls.__init__ = init_method # type: ignore
else:
init_method = cls.__dict__["__init__"]
assert isinstance(init_method, FunctionType)
for name, value in tuple(cls.__dict__.items()):
if isinstance(value, type) and issubclass(value, AggregateCreated):
created_event_classes[name] = value
# Use the class as the created class, if so named.
if created_event_name in created_event_classes:
created_event_class = created_event_classes[created_event_name]
elif created_event_class is None:
if len(created_event_classes) == 0 or created_event_name:
if not created_event_name:
created_event_name = "Created"
# Define a "created" event for this class.
created_cls_annotations = {}
_check_no_variable_params(init_method)
method_signature = inspect.signature(init_method)
for param_name in method_signature.parameters:
if param_name == "self":
continue
if param_name == "id":
cls._init_mentions_id = True
continue
created_cls_annotations[param_name] = "typing.Any"
created_event_class = type(
created_event_name,
(AggregateCreated,),
{
"__annotations__": created_cls_annotations,
"__module__": cls.__module__,
"__qualname__": ".".join(
[cls.__qualname__, created_event_name]
),
},
)
setattr(cls, created_event_name, created_event_class)
elif len(created_event_classes) == 1:
created_event_class = list(created_event_classes.values())[0]
cls._created_event_class = created_event_class
# Prepare the subsequent event classes.
for attribute in tuple(cls.__dict__.values()):
# Watch out for @property that sits over an @event.
if isinstance(attribute, property) and isinstance(
attribute.fset, CommandMethodDecorator
):
attribute = attribute.fset
if attribute.is_name_inferred_from_method:
# We don't want name inferred from property (not past participle).
method_name = attribute.original_method.__name__
raise TypeError(
f"@event under {method_name}() property setter requires event "
f"class name"
)
# Attribute is a property decorating an event decorator.
attribute.is_property_setter = True
# Attribute is an event decorator.
if isinstance(attribute, CommandMethodDecorator):
# Prepare the subsequent aggregate events.
original_method = attribute.original_method
assert isinstance(original_method, FunctionType)
method_signature = inspect.signature(original_method)
annotations = {}
for param_name in method_signature.parameters:
if param_name == "self":
continue
elif attribute.is_property_setter:
assert len(method_signature.parameters) == 2
attribute.property_setter_arg_name = param_name
annotations[param_name] = "typing.Any" # Todo: Improve this?
if not attribute.given_event_cls:
assert attribute.event_cls_name
event_cls_name = attribute.event_cls_name
# Check event class isn't already defined.
if event_cls_name in cls.__dict__:
raise TypeError(
f"{event_cls_name} event already defined on {cls.__name__}"
)
event_cls_qualname = ".".join([cls.__qualname__, event_cls_name])
event_cls_dict = {
"__annotations__": annotations,
"__module__": cls.__module__,
"__qualname__": event_cls_qualname,
}
event_cls = MetaDomainEvent(
event_cls_name, (DecoratedEvent,), event_cls_dict
)
original_methods[event_cls] = original_method
setattr(cls, event_cls_name, event_cls)
# Inspect the parameters of the create_id method.
cls._create_id_param_names = []
for name, param in inspect.signature(cls.create_id).parameters.items():
if param.kind in [param.KEYWORD_ONLY, param.POSITIONAL_OR_KEYWORD]:
cls._create_id_param_names.append(name)
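    # Calling the class constructs a new aggregate by creating and applying
    # its "created" event, rather than by invoking __init__ directly.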
def __call__(cls: "MetaAggregate", *args: Any, **kwargs: Any) -> TAggregate:
# noinspection PyTypeChecker
self_init: WrapperDescriptorType = cls.__init__ # type: ignore
kwargs = _coerce_args_to_kwargs(
self_init, args, kwargs, expects_id=cls._annotations_mention_id
)
if cls._created_event_class is None:
raise TypeError("attribute '_created_event_class' not set on class")
else:
new_aggregate: TAggregate = cls._create(
event_class=cls._created_event_class,
# id=id,
**kwargs,
)
return new_aggregate
# noinspection PyUnusedLocal
@staticmethod
def create_id(**kwargs: Any) -> UUID:
"""
Returns a new aggregate ID.
"""
return uuid4()
# noinspection PyShadowingBuiltins
def _create(
cls,
event_class: Type[TAggregateCreated],
*,
id: Optional[UUID] = None,
**kwargs: Any,
) -> TAggregate:
"""
Factory method to construct a new
aggregate object instance.
"""
        # Construct the domain event,
        # with an ID and version, and a
        # topic for the aggregate class.
create_id_kwargs = {
k: v for k, v in kwargs.items() if k in cls._create_id_param_names
}
try:
created_event: TAggregateCreated = event_class( # type: ignore
originator_topic=get_topic(cls),
originator_id=id or cls.create_id(**create_id_kwargs),
originator_version=cls.INITIAL_VERSION,
timestamp=datetime.now(tz=TZINFO),
**kwargs,
)
except TypeError as e:
msg = (
f"Unable to construct 'aggregate created' "
f"event with class {event_class.__qualname__} "
f"and keyword args {kwargs}: {e}"
)
raise TypeError(msg)
# Construct the aggregate object.
agg: TAggregate = created_event.mutate(None)
# Append the domain event to pending list.
agg.pending_events.append(created_event)
# Return the aggregate.
return agg
class Aggregate(ABC, metaclass=MetaAggregate):
"""
Base class for aggregate roots.
"""
class Event(AggregateEvent):
pass
class Created(AggregateCreated):
pass
def __new__(cls, *args: Any, **kwargs: Any) -> Any:
return object.__new__(cls)
def __eq__(self, other: Any) -> bool:
return type(self) == type(other) and self.__dict__ == other.__dict__
def __repr__(self) -> str:
attrs = [
f"{k.lstrip('_')}={v!r}"
for k, v in self.__dict__.items()
if k != "_pending_events"
]
return f"{type(self).__name__}({', '.join(attrs)})"
# noinspection PyShadowingBuiltins
def __base_init__(self, id: UUID, version: int, timestamp: datetime) -> None:
"""
Initialises an aggregate object with an :data:`id`, a :data:`version`
number, and a :data:`timestamp`. The internal :data:`pending_events` list
is also initialised.
"""
self._id = id
self._version = version
self._created_on = timestamp
self._modified_on = timestamp
self._pending_events: List[AggregateEvent] = []
@property
def id(self) -> UUID:
"""
The ID of the aggregate.
"""
return self._id
@property
def version(self) -> int:
"""
The version number of the aggregate.
"""
return self._version
@version.setter
def version(self, version: int) -> None:
self._version = version
@property
def created_on(self) -> datetime:
"""
The date and time when the aggregate was created.
"""
return self._created_on
@property
def modified_on(self) -> datetime:
"""
The date and time when the aggregate was last modified.
"""
return self._modified_on
@modified_on.setter
def modified_on(self, modified_on: datetime) -> None:
self._modified_on = modified_on
@property
def pending_events(self) -> List[AggregateEvent]:
"""
A list of pending events.
"""
return self._pending_events
def trigger_event(
self,
event_class: Type[TAggregateEvent],
**kwargs: Any,
) -> None:
"""
        Triggers a domain event of the given type, by creating
        an event object and using it to mutate the aggregate.
"""
# Construct the domain event as the
# next in the aggregate's sequence.
# Use counting to generate the sequence.
next_version = self.version + 1
try:
new_event = event_class( # type: ignore
originator_id=self.id,
originator_version=next_version,
timestamp=datetime.now(tz=TZINFO),
**kwargs,
)
except TypeError as e:
raise TypeError(f"Can't construct event {event_class}: {e}")
# Mutate aggregate with domain event.
new_event.mutate(self)
# Append the domain event to pending list.
self.pending_events.append(new_event)
def collect_events(self) -> List[AggregateEvent]:
"""
Collects and returns a list of pending aggregate
:class:`AggregateEvent` objects.
"""
collected = []
while self.pending_events:
collected.append(self.pending_events.pop(0))
return collected
def aggregate(
cls: Optional[MetaAggregate] = None, *, created_event_name: Optional[str] = None
) -> Union[MetaAggregate, Callable]:
"""
    Class decorator that converts the decorated class into a subclass of Aggregate.
.. code-block:: python
@aggregate
class MyAggregate:
pass
...is equivalent to...
.. code-block:: python
class MyAggregate(Aggregate):
pass
"""
def decorator(cls: Any) -> MetaAggregate:
if issubclass(cls, Aggregate):
raise TypeError(f"{cls.__name__} is already an Aggregate")
bases = cls.__bases__
if bases == (object,):
bases = (Aggregate,)
else:
bases += (Aggregate,)
return MetaAggregate(
cls.__name__,
bases,
dict(cls.__dict__),
created_event_name=created_event_name,
)
if cls:
return decorator(cls)
else:
return decorator
class VersionError(Exception):
"""
    Raised when a domain event can't be applied to an aggregate
    because its version doesn't match: the event is not the next
    in the aggregate's sequence of events.
"""
class Snapshot(DomainEvent):
# noinspection PyUnresolvedReferences
"""
Snapshots represent the state of an aggregate at a particular
version.
Constructor arguments:
:param UUID originator_id: ID of originating aggregate.
:param int originator_version: version of originating aggregate.
    :param datetime timestamp: date-time of the event.
    :param str topic: string that identifies the aggregate's class and module.
    :param dict state: state of the originating aggregate.
"""
topic: str
state: dict
# noinspection PyShadowingNames
@classmethod
def take(cls, aggregate: TAggregate) -> "Snapshot":
"""
Creates a snapshot of the given :class:`Aggregate` object.
"""
aggregate_state = dict(aggregate.__dict__)
aggregate_state.pop("_pending_events")
class_version = getattr(type(aggregate), "class_version", 1)
if class_version > 1:
aggregate_state["class_version"] = class_version
originator_id = aggregate_state.pop("_id")
originator_version = aggregate_state.pop("_version")
# noinspection PyArgumentList
return cls( # type: ignore
originator_id=originator_id,
originator_version=originator_version,
timestamp=datetime.now(tz=TZINFO),
topic=get_topic(type(aggregate)),
state=aggregate_state,
)
def mutate(self, _: None = None) -> TAggregate:
"""
Reconstructs the snapshotted :class:`Aggregate` object.
"""
cls = resolve_topic(self.topic)
assert issubclass(cls, Aggregate)
aggregate_state = dict(self.state)
from_version = aggregate_state.pop("class_version", 1)
class_version = getattr(cls, "class_version", 1)
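        # Apply upcast methods one version step at a time until the stored
        # state matches the current class version.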
while from_version < class_version:
upcast_name = f"upcast_v{from_version}_v{from_version + 1}"
upcast = getattr(cls, upcast_name)
upcast(aggregate_state)
from_version += 1
aggregate_state["_id"] = self.originator_id
aggregate_state["_version"] = self.originator_version
aggregate_state["_pending_events"] = []
# noinspection PyShadowingNames
aggregate = object.__new__(cls)
aggregate.__dict__.update(aggregate_state)
return aggregate
|
johnbywater/eventsourcing
|
eventsourcing/domain.py
|
Python
|
bsd-3-clause
| 37,825
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.core.cache import cache
from sentry.testutils import PermissionTestCase
from sentry.api.endpoints.setup_wizard import SETUP_WIZARD_CACHE_KEY
class SetupWizard(PermissionTestCase):
def test_redirect(self):
user = self.create_user('foo@example.com', is_active=False)
url = reverse('sentry-project-wizard-fetch', kwargs={
'wizard_hash': 'abc'
})
        self.login_as(user)
        resp = self.client.get(url)
        assert resp.status_code == 302
def test_simple(self):
self.create_organization(owner=self.user)
self.login_as(self.user)
key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, 'abc')
cache.set(key, 'test')
url = reverse('sentry-project-wizard-fetch', kwargs={
'wizard_hash': 'abc'
})
resp = self.client.get(url)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/setup-wizard.html')
def test_redirect_to_org(self):
self.create_organization(owner=self.user)
self.login_as(self.user)
url = reverse('sentry-project-wizard-fetch', kwargs={
'wizard_hash': 'xyz'
})
resp = self.client.get(url)
assert resp.status_code == 302
|
gencer/sentry
|
tests/sentry/web/frontend/test_setup_wizard.py
|
Python
|
bsd-3-clause
| 1,341
|
# Copyright 2013 University of Maryland. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE.TXT file.
import os
from framework.Targets import ApacheTarget
class Target(ApacheTarget):
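    # Defined without 'self': invoked as a plain function while the class
    # body executes (see application_dir_mapping below).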
def get_path(filename):
return os.path.dirname(os.path.realpath(__file__)) + '/' + filename
name = "eXtplorer 2.1"
application_dir_mapping = [get_path("application"), "/var/www"]
chroot_environment = "Debian5"
|
UMD-SEAM/bugbox
|
framework/Targets/eXtplorer_2_1_0/__init__.py
|
Python
|
bsd-3-clause
| 494
|
try:
INSTALLED_APPS
except NameError:
INSTALLED_APPS=()
#Generated Config - Don't modify above this line
|
igudym/twango
|
twango/template/default/src/conf/h_third_party_apps.py
|
Python
|
bsd-3-clause
| 115
|
import sys, time, os
from django.conf import settings
from django.core import mail
from django.core.mail.backends import locmem
from django.db import DEFAULT_DB_ALIAS
from django.test import signals
from django.template import Template
from django.utils.translation import deactivate
from django.utils.unittest import skipIf
class ContextList(list):
"""A wrapper that provides direct key access to context items contained
in a list of context objects.
"""
def __getitem__(self, key):
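        # A string key is looked up in each subcontext in order; any other
        # key falls through to normal list indexing.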
if isinstance(key, basestring):
for subcontext in self:
if key in subcontext:
return subcontext[key]
raise KeyError(key)
else:
return super(ContextList, self).__getitem__(key)
    def __contains__(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True
def instrumented_test_render(self, context):
"""
An instrumented Template render method, providing a signal
that can be intercepted by the test system Client
"""
signals.template_rendered.send(sender=self, template=self, context=context)
return self.nodelist.render(context)
def setup_test_environment():
"""Perform any global pre-test setup. This involves:
- Installing the instrumented test renderer
- Set the email backend to the locmem email backend.
- Setting the active locale to match the LANGUAGE_CODE setting.
"""
Template.original_render = Template._render
Template._render = instrumented_test_render
mail.original_SMTPConnection = mail.SMTPConnection
mail.SMTPConnection = locmem.EmailBackend
mail.original_email_backend = settings.EMAIL_BACKEND
settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
mail.outbox = []
deactivate()
def teardown_test_environment():
"""Perform any global post-test teardown. This involves:
- Restoring the original test renderer
- Restoring the email sending functions
"""
Template._render = Template.original_render
del Template.original_render
mail.SMTPConnection = mail.original_SMTPConnection
del mail.original_SMTPConnection
settings.EMAIL_BACKEND = mail.original_email_backend
del mail.original_email_backend
del mail.outbox
def get_runner(settings):
test_path = settings.TEST_RUNNER.split('.')
# Allow for Python 2.5 relative paths
if len(test_path) > 1:
test_module_name = '.'.join(test_path[:-1])
else:
test_module_name = '.'
test_module = __import__(test_module_name, {}, {}, test_path[-1])
test_runner = getattr(test_module, test_path[-1])
return test_runner
def skipIfDBEngine(engine, reason=None):
"""
Decorator to skip tests on a given database engine.
Note that you can pass a single engine or an iterable here
"""
if not reason:
reason = "not supported on this database"
settings_engine = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE']
if isinstance(engine, basestring):
return skipIf(settings_engine == engine, reason)
return skipIf(settings_engine in engine, reason)
|
sam-tsai/django-old
|
django/test/utils.py
|
Python
|
bsd-3-clause
| 3,206
|
"""
Copyright (C) 2018 Roberto Bruttomesso <roberto.bruttomesso@gmail.com>
This file is distributed under the terms of the 3-clause BSD License.
A copy of the license can be found in the root directory or at
https://opensource.org/licenses/BSD-3-Clause.
Author: Roberto Bruttomesso <roberto.bruttomesso@gmail.com>
Date: 29/10/2018
This module implements the main parsing routine of IEC61131 text
"""
from intrepyd.iec611312py.IEC61131ParserVisitor import IEC61131ParserVisitor
from intrepyd.iec611312py.statement import Assignment, IfThenElse, Case
from intrepyd.iec611312py.expression import VariableOcc, ConstantOcc, Expression, Range, FunctionOcc, ParamInit, TRUE
from intrepyd.iec611312py.variable import Variable
def isNumber(text):
    return all(ch.isdigit() or ch == '.' for ch in text)
def computeCompositeDatatype(var, name2var):
tokens = var.split('.')
if len(tokens) != 2:
raise RuntimeError('Cannot handle nested structures')
baseType = name2var[tokens[0]]
for name, variable in baseType.datatype.fields.iteritems():
if name == tokens[1]:
return variable.datatype
return None
class STMTBuilder(IEC61131ParserVisitor):
"""
Vistor that builds statements from the IEC program
"""
def __init__(self, name2var, pou2inputs):
self._statements = []
self._name2var = name2var
self._pou2inputs = pou2inputs
@property
def statements(self):
return self._statements
def visitBodyST(self, ctx):
self._statements = ctx.getChild(0).accept(self)
def visitStmt_block(self, ctx):
return [ctx.getChild(i).accept(self) for i in range(ctx.getChildCount())]
def visitSt_stmt(self, ctx):
return ctx.getChild(0).accept(self)
def visit_stmt(self, ctx):
return ctx.getChild(0).accept(self)
def visitAssignVariable(self, ctx):
lhs = ctx.getChild(0).accept(self)
rhs = ctx.getChild(2).accept(self)
return Assignment(lhs, rhs)
def visitAssignCompositeAccess(self, ctx):
lhs = ctx.getChild(0).accept(self)
rhs = ctx.getChild(2).accept(self)
return Assignment(lhs, rhs)
def visitExpression(self, ctx):
return ctx.getChild(0).accept(self)
def visitBinaryBoolExpression(self, ctx):
return self._binaryExpressionHelper(ctx)
def visitBinaryTermExpression(self, ctx):
return self._binaryExpressionHelper(ctx)
def visitUnaryBoolExpression(self, ctx):
return self._unaryExpressionHelper(ctx)
def visitUnaryTermExpression(self, ctx):
return self._unaryExpressionHelper(ctx)
def visitLeafBoolExpression(self, ctx):
return ctx.getChild(0).accept(self)
def visitParBoolExpression(self, ctx):
return ctx.subexpr.accept(self)
def visitParTermExpression(self, ctx):
return ctx.subexpr.accept(self)
def visitSimple_var(self, ctx):
var = ctx.getChild(0).getText()
if not var in self._name2var:
raise RuntimeError('Undeclared variable ' + var)
return VariableOcc(self._name2var[var])
def visitComposite_access(self, ctx):
base = ctx.getChild(0).getText()
if not base in self._name2var:
raise RuntimeError('Undeclared variable ' + base)
var = ctx.getText()
if not var in self._name2var:
datatype = computeCompositeDatatype(var, self._name2var)
self._name2var[var] = Variable(var, datatype, Variable.FIELD)
return VariableOcc(self._name2var[var])
def visitArray_access(self, ctx):
raise NotImplementedError
def visitVariable_bit_access(self, ctx):
raise NotImplementedError
def visitConstant(self, ctx):
cst = ctx.getText()
return ConstantOcc(cst)
def visitCallBoolExpression(self, ctx):
return self._callExpressionHelper(ctx)
def visitCallTermExpression(self, ctx):
return self._callExpressionHelper(ctx)
def visitCustomCallExpression(self, ctx):
pouName = ctx.getChild(0).getText()
        if pouName not in self._pou2inputs:
            raise RuntimeError('Could not find pou ' + pouName)
inputs = self._pou2inputs[pouName]
paramInits = []
param = 0
if ctx.getChildCount() > 2:
for i in range(2, ctx.getChildCount(), 2):
paramInit = ctx.getChild(i).accept(self)
paramInits.append(paramInit)
paramInit.rhs.datatype = inputs[param].datatype
param += 1
return FunctionOcc(ctx.getChild(0).getText(), paramInits)
def visitFunc_param_init(self, ctx):
param = ctx.getChild(0).getText()
value = ctx.getChild(2).getText()
if isNumber(value):
return ParamInit(param, ConstantOcc(value)) # type will be set by caller
return ParamInit(param, VariableOcc(Variable(value, None, Variable.TEMP))) # type will be set by caller
def visitIf_stmt(self, ctx):
return ctx.getChild(0).accept(self)
def visitIf_simple_stmt(self, ctx):
conditions = []
statements = []
conditions.append(ctx.ifexpr.accept(self))
statements.append(ctx.ifstmt.accept(self))
return IfThenElse(conditions, statements)
def visitIf_elseif_stmt(self, ctx):
conditions = []
statements = []
conditions.append(ctx.ifexpr.accept(self))
statements.append(ctx.ifstmt.accept(self))
conds, stmts = ctx.elsifstmt.accept(self)
for cond in conds:
conditions.append(cond)
for stmt in stmts:
statements.append(stmt)
return IfThenElse(conditions, statements)
def visitIf_else_stmt(self, ctx):
conditions = []
statements = []
conditions.append(ctx.ifexpr.accept(self))
statements.append(ctx.ifstmt.accept(self))
conditions.append(TRUE)
statements.append(ctx.elsestmt.accept(self))
return IfThenElse(conditions, statements)
def visitIf_complete_stmt(self, ctx):
conditions = []
statements = []
conditions.append(ctx.ifexpr.accept(self))
statements.append(ctx.ifstmt.accept(self))
conds, stmts = ctx.elsifstmt.accept(self)
for cond in conds:
conditions.append(cond)
for stmt in stmts:
statements.append(stmt)
conditions.append(TRUE)
statements.append(ctx.elsestmt.accept(self))
return IfThenElse(conditions, statements)
def visitElsif_stmt_list(self, ctx):
conditions = []
statements = []
for i in range(ctx.getChildCount()):
cond, stmt = ctx.getChild(i).accept(self)
conditions.append(cond)
statements.append(stmt)
return conditions, statements
def visitElsif_stmt(self, ctx):
return ctx.expr.accept(self), ctx.stmtblock.accept(self)
def visitCase_stmt(self, ctx):
expression = ctx.expr.accept(self)
selections, statements = ctx.casesel.accept(self)
if ctx.getChildCount() == 7:
# There is else too
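            # Using the case expression itself as the selection makes the
            # ELSE branch match unconditionally.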
selections.append([expression])
statements.append(ctx.elsestmt.accept(self))
return Case(expression, selections, statements)
def visitCase_selections(self, ctx):
selections = []
statements = []
for i in range(ctx.getChildCount()):
sel, stmt = ctx.getChild(i).accept(self)
selections.append(sel)
statements.append(stmt)
return selections, statements
def visitCase_selection(self, ctx):
return ctx.getChild(0).accept(self), ctx.getChild(2).accept(self)
def visitCase_list(self, ctx):
return [ctx.getChild(i).accept(self) for i in range(0, ctx.getChildCount(), 2)]
def visitCaseRange(self, ctx):
return Range(ctx.start.getText(), ctx.to.getText())
def visitCaseExpression(self, ctx):
return ctx.getChild(0).accept(self)
def _binaryExpressionHelper(self, ctx):
operator = ctx.op.text
arguments = [ctx.getChild(0).accept(self), ctx.getChild(2).accept(self)]
return Expression(operator, arguments)
def _unaryExpressionHelper(self, ctx):
operator = ctx.getChild(0).getText()
return Expression(operator, [ctx.getChild(1).accept(self)])
def _callExpressionHelper(self, ctx):
operator = ctx.getChild(0).getText()
arguments = [ctx.getChild(2).accept(self)]
return Expression(operator, arguments)
|
formalmethods/intrepyd
|
intrepyd/iec611312py/stmtbuilder.py
|
Python
|
bsd-3-clause
| 8,639
|
from lazyrunner import pmodule, PModule, preset, presetTree, defaults
from treedict import TreeDict
p = defaults()
p.data_defaults.a = 1
p.data_defaults.b = 2
@preset
def change_default_a(p):
p.data_defaults.a = 10
@pmodule
class Data(PModule):
# Use this to set up the local branch of the preset tree. Calling
    # defaults() requests the local branch of the preset tree.
p = defaults()
p.x = 1
@preset
def set_X_2(p):
p.x = 2
@preset
def set_X(p, x = 2):
p.x = x
# The current version of the pmodule. The caching facilities
# assume results are different between different versions.
version = 0.01
# Include dependencies here; alternatively, these may be given as
# class methods, optionally accepting the parameter tree, to
# provide parameter-dependent dependency checking. See
# documentation for more info.
parameter_dependencies = ['data_defaults']
result_dependencies = []
module_dependencies = []
# If true, the results are never saved or loaded from the cache.
# Switch to True once the module is tested.
disable_result_caching = True
def setup(self):
# Setup the Pmodule. Called whenever the module is created.
pass
def run(self):
# Run the module and return a TreeDict instance holding the
# results. Note that this function is not necessarily called
# if the results can be loaded from cache
x = self.p.x
self.log.info("The value of X is %d." % self.p.x)
return TreeDict(x = self.p.x,
a = self.parameters.data_defaults.a,
b = self.parameters.data_defaults.b
)
@classmethod
def reportResult(cls, parameters, p, r):
# Report on results, even if they are loaded from
# cache. `parameters` is the full parameter tree as specified
# by all parameter dependencies, `p` is the local parameter
# tree branch for this module, and `r` is the result of run(),
# possibly loaded from cache.
self.log.info("The reported value of X is %d. " % r.x)
|
hoytak/lazyrunner
|
test/test_environment/data/data.py
|
Python
|
bsd-3-clause
| 2,204
|
from django.db import models
from sampledatahelper.helper import SampleDataHelper
from sampledatahelper import handlers
class Register(object):
fields = {}
ignored_fields = []
_instance = None
sd = SampleDataHelper()
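    # Register is a process-wide singleton: __new__ always returns the same
    # shared instance.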
    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super(Register, cls).__new__(cls)
        return cls._instance
def register(self, field_class, handler_class):
self.fields[field_class] = handler_class
def ignore(self, field_class):
self.ignored_fields.append(field_class)
def get_handler(self, field_instance):
if field_instance.__class__ in self.ignored_fields:
return None
handler = self.fields.get(field_instance.__class__, None)
if handler:
return handler(self.sd, field_instance)
return None
register = Register()
register.register(models.CharField, handlers.CharHandler)
register.register(models.BigIntegerField, handlers.BigIntegerHandler)
register.register(models.SlugField, handlers.SlugHandler)
register.register(models.EmailField, handlers.EmailHandler)
register.register(models.URLField, handlers.URLHandler)
register.register(models.TextField, handlers.TextHandler)
register.register(models.IntegerField, handlers.IntegerHandler)
register.register(models.SmallIntegerField, handlers.SmallIntegerHandler)
register.register(models.PositiveIntegerField, handlers.PositiveIntegerHandler)
register.register(models.PositiveSmallIntegerField, handlers.PositiveSmallIntegerHandler)
register.register(models.FloatField, handlers.FloatHandler)
register.register(models.BooleanField, handlers.BooleanHandler)
register.register(models.NullBooleanField, handlers.NullBooleanHandler)
register.register(models.CommaSeparatedIntegerField, handlers.CommaSeparatedIntegerHandler)
register.register(models.DecimalField, handlers.DecimalHandler)
register.register(models.DateField, handlers.DateHandler)
register.register(models.DateTimeField, handlers.DateTimeHandler)
register.register(models.TimeField, handlers.TimeHandler)
register.register(models.FileField, handlers.FileHandler)
register.register(models.FilePathField, handlers.FilePathHandler)
register.register(models.ImageField, handlers.ImageHandler)
register.register(models.IPAddressField, handlers.IPAddressHandler)
register.register(models.GenericIPAddressField, handlers.GenericIPAddressHandler)
register.register(models.ForeignKey, handlers.ForeignKeyHandler)
register.register(models.OneToOneField, handlers.OneToOneHandler)
register.ignore(models.ManyToManyField)
register.ignore(models.AutoField)
|
kaleidos/django-sampledatahelper
|
sampledatahelper/register.py
|
Python
|
bsd-3-clause
| 2,778
|
# -*- coding: utf-8 -*-
"""Functional tests using WebTest.
See: http://webtest.readthedocs.org/
"""
from flask import url_for
from recruit_app.user.models import User
from .factories import UserFactory
class TestLoggingIn:
"""Login."""
def test_can_log_in_returns_200(self, user, testapp):
"""Login successful."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form in navbar
form = res.forms['loginForm']
form['email'] = user.email
form['password'] = 'myprecious'
# Submits
# res = form.submit().follow()
res = form.submit()
assert res.status_code == 200
def test_sees_alert_on_log_out(self, user, testapp):
"""Show alert on logout."""
res = testapp.get('/')
# Fills out login form in navbar
form = res.forms['loginForm']
form['email'] = user.email
form['password'] = 'myprecious'
# Submits
res = form.submit()
res = testapp.get(url_for('security.logout')).follow()
# sees alert
assert 'loginForm' in res
def test_sees_error_message_if_password_is_incorrect(self, user, testapp):
"""Show error if password is incorrect."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['email'] = user.email
form['password'] = 'wrong'
# Submits
res = form.submit()
# sees error
assert 'Invalid password' in res
def test_sees_error_message_if_email_doesnt_exist(self, user, testapp):
"""Show error if email doesn't exist."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['email'] = 'unknown@unknown.com'
form['password'] = 'myprecious'
# Submits
res = form.submit()
# sees error
assert 'Specified user does not exist' in res
class TestRegistering:
"""Register a user."""
def test_can_register(self, user, testapp):
"""Register a new user."""
old_count = len(User.query.all())
# Goes to homepage
res = testapp.get('/')
# Clicks Create Account button
res = res.click('Create account')
# Fills out the form
form = res.forms['registerForm']
form['email'] = 'foo@bar.com'
form['password'] = 'secret'
form['password_confirm'] = 'secret'
# Submits
# res = form.submit().follow()
res = form.submit().follow()
assert res.status_code == 200
# A new user was created
assert len(User.query.all()) == old_count + 1
def test_sees_error_message_if_passwords_dont_match(self, user, testapp):
"""Show error if passwords don't match."""
# Goes to registration page
res = testapp.get(url_for('security.register'))
# Fills out form, but passwords don't match
form = res.forms['registerForm']
form['email'] = 'foo@bar.com'
form['password'] = 'secret'
form['password_confirm'] = 'secrets'
# Submits
res = form.submit()
# sees error message
assert 'Passwords do not match' in res
def test_sees_error_message_if_user_already_registered(self, user, testapp):
"""Show error if user already registered."""
user = UserFactory(active=True) # A registered user
user.save()
# Goes to registration page
res = testapp.get(url_for('security.register'))
# Fills out form, but email is already registered
form = res.forms['registerForm']
form['email'] = user.email
form['password'] = 'secret'
form['password_confirm'] = 'secret'
# Submits
res = form.submit()
# sees error
assert 'is already associated with an account' in res
|
tyler274/Recruitment-App
|
tests/test_functional.py
|
Python
|
bsd-3-clause
| 3,980
|
import numdifftools
numdifftools.test()
|
maniteja123/numdifftools
|
conda_recipe/run_test.py
|
Python
|
bsd-3-clause
| 40
|
# -*- coding: utf-8 -*-
'''The app module, containing the app factory function.'''
from flask import Flask, render_template
from cheapr.settings import ProdConfig
from cheapr.assets import assets
from cheapr.extensions import (
bcrypt,
cache,
db,
login_manager,
migrate,
debug_toolbar,
)
from cheapr import public, user
from flask.ext.images import Images
def create_app(config_object=ProdConfig):
'''An application factory, as explained here:
http://flask.pocoo.org/docs/patterns/appfactories/
:param config_object: The configuration object to use.
'''
app = Flask(__name__)
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
app.secret_key = 'Google'
    app.images_cache = 'static/cache/images'
#https://medium.com/@5hreyans/the-one-weird-trick-that-cut-our-flask-page-load-time-by-70-87145335f679
app.jinja_env.cache = {}
images = Images(app)
#resize = Resize(app)
return app
def register_extensions(app):
assets.init_app(app)
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
debug_toolbar.init_app(app)
migrate.init_app(app, db)
return None
def register_blueprints(app):
app.register_blueprint(public.views.blueprint)
app.register_blueprint(user.views.blueprint)
return None
def register_errorhandlers(app):
def render_error(error):
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template("{0}.html".format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
|
antani/cheapr
|
cheapr/app.py
|
Python
|
bsd-3-clause
| 1,780
|
#
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
"""
desispec.pipeline
====================
Tools for pipeline creation and running.
"""
from __future__ import absolute_import, division, print_function
from . import tasks
from .defs import (task_states, prod_options_name,
task_state_to_int, task_int_to_state)
from .db import (all_task_types, DataBaseSqlite, DataBasePostgres, check_tasks,
load_db)
from .prod import (update_prod, load_prod)
from .run import (run_task, run_task_simple, run_task_list, run_task_list_db,
dry_run)
from .scriptgen import (batch_shell, batch_nersc)
|
desihub/desispec
|
py/desispec/pipeline/__init__.py
|
Python
|
bsd-3-clause
| 637
|
#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for writers.android_policy_writer'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
import unittest
from xml.dom import minidom
from writers import writer_unittest_common
from writers import android_policy_writer
class AndroidPolicyWriterUnittest(writer_unittest_common.WriterUnittestCommon):
'''Unit tests to test assumptions in Android Policy Writer'''
def testPolicyWithoutItems(self):
# Test an example policy without items.
policy = {
'name': '_policy_name',
'caption': '_policy_caption',
'desc': 'This is a long policy caption. More than one sentence '
'in a single line because it is very important.\n'
'Second line, also important'
}
writer = android_policy_writer.GetWriter({})
writer.Init()
writer.BeginTemplate()
writer.WritePolicy(policy)
    self.assertEqual(
writer._resources.toxml(), '<resources>'
'<string name="_policy_nameTitle">_policy_caption</string>'
'<string name="_policy_nameDesc">This is a long policy caption. More '
'than one sentence in a single line because it is very '
'important.\nSecond line, also important'
'</string>'
'</resources>')
def testPolicyWithItems(self):
    # Test an example policy with items; some are filtered out by platform.
policy = {
'name':
'_policy_name',
'caption':
'_policy_caption',
'desc':
'_policy_desc_first.\nadditional line',
'items': [{
'caption': '_caption1',
'value': '_value1',
}, {
'caption': '_caption2',
'value': '_value2',
},
{
'caption': '_caption3',
'value': '_value3',
'supported_on': [{
'platform': 'win'
}, {
'platform': 'win7'
}]
},
{
'caption':
'_caption4',
'value':
'_value4',
'supported_on': [{
'platform': 'android'
}, {
'platform': 'win7'
}]
}]
}
writer = android_policy_writer.GetWriter({})
writer.Init()
writer.BeginTemplate()
writer.WritePolicy(policy)
    self.assertEqual(
writer._resources.toxml(), '<resources>'
'<string name="_policy_nameTitle">_policy_caption</string>'
'<string name="_policy_nameDesc">_policy_desc_first.\n'
'additional line</string>'
'<string-array name="_policy_nameEntries">'
'<item>_caption1</item>'
'<item>_caption2</item>'
'<item>_caption4</item>'
'</string-array>'
'<string-array name="_policy_nameValues">'
'<item>_value1</item>'
'<item>_value2</item>'
'<item>_value4</item>'
'</string-array>'
'</resources>')
if __name__ == '__main__':
unittest.main()
|
endlessm/chromium-browser
|
components/policy/tools/template_writers/writers/android_policy_writer_unittest.py
|
Python
|
bsd-3-clause
| 3,381
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import versioneer
setup(name='conwhat', #version=versioneer.get_version(),
description='python library for connectome-based white matter atlas analyses in neuroimaging',
long_description='python library for connectome-based white matter atlas analyses in neuroimaging',
keywords='white matter, tractography, MRI, DTI, diffusion, python',
author='John David Griffiths',
author_email='j.davidgriffiths@gmail.com',
url='https://github.com/JohnGriffiths/conwhat',
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=['numpy', 'setuptools'],
classifiers=[
'Intended Audience :: Science/Research',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
],
entry_points={
"console_scripts": [
"conwhat=conwhat.__main__:main",
]
},
#cmdclass=versioneer.get_cmdclass()
)
|
JohnGriffiths/ConWhAt
|
setup.py
|
Python
|
bsd-3-clause
| 1,353
|
# coding=UTF-8
from datetime import timedelta
import resource
import time
import urllib
from snh.models.youtubemodel import *
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
import snhlogger
logger = snhlogger.init_logger(__name__, "youtube.log")
def run_youtube_harvester():
harvester_list = YoutubeHarvester.objects.all()
for harvester in harvester_list:
logger.info(u"The harvester %s is %s" %
(unicode(harvester),
"active" if harvester.is_active else "inactive"))
if harvester.is_active:
run_harvester_v1(harvester)
def sleeper(retry_count):
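    # Back off linearly with the retry count, capped at 10 seconds.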
retry_delay = 1
    wait_delay = min(retry_count * retry_delay, 10)
time.sleep(wait_delay)
def get_timedelta(dm_time):
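    # Age of the given timestamp in whole days.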
ts = datetime.strptime(dm_time,'%Y-%m-%dT%H:%M:%S+0000')
return (datetime.utcnow() - ts).days
def get_existing_user(param):
user = None
try:
user = YTUser.objects.get(**param)
except MultipleObjectsReturned:
user = YTUser.objects.filter(**param)[0]
logger.warning(u"Duplicated user in DB! %s, %s" % (user, user.fid))
except ObjectDoesNotExist:
pass
return user
def update_user(harvester, userid):
snh_user = None
try:
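        # URL-encode the user id by round-tripping it through urlencode and
        # stripping the leading "k=" marker.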
uniuserid = urllib.urlencode({"k":userid.encode('utf-8')}).split("=")[1:][0]
ytuser = harvester.api_call("GetYouTubeUserEntry",{"username":uniuserid})
split_uri = ytuser.id.text.split("/")
fid = split_uri[len(split_uri)-1]
snh_user = get_existing_user({"fid__exact":fid})
if not snh_user:
snh_user = get_existing_user({"username__exact":userid})
if not snh_user:
snh_user = YTUser(
fid=fid,
username=userid,
)
snh_user.save()
logger.info(u"New user created in status_from_search! %s", snh_user)
snh_user.update_from_youtube(ytuser)
except gdata.service.RequestError, e:
msg = u"RequestError on user %s. Trying to update anyway" % (userid)
logger.info(msg)
if e[0]["status"] == 403 or e[0]["status"] == 400:
snh_user = get_existing_user({"username__exact":userid})
if not snh_user:
snh_user = YTUser(
username=userid,
)
snh_user.save()
logger.info(u"New user created in status_from_search! %s", snh_user)
else:
msg = u"RequestError on user %s!!! Force update failed!!!" % (userid)
logger.exception(msg)
except:
msg = u"Cannot update user %s" % (userid)
logger.exception(msg)
return snh_user
def update_users(harvester):
all_users = harvester.ytusers_to_harvest.all()
for snhuser in all_users:
if not snhuser.error_triggered:
uid = snhuser.fid if snhuser.fid else snhuser.username
update_user(harvester, uid)
else:
logger.info(u"Skipping user update: %s(%s) because user has triggered the error flag." % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"User harvest completed %s Mem:%s MB" % (harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def update_video(snhuser, ytvideo):
split_uri = ytvideo.id.text.split("/")
fid = split_uri[len(split_uri)-1]
snhvideo = None
try:
try:
snhvideo = YTVideo.objects.get(fid__exact=fid)
except ObjectDoesNotExist:
snhvideo = YTVideo(fid=fid, user=snhuser)
snhvideo.save()
snhvideo.update_from_youtube(snhuser, ytvideo)
except:
msg = u"Cannot update video %s" % (unicode(ytvideo.id.text,'UTF-8'))
logger.exception(msg)
return snhvideo
def update_comment(harvester, snhvideo, ytcomment):
author_name = ytcomment.author[0].name.text
snhuser = update_user(harvester, author_name)
split_uri = ytcomment.id.text.split("/")
fid = split_uri[len(split_uri)-1]
try:
try:
snhcomment = YTComment.objects.get(fid__exact=fid)
except ObjectDoesNotExist:
snhcomment = YTComment(fid=fid, video=snhvideo)
snhcomment.save()
snhcomment.update_from_youtube(snhvideo, snhuser, ytcomment)
except:
msg = u"Cannot update comment %s" % (unicode(ytcomment.id.text,'UTF-8'))
logger.exception(msg)
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.debug(u"Commment updated: comid:%s vidid:%s %s Mem:%s MB" % (snhcomment.fid,snhvideo.fid, harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
return snhcomment
def update_all_comment_helper(harvester, snhvideo, comment_list):
for comment in comment_list.entry:
update_comment(harvester, snhvideo, comment)
get_next_comment_uri = comment_list.GetNextLink().href if comment_list.GetNextLink() else None
return get_next_comment_uri
def update_all_comment(harvester,snhvideo):
comment_list = harvester.api_call("GetYouTubeVideoCommentFeed",{"video_id":snhvideo.fid})
get_next_comment_uri = update_all_comment_helper(harvester, snhvideo, comment_list)
while get_next_comment_uri:
comment_list = harvester.api_call("GetYouTubeVideoCommentFeed",{"uri":get_next_comment_uri})
get_next_comment_uri = update_all_comment_helper(harvester, snhvideo, comment_list)
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"Comment harvest completed for this video: %s %s Mem:%s MB" % (snhvideo.fid, harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def update_all_videos(harvester):
all_users = harvester.ytusers_to_harvest.all()
for snhuser in all_users:
out_of_window = False
if not snhuser.error_triggered:
logger.info(u"Will update user: %s(%s)" % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
get_vid_url = 'http://gdata.youtube.com/feeds/api/users/%s/uploads?' % snhuser.username
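            # Page through the uploads feed (assumed newest-first) until a
            # video older than the harvest window start is reached.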
while get_vid_url and not out_of_window:
video_list = harvester.api_call("GetYouTubeVideoFeed",{"uri":get_vid_url})
for video in video_list.entry:
published = datetime.strptime(video.published.text,'%Y-%m-%dT%H:%M:%S.000Z')
if published < harvester.harvest_window_to:
snhvideo = update_video(snhuser, video)
update_all_comment(harvester, snhvideo)
if published < harvester.harvest_window_from:
out_of_window = True
break
if not out_of_window:
get_vid_url = video_list.GetNextLink().href if video_list.GetNextLink() else None
else:
logger.info(u"Skipping user update: %s(%s) because user has triggered the error flag." % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"Video harvest completed %s Mem:%s MB" % (harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def run_harvester_v1(harvester):
harvester.start_new_harvest()
try:
start = time.time()
update_users(harvester)
update_all_videos(harvester)
logger.info(u"Results computation complete in %ss" % (time.time() - start))
except:
logger.exception(u"EXCEPTION: %s" % harvester)
finally:
usage = resource.getrusage(resource.RUSAGE_SELF)
harvester.end_current_harvest()
logger.info(u"End: %s Stats:%s Mem:%s MB" % (harvester,unicode(harvester.get_stats()),unicode(getattr(usage, "ru_maxrss")/(1024.0))))
|
pylanglois/Social-Network-Harvester
|
SocialNetworkHarvester/snh/management/commands/cronharvester/youtubech.py
|
Python
|
bsd-3-clause
| 8,013
|