| text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) |
|---|---|---|---|---|---|---|
import json
import logging
import socket
from contextlib import closing
from django.core.exceptions import ValidationError
from django.db import connection
from zeroconf import get_all_addresses
from zeroconf import NonUniqueNameException
from zeroconf import ServiceInfo
from zeroconf import USE_IP_OF_OUTGOING_INTERFACE
from zeroconf import Zeroconf
from kolibri.core.discovery.models import DynamicNetworkLocation
from kolibri.core.public.utils import get_device_info
logger = logging.getLogger(__name__)
SERVICE_TYPE = "Kolibri._sub._http._tcp.local."
LOCAL_DOMAIN = "kolibri.local"
ZEROCONF_STATE = {"zeroconf": None, "listener": None, "service": None}
def _id_from_name(name):
assert name.endswith(SERVICE_TYPE), (
"Invalid service name; must end with '%s'" % SERVICE_TYPE
)
return name.replace(SERVICE_TYPE, "").strip(".")
def _is_port_open(host, port, timeout=1):
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
sock.settimeout(timeout)
return sock.connect_ex((host, port)) == 0
class KolibriZeroconfService(object):
info = None
def __init__(self, id, port=8080, data={}):
self.id = id
self.port = port
self.data = {key: json.dumps(val) for (key, val) in data.items()}
def register(self):
if not ZEROCONF_STATE["zeroconf"]:
initialize_zeroconf_listener()
if self.info is not None:
logger.error("Service is already registered!")
return
i = 1
id = self.id
while not self.info:
# attempt to create an mDNS service and register it on the network
try:
info = ServiceInfo(
SERVICE_TYPE,
name=".".join([id, SERVICE_TYPE]),
server=".".join([id, LOCAL_DOMAIN, ""]),
address=USE_IP_OF_OUTGOING_INTERFACE,
port=self.port,
properties=self.data,
)
ZEROCONF_STATE["zeroconf"].register_service(info, ttl=60)
self.info = info
except NonUniqueNameException:
# if there's a name conflict, append incrementing integer until no conflict
i += 1
id = "%s-%d" % (self.id, i)
if i > 100:
raise NonUniqueNameException()
self.id = id
return self
def unregister(self):
if self.info is None:
logger.error("Service is not registered!")
return
ZEROCONF_STATE["zeroconf"].unregister_service(self.info)
self.info = None
def cleanup(self, *args, **kwargs):
if self.info and ZEROCONF_STATE["zeroconf"]:
self.unregister()
class KolibriZeroconfListener(object):
instances = {}
def add_service(self, zeroconf, type, name):
timeout = 5000
info = zeroconf.get_service_info(type, name, timeout=timeout)
if info is None:
logger.warn(
"Zeroconf network service information could not be retrieved within {} seconds".format(
str(timeout / 1000.0)
)
)
return
id = _id_from_name(name)
ip = socket.inet_ntoa(info.address)
base_url = "http://{ip}:{port}/".format(ip=ip, port=info.port)
zeroconf_service = ZEROCONF_STATE.get("service")
is_self = zeroconf_service and zeroconf_service.id == id
instance = {
"id": id,
"ip": ip,
"local": ip in get_all_addresses(),
"port": info.port,
"host": info.server.strip("."),
"base_url": base_url,
"self": is_self,
}
device_info = {
bytes.decode(key): json.loads(val) for (key, val) in info.properties.items()
}
instance.update(device_info)
self.instances[id] = instance
if not is_self:
try:
DynamicNetworkLocation.objects.update_or_create(
dict(base_url=base_url, **device_info), id=id
)
logger.info(
"Kolibri instance '%s' joined zeroconf network; service info: %s"
% (id, self.instances[id])
)
except ValidationError:
import traceback
logger.warn(
"""
A new Kolibri instance '%s' was seen on the zeroconf network,
but we had trouble getting the information we needed about it.
Service info:
%s
The following exception was raised:
%s
"""
% (id, self.instances[id], traceback.format_exc(limit=1))
)
finally:
connection.close()
def remove_service(self, zeroconf, type, name):
id = _id_from_name(name)
logger.info("Kolibri instance '%s' has left the zeroconf network." % (id,))
try:
if id in self.instances:
del self.instances[id]
except KeyError:
pass
DynamicNetworkLocation.objects.filter(pk=id).delete()
connection.close()
def register_zeroconf_service(port):
device_info = get_device_info()
DynamicNetworkLocation.objects.all().delete()
connection.close()
id = device_info.get("instance_id")
if ZEROCONF_STATE["service"] is not None:
unregister_zeroconf_service()
logger.info("Registering ourselves to zeroconf network with id '%s'..." % id)
data = device_info
ZEROCONF_STATE["service"] = KolibriZeroconfService(id=id, port=port, data=data)
ZEROCONF_STATE["service"].register()
def unregister_zeroconf_service():
if ZEROCONF_STATE["service"] is not None:
ZEROCONF_STATE["service"].cleanup()
ZEROCONF_STATE["service"] = None
if ZEROCONF_STATE["zeroconf"] is not None:
ZEROCONF_STATE["zeroconf"].close()
def initialize_zeroconf_listener():
ZEROCONF_STATE["zeroconf"] = Zeroconf()
ZEROCONF_STATE["listener"] = KolibriZeroconfListener()
ZEROCONF_STATE["zeroconf"].add_service_listener(
SERVICE_TYPE, ZEROCONF_STATE["listener"]
)
def get_peer_instances():
try:
return ZEROCONF_STATE["listener"].instances.values()
except AttributeError:
return []
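if __name__ == "__main__":
    # Illustrative usage sketch (added here; not part of the original module).
    # It assumes a Kolibri server is already listening on port 8080 and that
    # Django settings and models are available.
    register_zeroconf_service(port=8080)
    for peer in get_peer_instances():
        logger.info("Discovered Kolibri peer at %s", peer["base_url"])
    unregister_zeroconf_service()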
| mrpau/kolibri | kolibri/core/discovery/utils/network/search.py | Python | mit | 6,523 | 0.00138 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import, division, unicode_literals
from jx_base.expressions import BetweenOp as BetweenOp_
class BetweenOp(BetweenOp_):
pass
| klahnakoski/jx-sqlite | vendor/jx_python/expressions/between_op.py | Python | mpl-2.0 | 438 | 0 |
import json
from copy import copy
from collections import OrderedDict
# SSE "protocol" is described here: http://mzl.la/UPFyxY
class ServerSentEvent(object):
def __init__(self, data=None, event=None, retry=None, id=None):
if data is None and event is None:
raise ValueError('data and event cannot both be None')
self.data = data
self.event = event
self.retry = retry
self.id = id
def __copy__(self):
return ServerSentEvent(
data=copy(self.data),
event=self.event,
retry=self.retry,
id=self.id
)
def format(self):
items = OrderedDict(self)
if items['data'] is None:
items['data'] = '-'
elif isinstance(items['data'], str):
pass
else:
items['data'] = json.dumps(items['data'])
return items
def __iter__(self):
if self.retry:
yield 'retry', self.retry
yield 'data', self.data
if self.event:
yield 'event', self.event
if self.id:
yield 'id', self.id
def __str__(self):
return '{}\n\n'.format('\n'.join(
['{}: {}'.format(k, v) for k, v in self.format().items()]
))
def __repr__(self):
return '<ServerSentEvent event="{}">'.format(self.event if self.event else '')
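if __name__ == '__main__':
    # Illustrative example (added here; not part of the original module):
    # the SSE wire format produced by __str__ for a JSON payload.
    event = ServerSentEvent(data={'count': 3}, event='update', id=7)
    print(str(event))
    # Expected output (per format()/__iter__ above):
    # data: {"count": 3}
    # event: update
    # id: 7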
| smithk86/flask-sse | flask_sse/server_sent_event.py | Python | mit | 1,387 | 0.000721 |
__author__ = 'a.paoletti'
import maya.cmds as cmd
import os
import sys
sys.path.append("C://Users//a.paoletti//Desktop//MY//CORSOSCRIPTING - DISPLACE_GEOTIFF//gdalwin32-1.6//bin")
import colorsys
def getTexture():
"""
:rtype : String
:return : Name of the texture applied to the lambert's color channel
"""
sel = cmd.ls(sl=True)
print '--------- Selection is: ' + sel[0] + ' ---------'
selMesh = cmd.listRelatives(sel, s=True)
print '----- Shape: ' + selMesh[0]
selSG = cmd.listConnections(selMesh[0], t='shadingEngine')
print '----- Shading group: ' + selSG[0]
selMat = cmd.listConnections(selSG[0], t='lambert')
print '----- Material: ' + selMat[0]
selTexture = cmd.listConnections(selMat[0]+'.color')
print '--------- The texture is: ' + selTexture[0] + ' ---------'
return selTexture[0]
def testColorAtPoint():
# to test this function, select the whole mesh in object mode
txtName = getTexture()
colors = cmd.colorAtPoint(txtName, o='RGB', su=16, sv=16, mu=0.0, mv=0.0, xu=0.5, xv=0.5)
print colors
def clamp(my_value, min_value, max_value):
return max(min(my_value, max_value), min_value)
def colorToElevation(r, g, b):
"""
Given an RGB triple, returns the height value by interpreting the image
as a physical (relief) map
:param r: red component between 0 and 1
:param g: green component between 0 and 1
:param b: blue component between 0 and 1
:return: Float representing the elevation of the point
"""
hsvColor = colorsys.rgb_to_hsv(r, g, b)
h = hsvColor[0]
s = hsvColor[1]
v = hsvColor[2]
base = 5
elevation = 0
# print "H--- " + str(h) + "S--- " + str(s) + "V--- " + str(v)
# if v > 0.5:
tmp = clamp((0.23-h), 0, 1) # 0 blue, 1 red
elevation = pow(base, tmp+1) - base
return elevation
def testGeoSampler():
sel = cmd.ls(sl=True)
if len(sel) == 0:
raise Exception("Selezionare il piano!")
print '--------- Selection is: ' + sel[0] + ' ---------'
cmd.selectMode(component=True)
cmd.select(sel[0]+'.vtx[:]')
cmd.polyGeoSampler(cs=False, cdo=False, dg=False, ac=True, bf=False)
vtxNumber = len(cmd.getAttr(sel[0]+'.vtx[:]'))
# cmd.softSelect(sse=1, ssd=1)
for i in range(0, vtxNumber):
v = sel[0]+'.vtx[%d]' % i
cmd.select(v, r=True)
vColor = cmd.polyColorPerVertex(query=True, r=True, g=True, b=True)
r = vColor[0]
g = vColor[1]
b = vColor[2]
h = colorToElevation(r, g, b)
cmd.move(h, y=True, r=True)
cmd.softSelect(sse=0)
cmd.selectMode(object=True)
def readGeoTiff(filepath):
try:
from osgeo import gdal
except:
raise Exception("Cannot find gdal modules")
# enable GDAL exceptions
gdal.UseExceptions()
ds = gdal.Open(filepath)
band = ds.GetRasterBand(1)
elevation = band.ReadAsArray()
print elevation.shape
print elevation
def readGeoTiff2(filepath):
import gdal
import gdalconst
# coordinates to get pixel values for
xValues = [122588.008]
yValues = [484475.146]
# set directory
os.chdir(r'D:\\temp\\AHN2_060')
# register all of the drivers
gdal.AllRegister()
# open the image
ds = gdal.Open(filepath, gdalconst.GA_ReadOnly)
if ds is None:
print 'Could not open image'
sys.exit(1)
# get image size
rows = ds.RasterYSize
cols = ds.RasterXSize
bands = ds.RasterCount
# get georeference info
transform = ds.GetGeoTransform()
xOrigin = transform[0]
yOrigin = transform[3]
pixelWidth = transform[1]
pixelHeight = transform[5]
# loop through the coordinates
for xValue, yValue in zip(xValues, yValues):
# get x,y
x = xValue
y = yValue
# compute pixel offset
xOffset = int((x - xOrigin) / pixelWidth)
yOffset = int((y - yOrigin) / pixelHeight)
# create a string to print out
s = "%s %s %s %s " % (x, y, xOffset, yOffset)
# loop through the bands
for i in xrange(1, bands + 1):
band = ds.GetRasterBand(i) # 1-based index
# read data and add the value to the string
data = band.ReadAsArray(xOffset, yOffset, 1, 1)
value = data[0, 0]
s = "%s%s " % (s, value)
# print out the data string
print s
# figure out how long the script took to run
| RainbowAcademy/ScriptingLectures | 2015/ContourLine/DisplaceFromImage.py | Python | gpl-2.0 | 4,181 | 0.028223 |
from satchless.item import InsufficientStock, StockedItem
from datetime import date
from django.utils.text import slugify
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.db import models
from django_prices.models import PriceField
from django.core.exceptions import ValidationError
from registration.models import Tournament
class Product(models.Model, StockedItem):
"""An abstract class that embodies everything that can be sold."""
price = PriceField('Price',
currency='EUR',
max_digits=5,
decimal_places=2,
blank=False,
default=0.0)
stock = models.PositiveSmallIntegerField('Product Stock',
blank=False,
default=0)
date_added = models.DateField('Date added')
last_modified = models.DateTimeField('Last modified')
slug = models.SlugField('Product slug', max_length=256)
def get_price_per_item(self):
return self.price
class Meta:
abstract = True
class TournamentProductUtilitiesManager(models.Manager):
def create_tournament_product(self, tournament, price=0.0, stock=0):
tourney_product = TournamentProduct(tournament=tournament,
price=price,
stock=stock,
date_added=date.today(),
last_modified=timezone.now())
tourney_product.save()
return tourney_product
def update_tournament_product(self, product_id, **kwargs):
additional_attributes = {'price', 'stock', 'tournament'}
tourney_product = TournamentProduct.objects.get(pk=product_id)
for attribute, value in kwargs.items():
assert attribute in additional_attributes
setattr(tourney_product, attribute, value)
tourney_product.save()
return tourney_product
def delete_tournament_product(self, product_id):
tourney_product = TournamentProduct.objects.get(pk=product_id)
tourney_product.delete()
class TournamentProduct(Product):
tournament = models.OneToOneField(Tournament)
objects = models.Manager()
utilities = TournamentProductUtilitiesManager()
def get_stock(self):
return self.stock
def save(self, *args, **kwargs):
" Override the save method to check the stock and set the slug "
if self.stock > self.tournament.get_available_spots():
msg = 'Stock of a TournamentProduct cannot be greater than the \
tournament available spots'
raise ValidationError(msg)
self.slug = slugify(unicode(self.tournament.slug))
super(TournamentProduct, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse('tournament_product_detail', kwargs={'slug': self.slug})
def __unicode__(self):
return self.tournament.title
| eldruz/tournament_registration | tournament_registration/capitalism/models.py | Python | bsd-3-clause | 3,092 | 0.000323 |
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
class AlarmsVitrage(horizon.Panel):
name = _("Alarms")
slug = "vitragealarms"
| openstack/vitrage-dashboard | vitrage_dashboard/alarms/panel.py | Python | apache-2.0 | 733 | 0 |
"""
Manages a Beaker cache of WMS capabilities documents.
@author: rwilkinson
"""
import logging
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from joj.lib.wmc_util import GetWebMapCapabilities
log = logging.getLogger(__name__)
class WmsCapabilityCache():
""" Manages a Beaker cache of WMS capabilities documents.
"""
def __init__(self, config):
"""Creates a cache using the supplied configuration parameters or defaults.
"""
self.enableCache = (config.get('wmscapabilitycache.enable', 'True').lower() == 'true')
if self.enableCache:
cache_opts = {
'cache.expire': config.get('wmscapabilitycache.expire', None),
'cache.type': config.get('wmscapabilitycache.type', 'file'),
'cache.data_dir': config.get('wmscapabilitycache.data_dir', '/tmp/ecomaps/wmscapabilitycache/data'),
'cache.lock_dir': config.get('wmscapabilitycache.lock_dir', None)
}
cacheMgr = CacheManager(**parse_cache_config_options(cache_opts))
self.cache = cacheMgr.get_cache('getWmsCapabilities')
log.info("WMS capability caching %s" % ("enabled" if self.enableCache else "disabled"))
def getWmsCapabilities(self, wmsurl, forceRefresh):
"""Gets the WMS capabilities for an endpoint URL from the cache or WMS server if not found in the cache.
"""
if self.enableCache:
def __doGet():
"""Makes request for capabilities.
"""
log.debug("WMS capabilities not found in cache for %s" % search_param)
return GetWebMapCapabilities(search_param)
search_param = wmsurl
if forceRefresh:
self.cache.remove_value(key = search_param)
log.debug("Looking for WMS capabilities in cache for %s" % search_param)
return self.cache.get(key = search_param, createfunc = __doGet)
else:
log.debug("Fetching WMS capabilities for %s (caching disabled)" % wmsurl)
return GetWebMapCapabilities(wmsurl)
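# Illustrative configuration (added here; not part of the original module).
# The keys below are exactly the ones read in __init__; the values are only
# example settings.
EXAMPLE_WMS_CACHE_CONFIG = {
    'wmscapabilitycache.enable': 'True',
    'wmscapabilitycache.expire': '3600',
    'wmscapabilitycache.type': 'file',
    'wmscapabilitycache.data_dir': '/tmp/ecomaps/wmscapabilitycache/data',
    'wmscapabilitycache.lock_dir': '/tmp/ecomaps/wmscapabilitycache/lock',
}
# cache = WmsCapabilityCache(EXAMPLE_WMS_CACHE_CONFIG)
# capabilities = cache.getWmsCapabilities(wmsurl, forceRefresh=False)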
| NERC-CEH/jules-jasmin | majic/joj/lib/wms_capability_cache.py | Python | gpl-2.0 | 2,164 | 0.00878 |
items = [1, 2, 3, 4, 5]
squared = []
for i in items:
squared.append(i**2)
print(squared)
squared = []
squared = list(map(lambda x: x**2, items))
print(squared)
def multiply(x):
return (x*x)
def add(x):
return (x+x)
funcs = [multiply, add]
for i in range(5):
value = map(lambda x:x(i), funcs)
print(list(value))
number_list = range(-5, 5)
less_than_zero = filter(lambda x: x < 0, number_list)
print(list(less_than_zero))
from functools import reduce
product = reduce( (lambda x, y: x * y), [1, 2, 3, 4])
print(product)
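# For comparison (added note, not in the original file): the same results are
# usually written with comprehensions in idiomatic Python.
squared = [x**2 for x in items]
less_than_zero = [x for x in number_list if x < 0]
print(squared)
print(less_than_zero)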
| cragwen/hello-world | py/interpy/4_MapFilterReduce.py | Python | unlicense | 542 | 0.012915 |
from nose.tools import with_setup, eq_ as eq
from common import vim, cleanup
from threading import Timer
@with_setup(setup=cleanup)
def test_interrupt_from_another_thread():
session = vim.session
timer = Timer(0.5, lambda: session.threadsafe_call(lambda: session.stop()))
timer.start()
eq(vim.session.next_message(), None)
| traverseda/python-client | test/test_concurrency.py | Python | apache-2.0 | 341 | 0 |
#!/usr/bin/env python
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Parses mojom IDL files.
This script parses one or more input mojom files and produces corresponding
module files fully describing the definitions contained within each mojom. The
module data is pickled and can be easily consumed by other tools to, e.g.,
generate usable language bindings.
"""
import argparse
import builtins
import codecs
import errno
import json
import logging
import multiprocessing
import os
import os.path
import sys
import traceback
from collections import defaultdict
from mojom.generate import module
from mojom.generate import translate
from mojom.parse import parser
from mojom.parse import conditional_features
# Disable this for easier debugging.
# In Python 2, subprocesses just hang when exceptions are thrown :(.
_ENABLE_MULTIPROCESSING = sys.version_info[0] > 2
if sys.version_info < (3, 4):
_MULTIPROCESSING_USES_FORK = sys.platform.startswith('linux')
else:
# https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
if __name__ == '__main__' and sys.platform == 'darwin':
multiprocessing.set_start_method('fork')
_MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
def _ResolveRelativeImportPath(path, roots):
"""Attempts to resolve a relative import path against a set of possible roots.
Args:
path: The relative import path to resolve.
roots: A list of absolute paths which will be checked in descending length
order for a match against path.
Returns:
A normalized absolute path combining one of the roots with the input path if
and only if such a file exists.
Raises:
ValueError: The path could not be resolved against any of the given roots.
"""
for root in reversed(sorted(roots, key=len)):
abs_path = os.path.join(root, path)
if os.path.isfile(abs_path):
return os.path.normcase(os.path.normpath(abs_path))
raise ValueError('"%s" does not exist in any of %s' % (path, roots))
def _RebaseAbsolutePath(path, roots):
"""Rewrites an absolute file path as relative to an absolute directory path in
roots.
Args:
path: The absolute path of an existing file.
roots: A list of absolute directory paths. The given path argument must fall
within one of these directories.
Returns:
A path equivalent to the input path, but relative to one of the provided
roots. If the input path falls within multiple roots, the longest root is
chosen (and thus the shortest relative path is returned).
Paths returned by this method always use forward slashes as a separator to
mirror mojom import syntax.
Raises:
ValueError if the given path does not fall within any of the listed roots.
"""
assert os.path.isabs(path)
assert os.path.isfile(path)
assert all(map(os.path.isabs, roots))
sorted_roots = list(reversed(sorted(roots, key=len)))
def try_rebase_path(path, root):
head, rebased_path = os.path.split(path)
while head != root:
head, tail = os.path.split(head)
if not tail:
return None
rebased_path = os.path.join(tail, rebased_path)
return rebased_path
for root in sorted_roots:
relative_path = try_rebase_path(path, root)
if relative_path:
# TODO(crbug.com/953884): Use pathlib for this kind of thing once we're
# fully migrated to Python 3.
return relative_path.replace('\\', '/')
raise ValueError('%s does not fall within any of %s' % (path, sorted_roots))
def _GetModuleFilename(mojom_filename):
return mojom_filename + '-module'
def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
dependencies, loaded_modules, module_metadata):
"""Recursively ensures that a module and its dependencies are loaded.
Args:
mojom_abspath: An absolute file path pointing to a mojom file to load.
module_path: The relative path used to identify mojom_abspath.
abs_paths: A mapping from module paths to absolute file paths for all
inputs given to this execution of the script.
asts: A map from each input mojom's absolute path to its parsed AST.
dependencies: A mapping of which input mojoms depend on each other, indexed
by absolute file path.
loaded_modules: A mapping of all modules loaded so far, including non-input
modules that were pulled in as transitive dependencies of the inputs.
module_metadata: Metadata to be attached to every module loaded by this
helper.
Returns:
None
On return, loaded_modules will be populated with the loaded input mojom's
Module as well as the Modules of all of its transitive dependencies."""
if mojom_abspath in loaded_modules:
# Already done.
return
for dep_abspath, dep_path in sorted(dependencies[mojom_abspath]):
if dep_abspath not in loaded_modules:
_EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
loaded_modules, module_metadata)
imports = {}
for imp in asts[mojom_abspath].import_list:
path = imp.import_filename
imports[path] = loaded_modules[abs_paths[path]]
loaded_modules[mojom_abspath] = translate.OrderedModule(
asts[mojom_abspath], module_path, imports)
loaded_modules[mojom_abspath].metadata = dict(module_metadata)
def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
allowed_imports = set()
processed_deps = set()
def collect(metadata_filename):
processed_deps.add(metadata_filename)
with open(metadata_filename) as f:
metadata = json.load(f)
allowed_imports.update(
map(os.path.normcase, map(os.path.normpath, metadata['sources'])))
for dep_metadata in metadata['deps']:
if dep_metadata not in processed_deps:
collect(dep_metadata)
collect(build_metadata_filename)
return allowed_imports
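# Illustrative shape of a build metadata file consumed by the function above
# (file names here are hypothetical; the 'sources' and 'deps' keys match the
# --check-imports help text further below):
#
#   {
#     "sources": ["services/example/public/mojom/example.mojom"],
#     "deps": ["gen/services/other/public/mojom/other.build_metadata"]
#   }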
# multiprocessing helper.
def _ParseAstHelper(mojom_abspath, enabled_features):
with codecs.open(mojom_abspath, encoding='utf-8') as f:
ast = parser.Parse(f.read(), mojom_abspath)
conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
return mojom_abspath, ast
# multiprocessing helper.
def _SerializeHelper(mojom_abspath, mojom_path):
module_path = os.path.join(_SerializeHelper.output_root_path,
_GetModuleFilename(mojom_path))
module_dir = os.path.dirname(module_path)
if not os.path.exists(module_dir):
try:
# Python 2 doesn't support exist_ok on makedirs(), so we just ignore
# that failure if it happens. It's possible during build due to races
# among build steps with module outputs in the same directory.
os.makedirs(module_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(module_path, 'wb') as f:
_SerializeHelper.loaded_modules[mojom_abspath].Dump(f)
class _ExceptionWrapper:
def __init__(self):
# Do not capture exception object to ensure pickling works.
self.formatted_trace = traceback.format_exc()
class _FuncWrapper:
"""Marshals exceptions and spreads args."""
def __init__(self, func):
self._func = func
def __call__(self, args):
# multiprocessing does not gracefully handle exceptions.
# https://crbug.com/1219044
try:
return self._func(*args)
except: # pylint: disable=bare-except
return _ExceptionWrapper()
def _Shard(target_func, arg_list, processes=None):
arg_list = list(arg_list)
if processes is None:
processes = multiprocessing.cpu_count()
# Seems optimal to have each process perform at least 2 tasks.
processes = min(processes, len(arg_list) // 2)
if sys.platform == 'win32':
# TODO(crbug.com/1190269) - we can't use more than 56
# cores on Windows or Python3 may hang.
processes = min(processes, 56)
# Don't spin up processes unless there is enough work to merit doing so.
if not _ENABLE_MULTIPROCESSING or processes < 2:
for arg_tuple in arg_list:
yield target_func(*arg_tuple)
return
pool = multiprocessing.Pool(processes=processes)
try:
wrapped_func = _FuncWrapper(target_func)
for result in pool.imap_unordered(wrapped_func, arg_list):
if isinstance(result, _ExceptionWrapper):
sys.stderr.write(result.formatted_trace)
sys.exit(1)
yield result
finally:
pool.close()
pool.join() # Needed on Windows to avoid WindowsError during terminate.
pool.terminate()
def _ParseMojoms(mojom_files,
input_root_paths,
output_root_path,
module_root_paths,
enabled_features,
module_metadata,
allowed_imports=None):
"""Parses a set of mojom files and produces serialized module outputs.
Args:
mojom_files: A list of mojom files to process. Paths must be absolute paths
which fall within one of the input or output root paths.
input_root_paths: A list of absolute filesystem paths which may be used to
resolve relative mojom file paths.
output_root_path: An absolute filesystem path which will service as the root
for all emitted artifacts. Artifacts produced from a given mojom file
are based on the mojom's relative path, rebased onto this path.
Additionally, the script expects this root to contain already-generated
modules for any transitive dependencies not listed in mojom_files.
module_root_paths: A list of absolute filesystem paths which contain
already-generated modules for any non-transitive dependencies.
enabled_features: A list of enabled feature names, controlling which AST
nodes are filtered by [EnableIf] or [EnableIfNot] attributes.
module_metadata: A list of 2-tuples representing metadata key-value pairs to
attach to each compiled module output.
Returns:
None.
Upon completion, a mojom-module file will be saved for each input mojom.
"""
assert input_root_paths
assert output_root_path
loaded_mojom_asts = {}
loaded_modules = {}
input_dependencies = defaultdict(set)
mojom_files_to_parse = dict((os.path.normcase(abs_path),
_RebaseAbsolutePath(abs_path, input_root_paths))
for abs_path in mojom_files)
abs_paths = dict(
(path, abs_path) for abs_path, path in mojom_files_to_parse.items())
logging.info('Parsing %d .mojom into ASTs', len(mojom_files_to_parse))
map_args = ((mojom_abspath, enabled_features)
for mojom_abspath in mojom_files_to_parse)
for mojom_abspath, ast in _Shard(_ParseAstHelper, map_args):
loaded_mojom_asts[mojom_abspath] = ast
logging.info('Processing dependencies')
for mojom_abspath, ast in sorted(loaded_mojom_asts.items()):
invalid_imports = []
for imp in ast.import_list:
import_abspath = _ResolveRelativeImportPath(imp.import_filename,
input_root_paths)
if allowed_imports and import_abspath not in allowed_imports:
invalid_imports.append(imp.import_filename)
abs_paths[imp.import_filename] = import_abspath
if import_abspath in mojom_files_to_parse:
# This import is in the input list, so we're going to translate it
# into a module below; however it's also a dependency of another input
# module. We retain record of dependencies to help with input
# processing later.
input_dependencies[mojom_abspath].add(
(import_abspath, imp.import_filename))
elif import_abspath not in loaded_modules:
# We have an import that isn't being parsed right now. It must already
# be parsed and have a module file sitting in a corresponding output
# location.
module_path = _GetModuleFilename(imp.import_filename)
module_abspath = _ResolveRelativeImportPath(
module_path, module_root_paths + [output_root_path])
with open(module_abspath, 'rb') as module_file:
loaded_modules[import_abspath] = module.Module.Load(module_file)
if invalid_imports:
raise ValueError(
'\nThe file %s imports the following files not allowed by build '
'dependencies:\n\n%s\n' % (mojom_abspath, '\n'.join(invalid_imports)))
logging.info('Loaded %d modules from dependencies', len(loaded_modules))
# At this point all transitive imports not listed as inputs have been loaded
# and we have a complete dependency tree of the unprocessed inputs. Now we can
# load all the inputs, resolving dependencies among them recursively as we go.
logging.info('Ensuring inputs are loaded')
num_existing_modules_loaded = len(loaded_modules)
for mojom_abspath, mojom_path in mojom_files_to_parse.items():
_EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts,
input_dependencies, loaded_modules, module_metadata)
assert (num_existing_modules_loaded +
len(mojom_files_to_parse) == len(loaded_modules))
# Now we have fully translated modules for every input and every transitive
# dependency. We can dump the modules to disk for other tools to use.
logging.info('Serializing %d modules', len(mojom_files_to_parse))
# Windows does not use fork() for multiprocessing, so we'd need to pass
# loaded_module via IPC rather than via globals. Doing so is slower than not
# using multiprocessing.
_SerializeHelper.loaded_modules = loaded_modules
_SerializeHelper.output_root_path = output_root_path
# Doesn't seem to help past 4. Perhaps IO bound here?
processes = 4 if _MULTIPROCESSING_USES_FORK else 0
map_args = mojom_files_to_parse.items()
for _ in _Shard(_SerializeHelper, map_args, processes=processes):
pass
def Run(command_line):
debug_logging = os.environ.get('MOJOM_PARSER_DEBUG', '0') != '0'
logging.basicConfig(level=logging.DEBUG if debug_logging else logging.WARNING,
format='%(levelname).1s %(relativeCreated)6d %(message)s')
logging.info('Started (%s)', os.path.basename(sys.argv[0]))
arg_parser = argparse.ArgumentParser(
description="""
Parses one or more mojom files and produces corresponding module outputs fully
describing the definitions therein. The output is exhaustive, stable, and
sufficient for another tool to consume and emit e.g. usable language
bindings based on the original mojoms.""",
epilog="""
Note that each transitive import dependency reachable from the input mojoms must
either also be listed as an input or must have its corresponding compiled module
already present in the provided output root.""")
arg_parser.add_argument(
'--input-root',
default=[],
action='append',
metavar='ROOT',
dest='input_root_paths',
help='Adds ROOT to the set of root paths against which relative input '
'paths should be resolved. Provided root paths are always searched '
'in order from longest absolute path to shortest.')
arg_parser.add_argument(
'--output-root',
action='store',
required=True,
dest='output_root_path',
metavar='ROOT',
help='Use ROOT as the root path in which the parser should emit compiled '
'modules for each processed input mojom. The path of emitted module is '
'based on the relative input path, rebased onto this root. Note that '
'ROOT is also searched for existing modules of any transitive imports '
'which were not included in the set of inputs.')
arg_parser.add_argument(
'--module-root',
default=[],
action='append',
metavar='ROOT',
dest='module_root_paths',
help='Adds ROOT to the set of root paths to search for existing modules '
'of non-transitive imports. Provided root paths are always searched in '
'order from longest absolute path to shortest.')
arg_parser.add_argument(
'--mojoms',
nargs='+',
dest='mojom_files',
default=[],
metavar='MOJOM_FILE',
help='Input mojom filename(s). Each filename must be either an absolute '
'path which falls within one of the given input or output roots, or a '
'relative path the parser will attempt to resolve using each of those '
'roots in unspecified order.')
arg_parser.add_argument(
'--mojom-file-list',
action='store',
metavar='LIST_FILENAME',
help='Input file whose contents are a list of mojoms to process. This '
'may be provided in lieu of --mojoms to avoid hitting command line '
'length limitations')
arg_parser.add_argument(
'--enable-feature',
dest='enabled_features',
default=[],
action='append',
metavar='FEATURE',
help='Enables a named feature when parsing the given mojoms. Features '
'are identified by arbitrary string values. Specifying this flag with a '
'given FEATURE name will cause the parser to process any syntax elements '
'tagged with an [EnableIf=FEATURE] or [EnableIfNot] attribute. If this '
'flag is not provided for a given FEATURE, such tagged elements are '
'discarded by the parser and will not be present in the compiled output.')
arg_parser.add_argument(
'--check-imports',
dest='build_metadata_filename',
action='store',
metavar='METADATA_FILENAME',
help='Instructs the parser to check imports against a set of allowed '
'imports. Allowed imports are based on build metadata within '
'METADATA_FILENAME. This is a JSON file with a `sources` key listing '
'paths to the set of input mojom files being processed by this parser '
'run, and a `deps` key listing paths to metadata files for any '
'dependencies of these inputs. This feature can be used to implement '
'build-time dependency checking for mojom imports, where each build '
'metadata file corresponds to a build target in the dependency graph of '
'a typical build system.')
arg_parser.add_argument(
'--add-module-metadata',
dest='module_metadata',
default=[],
action='append',
metavar='KEY=VALUE',
help='Adds a metadata key-value pair to the output module. This can be '
'used by build toolchains to augment parsed mojom modules with product-'
'specific metadata for later extraction and use by custom bindings '
'generators.')
args, _ = arg_parser.parse_known_args(command_line)
if args.mojom_file_list:
with open(args.mojom_file_list) as f:
args.mojom_files.extend(f.read().split())
if not args.mojom_files:
raise ValueError(
'Must list at least one mojom file via --mojoms or --mojom-file-list')
mojom_files = list(map(os.path.abspath, args.mojom_files))
input_roots = list(map(os.path.abspath, args.input_root_paths))
output_root = os.path.abspath(args.output_root_path)
module_roots = list(map(os.path.abspath, args.module_root_paths))
if args.build_metadata_filename:
allowed_imports = _CollectAllowedImportsFromBuildMetadata(
args.build_metadata_filename)
else:
allowed_imports = None
module_metadata = list(
map(lambda kvp: tuple(kvp.split('=')), args.module_metadata))
_ParseMojoms(mojom_files, input_roots, output_root, module_roots,
args.enabled_features, module_metadata, allowed_imports)
logging.info('Finished')
# Exit without running GC, which can save multiple seconds due to the large
# number of objects created.
os._exit(0)
if __name__ == '__main__':
Run(sys.argv[1:])
| nwjs/chromium.src | mojo/public/tools/mojom/mojom_parser.py | Python | bsd-3-clause | 19,670 | 0.00788 |
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from functools import wraps
TRANSIENT_USER_TYPES = []
def is_transient_user(user):
return isinstance(user, tuple(TRANSIENT_USER_TYPES))
def prevent_access_to_transient_users(view_func):
def _wrapped_view(request, *args, **kwargs):
'''Test if the user is transient'''
for user_type in TRANSIENT_USER_TYPES:
if is_transient_user(request.user):
return HttpResponseRedirect('/')
return view_func(request, *args, **kwargs)
return login_required(wraps(view_func)(_wrapped_view))
def to_list(func):
@wraps(func)
def f(*args, **kwargs):
return list(func(*args, **kwargs))
return f
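# Illustrative usage (added here; not part of the original module): to_list
# turns a generator-based helper into one that returns a plain list.
@to_list
def squares(n):
    for i in range(n):
        yield i * i
# squares(4) == [0, 1, 4, 9]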
| pu239ppy/authentic2 | authentic2/decorators.py | Python | agpl-3.0 | 764 | 0.003927 |
"""
The main purpose of this module is to expose LinkCollector.collect_links().
"""
import cgi
import functools
import itertools
import logging
import mimetypes
import os
import re
from collections import OrderedDict
from pip._vendor import html5lib, requests
from pip._vendor.distlib.compat import unescape
from pip._vendor.requests.exceptions import RetryError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._internal.exceptions import NetworkConnectionError
from pip._internal.models.link import Link
from pip._internal.models.search_scope import SearchScope
from pip._internal.network.utils import raise_for_status
from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
from pip._internal.utils.misc import pairwise, redact_auth_from_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url, url_to_path
from pip._internal.vcs import is_url, vcs
if MYPY_CHECK_RUNNING:
from optparse import Values
from typing import (
Callable, Iterable, List, MutableMapping, Optional,
Protocol, Sequence, Tuple, TypeVar, Union,
)
import xml.etree.ElementTree
from pip._vendor.requests import Response
from pip._internal.network.session import PipSession
HTMLElement = xml.etree.ElementTree.Element
ResponseHeaders = MutableMapping[str, str]
# Used in the @lru_cache polyfill.
F = TypeVar('F')
class LruCache(Protocol):
def __call__(self, maxsize=None):
# type: (Optional[int]) -> Callable[[F], F]
raise NotImplementedError
logger = logging.getLogger(__name__)
# Fallback to noop_lru_cache in Python 2
# TODO: this can be removed when python 2 support is dropped!
def noop_lru_cache(maxsize=None):
# type: (Optional[int]) -> Callable[[F], F]
def _wrapper(f):
# type: (F) -> F
return f
return _wrapper
_lru_cache = getattr(functools, "lru_cache", noop_lru_cache) # type: LruCache
def _match_vcs_scheme(url):
# type: (str) -> Optional[str]
"""Look for VCS schemes in the URL.
Returns the matched VCS scheme, or None if there's no match.
"""
for scheme in vcs.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
return scheme
return None
def _is_url_like_archive(url):
# type: (str) -> bool
"""Return whether the URL looks like an archive.
"""
filename = Link(url).filename
for bad_ext in ARCHIVE_EXTENSIONS:
if filename.endswith(bad_ext):
return True
return False
class _NotHTML(Exception):
def __init__(self, content_type, request_desc):
# type: (str, str) -> None
super(_NotHTML, self).__init__(content_type, request_desc)
self.content_type = content_type
self.request_desc = request_desc
def _ensure_html_header(response):
# type: (Response) -> None
"""Check the Content-Type header to ensure the response contains HTML.
Raises `_NotHTML` if the content type is not text/html.
"""
content_type = response.headers.get("Content-Type", "")
if not content_type.lower().startswith("text/html"):
raise _NotHTML(content_type, response.request.method)
class _NotHTTP(Exception):
pass
def _ensure_html_response(url, session):
# type: (str, PipSession) -> None
"""Send a HEAD request to the URL, and ensure the response contains HTML.
Raises `_NotHTTP` if the URL is not available for a HEAD request, or
`_NotHTML` if the content type is not text/html.
"""
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
if scheme not in {'http', 'https'}:
raise _NotHTTP()
resp = session.head(url, allow_redirects=True)
raise_for_status(resp)
_ensure_html_header(resp)
def _get_html_response(url, session):
# type: (str, PipSession) -> Response
"""Access an HTML page with GET, and return the response.
This consists of three parts:
1. If the URL looks suspiciously like an archive, send a HEAD first to
check the Content-Type is HTML, to avoid downloading a large file.
Raise `_NotHTTP` if the content type cannot be determined, or
`_NotHTML` if it is not HTML.
2. Actually perform the request. Raise HTTP exceptions on network failures.
3. Check the Content-Type header to make sure we got HTML, and raise
`_NotHTML` otherwise.
"""
if _is_url_like_archive(url):
_ensure_html_response(url, session=session)
logger.debug('Getting page %s', redact_auth_from_url(url))
resp = session.get(
url,
headers={
"Accept": "text/html",
# We don't want to blindly return cached data for
# /simple/, because authors generally expect that
# twine upload && pip install will function, but if
# they've done a pip install in the last ~10 minutes
# it won't. Thus by setting this to zero we will not
# blindly use any cached data, however the benefit of
# using max-age=0 instead of no-cache, is that we will
# still support conditional requests, so we will still
# minimize traffic sent in cases where the page hasn't
# changed at all, we will just always incur the round
# trip for the conditional GET now instead of only
# once per 10 minutes.
# For more information, please see pypa/pip#5670.
"Cache-Control": "max-age=0",
},
)
raise_for_status(resp)
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
# requirement of an url. Unless we issue a HEAD request on every
# url we cannot know ahead of time for sure if something is HTML
# or not. However we can check after we've downloaded it.
_ensure_html_header(resp)
return resp
def _get_encoding_from_headers(headers):
# type: (ResponseHeaders) -> Optional[str]
"""Determine if we have any encoding information in our headers.
"""
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
return params['charset']
return None
def _determine_base_url(document, page_url):
# type: (HTMLElement, str) -> str
"""Determine the HTML document's base URL.
This looks for a ``<base>`` tag in the HTML document. If present, its href
attribute denotes the base URL of anchor tags in the document. If there is
no such tag (or if it does not have a valid href attribute), the HTML
file's URL is used as the base URL.
:param document: An HTML document representation. The current
implementation expects the result of ``html5lib.parse()``.
:param page_url: The URL of the HTML document.
"""
for base in document.findall(".//base"):
href = base.get("href")
if href is not None:
return href
return page_url
def _clean_url_path_part(part):
# type: (str) -> str
"""
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
"""
# We unquote prior to quoting to make sure nothing is double quoted.
return urllib_parse.quote(urllib_parse.unquote(part))
def _clean_file_url_path(part):
# type: (str) -> str
"""
Clean the first part of a URL path that corresponds to a local
filesystem path (i.e. the first part after splitting on "@" characters).
"""
# We unquote prior to quoting to make sure nothing is double quoted.
# Also, on Windows the path part might contain a drive letter which
# should not be quoted. On Linux where drive letters do not
# exist, the colon should be quoted. We rely on urllib.request
# to do the right thing here.
return urllib_request.pathname2url(urllib_request.url2pathname(part))
# percent-encoded: /
_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)
def _clean_url_path(path, is_local_path):
# type: (str, bool) -> str
"""
Clean the path portion of a URL.
"""
if is_local_path:
clean_func = _clean_file_url_path
else:
clean_func = _clean_url_path_part
# Split on the reserved characters prior to cleaning so that
# revision strings in VCS URLs are properly preserved.
parts = _reserved_chars_re.split(path)
cleaned_parts = []
for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
cleaned_parts.append(clean_func(to_clean))
# Normalize %xx escapes (e.g. %2f -> %2F)
cleaned_parts.append(reserved.upper())
return ''.join(cleaned_parts)
def _clean_link(url):
# type: (str) -> str
"""
Make sure a link is fully quoted.
For example, if ' ' occurs in the URL, it will be replaced with "%20",
and without double-quoting other characters.
"""
# Split the URL into parts according to the general structure
# `scheme://netloc/path;parameters?query#fragment`.
result = urllib_parse.urlparse(url)
# If the netloc is empty, then the URL refers to a local filesystem path.
is_local_path = not result.netloc
path = _clean_url_path(result.path, is_local_path=is_local_path)
return urllib_parse.urlunparse(result._replace(path=path))
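# Worked example (added for illustration; 'example.com' is a placeholder):
#   _clean_link('https://example.com/packages/my wheel.whl')
#       -> 'https://example.com/packages/my%20wheel.whl'
#   _clean_link('https://example.com/packages/my%20wheel.whl')
#       -> 'https://example.com/packages/my%20wheel.whl'  (not double-quoted)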
def _create_link_from_element(
anchor, # type: HTMLElement
page_url, # type: str
base_url, # type: str
):
# type: (...) -> Optional[Link]
"""
Convert an anchor element in a simple repository page to a Link.
"""
href = anchor.get("href")
if not href:
return None
url = _clean_link(urllib_parse.urljoin(base_url, href))
pyrequire = anchor.get('data-requires-python')
pyrequire = unescape(pyrequire) if pyrequire else None
yanked_reason = anchor.get('data-yanked')
if yanked_reason:
# This is a unicode string in Python 2 (and 3).
yanked_reason = unescape(yanked_reason)
link = Link(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
)
return link
class CacheablePageContent(object):
def __init__(self, page):
# type: (HTMLPage) -> None
assert page.cache_link_parsing
self.page = page
def __eq__(self, other):
# type: (object) -> bool
return (isinstance(other, type(self)) and
self.page.url == other.page.url)
def __hash__(self):
# type: () -> int
return hash(self.page.url)
def with_cached_html_pages(
fn, # type: Callable[[HTMLPage], Iterable[Link]]
):
# type: (...) -> Callable[[HTMLPage], List[Link]]
"""
Given a function that parses an Iterable[Link] from an HTMLPage, cache the
function's result (keyed by CacheablePageContent), unless the HTMLPage
`page` has `page.cache_link_parsing == False`.
"""
@_lru_cache(maxsize=None)
def wrapper(cacheable_page):
# type: (CacheablePageContent) -> List[Link]
return list(fn(cacheable_page.page))
@functools.wraps(fn)
def wrapper_wrapper(page):
# type: (HTMLPage) -> List[Link]
if page.cache_link_parsing:
return wrapper(CacheablePageContent(page))
return list(fn(page))
return wrapper_wrapper
@with_cached_html_pages
def parse_links(page):
# type: (HTMLPage) -> Iterable[Link]
"""
Parse an HTML document, and yield its anchor elements as Link objects.
"""
document = html5lib.parse(
page.content,
transport_encoding=page.encoding,
namespaceHTMLElements=False,
)
url = page.url
base_url = _determine_base_url(document, url)
for anchor in document.findall(".//a"):
link = _create_link_from_element(
anchor,
page_url=url,
base_url=base_url,
)
if link is None:
continue
yield link
class HTMLPage(object):
"""Represents one page, along with its URL"""
def __init__(
self,
content, # type: bytes
encoding, # type: Optional[str]
url, # type: str
cache_link_parsing=True, # type: bool
):
# type: (...) -> None
"""
:param encoding: the encoding to decode the given content.
:param url: the URL from which the HTML was downloaded.
:param cache_link_parsing: whether links parsed from this page's url
should be cached. PyPI index urls should
have this set to False, for example.
"""
self.content = content
self.encoding = encoding
self.url = url
self.cache_link_parsing = cache_link_parsing
def __str__(self):
# type: () -> str
return redact_auth_from_url(self.url)
def _handle_get_page_fail(
link, # type: Link
reason, # type: Union[str, Exception]
meth=None # type: Optional[Callable[..., None]]
):
# type: (...) -> None
if meth is None:
meth = logger.debug
meth("Could not fetch URL %s: %s - skipping", link, reason)
def _make_html_page(response, cache_link_parsing=True):
# type: (Response, bool) -> HTMLPage
encoding = _get_encoding_from_headers(response.headers)
return HTMLPage(
response.content,
encoding=encoding,
url=response.url,
cache_link_parsing=cache_link_parsing)
def _get_html_page(link, session=None):
# type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
if session is None:
raise TypeError(
"_get_html_page() missing 1 required keyword argument: 'session'"
)
url = link.url.split('#', 1)[0]
# Check for VCS schemes that do not support lookup as web pages.
vcs_scheme = _match_vcs_scheme(url)
if vcs_scheme:
logger.warning('Cannot look at %s URL %s because it does not support '
'lookup as web pages.', vcs_scheme, link)
return None
# Tack index.html onto file:// URLs that point to directories
scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
# add trailing slash if not present so urljoin doesn't trim
# final segment
if not url.endswith('/'):
url += '/'
url = urllib_parse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s', url)
try:
resp = _get_html_response(url, session=session)
except _NotHTTP:
logger.warning(
'Skipping page %s because it looks like an archive, and cannot '
'be checked by a HTTP HEAD request.', link,
)
except _NotHTML as exc:
logger.warning(
'Skipping page %s because the %s request got Content-Type: %s.'
'The only supported Content-Type is text/html',
link, exc.request_desc, exc.content_type,
)
except NetworkConnectionError as exc:
_handle_get_page_fail(link, exc)
except RetryError as exc:
_handle_get_page_fail(link, exc)
except SSLError as exc:
reason = "There was a problem confirming the ssl certificate: "
reason += str(exc)
_handle_get_page_fail(link, reason, meth=logger.info)
except requests.ConnectionError as exc:
_handle_get_page_fail(link, "connection error: {}".format(exc))
except requests.Timeout:
_handle_get_page_fail(link, "timed out")
else:
return _make_html_page(resp,
cache_link_parsing=link.cache_link_parsing)
return None
def _remove_duplicate_links(links):
# type: (Iterable[Link]) -> List[Link]
"""
Return a list of links, with duplicates removed and ordering preserved.
"""
# We preserve the ordering when removing duplicates because we can.
return list(OrderedDict.fromkeys(links))
def group_locations(locations, expand_dir=False):
# type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
"""
Divide a list of locations into two groups: "files" (archives) and "urls."
:return: A pair of lists (files, urls).
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
# type: (str) -> None
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if os.path.isdir(path):
if expand_dir:
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url:
urls.append(url)
else:
logger.warning(
"Path '%s' is ignored: it is a directory.", path,
)
elif os.path.isfile(path):
sort_path(path)
else:
logger.warning(
"Url '%s' is ignored: it is neither a file "
"nor a directory.", url,
)
elif is_url(url):
# Only add url with clear scheme
urls.append(url)
else:
logger.warning(
"Url '%s' is ignored. It is either a non-existing "
"path or lacks a specific scheme.", url,
)
return files, urls
class CollectedLinks(object):
"""
Encapsulates the return value of a call to LinkCollector.collect_links().
The return value includes both URLs to project pages containing package
links, as well as individual package Link objects collected from other
sources.
This info is stored separately as:
(1) links from the configured file locations,
(2) links from the configured find_links, and
(3) urls to HTML project pages, as described by the PEP 503 simple
repository API.
"""
def __init__(
self,
files, # type: List[Link]
find_links, # type: List[Link]
project_urls, # type: List[Link]
):
# type: (...) -> None
"""
:param files: Links from file locations.
:param find_links: Links from find_links.
:param project_urls: URLs to HTML project pages, as described by
the PEP 503 simple repository API.
"""
self.files = files
self.find_links = find_links
self.project_urls = project_urls
class LinkCollector(object):
"""
Responsible for collecting Link objects from all configured locations,
making network requests as needed.
The class's main method is its collect_links() method.
"""
def __init__(
self,
session, # type: PipSession
search_scope, # type: SearchScope
):
# type: (...) -> None
self.search_scope = search_scope
self.session = session
@classmethod
def create(cls, session, options, suppress_no_index=False):
# type: (PipSession, Values, bool) -> LinkCollector
"""
:param session: The Session to use to make requests.
:param suppress_no_index: Whether to ignore the --no-index option
when constructing the SearchScope object.
"""
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index and not suppress_no_index:
logger.debug(
'Ignoring indexes: %s',
','.join(redact_auth_from_url(url) for url in index_urls),
)
index_urls = []
# Make sure find_links is a list before passing to create().
find_links = options.find_links or []
search_scope = SearchScope.create(
find_links=find_links, index_urls=index_urls,
)
link_collector = LinkCollector(
session=session, search_scope=search_scope,
)
return link_collector
@property
def find_links(self):
# type: () -> List[str]
return self.search_scope.find_links
def fetch_page(self, location):
# type: (Link) -> Optional[HTMLPage]
"""
Fetch an HTML page containing package links.
"""
return _get_html_page(location, session=self.session)
def collect_links(self, project_name):
# type: (str) -> CollectedLinks
"""Find all available links for the given project name.
:return: All the Link objects (unfiltered), as a CollectedLinks object.
"""
search_scope = self.search_scope
index_locations = search_scope.get_index_urls_locations(project_name)
index_file_loc, index_url_loc = group_locations(index_locations)
fl_file_loc, fl_url_loc = group_locations(
self.find_links, expand_dir=True,
)
file_links = [
Link(url) for url in itertools.chain(index_file_loc, fl_file_loc)
]
# We trust every directly linked archive in find_links
find_link_links = [Link(url, '-f') for url in self.find_links]
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links.
# We want to filter out anything that does not have a secure origin.
url_locations = [
link for link in itertools.chain(
# Mark PyPI indices as "cache_link_parsing == False" -- this
# will avoid caching the result of parsing the page for links.
(Link(url, cache_link_parsing=False) for url in index_url_loc),
(Link(url) for url in fl_url_loc),
)
if self.session.is_secure_origin(link)
]
url_locations = _remove_duplicate_links(url_locations)
lines = [
'{} location(s) to search for versions of {}:'.format(
len(url_locations), project_name,
),
]
for link in url_locations:
lines.append('* {}'.format(link))
logger.debug('\n'.join(lines))
return CollectedLinks(
files=file_links,
find_links=find_link_links,
project_urls=url_locations,
)
| sserrot/champion_relationships | venv/Lib/site-packages/pip/_internal/index/collector.py | Python | mit | 22,838 | 0 |
import json
import sys
import logging
import logging.handlers
def load_config():
'''Loads application configuration from a JSON file'''
try:
json_data = open('config.json')
config = json.load(json_data)
json_data.close()
return config
except Exception:
print """There was an error loading config.json.
Make sure that the file exists and it's a valid JSON file."""
sys.exit(1)
def init_logger(file_name='clouddump.log'):
'''
Initializes the logging file and module
parameters
----------
file_name: A string with the name of the file to write the logs in
'''
logger = logging.getLogger('clouddump')
log_file_handler = logging.handlers.RotatingFileHandler(
file_name, maxBytes = 10**9)
log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
log_file_handler.setFormatter(log_format)
logger.addHandler(log_file_handler)
logger.setLevel(logging.DEBUG)
if len(sys.argv) > 1:
if sys.argv[1] == '-v' or sys.argv[1] == '--verbose':
console = logging.StreamHandler()
console.setLevel(logging.INFO)
logger.addHandler(console)
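if __name__ == '__main__':
    # Illustrative usage (added here; not part of the original module): read
    # ./config.json and set up the rotating-file logger; passing -v/--verbose
    # on the command line also echoes INFO messages to the console.
    config = load_config()
    init_logger('clouddump.log')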
| svera/clouddump | tools.py | Python | gpl-2.0 | 1,226 | 0.006525 |
# -*- coding: utf-8 -*-
#
# RERO ILS
# Copyright (C) 2020 RERO
# Copyright (C) 2020 UCLouvain
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import mock
from flask import url_for
from invenio_accounts.testutils import login_user_via_session
from utils import get_json
from rero_ils.modules.patron_transactions.permissions import \
PatronTransactionPermission
def test_pttr_permissions_api(client, patron_martigny,
system_librarian_martigny,
librarian_martigny,
patron_transaction_overdue_martigny,
patron_transaction_overdue_saxon,
patron_transaction_overdue_sion):
"""Test patron transactions permissions api."""
pttr_permissions_url = url_for(
'api_blueprint.permissions',
route_name='patron_transactions'
)
pttr_martigny_permission_url = url_for(
'api_blueprint.permissions',
route_name='patron_transactions',
record_pid=patron_transaction_overdue_martigny.pid
)
pttr_saxon_permission_url = url_for(
'api_blueprint.permissions',
route_name='patron_transactions',
record_pid=patron_transaction_overdue_saxon.pid
)
pttr_sion_permission_url = url_for(
'api_blueprint.permissions',
route_name='patron_transactions',
record_pid=patron_transaction_overdue_sion.pid
)
# Not logged
res = client.get(pttr_permissions_url)
assert res.status_code == 401
# Logged as patron
login_user_via_session(client, patron_martigny.user)
res = client.get(pttr_permissions_url)
assert res.status_code == 403
# Logged as librarian
# * lib can 'list' and 'read' pttr of its own organisation
# * lib can 'create', 'update', 'delete' only for its library
    # * lib can't 'read' pttr of other organisations.
    # * lib can't 'create', 'update', 'delete' pttr for other org/lib
login_user_via_session(client, librarian_martigny.user)
res = client.get(pttr_martigny_permission_url)
assert res.status_code == 200
data = get_json(res)
assert data['read']['can']
assert data['list']['can']
assert data['create']['can']
assert data['update']['can']
# 'delete' should be true but return false because an event is linked
# assert data['delete']['can']
res = client.get(pttr_saxon_permission_url)
assert res.status_code == 200
data = get_json(res)
assert data['read']['can']
assert data['list']['can']
assert data['update']['can']
# 'delete' should be true but return false because an event is linked
# assert not data['delete']['can']
res = client.get(pttr_sion_permission_url)
assert res.status_code == 200
data = get_json(res)
assert not data['read']['can']
assert data['list']['can']
assert not data['update']['can']
assert not data['delete']['can']
# Logged as system librarian
# * sys_lib can do everything about pttr of its own organisation
# * sys_lib can't do anything about pttr of other organisation
login_user_via_session(client, system_librarian_martigny.user)
res = client.get(pttr_saxon_permission_url)
assert res.status_code == 200
data = get_json(res)
assert data['read']['can']
assert data['list']['can']
assert data['create']['can']
assert data['update']['can']
# 'delete' should be true but return false because an event is linked
# assert data['delete']['can']
res = client.get(pttr_sion_permission_url)
assert res.status_code == 200
data = get_json(res)
assert not data['read']['can']
assert not data['update']['can']
assert not data['delete']['can']
def test_pttr_permissions(patron_martigny,
librarian_martigny,
system_librarian_martigny,
org_martigny, patron_transaction_overdue_saxon,
patron_transaction_overdue_sion,
patron_transaction_overdue_martigny):
"""Test patron transaction permissions class."""
# Anonymous user
assert not PatronTransactionPermission.list(None, {})
assert not PatronTransactionPermission.read(None, {})
assert not PatronTransactionPermission.create(None, {})
assert not PatronTransactionPermission.update(None, {})
assert not PatronTransactionPermission.delete(None, {})
# As Patron
pttr_m = patron_transaction_overdue_martigny
pttr_sa = patron_transaction_overdue_saxon
pttr_si = patron_transaction_overdue_sion
with mock.patch(
'rero_ils.modules.patron_transactions.permissions.current_patrons',
[patron_martigny]
):
assert PatronTransactionPermission.list(None, pttr_m)
assert PatronTransactionPermission.read(None, pttr_m)
assert not PatronTransactionPermission.create(None, pttr_m)
assert not PatronTransactionPermission.update(None, pttr_m)
assert not PatronTransactionPermission.delete(None, pttr_m)
# As Librarian
with mock.patch(
'rero_ils.modules.patron_transactions.permissions.current_librarian',
librarian_martigny
):
assert PatronTransactionPermission.list(None, pttr_m)
assert PatronTransactionPermission.read(None, pttr_m)
assert PatronTransactionPermission.create(None, pttr_m)
assert PatronTransactionPermission.update(None, pttr_m)
assert PatronTransactionPermission.delete(None, pttr_m)
assert PatronTransactionPermission.read(None, pttr_sa)
assert PatronTransactionPermission.create(None, pttr_sa)
assert PatronTransactionPermission.update(None, pttr_sa)
assert PatronTransactionPermission.delete(None, pttr_sa)
assert not PatronTransactionPermission.read(None, pttr_si)
assert not PatronTransactionPermission.create(None, pttr_si)
assert not PatronTransactionPermission.update(None, pttr_si)
assert not PatronTransactionPermission.delete(None, pttr_si)
# As System-librarian
with mock.patch(
'rero_ils.modules.patron_transactions.permissions.current_librarian',
system_librarian_martigny
):
assert PatronTransactionPermission.list(None, pttr_sa)
assert PatronTransactionPermission.read(None, pttr_sa)
assert PatronTransactionPermission.create(None, pttr_sa)
assert PatronTransactionPermission.update(None, pttr_sa)
assert PatronTransactionPermission.delete(None, pttr_sa)
assert not PatronTransactionPermission.read(None, pttr_si)
assert not PatronTransactionPermission.create(None, pttr_si)
assert not PatronTransactionPermission.update(None, pttr_si)
assert not PatronTransactionPermission.delete(None, pttr_si)
|
rero/reroils-app
|
tests/api/patron_transactions/test_patron_transactions_permissions.py
|
Python
|
gpl-2.0
| 7,427
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from . import SearchBackend
import importlib
import logging
class SearchBroker(SearchBackend):
def __init__(self, config_name=None):
super(SearchBroker, self).__init__(config_name)
self._servers = {}
if self._settings is None:
return
for server in self._settings:
if config_name is None or server in config_name:
try:
_module = '.'.join(self._settings[server]['ENGINE'].split('.')[:-1])
_search_class = self._settings[server]['ENGINE'].split('.')[-1]
except KeyError:
logging.warning("Search engine '%s' is missing the required "
"'ENGINE' setting" % server)
break
try:
module = importlib.import_module(_module)
try:
self._servers[server] = getattr(module, _search_class)(server)
except AttributeError:
logging.warning("Search backend '%s'. No search class "
"'%s' defined." % (server, _search_class))
except ImportError:
logging.warning("Search backend '%s'. Cannot import '%s'" %
(server, _module))
def search(self, unit):
if not self._servers:
return []
results = []
counter = {}
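        # Merge hits from every configured backend: keep the first result seen for each
        # (source, target) pair and accumulate its total count across servers.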
for server in self._servers:
for result in self._servers[server].search(unit):
translation_pair = result['source'] + result['target']
if translation_pair not in counter:
counter[translation_pair] = result['count']
results.append(result)
else:
counter[translation_pair] += result['count']
for item in results:
item['count'] = counter[item['source']+item['target']]
return results
def update(self, language, obj):
for server in self._servers:
self._servers[server].update(language, obj)
|
electrolinux/pootle
|
pootle/core/search/broker.py
|
Python
|
gpl-3.0
| 2,440
| 0.002049
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of Androwarn.
#
# Copyright (C) 2012, 2019, Thomas Debize <tdebize at mail.com>
# All rights reserved.
#
# Androwarn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androwarn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androwarn. If not, see <http://www.gnu.org/licenses/>.
# Global imports
import logging
import codecs
import pprint
# Logger
log = logging.getLogger('log')
def grab_main_activity(apk) :
"""
@param apk : an APK instance
@rtype : the name of the main activity
"""
return apk.get_main_activity()
def grab_activities(apk) :
"""
@param apk : an APK instance
@rtype : the android:name attribute of all activities
"""
return apk.get_activities()
def grab_services(apk) :
"""
@param apk : an APK instance
@rtype : the android:name attribute of all services
"""
return apk.get_services()
def grab_receivers(apk) :
"""
@param apk : an APK instance
@rtype : the android:name attribute of all receivers
"""
return apk.get_receivers()
def grab_providers(apk) :
"""
@param apk : an APK instance
@rtype : the android:name attribute of all providers
"""
return apk.get_providers()
def grab_permissions(apk) :
"""
@param apk : an APK instance
@rtype : a list of permissions
"""
'''
result = ["Asked: %s" % "\n".join(sorted(apk.get_permissions())),
"Implied: %s" % apk.get_uses_implied_permission_list(),
"Declared: %s" % apk.get_declared_permissions()]
'''
result = ["Asked: %s" % pprint.pformat(sorted(apk.get_permissions())),
"Implied: %s" % pprint.pformat(sorted(apk.get_uses_implied_permission_list())),
"Declared: %s" % pprint.pformat(sorted(apk.get_declared_permissions()))]
return result
def grab_features(apk) :
"""
@param apk : an APK instance
@rtype : a list of features
"""
return list(apk.get_features())
def grab_libraries(apk) :
"""
@param apk : an APK instance
@rtype : the libraries' names
"""
return list(apk.get_libraries())
def grab_file_list(apk) :
"""
@param apk : an APK instance
    @rtype : the file list inside the APK
"""
return apk.get_files()
def grab_certificate_information(apk) :
"""
@param apk : an APK instance
@rtype : a certificate object by giving the name in the apk file
"""
cert_info = []
cert_info.append("APK is signed: %s\n" % apk.is_signed())
for index,cert in enumerate(apk.get_certificates()):
cert_info.append("Certificate #%s" % index)
cert_info_issuer = ["Issuer:", cert.issuer.human_friendly]
cert_info_subject = ["Subject:", cert.subject.human_friendly]
cert_info.extend(cert_info_issuer)
cert_info.extend(cert_info_subject)
cert_info.append("Serial number: %s" % cert.serial_number)
cert_info.append("Hash algorithm: %s" % cert.hash_algo)
cert_info.append("Signature algorithm: %s" % cert.signature_algo)
cert_info.append("SHA-1 thumbprint: %s" % codecs.encode(cert.sha1, 'hex').decode())
cert_info.append("SHA-256 thumbprint: %s" % codecs.encode(cert.sha256, 'hex').decode())
cert_info.append("")
return cert_info
def grab_sdk_versions(apk) :
result = ["Declared target SDK: %s" % apk.get_target_sdk_version(),
"Effective target SDK: %s" % apk.get_effective_target_sdk_version(),
"Min SDK: %s" % apk.get_min_sdk_version(),
"Max SDK: %s" % apk.get_max_sdk_version()]
return result
|
maaaaz/androwarn
|
warn/search/manifest/manifest.py
|
Python
|
lgpl-3.0
| 4,316
| 0.01089
|
import requests
import warnings
warnings.warn('\n\n\n**** data.session_client will be deprecated in the next py2cytoscape release. ****\n\n\n')
class SessionClient(object):
def __init__(self, url):
self.__url = url + 'session'
def delete(self):
requests.delete(self.__url)
def save(self, file_name=None):
if file_name is None:
raise ValueError('Session file name is required.')
post_url = self.__url
params = {'file': file_name}
res = requests.post(post_url, params=params)
return res
def open(self, file_name=None):
if file_name is None:
raise ValueError('Session file name is required.')
get_url = self.__url
params = {'file': file_name}
res = requests.get(get_url, params=params)
return res
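# Example usage (sketch; the cyREST base URL and session file name are assumptions):
# client = SessionClient('http://localhost:1234/v1/')
# client.save(file_name='mysession.cys')
# client.open(file_name='mysession.cys')
# client.delete()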
|
idekerlab/py2cytoscape
|
py2cytoscape/data/session_client.py
|
Python
|
mit
| 839
| 0.001192
|
import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdbp_p.dcl.resource import RESOURCE_PATH
from mi.dataset.driver.flord_g.ctdbp_p.dcl.flord_g_ctdbp_p_dcl_recovered_driver import parse
__author__ = 'jeff roy'
log = get_logger()
class DriverTest(unittest.TestCase):
def test_one(self):
source_file_path = os.path.join(RESOURCE_PATH, 'ctdbp01_20150804_061734.DAT')
particle_data_handler = ParticleDataHandler()
particle_data_handler = parse(None, source_file_path, particle_data_handler)
log.debug("SAMPLES: %s", particle_data_handler._samples)
log.debug("FAILURE: %s", particle_data_handler._failure)
self.assertEquals(particle_data_handler._failure, False)
if __name__ == '__main__':
test = DriverTest('test_one')
test.test_one()
|
renegelinas/mi-instrument
|
mi/dataset/driver/flord_g/ctdbp_p/dcl/test/test_flord_g_ctdbp_p_dcl_recovered_driver.py
|
Python
|
bsd-2-clause
| 893
| 0.003359
|
from setting import MATCH_TYPE_JC,MATCH_TYPE_M14
#url_m14_fmt = "http://www.okooo.com/livecenter/zucai/?mf=ToTo&date=15077"
#url_jc_fmt = "http://www.okooo.com/livecenter/jingcai/?date=2015-05-26"
url_jc_fmt = "http://www.okooo.com/livecenter/jingcai/?date={0}"
url_m14_fmt = "http://www.okooo.com/livecenter/zucai/?mf=ToTo&date={0}"
#url_jc_odds_change = "http://www.okooo.com/soccer/match/736957/odds/change/2/"
url_jc_odds_change_fmt = "http://www.okooo.com/soccer/match/{0}/odds/change/{1}/"
def get_url_jc(dt):
dt_str = dt.strftime("%Y-%m-%d")
return url_jc_fmt.format(dt_str)
def get_url_m14(sid):
    return url_m14_fmt.format(sid)
def get_url_odds_change(okooo_id,bookmaker_id=2):
return url_jc_odds_change_fmt.format(okooo_id,bookmaker_id)
OKOOO_BOOKMAKER_DATA = {
"jingcai":2,
}
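# Example usage (sketch; the date and match id are taken from the commented sample URLs above):
# import datetime
# get_url_jc(datetime.datetime(2015, 5, 26)) # -> ".../livecenter/jingcai/?date=2015-05-26"
# get_url_odds_change(736957) # -> ".../soccer/match/736957/odds/change/2/"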
|
justasabc/kubernetes-ubuntu
|
smartfootball/okooo/okooo_setting.py
|
Python
|
apache-2.0
| 796
| 0.023869
|
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import Http404
from django.http import HttpResponseForbidden
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import render
from wye.base.constants import WorkshopStatus, FeedbackType
from wye.base.emailer import send_mail
from wye.organisations.models import Organisation
from wye.profiles.models import Profile
from wye.regions.models import RegionalLead
from .models import Workshop, WorkshopFeedBack
class WorkshopAccessMixin(object):
def dispatch(self, request, *args, **kwargs):
user = request.user
pk = self.kwargs.get(self.pk_url_kwarg, None)
workshop = Workshop.objects.get(id=pk)
is_admin = Profile.is_admin(user)
is_lead = (Profile.is_regional_lead(user) and
RegionalLead.is_regional_lead(user, workshop.location))
is_organiser = (Profile.is_organiser(user) and
user in workshop.requester.user.all())
if not (is_admin or is_lead or is_organiser):
            return HttpResponseForbidden("Not sufficient permission")
return super(WorkshopAccessMixin, self).dispatch(request, *args, **kwargs)
class WorkshopFeedBackMixin(object):
"""
Restrict access to feedback url if
- Workshop is not completed
- If the user accessing the url is not presenter or
organiser
"""
def dispatch(self, request, *args, **kwargs):
pk = self.kwargs.get('pk')
workshop = Workshop.objects.get(id=pk)
user = self.request.user
if workshop.status != WorkshopStatus.COMPLETED:
raise Http404
if not (workshop.is_presenter(user) or workshop.is_organiser(user)):
raise PermissionDenied
return super(WorkshopFeedBackMixin, self).dispatch(request, *args, **kwargs)
class WorkshopRestrictMixin(object):
"""
Mixin to restrict
- For organisation to add workshop if no feedback is shared.
- For presenter to takeup workshop if no feedback is shared
"""
allow_presenter = False
def dispatch(self, request, *args, **kwargs):
self.user = request.user
self.feedback_required = []
# check if user is tutor
if Profile.is_presenter(self.user) and self.allow_presenter:
self.validate_presenter_feedback()
elif (Profile.is_organiser(self.user) and
Organisation.list_user_organisations(self.user).exists()):
# if user is from organisation
self.validate_organisation_feedback()
elif (Profile.is_regional_lead(self.user) or
Profile.is_admin(self.user)):
pass # don't restrict lead and admin
else:
msg = """
            To request a workshop you need to create an organisation.\n\n
Please use organisation tab above to create your organisation"""
            # render an error page asking the user to create an organisation first
return render(request, 'error.html', {'message': msg})
if self.feedback_required:
return self.return_response(request)
return super(WorkshopRestrictMixin, self).dispatch(request, *args, **kwargs)
def validate_presenter_feedback(self):
workshops = Workshop.objects.filter(
presenter=self.user, status=WorkshopStatus.COMPLETED)
for workshop in workshops:
feedback = WorkshopFeedBack.objects.filter(
workshop=workshop, feedback_type=FeedbackType.PRESENTER
).count()
if feedback == 0:
self.feedback_required.append(workshop)
def validate_organisation_feedback(self):
workshops = Workshop.objects.filter(
requester__user=self.user, status=WorkshopStatus.COMPLETED)
for workshop in workshops:
feedback = WorkshopFeedBack.objects.filter(
workshop=workshop, feedback_type=FeedbackType.ORGANISATION
).count()
if feedback == 0:
self.feedback_required.append(workshop)
def return_response(self, request):
msg = "Please complete the feeback for %s" % (
", ".join(map(str, self.feedback_required)))
# return json for ajax request
if request.is_ajax():
return JsonResponse({"status": False, "msg": msg})
messages.error(request, msg)
return HttpResponseRedirect(reverse('workshops:workshop_list'))
class WorkshopEmailMixin(object):
def send_mail_to_presenter(self, user, context):
"""
Send email to presenter.
@param user: Is user object
@param context: Is dict of data required by email template.
"""
# Send email to presenter
return send_mail([user.email], context, self.email_dir)
def send_mail_to_group(self, context, exclude_emails=None):
"""
Send email to org/group users.
@param context: Is dict of data required by email template.
@exclude_emails: Is list of email to be excluded from
email update.
"""
if exclude_emails is None:
exclude_emails = []
# Collage POC and admin email
poc_admin_user = Profile.get_user_with_type(
user_type=['Collage POC', 'admin']
).values_list('email', flat=True)
# Org user email
org_user_emails = self.object.requester.user.filter(
is_active=True
).values_list('email', flat=True)
# all presenter if any
all_presenter_email = self.object.presenter.values_list(
'email', flat=True
)
# List of tutor who have shown interest in that location
region_interested_member = Profile.objects.filter(
interested_locations=self.object.requester.location,
usertype__slug='tutor'
).values_list('user__email', flat=True)
all_email = []
all_email.extend(org_user_emails)
all_email.extend(all_presenter_email)
all_email.extend(poc_admin_user)
all_email.extend(region_interested_member)
all_email = set(all_email)
all_email = list(all_email.difference(exclude_emails))
send_mail(all_email, context, self.email_dir)
|
harisibrahimkv/wye
|
wye/workshops/mixins.py
|
Python
|
mit
| 6,362
| 0.000629
|
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('form_processor', '0025_caseforms_server_date'),
]
operations = [
migrations.CreateModel(
name='CaseTransaction',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('form_uuid', models.CharField(max_length=255)),
('server_date', models.DateTimeField()),
('type', models.PositiveSmallIntegerField(choices=[(0, 'form'), (1, 'rebuild')])),
('case', models.ForeignKey(related_query_name='xform', related_name='xform_set', db_column='case_uuid', to_field='case_uuid', to='form_processor.CommCareCaseSQL', db_index=False, on_delete=models.CASCADE)),
],
options={
'ordering': ['server_date'],
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='caseforms',
unique_together=None,
),
migrations.RemoveField(
model_name='caseforms',
name='case',
),
migrations.DeleteModel(
name='CaseForms',
),
migrations.AlterUniqueTogether(
name='casetransaction',
unique_together=set([('case', 'form_uuid')]),
),
]
|
dimagi/commcare-hq
|
corehq/form_processor/migrations/0026_caseforms_to_casetransaction.py
|
Python
|
bsd-3-clause
| 1,422
| 0.00211
|
# This script only works for OCLC UDEV reports created after December 31, 2015
import csv
import datetime
import re
import requests
import sys
import time
from lxml import html
user_date = raw_input('Enter report month and year (mm/yyyy) or year only (yyyy): ')
if len(user_date) == 7: # For running a report for a single month
try:
input_date = datetime.datetime.strptime(user_date, '%m/%Y')
input_month = input_date.strftime('%b')
input_year = input_date.strftime('%Y')
except ValueError:
print 'Report month and year %s is not in mm/yyyy format. Please run again.' % (user_date)
sys.exit(1)
elif len(user_date) == 4: # For running a report for an entire year
input_date = datetime.datetime.strptime(user_date, '%Y')
input_year = input_date.strftime('%Y')
else:
print 'Report month and year %s is not in mm/yyyy or yyyy format. Please run again.' % (user_date)
sys.exit(1)
print '%s is running ...' % (sys.argv[0])
url = 'http://lms01.harvard.edu/oclc-project/udev/'
r = requests.get(url)
doc = html.fromstring(r.text)
index_links = doc.xpath('//a/@href') #Get all links
report_links = []
report_log = []
for index_link in index_links:
    if (len(user_date) == 7 and input_month in index_link and input_year in index_link) or (len(user_date) == 4 and input_year in index_link):  # Find links on index page that match the user's input date
index_link = url + index_link
r = requests.get(index_link)
doc = html.fromstring(r.text)
page_links = doc.xpath('//a/@href') #Get report links on each dated report page
for page_link in page_links:
page_link = index_link + '/' + page_link
report_links.append(page_link)
oclc_symbol = ['HHG'] #List of OCLC symbols to match in records; separate codes with commas; put '' in list to retrieve all OCLC symbols
output_data = []
for report_link in report_links: #Process each report
report_date = report_link[report_link.find('/d')+2:report_link.find('/d')+9]
report_date = '20%s-%s-%s' % (report_date[:2], report_date[2:4], report_date[4:6])
r = requests.get(report_link)
content = r.text
count_949 = 0 # Count the number of 949 fields and log for troubleshooting
for symbol in oclc_symbol:
count_949 = count_949 + len(content.split('=l'+ symbol))-1 # Count the number of 949 fields and log for troubleshooting
report_log.append([report_date, count_949])
if count_949 > 0: # Only process reports that have records with relevant holdings
content = content[:content.find('\n \n PROCESSING SUMMARY STATISTICS')] # Remove report footer
content = content.replace('\n \n ERROR SEVERITY','\n ERROR SEVERITY') # Remove double line break before 'ERROR SEVERITY' note
content = content.replace('\n \n LDR','\n LDR') # Remove double line break before 'LDR'
content = content.replace('\n \n RECORD','\n RECORD') # Remove double line break before 'RECORD REJECTED' note
records = content.split('\n \n')
for record in records:
if any(symbol in record for symbol in ['=l'+ symbol for symbol in oclc_symbol]): # Only process records with holdings for selected OCLC symbols
lines = []
lines = record.split('\n')
record_data = []
title = []
last_tag = ''
for line in lines:
errors = {}
tag = line[8:11]
line_no = line[18:22]
if line.startswith(' ERROR:'):
errors['Error'] = line[8:]
errors['Report Date'] = report_date
errors['Report Filename'] = report_link
errors['Error Line'] = ''
if re.findall(r'\d{3}\sFIELD', line):
errors['Error Field'] = re.findall(r'\d{3}\sFIELD', line)[-1].split(' ')[0]
elif re.findall(r'\sFIELD\s\d{3}', line):
errors['Error Field'] = re.findall(r'\sFIELD\s\d{3}', line)[-1].split(' ')[-1]
else:
errors['Error Field'] = ''
if re.findall(r'POSITION[S]?\s[\d-]+', line):
errors['Error Position'] = re.findall(r'POSITION[S]?\s[\d-]+', line)[0].split(' ')[1]
else:
errors['Error Position'] = ''
record_data.append(errors)
elif line.startswith(' ERROR SEVERITY') or line.startswith(' RECORD REJECTED'):
for data in record_data:
data['OCLC Status'] = line.strip()
elif tag == '001': # 001 field within line start ("|") and line end ("+|") indicators
for data in record_data:
data['Bib No'] = line[27:-2].split('-')[1]
data['Hol No'] = aleph_hol = line[27:-2].split('-')[0]
elif tag == '949':
for data in record_data:
data['Library'] = line[line.index('=l')+2:-2] # 949 field within subfield l and line end indicator ("+|")
else:
if tag == ' ':
tag = last_tag
for data in record_data:
if tag == data['Error Field']:
if tag == '008' or tag == '006':
data['Error Line'] = line[27:].rstrip('|').rstrip('+')
elif data['Error Position'] == '':
data['Error Line'] = line[27:].rstrip('|').rstrip('+')
else:
if int(line_no) <= int(data['Error Position']) <= int(line_no) + len(line[27: ].rstrip('|')):
data['Error Line'] = line[27:].rstrip('|').rstrip('+')
if tag == '245':
title.append(line[27:].rstrip('|').rstrip('+'))
elif tag == ' ' and last_tag == '245':
title += [line[27:].rstrip('|').rstrip('+')]
if line[8:11] != ' ':
last_tag = line[8:11]
if title:
title = ''.join(title) # join parts of the title
title = ' '.join(re.split(r'=[a-z0-9]', title)) # remove subfield markers and tags
for data in record_data:
data['Title'] = title#.strip() temporarily commented out--strip causes error if 245 is missing
output_data += record_data
print 'Report for OCLC Symbol(s):', ', '.join(oclc_symbol)
report_log.sort()
for log in report_log:
print 'Report Date:', log[0], 'Record Count:', log[1]
if len(user_date) == 7:
f = 'OCLC_UDEV_%s_%s.csv' % (input_year, input_date.strftime('%m')) #Changes format of month in input date for filename
elif len(user_date) == 4:
f = 'OCLC_UDEV_%s.csv' % (input_year) #For an annual report
with open(f,'wb') as output:
fieldnames = ['Report Date', 'Library', 'Bib No', 'Hol No', 'Title', 'Error', 'Error Field', 'Error Position', 'Error Line', 'OCLC Status', 'Report Filename']
writer = csv.DictWriter(output, delimiter=',', fieldnames=fieldnames)
writer.writeheader()
for data in output_data:
writer.writerow(data)
|
vmdowney/oclc-udev
|
oclc_udev.py
|
Python
|
mit
| 7,800
| 0.010128
|
"""Tests for Openstack cloud volumes"""
import fauxfactory
import pytest
from cfme.cloud.provider.openstack import OpenStackProvider
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ
from cfme.utils.log import logger
pytestmark = [
pytest.mark.usefixtures("setup_provider_modscope"),
pytest.mark.provider([OpenStackProvider], scope='module')
]
VOLUME_SIZE = 1
@pytest.yield_fixture(scope='function')
def volume(appliance, provider):
collection = appliance.collections.volumes
storage_manager = '{} Cinder Manager'.format(provider.name)
volume = collection.create(name=fauxfactory.gen_alpha(),
storage_manager=storage_manager,
tenant=provider.data['provisioning']['cloud_tenant'],
size=VOLUME_SIZE,
provider=provider)
yield volume
try:
if volume.exists:
volume.delete(wait=False)
except Exception:
logger.warning('Exception during volume deletion - skipping..')
@pytest.mark.meta(blockers=[BZ(1502609, forced_streams=["5.9"])])
def test_create_volume(volume, provider):
assert volume.exists
assert volume.size == '{} GB'.format(VOLUME_SIZE)
assert volume.tenant == provider.data['provisioning']['cloud_tenant']
@pytest.mark.meta(blockers=[BZ(1502609, forced_streams=["5.9"])])
def test_edit_volume(volume, appliance):
new_name = fauxfactory.gen_alpha()
volume.edit(new_name)
view = navigate_to(appliance.collections.volumes, 'All')
assert view.entities.get_entity(name=new_name, surf_pages=True)
@pytest.mark.meta(blockers=[BZ(1502609, forced_streams=["5.9"])])
def test_delete_volume(volume):
volume.delete()
assert not volume.exists
|
jkandasa/integration_tests
|
cfme/tests/openstack/cloud/test_volumes.py
|
Python
|
gpl-2.0
| 1,818
| 0.00055
|
# pylint: disable=redefined-outer-name, comparison-with-callable
"""Test helper functions."""
import gzip
import importlib
import logging
import os
import sys
from typing import Any, Dict, List, Optional, Tuple, Union
import cloudpickle
import numpy as np
import pytest
from _pytest.outcomes import Skipped
from packaging.version import Version
from ..data import InferenceData, from_dict
_log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def eight_schools_params():
"""Share setup for eight schools."""
return {
"J": 8,
"y": np.array([28.0, 8.0, -3.0, 7.0, -1.0, 1.0, 18.0, 12.0]),
"sigma": np.array([15.0, 10.0, 16.0, 11.0, 9.0, 11.0, 10.0, 18.0]),
}
@pytest.fixture(scope="module")
def draws():
"""Share default draw count."""
return 500
@pytest.fixture(scope="module")
def chains():
"""Share default chain count."""
return 2
def create_model(seed=10):
"""Create model with fake data."""
np.random.seed(seed)
nchains = 4
ndraws = 500
data = {
"J": 8,
"y": np.array([28.0, 8.0, -3.0, 7.0, -1.0, 1.0, 18.0, 12.0]),
"sigma": np.array([15.0, 10.0, 16.0, 11.0, 9.0, 11.0, 10.0, 18.0]),
}
posterior = {
"mu": np.random.randn(nchains, ndraws),
"tau": abs(np.random.randn(nchains, ndraws)),
"eta": np.random.randn(nchains, ndraws, data["J"]),
"theta": np.random.randn(nchains, ndraws, data["J"]),
}
posterior_predictive = {"y": np.random.randn(nchains, ndraws, len(data["y"]))}
sample_stats = {
"energy": np.random.randn(nchains, ndraws),
"diverging": np.random.randn(nchains, ndraws) > 0.90,
"max_depth": np.random.randn(nchains, ndraws) > 0.90,
}
log_likelihood = {
"y": np.random.randn(nchains, ndraws, data["J"]),
}
prior = {
"mu": np.random.randn(nchains, ndraws) / 2,
"tau": abs(np.random.randn(nchains, ndraws)) / 2,
"eta": np.random.randn(nchains, ndraws, data["J"]) / 2,
"theta": np.random.randn(nchains, ndraws, data["J"]) / 2,
}
prior_predictive = {"y": np.random.randn(nchains, ndraws, len(data["y"])) / 2}
sample_stats_prior = {
"energy": np.random.randn(nchains, ndraws),
"diverging": (np.random.randn(nchains, ndraws) > 0.95).astype(int),
}
model = from_dict(
posterior=posterior,
posterior_predictive=posterior_predictive,
sample_stats=sample_stats,
log_likelihood=log_likelihood,
prior=prior,
prior_predictive=prior_predictive,
sample_stats_prior=sample_stats_prior,
observed_data={"y": data["y"]},
dims={
"y": ["obs_dim"],
"log_likelihood": ["obs_dim"],
"theta": ["school"],
"eta": ["school"],
},
coords={"obs_dim": range(data["J"])},
)
return model
def create_multidimensional_model(seed=10):
"""Create model with fake data."""
np.random.seed(seed)
nchains = 4
ndraws = 500
ndim1 = 5
ndim2 = 7
data = {
"y": np.random.normal(size=(ndim1, ndim2)),
"sigma": np.random.normal(size=(ndim1, ndim2)),
}
posterior = {
"mu": np.random.randn(nchains, ndraws),
"tau": abs(np.random.randn(nchains, ndraws)),
"eta": np.random.randn(nchains, ndraws, ndim1, ndim2),
"theta": np.random.randn(nchains, ndraws, ndim1, ndim2),
}
posterior_predictive = {"y": np.random.randn(nchains, ndraws, ndim1, ndim2)}
sample_stats = {
"energy": np.random.randn(nchains, ndraws),
"diverging": np.random.randn(nchains, ndraws) > 0.90,
}
log_likelihood = {
"y": np.random.randn(nchains, ndraws, ndim1, ndim2),
}
prior = {
"mu": np.random.randn(nchains, ndraws) / 2,
"tau": abs(np.random.randn(nchains, ndraws)) / 2,
"eta": np.random.randn(nchains, ndraws, ndim1, ndim2) / 2,
"theta": np.random.randn(nchains, ndraws, ndim1, ndim2) / 2,
}
prior_predictive = {"y": np.random.randn(nchains, ndraws, ndim1, ndim2) / 2}
sample_stats_prior = {
"energy": np.random.randn(nchains, ndraws),
"diverging": (np.random.randn(nchains, ndraws) > 0.95).astype(int),
}
model = from_dict(
posterior=posterior,
posterior_predictive=posterior_predictive,
sample_stats=sample_stats,
log_likelihood=log_likelihood,
prior=prior,
prior_predictive=prior_predictive,
sample_stats_prior=sample_stats_prior,
observed_data={"y": data["y"]},
dims={"y": ["dim1", "dim2"], "log_likelihood": ["dim1", "dim2"]},
coords={"dim1": range(ndim1), "dim2": range(ndim2)},
)
return model
def create_data_random(groups=None, seed=10):
"""Create InferenceData object using random data."""
if groups is None:
groups = ["posterior", "sample_stats", "observed_data", "posterior_predictive"]
rng = np.random.default_rng(seed)
data = rng.normal(size=(4, 500, 8))
idata_dict = dict(
posterior={"a": data[..., 0], "b": data},
sample_stats={"a": data[..., 0], "b": data},
observed_data={"b": data[0, 0, :]},
posterior_predictive={"a": data[..., 0], "b": data},
prior={"a": data[..., 0], "b": data},
prior_predictive={"a": data[..., 0], "b": data},
warmup_posterior={"a": data[..., 0], "b": data},
warmup_posterior_predictive={"a": data[..., 0], "b": data},
warmup_prior={"a": data[..., 0], "b": data},
)
idata = from_dict(
**{group: ary for group, ary in idata_dict.items() if group in groups}, save_warmup=True
)
return idata
@pytest.fixture()
def data_random():
"""Fixture containing InferenceData object using random data."""
idata = create_data_random()
return idata
@pytest.fixture(scope="module")
def models():
"""Fixture containing 2 mock inference data instances for testing."""
# blank line to keep black and pydocstyle happy
class Models:
model_1 = create_model(seed=10)
model_2 = create_model(seed=11)
return Models()
@pytest.fixture(scope="module")
def multidim_models():
"""Fixture containing 2 mock inference data instances with multidimensional data for testing."""
# blank line to keep black and pydocstyle happy
class Models:
model_1 = create_multidimensional_model(seed=10)
model_2 = create_multidimensional_model(seed=11)
return Models()
def check_multiple_attrs(
test_dict: Dict[str, List[str]], parent: InferenceData
) -> List[Union[str, Tuple[str, str]]]:
"""Perform multiple hasattr checks on InferenceData objects.
It is thought to first check if the parent object contains a given dataset,
and then (if present) check the attributes of the dataset.
Given the output of the function, all mismatches between expectation and reality can
be retrieved: a single string indicates a group mismatch and a tuple of strings
``(group, var)`` indicates a mismatch in the variable ``var`` of ``group``.
Parameters
----------
test_dict: dict of {str : list of str}
Its structure should be `{dataset1_name: [var1, var2], dataset2_name: [var]}`.
A ``~`` at the beginning of a dataset or variable name indicates the name NOT
being present must be asserted.
parent: InferenceData
InferenceData object on which to check the attributes.
Returns
-------
list
List containing the failed checks. It will contain either the dataset_name or a
tuple (dataset_name, var) for all non present attributes.
Examples
--------
The output below indicates that ``posterior`` group was expected but not found, and
variables ``a`` and ``b``:
["posterior", ("prior", "a"), ("prior", "b")]
Another example could be the following:
[("posterior", "a"), "~observed_data", ("sample_stats", "~log_likelihood")]
In this case, the output indicates that variable ``a`` was not found in ``posterior``
as it was expected, however, in the other two cases, the preceding ``~`` (kept from the
input negation notation) indicates that ``observed_data`` group should not be present
but was found in the InferenceData and that ``log_likelihood`` variable was found
in ``sample_stats``, also against what was expected.
"""
failed_attrs: List[Union[str, Tuple[str, str]]] = []
for dataset_name, attributes in test_dict.items():
if dataset_name.startswith("~"):
if hasattr(parent, dataset_name[1:]):
failed_attrs.append(dataset_name)
elif hasattr(parent, dataset_name):
dataset = getattr(parent, dataset_name)
for attribute in attributes:
if attribute.startswith("~"):
if hasattr(dataset, attribute[1:]):
failed_attrs.append((dataset_name, attribute))
elif not hasattr(dataset, attribute):
failed_attrs.append((dataset_name, attribute))
else:
failed_attrs.append(dataset_name)
return failed_attrs
def emcee_version():
"""Check emcee version.
Returns
-------
int
Major version number
"""
import emcee
return int(emcee.__version__[0])
def needs_emcee3_func():
"""Check if emcee3 is required."""
# pylint: disable=invalid-name
needs_emcee3 = pytest.mark.skipif(emcee_version() < 3, reason="emcee3 required")
return needs_emcee3
def _emcee_lnprior(theta):
"""Proper function to allow pickling."""
mu, tau, eta = theta[0], theta[1], theta[2:]
# Half-cauchy prior, hwhm=25
if tau < 0:
return -np.inf
prior_tau = -np.log(tau ** 2 + 25 ** 2)
prior_mu = -((mu / 10) ** 2) # normal prior, loc=0, scale=10
prior_eta = -np.sum(eta ** 2) # normal prior, loc=0, scale=1
return prior_mu + prior_tau + prior_eta
def _emcee_lnprob(theta, y, sigma):
"""Proper function to allow pickling."""
mu, tau, eta = theta[0], theta[1], theta[2:]
prior = _emcee_lnprior(theta)
like_vect = -(((mu + tau * eta - y) / sigma) ** 2)
like = np.sum(like_vect)
return like + prior, (like_vect, np.random.normal((mu + tau * eta), sigma))
def emcee_schools_model(data, draws, chains):
"""Schools model in emcee."""
import emcee
chains = 10 * chains # emcee is sad with too few walkers
y = data["y"]
sigma = data["sigma"]
J = data["J"] # pylint: disable=invalid-name
ndim = J + 2
pos = np.random.normal(size=(chains, ndim))
pos[:, 1] = np.absolute(pos[:, 1]) # pylint: disable=unsupported-assignment-operation
if emcee_version() < 3:
sampler = emcee.EnsembleSampler(chains, ndim, _emcee_lnprob, args=(y, sigma))
# pylint: enable=unexpected-keyword-arg
sampler.run_mcmc(pos, draws)
else:
here = os.path.dirname(os.path.abspath(__file__))
data_directory = os.path.join(here, "saved_models")
filepath = os.path.join(data_directory, "reader_testfile.h5")
backend = emcee.backends.HDFBackend(filepath) # pylint: disable=no-member
backend.reset(chains, ndim)
# pylint: disable=unexpected-keyword-arg
sampler = emcee.EnsembleSampler(
chains, ndim, _emcee_lnprob, args=(y, sigma), backend=backend
)
# pylint: enable=unexpected-keyword-arg
sampler.run_mcmc(pos, draws, store=True)
return sampler
# pylint:disable=no-member,no-value-for-parameter,invalid-name
def _pyro_noncentered_model(J, sigma, y=None):
import pyro
import pyro.distributions as dist
mu = pyro.sample("mu", dist.Normal(0, 5))
tau = pyro.sample("tau", dist.HalfCauchy(5))
with pyro.plate("J", J):
eta = pyro.sample("eta", dist.Normal(0, 1))
theta = mu + tau * eta
return pyro.sample("obs", dist.Normal(theta, sigma), obs=y)
def pyro_noncentered_schools(data, draws, chains):
"""Non-centered eight schools implementation in Pyro."""
import torch
from pyro.infer import MCMC, NUTS
y = torch.from_numpy(data["y"]).float()
sigma = torch.from_numpy(data["sigma"]).float()
nuts_kernel = NUTS(_pyro_noncentered_model, jit_compile=True, ignore_jit_warnings=True)
posterior = MCMC(nuts_kernel, num_samples=draws, warmup_steps=draws, num_chains=chains)
posterior.run(data["J"], sigma, y)
# This block lets the posterior be pickled
posterior.sampler = None
posterior.kernel.potential_fn = None
return posterior
# pylint:disable=no-member,no-value-for-parameter,invalid-name
def _numpyro_noncentered_model(J, sigma, y=None):
import numpyro
import numpyro.distributions as dist
mu = numpyro.sample("mu", dist.Normal(0, 5))
tau = numpyro.sample("tau", dist.HalfCauchy(5))
with numpyro.plate("J", J):
eta = numpyro.sample("eta", dist.Normal(0, 1))
theta = mu + tau * eta
return numpyro.sample("obs", dist.Normal(theta, sigma), obs=y)
def numpyro_schools_model(data, draws, chains):
"""Centered eight schools implementation in NumPyro."""
from jax.random import PRNGKey
from numpyro.infer import MCMC, NUTS
mcmc = MCMC(
NUTS(_numpyro_noncentered_model),
num_warmup=draws,
num_samples=draws,
num_chains=chains,
chain_method="sequential",
)
mcmc.run(PRNGKey(0), extra_fields=("num_steps", "energy"), **data)
# This block lets the posterior be pickled
mcmc.sampler._sample_fn = None # pylint: disable=protected-access
mcmc.sampler._init_fn = None # pylint: disable=protected-access
mcmc.sampler._postprocess_fn = None # pylint: disable=protected-access
mcmc.sampler._potential_fn = None # pylint: disable=protected-access
mcmc.sampler._potential_fn_gen = None # pylint: disable=protected-access
mcmc._cache = {} # pylint: disable=protected-access
return mcmc
def pystan_noncentered_schools(data, draws, chains):
"""Non-centered eight schools implementation for pystan."""
schools_code = """
data {
int<lower=0> J;
real y[J];
real<lower=0> sigma[J];
}
parameters {
real mu;
real<lower=0> tau;
real eta[J];
}
transformed parameters {
real theta[J];
for (j in 1:J)
theta[j] = mu + tau * eta[j];
}
model {
mu ~ normal(0, 5);
tau ~ cauchy(0, 5);
eta ~ normal(0, 1);
y ~ normal(theta, sigma);
}
generated quantities {
vector[J] log_lik;
vector[J] y_hat;
for (j in 1:J) {
log_lik[j] = normal_lpdf(y[j] | theta[j], sigma[j]);
y_hat[j] = normal_rng(theta[j], sigma[j]);
}
}
"""
if pystan_version() == 2:
import pystan # pylint: disable=import-error
stan_model = pystan.StanModel(model_code=schools_code)
fit = stan_model.sampling(
data=data,
iter=draws + 500,
warmup=500,
chains=chains,
check_hmc_diagnostics=False,
control=dict(adapt_engaged=False),
)
else:
import stan # pylint: disable=import-error
stan_model = stan.build(schools_code, data=data)
fit = stan_model.sample(
num_chains=chains, num_samples=draws, num_warmup=500, save_warmup=False
)
return stan_model, fit
def pymc3_noncentered_schools(data, draws, chains):
"""Non-centered eight schools implementation for pymc3."""
import pymc3 as pm
with pm.Model() as model:
mu = pm.Normal("mu", mu=0, sd=5)
tau = pm.HalfCauchy("tau", beta=5)
eta = pm.Normal("eta", mu=0, sd=1, shape=data["J"])
theta = pm.Deterministic("theta", mu + tau * eta)
pm.Normal("obs", mu=theta, sd=data["sigma"], observed=data["y"])
trace = pm.sample(draws, chains=chains)
return model, trace
def library_handle(library):
"""Import a library and return the handle."""
if library == "pystan":
try:
module = importlib.import_module("pystan")
except ImportError:
module = importlib.import_module("stan")
else:
module = importlib.import_module(library)
return module
def load_cached_models(eight_schools_data, draws, chains, libs=None):
"""Load pymc3, pystan, emcee, and pyro models from pickle."""
here = os.path.dirname(os.path.abspath(__file__))
supported = (
("pystan", pystan_noncentered_schools),
("pymc3", pymc3_noncentered_schools),
("emcee", emcee_schools_model),
("pyro", pyro_noncentered_schools),
("numpyro", numpyro_schools_model),
)
data_directory = os.path.join(here, "saved_models")
models = {}
if isinstance(libs, str):
libs = [libs]
for library_name, func in supported:
if libs is not None and library_name not in libs:
continue
library = library_handle(library_name)
if library.__name__ == "stan":
# PyStan3 does not support pickling
# httpstan caches models automatically
_log.info("Generating and loading stan model")
models["pystan"] = func(eight_schools_data, draws, chains)
continue
py_version = sys.version_info
fname = "{0.major}.{0.minor}_{1.__name__}_{1.__version__}_{2}_{3}_{4}.pkl.gzip".format(
py_version, library, sys.platform, draws, chains
)
path = os.path.join(data_directory, fname)
if not os.path.exists(path):
with gzip.open(path, "wb") as buff:
try:
_log.info("Generating and caching %s", fname)
cloudpickle.dump(func(eight_schools_data, draws, chains), buff)
except AttributeError as err:
raise AttributeError(f"Failed caching {library_name}") from err
with gzip.open(path, "rb") as buff:
_log.info("Loading %s from cache", fname)
models[library.__name__] = cloudpickle.load(buff)
return models
def pystan_version():
"""Check PyStan version.
Returns
-------
int
Major version number
"""
try:
import pystan # pylint: disable=import-error
version = int(pystan.__version__[0])
except ImportError:
try:
import stan # pylint: disable=import-error
version = int(stan.__version__[0])
except ImportError:
version = None
return version
def test_precompile_models(eight_schools_params, draws, chains):
"""Precompile model files."""
load_cached_models(eight_schools_params, draws, chains)
def running_on_ci() -> bool:
"""Return True if running on CI machine."""
return os.environ.get("ARVIZ_CI_MACHINE") is not None
def importorskip(
modname: str, minversion: Optional[str] = None, reason: Optional[str] = None
) -> Any:
"""Import and return the requested module ``modname``.
Doesn't allow skips on CI machine.
Borrowed and modified from ``pytest.importorskip``.
:param str modname: the name of the module to import
:param str minversion: if given, the imported module's ``__version__``
attribute must be at least this minimal version, otherwise the test is
still skipped.
:param str reason: if given, this reason is shown as the message when the
module cannot be imported.
:returns: The imported module. This should be assigned to its canonical
name.
Example::
docutils = pytest.importorskip("docutils")
"""
# ARVIZ_CI_MACHINE is True if tests run on CI, where ARVIZ_CI_MACHINE env variable exists
ARVIZ_CI_MACHINE = running_on_ci()
if ARVIZ_CI_MACHINE:
import warnings
        compile(modname, "", "eval")  # to catch syntax errors
with warnings.catch_warnings():
# make sure to ignore ImportWarnings that might happen because
# of existing directories with the same name we're trying to
# import but without a __init__.py file
warnings.simplefilter("ignore")
__import__(modname)
mod = sys.modules[modname]
if minversion is None:
return mod
verattr = getattr(mod, "__version__", None)
if minversion is not None:
if verattr is None or Version(verattr) < Version(minversion):
raise Skipped(
"module %r has __version__ %r, required is: %r"
% (modname, verattr, minversion),
allow_module_level=True,
)
return mod
else:
return pytest.importorskip(modname=modname, minversion=minversion, reason=reason)
|
arviz-devs/arviz
|
arviz/tests/helpers.py
|
Python
|
apache-2.0
| 21,624
| 0.00148
|
#pragma error
#pragma repy
removefile("this.file.does.not.exist") # should fail (FNF)
|
sburnett/seattle
|
repy/tests/ut_repytests_testremovefilefnf.py
|
Python
|
mit
| 88
| 0.022727
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from main import *
import time
import random
import urllib2
import json
#import os
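# Generate a random alphanumeric token of length L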
def genToken(L):
CharLib = map(chr,range(97,123)+range(65,91)+range(48,58))
Str = []
for i in range(L):
Str += random.sample(CharLib,1)
return ''.join(Str)
# Key is md5 for string "xiaok"
key = 'db884468559f4c432bf1c1775f3dc9da'
# Encrypt the UID
def encryptUID(id):
return key + str(id)
# Decrypt the UID
def decryptUID(uStr):
return int(uStr.split('a')[1])
# Get a cookie value by name
def getCookie(name):
ck = web.cookies()
if ck.get(name):
return ck.get(name)
else:
return None
# Create a login session
def genSession(SID,Username,ShowName,LastIP,LastLocation,LastDate,Token,Lstat,kpl):
LoginDate = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time()))
Expiry = 86400
session = web.config._session
session.isLogin = True
session.SID = SID
session.Username = Username
session.ShowName = ShowName
session.LastLocation = LastLocation
    # Record whether the login returned normally
if Lstat == 'ok':
session.Lstat = '正常'
elif Lstat == 'other':
session.Lstat = '您的上次登录在别的电脑或者别的浏览器'
else:
session.Lstat = '未知'
    # Collect client information
#print 'HTTP_ENV: '
    #print web.ctx.environ # source address
#print 'HTTP_REFERER: '
#print web.ctx.env.get('HTTP_REFERER', 'http://google.com')
    #LoginHost = web.ctx.ip # both of these methods can obtain the client IP
LoginHost = web.ctx.environ['REMOTE_ADDR']
Agent = web.ctx.environ['HTTP_USER_AGENT']
    # test the lookup with a fixed IP
#LoginHost = '119.122.181.82'
    # Determine the location of this login
Location = 'Localhost'
ip = LoginHost.split('.')
if ip[0]+ip[1] in ['17216','192168','1270'] or ip[0] == '10':
Location = '本地局域网'
else:
        # Resolve the location via a public IP lookup service
url = "http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=json&ip=" + LoginHost
response = urllib2.urlopen(url)
rt = json.load(response)
if rt['ret'] == 1 :
Location = rt['province'] + rt['city'] + ' [' + rt['isp'] + ']'
else:
            Location = 'unknown'
    # Write the login log to the database
if not Token:
        # Token distinguishes a username/password login from one verified via an existing token
Token = genToken(32)
if kpl == 'no':
        Expiry = 0 # do not remember the login: the token stored in the database expires at login time
#db.query('''update users set loginhost="%s",logindate="%s" where id="%s"''' % (LoginHost,LoginDate,SID))
db.query('''insert into login_logs (uid,ip,location,agent,token,expiry) values ("%s","%s","%s","%s","%s",NOW() + INTERVAL %d SECOND)''' % (SID,LoginHost,Location,Agent,Token,Expiry))
db.query('''update users set loginfo=(select id from login_logs where uid="%s" and ip="%s" and token="%s" and status="yes" order by id desc limit 1) where id="%s"''' % (SID,LoginHost,Token,SID))
    # Store the token in the session (server side)
session.Token = Token
    # Store the uid and token in cookies (client side)
#web.setcookie('Username', Username, Expiry)
    # Store the uid disguised as "Username" in the cookie
web.setcookie('Username', encryptUID(SID), Expiry)
web.setcookie('Token', Token, Expiry)
    # Store the previous login date and IP in the session
if LastDate:
        # Format the date for front-end display; None means this is the user's first login
session.LastDate = time.strftime('%Y年%m月%d日 %H:%M:%S',time.strptime(str(LastDate),'%Y-%m-%d %H:%M:%S'))
else:
session.LastDate = '第一次登录'
session.LastIP = LastIP
    # Design notes on writing the current login date and IP to the database:
    # 1. If the login succeeds, fetch the previous login time and IP from the database into the session,
    #    then immediately update the database with this login's IP and time.
    # 2. Alternatively, keep this login's time and IP only in the session and write them back to the
    #    database when the user logs out normally.
    # 3. The two methods record opposite data. Method 2 is not used because the user might not log out
    #    normally, in which case the database would never be updated with this login's information.
    # By Luxiaok, 2014-04-07 22:49:00
    # On a successful login the DB operations here should really have exception handling
# return True
class Login:
def GET(self,*args):
        # The URL is matched by several regexes, so redundant arguments must be handled; it is still
        # unclear why the url regex makes GET receive an extra argument
        # The extra argument is the matched url suffix
#print "Self =",self
#print "Args =",args
uid = getCookie('Username')
token = getCookie('Token')
sid = getCookie('xk_session')
HTTP_REFERER = getCookie('HTTP_REFERER')
#print 'Login referer from cookie: ',HTTP_REFERER
if uid and token:
#print 'uid =',uid
#print 'token =',token
#print 'sid =',sid
uid = decryptUID(uid)
try:
g = db.query('''
select U.id,U.username,U.nickname,U.loginfo,L.id as LID,L.ip,L.date from login_logs as L
left join users as U on L.uid=U.id
where U.id="%s" and L.token="%s" and L.status="yes" and L.expiry>now() and U.status="yes"''' % (uid,token))
except Exception,e:
print "MySQL Error: ",Exception,":",e
return "Database Error"
if g:
d = g[0]
Username = d.username
                Lstat = 'ok' # flag reporting whether the login was abnormal
if not d.nickname:
ShowName = d.username
else:
ShowName = d.nickname
if d.loginfo != d.LID:
g2 = db.query('''select L.ip,L.date from users as U left join login_logs as L on U.loginfo=L.id where U.id="%s"''' % uid)
d = g2[0]
                    # A notice that the last login was from another browser/location could also be returned here
Lstat = "other" #上次登录在别的浏览器或者异地、异机
LastIP = d.ip
LastDate = d.date
genSession(uid,Username,ShowName,LastIP,LastDate,token,Lstat,kpl='yes')
if HTTP_REFERER:
web.setcookie('HTTP_REFERER', '88888888', -1000)
return web.seeother(HTTP_REFERER)
else:
return web.seeother("/dashboard")
else:
                # If the token stored in the database has status "no" (the user logged out normally and the session is invalid), clear the local cookies
web.setcookie('Username', '88888888', -1)
web.setcookie('Token', '88888888', -1)
if getLogin():
#SID = getLogin()['SID']
ShowName = getLogin()['ShowName']
#print "ShowName: " + ShowName
return render.dashboard(ShowName=ShowName)
else:
#return web.seeother("/login")
return render.login()
def POST(self,*args):
getPost = web.input()
        #kpl = getPost.kpl # whether to remember the login
try:
getSQL = db.query('''select u.id,u.username,u.password,u.nickname,u.status,L.ip,L.location,L.date from users as u left join login_logs as L on u.loginfo=L.id where username="%s" and password=md5("%s")''' % (getPost.username,getPost.password))
except:
            # server (database) error
return "false"
if getSQL:
            # fetch the login data
getData = getSQL[0]
SID = getData['id']
Username = getData['username']
Status = getData['status']
ShowName = getData['nickname']
LastDate = getData['date']
LastIP = getData['ip']
LastLocation = getData['location']
if not ShowName:
ShowName = Username
if Status == 'yes':
                # Login requirements met: write the login data to the session and create the session
genSession(SID,Username,ShowName,LastIP,LastLocation,LastDate,False,Lstat='ok',kpl=getPost.kpl)
#HTTP_REFERER = getCookie('HTTP_REFERER')
#if HTTP_REFERER:
# web.setcookie('HTTP_REFERER', '88888888', -1000)
# return web.seeother(HTTP_REFERER)
#else:
# web.setcookie('HTTP_REFERER', '88888888', -1000)
# return web.seeother("/dashboard")
return "true"
else:
                # the user account is disabled
return "disable"
else:
            # wrong username or password
return "error"
class Logout:
def GET(self):
uid = getCookie('Username')
token = getCookie('Token')
sidName = getCookie('xk_session')
if uid and token and sidName:
uid = decryptUID(uid)
#sfile = 'session/' + sidName
            # Delete the session file; the kill() method seems to remove the session ID file anyway
#try:
# os.remove(sfile)
#except Exception,e:
# print "Session File Error: ",Exception,":",e
            # Set the cookie record's status to "no"
try:
db.query('''update login_logs set status="no" where uid="%s" and token="%s"''' % (uid,token))
except Exception,e:
print "MySQL Error: ",Exception,":",e
web.setcookie('Username', '88888888', -1)
web.setcookie('Token', '88888888', -1)
web.config._session.kill()
raise web.seeother("/")
# Test page
class Test:
def GET(self):
if getLogin():
SID = getLogin()['SID']
ShowName = getLogin()['ShowName']
#print "ShowName: " + ShowName
return render.test(ShowName=ShowName,uid=SID)
else:
return web.seeother("/login")
|
awsok/SaltAdmin
|
view/index.py
|
Python
|
gpl-2.0
| 10,160
| 0.012983
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/wearables/ithorian/shared_ith_bandolier_s08.iff"
result.attribute_template_id = 11
result.stfName("wearables_name","ith_bandolier_s08")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/wearables/ithorian/shared_ith_bandolier_s08.py
|
Python
|
mit
| 472
| 0.04661
|
class Solution:
def pushDominoesSim(self, dominoes: str) -> str:
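        # Brute-force simulation: pad the row with '.' sentinels, then repeatedly sweep it
        # left-to-right (propagating 'R' pushes) and right-to-left (propagating 'L' pushes),
        # one cell per pass, until the configuration stops changing.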
prev, curr, N = None, ["."] + list(dominoes) + ["."], len(dominoes)
while prev != curr:
prev = curr[:]
i = 1
while i <= N:
if curr[i] == "." and prev[i - 1] == "R" and prev[i + 1] != "L":
curr[i], i = "R", i + 1
i += 1
i = N
while i >= 1:
if curr[i] == "." and prev[i + 1] == "L" and prev[i - 1] != "R":
curr[i], i = "L", i - 1
i -= 1
return "".join(curr[1:-1])
def pushDominoes(self, dominoes: str) -> str:
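        # Linear two-pass approach: for every '.' compute the distance to the nearest active
        # 'R' on its left (distr) and the nearest active 'L' on its right (distl); the domino
        # falls toward whichever push arrives first and stays upright on a tie. The 'L'/'R'
        # sentinels below are never scanned and only simplify boundary indexing.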
d, n = list("L" + dominoes + "R"), len(dominoes)
distr, posr = [n] * (n + 2), -float("Inf")
for i in range(1, n + 1):
if d[i] == "R":
posr = i
elif d[i] == "L":
posr = -float("Inf")
else:
distr[i] = min(distr[i], i - posr)
distl, posl = [n] * (n + 2), float("Inf")
for i in range(1, n + 1)[::-1]:
if d[i] == "L":
posl = i
elif d[i] == "R":
posl = float("Inf")
else:
distl[i] = min(distl[i], posl - i)
if distl[i] < distr[i]:
d[i] = "L"
elif distr[i] < distl[i]:
d[i] = "R"
return "".join(d[1:-1])
# TESTS
for dominoes, expected in [
("RR.L", "RR.L"),
(".L.R...LR..L..", "LL.RR.LLRRLL.."),
("....", "...."),
("R...", "RRRR"),
("....L", "LLLLL"),
]:
sol = Solution()
actual = sol.pushDominoes(dominoes)
print("Final domino state of", dominoes, "->", actual)
assert actual == expected
|
l33tdaima/l33tdaima
|
p838m/push_dominoes.py
|
Python
|
mit
| 1,778
| 0.001125
|
import numpy as np
from scipy.misc import imrotate
from ImFEATbox.__helperCommands import conv2float
from scipy.stats import skew, kurtosis
def SVDF(I, returnShape=False):
"""
Input: - I: A 2D image
Output: - Out: A (1x780) vector containing 780 metrics calculated
from singular value decomposition
"""
# ************************************************************************
# Implemented for MRI feature extraction by the Department of Diagnostic
# and Interventional Radiology, University Hospital of Tuebingen, Germany
# and the Institute of Signal Processing and System Theory University of
# Stuttgart, Germany. Last modified: November 2016
#
# This implementation is part of ImFEATbox, a toolbox for image feature
# extraction and analysis. Available online at:
# https://github.com/annikaliebgott/ImFEATbox
#
# Contact: annika.liebgott@iss.uni-stuttgart.de
# ************************************************************************
if returnShape:
return (780,1)
## Calculate Singular Value Decomposition of the image
# convert image to float
I = conv2float(I)
I = I.T
# initialize feature variables
dia_elements = np.zeros((np.min(np.shape(I)),3))
eig_U = np.zeros((np.shape(I)[0],3))
eig_V = np.zeros((np.shape(I)[1],3))
det_U = np.zeros(3)
det_V = np.zeros(3)
trace_U = np.zeros(3)
trace_V = np.zeros(3)
rank_U = np.zeros(3)
rank_V = np.zeros(3)
median_eig_U = np.zeros(3)
median_eig_V = np.zeros(3)
max_eig_U = np.zeros(3)
max_eig_V = np.zeros(3)
mean_U = np.zeros(3)
mean_V = np.zeros(3)
mean_S = np.zeros(3)
std_U = np.zeros(3)
std_V = np.zeros(3)
std_S = np.zeros(3)
skewness_U = np.zeros(3)
skewness_V = np.zeros(3)
kurtosis_U = np.zeros(3)
kurtosis_V = np.zeros(3)
# Calculate the measures for 3 different orientations
for z in range(0,3):
if z == 1:
# rotate image by 90 degree
I = imrotate(I, 90, interp='bilinear')
elif z == 2:
            # rotate the already rotated image by another -180 degrees (net -90 degrees from the original)
I = imrotate(I, -180, interp='bilinear')
# calculate singular value decomposition with diagonal matrix S and
# unitary matrices U and V
[U,S,V] = np.linalg.svd(I)
#U, V = U.T, V.T
## feature extraction
# calculate diagonal elements of matrix S
#for i in range(0, np.count_nonzero(S)):
dia_elements[:,z] = S[:]
# eigen values of U and V
eig_U[:,z] = np.linalg.eig(U)[0]
eig_V[:,z] = np.linalg.eig(V)[0]
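        # U and V are orthogonal, so their eigenvalues lie on the unit circle and are
        # generally complex; assigning them into the real-valued arrays above keeps
        # only the real part (numpy emits a ComplexWarning).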
# determinant of U and V
det_U[z] = np.linalg.det(U)
det_V[z] = np.linalg.det(V)
# trace of U and V
trace_U[z] = np.trace(U)
trace_V[z] = np.trace(V)
# rank of U and V
rank_U[z] = np.linalg.matrix_rank(U)
rank_V[z] = np.linalg.matrix_rank(V)
# skewness of U and V
skewness_U[z] = skew(np.ndarray.flatten(U))
skewness_V[z] = skew(np.ndarray.flatten(V))
# kurtosis of U and V
kurtosis_U[z] = kurtosis(np.ndarray.flatten(U), fisher=False, bias=False)
kurtosis_V[z] = kurtosis(np.ndarray.flatten(V), fisher=False, bias=False)
# mean of U, V and S
mean_U[z] = np.mean(U)
mean_V[z] = np.mean(V)
mean_S[z] = np.mean(S)
# standard deviation of U, V and S
std_U[z] = np.std(U, ddof=1)
std_V[z] = np.std(V, ddof=1)
std_S[z] = np.std(S, ddof=1)
# median of eigen values of U and V
median_eig_U[z] = np.median(eig_U[:,z])
median_eig_V[z] = np.median(eig_V[:,z])
# maximum of eigen values of U and V
max_eig_U[z] = np.max(eig_U[:,z])
max_eig_V[z] = np.max(eig_V[:,z])
## return feature vector
#np.prod(np.shape(eig_U[:100,:]))
Out = np.hstack([np.ndarray.flatten(dia_elements[:40,:]),
np.ndarray.flatten(eig_U[:100,:]),
np.ndarray.flatten(eig_V[:100,:]),
det_U, det_V, trace_U, trace_V, rank_U, rank_V, skewness_U, skewness_V,
kurtosis_U, kurtosis_V, mean_U, mean_V, mean_S, std_U, std_V, std_S,
median_eig_U, median_eig_V, max_eig_U, max_eig_V])
return Out
|
annikaliebgott/ImFEATbox
|
features_python/ImFEATbox/GlobalFeatures/Intensity/_SVDF.py
|
Python
|
apache-2.0
| 4,317
| 0.008571
|
import sys
import re
from Bio import Seq,SeqIO
iname=sys.argv[1]
cdr3p=re.compile("(TT[TC]|TA[CT])(TT[CT]|TA[TC]|CA[TC]|GT[AGCT]|TGG)(TG[TC])(([GA][AGCT])|TC)[AGCT]([ACGT]{3}){5,32}TGGG[GCT][GCT]")
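# The pattern above is a heuristic nucleotide motif for a CDR3-like region: it anchors
# on conserved codons (e.g. the cysteine codon TG[TC]), allows 5-32 intervening codons
# and ends at the tryptophan/glycine TGGG.. motif. It is not a full V(D)J annotator.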
# Utility functions
def get_records(filename):
records=[]
for record in SeqIO.parse(filename,"fasta"):
records.append(record)
return records
records=get_records(iname)
numrecords=len(records)
results=[]
for i in range(numrecords):
r=records[i]
strseq=str(r.seq)
m=cdr3p.search(strseq)
if m!=None:
mspan=m.span()
result=strseq[mspan[0]:mspan[1]]
else:
result=""
results.append(result)
for i in range(numrecords):
r=records[i]
des=r.description
res=results[i]
if res!="":
print ">"+des+"\n"+res[9:-6]
|
sdwfrost/piggy
|
extract_CDR3.py
|
Python
|
mit
| 749
| 0.048064
|
import math
class ColorPoint:
"""
Simple color-storage class; stores way-points on a color ramp
"""
def __init__(self,idx,col,colType):
# index, X-coordinate, on a palette
self.idx = idx
# color; usually an RGBA quad
self.color = col
# One of ColorTypes members
self.colorType = colType
def __str__(self):
return "(Index=%d; Color=(%0.3f,%0.3f,%0.3f,%0.3f); ColorType=%d)" % (self.idx, self.color[0], self.color[1], self.color[2], self.color[3], self.colorType)
class ColorTypes:
"""
Simple enumerated type for internal color formats
"""
RGBAi = 0
RGBAf = 1
HEX6 = 2
HEX8 = 3
class ColorRamp:
"""
Model for a simple color ramp
See __main__ below for usage.
"""
# we assume linear ramps for now
LINEAR = 0
GAUSSIAN = 1
EXPONENTIAL = 2
CIRCLE_RADIUS = 3
def __init__(self, nColors, *args, **kwargs):
# size of this ramp
self.nColors = nColors
# the list of RGBA float values
self.ramp = []
# ordered array of color indices
self.keys = {}
# ready to use; boolean; we need at least two
# color points to define a ramp
self.ready = False
#
if 'handle' in kwargs:
self.handle = kwargs['handle']
if 'name' in kwargs:
self.name = kwargs['name']
# list of unique ids for objects on the map canvas
self.canvas_ids = {}
def __str__(self):
"""
instances created with ColorRamp(nColors=XYZ,name="foo") will return "foo"
otherwise a long-debug-friendly description is returned.
"""
if getattr(self,'name',None)!=None:
return self.name
else:
s = "Object Name: Nameless\n"
s+= "Ready to use: " + str(self.ready) + "\n"
s+= "Keys: " + str(self.keys.keys()) + "\n"
for k in self.keys:
s += "Color[%d] = %s\n" % (k,self.keys[k])
s += "ColorRamp with %d colors follows...\n" % self.nColors
if self.ready:
s += str(self.getRamp()) + "\n"
else:
s += "[]\n"
return s
def addColor(self, idx, col, colType=ColorTypes.RGBAf, colScale=1.0):
"""
adds color, 'col', to ramp at index 'idx'. If 'idx' exists, this
function overwrites the value
"""
# check user input: color location
# if beyond ends of ramp, make end of ramp
if idx<0:
idx=0
elif idx>self.nColors-1:
idx=self.nColors-1
# check user input: color format
if type(col) != ().__class__ or len(col)!=4:
print "Error: Colors must be spefied as a RGBA tuple with four values."
print "Error: %s was given instead." % str(col)
return
# check user input: color type format
if colType not in (ColorTypes.RGBAi, ColorTypes.RGBAf):
print "Error: Color type specification must be either, "
print "Error: ColorRamp.RGBAi or ColorRamp.RGBAf"
return
userCol = None
# convert color type if needed
if colType==ColorTypes.RGBAf:
userCol = col
elif colType==ColorTypes.RGBAi:
userCol = map(lambda c: float(c)/float(colScale), col)
# create a ColorPoint and insert it
self.keys[idx] = ColorPoint(idx, userCol, colType)
# is this ramp yet good to use?
self.updateReady()
# what else do we need to do to modify the model?
def checkPoint(self, pt, startX, X):
"""
Checks if there is a point between startX and X.
"""
ret_x = startX
if startX < X:
for x in range(int(startX)+1, int(X)+1):
if x in self.keys:
break
ret_x = x
elif startX > X:
for x in range(int(startX)-1, int(X)-1, -1):
if x in self.keys:
break
ret_x = x
return ret_x
def getPoint(self, pt):
"""
        Returns the true index (horizontal position) of a given point.
"""
if pt in self.canvas_ids:
return self.canvas_ids[pt]
return None
def getRampList(self):
"""
Returns a list of floats representing the color ramp.
"""
ramp_list = []
for x in range(0,360):
if x in self.keys:
col = list(self.keys[x].color)
ramp_list.append(float(x))
ramp_list.append(float(col[0]))
ramp_list.append(float(col[1]))
ramp_list.append(float(col[2]))
ramp_list.append(float(col[3]))
return ramp_list
def movePoint(self,pt,X,alpha):
if pt not in self.canvas_ids:
# print "Error: Could not move pt(%d)." % pt
return
idx = self.canvas_ids[pt]
if idx in self.keys:
col = list(self.keys[idx].color)
else:
# print "Logic error no such index in self.keys"
return
col[3] = alpha
# prevent extreme points from being replaced
if X <= 0:
return
if X >= 359:
return
self.removeColor(idx)
# prevent extreme points from moving horizontally
if idx == 0:
X = 0
if idx == 359:
X = 359
self.addColor(X, tuple(col))
def removePoint(self,pt):
if pt not in self.canvas_ids:
# print "Error: Could not remove pt(%d)." % pt
return
idx = self.canvas_ids[pt]
if idx <= 0 or idx >= 359:
return
self.removeColor(idx)
def removeColor(self, idx):
# check user input
if idx not in self.keys: return
if idx<0 or idx>self.nColors-1: return
# remove the point
del self.keys[idx]
# is this ramp still good to use?
self.updateReady()
def updateReady(self):
# are we ready to use?
self.ready = (0 in self.keys and self.nColors-1 in self.keys)
def updateRamp(self):
# if idx is specified then it was either added or removed
# so adjust the ramp about that point
if not self.ready:
# no use in updating a ramp w/o proper colors
print "Msg: This color ramp is not yet ready to use. Please add"
print "Msg: at least two colors at the ramp's extreme points 0 and %d" % (self.nColors-1)
return
# OPTIMIZATION TODO:
        # if idx!=None and idx not in self.keys, then the point
# was removed, just update around those pts
# if idx!=None and does exists in self.keys, then they
# just added this point, so update the pts around it
self.ramp = []
keyList = self.keys.keys()
keyList.sort()
keyList.reverse()
lowerId = keyList.pop()
while len(keyList)>0:
upperId = keyList.pop()
# number of colors in between
span = int(abs(upperId-lowerId))
# get the actual colors
lowerCol, upperCol = self.keys[lowerId].color, self.keys[upperId].color
for x in range(span):
# linear mixing components
cUpper = float(x) / float(span)
cLower = 1.0 - cUpper
self.ramp.append((cLower * lowerCol[0] + cUpper * upperCol[0],
cLower * lowerCol[1] + cUpper * upperCol[1],
cLower * lowerCol[2] + cUpper * upperCol[2],
cLower * lowerCol[3] + cUpper * upperCol[3]))
lowerId = upperId
        # fix the off-by-one error
self.ramp.append(upperCol)
assert len(self.ramp)==self.nColors, "ColorRamp Logic Error: This ramp supports %d colors ONLY, but %d were found in the ramp." % (self.nColors, len(self.ramp))
def getRamp(self, colType=ColorTypes.RGBAf, colScale=1.0):
# update the ramp and return it
self.updateRamp()
if colType==ColorTypes.RGBAf:
if colScale==1.0:
return self.ramp
elif colType==ColorTypes.HEX6:
colScale = 255
return map(lambda col: "#%02x%02x%02x" % (colScale*col[0],colScale*col[1],colScale*col[2]), self.ramp)
elif colType==ColorTypes.HEX8:
colScale = 255
return map(lambda col: "#%02x%02x%02x%02x" % (colScale*col[0],colScale*col[1],colScale*col[2],colScale*col[3]), self.ramp)
def toPhotoImageString(self,nRows=1):
oneLine = "{" + " ".join(self.getRamp(ColorTypes.HEX6)) + "}"
if nRows==1:
return oneLine
else:
return " ".join([oneLine]*nRows)
# this belongs in the view
def toPhotoImage(self,nRows=1):
try:
from Tkinter import PhotoImage
except ImportError, e:
print "Error: could not import Tk. No image."
print "Error: ", e
return None
img = PhotoImage(width=self.nColors, height=nRows)
img.put(self.toPhotoImageString(nRows))
return img
def toCanvas(self,canvas,width,height,padX=0,padY=0):
r = self.CIRCLE_RADIUS
tmpKeys = self.keys.keys()
tmpKeys.sort()
# plottable window area; wH, wW
wH = height-2*padY
wW = width -2*padX
for idx in range(len(tmpKeys)):
pt1 = self.keys[tmpKeys[idx]]
origX1, origY1 = pt1.idx, pt1.color[3]
x1 = int(float(origX1)/float(self.nColors) * wW)
y1 = self.alphaToY(origY1)
y1 = int(wH * (1.0-float(y1)))
x1 += padX
y1 += padY
# if not last loop, then draw the line
if idx+1<len(tmpKeys):
pt2 = self.keys[tmpKeys[idx+1]]
origX2, origY2 = pt2.idx, pt2.color[3]
x2 = int(float(origX2)/float(self.nColors) * wW)
y2 = self.alphaToY(origY2)
y2 = int(wH * (1.0-float(y2)))
x2 += padX
y2 += padY
# plot the pt
unique_id = canvas.create_line((x1,y1,x2,y2),fill="black",width=1.0,tags="colorPt")
self.canvas_ids[unique_id] = idx
origColor = pt1.color
# convert the color from RGBA --> HEX6
colHEX6 = "#%02x%02x%02x" % (origColor[0]*255., origColor[1]*255., origColor[2]*255.)
# plot the pt
unique_id = canvas.create_oval((x1-r,y1-r,x1+r,y1+r),fill=colHEX6,tags="colorPt")
self.canvas_ids[unique_id] = tmpKeys[idx]
def clearCanvas(self):
for x in self.canvas_ids:
self.canvas.delete(x)
def getHandle(self):
return self.handle
def setHandle(self,handle):
self.handle = handle
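    # yToAlpha/alphaToY map between a canvas height fraction y in [0, 1] and an alpha
    # value with a logarithmic curve: alpha = (10**y - 1)/9 and y = log10(1 + 9*alpha);
    # the two functions are exact inverses of each other.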
def yToAlpha(self,y):
if y<=0:
return 0.0
elif y>=1:
return 1.0
else:
# return y
return (10.**y-1.) / 9.
def alphaToY(self,alpha):
if alpha<=0:
return 0.
elif alpha>=1:
return 1.0
else:
# return alpha
return math.log(1.0+9.*alpha,10.)
if __name__=="__main__":
c = ColorRamp(256,name="C")
# add some colors
c.addColor(1,(0,0,0,0))
print c
c.addColor(2,(1,1,1,1))
print c
c.addColor(250,(0.5, 0.5, 0.5, 0.5))
print c
# range checking
c.addColor(-1, (0,0,0,0))
print c
c.addColor(256, (1,2,3,4))
print c
# color scaling
c.addColor(45, (128, 255, 64, 32), colType=ColorTypes.RGBAi, colScale=255)
print c
# remove a color
c.removeColor(2)
print c
# range checking
c.removeColor(-1)
print c
# range checking
c.removeColor(2000)
print c
# check ready to use
c.addColor(0, (0,0,0,0))
print c
c.addColor(8, (1.0, 0.4, 0.0, 0.0))
print c
c.addColor(255, (1,1,1,1))
print c
# check ramp types
d = ColorRamp(32)
d.addColor(0, (0,0,0,0), colType=ColorTypes.RGBAi, colScale=255)
d.addColor(31, (255,255,255,255), colType=ColorTypes.RGBAi, colScale=255)
d.addColor(15, (1.0, 0.0, 0.0, 1.0))
print "Color Ramp as RGAf"
print d.getRamp()
print "Color Ramp as HEX6"
print d.getRamp(ColorTypes.HEX6)
print "Color Ramp as HEX8"
print d.getRamp(ColorTypes.HEX8)
print "Does adding/removing a pt screw up the model?"
f = ColorRamp(360)
# end pts
f.addColor(0, (0,0,0,0))
f.addColor(359, (1,1,1,1))
print f
print "Adding a pt"
f.addColor(7, (1.0, 0.0, 0.5, 0.25))
print f
print "Removing a pt"
f.removeColor(7)
print f
print "Add some more colors"
f.addColor(90, (1.0, 0.0, 0.0, 1.0))
f.addColor(270, (0.0, 0.0, 1.0, 1.0))
f.addColor(180, (0.0, 1.0, 0.0, 1.0))
print "Checking hex8 vlaues"
print f.getRamp(ColorTypes.HEX8)
print "To PhotoImage String: nRows=1"
print f.toPhotoImageString()
print "To PhotoImage String: nRows=32"
print f.toPhotoImageString(16)
try:
from Tkinter import *
root = Tk()
padX, padY = 30, 30
canvas = Canvas(root,height=10+padY,width=360+padX)
print "Try to make a color ramp image"
img = f.toPhotoImage(10)
canvas.create_image((padX/2, padY/2),image=img,anchor=NW)
canvas.pack()
root.mainloop()
except ImportError, e:
print "WARNING: Tkinter not installed for this Python version."
print "WARNING: Skipping the Tkinter test"
|
gratefulfrog/lib
|
python/pymol/colorramping.py
|
Python
|
gpl-2.0
| 13,994
| 0.011862
|
#!/usr/bin/python
############################################################
# Generates commands for the muscle alignment program
############################################################
import sys, os, core, argparse
############################################################
# Options
parser = argparse.ArgumentParser(description="MUSCLE command generator");
parser.add_argument("-i", dest="input", help="Directory of input FASTA files.", default=False);
parser.add_argument("-o", dest="output", help="Desired output directory for aligned files. Job name (-n) will be appended to output directory name.", default=False);
parser.add_argument("-n", dest="name", help="A short name for all files associated with this job.", default=False);
parser.add_argument("-p", dest="path", help="The path to MUSCLE. Default: muscle", default="muscle");
parser.add_argument("--overwrite", dest="overwrite", help="If the output directory already exists and you wish to overwrite it, set this option.", action="store_true", default=False);
parser.add_argument("--outname", dest="outname", help="Use the end of the output directory path as the job name.", action="store_true", default=False);
# IO options
parser.add_argument("-part", dest="part", help="SLURM partition option.", default=False);
parser.add_argument("-tasks", dest="tasks", help="SLURM --ntasks option.", type=int, default=1);
parser.add_argument("-cpus", dest="cpus", help="SLURM --cpus-per-task option.", type=int, default=1);
parser.add_argument("-mem", dest="mem", help="SLURM --mem option.", type=int, default=0);
# SLURM options
args = parser.parse_args();
if not args.input or not os.path.isdir(args.input):
sys.exit( " * Error 1: An input directory must be defined with -i.");
args.input = os.path.abspath(args.input);
if not args.name:
name = core.getRandStr();
else:
name = args.name;
if not args.output:
sys.exit( " * Error 2: An output directory must be defined with -o.");
args.output = os.path.abspath(args.output);
# if args.outname:
# name = os.path.basename(args.output);
# else:
# args.output = args.output + "-" + name + "/";
if os.path.isdir(args.output) and not args.overwrite:
sys.exit( " * Error 3: Output directory (-o) already exists! Explicity specify --overwrite to overwrite it.");
# IO option error checking
if not args.part:
sys.exit( " * Error 4: -part must be defined as a valid node partition on your clutser.");
if args.tasks < 1:
sys.exit( " * Error 5: -tasks must be a positive integer.");
if args.cpus < 1:
sys.exit( " * Error 6: -cpus must be a positive integer.");
if args.mem < 0:
    sys.exit( " * Error 7: -mem must be a non-negative integer.");
# SLURM option error checking
pad = 26
cwd = os.getcwd();
# Job vars
output_file = os.path.join(cwd, "jobs", "muscle_cmds_" + name + ".sh");
submit_file = os.path.join(cwd, "submit", "muscle_submit_" + name + ".sh");
logdir = os.path.join(args.output, "logs");
# Job files
##########################
# Reporting run-time info for records.
with open(output_file, "w") as outfile:
core.runTime("#!/bin/bash\n# MUSCLE command generator", outfile);
core.PWS("# IO OPTIONS", outfile);
core.PWS(core.spacedOut("# Input directory:", pad) + args.input, outfile);
if args.outname:
core.PWS(core.spacedOut("# --outname:", pad) + "Using end of output directory path as job name.", outfile);
if not args.name:
core.PWS("# -n not specified --> Generating random string for job name", outfile);
core.PWS(core.spacedOut("# Job name:", pad) + name, outfile);
core.PWS(core.spacedOut("# Output directory:", pad) + args.output, outfile);
if args.overwrite:
core.PWS(core.spacedOut("# --overwrite set:", pad) + "Overwriting previous files in output directory.", outfile);
if not os.path.isdir(args.output):
core.PWS("# Creating output directory.", outfile);
os.system("mkdir " + args.output);
core.PWS(core.spacedOut("# Logfile directory:", pad) + logdir, outfile);
if not os.path.isdir(logdir):
core.PWS("# Creating logfile directory.", outfile);
os.system("mkdir " + logdir);
core.PWS(core.spacedOut("# Job file:", pad) + output_file, outfile);
core.PWS("# ----------", outfile);
core.PWS("# SLURM OPTIONS", outfile);
core.PWS(core.spacedOut("# Submit file:", pad) + submit_file, outfile);
core.PWS(core.spacedOut("# SLURM partition:", pad) + args.part, outfile);
core.PWS(core.spacedOut("# SLURM ntasks:", pad) + str(args.tasks), outfile);
core.PWS(core.spacedOut("# SLURM cpus-per-task:", pad) + str(args.cpus), outfile);
core.PWS(core.spacedOut("# SLURM mem:", pad) + str(args.mem), outfile);
core.PWS("# ----------", outfile);
core.PWS("# BEGIN CMDS", outfile);
##########################
# Generating the commands in the job file.
for f in os.listdir(args.input):
base_input = os.path.splitext(f)[0];
cur_infile = os.path.join(args.input, f);
cur_outfile = os.path.join(args.output, base_input + "-muscle.fa");
cur_logfile = os.path.join(logdir, base_input + "-muscle.log");
muscle_cmd = args.path + " -in '" + cur_infile + "' -out '" + cur_outfile +"' > " + cur_logfile + " 2>&1";
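        # e.g. (paths illustrative): muscle -in '/data/locus1.fa' -out '/out/locus1-muscle.fa' > /out/logs/locus1-muscle.log 2>&1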
outfile.write(muscle_cmd + "\n");
##########################
# Generating the submit script.
with open(submit_file, "w") as sfile:
submit = '''#!/bin/bash
#SBATCH --job-name={name}
#SBATCH --output={name}-%j.out
#SBATCH --mail-type=ALL
#SBATCH --mail-user=gregg.thomas@umontana.edu
#SBATCH --partition={partition}
#SBATCH --nodes=1
#SBATCH --ntasks={tasks}
#SBATCH --cpus-per-task={cpus}
#SBATCH --mem={mem}
parallel -j {tasks} < {output_file}'''
sfile.write(submit.format(name=name, partition=args.part, tasks=args.tasks, cpus=args.cpus, mem=args.mem, output_file=output_file));
##########################
|
gwct/core
|
python/generators/muscle_gen.py
|
Python
|
gpl-3.0
| 5,889
| 0.014943
|
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
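# Each block below creates (or reuses) a named marker set and places one spherical
# marker: place_marker((x, y, z), (r, g, b), radius). The file is auto-generated,
# with one block per modelled particle.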
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((7681.9, -659.304, 7494.63), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((6830.55, 913.984, 7439.66), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((5962.18, 1202.57, 5768.53), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((6075.58, -1067.94, 6332.77), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((5661.76, -1480.54, 4946.79), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((5263.15, 301.948, 3262.64), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((5657.6, 1350.01, 1989.44), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((5635.18, 436.967, 2062), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((6297.35, 2999.42, 1516.02), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((7688.67, 3286.28, 623.621), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((7528.29, 5130.63, 896.956), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((7978.73, 5062.12, 2257.63), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((8274.3, 5716.61, 3629.56), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((8927.85, 4349.38, 3977.65), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((10585.2, 4795.17, 5477.35), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((10431.7, 5633.75, 8439.06), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((8498.18, 5622.95, 8718.03), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((8579.44, 6803.65, 8035.93), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((8586.24, 6534.96, 6389.88), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((9478.99, 7035.42, 5315.88), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((7648.01, 6080.27, 3952.17), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((8670.79, 6900.96, 5612.24), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((7482.22, 7092.88, 6016.52), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((7640.8, 7712.46, 6897.92), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((7937.6, 6728.67, 7833.34), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((8784.55, 6464.83, 9140.4), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8561.12, 6559.58, 7596.64), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((8055.79, 5370.63, 5806.84), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((7590.58, 6693.72, 5322), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((7350.34, 6844.11, 4092.02), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((6751.74, 6797.72, 4646.23), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((7496.03, 5847.74, 3396.54), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((7787.4, 7490.39, 4043.36), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((7057.62, 7493.13, 5358.24), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((7997.73, 7301.39, 6226.72), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((9084.09, 7630.68, 7008.2), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((7831.85, 6679.57, 5097.23), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((7975.45, 7942.47, 6461.27), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((7426.21, 6820.08, 6727.12), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((8197.8, 7836.26, 6453.28), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((7161.35, 7542.69, 5268.39), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((7516.72, 8143.81, 3602.91), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((7979.56, 9959.23, 5554.29), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((7698.16, 9005.61, 3943.77), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((7520.08, 7993.48, 5203.32), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((6251.17, 6995.83, 3897.44), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((4526.12, 7933.19, 4358.57), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((4777.3, 9951.86, 3932.76), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((3730.45, 8445.41, 3386.9), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((3645.38, 6673.44, 2770.34), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((3393.56, 7533.59, 1937.91), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((4470.18, 6177.12, 2196.01), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((5451.47, 4718.14, 2682.86), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((5359.78, 4077.32, 1282.18), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((4869.43, 4615.46, 965.169), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((4070, 5597.4, 2605.5), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((2946.94, 4126.04, 3735.78), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((1232.87, 4153.39, 5607.78), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((880.485, 4236.97, 6097.52), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((1259.04, 3663.86, 6487.98), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((1023.1, 4187.53, 5260.2), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((253.436, 4078.41, 4956.96), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((2149.55, 4189.64, 4578.63), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((369.268, 3483.28, 4564.11), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((-1506.62, 2997.87, 5129.44), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((-1226.96, 4387.94, 4009.78), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((-1984.03, 5176.71, 5297.32), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((191.654, 4646.05, 4446.86), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((-1121.1, 3864.81, 6009.96), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((-1615.33, 3703.63, 7676.27), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((-505.695, 3992.24, 7924.24), (0.7, 0.7, 0.7), 764.488)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
|
batxes/4Cin
|
SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/mtx1_models/SHH_WT_models22582.py
|
Python
|
gpl-3.0
| 17,582
| 0.025082
|
"""
This inline script utilizes harparser.HAR from https://github.com/JustusW/harparser
to generate a HAR log object.
"""
try:
from harparser import HAR
from pytz import UTC
except ImportError as e:
import sys
print >> sys.stderr, "\r\nMissing dependencies: please run `pip install mitmproxy[examples]`.\r\n"
raise
from datetime import datetime, timedelta, tzinfo
class _HARLog(HAR.log):
# The attributes need to be registered here for them to actually be available later via self. This is
# due to HAREncodable linking __getattr__ to __getitem__. Anything that is set only in __init__ will
# just be added as key/value pair to self.__classes__.
__page_list__ = []
__page_count__ = 0
__page_ref__ = {}
def __init__(self, page_list):
self.__page_list__ = page_list
self.__page_count__ = 0
self.__page_ref__ = {}
HAR.log.__init__(self, {"version": "1.2",
"creator": {"name": "MITMPROXY HARExtractor",
"version": "0.1",
"comment": ""},
"pages": [],
"entries": []})
def reset(self):
self.__init__(self.__page_list__)
def add(self, obj):
if isinstance(obj, HAR.pages):
self['pages'].append(obj)
if isinstance(obj, HAR.entries):
self['entries'].append(obj)
def create_page_id(self):
self.__page_count__ += 1
return "autopage_%s" % str(self.__page_count__)
def set_page_ref(self, page, ref):
self.__page_ref__[page] = ref
def get_page_ref(self, page):
return self.__page_ref__.get(page, None)
def get_page_list(self):
return self.__page_list__
def start(context, argv):
"""
    On start we create a HARLog instance. You will have to adapt this to suit
    your actual HAR-generation needs, as it will probably be necessary to
    cluster logs by IP or to reset them from time to time.
"""
context.dump_file = None
if len(argv) > 1:
context.dump_file = argv[1]
else:
raise ValueError('Usage: -s "har_extractor.py filename" '
'(- will output to stdout, filenames ending with .zhar will result in compressed har)')
context.HARLog = _HARLog(['https://github.com'])
context.seen_server = set()
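# Example invocation (output filename is illustrative):
#   mitmproxy -s "har_extractor.py dump.har"     # write the HAR log as JSON
#   mitmproxy -s "har_extractor.py dump.zhar"    # write a compressed HAR log
#   mitmproxy -s "har_extractor.py -"            # pretty-print to stdout on shutdown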
def response(context, flow):
"""
Called when a server response has been received. At the time of this message both
a request and a response are present and completely done.
"""
# Values are converted from float seconds to int milliseconds later.
ssl_time = -.001
connect_time = -.001
if flow.server_conn not in context.seen_server:
# Calculate the connect_time for this server_conn. Afterwards add it to seen list, in
# order to avoid the connect_time being present in entries that use an existing connection.
connect_time = flow.server_conn.timestamp_tcp_setup - flow.server_conn.timestamp_start
context.seen_server.add(flow.server_conn)
if flow.server_conn.timestamp_ssl_setup is not None:
# Get the ssl_time for this server_conn as the difference between the start of the successful
# tcp setup and the successful ssl setup. If no ssl setup has been made it is left as -1 since
# it doesn't apply to this connection.
ssl_time = flow.server_conn.timestamp_ssl_setup - flow.server_conn.timestamp_tcp_setup
# Calculate the raw timings from the different timestamps present in the request and response object.
    # DNS timings cannot be calculated, for lack of a way to measure them. The same goes for HAR blocked:
# MITMProxy will open a server connection as soon as it receives the host and port from the client
# connection. So the time spent waiting is actually spent waiting between request.timestamp_end and
# response.timestamp_start thus it correlates to HAR wait instead.
timings_raw = {'send': flow.request.timestamp_end - flow.request.timestamp_start,
'wait': flow.response.timestamp_start - flow.request.timestamp_end,
'receive': flow.response.timestamp_end - flow.response.timestamp_start,
'connect': connect_time,
'ssl': ssl_time}
# HAR timings are integers in ms, so we have to re-encode the raw timings to that format.
timings = dict([(key, int(1000 * value)) for key, value in timings_raw.iteritems()])
# The full_time is the sum of all timings. Timings set to -1 will be ignored as per spec.
full_time = 0
for item in timings.values():
if item > -1:
full_time += item
    started_date_time = datetime.fromtimestamp(flow.request.timestamp_start, tz=UTC).isoformat()
request_query_string = [{"name": k, "value": v} for k, v in flow.request.get_query()]
request_http_version = ".".join([str(v) for v in flow.request.httpversion])
# Cookies are shaped as tuples by MITMProxy.
request_cookies = [{"name": k.strip(), "value": v[0]} for k, v in (flow.request.get_cookies() or {}).iteritems()]
request_headers = [{"name": k, "value": v} for k, v in flow.request.headers]
request_headers_size = len(str(flow.request.headers))
request_body_size = len(flow.request.content)
response_http_version = ".".join([str(v) for v in flow.response.httpversion])
# Cookies are shaped as tuples by MITMProxy.
response_cookies = [{"name": k.strip(), "value": v[0]} for k, v in (flow.response.get_cookies() or {}).iteritems()]
response_headers = [{"name": k, "value": v} for k, v in flow.response.headers]
response_headers_size = len(str(flow.response.headers))
response_body_size = len(flow.response.content)
response_body_decoded_size = len(flow.response.get_decoded_content())
response_body_compression = response_body_decoded_size - response_body_size
response_mime_type = flow.response.headers.get_first('Content-Type', '')
response_redirect_url = flow.response.headers.get_first('Location', '')
entry = HAR.entries({"startedDateTime": started_date_time,
"time": full_time,
"request": {"method": flow.request.method,
"url": flow.request.url,
"httpVersion": request_http_version,
"cookies": request_cookies,
"headers": request_headers,
"queryString": request_query_string,
"headersSize": request_headers_size,
"bodySize": request_body_size, },
"response": {"status": flow.response.code,
"statusText": flow.response.msg,
"httpVersion": response_http_version,
"cookies": response_cookies,
"headers": response_headers,
"content": {"size": response_body_size,
"compression": response_body_compression,
"mimeType": response_mime_type},
"redirectURL": response_redirect_url,
"headersSize": response_headers_size,
"bodySize": response_body_size, },
"cache": {},
"timings": timings, })
# If the current url is in the page list of context.HARLog or does not have a referrer we add it as a new
# pages object.
if flow.request.url in context.HARLog.get_page_list() or flow.request.headers.get('Referer', None) is None:
page_id = context.HARLog.create_page_id()
context.HARLog.add(HAR.pages({"startedDateTime": entry['startedDateTime'],
"id": page_id,
"title": flow.request.url, }))
context.HARLog.set_page_ref(flow.request.url, page_id)
entry['pageref'] = page_id
# Lookup the referer in the page_ref of context.HARLog to point this entries pageref attribute to the right
# pages object, then set it as a new reference to build a reference tree.
elif context.HARLog.get_page_ref(flow.request.headers.get('Referer', (None, ))[0]) is not None:
entry['pageref'] = context.HARLog.get_page_ref(flow.request.headers['Referer'][0])
context.HARLog.set_page_ref(flow.request.headers['Referer'][0], entry['pageref'])
context.HARLog.add(entry)
def done(context):
"""
Called once on script shutdown, after any other events.
"""
from pprint import pprint
import json
json_dump = context.HARLog.json()
compressed_json_dump = context.HARLog.compress()
print "=" * 100
if context.dump_file == '-':
pprint(json.loads(json_dump))
elif context.dump_file.endswith('.zhar'):
file(context.dump_file, "w").write(compressed_json_dump)
else:
file(context.dump_file, "w").write(json_dump)
print "=" * 100
print "HAR log finished with %s bytes (%s bytes compressed)" % (len(json_dump), len(compressed_json_dump))
print "Compression rate is %s%%" % str(100. * len(compressed_json_dump) / len(json_dump))
print "=" * 100
def print_attributes(obj, filter_string=None, hide_privates=False):
"""
Useful helper method to quickly get all attributes of an object and its values.
"""
for attr in dir(obj):
if hide_privates and "__" in attr:
continue
if filter_string is not None and filter_string not in attr:
continue
value = getattr(obj, attr)
print "%s.%s" % ('obj', attr), value, type(value)
|
devasia1000/anti_adblock
|
examples/har_extractor.py
|
Python
|
mit
| 10,062
| 0.004373
|
# -*- coding: utf-8 -*-
from module.plugins.internal.SimpleCrypter import SimpleCrypter
class CrockoComFolder(SimpleCrypter):
__name__ = "CrockoComFolder"
__type__ = "crypter"
__version__ = "0.06"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?crocko\.com/f/.+'
__config__ = [("activated", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("folder_per_package", "Default;Yes;No",
"Create folder for each package", "Default"),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10)]
__description__ = """Crocko.com folder decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
LINK_PATTERN = r'<td class="last"><a href="(.+?)">download</a>'
|
rlindner81/pyload
|
module/plugins/crypter/CrockoComFolder.py
|
Python
|
gpl-3.0
| 875
| 0.002286
|
import numpy as np
import copy
import random
import deepchem
class TicTacToeEnvironment(deepchem.rl.Environment):
"""
Play tictactoe against a randomly acting opponent
"""
X = np.array([1.0, 0.0])
O = np.array([0.0, 1.0])
EMPTY = np.array([0.0, 0.0])
ILLEGAL_MOVE_PENALTY = -3.0
LOSS_PENALTY = -3.0
NOT_LOSS = 0.1
DRAW_REWARD = 5.0
WIN_REWARD = 10.0
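  # Observation: a single (3, 3, 2) array where each cell holds X=[1, 0], O=[0, 1]
  # or EMPTY=[0, 0]. Actions are the integers 0-8, decoded in step() as
  # (row, col) = (action // 3, action % 3).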
def __init__(self):
super(TicTacToeEnvironment, self).__init__([(3, 3, 2)], 9)
self.reset()
def reset(self):
self._terminated = False
self._state = [np.zeros(shape=(3, 3, 2), dtype=np.float32)]
# Randomize who goes first
if random.randint(0, 1) == 1:
move = self.get_O_move()
self._state[0][move[0]][move[1]] = TicTacToeEnvironment.O
def step(self, action):
self._state = copy.deepcopy(self._state)
row = action // 3
col = action % 3
# Illegal move -- the square is not empty
if not np.all(self._state[0][row][col] == TicTacToeEnvironment.EMPTY):
self._terminated = True
return TicTacToeEnvironment.ILLEGAL_MOVE_PENALTY
# Move X
self._state[0][row][col] = TicTacToeEnvironment.X
# Did X Win
if self.check_winner(TicTacToeEnvironment.X):
self._terminated = True
return TicTacToeEnvironment.WIN_REWARD
if self.game_over():
self._terminated = True
return TicTacToeEnvironment.DRAW_REWARD
move = self.get_O_move()
self._state[0][move[0]][move[1]] = TicTacToeEnvironment.O
# Did O Win
if self.check_winner(TicTacToeEnvironment.O):
self._terminated = True
return TicTacToeEnvironment.LOSS_PENALTY
if self.game_over():
self._terminated = True
return TicTacToeEnvironment.DRAW_REWARD
return TicTacToeEnvironment.NOT_LOSS
def get_O_move(self):
empty_squares = []
for row in range(3):
for col in range(3):
if np.all(self._state[0][row][col] == TicTacToeEnvironment.EMPTY):
empty_squares.append((row, col))
return random.choice(empty_squares)
def check_winner(self, player):
for i in range(3):
row = np.sum(self._state[0][i][:], axis=0)
if np.all(row == player * 3):
return True
      col = np.sum(self._state[0][:, i], axis=0)
if np.all(col == player * 3):
return True
diag1 = self._state[0][0][0] + self._state[0][1][1] + self._state[0][2][2]
if np.all(diag1 == player * 3):
return True
diag2 = self._state[0][0][2] + self._state[0][1][1] + self._state[0][2][0]
if np.all(diag2 == player * 3):
return True
return False
def game_over(self):
for i in range(3):
for j in range(3):
if np.all(self._state[0][i][j] == TicTacToeEnvironment.EMPTY):
return False
return True
def display(self):
state = self._state[0]
s = ""
for row in range(3):
for col in range(3):
if np.all(state[row][col] == TicTacToeEnvironment.EMPTY):
s += "_"
if np.all(state[row][col] == TicTacToeEnvironment.X):
s += "X"
if np.all(state[row][col] == TicTacToeEnvironment.O):
s += "O"
s += "\n"
return s
|
Agent007/deepchem
|
deepchem/rl/envs/tictactoe.py
|
Python
|
mit
| 3,149
| 0.013973
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
from distutils import versionpredicate
import netaddr
from oslo_utils import strutils
from oslo_versionedobjects import fields
import six
# TODO(berrange) Temporary import for Arch class
from nova.compute import arch
# TODO(berrange) Temporary import for CPU* classes
from nova.compute import cpumodel
# TODO(berrange) Temporary import for HVType class
from nova.compute import hv_type
# TODO(berrange) Temporary import for VMMode class
from nova.compute import vm_mode
from nova import exception
from nova.i18n import _
from nova.network import model as network_model
# Import field errors from oslo.versionedobjects
KeyTypeError = fields.KeyTypeError
ElementTypeError = fields.ElementTypeError
# Import fields from oslo.versionedobjects
BooleanField = fields.BooleanField
UnspecifiedDefault = fields.UnspecifiedDefault
IntegerField = fields.IntegerField
UUIDField = fields.UUIDField
FloatField = fields.FloatField
StringField = fields.StringField
EnumField = fields.EnumField
DateTimeField = fields.DateTimeField
DictOfStringsField = fields.DictOfStringsField
DictOfNullableStringsField = fields.DictOfNullableStringsField
DictOfIntegersField = fields.DictOfIntegersField
ListOfStringsField = fields.ListOfStringsField
SetOfIntegersField = fields.SetOfIntegersField
ListOfSetsOfIntegersField = fields.ListOfSetsOfIntegersField
ListOfDictOfNullableStringsField = fields.ListOfDictOfNullableStringsField
DictProxyField = fields.DictProxyField
ObjectField = fields.ObjectField
ListOfObjectsField = fields.ListOfObjectsField
# NOTE(danms): These are things we need to import for some of our
# own implementations below, our tests, or other transitional
# bits of code. These should be removable after we finish our
# conversion
Enum = fields.Enum
Field = fields.Field
FieldType = fields.FieldType
Set = fields.Set
Dict = fields.Dict
List = fields.List
Object = fields.Object
class Architecture(Enum):
# TODO(berrange): move all constants out of 'nova.compute.arch'
# into fields on this class
def __init__(self, **kwargs):
super(Architecture, self).__init__(
valid_values=arch.ALL, **kwargs)
def coerce(self, obj, attr, value):
try:
value = arch.canonicalize(value)
except exception.InvalidArchitectureName:
msg = _("Architecture name '%s' is not valid") % value
raise ValueError(msg)
return super(Architecture, self).coerce(obj, attr, value)
class BlockDeviceDestinationType(Enum):
"""Represents possible destination_type values for a BlockDeviceMapping."""
LOCAL = 'local'
VOLUME = 'volume'
ALL = (LOCAL, VOLUME)
def __init__(self):
super(BlockDeviceDestinationType, self).__init__(
valid_values=BlockDeviceDestinationType.ALL)
class BlockDeviceSourceType(Enum):
"""Represents the possible source_type values for a BlockDeviceMapping."""
BLANK = 'blank'
IMAGE = 'image'
SNAPSHOT = 'snapshot'
VOLUME = 'volume'
ALL = (BLANK, IMAGE, SNAPSHOT, VOLUME)
def __init__(self):
super(BlockDeviceSourceType, self).__init__(
valid_values=BlockDeviceSourceType.ALL)
class BlockDeviceType(Enum):
"""Represents possible device_type values for a BlockDeviceMapping."""
CDROM = 'cdrom'
DISK = 'disk'
FLOPPY = 'floppy'
FS = 'fs'
LUN = 'lun'
ALL = (CDROM, DISK, FLOPPY, FS, LUN)
def __init__(self):
super(BlockDeviceType, self).__init__(
valid_values=BlockDeviceType.ALL)
class CPUAllocationPolicy(Enum):
DEDICATED = "dedicated"
SHARED = "shared"
ALL = (DEDICATED, SHARED)
def __init__(self):
super(CPUAllocationPolicy, self).__init__(
valid_values=CPUAllocationPolicy.ALL)
class CPUMode(Enum):
# TODO(berrange): move all constants out of 'nova.compute.cpumodel'
# into fields on this class
def __init__(self, **kwargs):
super(CPUMode, self).__init__(
valid_values=cpumodel.ALL_CPUMODES, **kwargs)
class CPUMatch(Enum):
# TODO(berrange): move all constants out of 'nova.compute.cpumodel'
# into fields on this class
def __init__(self, **kwargs):
super(CPUMatch, self).__init__(
valid_values=cpumodel.ALL_MATCHES, **kwargs)
class CPUFeaturePolicy(Enum):
# TODO(berrange): move all constants out of 'nova.compute.cpumodel'
# into fields on this class
def __init__(self, **kwargs):
super(CPUFeaturePolicy, self).__init__(
valid_values=cpumodel.ALL_POLICIES, **kwargs)
class DiskBus(Enum):
FDC = "fdc"
IDE = "ide"
SATA = "sata"
SCSI = "scsi"
USB = "usb"
VIRTIO = "virtio"
XEN = "xen"
LXC = "lxc"
UML = "uml"
ALL = (FDC, IDE, SATA, SCSI, USB, VIRTIO, XEN, LXC, UML)
def __init__(self):
super(DiskBus, self).__init__(
valid_values=DiskBus.ALL)
class HVType(Enum):
# TODO(berrange): move all constants out of 'nova.compute.hv_type'
# into fields on this class
def __init__(self):
super(HVType, self).__init__(
valid_values=hv_type.ALL)
def coerce(self, obj, attr, value):
try:
value = hv_type.canonicalize(value)
except exception.InvalidHypervisorVirtType:
msg = _("Hypervisor virt type '%s' is not valid") % value
raise ValueError(msg)
return super(HVType, self).coerce(obj, attr, value)
class OSType(Enum):
LINUX = "linux"
WINDOWS = "windows"
ALL = (LINUX, WINDOWS)
def __init__(self):
super(OSType, self).__init__(
valid_values=OSType.ALL)
def coerce(self, obj, attr, value):
# Some code/docs use upper case or initial caps
# so canonicalize to all lower case
value = value.lower()
return super(OSType, self).coerce(obj, attr, value)
class RNGModel(Enum):
VIRTIO = "virtio"
ALL = (VIRTIO,)
def __init__(self):
super(RNGModel, self).__init__(
valid_values=RNGModel.ALL)
class SCSIModel(Enum):
BUSLOGIC = "buslogic"
IBMVSCSI = "ibmvscsi"
LSILOGIC = "lsilogic"
LSISAS1068 = "lsisas1068"
LSISAS1078 = "lsisas1078"
VIRTIO_SCSI = "virtio-scsi"
VMPVSCSI = "vmpvscsi"
ALL = (BUSLOGIC, IBMVSCSI, LSILOGIC, LSISAS1068,
LSISAS1078, VIRTIO_SCSI, VMPVSCSI)
def __init__(self):
super(SCSIModel, self).__init__(
valid_values=SCSIModel.ALL)
def coerce(self, obj, attr, value):
# Some compat for strings we'd see in the legacy
# vmware_adaptertype image property
value = value.lower()
if value == "lsilogicsas":
value = SCSIModel.LSISAS1068
elif value == "paravirtual":
value = SCSIModel.VMPVSCSI
return super(SCSIModel, self).coerce(obj, attr, value)
class VideoModel(Enum):
CIRRUS = "cirrus"
QXL = "qxl"
VGA = "vga"
VMVGA = "vmvga"
XEN = "xen"
ALL = (CIRRUS, QXL, VGA, VMVGA, XEN)
def __init__(self):
super(VideoModel, self).__init__(
valid_values=VideoModel.ALL)
class VIFModel(Enum):
LEGACY_VALUES = {"virtuale1000":
network_model.VIF_MODEL_E1000,
"virtuale1000e":
network_model.VIF_MODEL_E1000E,
"virtualpcnet32":
network_model.VIF_MODEL_PCNET,
"virtualsriovethernetcard":
network_model.VIF_MODEL_SRIOV,
"virtualvmxnet":
network_model.VIF_MODEL_VMXNET,
"virtualvmxnet3":
network_model.VIF_MODEL_VMXNET3,
}
def __init__(self):
super(VIFModel, self).__init__(
valid_values=network_model.VIF_MODEL_ALL)
def _get_legacy(self, value):
return value
def coerce(self, obj, attr, value):
# Some compat for strings we'd see in the legacy
# hw_vif_model image property
value = value.lower()
value = VIFModel.LEGACY_VALUES.get(value, value)
return super(VIFModel, self).coerce(obj, attr, value)
class VMMode(Enum):
# TODO(berrange): move all constants out of 'nova.compute.vm_mode'
# into fields on this class
def __init__(self):
super(VMMode, self).__init__(
valid_values=vm_mode.ALL)
def coerce(self, obj, attr, value):
try:
value = vm_mode.canonicalize(value)
except exception.InvalidVirtualMachineMode:
msg = _("Virtual machine mode '%s' is not valid") % value
raise ValueError(msg)
return super(VMMode, self).coerce(obj, attr, value)
class WatchdogAction(Enum):
NONE = "none"
PAUSE = "pause"
POWEROFF = "poweroff"
RESET = "reset"
ALL = (NONE, PAUSE, POWEROFF, RESET)
def __init__(self):
super(WatchdogAction, self).__init__(
valid_values=WatchdogAction.ALL)
class MonitorMetricType(Enum):
CPU_FREQUENCY = "cpu.frequency"
CPU_USER_TIME = "cpu.user.time"
CPU_KERNEL_TIME = "cpu.kernel.time"
CPU_IDLE_TIME = "cpu.idle.time"
CPU_IOWAIT_TIME = "cpu.iowait.time"
CPU_USER_PERCENT = "cpu.user.percent"
CPU_KERNEL_PERCENT = "cpu.kernel.percent"
CPU_IDLE_PERCENT = "cpu.idle.percent"
CPU_IOWAIT_PERCENT = "cpu.iowait.percent"
CPU_PERCENT = "cpu.percent"
NUMA_MEM_BW_MAX = "numa.membw.max"
NUMA_MEM_BW_CURRENT = "numa.membw.current"
ALL = (
CPU_FREQUENCY,
CPU_USER_TIME,
CPU_KERNEL_TIME,
CPU_IDLE_TIME,
CPU_IOWAIT_TIME,
CPU_USER_PERCENT,
CPU_KERNEL_PERCENT,
CPU_IDLE_PERCENT,
CPU_IOWAIT_PERCENT,
CPU_PERCENT,
NUMA_MEM_BW_MAX,
NUMA_MEM_BW_CURRENT,
)
def __init__(self):
super(MonitorMetricType, self).__init__(
valid_values=MonitorMetricType.ALL)
# NOTE(sbauza): Remove this on next release of oslo.versionedobjects
class VersionPredicate(fields.String):
@staticmethod
def coerce(obj, attr, value):
try:
versionpredicate.VersionPredicate('check (%s)' % value)
except ValueError:
raise ValueError(_('Version %(val)s is not a valid predicate in '
'field %(attr)s') %
{'val': value, 'attr': attr})
return value
class PciDeviceStatus(Enum):
AVAILABLE = "available"
CLAIMED = "claimed"
ALLOCATED = "allocated"
REMOVED = "removed" # The device has been hot-removed and not yet deleted
DELETED = "deleted" # The device is marked not available/deleted.
ALL = (AVAILABLE, CLAIMED, ALLOCATED, REMOVED, DELETED)
def __init__(self):
super(PciDeviceStatus, self).__init__(
valid_values=PciDeviceStatus.ALL)
class PciDeviceType(Enum):
# NOTE(jaypipes): It's silly that the word "type-" is in these constants,
# but alas, these were the original constant strings used...
STANDARD = "type-PCI"
SRIOV_PF = "type-PF"
SRIOV_VF = "type-VF"
ALL = (STANDARD, SRIOV_PF, SRIOV_VF)
def __init__(self):
super(PciDeviceType, self).__init__(
valid_values=PciDeviceType.ALL)
# NOTE(danms): Remove this on next release of oslo.versionedobjects
class FlexibleBoolean(fields.Boolean):
@staticmethod
def coerce(obj, attr, value):
return strutils.bool_from_string(value)
class IPAddress(FieldType):
@staticmethod
def coerce(obj, attr, value):
try:
return netaddr.IPAddress(value)
except netaddr.AddrFormatError as e:
raise ValueError(six.text_type(e))
def from_primitive(self, obj, attr, value):
return self.coerce(obj, attr, value)
@staticmethod
def to_primitive(obj, attr, value):
return str(value)
class IPV4Address(IPAddress):
@staticmethod
def coerce(obj, attr, value):
result = IPAddress.coerce(obj, attr, value)
if result.version != 4:
raise ValueError(_('Network "%(val)s" is not valid '
'in field %(attr)s') %
{'val': value, 'attr': attr})
return result
class IPV6Address(IPAddress):
@staticmethod
def coerce(obj, attr, value):
result = IPAddress.coerce(obj, attr, value)
if result.version != 6:
raise ValueError(_('Network "%(val)s" is not valid '
'in field %(attr)s') %
{'val': value, 'attr': attr})
return result
class IPV4AndV6Address(IPAddress):
@staticmethod
def coerce(obj, attr, value):
result = IPAddress.coerce(obj, attr, value)
if result.version != 4 and result.version != 6:
raise ValueError(_('Network "%(val)s" is not valid '
'in field %(attr)s') %
{'val': value, 'attr': attr})
return result
class IPNetwork(IPAddress):
@staticmethod
def coerce(obj, attr, value):
try:
return netaddr.IPNetwork(value)
except netaddr.AddrFormatError as e:
raise ValueError(six.text_type(e))
class IPV4Network(IPNetwork):
@staticmethod
def coerce(obj, attr, value):
try:
return netaddr.IPNetwork(value, version=4)
except netaddr.AddrFormatError as e:
raise ValueError(six.text_type(e))
class IPV6Network(IPNetwork):
@staticmethod
def coerce(obj, attr, value):
try:
return netaddr.IPNetwork(value, version=6)
except netaddr.AddrFormatError as e:
raise ValueError(six.text_type(e))
class NetworkModel(FieldType):
@staticmethod
def coerce(obj, attr, value):
if isinstance(value, network_model.NetworkInfo):
return value
elif isinstance(value, six.string_types):
# Hmm, do we need this?
return network_model.NetworkInfo.hydrate(value)
else:
raise ValueError(_('A NetworkModel is required in field %s') %
attr)
@staticmethod
def to_primitive(obj, attr, value):
return value.json()
@staticmethod
def from_primitive(obj, attr, value):
return network_model.NetworkInfo.hydrate(value)
def stringify(self, value):
return 'NetworkModel(%s)' % (
','.join([str(vif['id']) for vif in value]))
class NonNegativeFloat(FieldType):
@staticmethod
def coerce(obj, attr, value):
v = float(value)
if v < 0:
raise ValueError(_('Value must be >= 0 for field %s') % attr)
return v
class NonNegativeInteger(FieldType):
@staticmethod
def coerce(obj, attr, value):
v = int(value)
if v < 0:
raise ValueError(_('Value must be >= 0 for field %s') % attr)
return v
class AutoTypedField(fields.Field):
AUTO_TYPE = None
def __init__(self, **kwargs):
super(AutoTypedField, self).__init__(self.AUTO_TYPE, **kwargs)
# FIXME(danms): Remove this after oslo.versionedobjects gets it
class BaseEnumField(AutoTypedField):
'''This class should not be directly instantiated. Instead
subclass it and set AUTO_TYPE to be a SomeEnum()
where SomeEnum is a subclass of Enum.
'''
def __init__(self, **kwargs):
if self.AUTO_TYPE is None:
raise exception.EnumFieldUnset(
fieldname=self.__class__.__name__)
if not isinstance(self.AUTO_TYPE, Enum):
raise exception.EnumFieldInvalid(
                typename=self.AUTO_TYPE.__class__.__name__,
fieldname=self.__class__.__name__)
super(BaseEnumField, self).__init__(**kwargs)
def __repr__(self):
valid_values = self._type._valid_values
args = {
'nullable': self._nullable,
'default': self._default,
}
if valid_values:
args.update({'valid_values': valid_values})
args = OrderedDict(sorted(args.items()))
return '%s(%s)' % (self._type.__class__.__name__,
','.join(['%s=%s' % (k, v)
for k, v in args.items()]))
class ArchitectureField(BaseEnumField):
AUTO_TYPE = Architecture()
class BlockDeviceDestinationTypeField(BaseEnumField):
AUTO_TYPE = BlockDeviceDestinationType()
class BlockDeviceSourceTypeField(BaseEnumField):
AUTO_TYPE = BlockDeviceSourceType()
class BlockDeviceTypeField(BaseEnumField):
AUTO_TYPE = BlockDeviceType()
class CPUAllocationPolicyField(BaseEnumField):
AUTO_TYPE = CPUAllocationPolicy()
class CPUModeField(BaseEnumField):
AUTO_TYPE = CPUMode()
class CPUMatchField(BaseEnumField):
AUTO_TYPE = CPUMatch()
class CPUFeaturePolicyField(BaseEnumField):
AUTO_TYPE = CPUFeaturePolicy()
class DiskBusField(BaseEnumField):
AUTO_TYPE = DiskBus()
class HVTypeField(BaseEnumField):
AUTO_TYPE = HVType()
class OSTypeField(BaseEnumField):
AUTO_TYPE = OSType()
class RNGModelField(BaseEnumField):
AUTO_TYPE = RNGModel()
class SCSIModelField(BaseEnumField):
AUTO_TYPE = SCSIModel()
class VideoModelField(BaseEnumField):
AUTO_TYPE = VideoModel()
class VIFModelField(BaseEnumField):
AUTO_TYPE = VIFModel()
class VMModeField(BaseEnumField):
AUTO_TYPE = VMMode()
class WatchdogActionField(BaseEnumField):
AUTO_TYPE = WatchdogAction()
class MonitorMetricTypeField(BaseEnumField):
AUTO_TYPE = MonitorMetricType()
# FIXME(sbauza): Remove this after oslo.versionedobjects gets it
class VersionPredicateField(AutoTypedField):
AUTO_TYPE = VersionPredicate()
class PciDeviceStatusField(BaseEnumField):
AUTO_TYPE = PciDeviceStatus()
class PciDeviceTypeField(BaseEnumField):
AUTO_TYPE = PciDeviceType()
# FIXME(danms): Remove this after oslo.versionedobjects gets it
# This is a flexible interpretation of boolean
# values using common user friendly semantics for
# truth/falsehood. ie strings like 'yes', 'no',
# 'on', 'off', 't', 'f' get mapped to values you
# would expect.
class FlexibleBooleanField(AutoTypedField):
AUTO_TYPE = FlexibleBoolean()
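# Illustrative sketch (not part of the original file): FlexibleBoolean coerces
# user-friendly truth strings via strutils.bool_from_string, e.g.
#
#   FlexibleBoolean.coerce(None, 'enabled', 'yes')   # -> True
#   FlexibleBoolean.coerce(None, 'enabled', 'off')   # -> False
#   FlexibleBoolean.coerce(None, 'enabled', 't')     # -> True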
class IPAddressField(AutoTypedField):
AUTO_TYPE = IPAddress()
class IPV4AddressField(AutoTypedField):
AUTO_TYPE = IPV4Address()
class IPV6AddressField(AutoTypedField):
AUTO_TYPE = IPV6Address()
class IPV4AndV6AddressField(AutoTypedField):
AUTO_TYPE = IPV4AndV6Address()
class IPNetworkField(AutoTypedField):
AUTO_TYPE = IPNetwork()
class IPV4NetworkField(AutoTypedField):
AUTO_TYPE = IPV4Network()
class IPV6NetworkField(AutoTypedField):
AUTO_TYPE = IPV6Network()
class ListOfIntegersField(AutoTypedField):
AUTO_TYPE = List(fields.Integer())
# FIXME(sbauza): Remove this after oslo.versionedobjects releases it
class DictOfListOfStringsField(AutoTypedField):
AUTO_TYPE = Dict(List(fields.String()))
class NonNegativeFloatField(AutoTypedField):
AUTO_TYPE = NonNegativeFloat()
class NonNegativeIntegerField(AutoTypedField):
AUTO_TYPE = NonNegativeInteger()
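# Illustrative sketch (not part of the original file): these field classes are
# consumed by oslo.versionedobjects-style object definitions, where each entry
# in the 'fields' dict declares its type and coercion rules, e.g.
#
#   class Thing(base.NovaObject):           # hypothetical object
#       fields = {
#           'os_type': OSTypeField(),
#           'vcpus': NonNegativeIntegerField(),
#           'fixed_ip': IPV4AddressField(),
#       }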
| isyippee/nova | nova/objects/fields.py | Python | apache-2.0 | 19,847 | 0.00005 |
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from sahara.plugins import exceptions as ex
from sahara.plugins.vanilla import plugin as p
from sahara.tests.unit import base
from sahara.tests.unit import testutils as tu
class ValidationTest(base.SaharaTestCase):
def setUp(self):
super(ValidationTest, self).setUp()
self.pl = p.VanillaProvider()
def test_validate(self):
self.ng = []
self.ng.append(tu.make_ng_dict("nn", "f1", ["namenode"], 0))
self.ng.append(tu.make_ng_dict("sn", "f1", ["secondarynamenode"], 0))
self.ng.append(tu.make_ng_dict("jt", "f1", ["resourcemanager"], 0))
self.ng.append(tu.make_ng_dict("tt", "f1", ["nodemanager"], 0))
self.ng.append(tu.make_ng_dict("dn", "f1", ["datanode"], 0))
self.ng.append(tu.make_ng_dict("hs", "f1", ["historyserver"], 0))
self.ng.append(tu.make_ng_dict("oo", "f1", ["oozie"], 0))
self._validate_case(1, 1, 1, 10, 10, 0, 0)
self._validate_case(1, 0, 1, 1, 4, 0, 0)
self._validate_case(1, 1, 1, 0, 3, 0, 0)
self._validate_case(1, 0, 1, 0, 3, 0, 0)
self._validate_case(1, 1, 0, 0, 3, 0, 0)
self._validate_case(1, 0, 1, 1, 3, 1, 1)
self._validate_case(1, 1, 1, 1, 3, 1, 0)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(0, 0, 1, 10, 3, 0, 0)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(2, 0, 1, 10, 3, 0, 0)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 2, 1, 1, 3, 1, 1)
with testtools.ExpectedException(ex.RequiredServiceMissingException):
self._validate_case(1, 0, 0, 10, 3, 0, 0)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 0, 2, 10, 3, 0, 0)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 0, 1, 1, 3, 2, 1)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 0, 1, 1, 3, 1, 2)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 1, 1, 0, 2, 0, 0)
with testtools.ExpectedException(ex.RequiredServiceMissingException):
self._validate_case(1, 0, 1, 1, 3, 0, 1)
with testtools.ExpectedException(ex.RequiredServiceMissingException):
self._validate_case(1, 0, 1, 0, 3, 1, 1)
with testtools.ExpectedException(ex.RequiredServiceMissingException):
self._validate_case(1, 0, 1, 1, 0, 1, 1)
cl = self._create_cluster(
1, 1, 1, 0, 3, 0, 0,
cluster_configs={'HDFS': {'dfs.replication': 4}})
with testtools.ExpectedException(ex.InvalidComponentCountException):
self.pl.validate(cl)
self.ng.append(tu.make_ng_dict("hi", "f1", ["hiveserver"], 0))
self.ng.append(tu.make_ng_dict("sh", "f1",
["spark history server"], 0))
self._validate_case(1, 1, 0, 0, 3, 0, 0, 1, 0)
self._validate_case(1, 1, 0, 0, 3, 0, 0, 0, 1)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 1, 0, 0, 3, 0, 0, 2, 0)
with testtools.ExpectedException(ex.InvalidComponentCountException):
self._validate_case(1, 1, 0, 0, 3, 0, 0, 0, 2)
def _create_cluster(self, *args, **kwargs):
lst = []
for i in range(0, len(args)):
self.ng[i]['count'] = args[i]
lst.append(self.ng[i])
return tu.create_cluster("cluster1", "tenant1", "vanilla",
"2.7.1", lst, **kwargs)
def _validate_case(self, *args):
cl = self._create_cluster(*args)
self.pl.validate(cl)
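# Note (added for readability, not in the original test): the positional
# arguments of _validate_case/_create_cluster are instance counts applied to
# self.ng in the order built in test_validate, i.e.
# (namenode, secondarynamenode, resourcemanager, nodemanager, datanode,
#  historyserver, oozie[, hiveserver, spark history server]).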
| shakamunyi/sahara | sahara/tests/unit/plugins/vanilla/hadoop2/test_validation.py | Python | apache-2.0 | 4,497 | 0 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TtTrip.date'
db.add_column(u'timetable_tttrip', 'date',
self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'TtTrip.date'
db.delete_column(u'timetable_tttrip', 'date')
models = {
u'timetable.ttstop': {
'Meta': {'object_name': 'TtStop'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stop_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'stop_lat': ('django.db.models.fields.FloatField', [], {}),
'stop_lon': ('django.db.models.fields.FloatField', [], {}),
'stop_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'stop_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'timetable.ttstoptime': {
'Meta': {'object_name': 'TtStopTime'},
'exp_arrival': ('django.db.models.fields.DateTimeField', [], {}),
'exp_departure': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtStop']"}),
'stop_sequence': ('django.db.models.fields.IntegerField', [], {}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['timetable.TtTrip']"})
},
u'timetable.tttrip': {
'Meta': {'object_name': 'TtTrip'},
'date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shape_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'trip_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
}
}
complete_apps = ['timetable']
| hasadna/OpenTrain | webserver/opentrain/timetable/migrations/0006_auto__add_field_tttrip_date.py | Python | bsd-3-clause | 2,370 | 0.006329 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division,print_function,absolute_import,unicode_literals
import sys
import os
import subprocess
import codecs
import ctypes
import struct
import uuid
import datetime
import math
from LTsv_file import *
from LTsv_printf import *
LTsv_Tkinter=True
try:
import tkinter as Tk
import tkinter.scrolledtext as Tk_sc
import tkinter.filedialog as Tk_fd
# import messagebox as Tk_mb
except:
LTsv_Tkinter=False
#if LTsv_Tkinter == False:
# #http://shinobar.server-on.net/puppy/opt/tcl_tk-8.5.7-1-p4.sfs
# if os.path.exists("/usr/lib/python3.4"):
# sys.path.append("/usr/lib/python3.4")
# try:
# import tkinter as Tk
# import tkinter.scrolledtext as Tk_sc
# import tkinter.filedialog as Tk_fd
## import messagebox as Tk_mb
# LTsv_Tkinter=True
# except:
# LTsv_Tkinter=False
LTsv_libgtk,LTsv_libgdk,LTsv_libobj=None,None,None
LTsv_user32,LTsv_shell32,LTsv_kernel32,LTsv_gdi32=None,None,None,None
LTsv_GUI_ERROR,LTsv_GUI_GTK2,LTsv_GUI_Tkinter,LTsv_GUI_WinAPI="","GTK2","Tkinter","WinAPI"
LTsv_GUI,LTsv_Notify=LTsv_GUI_ERROR,LTsv_GUI_ERROR
#LTsv_CALLBACLTYPE=ctypes.CFUNCTYPE(ctypes.c_void_p,ctypes.POINTER(ctypes.c_ulong))
#LTsv_CALLBACLTYPE=ctypes.CFUNCTYPE(ctypes.c_bool,ctypes.c_void_p)
#LTsv_CALLBACLTYPE=ctypes.CFUNCTYPE(ctypes.c_void_p,ctypes.c_int)
LTsv_CALLBACLTYPE=ctypes.CFUNCTYPE(ctypes.c_void_p,ctypes.c_void_p)
LTsv_widgetLTSV=LTsv_newfile("LTsv_gui",LTsv_default=None)
LTsv_widgetOBJ={}; LTsv_widgetOBJcount=0
LTsv_timerOBJ={}; LTsv_timer_cbk={}
LTsv_canvas_motion_X,LTsv_canvas_motion_Y,LTsv_canvas_motion_Z=0,0,""
canvas_EMLenter,canvas_EMLmotion,canvas_EMLleave={},{},{}
canvas_CBKenter,canvas_CBKmotion,canvas_CBKleave,canvas_CBKtimeout,canvas_CBKafter,LTsv_canvasCBKpagename={},{},{},{},{},{}
LTsv_pictureOBJ,LTsv_pictureW,LTsv_pictureH={},{},{}
LTsv_iconOBJ={}; LTsv_iconOBJnotify=[]
LTsv_popupmenuOBJ={}
LTsv_default_iconuri=""
def LTsv_guiCDLLver(LTsv_libname,LTsv_libvermin,LTsv_libvermax):
LTsv_min,LTsv_max=(LTsv_libvermin,LTsv_libvermax) if LTsv_libvermin <= LTsv_libvermax else (LTsv_libvermax,LTsv_libvermin)
if LTsv_min == LTsv_max:
LTsv_max+=1
LTsv_CDLL=None
for LTsv_libver in range(LTsv_min,LTsv_max):
        try: LTsv_CDLL=ctypes.CDLL(LTsv_libname.replace('?',str(LTsv_libver)))
        except OSError: LTsv_CDLL=None  # library version not installed; try the next candidate
if LTsv_CDLL != None:
break
return LTsv_CDLL
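# Illustrative sketch (not part of the original file): LTsv_guiCDLLver probes
# soname versions by substituting '?' in the library name, e.g.
#
#   LTsv_guiCDLLver("libgtk-x11-2.0.so.?", 0, 3)
#   # tries libgtk-x11-2.0.so.0, .so.1, .so.2 and returns the first that loads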
def LTsv_guiinit(LTsv_guistyle=LTsv_GUI_GTK2,LTsv_libvermin=0,LTsv_libvermax=0):
global LTsv_GUI,LTsv_Notify,LTsv_default_iconuri
global LTsv_libgtk,LTsv_libgdk,LTsv_libobj,LTsv_user32,LTsv_shell32,LTsv_kernel32,LTsv_gdi32
LTsv_GUI=LTsv_guistyle
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_Notify=LTsv_GUI_GTK2; LTsv_default_iconuri="/usr/share/pixmaps/python.xpm"
if sys.platform.startswith("linux"): #"/usr/lib/libgtk-x11-2.0.so.0"
LTsv_libgtk=LTsv_guiCDLLver("libgtk-x11-2.0.so.?",LTsv_libvermin,LTsv_libvermax)
LTsv_libgtk.gtk_range_get_value.restype=ctypes.c_double
LTsv_libgdk=LTsv_guiCDLLver("libgdk-x11-2.0.so.?",LTsv_libvermin,LTsv_libvermax)
LTsv_libobj=LTsv_guiCDLLver("libgobject-2.0.so.?",LTsv_libvermin,LTsv_libvermax)
LTsv_libobj.g_timeout_add.restype=ctypes.c_uint
# if sys.platform.startswith("cygwin"):
# LTsv_libgtk=LTsv_guiCDLLver("cyggtk-x11-2.0-?.dll",0,10)
# LTsv_libgdk=LTsv_guiCDLLver("cyggdk-x11-2.0-?.dll",0,10)
# LTsv_libobj=LTsv_guiCDLLver("cyggobject-2.0-?.dll",0,10)
# if sys.platform.startswith("darwin"):
# LTsv_libgtk=ctypes.CDLL("/opt/local/lib/libgtk-x11-2.0.0.dylib")#"/Library/Frameworks/Gtk.framework/Libraries/libgtk-quartz-2.0.0.dylib"
# LTsv_libgdk=ctypes.CDLL("/opt/local/lib/libgdk-x11-2.0.0.dylib")#"/Library/Frameworks/Gtk.framework/Libraries/libgdk-quartz-2.0.0.dylib"
# LTsv_libobj=ctypes.CDLL("/opt/local/lib/libgobject-2.0.0.dylib")#"/Library/Frameworks/Glib.framework/Libraries/libgobject-2.0.0.dylib"
if LTsv_libgtk == None or LTsv_libgdk == None or LTsv_libobj == None:
# if sys.platform.startswith("win"):
# LTsv_GUI=LTsv_GUI_WinAPI
LTsv_GUI=LTsv_GUI_Tkinter
else:
LTsv_libgtk.gtk_init(0,0)
if LTsv_GUI == LTsv_GUI_WinAPI or LTsv_GUI == LTsv_GUI_Tkinter:
if sys.platform.startswith("win"):
LTsv_Notify=LTsv_GUI_WinAPI; LTsv_default_iconuri=sys.executable
LTsv_shell32=ctypes.windll.shell32
LTsv_user32=ctypes.windll.user32
LTsv_kernel32=ctypes.windll.kernel32
LTsv_gdi32=ctypes.windll.gdi32
elif sys.platform.startswith("linux"):
pass
else:
LTsv_GUI,LTsv_Notify=LTsv_GUI_ERROR,LTsv_GUI_ERROR; LTsv_default_iconuri=""
if not LTsv_GUI in [LTsv_GUI_ERROR,LTsv_GUI_GTK2,LTsv_GUI_Tkinter,LTsv_GUI_WinAPI]: LTsv_GUI=LTsv_GUI_ERROR
return LTsv_GUI
def LTsv_global_GUI(): return LTsv_GUI
def LTsv_global_Notify(): return LTsv_Notify
def LTsv_global_GTK2(): return LTsv_GUI_GTK2
def LTsv_global_Tkinter(): return LTsv_GUI_Tkinter
def LTsv_global_WinAPI(): return LTsv_GUI_WinAPI
def LTsv_global_libgtk(): return LTsv_libgtk
def LTsv_global_libgdk(): return LTsv_libgdk
def LTsv_global_libobj(): return LTsv_libobj
def LTsv_global_canvasmotionX(): return LTsv_canvas_motion_X
def LTsv_global_canvasmotionY(): return LTsv_canvas_motion_Y
def LTsv_global_canvasmotionZ(): return LTsv_canvas_motion_Z
def LTsv_global_canvascolor(): return LTsv_canvascolor
def LTsv_global_canvasbgcolor(): return LTsv_canvasbgcolor
#def LTsv_global_widgetgetltsv(): return LTsv_widgetLTSV
def LTsv_global_widgetltsv(new_LTSV=None):
global LTsv_widgetLTSV
LTsv_widgetLTSV=LTsv_widgetLTSV if new_LTSV == None else new_LTSV
return LTsv_widgetLTSV
def LTsv_global_widgetgetpage(LTsv_widgetPAGENAME): return LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
def LTsv_global_widgetOBJ(LTsv_objid): return LTsv_widgetOBJ[LTsv_objid]
def LTsv_global_pictureOBJ(LTsv_objid): return LTsv_pictureOBJ[LTsv_objid]
def LTsv_global_pictureW(LTsv_objid): return LTsv_pictureW[LTsv_objid]
def LTsv_global_pictureH(LTsv_objid): return LTsv_pictureH[LTsv_objid]
def LTsv_global_iconOBJ(LTsv_objid): return LTsv_iconOBJ[LTsv_objid]
def LTsv_global_popupmenuOBJ(LTsv_objid): return LTsv_popupmenuOBJ[LTsv_objid]
def LTsv_widget_newUUID(LTsv_widgetID=None):
global LTsv_widget_oldID
if LTsv_widgetID == False:
LTsv_uuid=LTsv_widget_oldID
else:
LTsv_uuid=uuid.uuid4().hex+'+'+str(time.time())
LTsv_widget_oldID=LTsv_uuid
return LTsv_uuid
LTsv_widget_oldID=LTsv_widget_newUUID()
def LTsv_widget_newobj(LTsv_widgetPAGE,LTsv_widgetoption,widget_obj):
global LTsv_widgetOBJ,LTsv_widgetOBJcount
LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,LTsv_widgetoption,str(LTsv_widgetOBJcount))
LTsv_widgetOBJ[str(LTsv_widgetOBJcount)]=widget_obj; LTsv_widgetOBJcount+=1
return LTsv_widgetPAGE
def LTsv_widget_getobj(LTsv_widgetPAGE,LTsv_widgetoption):
LTsv_widgetOBJcount=LTsv_readlinerest(LTsv_widgetPAGE,LTsv_widgetoption)
if LTsv_widgetOBJcount in LTsv_widgetOBJ:
return LTsv_widgetOBJ[LTsv_widgetOBJcount]
else:
return None
def LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=None,widget_k=None,widget_t=None,widget_u=None,widget_s=None,widget_e=None,widget_a=None,widget_v=None,widget_b=None, \
widget_p=None,widget_m=None,widget_g=None,widget_f=None,widget_x=None,widget_y=None,widget_w=None,widget_h=None,widget_c=None, \
event_z=None,event_k=None,event_y=None,event_b=None,event_p=None,event_r=None,event_e=None,event_m=None,event_l=None,event_a=None,event_u=None, \
menu_o=None,menu_b=None,menu_c=None,dialog_t=None,dialog_c=None, \
kbd_p=None,kbd_r=None,kbd_m=None,kbd_e=None,kbd_l=None,kbd_i=None,kbd_s=None,kbd_d=None,kbd_t=None,kbd_u=None,kbd_k=None):
if widget_o != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetobj",widget_o)
if widget_k != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetkind",widget_k)
if widget_t != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgettext",widget_t)
if widget_u != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgeturi",widget_u)
if widget_s != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetstart",str(widget_s))
if widget_e != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetend",str(widget_e))
if widget_a != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetadd",str(widget_a))
if widget_v != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetstringvar",widget_v)
if widget_b != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetbooleanvar",widget_b)
if widget_p != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetphotoimage",widget_p)
if widget_m != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetpixmap",widget_m)
if widget_g != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetgc",widget_g)
if widget_f != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetfont",widget_f)
if widget_x != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetsizeX",str(widget_x))
if widget_y != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetsizeY",str(widget_y))
if widget_w != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetsizeW",str(widget_w))
if widget_h != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"widgetsizeH",str(widget_h))
if widget_c != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetcontainer",widget_c)
if event_z != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetresize",event_z)
if event_k != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"keyboard_press",event_k)
if event_y != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"keyboard_release",event_y)
if event_b != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"widgetcallback",event_b)
if event_p != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"mouse_press",event_p)
if event_r != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"mouse_release",event_r)
if event_e != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"mouse_enter",event_e)
if event_m != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"mouse_motion",event_m)
if event_l != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"mouse_leave",event_l)
if event_a != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"notify_activate",event_a)
if event_u != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"notify_popupmenu",event_u)
if menu_o != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"popupmenuobj",menu_o)
if menu_b != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"popupmenulist",menu_b)
if menu_c != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"popupmenuclick",menu_c)
if dialog_t != None: LTsv_widgetPAGE=LTsv_pushlinerest(LTsv_widgetPAGE,"dialog_type",str(dialog_t))
if dialog_c != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"dialog_close",dialog_c)
if kbd_p != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_press",kbd_p)
if kbd_r != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_release",kbd_r)
if kbd_m != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_motion",kbd_m)
if kbd_e != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_enter",kbd_e)
if kbd_l != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_leave",kbd_l)
if kbd_i != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_input",kbd_i)
if kbd_s != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_settext",kbd_s)
if kbd_d != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_deftext",kbd_d)
if kbd_t != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_gettext",kbd_t)
if kbd_u != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_geturi",kbd_u)
if kbd_k != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_keyenter",kbd_k)
return LTsv_widgetPAGE
def LTsv_widgetPAGEKBD(LTsv_widgetPAGE,clip_a=None,clip_b=None,clip_c=None,clip_d=None,clip_e=None,clip_f=None,clip_g=None, \
clip_h=None,clip_i=None,clip_j=None,clip_k=None,clip_l=None,clip_m=None,clip_n=None, \
clip_o=None,clip_p=None,clip_q=None,clip_r=None,clip_s=None,clip_t=None,clip_u=None, \
clip_v=None,clip_w=None,clip_x=None,clip_y=None,clip_z=None):
if clip_a != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_A",clip_a)
if clip_b != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_V",clip_b)
if clip_c != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_copy",clip_c)
if clip_d != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_D",clip_d)
if clip_e != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_R",clip_e)
if clip_f != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_find",clip_f)
if clip_g != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_G",clip_g)
if clip_h != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_H",clip_h)
if clip_i != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_I",clip_i)
if clip_j != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_J",clip_j)
if clip_k != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_K",clip_k)
if clip_l != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_L",clip_l)
if clip_m != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_M",clip_m)
if clip_n != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_N",clip_n)
if clip_o != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_open",clip_o)
if clip_p != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_P",clip_p)
if clip_q != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_Q",clip_q)
if clip_r != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_R",clip_r)
if clip_s != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_save",clip_s)
if clip_t != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_T",clip_t)
if clip_u != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_U",clip_u)
if clip_v != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_paste",clip_v)
if clip_w != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_W",clip_w)
if clip_x != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_cut",clip_x)
if clip_y != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_Y",clip_y)
if clip_z != None: LTsv_widgetPAGE=LTsv_widget_newobj(LTsv_widgetPAGE,"editcanvas_Z",clip_z)
return LTsv_widgetPAGE
def LTsv_fonttuple(LTsv_line):
LTsv_fontlist=None
if LTsv_line != None:
LTsv_fontopts=LTsv_line.replace('\n','\t').replace('\t',',').strip(',').split(',')
LTsv_fontlist=[]
for LTsv_fontopt in LTsv_fontopts:
LTsv_fontlist.append(LTsv_fontopt)
if len(LTsv_fontlist)>=3:
break
return tuple(LTsv_fontlist) if LTsv_fontlist != None else None
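# Illustrative sketch (not part of the original file): LTsv_fonttuple turns a
# comma/tab/newline separated font spec into a tuple of at most three elements
# (family, size, style) suitable for Tkinter's font= option, e.g.
#
#   LTsv_fonttuple("Sans,12,bold")   # -> ('Sans', '12', 'bold')
#   LTsv_fonttuple(None)             # -> None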
def LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f=None,widget_d=False):
LTsv_libgtk.gtk_widget_set_size_request(widget_o,widget_w,widget_h)
LTsv_libgtk.gtk_fixed_put(window_c,widget_o,widget_x,widget_y)
if widget_f != None:
LTsv_fontDesc=LTsv_libgtk.pango_font_description_from_string(widget_f.encode("utf-8"))
if widget_d:
LTsv_libgtk.gtk_widget_modify_font(LTsv_libgtk.gtk_bin_get_child(widget_o),LTsv_fontDesc)
else:
LTsv_libgtk.gtk_widget_modify_font(widget_o,LTsv_fontDesc)
LTsv_libgtk.pango_font_description_free(LTsv_fontDesc)
def LTsv_tkinter_hideondelete_shell(LTsv_windowPAGENAME):
def tkinter_hideondelete_kernel(window_objvoid=None,window_objptr=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
widget_o.withdraw()
return 0
return tkinter_hideondelete_kernel
LTsv_GTK_WINDOW_TOPLEVEL=0
LTsv_GTK_WIN_POS_CENTER=1
class LTsv_GdkEventKey(ctypes.Structure):
_fields_ = [
('type',ctypes.c_int),
('window',ctypes.c_void_p),
('send_event',ctypes.c_ubyte),
('time',ctypes.c_uint),
('state',ctypes.c_uint),
('keyval',ctypes.c_uint),
]
LTsv_CALLBACLTYPE_GdkEventKey=ctypes.CFUNCTYPE(ctypes.c_void_p,ctypes.POINTER(LTsv_GdkEventKey))
def LTsv_window_new(widget_n=None,event_b=None,widget_t="LTsv_window",widget_w=200,widget_h=120,event_z=None,event_k=None,event_y=None):
global LTsv_widgetLTSV
LTsv_windowPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_windowPAGE=""
LTsv_windowPAGE=LTsv_widgetPAGEXYWH(LTsv_windowPAGE,widget_k="window",widget_t=widget_t,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
window_o=LTsv_libgtk.gtk_window_new(LTsv_GTK_WINDOW_TOPLEVEL)
LTsv_libgtk.gtk_window_set_title(window_o,widget_t.encode("utf-8","xmlcharrefreplace"))
LTsv_libgtk.gtk_widget_set_size_request(window_o,widget_w,widget_h)
LTsv_libgtk.gtk_window_set_resizable(window_o,True if event_z !=None else False)
LTsv_libgtk.gtk_window_set_position(window_o,LTsv_GTK_WIN_POS_CENTER)
widget_c=LTsv_libgtk.gtk_fixed_new()
LTsv_libgtk.gtk_container_add(window_o,widget_c)
event_b_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_libgtk.gtk_widget_hide_on_delete
LTsv_libobj.g_signal_connect_data(window_o,"delete-event".encode("utf-8"),event_b_cbk,0,0,0)
event_z_cbk,event_k_cbk,event_y_cbk=None,None,None
if event_z:
event_z_cbk=LTsv_CALLBACLTYPE(event_z)
LTsv_libobj.g_signal_connect_data(window_o,"configure-event".encode("utf-8"),event_z_cbk,0,0,0)
if event_k:
# event_k_cbk=LTsv_CALLBACLTYPE(event_k)
event_k_cbk=LTsv_CALLBACLTYPE_GdkEventKey(event_k)
LTsv_libobj.g_signal_connect_data(window_o,"key-press-event".encode("utf-8"),event_k_cbk,0,0,0)
if event_y:
# event_y_cbk=LTsv_CALLBACLTYPE(event_y)
event_y_cbk=LTsv_CALLBACLTYPE_GdkEventKey(event_y)
LTsv_libobj.g_signal_connect_data(window_o,"key-release-event".encode("utf-8"),event_y_cbk,0,0,0)
LTsv_windowPAGE=LTsv_widgetPAGEXYWH(LTsv_windowPAGE,widget_o=window_o,widget_t=widget_t,widget_c=widget_c,event_b=event_b_cbk,event_z=event_z_cbk,event_k=event_k_cbk,event_y=event_y_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
window_o=Tk.Tk()
window_o.title(widget_t)
window_o.minsize(widget_w,widget_h)
window_o.geometry("{0}x{1}+{2}+{3}".format(widget_w,widget_h,(window_o.winfo_vrootwidth()-widget_w)//2,(window_o.winfo_vrootheight()-widget_h)//2))
event_b_cbk=event_b if event_b != None else LTsv_tkinter_hideondelete_shell(LTsv_windowPAGENAME)
window_o.protocol("WM_DELETE_WINDOW",event_b_cbk)
if event_z:
window_o.maxsize(window_o.winfo_vrootwidth(),window_o.winfo_vrootheight())
window_o.bind('<Configure>',event_z)
else:
window_o.maxsize(widget_w,widget_h); window_o.resizable(0,0)
if event_k:
window_o.bind('<KeyPress>',event_k)
if event_y:
window_o.bind('<KeyRelease>',event_y)
LTsv_windowPAGE=LTsv_widgetPAGEXYWH(LTsv_windowPAGE,widget_o=window_o,widget_t=widget_t,event_b=event_b_cbk,event_z=event_z,event_k=event_k,event_y=event_y)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_windowPAGENAME,LTsv_windowPAGE)
return LTsv_windowPAGENAME
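# Illustrative sketch (not part of the original file): a minimal program using
# this module would pick a backend, build a window and enter the main loop, e.g.
#
#   LTsv_guiinit(LTsv_GUI_GTK2)                      # falls back to Tkinter if GTK2 is unavailable
#   win = LTsv_window_new(widget_t="demo", widget_w=320, widget_h=200,
#                         event_b=LTsv_window_exit)  # close button exits
#   LTsv_widget_showhide(win, True)
#   LTsv_window_main(win)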
def LTsv_widget_settext(LTsv_widgetPAGENAME,widget_t=""):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
widget_v=None
if widget_k == "window":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_window_set_title(widget_o,widget_t.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.title(widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "label":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_label_set_text(widget_o,widget_t.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_v=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetstringvar")]; widget_v.set(widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "button":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_label_set_text(LTsv_libgtk.gtk_bin_get_child(widget_o),widget_t.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_v=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetstringvar")]; widget_v.set(widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "check":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_label_set_text(LTsv_libgtk.gtk_bin_get_child(widget_o),widget_t.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_v=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetstringvar")]; widget_v.set(widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "radio":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_label_set_text(LTsv_libgtk.gtk_bin_get_child(widget_o),widget_t.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_v=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetstringvar")]; widget_v.set(widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "clipboard":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_clipboard_set_text(widget_o,widget_t.encode("utf-8","xmlcharrefreplace"),-1)
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.clipboard_append(widget_t)
if widget_k == "edit":
if LTsv_GUI == LTsv_GUI_GTK2: widget_v=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetstringvar")]; LTsv_libgtk.gtk_text_buffer_set_text(widget_v,widget_t.encode("utf-8","xmlcharrefreplace"),-1)
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.delete(1.0,Tk.END); widget_o.insert(1.0,widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE)
if widget_k == "entry":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_entry_set_text(widget_o,widget_t.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.delete(0,Tk.END); widget_o.insert(0,widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "scale":
widget_s=int(float(widget_t))
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_range_set_value(widget_o,ctypes.c_double(widget_s))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.set(int(widget_s))
if widget_k == "spin":
widget_s=int(float(widget_t))
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_spin_button_set_value(widget_o,ctypes.c_double(int(float(widget_s))))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.delete(0,Tk.END); widget_o.insert(0,widget_t)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "notify":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_status_icon_set_tooltip(widget_o,widget_t.encode("utf-8"))
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o.szTip=widget_t[:64].encode("utf-8")
LTsv_shell32.Shell_NotifyIcon(ctypes.c_ulong(LTsv_ICON_NIM_MODIFY),ctypes.pointer(widget_o))
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
if widget_k == "combobox":
if LTsv_GUI == LTsv_GUI_GTK2:
if str(widget_o) in LTsv_popupmenuOBJ:
widget_combo=LTsv_popupmenuOBJ[str(widget_o)].split('\n')
widget_s=widget_combo.index(widget_t) if widget_t in widget_combo else 0
LTsv_libgtk.gtk_combo_box_set_active(widget_o,widget_s)
if widget_k == "editcanvas":
LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"editcanvas_deftext")](LTsv_widgetPAGENAME,TT=widget_t)
if widget_k == "filedialog":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_window_set_title(widget_o,widget_t.encode("utf-8","xmlcharrefreplace"))
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_t=widget_t)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
class LTsv_TextIter(ctypes.Structure):
_fields_ = [
('dummy1', ctypes.c_void_p),
('dummy2', ctypes.c_void_p),
('dummy3', ctypes.c_uint),
('dummy4', ctypes.c_uint),
('dummy5', ctypes.c_uint),
('dummy6', ctypes.c_uint),
('dummy7', ctypes.c_uint),
('dummy8', ctypes.c_uint),
('dummy9', ctypes.c_uint),
('dummy10', ctypes.c_void_p),
('dummy11', ctypes.c_void_p),
('dummy12', ctypes.c_uint),
('dummy13', ctypes.c_uint),
('dummy14', ctypes.c_void_p),
]
def LTsv_widget_gettext(LTsv_widgetPAGENAME):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
widget_t=""
if widget_k == "window":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_window_get_title(widget_o)).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=LTsv_readlinerest(LTsv_widgetPAGE,"widgettext")
if widget_k == "label":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_label_get_text(widget_o)).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.cget("text")
if widget_k == "button":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_label_get_text(LTsv_libgtk.gtk_bin_get_child(widget_o))).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.cget("text")
if widget_k == "check":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_label_get_text(LTsv_libgtk.gtk_bin_get_child(widget_o))).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.cget("text")
if widget_k == "radio":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_label_get_text(LTsv_libgtk.gtk_bin_get_child(widget_o))).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.cget("text")
if widget_k == "clipboard":
try:
if LTsv_GUI == LTsv_GUI_GTK2: widget_t="{0}".format(ctypes.c_char_p(LTsv_libgtk.gtk_clipboard_wait_for_text(widget_o)).value.decode("utf-8"))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t="{0}".format(widget_o.clipboard_get())
except:
widget_t=""
if widget_k == "entry":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_entry_get_text(widget_o)).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.get()
if widget_k == "edit":
if LTsv_GUI == LTsv_GUI_GTK2:
widget_v=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetstringvar")]
start_iter=LTsv_TextIter(); end_iter=LTsv_TextIter()
LTsv_libgtk.gtk_text_buffer_get_start_iter(widget_v,ctypes.pointer(start_iter)); LTsv_libgtk.gtk_text_buffer_get_end_iter(widget_v,ctypes.pointer(end_iter))
widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_text_buffer_get_text(widget_v,ctypes.pointer(start_iter),ctypes.pointer(end_iter),True)).value.decode("utf-8");
# LTsv_libgtk.gtk_text_iter_free(ctypes.pointer(start_iter)); LTsv_libgtk.gtk_text_iter_free(ctypes.pointer(end_iter))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.get(1.0,Tk.END)
if widget_k == "scale":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=str(int(ctypes.c_double(LTsv_libgtk.gtk_range_get_value(widget_o)).value))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=str(widget_o.get())
if widget_k == "spin":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=str(int(ctypes.c_int(LTsv_libgtk.gtk_spin_button_get_value_as_int(widget_o)).value))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=str(widget_o.get())
if widget_k == "notify":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=LTsv_readlinerest(LTsv_widgetPAGE,"widgettext")
if widget_k == "combobox":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_combo_box_text_get_active_text(widget_o)).value.decode("utf-8") if LTsv_libgtk.gtk_tree_model_iter_n_children(LTsv_libgtk.gtk_combo_box_get_model(widget_o),None) > 0 else ""
if widget_k == "editcanvas":
widget_t=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"editcanvas_gettext")](LTsv_widgetPAGENAME)
if widget_k == "filedialog":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_window_get_title(widget_o)).value.decode("utf-8")
return widget_t
def LTsv_widget_setnumber(LTsv_widgetPAGENAME,widget_s=0):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
widget_v=None
if widget_k == "check":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_toggle_button_set_active(widget_o,ctypes.c_int(min(max(int(float(widget_s)),0),1)))
if LTsv_GUI == LTsv_GUI_Tkinter: LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetbooleanvar")].set(True if int(float(widget_s)) !=0 else False)
if widget_k == "radio":
if LTsv_GUI == LTsv_GUI_GTK2:
radio_group=LTsv_libgtk.gtk_radio_button_get_group(widget_o)
radio_len=LTsv_libgtk.g_slist_length(radio_group); widget_s=min(max(int(float(widget_s)),0),radio_len-1)
LTsv_libgtk.gtk_toggle_button_set_active(LTsv_libgtk.g_slist_nth_data(radio_group,radio_len-widget_s-1),ctypes.c_int(1))
if LTsv_GUI == LTsv_GUI_Tkinter: LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetbooleanvar")].set(widget_s)
if widget_k == "entry":
LTsv_widget_settext(LTsv_widgetPAGENAME,widget_t="{0}".format(widget_s))
if widget_k == "edit":
LTsv_widget_settext(LTsv_widgetPAGENAME,widget_t="{0}".format(widget_s))
if widget_k == "scale":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_range_set_value(widget_o,ctypes.c_double(int(float(widget_s))))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.set(int(widget_s))
if widget_k == "spin":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_spin_button_set_value(widget_o,ctypes.c_double(int(float(widget_s))))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_o.delete(0,Tk.END); widget_o.insert(0,str(widget_s))
if widget_k == "combobox":
if LTsv_GUI == LTsv_GUI_GTK2: LTsv_libgtk.gtk_combo_box_set_active(widget_o,max(min(widget_s,LTsv_libgtk.gtk_tree_model_iter_n_children(LTsv_libgtk.gtk_combo_box_get_model(widget_o),None)-1),0))
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
def LTsv_widget_getnumber(LTsv_widgetPAGENAME):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
widget_s=0
if widget_k == "check":
if LTsv_GUI == LTsv_GUI_GTK2: widget_s=ctypes.c_int(LTsv_libgtk.gtk_toggle_button_get_active(widget_o)).value
if LTsv_GUI == LTsv_GUI_Tkinter: widget_s=1 if LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetbooleanvar")].get() == True else 0
if widget_k == "radio":
if LTsv_GUI == LTsv_GUI_GTK2:
radio_group=LTsv_libgtk.gtk_radio_button_get_group(widget_o)
radio_len=LTsv_libgtk.g_slist_length(radio_group); widget_s=radio_len
for radio_count in range(radio_len):
if ctypes.c_int(LTsv_libgtk.gtk_toggle_button_get_active(LTsv_libgtk.g_slist_nth_data(radio_group,radio_count))).value:
widget_s=radio_len-radio_count-1
if LTsv_GUI == LTsv_GUI_Tkinter: widget_s=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetbooleanvar")].get()
if widget_k == "entry":
if LTsv_GUI == LTsv_GUI_GTK2: widget_t=ctypes.c_char_p(LTsv_libgtk.gtk_entry_get_text(widget_o)).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_t=widget_o.get()
widget_s=int(widget_t) if widget_t.isdecimal() else 0
if widget_k == "scale":
if LTsv_GUI == LTsv_GUI_GTK2: widget_s=int(float(ctypes.c_double(LTsv_libgtk.gtk_range_get_value(widget_o)).value))
if LTsv_GUI == LTsv_GUI_Tkinter: widget_s=int(widget_o.get())
if widget_k == "spin":
if LTsv_GUI == LTsv_GUI_GTK2: widget_s=int(ctypes.c_int(LTsv_libgtk.gtk_spin_button_get_value_as_int(widget_o)).value)
if LTsv_GUI == LTsv_GUI_Tkinter: widget_s=LTsv_intstr0x(widget_o.get())
if widget_k == "combobox":
if LTsv_GUI == LTsv_GUI_GTK2: widget_s=LTsv_libgtk.gtk_combo_box_get_active(widget_o)
return widget_s
def LTsv_widget_seturi(LTsv_widgetPAGENAME,widget_u=""):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
if widget_k == "image":
if LTsv_GUI == LTsv_GUI_GTK2:
widget_p=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetphotoimage")]
LTsv_libgtk.gtk_image_set_from_file(widget_p,widget_u.encode("utf-8","xmlcharrefreplace"))
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_p=Tk.PhotoImage(file=widget_u)
widget_o.configure(image=widget_p)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_u=widget_u,widget_p=widget_p)
if widget_k == "notify":
if LTsv_GUI == LTsv_GUI_GTK2:
picture_o=LTsv_pictureOBJ[widget_u] if widget_u in LTsv_pictureOBJ else LTsv_draw_picture_load(widget_u)
if picture_o != None:
LTsv_libgtk.gtk_status_icon_set_from_pixbuf(widget_o,picture_o)
if LTsv_GUI == LTsv_GUI_Tkinter:
icon_o=LTsv_iconOBJ[widget_u] if widget_u in LTsv_iconOBJ else LTsv_icon_load(widget_u)
if icon_o != None:
widget_o.hIcon=icon_o
LTsv_shell32.Shell_NotifyIcon(ctypes.c_ulong(LTsv_ICON_NIM_MODIFY),ctypes.pointer(widget_o))
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_u=widget_u)
if widget_k == "editcanvas":
LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"editcanvas_deftext")](LTsv_widgetPAGENAME,UT=widget_u)
if widget_k == "filedialog":
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_u=widget_u)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
def LTsv_widget_geturi(LTsv_widgetPAGENAME):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
widget_u=""
if widget_k == "image":
widget_u=LTsv_readlinerest(LTsv_widgetPAGE,"widgeturi")
if widget_k == "notify":
widget_u=LTsv_readlinerest(LTsv_widgetPAGE,"widgeturi")
if widget_k == "editcanvas":
widget_u=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"editcanvas_geturi")](LTsv_widgetPAGENAME)
if widget_k == "filedialog":
try:
if LTsv_GUI == LTsv_GUI_GTK2: widget_u=ctypes.c_char_p(LTsv_libgtk.gtk_file_chooser_get_filename(widget_o)).value.decode("utf-8")
if LTsv_GUI == LTsv_GUI_Tkinter: widget_u=LTsv_readlinerest(LTsv_widgetPAGE,"widgeturi")
except:
widget_u=""
return widget_u
def LTsv_widget_showhide(LTsv_widgetPAGENAME,widget_i):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_windowPAGE,"widgetkind")
if widget_k == "window":
if LTsv_GUI == LTsv_GUI_GTK2:
if widget_i:
LTsv_libgtk.gtk_widget_show_all(widget_o)
else:
LTsv_libobj.g_signal_emit_by_name(widget_o,"delete-event".encode("utf-8"),0,0)
if LTsv_GUI == LTsv_GUI_Tkinter:
if widget_i:
widget_o.deiconify()
else:
widget_o.withdraw()
elif widget_k == "filedialog":
if LTsv_GUI == LTsv_GUI_GTK2:
if widget_i:
LTsv_libgtk.gtk_widget_show_all(widget_o)
else:
LTsv_libgtk.gtk_widget_hide(widget_o)
if LTsv_GUI == LTsv_GUI_Tkinter:
if widget_i:
widget_o()
else:
pass
else:
if LTsv_GUI == LTsv_GUI_GTK2:
if widget_i:
LTsv_libgtk.gtk_widget_show_all(widget_o)
else:
LTsv_libgtk.gtk_widget_hide(widget_o)
if LTsv_GUI == LTsv_GUI_Tkinter:
if widget_i:
pass
else:
pass
return 0
def LTsv_widget_disableenable(LTsv_widgetPAGENAME,widget_i):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_windowPAGE,"widgetkind")
if widget_k != "window":
if LTsv_GUI == LTsv_GUI_GTK2:
if widget_i:
LTsv_libgtk.gtk_widget_set_sensitive(widget_o,True)
else:
LTsv_libgtk.gtk_widget_set_sensitive(widget_o,False)
if LTsv_GUI == LTsv_GUI_Tkinter:
if widget_i:
widget_o.configure(state=Tk.NORMAL)
else:
widget_o.configure(state=Tk.DISABLED)
return 0
def LTsv_widget_focus(LTsv_widgetPAGENAME):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_libgtk.gtk_widget_grab_focus(widget_o);
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o.focus_set()
return 0
def LTsv_window_main(LTsv_windowPAGENAME):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_libgtk.gtk_main()
if LTsv_GUI == LTsv_GUI_Tkinter:
window_o.mainloop()
def LTsv_window_after(LTsv_windowPAGENAME,event_b=None,event_i="mousemotion",event_w=1000):
global LTsv_widgetLTSV,LTsv_timerOBJ,LTsv_timer_cbk
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
if LTsv_GUI == LTsv_GUI_GTK2:
if event_i in LTsv_timerOBJ:
if LTsv_timerOBJ[event_i] != None:
# LTsv_libobj.g_source_remove(LTsv_timerOBJ[event_i])
LTsv_timerOBJ[event_i]=None
LTsv_timer_cbk[event_i]=None
if event_b != None:
LTsv_timer_cbk[event_i]=LTsv_CALLBACLTYPE(event_b)
LTsv_timerOBJ[event_i]=LTsv_libobj.g_timeout_add(max(event_w,10),LTsv_timer_cbk[event_i],None)
if LTsv_GUI == LTsv_GUI_Tkinter:
if event_i in LTsv_timerOBJ:
window_o.after_cancel(LTsv_timerOBJ[event_i])
if event_b != None:
LTsv_timerOBJ[event_i]=window_o.after(max(event_w,10),event_b)
return 0
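# Illustrative sketch (not part of the original file): LTsv_window_after
# schedules event_b to run after roughly event_w milliseconds (minimum 10 ms)
# under the id event_i; on the Tkinter backend, re-calling with the same id
# cancels the pending timer first, e.g.
#
#   LTsv_window_after(win, event_b=my_tick, event_i="tick", event_w=1000)
#   # win and my_tick are caller-defined (hypothetical) names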
def LTsv_window_foreground():
LTsv_window_activeID=""
if sys.platform.startswith("linux"):
LTsv_xprop=LTsv_subprocess("xprop -root")
LTsv_posL=LTsv_xprop.find("_NET_ACTIVE_WINDOW(WINDOW)"); LTsv_posC=LTsv_xprop.find("# 0x",LTsv_posL); LTsv_posR=LTsv_xprop.find('\n',LTsv_posL)
LTsv_window_activeID=LTsv_xprop[LTsv_posC+len("# "):LTsv_posR]
if sys.platform.startswith("win"):
LTsv_window_activeID="{0:#x}".format(LTsv_user32.GetForegroundWindow())
return LTsv_window_activeID
def LTsv_window_title(LTsv_window_id):
if sys.platform.startswith("linux"):
LTsv_xwininfo=LTsv_subprocess("xwininfo -id {0}".format(LTsv_window_id))
LTsv_posL=LTsv_xwininfo.find("xwininfo: Window id: {0}".format(LTsv_window_id)); LTsv_posC=LTsv_xwininfo.find('"',LTsv_posL); LTsv_posR=LTsv_xwininfo.find('\n',LTsv_posL)
LTsv_window_titleID=LTsv_xwininfo[LTsv_posC+len('"'):LTsv_posR-len('"')]
if sys.platform.startswith("win"):
LTsv_window_titleID=""
LTsv_window_titlelen=LTsv_user32.GetWindowTextLengthW(ctypes.c_int(int(LTsv_window_id,16)))+1
LTsv_window_titlebuf=ctypes.create_unicode_buffer(LTsv_window_titlelen)
LTsv_window_titleID=LTsv_window_titlebuf.value if LTsv_user32.GetWindowTextW(ctypes.c_int(int(LTsv_window_id,16)),LTsv_window_titlebuf,ctypes.sizeof(LTsv_window_titlebuf)) > 0 else ""
return LTsv_window_titleID
def LTsv_window_exit(window_objvoid=None,window_objptr=None):
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_libgtk.gtk_exit(0)
if LTsv_GUI == LTsv_GUI_Tkinter:
sys.exit(0)
return 0
LTsv_window_exit_cbk=LTsv_CALLBACLTYPE(LTsv_window_exit)
def LTsv_window_none(window_objvoid=None,window_objptr=None):
return 0
LTsv_window_none_cbk=LTsv_CALLBACLTYPE(LTsv_window_none)
def LTsv_screen_w(LTsv_windowPAGENAME):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
screen_w=-1
if LTsv_GUI == LTsv_GUI_GTK2:
screen_w=LTsv_libgtk.gdk_screen_get_width(LTsv_libgtk.gdk_screen_get_default())
if LTsv_GUI == LTsv_GUI_Tkinter:
if window_o!=None:
screen_w=window_o.winfo_vrootwidth()
return screen_w
def LTsv_screen_h(LTsv_windowPAGENAME):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
screen_h=-1
if LTsv_GUI == LTsv_GUI_GTK2:
screen_h=LTsv_libgtk.gdk_screen_height(LTsv_libgtk.gdk_screen_get_default())
if LTsv_GUI == LTsv_GUI_Tkinter:
if window_o!=None:
screen_h=window_o.winfo_vrootheight()
return screen_h
class LTsv_WINDOW_WIDTH(ctypes.Structure):
_fields_ = [ ('width', ctypes.c_uint) ]
class LTsv_WINDOW_HEIGHT(ctypes.Structure):
_fields_ = [ ('height', ctypes.c_uint) ]
def LTsv_window_wh(LTsv_windowPAGENAME):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
window_w,window_h=0,0
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_window_width,LTsv_window_height=LTsv_WINDOW_WIDTH(),LTsv_WINDOW_HEIGHT()
LTsv_libgtk.gtk_window_get_size(window_o,ctypes.byref(LTsv_window_width),ctypes.byref(LTsv_window_height))
window_w,window_h=LTsv_window_width.width,LTsv_window_height.height
if LTsv_GUI == LTsv_GUI_Tkinter:
window_w,window_h=window_o.winfo_width(),window_o.winfo_height()
return window_w,window_h
def LTsv_window_w(LTsv_windowPAGENAME):
window_w,window_h=LTsv_window_wh(LTsv_windowPAGENAME)
return window_w
def LTsv_window_h(LTsv_windowPAGENAME):
window_w,window_h=LTsv_window_wh(LTsv_windowPAGENAME)
return window_h
def LTsv_window_resize(LTsv_windowPAGENAME,widget_w=16,widget_h=16):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_libgtk.gtk_window_resize(window_o,widget_w,widget_h)
if LTsv_GUI == LTsv_GUI_Tkinter:
window_o.geometry("{0}x{1}".format(widget_w,widget_h))
def LTsv_label_new(LTsv_windowPAGENAME,widget_n=None,widget_t="LTsv_label",widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="label",widget_t=widget_t,widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_label_new(widget_t.encode("utf-8","xmlcharrefreplace"))
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,False)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_v=Tk.StringVar()
widget_v.set(widget_t)
widget_o=Tk.Label(window_o,textvariable=widget_v,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_v=widget_v)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_image_new(LTsv_windowPAGENAME,widget_n=None,widget_t="LTsv_logo.png",widget_x=0,widget_y=0):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="image",widget_x=widget_x,widget_y=widget_y)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_p=LTsv_libgtk.gtk_image_new_from_file(widget_t.encode("utf-8","xmlcharrefreplace"))
widget_o=LTsv_libgtk.gtk_event_box_new()
LTsv_libgtk.gtk_container_add(widget_o,widget_p)
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_libgtk.gtk_fixed_put(window_c,widget_o,widget_x,widget_y)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_p=widget_p)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_p=Tk.PhotoImage(file=widget_t)
widget_o=Tk.Label(window_o,image=widget_p)
widget_o.place(x=widget_x,y=widget_y)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_p=widget_p)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_button_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_t="LTsv_button",widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="button",widget_t=widget_t,widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_button_new_with_label(widget_t.encode("utf-8","xmlcharrefreplace"))
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,True)
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"clicked".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_v=Tk.StringVar()
widget_v.set(widget_t)
widget_o=Tk.Button(window_o,textvariable=widget_v,command=event_b,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_v=widget_v,event_b=event_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
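# Usage sketch (illustration only, not called anywhere): create a button on an
# existing window page and react to clicks. "window_page" is assumed to be the
# page name returned by the library's window constructor defined elsewhere in
# this file; the callback signature matches the debug callbacks near the end.
#
#	def on_click(window_objvoid=None,window_objptr=None):
#		LTsv_libc_printf("button pressed")
#	button_page=LTsv_button_new(window_page,widget_t="OK",event_b=on_click,
#		widget_x=8,widget_y=8,widget_w=96,widget_h=32)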
def LTsv_check_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_t="LTsv_check",widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="check",widget_t=widget_t,widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_check_button_new_with_label(widget_t.encode("utf-8","xmlcharrefreplace"))
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,True)
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"clicked".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_v=Tk.StringVar()
widget_v.set(widget_t)
widget_b=Tk.BooleanVar()
widget_b.set(False)
widget_o=Tk.Checkbutton(window_o,textvariable=widget_v,variable=widget_b,command=event_b,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_v=widget_v,widget_b=widget_b,event_b=event_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_radio_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_t="LTsv_radio",widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_radioPAGENAME=LTsv_widget_newUUID(False)
LTsv_radioPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_radioPAGENAME)
radio_k=LTsv_readlinerest(LTsv_radioPAGE,"widgetkind")
radio_o=None if radio_k != "radio" else LTsv_widgetOBJ[LTsv_readlinerest(LTsv_radioPAGE,"widgetobj")]
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="radio",widget_t=widget_t,widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_radio_button_new_with_label_from_widget(radio_o,widget_t.encode("utf-8","xmlcharrefreplace"))
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,True)
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"clicked".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_v=Tk.StringVar()
widget_v.set(widget_t)
if radio_k != "radio":
widget_b=Tk.IntVar(); widget_b.set(0)
else:
widget_b=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_radioPAGE,"widgetbooleanvar")]; widget_b.set(widget_b.get()+1)
widget_o=Tk.Radiobutton(window_o,textvariable=widget_v,variable=widget_b,value=widget_b.get(),command=event_b,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_v=widget_v,widget_b=widget_b,event_b=event_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_clipboard_new(LTsv_windowPAGENAME,widget_n=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="clipboard")
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_clipboard_get(LTsv_libgtk.gdk_atom_intern("CLIPBOARD".encode("utf-8"),0))
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o=window_o
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_clipmenu_new(widget_o):
global LTsv_popupmenuOBJ
menu_o=Tk.Menu(widget_o,tearoff=False)
menu_o.add_cascade(label="Ctrl+X(Cut)")
menu_o.add_cascade(label='Ctrl+C(Copy)')
menu_o.add_cascade(label='Ctrl+P(Paste)')
menu_o.add_cascade(label='Ctrl+A(SelectAll)')
LTsv_popupmenuOBJ[str(widget_o)]=menu_o
def LTsv_entry_copypopup_show(event):
global LTsv_popupmenuOBJ
window_o=LTsv_popupmenuOBJ[str(event.widget)]
window_o.post(event.x_root,event.y_root)
window_o.entryconfigure("Ctrl+X(Cut)",command=lambda: event.widget.event_generate("<<Cut>>"))
window_o.entryconfigure("Ctrl+C(Copy)",command=lambda: event.widget.event_generate("<<Copy>>"))
window_o.entryconfigure("Ctrl+P(Paste)",command=lambda: event.widget.event_generate("<<Paste>>"))
window_o.entryconfigure("Ctrl+A(SelectAll)",command=lambda: event.widget.event_generate("<<SelectAll>>"))
menu_b=LTsv_entry_copypopup_show
return menu_o,menu_b
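# Note: LTsv_clipmenu_new() builds a Tkinter-only context menu and returns both
# the Tk.Menu object and the <Button-3> handler; the edit/entry/spin
# constructors below bind that handler themselves, so it normally does not need
# to be called directly.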
LTsv_G_TYPE_STRING=64
LTsv_GTK_SELECTION_SINGLE=1
LTsv_GTK_POLICY_AUTOMATIC=1
LTsv_GTK_SHADOW_ETCHED_IN=3
def LTsv_edit_new(LTsv_windowPAGENAME,widget_n=None,widget_t="LTsv_edit",widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="edit",widget_t=widget_t,widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_scrolled_window_new(0,0)
widget_v=LTsv_libgtk.gtk_text_buffer_new(0)
widget_c=LTsv_libgtk.gtk_text_view_new_with_buffer(widget_v)
LTsv_libgtk.gtk_scrolled_window_set_policy(widget_o,LTsv_GTK_POLICY_AUTOMATIC,LTsv_GTK_POLICY_AUTOMATIC)
LTsv_libgtk.gtk_scrolled_window_set_shadow_type(widget_o,LTsv_GTK_SHADOW_ETCHED_IN)
LTsv_libgtk.gtk_container_add(widget_o,widget_c)
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,True)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_v=widget_v,widget_c=widget_c)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o=Tk_sc.ScrolledText(window_o,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o)
menu_o,menu_b=LTsv_clipmenu_new(widget_o)
widget_o.bind('<Button-3>',menu_b)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,menu_o=menu_o,menu_b=menu_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_entry_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_t="LTsv_entry",widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="entry",widget_t=widget_t,widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_entry_new()
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,False)
LTsv_libgtk.gtk_entry_set_text(widget_o,widget_t.encode("utf-8","xmlcharrefreplace"))
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"activate".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_v=Tk.StringVar()
widget_v.set(widget_t)
widget_o=Tk.Entry(window_o,textvariable=widget_v,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
if event_b != None:
widget_o.bind('<Return>',event_b)
menu_o,menu_b=LTsv_clipmenu_new(widget_o)
widget_o.bind('<Button-3>',menu_b)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_v=widget_v,event_b=event_b,menu_o=menu_o,menu_b=menu_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_spin_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_s=0,widget_e=255,widget_a=1,widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="spin",widget_f=widget_f,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_spin_button_new_with_range(ctypes.c_double(widget_s),ctypes.c_double(widget_e),ctypes.c_double(widget_a))
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,False)
		LTsv_libgtk.gtk_spin_button_set_value(widget_o,ctypes.c_double(widget_s))	# gtk_spin_button_set_value() expects a gdouble, not an encoded string
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"activate".encode("utf-8"),widget_cbk,2,0,0)
LTsv_libobj.g_signal_connect_data(widget_o,"value-changed".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o=Tk.Spinbox(window_o,from_=widget_s,to=widget_e,increment=widget_a,command=event_b,font=LTsv_fonttuple(widget_f))
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=event_b)
if event_b != None:
widget_o.bind('<Return>',event_b)
menu_o,menu_b=LTsv_clipmenu_new(widget_o)
widget_o.bind('<Button-3>',menu_b)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=event_b,menu_o=menu_o,menu_b=menu_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_scale_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_s=0,widget_e=255,widget_a=1,widget_x=0,widget_y=0,widget_w=16,widget_h=16):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="scale",widget_s=widget_s,widget_e=widget_e,widget_a=widget_a,widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
LTsv_widget_orient='v' if widget_w < widget_h else 'h'
if LTsv_GUI == LTsv_GUI_GTK2:
if widget_w < widget_h:
widget_o=LTsv_libgtk.gtk_vscale_new_with_range(ctypes.c_double(widget_s),ctypes.c_double(widget_e),ctypes.c_double(widget_a))
else:
widget_o=LTsv_libgtk.gtk_hscale_new_with_range(ctypes.c_double(widget_s),ctypes.c_double(widget_e),ctypes.c_double(widget_a))
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h)
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"value-changed".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o=Tk.Scale(window_o,orient=('v' if widget_w < widget_h else 'h'),from_=widget_s,to=widget_e,command=event_b)
widget_o.place(x=widget_x,y=widget_y,width=widget_w,height=widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=event_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
def LTsv_scale_adjustment(LTsv_widgetPAGENAME,widget_s=0,widget_e=255,widget_a=1):
global LTsv_widgetLTSV,LTsv_popupmenuOBJ
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
if widget_k == "scale":
if LTsv_GUI == LTsv_GUI_GTK2:
widget_v=int(ctypes.c_double(LTsv_libgtk.gtk_range_get_value(widget_o)).value)
adjustment_o=LTsv_libgtk.gtk_adjustment_new(ctypes.c_double(max(min(widget_v,widget_e),widget_s)),ctypes.c_double(widget_s),ctypes.c_double(widget_e),ctypes.c_double(widget_a),ctypes.c_double(1),ctypes.c_double(0),)
LTsv_libgtk.gtk_range_set_adjustment(widget_o,adjustment_o)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o.configure(from_=widget_s,to=widget_e)
if widget_k == "spin":
if LTsv_GUI == LTsv_GUI_GTK2:
widget_v=int(ctypes.c_double(LTsv_libgtk.gtk_spin_button_get_value_as_int(widget_o)).value)
adjustment_o=LTsv_libgtk.gtk_adjustment_new(ctypes.c_double(max(min(widget_v,widget_e),widget_s)),ctypes.c_double(widget_s),ctypes.c_double(widget_e),ctypes.c_double(widget_a),ctypes.c_double(1),ctypes.c_double(0),)
LTsv_libgtk.gtk_spin_button_set_adjustment(widget_o,adjustment_o)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o.configure(from_=widget_s,to=widget_e)
return 0
def LTsv_combobox_list(LTsv_widgetPAGENAME,widget_t=""):
global LTsv_widgetLTSV,LTsv_popupmenuOBJ
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
LTsv_combobox_len=LTsv_libgtk.gtk_tree_model_iter_n_children(LTsv_libgtk.gtk_combo_box_get_model(widget_o),None)
for LTsv_combobox_id in range(LTsv_combobox_len):
LTsv_libgtk.gtk_combo_box_text_remove(widget_o,LTsv_combobox_len-LTsv_combobox_id-1)
for LTsv_combobox_text in widget_t.split('\n'):
if len(LTsv_combobox_text):
LTsv_libgtk.gtk_combo_box_text_append_text(widget_o,LTsv_combobox_text.encode("utf-8","xmlcharrefreplace"))
LTsv_libgtk.gtk_combo_box_set_active(widget_o,0)
LTsv_popupmenuOBJ[str(widget_o)]=widget_t
return 0
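# Note: LTsv_combobox_list() calls the GTK combo-box API unconditionally, so it
# is effectively GTK2-only; the Tkinter branch of LTsv_combobox_new() below is
# still a stub ("pass #Tkk").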
def LTsv_combobox_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_x=0,widget_y=0,widget_w=16,widget_h=16,widget_f=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="combobox",widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_o=LTsv_libgtk.gtk_combo_box_text_new()
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_GTKwidget_fixed(window_c,widget_o,widget_x,widget_y,widget_w,widget_h,widget_f,True)
widget_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"changed".encode("utf-8"),widget_cbk,2,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,event_b=widget_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
pass #Tkk
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
# typedef struct{ unsigned int pixel; unsigned short red; unsigned short green; unsigned short blue; }GdkColor;
class LTsv_GDKCOLOR(ctypes.Structure):
_fields_ = [
('pixel',ctypes.c_uint),
('colorR',ctypes.c_ushort),
('colorG',ctypes.c_ushort),
('colorB',ctypes.c_ushort)
]
def __init__(self):
LTsv_libgdk.gdk_color_parse("#ffffff".encode("utf-8"),ctypes.pointer(self))
# typedef struct{ int x; int y; }GdkPoint;
class LTsv_GDKPOINT(ctypes.Structure):
_fields_ = [
('X',ctypes.c_int),('Y',ctypes.c_int)
]
LTsv_GDK_POINTER_MOTION_MASK=1<<2
LTsv_GDK_BUTTON_RELEASE_MASK=1<<9
LTsv_GDK_KEY_PRESS_MASK= 1<<10
LTsv_GDK_SCROLL_MASK= 1<<21
LTsv_GDK_ARCFILL=23040
LTsv_canvascolor,LTsv_canvasbgcolor="black","white"
def LTsv_canvas_new(LTsv_windowPAGENAME,widget_n=None,widget_x=0,widget_y=0,widget_w=16,widget_h=16,event_p=None,event_r=None,event_e=None,event_m=None,event_l=None,event_w=100):
# global LTsv_widgetLTSV,canvas_CBKtimeout,canvas_EMLenter,canvas_EMLmotion,canvas_EMLleave
global LTsv_widgetLTSV
global canvas_EMLenter,canvas_EMLmotion,canvas_EMLleave
global canvas_CBKenter,canvas_CBKmotion,canvas_CBKleave,canvas_CBKtimeout,canvas_CBKafter,LTsv_canvasCBKpagename
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="canvas",widget_x=widget_x,widget_y=widget_y,widget_w=widget_w,widget_h=widget_h)
if LTsv_GUI == LTsv_GUI_GTK2:
widget_p=LTsv_libgtk.gtk_image_new()
widget_o=LTsv_libgtk.gtk_event_box_new()
LTsv_libgtk.gtk_container_add(widget_o,widget_p)
window_c=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetcontainer")]
LTsv_libgtk.gtk_fixed_put(window_c,widget_o,widget_x,widget_y)
widget_m=LTsv_libgdk.gdk_pixmap_new(LTsv_libgdk.gdk_get_default_root_window(),widget_w,widget_h,-1)
LTsv_libgtk.gtk_image_set_from_pixmap(widget_p,widget_m,0)
widget_g=LTsv_libgdk.gdk_gc_new(widget_m)
widget_gccolor=LTsv_GDKCOLOR(); LTsv_libgdk.gdk_color_parse("white".encode("utf-8"),ctypes.pointer(widget_gccolor))
LTsv_libgdk.gdk_gc_set_rgb_bg_color(widget_g,ctypes.pointer(widget_gccolor))
LTsv_libgdk.gdk_gc_set_rgb_fg_color(widget_g,ctypes.pointer(widget_gccolor))
LTsv_libgdk.gdk_draw_rectangle(widget_m,widget_g,True,0,0,widget_w,widget_h)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_p=widget_p,widget_m=widget_m,widget_g=widget_g)
if LTsv_GUI == LTsv_GUI_Tkinter:
widget_o=Tk.Canvas(window_o,width=widget_w,height=widget_h,bg="white")
widget_o.place(x=widget_x,y=widget_y)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o)
def LTsv_canvas_enter(window_objvoid=None,window_objptr=None):
global LTsv_canvas_motion_X,LTsv_canvas_motion_Y,LTsv_canvas_motion_Z
global canvas_CBKafter
if canvas_CBKafter[LTsv_widgetPAGENAME] == False:
canvas_CBKafter[LTsv_widgetPAGENAME]=True; LTsv_canvas_motion_Z=LTsv_widgetPAGENAME
if canvas_EMLenter[LTsv_widgetPAGENAME] != None:
LTsv_window_after(LTsv_windowPAGENAME,event_b=canvas_EMLenter[LTsv_widgetPAGENAME],event_i="{0}_enter".format(LTsv_canvasCBKpagename[LTsv_widgetPAGENAME]),event_w=event_w)
LTsv_canvas_timeout()
return 0
def LTsv_canvas_motion(window_objvoid=None,window_objptr=None):
global LTsv_canvas_motion_X,LTsv_canvas_motion_Y,LTsv_canvas_motion_Z
if LTsv_GUI == LTsv_GUI_GTK2:
mouse_x,mouse_y=ctypes.c_int(),ctypes.c_int(); LTsv_libgdk.gdk_window_at_pointer(ctypes.byref(mouse_x),ctypes.byref(mouse_y))
LTsv_canvas_motion_X,LTsv_canvas_motion_Y=int(mouse_x.value),int(mouse_y.value)
if LTsv_GUI == LTsv_GUI_Tkinter:
if window_objvoid != None:
mouse_x,mouse_y=window_objvoid.x,window_objvoid.y
LTsv_canvas_motion_X,LTsv_canvas_motion_Y=int(mouse_x),int(mouse_y)
return 0
def LTsv_canvas_timeout(window_objvoid=None,window_objptr=None):
global canvas_CBKafter
if canvas_CBKafter[LTsv_widgetPAGENAME] == True:
if canvas_EMLmotion[LTsv_widgetPAGENAME] != None: canvas_EMLmotion[LTsv_widgetPAGENAME]()
LTsv_window_after(LTsv_windowPAGENAME,event_b=LTsv_canvas_timeout,event_i="{0}_motion".format(LTsv_canvasCBKpagename[LTsv_widgetPAGENAME]),event_w=event_w)
return 0
def LTsv_canvas_leave(window_objvoid=None,window_objptr=None):
global LTsv_canvas_motion_X,LTsv_canvas_motion_Y,LTsv_canvas_motion_Z
global canvas_CBKafter,LTsv_canvasCBKpagename
canvas_CBKafter[LTsv_widgetPAGENAME]=False; LTsv_canvas_motion_Z=""
if canvas_EMLleave[LTsv_widgetPAGENAME] != None:
LTsv_window_after(LTsv_windowPAGENAME,event_b=canvas_EMLleave[LTsv_widgetPAGENAME],event_i="{0}_leave".format(LTsv_canvasCBKpagename[LTsv_widgetPAGENAME]),event_w=event_w)
LTsv_canvas_motion_X,LTsv_canvas_motion_Y=-1,-1
return 0
canvas_EMLenter[LTsv_widgetPAGENAME],canvas_EMLmotion[LTsv_widgetPAGENAME],canvas_EMLleave[LTsv_widgetPAGENAME]=event_e,event_m,event_l
canvas_CBKenter[LTsv_widgetPAGENAME],canvas_CBKmotion[LTsv_widgetPAGENAME],canvas_CBKleave[LTsv_widgetPAGENAME]=LTsv_canvas_enter,LTsv_canvas_motion,LTsv_canvas_leave
canvas_CBKtimeout[LTsv_widgetPAGENAME],canvas_CBKafter[LTsv_widgetPAGENAME],LTsv_canvasCBKpagename[LTsv_widgetPAGENAME]=LTsv_canvas_timeout,False,LTsv_widgetPAGENAME
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_libgtk.gtk_widget_set_events(widget_o,LTsv_GDK_POINTER_MOTION_MASK)
event_p_cbk=LTsv_CALLBACLTYPE(event_p) if event_p != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"button-press-event".encode("utf-8"),event_p_cbk,0,0,0)
event_r_cbk=LTsv_CALLBACLTYPE(event_r) if event_r != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"button-release-event".encode("utf-8"),event_r_cbk,0,0,0)
event_e_cbk=LTsv_CALLBACLTYPE(canvas_CBKenter[LTsv_widgetPAGENAME]) if LTsv_canvas_enter != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"enter-notify-event".encode("utf-8"),event_e_cbk,0,0,0)
event_m_cbk=LTsv_CALLBACLTYPE(canvas_CBKmotion[LTsv_widgetPAGENAME]) if LTsv_canvas_motion != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"motion-notify-event".encode("utf-8"),event_m_cbk,0,0,0)
event_l_cbk=LTsv_CALLBACLTYPE(canvas_CBKleave[LTsv_widgetPAGENAME]) if LTsv_canvas_leave != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"leave-notify-event".encode("utf-8"),event_l_cbk,0,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,event_p=event_p_cbk,event_r=event_r_cbk,event_e=event_e_cbk,event_m=event_m_cbk,event_l=event_l_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
if event_p != None:
widget_o.bind("<ButtonPress>",event_p)
if event_r != None:
widget_o.bind("<ButtonRelease>",event_r)
widget_o.bind("<Enter>",canvas_CBKenter[LTsv_widgetPAGENAME])
widget_o.bind("<Motion>",canvas_CBKmotion[LTsv_widgetPAGENAME])
widget_o.bind("<Leave>",canvas_CBKleave[LTsv_widgetPAGENAME])
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,event_p=event_p,event_r=event_r,event_e=LTsv_canvas_enter,event_m=LTsv_canvas_motion,event_l=LTsv_canvas_leave)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
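# Usage sketch (illustration only): a canvas with a periodic redraw, in the same
# style as debug_canvas() near the end of this file. "window_page" is assumed to
# be an existing window page name; LTsv_draw_selcanvas/LTsv_draw_delete/
# LTsv_draw_color/LTsv_draw_text/LTsv_draw_queue are the backend functions bound
# elsewhere in this file through the *_shell selectors defined below.
#
#	def redraw(window_objvoid=None,window_objptr=None):
#		LTsv_draw_selcanvas(canvas_page)
#		LTsv_draw_delete("white")
#		LTsv_draw_color("black"); LTsv_draw_text("hello",draw_x=4,draw_y=4)
#		LTsv_draw_queue()
#		LTsv_window_after(window_page,event_b=redraw,event_i="redraw",event_w=50)
#	canvas_page=LTsv_canvas_new(window_page,widget_x=0,widget_y=0,widget_w=320,widget_h=240)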
LTsv_GTKcanvasPAGE,LTsv_GTKcanvas_o,LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,LTsv_GTKcanvas_font=None,None,None,None,None
LTsv_GTKcanvasW,LTsv_GTKcanvasH,LTsv_GTKcanvas_gccolor,LTsv_canvas_bccolor=None,None,None,None
def LTsv_drawGTK_selcanvas(LTsv_canvasPAGENAME,draw_g="LTsv_draw_tkTAG"):
global LTsv_GTKcanvasPAGE,LTsv_GTKcanvas_o,LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,LTsv_GTKcanvas_font
global LTsv_GTKcanvasW,LTsv_GTKcanvasH,LTsv_GTKcanvas_gccolor,LTsv_canvas_bccolor
LTsv_GTKcanvasPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_canvasPAGENAME)
LTsv_GTKcanvasW=int(LTsv_readlinerest(LTsv_GTKcanvasPAGE,"widgetsizeW"))
LTsv_GTKcanvasH=int(LTsv_readlinerest(LTsv_GTKcanvasPAGE,"widgetsizeH"))
LTsv_GTKcanvas_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_GTKcanvasPAGE,"widgetobj")]
LTsv_GTKcanvas_m=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_GTKcanvasPAGE,"widgetpixmap")]
LTsv_GTKcanvas_g=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_GTKcanvasPAGE,"widgetgc")]
LTsv_GTKcanvas_gccolor=LTsv_GDKCOLOR()
LTsv_canvas_bccolor=LTsv_GDKCOLOR()
LTsv_GTKcanvas_font=""
LTsv_drawGTK_color("")
LTsv_drawGTK_bgcolor("")
LTsv_TkintercanvasPAGE,LTsv_Tkintercanvas_o,LTsv_Tkintercanvas_TAG,LTsv_Tkintercanvas_font=None,None,None,None
LTsv_TkintercanvasW,LTsv_TkintercanvasH=None,None
def LTsv_drawTkinter_selcanvas(LTsv_canvasPAGENAME,draw_g="LTsv_draw_tkTAG"):
	global LTsv_TkintercanvasPAGE,LTsv_Tkintercanvas_o,LTsv_Tkintercanvas_TAG,LTsv_Tkintercanvas_font
global LTsv_TkintercanvasW,LTsv_TkintercanvasH
LTsv_TkintercanvasPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_canvasPAGENAME)
	LTsv_TkintercanvasW=int(LTsv_readlinerest(LTsv_TkintercanvasPAGE,"widgetsizeW"))
	LTsv_TkintercanvasH=int(LTsv_readlinerest(LTsv_TkintercanvasPAGE,"widgetsizeH"))
LTsv_Tkintercanvas_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_TkintercanvasPAGE,"widgetobj")]
LTsv_Tkintercanvas_TAG=draw_g
LTsv_Tkintercanvas_font=""
LTsv_drawTkinter_color("")
LTsv_drawTkinter_bgcolor("")
def LTsv_draw_selcanvas_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_selcanvas
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_selcanvas
def LTsv_drawGTK_delete(draw_c="white"):
LTsv_drawGTK_color(draw_c)
LTsv_drawGTK_bgcolor(draw_c)
LTsv_libgdk.gdk_draw_rectangle(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,True,0,0,LTsv_GTKcanvasW,LTsv_GTKcanvasH)
def LTsv_drawTkinter_delete(draw_c="white"):
LTsv_drawTkinter_color(draw_c)
LTsv_drawTkinter_bgcolor(draw_c)
LTsv_Tkintercanvas_o.delete(LTsv_Tkintercanvas_TAG)
def LTsv_draw_delete_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_delete
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_delete
def LTsv_drawGTK_color(draw_c=""):
global LTsv_canvascolor,LTsv_canvasbgcolor
LTsv_canvascolor=draw_c
LTsv_libgdk.gdk_color_parse(draw_c.encode("utf-8"),ctypes.pointer(LTsv_GTKcanvas_gccolor))
LTsv_libgdk.gdk_gc_set_rgb_fg_color(LTsv_GTKcanvas_g,ctypes.pointer(LTsv_GTKcanvas_gccolor))
def LTsv_drawTkinter_color(draw_c=""):
global LTsv_canvascolor,LTsv_canvasbgcolor
LTsv_canvascolor=draw_c
def LTsv_draw_color_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_color
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_color
def LTsv_drawGTK_bgcolor(draw_c=""):
global LTsv_canvascolor,LTsv_canvasbgcolor
LTsv_canvasbgcolor=draw_c
LTsv_libgdk.gdk_color_parse(draw_c.encode("utf-8"),ctypes.pointer(LTsv_canvas_bccolor))
LTsv_libgdk.gdk_gc_set_rgb_fg_color(LTsv_GTKcanvas_g,ctypes.pointer(LTsv_canvas_bccolor))
def LTsv_drawTkinter_bgcolor(draw_c=""):
global LTsv_canvascolor,LTsv_canvasbgcolor
LTsv_canvasbgcolor=draw_c
def LTsv_draw_bgcolor_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_bgcolor
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_bgcolor
def LTsv_drawGTK_gcfcolor():
LTsv_libgdk.gdk_gc_set_rgb_fg_color(LTsv_GTKcanvas_g,ctypes.pointer(LTsv_GTKcanvas_gccolor))
def LTsv_drawGTK_gcbcolor():
LTsv_libgdk.gdk_gc_set_rgb_fg_color(LTsv_GTKcanvas_g,ctypes.pointer(LTsv_canvas_bccolor))
def LTsv_drawGTK_polygon(*draw_xy):
draw_xylen=len(draw_xy)//2; gdkpointsArrayType=LTsv_GDKPOINT*draw_xylen; gdkpointsArray=gdkpointsArrayType()
for draw_xy_count,gdkpoint in enumerate(gdkpointsArray):
gdkpoint.X,gdkpoint.Y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_polygon(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,False,ctypes.pointer(gdkpointsArray),draw_xylen)
def LTsv_drawTkinter_polygon(*draw_xy):
xyloop=draw_xy if len(draw_xy)%2 == 0 else draw_xy[:-1]
if len(xyloop) > 0:
LTsv_Tkintercanvas_o.create_polygon(*xyloop,fill="",outline=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_polygon_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_polygon
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_polygon
def LTsv_drawGTK_polygonfill(*draw_xy):
draw_xylen=len(draw_xy)//2; gdkpointsArrayType=LTsv_GDKPOINT*draw_xylen; gdkpointsArray=gdkpointsArrayType()
for draw_xy_count,gdkpoint in enumerate(gdkpointsArray):
gdkpoint.X,gdkpoint.Y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_polygon(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,True,ctypes.pointer(gdkpointsArray),draw_xylen)
def LTsv_drawTkinter_polygonfill(*draw_xy):
xyloop=draw_xy if len(draw_xy)%2 == 0 else draw_xy[:-1]
if len(xyloop) > 0:
# LTsv_Tkintercanvas_o.create_polygon(*xyloop,fill=LTsv_canvascolor,outline=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
LTsv_Tkintercanvas_o.create_polygon(*xyloop,fill=LTsv_canvascolor,outline="",tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_polygonfill_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_polygonfill
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_polygonfill
def LTsv_drawTkinter_fontfill(*draw_xy):
xyloop=draw_xy if len(draw_xy)%2 == 0 else draw_xy[:-1]
if len(xyloop) > 0:
LTsv_Tkintercanvas_o.create_polygon(*xyloop,fill=LTsv_canvasbgcolor,outline=LTsv_canvasbgcolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_drawGTK_squares(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_rectangle(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,False,-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh,draw_wh)
def LTsv_drawTkinter_squares(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_Tkintercanvas_o.create_rectangle(-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh//2+draw_x,draw_wh//2+draw_y,fill="",outline=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_squares_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_squares
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_squares
def LTsv_drawGTK_squaresfill(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_rectangle(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,True,-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh,draw_wh)
def LTsv_drawTkinter_squaresfill(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_Tkintercanvas_o.create_rectangle(-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh//2+draw_x,draw_wh//2+draw_y,fill=LTsv_canvascolor,outline=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_squaresfill_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_squaresfill
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_squaresfill
def LTsv_drawGTK_circles(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_arc(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,False,-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh,draw_wh,0,LTsv_GDK_ARCFILL)
def LTsv_drawTkinter_circles(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_Tkintercanvas_o.create_oval(-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh//2+draw_x,draw_wh//2+draw_y,fill="",outline=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_circles_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_circles
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_circles
def LTsv_drawGTK_circlesfill(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_arc(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,True,-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh,draw_wh,0,LTsv_GDK_ARCFILL)
def LTsv_drawTkinter_circlesfill(draw_wh=16,*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_Tkintercanvas_o.create_oval(-draw_wh//2+draw_x,-draw_wh//2+draw_y,draw_wh//2+draw_x,draw_wh//2+draw_y,fill=LTsv_canvascolor,outline=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_circlesfill_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_circlesfill
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_circlesfill
def LTsv_drawGTK_points(*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_libgdk.gdk_draw_point(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,draw_x,draw_y)
def LTsv_drawTkinter_points(*draw_xy):
for draw_xy_count in range(len(draw_xy)//2):
draw_x,draw_y=draw_xy[draw_xy_count*2],draw_xy[draw_xy_count*2+1]
LTsv_Tkintercanvas_o.create_line(draw_x,draw_y,draw_x+1,draw_y+1,fill=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_points_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_points
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_points
def LTsv_drawGTK_arc(draw_x,draw_y,draw_w,draw_h,draw_s=-math.pi,draw_e=math.pi):
LTsv_libgdk.gdk_draw_arc(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,False,draw_x,draw_y,draw_w,draw_h,int(draw_s*LTsv_GDK_ARCFILL/2.0/math.pi),int(draw_e*LTsv_GDK_ARCFILL/2.0/math.pi))
def LTsv_drawTkinter_arc(draw_x,draw_y,draw_w,draw_h,draw_s=-math.pi,draw_e=math.pi):
LTsv_Tkintercanvas_o.create_arc(draw_x,draw_y,draw_x+draw_w,draw_y+draw_h,fill="",outline=LTsv_canvascolor,start=draw_s*360.0/2.0/math.pi,extent=draw_e*360.0/2.0/math.pi,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_arc_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_arc
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_arc
def LTsv_drawGTK_arcfill(draw_x,draw_y,draw_w,draw_h,draw_s=-math.pi,draw_e=math.pi):
LTsv_libgdk.gdk_draw_arc(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,True,draw_x,draw_y,draw_w,draw_h,int(draw_s*LTsv_GDK_ARCFILL/2.0/math.pi),int(draw_e*LTsv_GDK_ARCFILL/2.0/math.pi))
def LTsv_drawTkinter_arcfill(draw_x,draw_y,draw_w,draw_h,draw_s=-math.pi,draw_e=math.pi):
LTsv_Tkintercanvas_o.create_arc(draw_x,draw_y,draw_x+draw_w,draw_y+draw_h,fill=LTsv_canvascolor,outline=LTsv_canvascolor,start=draw_s*360.0/2.0/math.pi,extent=draw_e*360.0/2.0/math.pi,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_arcfill_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_arcfill
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_arcfill
def LTsv_drawGTK_font(draw_f=""):
global LTsv_GTKcanvas_font
LTsv_GTKcanvas_font=draw_f
def LTsv_drawTkinter_font(draw_f=None):
global LTsv_Tkintercanvas_font
LTsv_Tkintercanvas_font=LTsv_fonttuple(draw_f)
def LTsv_draw_font_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_font
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_font
def LTsv_drawGTK_text(draw_t="",draw_x=0,draw_y=0):
pango_l=LTsv_libgtk.gtk_widget_create_pango_layout(LTsv_GTKcanvas_o,0)
LTsv_libgtk.pango_layout_set_text(pango_l,draw_t.encode("utf-8","xmlcharrefreplace"),-1)
LTsv_fontDesc=LTsv_libgtk.pango_font_description_from_string(LTsv_GTKcanvas_font.encode("utf-8"))
LTsv_libgtk.pango_layout_set_font_description(pango_l,LTsv_fontDesc)
LTsv_libgdk.gdk_draw_layout_with_colors(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,draw_x,draw_y,pango_l,ctypes.pointer(LTsv_GTKcanvas_gccolor),0)
LTsv_libobj.g_object_unref(pango_l)
LTsv_libgtk.pango_font_description_free(LTsv_fontDesc)
def LTsv_drawTkinter_text(draw_t="",draw_x=0,draw_y=0):
LTsv_Tkintercanvas_o.create_text(draw_x,draw_y,text=draw_t,font=LTsv_Tkintercanvas_font,anchor="nw",fill=LTsv_canvascolor,tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_text_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_text
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_text
def LTsv_draw_picture_load(LTsv_picturepath):
global LTsv_pictureOBJ,LTsv_pictureW,LTsv_pictureH
if os.path.isfile(LTsv_picturepath):
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_pictureOBJ[LTsv_picturepath]=LTsv_libgdk.gdk_pixbuf_new_from_file(LTsv_picturepath.encode("utf-8","xmlcharrefreplace"),0)
LTsv_pictureW[LTsv_picturepath]=LTsv_libgdk.gdk_pixbuf_get_width(LTsv_pictureOBJ[LTsv_picturepath])
LTsv_pictureH[LTsv_picturepath]=LTsv_libgdk.gdk_pixbuf_get_height(LTsv_pictureOBJ[LTsv_picturepath])
if LTsv_GUI == LTsv_GUI_Tkinter:
LTsv_pictureOBJ[LTsv_picturepath]=Tk.PhotoImage(file=LTsv_picturepath)
LTsv_pictureW[LTsv_picturepath]=LTsv_pictureOBJ[LTsv_picturepath].width()
LTsv_pictureH[LTsv_picturepath]=LTsv_pictureOBJ[LTsv_picturepath].height()
else:
LTsv_pictureOBJ[LTsv_picturepath]=None
LTsv_pictureW[LTsv_picturepath]=0
LTsv_pictureH[LTsv_picturepath]=0
return LTsv_pictureOBJ[LTsv_picturepath]
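# Usage sketch (illustration only): load a picture once, then blit it onto the
# currently selected canvas with LTsv_draw_picture (bound elsewhere in this file
# through LTsv_draw_picture_shell below), as debug_canvas() does with the logo.
#
#	LTsv_draw_picture_load("LTsv_logo.png")
#	LTsv_draw_picture("LTsv_logo.png",0,0)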
def LTsv_draw_picture_celldiv(LTsv_picturepath,picture_divw,picture_divh):
global LTsv_pictureOBJ
picture_divptr=""
if not LTsv_picturepath in LTsv_pictureOBJ:
LTsv_draw_picture_load(LTsv_picturepath)
picture_o=LTsv_pictureOBJ[LTsv_picturepath]
if picture_o != None:
picture_divw,picture_divh=max(picture_divw,1),max(picture_divh,1)
		picture_w=LTsv_pictureW[LTsv_picturepath]; cell_w=picture_w//picture_divw	# use the cached size so this part also runs under the Tkinter backend without LTsv_libgdk
		picture_h=LTsv_pictureH[LTsv_picturepath]; cell_h=picture_h//picture_divh
if picture_w%cell_w != 0: picture_divw=picture_divw-1
if picture_h%cell_h != 0: picture_divh=picture_divh-1
if LTsv_GUI == LTsv_GUI_GTK2:
for picture_y in range(picture_divh):
for picture_x in range(picture_divw):
picture_t="{0}[{1}]".format(LTsv_picturepath,picture_y*picture_divw+picture_x)
LTsv_pictureOBJ[picture_t]=LTsv_libgdk.gdk_pixbuf_new_subpixbuf(ctypes.c_char_p(picture_o),picture_x*cell_w,picture_y*cell_h,cell_w,cell_h)
LTsv_pictureW[picture_t]=LTsv_libgdk.gdk_pixbuf_get_width(LTsv_pictureOBJ[picture_t])
LTsv_pictureH[picture_t]=LTsv_libgdk.gdk_pixbuf_get_height(LTsv_pictureOBJ[picture_t])
if LTsv_GUI == LTsv_GUI_Tkinter:
pass
def LTsv_draw_picture_save(LTsv_pictureoldpath,LTsv_picturenewpath):
global LTsv_pictureOBJ
picture_o=LTsv_pictureOBJ[LTsv_pictureoldpath]
if picture_o != None:
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_picturenewext=os.path.splitext(LTsv_picturenewpath)[1].lstrip('.').lower()
if LTsv_picturenewext == "png":
LTsv_libgdk.gdk_pixbuf_save(picture_o,LTsv_picturenewpath.encode("utf-8","xmlcharrefreplace"),"png".encode("utf-8"),None,"compression".encode("utf-8"),"9".encode("utf-8"),None)
elif LTsv_picturenewext == "ico":
LTsv_libgdk.gdk_pixbuf_save(picture_o,LTsv_picturenewpath.encode("utf-8","xmlcharrefreplace"),"ico".encode("utf-8"),None,"depth".encode("utf-8"),"16".encode("utf-8"),None)
else:
LTsv_libgdk.gdk_pixbuf_save(picture_o,LTsv_picturenewpath.encode("utf-8","xmlcharrefreplace"),LTsv_picturenewext.encode("utf-8"),None)
def LTsv_draw_canvas_save(LTsv_canvasPAGENAME,LTsv_picturenewpath):
global LTsv_widgetLTSV
LTsv_canvasPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_canvasPAGENAME)
canvas_w=int(LTsv_readlinerest(LTsv_canvasPAGE,"widgetsizeW"))
canvas_h=int(LTsv_readlinerest(LTsv_canvasPAGE,"widgetsizeH"))
LTsv_picturenewext=os.path.splitext(LTsv_picturenewpath)[1].lstrip('.').lower()
if LTsv_GUI == LTsv_GUI_GTK2:
canvas_m=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_canvasPAGE,"widgetpixmap")]
canvas_d=LTsv_libgdk.gdk_pixbuf_get_from_drawable(0,canvas_m,LTsv_libgdk.gdk_colormap_get_system(),0,0,0,0,canvas_w,canvas_h)
if LTsv_picturenewext == "png":
LTsv_libgdk.gdk_pixbuf_save(canvas_d,LTsv_picturenewpath.encode("utf-8","xmlcharrefreplace"),"png".encode("utf-8"),None,"compression".encode("utf-8"),"9".encode("utf-8"),None)
else:
LTsv_libgdk.gdk_pixbuf_save(canvas_d,LTsv_picturenewpath.encode("utf-8","xmlcharrefreplace"),LTsv_picturenewext.encode("utf-8"),None)
LTsv_libobj.g_object_unref(canvas_d)
if LTsv_GUI == LTsv_GUI_Tkinter:
canvas_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_canvasPAGE,"widgetobj")]
# canvas_d=Image.new("RGB",(canvas_w,canvas_h),(255,255,255))
# canvas_d.paste(canvas_o,(0,0))
# canvas_d.save(fileName,returnFormat(LTsv_picturenewext))
def LTsv_drawGTK_picture(draw_t="",draw_x=0,draw_y=0):
picture_o,picture_w,picture_h=LTsv_pictureOBJ[draw_t],LTsv_pictureW[draw_t],LTsv_pictureH[draw_t]
LTsv_libgdk.gdk_draw_pixbuf(LTsv_GTKcanvas_m,LTsv_GTKcanvas_g,picture_o,0,0,draw_x,draw_y,picture_w,picture_h,0,0,0)
def LTsv_drawTkinter_picture(draw_t="",draw_x=0,draw_y=0):
picture_o,picture_w,picture_h=LTsv_pictureOBJ[draw_t],LTsv_pictureW[draw_t],LTsv_pictureH[draw_t]
LTsv_Tkintercanvas_o.create_image(draw_x,draw_y,image=picture_o,anchor="nw",tag=LTsv_Tkintercanvas_TAG)
def LTsv_draw_picture_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_picture
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_picture
def LTsv_drawGTK_queue():
global LTsv_GTKcanvas_o
LTsv_libgtk.gtk_widget_queue_draw(LTsv_GTKcanvas_o)
def LTsv_drawTkinter_queue():
pass
def LTsv_draw_queue_shell(LTsv_GUI):
if LTsv_GUI == LTsv_GUI_GTK2: return LTsv_drawGTK_queue
if LTsv_GUI == LTsv_GUI_Tkinter: return LTsv_drawTkinter_queue
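# Note on the *_shell functions above: each one picks the GTK2 or Tkinter
# implementation for the current backend and returns it. The module-level names
# used by the debug code below (LTsv_draw_selcanvas, LTsv_draw_polygon, ...) are
# presumably bound from these selectors elsewhere in this file, e.g. (sketch,
# assumed binding):
#
#	LTsv_draw_selcanvas=LTsv_draw_selcanvas_shell(LTsv_GUI)
#	LTsv_draw_polygon=LTsv_draw_polygon_shell(LTsv_GUI)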
def LTsv_clockwise(*draw_xy):
clockwise=0
if len(draw_xy) >= 6:
xyloop=draw_xy+draw_xy if len(draw_xy)%2 == 0 else draw_xy[:-1]+draw_xy[:-1]
for draw_xy_count in range(0,len(draw_xy),2):
Px,Py,Cx,Cy,Qx,Qy=range(draw_xy_count,draw_xy_count+6)
PCQ=(xyloop[Px]-xyloop[Cx])*(xyloop[Qy]-xyloop[Cy])-(xyloop[Py]-xyloop[Cy])*(xyloop[Qx]-xyloop[Cx])
clockwise=clockwise+1 if PCQ < 0 else clockwise-1 if PCQ > 0 else clockwise
return clockwise
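# Worked example: LTsv_clockwise(0,0, 10,0, 0,10) visits three vertex triples,
# each cross product is negative, so it returns 3; with the y axis growing
# downward (screen coordinates) a positive result means the vertices wind
# clockwise, a negative result counter-clockwise.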
LTsv_WM_TRAYEVENTSTART=0x900
LTsv_ICON_NIM_ADD =0x0000
LTsv_ICON_NIM_MODIFY=0x0001
LTsv_ICON_NIM_DELETE=0x0002
class LTsv_EXTRACTICON(ctypes.Structure):
_fields_ = [
('phIcon',ctypes.c_uint*1)
]
class LTsv_NOTIFYICONDATAUNION(ctypes.Structure):
_fields_ = [
('uTimeout',ctypes.c_uint),
('uVersion',ctypes.c_uint)
]
class LTsv_GUID(ctypes.Structure):
_fields_ = [
('Data1',ctypes.c_ulong),
('Data2',ctypes.c_ushort),
('Data3',ctypes.c_ushort),
('Data4',ctypes.c_ubyte*8)
]
class LTsv_NOTIFYICONDATA(ctypes.Structure):
_fields_ = [
('cbSize', ctypes.c_ulong),
('hWnd', ctypes.c_void_p),
('uID', ctypes.c_uint),
('uFlags', ctypes.c_uint),
('uCallbackMessage',ctypes.c_uint),
('hIcon', ctypes.c_void_p),
('szTip', ctypes.c_char*64),
('dwState', ctypes.c_ulong),
('dwStateMask', ctypes.c_ulong),
('szInfo', ctypes.c_char*256),
('union', LTsv_NOTIFYICONDATAUNION),
('szInfoTitle', ctypes.c_char* 64),
('dwInfoFlags', ctypes.c_ulong),
('guidItem', LTsv_GUID)
]
def __init__(self):
self.cbSize=ctypes.sizeof(self)
self.uFlags=7
self.uCallbackMessage=LTsv_WM_TRAYEVENTSTART
def LTsv_icon_load(LTsv_picturepath):
global LTsv_iconOBJ
if os.path.isfile(LTsv_picturepath):
if LTsv_Notify == LTsv_GUI_GTK2:
pass
if LTsv_Notify == LTsv_GUI_WinAPI:
LTsv_phIconSmall,LTsv_phIconLarge=0,0
LTsv_EXEICON=LTsv_EXTRACTICON()
LTsv_Icons=LTsv_shell32.ExtractIconEx(LTsv_picturepath.encode(sys.stdout.encoding,"xmlcharrefreplace"),-1,0,0,0)
if LTsv_Icons > 0:
LTsv_shell32.ExtractIconEx(LTsv_picturepath.encode(sys.stdout.encoding,"xmlcharrefreplace"),0,LTsv_phIconLarge,ctypes.pointer(LTsv_EXEICON),1)
LTsv_iconOBJ[LTsv_picturepath]=LTsv_EXEICON.phIcon[0]
for icon_n in range(LTsv_Icons):
LTsv_shell32.ExtractIconEx(LTsv_picturepath.encode(sys.stdout.encoding,"xmlcharrefreplace"),icon_n,LTsv_phIconLarge,ctypes.pointer(LTsv_EXEICON),1)
LTsv_iconOBJ["{0}[{1}]".format(LTsv_picturepath,icon_n)]=LTsv_EXEICON.phIcon[0]
return LTsv_Icons
def LTsv_notifyicon_new(LTsv_windowPAGENAME,notify_n=None,widget_t="",widget_u="",menu_b=None,menu_c=None):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(notify_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="notify")
iconuri=widget_u
if LTsv_GUI == LTsv_GUI_GTK2:
picture_o=LTsv_pictureOBJ[iconuri] if iconuri in LTsv_pictureOBJ else None
if picture_o == None:
iconuri=LTsv_default_iconuri
LTsv_draw_picture_load(iconuri)
picture_o=LTsv_pictureOBJ[iconuri]
widget_o=LTsv_libgtk.gtk_status_icon_new_from_pixbuf(picture_o)
LTsv_libgtk.gtk_status_icon_set_tooltip(widget_o,widget_t.encode("utf-8"))
menu_o=LTsv_libgtk.gtk_menu_new()
LTsv_notify_popupmenu_cbk=LTsv_CALLBACLTYPE(menu_c) if menu_c != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"popup-menu".encode("utf-8"),LTsv_notify_popupmenu_cbk,0,0,0)
def LTsv_notifyicon_defmenu_yield():
yield ("window exit",LTsv_window_exit_cbk)
yield ("-----------",None)
yield ("notify click",LTsv_notify_popupmenu_cbk)
LTsv_notifyicon_menu_yield=menu_b if menu_b != None else LTsv_notifyicon_defmenu_yield()
for LTsv_popup_count,LTsv_popup_menu in enumerate(LTsv_notifyicon_menu_yield):
if LTsv_popup_menu[0]=="" or LTsv_popup_menu[1] == None:
LTsv_popup_menu_label=LTsv_libgtk.gtk_separator_menu_item_new()
LTsv_libgtk.gtk_menu_shell_append(menu_o,LTsv_popup_menu_label)
else:
LTsv_popup_menu_label=LTsv_libgtk.gtk_menu_item_new_with_label(LTsv_popup_menu[0].encode("utf-8","xmlcharrefreplace"))
LTsv_libgtk.gtk_menu_shell_append(menu_o,LTsv_popup_menu_label)
LTsv_libobj.g_signal_connect_data(LTsv_popup_menu_label,"activate".encode("utf-8"),LTsv_popup_menu[1],LTsv_popup_count,0,0)
def LTsv_notifyicon_activate(window_objvoid=None,window_objptr=None):
LTsv_libgtk.gtk_widget_show_all(menu_o)
LTsv_libgtk.gtk_menu_popup(menu_o,0,0,LTsv_libgtk.gtk_status_icon_position_menu,widget_o,0,0)
LTsv_widget_showhide(LTsv_windowPAGENAME,True)
LTsv_notify_activate_cbk=LTsv_CALLBACLTYPE(LTsv_notifyicon_activate)
LTsv_libobj.g_signal_connect_data(widget_o,"activate".encode("utf-8"),LTsv_notify_activate_cbk,-1,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_u=iconuri,event_a=LTsv_notify_activate_cbk,event_u=LTsv_notify_popupmenu_cbk,menu_o=menu_o,menu_b=LTsv_notifyicon_menu_yield,menu_c=menu_c)
if LTsv_GUI == LTsv_GUI_Tkinter:
icon_o=LTsv_iconOBJ[iconuri] if iconuri in LTsv_iconOBJ else None
if icon_o == None:
iconuri=LTsv_default_iconuri
LTsv_icon_load(sys.executable)
icon_o=LTsv_iconOBJ[iconuri]
if LTsv_widgetPAGENAME not in LTsv_iconOBJnotify:
LTsv_iconOBJnotify.append(LTsv_widgetPAGENAME)
widget_o=LTsv_NOTIFYICONDATA()
widget_o.hWnd=int(window_o.frame(),16)
widget_o.hIcon=icon_o
widget_o.uID=LTsv_iconOBJnotify.index(LTsv_widgetPAGENAME)
		widget_o.szTip=widget_t.encode("utf-8","xmlcharrefreplace")[:63]	# truncate the encoded bytes to fit the 64-byte szTip field (leave room for the NUL terminator)
LTsv_shell32.Shell_NotifyIcon(ctypes.c_ulong(LTsv_ICON_NIM_ADD),ctypes.pointer(widget_o))
def LTsv_notifyicon_activate(window_objvoid=None,window_objptr=None):
LTsv_widget_showhide(LTsv_windowPAGENAME,True)
# window_o.protocol("WM_TRAYEVENTSTART",LTsv_notifyicon_activate)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,widget_u=iconuri)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
LTsv_GTK_RESPONSE_ACCEPT= -3
LTsv_GTK_RESPONSE_APPLY= -10
LTsv_GTK_RESPONSE_CANCEL= -6
LTsv_GTK_RESPONSE_CLOSE= -7
LTsv_GTK_RESPONSE_DELETE_EVENT=-4
LTsv_GTK_RESPONSE_HELP= -11
LTsv_GTK_RESPONSE_NO= -9
LTsv_GTK_RESPONSE_OK= -5
LTsv_GTK_RESPONSE_YES= -8
def LTsv_filedialog_new(LTsv_windowPAGENAME,widget_n=None,event_b=None,widget_t="LTsv_filedialog",dialog_t=0):
global LTsv_widgetLTSV
LTsv_windowPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_windowPAGENAME)
window_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_windowPAGE,"widgetobj")]
LTsv_widgetPAGENAME=LTsv_widget_newUUID(widget_n); LTsv_widgetPAGE=""
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_k="filedialog")
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_dialogtype=max(dialog_t,0)%4
LTsv_dialogtypename=["fileopen","filesave","diropen","dirsave"]
widget_w,widget_h=LTsv_screen_w(LTsv_windowPAGENAME)//2,LTsv_screen_h(LTsv_windowPAGENAME)//2
widget_o=LTsv_libgtk.gtk_file_chooser_dialog_new(widget_t.encode("utf-8","xmlcharrefreplace"),0,LTsv_dialogtype,"gtk-cancel".encode("utf-8"),LTsv_GTK_RESPONSE_CANCEL,LTsv_dialogtypename[LTsv_dialogtype].encode("utf-8","xmlcharrefreplace"),LTsv_GTK_RESPONSE_ACCEPT,0)
LTsv_libgtk.gtk_widget_set_size_request(widget_o,widget_w,widget_h)
LTsv_libgtk.gtk_window_set_resizable(widget_o,True)
		LTsv_libgtk.gtk_window_set_position(widget_o,LTsv_GTK_WIN_POS_CENTER)	# center the new dialog itself (window_o here is the parent window)
event_r_cbk=LTsv_CALLBACLTYPE(event_b) if event_b != None else LTsv_CALLBACLTYPE(LTsv_window_none)
LTsv_libobj.g_signal_connect_data(widget_o,"response".encode("utf-8"),event_r_cbk,0,0,0)
event_c_cbk=LTsv_libgtk.gtk_widget_hide_on_delete
LTsv_libobj.g_signal_connect_data(widget_o,"delete-event".encode("utf-8"),event_c_cbk,0,0,0)
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,dialog_t=dialog_t,dialog_c=event_c_cbk,event_b=event_r_cbk)
if LTsv_GUI == LTsv_GUI_Tkinter:
LTsv_dialogtype=max(dialog_t,0)%4
def LTsv_filedialog_askopen(window_objvoid=None,window_objptr=None):
global LTsv_widgetLTSV
LTsv_widgetPAGE=LTsv_getpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME)
widget_o=LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetobj")]
widget_k=LTsv_readlinerest(LTsv_widgetPAGE,"widgetkind")
if widget_k == "filedialog":
dialog_t=int(LTsv_readlinerest(LTsv_widgetPAGE,"dialog_type"))
print(dialog_t)
if dialog_t == 0:
widget_u=Tk_fd.askopenfilename()
if dialog_t == 1:
					widget_u=Tk_fd.asksaveasfilename()	# return a path string like the other branches (asksaveasfile() returns a file object)
if dialog_t == 2:
widget_u=Tk_fd.askdirectory()
if dialog_t == 3:
					widget_u=Tk_fd.askdirectory()	# tkinter.filedialog has no "save directory" dialog, so reuse askdirectory()
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_u=widget_u)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
LTsv_widgetOBJ[LTsv_readlinerest(LTsv_widgetPAGE,"widgetcallback")]()
widget_o=LTsv_filedialog_askopen
LTsv_widgetPAGE=LTsv_widgetPAGEXYWH(LTsv_widgetPAGE,widget_o=widget_o,widget_t=widget_t,dialog_t=LTsv_dialogtype,event_b=event_b)
LTsv_widgetLTSV=LTsv_putpage(LTsv_widgetLTSV,LTsv_widgetPAGENAME,LTsv_widgetPAGE)
return LTsv_widgetPAGENAME
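# Usage sketch (illustration only): create a file dialog bound to a window page
# and show it on demand, as debug_filedialog()/debug_filedialog_response() below
# do with LTsv_widget_showhide() and LTsv_widget_geturi() (both defined elsewhere
# in this file). "window_page" and "on_response" are assumptions.
#
#	dialog_page=LTsv_filedialog_new(window_page,event_b=on_response,widget_t="open file",dialog_t=0)
#	LTsv_widget_showhide(dialog_page,True)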
def debug_canvas(window_objvoid=None,window_objptr=None):
global debug_scaleRGB
LTsv_draw_selcanvas(debug_keysetup_canvas)
LTsv_draw_delete("white")
LTsv_draw_font(debug_font_entry)
LTsv_draw_color("#fffff0"); LTsv_draw_polygonfill(0,0,debug_canvas_W,0,debug_canvas_W,debug_canvas_H,0,debug_canvas_H)
mouse_x,mouse_y=LTsv_global_canvasmotionX(),LTsv_global_canvasmotionY()
LTsv_draw_color(debug_scaleRGB); LTsv_draw_text("mouseX,Y\n[{0},{1}]".format(mouse_x,mouse_y),draw_x=mouse_x,draw_y=mouse_y)
LTsv_putdaytimenow(); LTsv_checkFPS()
LTsv_draw_color("black"); LTsv_draw_text(LTsv_getdaytimestr(LTsv_widget_gettext(debug_keysetup_timentry)),draw_x=0,draw_y=0)
LTsv_setkbddata(25,25)
kbdlabels=LTsv_getkbdlabels().replace('\t',' ').replace('た','\nた').replace('ち','\nち').replace('つ','\nつ').replace('NFER','\nNFER')
if LTsv_joymax > 0:
LTsv_setjoydata(0); pad_axis=LTsv_readlinerest(LTsv_getjoystr(0),LTsv_joyaxis_label())
for pad_xy,pad_circle in enumerate([LTsv_draw_squares,LTsv_draw_circles,LTsv_draw_circles]):
pad_x,pad_y=debug_joypad_X+pad_xy*debug_joypad_W,debug_joypad_Y
pad_circle(debug_joypad_W,*(pad_x,pad_y))
LTsv_draw_points(pad_x,pad_y)
if LTsv_joymax > 0:
stick_w=int(LTsv_pickdatalabel(pad_axis,debug_padxkey[pad_xy]))
stick_h=int(LTsv_pickdatalabel(pad_axis,debug_padykey[pad_xy]))
stick_t=math.atan2(stick_w,stick_h)
stick_s=LTsv_atanscalar(stick_w,stick_h)
LTsv_draw_polygon(*(pad_x,pad_y,pad_x+int(math.sin(stick_t)*stick_s*debug_joypad_W/2/LTsv_WINJOYCENTER),pad_y+int(math.cos(stick_t)*stick_s*debug_joypad_W/2/LTsv_WINJOYCENTER)))
LTsv_draw_text("{0},{1}\n{2},{3}\nθ{4}\n∇{5}".format(debug_padxkey[pad_xy],debug_padykey[pad_xy],LTsv_pickdatalabel(pad_axis,debug_padxkey[pad_xy]),LTsv_pickdatalabel(pad_axis,debug_padykey[pad_xy]),stick_t,stick_s),draw_x=pad_x+3,draw_y=pad_y+3)
else:
LTsv_draw_text("{0},{1}".format(debug_padxkey[pad_xy],debug_padykey[pad_xy]),draw_x=pad_x+3,draw_y=pad_y+3)
debug_arc_W=debug_joypad_X+int(debug_joypad_W*2.5)
LTsv_draw_arc(debug_arc_W,debug_joypad_Y-debug_joypad_W//2,debug_canvas_W-debug_arc_W-2,debug_joypad_W,-math.pi*0.5,math.pi*1.8)
txt_x,txt_y=500,debug_joypad_Y+debug_joypad_H//2
if LTsv_joymax > 0:
LTsv_draw_text(LTsv_getjoystr(0).replace('\t',' '),draw_x=0,draw_y=txt_y)
LTsv_draw_text(kbdlabels,draw_x=0,draw_y=txt_y+debug_label_WH*3)
LTsv_draw_text("getkbdnames:{0}".format(LTsv_getkbdnames()),draw_x=txt_x,draw_y=txt_y+debug_label_WH*1)
LTsv_draw_text("getkbdcodes:{0}".format(LTsv_getkbdcodes()),draw_x=txt_x,draw_y=txt_y+debug_label_WH*2)
LTsv_draw_text("getkbdkanas:{0}".format(LTsv_getkbdkanas()),draw_x=txt_x,draw_y=txt_y+debug_label_WH*3)
LTsv_draw_text("debug_keyevent:\n{0}".format(debug_keyevent),draw_x=txt_x,draw_y=txt_y+debug_label_WH*5)
LTsv_draw_color(debug_scaleRGB)
LTsv_draw_polygon(*tuple(debug_polygonpointlist))
if LTsv10_logoOBJ:
LTsv_draw_picture(LTsv10_logoPATH,debug_arc_W+LTsv_global_pictureW(LTsv10_logoPATH),debug_joypad_Y-LTsv_global_pictureH(LTsv10_logoPATH)//2)
LTsv_draw_queue()
LTsv_window_after(debug_keysetup_window,event_b=debug_canvas,event_i="debug_canvas",event_w=50)
def debug_timebutton(callback_void=None,callback_ptr=None):
LTsv_widget_settext(debug_keysetup_timentry,widget_t=debug_timentry_default)
def debug_calc(callback_void=None,callback_ptr=None):
calc_value=LTsv_widget_gettext(debug_keysetup_calcentry)
if not calc_value.endswith('⇔'):
calc_value+='⇔'
calc_Q=calc_value[:calc_value.find('⇔')]
calc_A=LTsv_calc(calc_Q)
LTsv_widget_settext(debug_keysetup_calcentry,calc_Q+'⇔'+calc_A)
def debug_polygonpoints(callback_void=None,callback_ptr=None):
global debug_polygonpointlist
polygonpoints=LTsv_widget_gettext(debug_keysetup_polygonentry).strip("[]").replace(" ","").split(',')
	debug_polygonpointlist=list(map(LTsv_intstr0x,polygonpoints))	# list() so len()/pop() below work under Python 3, where map() returns an iterator
if len(debug_polygonpointlist)%2 == 1:
debug_polygonpointlist.pop()
LTsv_widget_settext(debug_keysetup_polygonentry,widget_t="{0}".format(debug_polygonpointlist))
def debug_activewindow(callback_void=None,callback_ptr=None):
LTsv_widget_settext(debug_keysetup_activelabel,widget_t=LTsv_window_foreground())
LTsv_window_after(debug_keysetup_window,event_b=debug_activewindow,event_i="debug_activewindow",event_w=500)
def debug_canvas_press(callback_void=None,callback_ptr=None):
global debug_polygonpointlist
mouse_x,mouse_y=LTsv_global_canvasmotionX(),LTsv_global_canvasmotionY()
LTsv_setkbddata(0,25); debug_getkbdstr=LTsv_getkbdlabels("MouseL\tMouseR\tMouseC")
cursorLCR="{0}{1}{2}".format(LTsv_pickdatalabel(debug_getkbdstr,"MouseL"),LTsv_pickdatalabel(debug_getkbdstr,"MouseC"),LTsv_pickdatalabel(debug_getkbdstr,"MouseR"))
if cursorLCR == "100" or cursorLCR == "000":
debug_polygonpointlist+=[mouse_x]; debug_polygonpointlist+=[mouse_y]
if cursorLCR == "001" or cursorLCR == "010":
debug_polygonbutton()
LTsv_widget_settext(debug_keysetup_polygonentry,widget_t="{0}".format(debug_polygonpointlist))
LTsv_widget_focus(debug_keysetup_polygonentry)
def debug_polygonbutton(callback_void=None,callback_ptr=None):
global debug_polygonpointlist
if len(debug_polygonpointlist) >= 2:
debug_polygonpointlist.pop(); debug_polygonpointlist.pop()
LTsv_widget_settext(debug_keysetup_polygonentry,widget_t="{0}".format(debug_polygonpointlist))
def debug_color_scale(window_objvoid=None,window_objptr=None):
global debug_scaleRGB
scaleR,scaleG,scaleB=hex(LTsv_widget_getnumber(debug_keysetup_scaleR)).replace("0x",""),hex(LTsv_widget_getnumber(debug_keysetup_scaleG)).replace("0x",""),hex(LTsv_widget_getnumber(debug_keysetup_scaleB)).replace("0x","")
scaleR,scaleG,scaleB=scaleR if len(scaleR) == 2 else "0"+scaleR,scaleG if len(scaleG) == 2 else "0"+scaleG,scaleB if len(scaleB) == 2 else "0"+scaleB
debug_scaleRGB="#{0}{1}{2}".format(scaleR,scaleG,scaleB)
def debug_color_combo(window_objvoid=None,window_objptr=None):
global debug_scaleRGB
if LTsv_widget_gettext(debug_keysetup_combobox) in debug_colordic:
scaleR,scaleG,scaleB=debug_colordic[LTsv_widget_gettext(debug_keysetup_combobox)]
LTsv_widget_setnumber(debug_keysetup_scaleR,scaleR)
LTsv_widget_setnumber(debug_keysetup_scaleG,scaleG)
LTsv_widget_setnumber(debug_keysetup_scaleB,scaleB)
def debug_edit_clip(window_objvoid=None,window_objptr=None):
edit_clip=LTsv_widget_gettext(debug_edit)
LTsv_libc_printf("edit_clip={0}".format(edit_clip))
LTsv_widget_settext(debug_clipboard,widget_t=edit_clip)
debug_check=[""]*3
def debug_checkbutton_shell(checkNumber):
def debug_checkbutton_kernel(window_objvoid=None,window_objptr=None):
LTsv_widget_settext(debug_edit,widget_t="{0}:{1}\n".format(LTsv_widget_gettext(debug_check[checkNumber]),LTsv_widget_getnumber(debug_check[checkNumber])))
return debug_checkbutton_kernel
debug_radio=[""]*3
def debug_radiobutton_shell(radioNumber):
def debug_radiobutton_kernel(window_objvoid=None,window_objptr=None):
# LTsv_libc_printf("{0}".format(LTsv_widget_gettext(debug_radio[radioNumber])))
LTsv_widget_settext(debug_edit,widget_t="{0}:{1}\n".format(LTsv_widget_gettext(debug_radio[radioNumber]),LTsv_widget_getnumber(debug_radio[radioNumber])))
return debug_radiobutton_kernel
#class LTsv_GdkEventKey(ctypes.Structure):
# _fields_ = [
# ('type',ctypes.c_int),
# ('window',ctypes.c_void_p),
# ('send_event',ctypes.c_ubyte),
# ('time',ctypes.c_uint),
# ('state',ctypes.c_uint),
# ('keyval',ctypes.c_uint),
# ]
debug_keyevent=""
def debug_keypress(window_objvoid=None,window_objptr=None):
global debug_keyevent
if LTsv_GUI == LTsv_GUI_GTK2:
print("debug_keypress",window_objvoid,window_objptr)
# window_objptr.restype = ctypes.POINTER(LTsv_GdkEventKey)
debug_keyevent="debug_keypress"
if LTsv_GUI == LTsv_GUI_Tkinter:
debug_keyevent+="\t{0}".format(window_objvoid.char)
def debug_keyrelease(window_objvoid=None,window_objptr=None):
global debug_keyevent
if LTsv_GUI == LTsv_GUI_GTK2:
print("debug_keyrelease",window_objvoid,window_objptr)
debug_keyevent="debug_keyrelease"
if LTsv_GUI == LTsv_GUI_Tkinter:
debug_keyevent=debug_keyevent.replace("\t{0}".format(window_objvoid.char),"")
def debug_filedialog(window_objvoid=None,window_objptr=None):
LTsv_widget_settext(debug_filedialogwindow,"")
LTsv_widget_showhide(debug_filedialogwindow,True)
#LTsv_GTK_RESPONSE_ACCEPT
def debug_filedialog_response(window_objvoid=None,window_objptr=None):
filedialog_filename=LTsv_widget_geturi(debug_filedialogwindow)
LTsv_widget_showhide(debug_filedialogwindow,False)
LTsv_widget_settext(debug_edit,widget_t=filedialog_filename)
if __name__=="__main__":
from LTsv_printf import *
from LTsv_file import *
print("__main__ Python{0.major}.{0.minor}.{0.micro},{1},{2}".format(sys.version_info,sys.platform,sys.stdout.encoding))
print("")
LTsv_GUI=LTsv_guiinit()
# LTsv_GUI=LTsv_guiinit(LTsv_GUI_Tkinter)
if len(LTsv_GUI) > 0:
import math
from LTsv_joy import *
from LTsv_calc import *
from LTsv_kbd import *
LTsv_kbdinit("./LTsv_kbd.tsv",LTsv_initmouse=False)
LTsv_joymax=LTsv_joyinit()
debug_fontname="kan5x5comic"
debug_fontsize_entry=10; debug_font_entry="{0},{1}".format(debug_fontname,debug_fontsize_entry); debug_label_WH=debug_fontsize_entry*2
debug_keysetup_W,debug_keysetup_H=800,600
debug_canvas_X,debug_canvas_Y,debug_canvas_W,debug_canvas_H=0,debug_label_WH,debug_keysetup_W-120,debug_keysetup_H*3//5
debug_combobox_X,debug_combobox_Y,debug_combobox_W,debug_combobox_H=debug_canvas_W,debug_canvas_Y,debug_keysetup_W-debug_canvas_W,debug_label_WH*2
debug_scale_X,debug_scale_Y,debug_scale_W,debug_scale_H=debug_canvas_W,debug_canvas_Y+debug_combobox_H,debug_keysetup_W-debug_canvas_W,debug_canvas_H-debug_combobox_H
debug_joypad_X,debug_joypad_Y,debug_joypad_W,debug_joypad_H=debug_canvas_W//6,debug_canvas_H*1//4+debug_label_WH*2,debug_canvas_H*2//4,debug_canvas_H*2//4
debug_padxkey,debug_padykey=["Px","Lx","Rx"],["Py","Ly","Ry"]
debug_keyspin_X,debug_keyspin_Y,debug_keyspin_W,debug_keyspin_H=0,debug_keysetup_H-debug_label_WH*9,debug_keysetup_W//14,debug_label_WH
debug_keysetup_window=LTsv_window_new(widget_t="L:Tsv GUI test and KeyCode Setup",event_b=LTsv_window_exit,widget_w=debug_keysetup_W,widget_h=debug_keysetup_H,event_z=None,event_k=debug_keypress,event_y=debug_keyrelease)
debug_timentry_default="@000y年-@0m月(@0wnyi/@ywi週)-@0dm日(@wdj曜)@0h:@0n:@0s FPS:@0fpc"
debug_keysetup_timentry=LTsv_entry_new(debug_keysetup_window,widget_t="",widget_x=0,widget_y=0,widget_w=debug_keysetup_W-debug_keyspin_W*1,widget_h=debug_label_WH,widget_f=debug_font_entry)
debug_keysetup_timebutton=LTsv_button_new(debug_keysetup_window,widget_t="reset",widget_x=debug_keysetup_W-debug_keyspin_W*1,widget_y=0,widget_w=debug_keyspin_W*1,widget_h=debug_label_WH,widget_f=debug_font_entry,event_b=debug_timebutton)
debug_keysetup_canvas=LTsv_canvas_new(debug_keysetup_window,widget_x=debug_canvas_X,widget_y=debug_canvas_Y,widget_w=debug_canvas_W,widget_h=debug_canvas_H,event_w=50,event_p=debug_canvas_press)
LTsv10_logoPATH="../icon_cap/LTsv10_logo.png"; LTsv10_logoOBJ=LTsv_draw_picture_load(LTsv10_logoPATH)
debug_polygonpointlist=[556, 12, 566, 31, 583, 33, 574, 47, 581, 63, 561, 55, 537, 60, 547, 42, 529, 32, 552, 28]
debug_keysetup_scaleR=LTsv_scale_new(debug_keysetup_window,widget_x=debug_scale_X+debug_scale_W*0//3,widget_y=debug_scale_Y,widget_w=debug_scale_W//3,widget_h=debug_scale_H,widget_s=0,widget_e=255,widget_a=1,event_b=debug_color_scale)
debug_keysetup_scaleG=LTsv_scale_new(debug_keysetup_window,widget_x=debug_scale_X+debug_scale_W*1//3,widget_y=debug_scale_Y,widget_w=debug_scale_W//3,widget_h=debug_scale_H,widget_s=0,widget_e=255,widget_a=1,event_b=debug_color_scale)
debug_keysetup_scaleB=LTsv_scale_new(debug_keysetup_window,widget_x=debug_scale_X+debug_scale_W*2//3,widget_y=debug_scale_Y,widget_w=debug_scale_W//3,widget_h=debug_scale_H,widget_s=0,widget_e=255,widget_a=1,event_b=debug_color_scale)
debug_scaleRGB=""
debug_keysetup_calcentry=LTsv_entry_new(debug_keysetup_window,widget_t="2/3⇔2|3",widget_x=0,widget_y=debug_canvas_Y+debug_canvas_H,widget_w=(debug_keysetup_W-debug_scale_W)*2//3,widget_h=debug_label_WH,widget_f=debug_font_entry,event_b=debug_calc)
debug_keysetup_polygonentry=LTsv_entry_new(debug_keysetup_window,widget_t="{0}".format(debug_polygonpointlist),widget_x=(debug_keysetup_W-debug_scale_W)*2//3,widget_y=debug_canvas_Y+debug_canvas_H,widget_w=(debug_keysetup_W-debug_scale_W)*1//3-debug_keyspin_W,widget_h=debug_label_WH,widget_f=debug_font_entry,event_b=debug_polygonpoints)
debug_keysetup_polygonbutton=LTsv_button_new(debug_keysetup_window,widget_t="del",widget_x=debug_scale_X-debug_keyspin_W,widget_y=debug_canvas_Y+debug_canvas_H,widget_w=debug_keyspin_W,widget_h=debug_label_WH,widget_f=debug_font_entry,event_b=debug_polygonbutton)
debug_keysetup_activelabel=LTsv_label_new(debug_keysetup_window,widget_t="0x--------",widget_x=debug_keysetup_W-debug_scale_W,widget_y=debug_canvas_Y+debug_canvas_H,widget_w=debug_scale_W,widget_h=debug_label_WH,widget_f=debug_font_entry)
debug_keysetup_keys=["ぬ","ふ","あ","う","え","お","や","ゆ","よ","わ","ほ","へ",
"た","て","い","す","か","ん","な","に","ら","せ","゛","゜",
"ち","と","し","は","き","く","ま","の","り","れ","け","む",
"つ","さ","そ","ひ","こ","み","も","ね","る","め","ろ","¥",
"NFER"," ","XFER","KANA"]
debug_keysetup_spin,debug_keysetup_label=[None]*len(debug_keysetup_keys),[None]*len(debug_keysetup_keys)
for debug_kbdxy,debug_kbdlabel in enumerate(debug_keysetup_keys):
debug_kbdx,debug_kbd_y=(debug_kbdxy%12*debug_keyspin_W if debug_kbdlabel != "¥" else debug_keyspin_W*10+debug_keyspin_W//2),(debug_kbdxy//12*debug_keyspin_H*2 if debug_kbdlabel != "¥" else 0); debug_kbdx+=(debug_keyspin_W//2)*(debug_kbdxy//12+1 if debug_kbdxy < 48 else 9 if debug_kbdxy < 50 else 12)
debug_keysetup_spin[debug_kbdxy]=LTsv_spin_new(debug_keysetup_window,widget_x=debug_keyspin_X+debug_kbdx,widget_y=debug_keyspin_Y+debug_kbd_y,widget_w=debug_keyspin_W if debug_kbdlabel != " " else debug_keyspin_W*3,widget_h=debug_keyspin_H,widget_s=1,widget_e=255,widget_a=1,widget_f=debug_font_entry)
LTsv_widget_setnumber(debug_keysetup_spin[debug_kbdxy],LTsv_kbdgettypegana(debug_kbdlabel))
LTsv_widget_disableenable(debug_keysetup_spin[debug_kbdxy],False)
debug_kbdcodename="「{0}」({1})".format(debug_kbdlabel,LTsv_kbdgettypename(LTsv_kbdgettypegana(debug_kbdlabel))) if debug_kbdlabel != "NFER" and debug_kbdlabel != "XFER" and debug_kbdlabel != "KANA" else "「{0}」".format(debug_kbdlabel)
debug_keysetup_label[debug_kbdxy]=LTsv_label_new(debug_keysetup_window,widget_t=debug_kbdcodename,widget_x=debug_keyspin_X+debug_kbdx,widget_y=debug_keyspin_Y+debug_kbd_y-debug_keyspin_H,widget_w=debug_keyspin_W if debug_kbdlabel != " " else debug_keyspin_W*3,widget_h=debug_keyspin_H,widget_f=debug_font_entry)
debug_edit=LTsv_edit_new(debug_keysetup_window,widget_t="",widget_x=0,widget_y=debug_keysetup_H-debug_keyspin_H*4,widget_w=debug_keyspin_W*2,widget_h=debug_keyspin_H*4,widget_f=debug_font_entry)
debug_clipboard=LTsv_clipboard_new(debug_keysetup_window)
debug_clipbutton=LTsv_button_new(debug_keysetup_window,widget_t="clip",widget_x=0,widget_y=debug_keysetup_H-debug_keyspin_H*5,widget_w=debug_keyspin_W*1,widget_h=debug_keyspin_H*1,widget_f=debug_font_entry,event_b=debug_edit_clip)
debug_filedialogbutton=LTsv_button_new(debug_keysetup_window,widget_t="open",widget_x=debug_keyspin_W*2,widget_y=debug_keysetup_H-debug_keyspin_H*1,widget_w=debug_keyspin_W*1,widget_h=debug_keyspin_H*1,widget_f=debug_font_entry,event_b=debug_filedialog)
debug_filedialogwindow=LTsv_filedialog_new(debug_keysetup_window,widget_t="debug_filedialog",dialog_t=3,event_b=debug_filedialog_response)
for count,label in enumerate(["Acheck","Bcheck","Ccheck"]):
debug_check[count]=LTsv_check_new(debug_keysetup_window,widget_t=label,widget_x=debug_keysetup_W-debug_keyspin_W*(3-count),widget_y=debug_keysetup_H-debug_keyspin_H*1,widget_w=debug_keyspin_W*1,widget_h=debug_keyspin_H*1,widget_f=debug_font_entry,event_b=debug_checkbutton_shell(count))
for count,label in enumerate(["Aradio","Bradio","Cradio"]):
debug_radio[count]=LTsv_radio_new(debug_keysetup_window,widget_t=label,widget_x=debug_keysetup_W-debug_keyspin_W*(3-count),widget_y=debug_keysetup_H-debug_keyspin_H*2,widget_w=debug_keyspin_W*1,widget_h=debug_keyspin_H*1,widget_f=debug_font_entry,event_b=debug_radiobutton_shell(count))
if LTsv_GUI == LTsv_GUI_GTK2:
debug_keysetup_combobox=LTsv_combobox_new(debug_keysetup_window,widget_x=debug_combobox_X,widget_y=debug_combobox_Y,widget_w=debug_combobox_W,widget_h=debug_combobox_H,widget_f=debug_font_entry,event_b=debug_color_combo)
#/usr/share/X11/rgb.txt
#c:\Python34\Tools\pynche\X\rgb.txt
debug_Tk_colors="DarkGray,DarkBlue,DarkCyan,DarkMagenta,DarkRed,LightGreen,GhostWhite,FloralWhite,OldLace,linen," \
"PapayaWhip,BlanchedAlmond,moccasin,MintCream,AliceBlue,lavender,white,black,DimGray,LightSlateGray," \
"gray,LightGray,MidnightBlue,navy,NavyBlue,CornflowerBlue,DarkSlateBlue,MediumSlateBlue,LightSlateBlue,MediumBlue,PowderBlue," \
"DarkTurquoise,MediumTurquoise,MediumAquamarine,DarkGreen,MediumSeaGreen,LightSeaGreen,LawnGreen,MediumSpringGreen,GreenYellow,LimeGreen," \
"YellowGreen,ForestGreen,DarkKhaki,PaleGoldenrod,LightGoldenrodYellow,SaddleBrown,peru,beige,SandyBrown,DarkSalmon," \
"LightCoral,MediumVioletRed,violet,DarkViolet,BlueViolet".replace(',','\n')
debug_Tk_colors1234="snow,seashell,AntiqueWhite,bisque,PeachPuff,NavajoWhite,LemonChiffon,cornsilk,ivory,honeydew," \
"LavenderBlush,MistyRose,azure,SlateBlue,RoyalBlue,blue,DodgerBlue,SteelBlue,DeepSkyBlue,SkyBlue," \
"LightSkyBlue,SlateGray,LightSteelBlue,LightBlue,LightCyan,PaleTurquoise,CadetBlue,turquoise,cyan,DarkSlateGray," \
"aquamarine,DarkSeaGreen,SeaGreen,PaleGreen,SpringGreen,green,chartreuse,OliveDrab,DarkOliveGreen,khaki," \
"LightGoldenrod,LightYellow,yellow,gold,goldenrod,DarkGoldenrod,RosyBrown,IndianRed,sienna,burlywood," \
"wheat,tan,chocolate,firebrick,brown,salmon,LightSalmon,orange,DarkOrange,coral," \
"tomato,OrangeRed,red,DeepPink,HotPink,pink,LightPink,PaleVioletRed,maroon,VioletRed," \
"magenta,orchid,plum,MediumOrchid,DarkOrchid,purple,MediumPurple,thistle".split(',')
for debug_colors1234 in debug_Tk_colors1234: debug_Tk_colors+="".join("\n{0}{1}".format(debug_colors1234,debug_gray) for debug_gray in ["","1","2","3","4"])
debug_colordic={"IndianRed":[205,92,92]}
LTsv_combobox_list(debug_keysetup_combobox,debug_Tk_colors)
LTsv_widget_showhide(debug_keysetup_window,True)
LTsv_draw_selcanvas,LTsv_draw_delete,LTsv_draw_queue,LTsv_draw_picture=LTsv_draw_selcanvas_shell(LTsv_GUI),LTsv_draw_delete_shell(LTsv_GUI),LTsv_draw_queue_shell(LTsv_GUI),LTsv_draw_picture_shell(LTsv_GUI)
LTsv_draw_color,LTsv_draw_bgcolor,LTsv_draw_font,LTsv_draw_text=LTsv_draw_color_shell(LTsv_GUI),LTsv_draw_bgcolor_shell(LTsv_GUI),LTsv_draw_font_shell(LTsv_GUI),LTsv_draw_text_shell(LTsv_GUI)
LTsv_draw_polygon,LTsv_draw_polygonfill=LTsv_draw_polygon_shell(LTsv_GUI),LTsv_draw_polygonfill_shell(LTsv_GUI)
LTsv_draw_squares,LTsv_draw_squaresfill=LTsv_draw_squares_shell(LTsv_GUI),LTsv_draw_squaresfill_shell(LTsv_GUI)
LTsv_draw_circles,LTsv_draw_circlesfill=LTsv_draw_circles_shell(LTsv_GUI),LTsv_draw_circlesfill_shell(LTsv_GUI)
LTsv_draw_points=LTsv_draw_points_shell(LTsv_GUI)
LTsv_draw_arc,LTsv_draw_arcfill=LTsv_draw_arc_shell(LTsv_GUI),LTsv_draw_arcfill_shell(LTsv_GUI)
debug_timebutton()
debug_canvas()
debug_color_scale()
debug_activewindow()
if LTsv_GUI == LTsv_GUI_GTK2:
LTsv_widget_settext(debug_keysetup_combobox,"IndianRed")
LTsv_window_main(debug_keysetup_window)
else:
LTsv_libc_printf("LTsv_GUI,LTsv_Notify→{0},{1}".format(LTsv_GUI,LTsv_Notify))
LTsv_libc_printf("GUIの設定に失敗しました。")
print("")
print("__main__",LTsv_file_ver())
# Copyright (c) 2016 ooblog
# License: MIT
# https://github.com/ooblog/LTsv10kanedit/blob/master/LICENSE
|
ooblog/yonmoji_ge
|
LTsv/LTsv_gui.py
|
Python
|
mit
| 127,236
| 0.032237
|
# -*- coding: utf-8 -*-
#
# destiny_account documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 15 00:23:55 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from datetime import datetime
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ['SETUP_NORUN'] = '1'
import setup as setup_info # noqa
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
html_theme = 'default'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = setup_info.VERBOSE_NAME
copyright = u'%d, %s' % (datetime.now().year, setup_info.AUTHOR_NAME)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = setup_info.VERSION
# The full version, including alpha/beta/rc tags.
release = setup_info.VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "%s v%s documentation" % (setup_info.VERBOSE_NAME, setup_info.VERSION)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % setup_info.NAME
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
#
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
#
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', '%s.tex' % setup_info.NAME, u'%s Documentation' % setup_info.VERBOSE_NAME,
setup_info.AUTHOR_NAME, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', setup_info.NAME, u'%s Documentation' % setup_info.VERBOSE_NAME,
[setup_info.AUTHOR_NAME], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', setup_info.NAME, u'%s Documentation' % setup_info.VERBOSE_NAME,
setup_info.AUTHOR_NAME, setup_info.NAME, 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
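# Named-target form of the same mapping (hedged: only applicable on Sphinx
# versions that support named intersphinx keys; kept commented out so the
# line above remains in effect).
#intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}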
|
Xaroth/plex-export
|
docs/conf.py
|
Python
|
mit
| 9,091
| 0.00517
|
from nagm.engine.attack import Attack
from .types import *
from .defs import precision, stat, heal, offensive, faux_chage_effect
prec = precision(prec=0.9)
mimi_queue = Attack(name='Mimi-queue', type=normal, effects=(prec, stat(stat='dfse', value=-1),))
charge = Attack(name='Charge', type=normal, effects=(prec, offensive(force=10),))
griffe = Attack(name='Griffe', type=normal, effects=(prec, offensive(force=10),))
fouet_lianes = Attack(name='Fouet lianes', type=plante, effects=(prec, offensive(force=20),))
flameche = Attack(name='Flamèche', type=feu, effects=(prec, offensive(force=20),))
pistolet_a_o = Attack(name='Pistolet à o', type=eau, effects=(prec, offensive(force=20),))
eclair = Attack(name='Éclair', type=electrik, effects=(prec, offensive(force=20),))
soin = Attack(name='Soin', type=normal, effects=(prec, heal(heal=50),), reflexive=True)
abime = Attack(name='Abîme', type=normal, effects=(precision(prec=0.1), offensive(force=1000),))
faux_chage = Attack(name='Faux-chage', type=normal, effects=(prec, faux_chage_effect,))
|
entwanne/NAGM
|
games/test_game/attacks.py
|
Python
|
bsd-3-clause
| 1,048
| 0.009579
|
# ScummVM - Graphic Adventure Engine
#
# ScummVM is the legal property of its developers, whose names
# are too numerous to list here. Please refer to the COPYRIGHT
# file distributed with this source distribution.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import os, re
from proc import proc
import lex
import op
class parser:
def __init__(self):
self.strip_path = 0
self.__globals = {}
self.__offsets = {}
self.__stack = []
self.proc = None
self.proc_list = []
self.binary_data = []
self.symbols = []
self.link_later = []
def visible(self):
for i in self.__stack:
if not i or i == 0:
return False
return True
def push_if(self, text):
value = self.eval(text)
#print "if %s -> %s" %(text, value)
self.__stack.append(value)
def push_else(self):
#print "else"
self.__stack[-1] = not self.__stack[-1]
def pop_if(self):
#print "endif"
return self.__stack.pop()
def set_global(self, name, value):
if len(name) == 0:
raise Exception("empty name is not allowed")
name = name.lower()
#print "adding global %s -> %s" %(name, value)
if self.__globals.has_key(name):
raise Exception("global %s was already defined", name)
self.__globals[name] = value
def get_global(self, name):
name = name.lower()
g = self.__globals[name]
g.used = True
return g
def get_globals(self):
return self.__globals
def has_global(self, name):
name = name.lower()
return self.__globals.has_key(name)
def set_offset(self, name, value):
if len(name) == 0:
raise Exception("empty name is not allowed")
name = name.lower()
#print "adding global %s -> %s" %(name, value)
if self.__offsets.has_key(name):
raise Exception("global %s was already defined", name)
self.__offsets[name] = value
def get_offset(self, name):
name = name.lower()
return self.__offsets[name]
def include(self, basedir, fname):
path = fname.split('\\')[self.strip_path:]
path = os.path.join(basedir, os.path.pathsep.join(path))
#print "including %s" %(path)
self.parse(path)
def eval(self, stmt):
try:
return self.parse_int(stmt)
except:
pass
value = self.__globals[stmt.lower()].value
return int(value)
def expr_callback(self, match):
name = match.group(1).lower()
g = self.get_global(name)
if isinstance(g, op.const):
return g.value
else:
return "0x%04x" %g.offset
def eval_expr(self, expr):
n = 1
while n > 0:
expr, n = re.subn(r'\b([a-zA-Z_]+[a-zA-Z0-9_]*)', self.expr_callback, expr)
return eval(expr)
def expand_globals(self, text):
return text
def fix_dollar(self, v):
print("$ = %d" %len(self.binary_data))
return re.sub(r'\$', "%d" %len(self.binary_data), v)
def parse_int(self, v):
if re.match(r'[01]+b$', v):
v = int(v[:-1], 2)
if re.match(r'[\+-]?[0-9a-f]+h$', v):
v = int(v[:-1], 16)
return int(v)
def compact_data(self, width, data):
#print "COMPACTING %d %s" %(width, data)
r = []
base = 0x100 if width == 1 else 0x10000
for v in data:
if v[0] == '"':
if v[-1] != '"':
raise Exception("invalid string %s" %v)
if width == 2:
raise Exception("string with data width more than 1") #we could allow it :)
for i in xrange(1, len(v) - 1):
r.append(ord(v[i]))
continue
m = re.match(r'(\w+)\s+dup\s+\((\s*\S+\s*)\)', v)
if m is not None:
#we should parse that
n = self.parse_int(m.group(1))
if m.group(2) != '?':
value = self.parse_int(m.group(2))
else:
value = 0
for i in xrange(0, n):
v = value
for b in xrange(0, width):
r.append(v & 0xff);
v >>= 8
continue
try:
v = self.parse_int(v)
if v < 0:
v += base
except:
#global name
print "global/expr: %s" %v
try:
g = self.get_global(v)
v = g.offset
except:
print "unknown address %s" %(v)
self.link_later.append((len(self.binary_data) + len(r), v))
v = 0
for b in xrange(0, width):
r.append(v & 0xff);
v >>= 8
#print r
return r
def parse(self, fname):
# print "opening file %s..." %(fname, basedir)
fd = open(fname, 'rb')
for line in fd:
line = line.strip()
if len(line) == 0 or line[0] == ';' or line[0] == chr(0x1a):
continue
#print line
m = re.match('(\w+)\s*?:', line)
if m is not None:
line = line[len(m.group(0)):].strip()
if self.visible():
name = m.group(1)
if self.proc is not None:
self.proc.add_label(name)
print "offset %s -> %d" %(name, len(self.binary_data))
self.set_offset(name, (len(self.binary_data), self.proc, len(self.proc.stmts) if self.proc is not None else 0))
#print line
cmd = line.split()
if len(cmd) == 0:
continue
cmd0 = str(cmd[0])
if cmd0 == 'if':
self.push_if(cmd[1])
continue
elif cmd0 == 'else':
self.push_else()
continue
elif cmd0 == 'endif':
self.pop_if()
continue
if not self.visible():
continue
if cmd0 == 'db' or cmd0 == 'dw' or cmd0 == 'dd':
arg = line[len(cmd0):].strip()
print "%d:1: %s" %(len(self.binary_data), arg) #fixme: COPYPASTE
binary_width = {'b': 1, 'w': 2, 'd': 4}[cmd0[1]]
self.binary_data += self.compact_data(binary_width, lex.parse_args(arg))
continue
elif cmd0 == 'include':
self.include(os.path.dirname(fname), cmd[1])
continue
elif cmd0 == 'endp':
self.proc = None
continue
elif cmd0 == 'assume':
print "skipping: %s" %line
continue
elif cmd0 == 'rep':
self.proc.add(cmd0)
self.proc.add(" ".join(cmd[1:]))
continue
if len(cmd) >= 3:
cmd1 = cmd[1]
if cmd1 == 'equ':
v = cmd[2]
self.set_global(cmd0, op.const(self.fix_dollar(v)))
elif cmd1 == 'db' or cmd1 == 'dw' or cmd1 == 'dd':
binary_width = {'b': 1, 'w': 2, 'd': 4}[cmd1[1]]
offset = len(self.binary_data)
arg = line[len(cmd0):].strip()
arg = arg[len(cmd1):].strip()
print "%d: %s" %(offset, arg)
self.binary_data += self.compact_data(binary_width, lex.parse_args(arg))
self.set_global(cmd0.lower(), op.var(binary_width, offset))
continue
elif cmd1 == 'proc':
name = cmd0.lower()
self.proc = proc(name)
print "procedure %s, #%d" %(name, len(self.proc_list))
self.proc_list.append(name)
self.set_global(name, self.proc)
continue
if (self.proc):
self.proc.add(line)
else:
#print line
pass
fd.close()
return self
def link(self):
for addr, expr in self.link_later:
v = self.eval_expr(expr)
print "link: patching %04x -> %04x" %(addr, v)
while v != 0:
self.binary_data[addr] = v & 0xff
addr += 1
v >>= 8
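# Minimal usage sketch (illustrative only; 'dreamweb.asm' is a placeholder
# file name, not one taken from this tree):
#
#   p = parser()
#   p.parse('dreamweb.asm')   # collect globals, offsets, data and proc bodies
#   p.link()                  # patch the addresses queued in link_later
#   print "%d bytes of data, %d procs" % (len(p.binary_data), len(p.proc_list))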
|
chrisws/scummvm
|
devtools/tasmrecover/tasm/parser.py
|
Python
|
gpl-2.0
| 7,312
| 0.040618
|
# -*- coding: utf-8 -*-
from django.db import models
from datetime import datetime
class Place(models.Model):
"""
Holder object for basic info about the rooms
in the university.
"""
room_place = models.CharField(max_length=255)
floor = models.IntegerField()
def __unicode__(self):
return self.room_place
class HierarchyUnit(models.Model):
PROGRAM = 'PR'
YEAR = 'YR'
GROUP = 'GR'
TYPES = (
(PROGRAM, u"Специалност"),
(YEAR, u"Курс"),
(GROUP, u"Група"),
)
type_value = models.CharField(max_length=255, choices=TYPES)
value = models.CharField(max_length=255)
parent = models.ForeignKey("schedule.HierarchyUnit", null=True, blank=True, default=None)
def get_all_info_for_parents(self):
if self.type_value == 'PR':
return self.value
if self.type_value == 'YR':
return ', '.join([self.parent.value, self.value+u' курс'])
else:
return ', '.join([self.parent.parent.value, self.parent.value+u' курс', self.value+u' група'])
def get_all_childs(self):
return HierarchyUnit.objects.filter(parent=self)
def __unicode__(self):
return self.get_all_info_for_parents()
class Block(models.Model):
"""
Group representing a set of optional subjects.
Example: Core of Computer Science.
"""
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class Subject(models.Model):
"""
Representation of all subjects.
Example: Calculus 1.
"""
MANDATORY = 'MN'
OPTIONAL = 'OP'
TYPES = (
(MANDATORY, u"Задължителен"),
(OPTIONAL, u"Избираем"),
)
type_value = models.CharField(max_length=255, choices=TYPES)
name = models.CharField(max_length=255)
block = models.ForeignKey(Block, null=True, blank=True, default=None)
year = models.ForeignKey(HierarchyUnit, null=True, blank=True, default=None, limit_choices_to={'type_value': HierarchyUnit.YEAR})
def get_year_value(self):
return ', '.join([self.year.parent.value, self.year.value+u' курс'])
def __unicode__(self):
return self.name
class Department(models.Model):
"""
Group representing a set of lecturers
grouped by field of teaching.
"""
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class Teacher(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255)
email = models.CharField(max_length=255)
full_name = models.CharField(max_length=255)
position = models.CharField(max_length=255)
subjects = models.ManyToManyField(Subject, null=True, blank=True, default=None)
department = models.ForeignKey(Department, null=True, blank=True, default=None)
def __unicode__(self):
return self.name
class Event(models.Model):
WEEKLY = 'WKL'
TYPES = (
(WEEKLY, u'Седмично'),
)
type_value = models.CharField(max_length=255, null=True, blank=True, default=None)
inserted = models.DateField(default=datetime.now) # pass the callable, not datetime.now(), so the default is evaluated per row
name = models.CharField(max_length=255)
place = models.ForeignKey(Place, blank=True, default=None, null=True)
date_start = models.DateTimeField()
date_end = models.DateTimeField(default=datetime.now)
repeatable = models.BooleanField()
duratation = models.IntegerField()
subject = models.ForeignKey(Subject, blank=True, default=None, null=True)
teacher = models.ForeignKey(Teacher, blank=True, default=None, null=True)
def __unicode__(self):
return self.name
class Student(models.Model):
PROGRAM = (('BK', 'Бакалавър'),('MG', 'Магистър'))
name = models.CharField(max_length=255)
program = models.CharField(max_length=255,choices=PROGRAM, blank=True, default=PROGRAM[0][0])
fac_number = models.CharField(max_length=255)
email = models.CharField(max_length=255)
group = models.ForeignKey(HierarchyUnit, limit_choices_to={'type_value': HierarchyUnit.GROUP}, blank=True, default=None, null=True)
events = models.ManyToManyField(Event, blank=True, default=None, null=True)
def __unicode__(self):
return self.name
class Comment(models.Model):
from_user = models.ForeignKey(Student, blank=True, default=None, null=True)
event = models.ForeignKey(Event, blank=True, default=None, null=True)
start_date = models.DateField()
end_date = models.DateField()
dtstamp = models.DateField(default=datetime.now)
desc = models.TextField()
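# Minimal usage sketch (illustrative only; the values below are made up):
#
#   program = HierarchyUnit.objects.create(type_value=HierarchyUnit.PROGRAM, value=u'Информатика')
#   year = HierarchyUnit.objects.create(type_value=HierarchyUnit.YEAR, value=u'2', parent=program)
#   group = HierarchyUnit.objects.create(type_value=HierarchyUnit.GROUP, value=u'5', parent=year)
#   unicode(group) # -> u'Информатика, 2 курс, 5 група'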
|
DeltaEpsilon-HackFMI2/FMICalendar-REST
|
schedule/models.py
|
Python
|
mit
| 4,666
| 0.00349
|
from django import forms
from models import FormDataGroup
import re
# On this page, users can upload an xsd file from their laptop
# Then they get redirected to a page where they can download the xsd
class RegisterXForm(forms.Form):
file = forms.FileField()
form_display_name= forms.CharField(max_length=128, label=u'Form Display Name')
class SubmitDataForm(forms.Form):
file = forms.FileField()
class FormDataGroupForm(forms.ModelForm):
"""Form for basic form group data"""
display_name = forms.CharField(widget=forms.TextInput(attrs={'size':'80'}))
view_name = forms.CharField(widget=forms.TextInput(attrs={'size':'40'}))
def clean_view_name(self):
view_name = self.cleaned_data["view_name"]
if not re.match(r"^\w+$", view_name):
raise forms.ValidationError("View name can only contain numbers, letters, and underscores!")
# check that the view name is unique... if it was changed.
if self.instance.id:
if FormDataGroup.objects.get(id=self.instance.id).view_name != view_name and \
FormDataGroup.objects.filter(view_name=view_name).count() > 0:
raise forms.ValidationError("Sorry, view name %s is already in use! Please pick a new one." % view_name)
return self.cleaned_data["view_name"]
class Meta:
model = FormDataGroup
fields = ("display_name", "view_name")
|
commtrack/commtrack-core
|
apps/xformmanager/forms.py
|
Python
|
bsd-3-clause
| 1,428
| 0.009804
|
################################# LICENSE ##################################
# Copyright (c) 2009, South African Astronomical Observatory (SAAO) #
# All rights reserved. #
# #
############################################################################
#!/usr/bin/env python
"""
ImageDisplay--Class for displaying and interacting with ds9
Author Version Date
-----------------------------------------------
S M Crawford (SAAO) 0.1 19 Jun 2011
"""
import os
import pyds9 as ds9
class ImageDisplay:
def __init__(self, target='ImageDisplay:*'):
self.ds9 = ds9.ds9()
def display(self, filename, pa=None):
cmd='file %s' % filename
self.ds9.set(cmd)
self.ds9.set('zscale')
self.ds9.set('match frames wcs')
# print pa
if pa:
self.ds9.set('rotate to %f' % pa)
else:
self.ds9.set('rotate to %f' % 0)
def regions(self, rgnstr):
cmd = 'regions %s' % rgnstr
self.ds9.set(cmd)
def rssregion(self, ra, dec):
"""Plot the FOV for RSS"""
# cmd='color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=0 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\nfk5\ncircle(%s, %s, 4\')' % (ra, dec)
# cmd='fk5\ncircle(%s, %s, 4\') # color=yellow background dashlist=8 3 width=1 font="helvetica 10 normal roman" select=0 highlite=1 dash=0 fixed=0 edit=0 move=0 delete=0 include=1 source=1\n' % (ra, dec)
# ds9.set(cmd)
self.ds9.set('regions', 'fk5; circle(%f,%f,4\') # color=yellow background dashlist=8 3 width=3 font="helvetica 10 normal roman" select=0 highlite=1 dash=0 fixed=0 edit=0 move=0 delete=0 include=1 source=1'%(ra, dec))
def rotate(self, angle):
"""Rotate the image"""
self.ds9.set('rotate to %f' % angle)
def regionfromfile(self, regfile, d=None, rformat='ds9'):
cmd='regions %s -format %s' % (regfile, rformat)
self.ds9.set(cmd)
def deleteregions(self):
"""Delete all regions in the frame"""
cmd='regions delete all'
self.ds9.set(cmd)
def getregions(self):
"""Return a list of regions"""
rgnstr=self.ds9.get('regions -system fk5')
i = 0
newslits = {}
#print rgnstr
for l in rgnstr.split('\n'):
tags = ''
# work out how to use tags and just deal with "slit" tags
if l.startswith('box'):
#first look for tags
l = l[4:].split('#')
if len(l) > 1:
tags = l[-1]
l = l[0][:-2].split(',')
newslits[i] = [l, tags]
i += 1
elif l.startswith('circle'):
l = l[7:].split('#')
#print l
if len(l) > 1:
tags=l
l = l[0][:-2].split(',')
newslits[i] = [l, tags]
i += 1
return newslits
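# Minimal usage sketch (illustrative only; the FITS name and coordinates are
# placeholders and assume a reachable ds9 instance):
#
#   disp = ImageDisplay()
#   disp.display('image.fits', pa=45.0)  # load frame, zscale, match WCS, rotate
#   disp.rssregion(201.365, -47.479)     # overlay the 4' RSS field of view
#   slits = disp.getregions()            # {index: [[params...], tags]}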
|
saltastro/pysalt
|
proptools/ImageDisplay.py
|
Python
|
bsd-3-clause
| 3,112
| 0.008355
|
# -*- coding: utf-8 -*-
# privacyIDEA is a fork of LinOTP
#
# 2014-12-07 Cornelius Kölbel <cornelius@privacyidea.org>
#
# Copyright (C) 2014 Cornelius Kölbel
# License: AGPLv3
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
This is a helper module for the challenges database table.
It is used by the lib.tokenclass
The method is tested in test_lib_challenges
"""
import logging
import six
from .log import log_with
from ..models import Challenge
from privacyidea.lib.error import ParameterError
log = logging.getLogger(__name__)
@log_with(log)
def get_challenges(serial=None, transaction_id=None, challenge=None):
"""
This returns a list of database challenge objects.
:param serial: challenges for this very serial number
:param transaction_id: challenges with this very transaction id
:param challenge: The challenge to be found
:return: list of objects
"""
sql_query = Challenge.query
if serial is not None:
# filter for serial
sql_query = sql_query.filter(Challenge.serial == serial)
if transaction_id is not None:
# filter for transaction id
sql_query = sql_query.filter(Challenge.transaction_id ==
transaction_id)
if challenge is not None:
# filter for this challenge
sql_query = sql_query.filter(Challenge.challenge == challenge)
challenges = sql_query.all()
return challenges
@log_with(log)
def get_challenges_paginate(serial=None, transaction_id=None,
sortby=Challenge.timestamp,
sortdir="asc", psize=15, page=1):
"""
This function is used to retrieve a challenge list, that can be displayed in
the Web UI. It supports pagination.
Each retrieved page will also contain a "next" and a "prev", indicating
the next or previous page. If either does not exist, it is None.
:param serial: The serial of the token
:param transaction_id: The transaction_id of the challenge
:param sortby: Sort by a Challenge DB field. The default is
Challenge.timestamp.
:type sortby: A Challenge column or a string.
:param sortdir: Can be "asc" (default) or "desc"
:type sortdir: basestring
:param psize: The size of the page
:type psize: int
:param page: The number of the page to view. Starts with 1 ;-)
:type page: int
:return: dict with challenges, prev, next and count
:rtype: dict
"""
sql_query = _create_challenge_query(serial=serial,
transaction_id=transaction_id)
if isinstance(sortby, six.string_types):
# convert the string to a Challenge column
cols = Challenge.__table__.columns
sortby = cols.get(sortby)
if sortdir == "desc":
sql_query = sql_query.order_by(sortby.desc())
else:
sql_query = sql_query.order_by(sortby.asc())
pagination = sql_query.paginate(page, per_page=psize,
error_out=False)
challenges = pagination.items
prev = None
if pagination.has_prev:
prev = page-1
next = None
if pagination.has_next:
next = page + 1
challenge_list = []
for challenge in challenges:
challenge_dict = challenge.get()
challenge_list.append(challenge_dict)
ret = {"challenges": challenge_list,
"prev": prev,
"next": next,
"current": page,
"count": pagination.total}
return ret
def _create_challenge_query(serial=None, transaction_id=None):
"""
This function create the sql query for fetching transaction_ids. It is
used by get_challenge_paginate.
:return: An SQLAlchemy sql query
"""
sql_query = Challenge.query
if serial is not None and serial.strip("*"):
# filter for serial
if "*" in serial:
# match with "like"
sql_query = sql_query.filter(Challenge.serial.like(serial.replace(
"*", "%")))
else:
# exact match
sql_query = sql_query.filter(Challenge.serial == serial)
if transaction_id is not None and transaction_id.strip("*"):
# filter for serial
if "*" in transaction_id:
# match with "like"
sql_query = sql_query.filter(Challenge.transaction_id.like(
transaction_id.replace(
"*", "%")))
else:
# exact match
sql_query = sql_query.filter(Challenge.transaction_id == transaction_id)
return sql_query
def extract_answered_challenges(challenges):
"""
Given a list of challenge objects, extract and return a list of *answered* challenge.
A challenge is answered if it is not expired yet *and* if its ``otp_valid`` attribute
is set to True.
:param challenges: a list of challenge objects
:return: a list of answered challenge objects
"""
answered_challenges = []
for challenge in challenges:
# check if we are still in time.
if challenge.is_valid():
_, status = challenge.get_otp_status()
if status is True:
answered_challenges.append(challenge)
return answered_challenges
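# Minimal usage sketch (illustrative only; the serial is a placeholder):
#
#   challenges = get_challenges(serial="OATH0001")
#   answered = extract_answered_challenges(challenges)
#   page = get_challenges_paginate(serial="OATH0001", sortdir="desc",
#                                  psize=10, page=1)
#   page["count"], page["prev"], page["next"]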
|
privacyidea/privacyidea
|
privacyidea/lib/challenge.py
|
Python
|
agpl-3.0
| 5,837
| 0.000857
|
'''
Created on Feb 26, 2014
@author: dstuart
'''
import LevelClass as L
import Util as U
class Region(object):
def __init__(self, **kwargs):
self.mapTiles = set()
self.name = None
self.worldMap = None
# TODO:
# worldMapId = Column(Integer, ForeignKey("levels.id"))
def addTile(self, tile):
self.mapTiles.add(tile)
tile.setRegion(self)
def replaceTile(self, oldtile, newtile):
assert oldtile.getXY() == newtile.getXY()
self.mapTiles.remove(oldtile)
self.addTile(newtile)
def getTileType(self):
return self.tileType
class WorldMap(L.MapBase):
def __init__(self, **kwargs):
super(WorldMap, self).__init__(**kwargs)
self.name = None
self.mapTiles = set()
self.regions = set()
self.num_regions = kwargs['num_regions']
self.creatures = set()
# Initialize self.hasTile
self.hasTile = []
for dummyx in range(self.width):
newCol = []
for dummyy in range(self.height):
newCol.append(False)
self.hasTile.append(newCol)
def load(self):
pass
def getMapTiles(self):
return self.mapTiles
def addTile(self, tile):
self.mapTiles.add(tile)
self.hasTile[tile.getX()][tile.getY()] = True
if tile.getLevel() is not self:
tile.setLevel(self)
def replaceTile(self, newtile):
oldtile = self.getTile(newtile.getX(), newtile.getY())
assert oldtile.getXY() == newtile.getXY()
reg = oldtile.getRegion()
if reg:
reg.replaceTile(oldtile, newtile)
oldnumtiles = len(self.mapTiles)
self.mapTiles.remove(oldtile)
oldtile.remove()
self.addTile(newtile)
self.tileArray[newtile.getX()][newtile.getY()] = newtile
newnumtiles = len(self.mapTiles)
assert newnumtiles == oldnumtiles
def buildTileArray(self):
self.tileArray = []
# Initialize
for dummyx in range(self.width):
newCol = []
for dummyy in range(self.height):
newCol.append(None)
self.tileArray.append(newCol)
# Fill in
for tile in self.mapTiles:
assert tile is not None
self.tileArray[tile.x][tile.y] = tile
def getTile(self, x, y):
if not self.__dict__.get('tileArray'):
# print "self.tileArray not initialized!"
self.buildTileArray()
if x >= 0 and x < self.width and y >= 0 and y < self.height:
return self.tileArray[x][y]
return None
def getRegions(self):
return self.regions
def addRegion(self, reg):
self.regions.add(reg)
def distance(self, tilea, tileb):
return self.coordinateDistance(tilea.getX(), tileb.getX(), tilea.getY(), tileb.getY())
def coordinateDistance(self, xa, xb, ya, yb):
return U.ChebyshevDistance(xa, xb, ya, yb)
def getTilesInRadius(self, radius, centerX, centerY, tileClass=None):
assert radius >= 0 and radius == int(radius)
tiles = []
for rad in range(0, radius + 1):
tiles += self.getTilesAtRadius(rad, centerX, centerY, tileClass)
return tiles
def getTilesInRange(self, rmin, rmax, centerX, centerY, tileClass=None):
assert rmin <= rmax and rmin > 0
tiles = []
for rad in range(rmin, rmax + 1):
tiles += self.getTilesAtRadius(rad, centerX, centerY, tileClass)
return tiles
def getNearestTile(self, fromTile, tileClass):
import Game as G
random = G.getRandom()
centerX, centerY = fromTile.getXY()
radius = 1
while True:
matches = self.getTilesAtRadius(radius, centerX, centerY, tileClass)
if not matches:
radius += 1
continue
return random.choice(matches)
def getTilesAtRadius(self, radius, centerX, centerY, tileClass=None):
assert radius >= 0 and radius == int(radius)
centerTile = self.getTile(centerX, centerY)
tiles = []
if radius == 0:
return [centerTile]
x1 = max(0, centerX - radius)
y1 = max(0, centerY - radius)
x2 = min(centerX + radius, self.width)
y2 = min(centerY + radius, self.height)
for x in range(x1, x2 + 1):
tile1 = self.getTile(x, y1)
tile2 = self.getTile(x, y2)
if tile1 and (tileClass is None or isinstance(tile1, tileClass)): tiles.append(tile1)
if tile2 and (tileClass is None or isinstance(tile2, tileClass)): tiles.append(tile2)
for y in range(y1 + 1, y2):
tile1 = self.getTile(x1, y)
tile2 = self.getTile(x2, y)
if tile1 and (tileClass is None or isinstance(tile1, tileClass)): tiles.append(tile1)
if tile2 and (tileClass is None or isinstance(tile2, tileClass)): tiles.append(tile2)
return tiles
def getTilesToDraw(self, dummyx, dummyy, cameradims, visibility = True):
retArray = []
camx, camy, camwidth, camheight = cameradims
for tile in self.mapTiles:
if tile:
x = tile.x
y = tile.y
# Is the tile in the camera's range?
if (x < camx or x >= camx + camwidth or y < camy or y >= camy + camheight):
continue
symbol = tile.getSymbol()
color = tile.getColor()
background = tile.getBackgroundColor()
# Good lord, what made me think this was a good idea?
# symbol = symbol.encode('ascii', 'ignore')
retArray.append((x, y, symbol, color, background))
return retArray
def getAdjacentTiles(self, fromTile):
# return self.getTilesAtRadius(1, fromTile.getX(), fromTile.getY())
tiles = []
x, y = fromTile.getXY()
for i in (-1, 0, 1):
for j in (-1, 0, 1):
if not (i == 0 and j == 0):
tile = self.getTile(x + i, y + j)
if tile: tiles.append(tile)
return tiles
def handleRemovedCreature(self, tile, creature):
pass
def handleAddedCreature(self, tile, creature):
pass
def buildMap(self):
raise NotImplementedError("buildMap() not implemented, use a subclass")
|
drestuart/delvelib
|
src/world/WorldMapClass.py
|
Python
|
lgpl-3.0
| 6,833
| 0.009074
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""LSTM Block Cell ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.contrib.rnn.ops import gen_lstm_ops
from tensorflow.contrib.util import loader
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.layers import base as base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.platform import resource_loader
_lstm_ops_so = loader.load_op_library(
resource_loader.get_path_to_datafile("_lstm_ops.so"))
LayerRNNCell = rnn_cell_impl._LayerRNNCell # pylint: disable=invalid-name,protected-access
# pylint: disable=invalid-name
def _lstm_block_cell(x,
cs_prev,
h_prev,
w,
b,
wci=None,
wcf=None,
wco=None,
forget_bias=None,
cell_clip=None,
use_peephole=None,
name=None):
r"""Computes the LSTM cell forward propagation for 1 time step.
This implementation uses 1 weight matrix and 1 bias vector, and there's an
optional peephole connection.
This kernel op implements the following mathematical equations:
```python
xh = [x, h_prev]
[i, ci, f, o] = xh * w + b
f = f + forget_bias
if not use_peephole:
wci = wcf = wco = 0
i = sigmoid(cs_prev * wci + i)
f = sigmoid(cs_prev * wcf + f)
ci = tanh(ci)
cs = ci .* i + cs_prev .* f
cs = clip(cs, cell_clip)
o = sigmoid(cs * wco + o)
co = tanh(cs)
h = co .* o
```
Args:
x: A `Tensor`. Must be one of the following types: `float32`.
The input to the LSTM cell, shape (batch_size, num_inputs).
cs_prev: A `Tensor`. Must have the same type as `x`.
Value of the cell state at previous time step.
h_prev: A `Tensor`. Must have the same type as `x`.
Output of the previous cell at previous time step.
w: A `Tensor`. Must have the same type as `x`. The weight matrix.
b: A `Tensor`. Must have the same type as `x`. The bias vector.
wci: A `Tensor`. Must have the same type as `x`.
The weight matrix for input gate peephole connection.
wcf: A `Tensor`. Must have the same type as `x`.
The weight matrix for forget gate peephole connection.
wco: A `Tensor`. Must have the same type as `x`.
The weight matrix for output gate peephole connection.
forget_bias: An optional `float`. Defaults to `1`. The forget gate bias.
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
Value to clip the 'cs' value to. Disable by setting to negative value.
use_peephole: An optional `bool`. Defaults to `False`.
Whether to use peephole weights.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
i: A `Tensor`. Has the same type as `x`. The input gate.
cs: A `Tensor`. Has the same type as `x`. The cell state before the tanh.
f: A `Tensor`. Has the same type as `x`. The forget gate.
o: A `Tensor`. Has the same type as `x`. The output gate.
ci: A `Tensor`. Has the same type as `x`. The cell input.
co: A `Tensor`. Has the same type as `x`. The cell after the tanh.
h: A `Tensor`. Has the same type as `x`. The output h vector.
Raises:
ValueError: If cell_size is None.
"""
if wci is None:
cell_size = cs_prev.get_shape().with_rank(2)[1].value
if cell_size is None:
raise ValueError("cell_size from `cs_prev` should not be None.")
wci = array_ops.constant(0, dtype=dtypes.float32, shape=[cell_size])
wcf = wci
wco = wci
# pylint: disable=protected-access
return gen_lstm_ops.lstm_block_cell(
x=x,
cs_prev=cs_prev,
h_prev=h_prev,
w=w,
wci=wci,
wcf=wcf,
wco=wco,
b=b,
forget_bias=forget_bias,
cell_clip=cell_clip if cell_clip is not None else -1,
use_peephole=use_peephole,
name=name)
# pylint: enable=protected-access
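# Minimal NumPy transcription of the docstring equations above (illustrative
# only; peephole and clipping terms dropped, not a substitute for the fused kernel):
#
#   import numpy as np
#   def _reference_lstm_step(x, cs_prev, h_prev, w, b, forget_bias=1.0):
#       sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
#       xh = np.concatenate([x, h_prev], axis=1)
#       i, ci, f, o = np.split(np.dot(xh, w) + b, 4, axis=1)
#       cs = np.tanh(ci) * sigmoid(i) + cs_prev * sigmoid(f + forget_bias)
#       h = np.tanh(cs) * sigmoid(o)
#       return cs, h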
def _block_lstm(seq_len_max,
x,
w,
b,
cs_prev=None,
h_prev=None,
wci=None,
wcf=None,
wco=None,
forget_bias=None,
cell_clip=None,
use_peephole=None,
name=None):
r"""TODO(williamchan): add doc.
Args:
seq_len_max: A `Tensor` of type `int64`.
x: A list of at least 1 `Tensor` objects of the same type in: `float32`.
w: A `Tensor`. Must have the same type as `x`.
b: A `Tensor`. Must have the same type as `x`.
cs_prev: A `Tensor`. Must have the same type as `x`.
h_prev: A `Tensor`. Must have the same type as `x`.
wci: A `Tensor`. Must have the same type as `x`.
wcf: A `Tensor`. Must have the same type as `x`.
wco: A `Tensor`. Must have the same type as `x`.
forget_bias: An optional `float`. Defaults to `1`.
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
use_peephole: An optional `bool`. Defaults to `False`.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
i: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
cs: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
f: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
o: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
ci: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
co: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
h: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
Raises:
ValueError: If `b` does not have a valid shape.
"""
batch_size = x[0].get_shape().with_rank(2)[0].value
cell_size4 = b.get_shape().with_rank(1)[0].value
if cell_size4 is None:
raise ValueError("`b` shape must not be None.")
cell_size = cell_size4 / 4
zero_state = None
if cs_prev is None or h_prev is None:
zero_state = array_ops.constant(
0, dtype=dtypes.float32, shape=[batch_size, cell_size])
if cs_prev is None:
cs_prev = zero_state
if h_prev is None:
h_prev = zero_state
if wci is None:
wci = array_ops.constant(0, dtype=dtypes.float32, shape=[cell_size])
wcf = wci
wco = wci
# pylint: disable=protected-access
i, cs, f, o, ci, co, h = gen_lstm_ops.block_lstm(
seq_len_max=seq_len_max,
x=array_ops.stack(x),
cs_prev=cs_prev,
h_prev=h_prev,
w=w,
wci=wci,
wcf=wcf,
wco=wco,
b=b,
forget_bias=forget_bias,
cell_clip=cell_clip if cell_clip is not None else -1,
name=name,
use_peephole=use_peephole)
return array_ops.unstack(i), array_ops.unstack(cs), array_ops.unstack(
f), array_ops.unstack(o), array_ops.unstack(ci), array_ops.unstack(
co), array_ops.unstack(h)
# pylint: enable=protected-access
# pylint: enable=invalid-name
_lstm_block_cell_grad_outputs = ["cs_prev_grad", "dicfo"]
@ops.RegisterGradient("LSTMBlockCell")
def _LSTMBlockCellGrad(op, *grad):
"""Gradient for LSTMBlockCell."""
(x, cs_prev, h_prev, w, wci, wcf, wco, b) = op.inputs
(i, cs, f, o, ci, co, _) = op.outputs
(_, cs_grad, _, _, _, _, h_grad) = grad
batch_size = x.get_shape().with_rank(2)[0].value
if batch_size is None:
batch_size = -1
input_size = x.get_shape().with_rank(2)[1].value
if input_size is None:
raise ValueError("input_size from `x` should not be None.")
cell_size = cs_prev.get_shape().with_rank(2)[1].value
if cell_size is None:
raise ValueError("cell_size from `cs_prev` should not be None.")
(cs_prev_grad, dicfo, wci_grad, wcf_grad,
wco_grad) = gen_lstm_ops.lstm_block_cell_grad(
x,
cs_prev,
h_prev,
w,
wci,
wcf,
wco,
b,
i,
cs,
f,
o,
ci,
co,
cs_grad,
h_grad,
use_peephole=op.get_attr("use_peephole"))
# Backprop from dicfo to xh.
xh_grad = math_ops.matmul(dicfo, w, transpose_b=True)
x_grad = array_ops.slice(xh_grad, (0, 0), (batch_size, input_size))
x_grad.get_shape().merge_with(x.get_shape())
h_prev_grad = array_ops.slice(xh_grad, (0, input_size),
(batch_size, cell_size))
h_prev_grad.get_shape().merge_with(h_prev.get_shape())
# Backprop from dicfo to w.
xh = array_ops.concat([x, h_prev], 1)
w_grad = math_ops.matmul(xh, dicfo, transpose_a=True)
w_grad.get_shape().merge_with(w.get_shape())
# Backprop from dicfo to b.
b_grad = nn_ops.bias_add_grad(dicfo)
b_grad.get_shape().merge_with(b.get_shape())
return (x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad,
wco_grad, b_grad)
@ops.RegisterGradient("BlockLSTM")
def _BlockLSTMGrad(op, *grad):
"""Gradient for BlockLSTM."""
seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b = op.inputs
i, cs, f, o, ci, co, h = op.outputs
cs_grad = grad[1]
h_grad = grad[6]
(x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad, wco_grad,
b_grad) = gen_lstm_ops.block_lstm_grad(
seq_len_max,
x,
cs_prev,
h_prev,
w,
wci,
wcf,
wco,
b,
i,
cs,
f,
o,
ci,
co,
h,
cs_grad,
h_grad,
use_peephole=op.get_attr("use_peephole"))
return [
None, x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad,
wco_grad, b_grad
]
class LSTMBlockCell(LayerRNNCell):
"""Basic LSTM recurrent network cell.
The implementation is based on: http://arxiv.org/abs/1409.2329.
We add `forget_bias` (default: 1) to the biases of the forget gate in order to
  reduce the scale of forgetting at the beginning of training.
Unlike `rnn_cell_impl.LSTMCell`, this is a monolithic op and should be much
faster. The weight and bias matrices should be compatible as long as the
variable scope matches.
"""
def __init__(self,
num_units,
forget_bias=1.0,
cell_clip=None,
use_peephole=False,
reuse=None,
name="lstm_cell"):
"""Initialize the basic LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
use_peephole: Whether to use peephole connections or not.
reuse: (optional) boolean describing whether to reuse variables in an
existing scope. If not `True`, and the existing scope already has the
given variables, an error is raised.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases. By default this is "lstm_cell", for variable-name compatibility
with `tf.nn.rnn_cell.LSTMCell`.
When restoring from CudnnLSTM-trained checkpoints, must use
CudnnCompatibleLSTMBlockCell instead.
"""
super(LSTMBlockCell, self).__init__(_reuse=reuse, name=name)
self._num_units = num_units
self._forget_bias = forget_bias
self._use_peephole = use_peephole
self._cell_clip = cell_clip if cell_clip is not None else -1
self._names = {
"W": "kernel",
"b": "bias",
"wci": "w_i_diag",
"wcf": "w_f_diag",
"wco": "w_o_diag",
"scope": "lstm_cell"
}
# Inputs must be 2-dimensional.
self.input_spec = base_layer.InputSpec(ndim=2)
@property
def state_size(self):
return rnn_cell_impl.LSTMStateTuple(self._num_units, self._num_units)
@property
def output_size(self):
return self._num_units
def build(self, inputs_shape):
if not inputs_shape[1].value:
raise ValueError(
"Expecting inputs_shape[1] to be set: %s" % str(inputs_shape))
input_size = inputs_shape[1].value
self._kernel = self.add_variable(
self._names["W"], [input_size + self._num_units, self._num_units * 4])
self._bias = self.add_variable(
self._names["b"], [self._num_units * 4],
initializer=init_ops.constant_initializer(0.0))
if self._use_peephole:
self._w_i_diag = self.add_variable(self._names["wci"], [self._num_units])
self._w_f_diag = self.add_variable(self._names["wcf"], [self._num_units])
self._w_o_diag = self.add_variable(self._names["wco"], [self._num_units])
self.built = True
def call(self, inputs, state):
"""Long short-term memory cell (LSTM)."""
if len(state) != 2:
raise ValueError("Expecting state to be a tuple with length 2.")
if self._use_peephole:
wci = self._w_i_diag
wcf = self._w_f_diag
wco = self._w_o_diag
else:
wci = wcf = wco = array_ops.zeros([self._num_units])
(cs_prev, h_prev) = state
(_, cs, _, _, _, _, h) = _lstm_block_cell(
inputs,
cs_prev,
h_prev,
self._kernel,
self._bias,
wci=wci,
wcf=wcf,
wco=wco,
forget_bias=self._forget_bias,
cell_clip=self._cell_clip,
use_peephole=self._use_peephole)
new_state = rnn_cell_impl.LSTMStateTuple(cs, h)
return h, new_state
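# Illustrative usage sketch (assumes `import tensorflow as tf` and the TF 1.x
# APIs this module targets; shapes are arbitrary placeholders):
#   cell = LSTMBlockCell(num_units=128)
#   inputs = tf.placeholder(tf.float32, [8, 20, 32])  # [batch, time, input_size]
#   outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)
#   # outputs: [8, 20, 128]; state is an LSTMStateTuple (c, h).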
class LSTMBlockWrapper(base_layer.Layer):
"""This is a helper class that provides housekeeping for LSTM cells.
This may be useful for alternative LSTM and similar type of cells.
The subclasses must implement `_call_cell` method and `num_units` property.
"""
@abc.abstractproperty
def num_units(self):
"""Number of units in this cell (output dimension)."""
pass
@abc.abstractmethod
def _call_cell(self, inputs, initial_cell_state, initial_output, dtype,
sequence_length):
"""Run this LSTM on inputs, starting from the given state.
This method must be implemented by subclasses and does the actual work
of calling the cell.
Args:
inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
initial_cell_state: initial value for cell state, shape `[batch_size,
self._num_units]`
initial_output: initial value of cell output, shape `[batch_size,
self._num_units]`
dtype: The data type for the initial state and expected output.
sequence_length: Specifies the length of each sequence in inputs. An int32
or int64 vector (tensor) size [batch_size], values in [0, time_len) or
None.
Returns:
A pair containing:
- State: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
- Output: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
"""
pass
def call(self, inputs, initial_state=None, dtype=None, sequence_length=None):
"""Run this LSTM on inputs, starting from the given state.
Args:
inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
or a list of `time_len` tensors of shape `[batch_size, input_size]`.
initial_state: a tuple `(initial_cell_state, initial_output)` with tensors
of shape `[batch_size, self._num_units]`. If this is not provided, the
cell is expected to create a zero initial state of type `dtype`.
dtype: The data type for the initial state and expected output. Required
if `initial_state` is not provided or RNN state has a heterogeneous
dtype.
sequence_length: Specifies the length of each sequence in inputs. An
`int32` or `int64` vector (tensor) size `[batch_size]`, values in `[0,
time_len).`
Defaults to `time_len` for each element.
Returns:
A pair containing:
- Output: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
or a list of time_len tensors of shape `[batch_size, output_size]`,
to match the type of the `inputs`.
- Final state: a tuple `(cell_state, output)` matching `initial_state`.
Raises:
ValueError: in case of shape mismatches
"""
is_list = isinstance(inputs, list)
if is_list:
inputs = array_ops.stack(inputs)
inputs_shape = inputs.get_shape().with_rank(3)
if not inputs_shape[2]:
raise ValueError("Expecting inputs_shape[2] to be set: %s" % inputs_shape)
batch_size = inputs_shape[1].value
if batch_size is None:
batch_size = array_ops.shape(inputs)[1]
time_len = inputs_shape[0].value
if time_len is None:
time_len = array_ops.shape(inputs)[0]
# Provide default values for initial_state and dtype
if initial_state is None:
if dtype is None:
raise ValueError("Either initial_state or dtype needs to be specified")
z = array_ops.zeros(
array_ops.stack([batch_size, self.num_units]), dtype=dtype)
initial_state = z, z
else:
if len(initial_state) != 2:
raise ValueError(
"Expecting initial_state to be a tuple with length 2 or None")
if dtype is None:
dtype = initial_state[0].dtype
# create the actual cell
if sequence_length is not None:
sequence_length = ops.convert_to_tensor(sequence_length)
initial_cell_state, initial_output = initial_state # pylint: disable=unpacking-non-sequence
cell_states, outputs = self._call_cell(
inputs, initial_cell_state, initial_output, dtype, sequence_length)
if sequence_length is not None:
# Mask out the part beyond sequence_length
mask = array_ops.transpose(
array_ops.sequence_mask(sequence_length, time_len, dtype=dtype),
[1, 0])
mask = array_ops.tile(
array_ops.expand_dims(mask, [-1]), [1, 1, self.num_units])
outputs *= mask
# Prepend initial states to cell_states and outputs for indexing to work
      # correctly, since we want to access the last valid state at
# sequence_length - 1, which can even be -1, corresponding to the
# initial state.
mod_cell_states = array_ops.concat(
[array_ops.expand_dims(initial_cell_state, [0]), cell_states], 0)
mod_outputs = array_ops.concat(
[array_ops.expand_dims(initial_output, [0]), outputs], 0)
final_cell_state = self._gather_states(mod_cell_states, sequence_length,
batch_size)
final_output = self._gather_states(mod_outputs, sequence_length,
batch_size)
else:
# No sequence_lengths used: final state is the last state
final_cell_state = cell_states[-1]
final_output = outputs[-1]
if is_list:
# Input was a list, so return a list
outputs = array_ops.unstack(outputs)
final_state = rnn_cell_impl.LSTMStateTuple(final_cell_state, final_output)
return outputs, final_state
def _gather_states(self, data, indices, batch_size):
"""Produce `out`, s.t. out(i, j) = data(indices(i), i, j)."""
mod_indices = indices * batch_size + math_ops.range(batch_size)
return array_ops.gather(
array_ops.reshape(data, [-1, self.num_units]), mod_indices)
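  # Worked example (illustrative numbers): with batch_size=2 and indices=[4, 2],
  # `data` of shape [time_len + 1, 2, num_units] is reshaped so row index equals
  # t * batch_size + b, and mod_indices = [4*2 + 0, 2*2 + 1] = [8, 5]; the gather
  # therefore picks data[4, 0, :] and data[2, 1, :], i.e. the state at each batch
  # element's own sequence_length.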
class LSTMBlockFusedCell(LSTMBlockWrapper):
"""FusedRNNCell implementation of LSTM.
This is an extremely efficient LSTM implementation, that uses a single TF op
for the entire LSTM. It should be both faster and more memory-efficient than
LSTMBlockCell defined above.
The implementation is based on: http://arxiv.org/abs/1409.2329.
We add forget_bias (default: 1) to the biases of the forget gate in order to
reduce the scale of forgetting in the beginning of the training.
The variable naming is consistent with `rnn_cell_impl.LSTMCell`.
"""
def __init__(self,
num_units,
forget_bias=1.0,
cell_clip=None,
use_peephole=False,
reuse=None,
name="lstm_fused_cell"):
"""Initialize the LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
cell_clip: clip the cell to this value. Default is no cell clipping.
use_peephole: Whether to use peephole connections or not.
reuse: (optional) boolean describing whether to reuse variables in an
existing scope. If not `True`, and the existing scope already has the
given variables, an error is raised.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases. By default this is "lstm_cell", for variable-name compatibility
with `tf.nn.rnn_cell.LSTMCell`.
"""
super(LSTMBlockFusedCell, self).__init__(_reuse=reuse, name=name)
self._num_units = num_units
self._forget_bias = forget_bias
self._cell_clip = cell_clip if cell_clip is not None else -1
self._use_peephole = use_peephole
# Inputs must be 3-dimensional.
self.input_spec = base_layer.InputSpec(ndim=3)
@property
def num_units(self):
"""Number of units in this cell (output dimension)."""
return self._num_units
def build(self, input_shape):
input_size = input_shape[2].value
self._kernel = self.add_variable(
"kernel", [input_size + self._num_units, self._num_units * 4])
self._bias = self.add_variable(
"bias", [self._num_units * 4],
initializer=init_ops.constant_initializer(0.0))
if self._use_peephole:
self._w_i_diag = self.add_variable("w_i_diag", [self._num_units])
self._w_f_diag = self.add_variable("w_f_diag", [self._num_units])
self._w_o_diag = self.add_variable("w_o_diag", [self._num_units])
self.built = True
def _call_cell(self,
inputs,
initial_cell_state=None,
initial_output=None,
dtype=None,
sequence_length=None):
"""Run this LSTM on inputs, starting from the given state.
Args:
inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
initial_cell_state: initial value for cell state, shape `[batch_size,
self._num_units]`
initial_output: initial value of cell output, shape `[batch_size,
self._num_units]`
dtype: The data type for the initial state and expected output.
sequence_length: Specifies the length of each sequence in inputs. An
`int32` or `int64` vector (tensor) size `[batch_size]`, values in `[0,
time_len)` or None.
Returns:
A pair containing:
- Cell state (cs): A `3-D` tensor of shape `[time_len, batch_size,
output_size]`
- Output (h): A `3-D` tensor of shape `[time_len, batch_size,
output_size]`
"""
inputs_shape = inputs.get_shape().with_rank(3)
time_len = inputs_shape[0].value
if time_len is None:
time_len = array_ops.shape(inputs)[0]
if self._use_peephole:
wci = self._w_i_diag
wco = self._w_o_diag
wcf = self._w_f_diag
else:
wci = wcf = wco = array_ops.zeros([self._num_units], dtype=dtype)
if sequence_length is None:
max_seq_len = math_ops.to_int64(time_len)
else:
max_seq_len = math_ops.to_int64(math_ops.reduce_max(sequence_length))
_, cs, _, _, _, _, h = gen_lstm_ops.block_lstm(
seq_len_max=max_seq_len,
x=inputs,
cs_prev=initial_cell_state,
h_prev=initial_output,
w=self._kernel,
wci=wci,
wcf=wcf,
wco=wco,
b=self._bias,
forget_bias=self._forget_bias,
cell_clip=self._cell_clip,
use_peephole=self._use_peephole)
return cs, h
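# Illustrative usage sketch for the fused cell (assumes `import tensorflow as tf`;
# inputs are time-major and shapes are arbitrary placeholders):
#   cell = LSTMBlockFusedCell(num_units=128)
#   inputs = tf.placeholder(tf.float32, [20, 8, 32])  # [time, batch, input_size]
#   seq_len = tf.placeholder(tf.int32, [8])
#   outputs, (final_c, final_h) = cell(inputs, dtype=tf.float32,
#                                      sequence_length=seq_len)
#   # outputs: [20, 8, 128]; final_c/final_h are the state at each sequence's end.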
|
jwlawson/tensorflow
|
tensorflow/contrib/rnn/python/ops/lstm_ops.py
|
Python
|
apache-2.0
| 24,941
| 0.005052
|
class Base(object):
def meth(self):
pass
class Derived1(Base):
def meth(self):
return super().meth()
class Derived2(Derived1):
def meth(self):
return super().meth()
class Derived3(Derived1):
pass
class Derived4(Derived3, Derived2):
def meth(self):
return super().meth()
class Derived5(Derived1):
def meth(self):
return super().meth()
class Derived6(Derived5, Derived2):
def meth(self):
return super().meth()
|
github/codeql
|
python/ql/test/3/library-tests/PointsTo/inheritance/test.py
|
Python
|
mit
| 496
| 0.012097
|
import numpy as np
import unittest
import ray
from ray import tune
from ray.rllib import _register_all
MB = 1024 * 1024
@ray.remote(memory=100 * MB)
class Actor(object):
def __init__(self):
pass
def ping(self):
return "ok"
@ray.remote(object_store_memory=100 * MB)
class Actor2(object):
def __init__(self):
pass
def ping(self):
return "ok"
def train_oom(config, reporter):
ray.put(np.zeros(200 * 1024 * 1024))
reporter(result=123)
class TestMemoryScheduling(unittest.TestCase):
def testMemoryRequest(self):
try:
ray.init(num_cpus=1, memory=200 * MB)
            # fits first 2 (two 100MB actors fit within the 200MB memory limit)
a = Actor.remote()
b = Actor.remote()
ok, _ = ray.wait(
[a.ping.remote(), b.ping.remote()],
timeout=60.0,
num_returns=2)
self.assertEqual(len(ok), 2)
# does not fit
c = Actor.remote()
ok, _ = ray.wait([c.ping.remote()], timeout=5.0)
self.assertEqual(len(ok), 0)
finally:
ray.shutdown()
def testObjectStoreMemoryRequest(self):
try:
ray.init(num_cpus=1, object_store_memory=300 * MB)
            # fits first 2 (70% of the 300MB store, i.e. 210MB, is schedulable,
            # which is enough for two 100MB actors but not a third)
a = Actor2.remote()
b = Actor2.remote()
ok, _ = ray.wait(
[a.ping.remote(), b.ping.remote()],
timeout=60.0,
num_returns=2)
self.assertEqual(len(ok), 2)
# does not fit
c = Actor2.remote()
ok, _ = ray.wait([c.ping.remote()], timeout=5.0)
self.assertEqual(len(ok), 0)
finally:
ray.shutdown()
def testTuneDriverHeapLimit(self):
try:
_register_all()
result = tune.run(
"PG",
stop={"timesteps_total": 10000},
config={
"env": "CartPole-v0",
"memory": 100 * 1024 * 1024, # too little
},
raise_on_failed_trial=False)
self.assertEqual(result.trials[0].status, "ERROR")
self.assertTrue(
"RayOutOfMemoryError: Heap memory usage for ray_PG_" in
result.trials[0].error_msg)
finally:
ray.shutdown()
def testTuneDriverStoreLimit(self):
try:
_register_all()
self.assertRaisesRegexp(
ray.tune.error.TuneError,
".*Insufficient cluster resources.*",
lambda: tune.run(
"PG",
stop={"timesteps_total": 10000},
config={
"env": "CartPole-v0",
# too large
"object_store_memory": 10000 * 1024 * 1024,
}))
finally:
ray.shutdown()
def testTuneWorkerHeapLimit(self):
try:
_register_all()
result = tune.run(
"PG",
stop={"timesteps_total": 10000},
config={
"env": "CartPole-v0",
"num_workers": 1,
"memory_per_worker": 100 * 1024 * 1024, # too little
},
raise_on_failed_trial=False)
self.assertEqual(result.trials[0].status, "ERROR")
self.assertTrue(
"RayOutOfMemoryError: Heap memory usage for ray_Rollout" in
result.trials[0].error_msg)
finally:
ray.shutdown()
def testTuneWorkerStoreLimit(self):
try:
_register_all()
self.assertRaisesRegexp(
ray.tune.error.TuneError,
".*Insufficient cluster resources.*",
lambda:
tune.run("PG", stop={"timesteps_total": 0}, config={
"env": "CartPole-v0",
"num_workers": 1,
# too large
"object_store_memory_per_worker": 10000 * 1024 * 1024,
}))
finally:
ray.shutdown()
def testTuneObjectLimitApplied(self):
try:
result = tune.run(
train_oom,
resources_per_trial={"object_store_memory": 150 * 1024 * 1024},
raise_on_failed_trial=False)
            self.assertEqual(result.trials[0].status, "ERROR")
self.assertTrue("ObjectStoreFullError: Failed to put" in
result.trials[0].error_msg)
finally:
ray.shutdown()
if __name__ == "__main__":
unittest.main(verbosity=2)
|
ujvl/ray-ng
|
python/ray/tests/test_memory_scheduling.py
|
Python
|
apache-2.0
| 4,709
| 0
|
'''
Check the performance counters from SQL Server
See http://blogs.msdn.com/b/psssql/archive/2013/09/23/interpreting-the-counter-values-from-sys-dm-os-performance-counters.aspx
for information on how to report the metrics available in the sys.dm_os_performance_counters table
'''
# stdlib
import traceback
from contextlib import contextmanager
# 3rd party
import adodbapi
try:
import pyodbc
except ImportError:
pyodbc = None
from config import _is_affirmative
# project
from checks import AgentCheck
EVENT_TYPE = SOURCE_TYPE_NAME = 'sql server'
ALL_INSTANCES = 'ALL'
VALID_METRIC_TYPES = ('gauge', 'rate', 'histogram')
# Constant for SQLServer cntr_type
PERF_LARGE_RAW_BASE = 1073939712
PERF_RAW_LARGE_FRACTION = 537003264
PERF_AVERAGE_BULK = 1073874176
PERF_COUNTER_BULK_COUNT = 272696576
PERF_COUNTER_LARGE_RAWCOUNT = 65792
# Queries
COUNTER_TYPE_QUERY = '''select distinct cntr_type
from sys.dm_os_performance_counters
where counter_name = ?;'''
BASE_NAME_QUERY = '''select distinct counter_name
from sys.dm_os_performance_counters
where (counter_name=? or counter_name=?
or counter_name=?) and cntr_type=%s;''' % PERF_LARGE_RAW_BASE
INSTANCES_QUERY = '''select instance_name
from sys.dm_os_performance_counters
where counter_name=? and instance_name!='_Total';'''
VALUE_AND_BASE_QUERY = '''select cntr_value
from sys.dm_os_performance_counters
where (counter_name=? or counter_name=?)
and instance_name=?
order by cntr_type;'''
DATABASE_EXISTS_QUERY = 'select name from sys.databases;'
class SQLConnectionError(Exception):
"""
Exception raised for SQL instance connection issues
"""
pass
class SQLServer(AgentCheck):
SERVICE_CHECK_NAME = 'sqlserver.can_connect'
# FIXME: 6.x, set default to 5s (like every check)
DEFAULT_COMMAND_TIMEOUT = 30
DEFAULT_DATABASE = 'master'
DEFAULT_DRIVER = 'SQL Server'
DEFAULT_DB_KEY = 'database'
PROC_GUARD_DB_KEY = 'proc_only_if_database'
METRICS = [
('sqlserver.buffer.cache_hit_ratio', 'Buffer cache hit ratio', ''), # RAW_LARGE_FRACTION
('sqlserver.buffer.page_life_expectancy', 'Page life expectancy', ''), # LARGE_RAWCOUNT
('sqlserver.stats.batch_requests', 'Batch Requests/sec', ''), # BULK_COUNT
('sqlserver.stats.sql_compilations', 'SQL Compilations/sec', ''), # BULK_COUNT
('sqlserver.stats.sql_recompilations', 'SQL Re-Compilations/sec', ''), # BULK_COUNT
('sqlserver.stats.connections', 'User Connections', ''), # LARGE_RAWCOUNT
('sqlserver.stats.lock_waits', 'Lock Waits/sec', '_Total'), # BULK_COUNT
('sqlserver.access.page_splits', 'Page Splits/sec', ''), # BULK_COUNT
('sqlserver.stats.procs_blocked', 'Processes blocked', ''), # LARGE_RAWCOUNT
('sqlserver.buffer.checkpoint_pages', 'Checkpoint pages/sec', '') # BULK_COUNT
]
valid_connectors = ['adodbapi']
if pyodbc is not None:
valid_connectors.append('odbc')
def __init__(self, name, init_config, agentConfig, instances=None):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
# Cache connections
self.connections = {}
self.failed_connections = {}
self.instances_metrics = {}
self.existing_databases = None
self.do_check = {}
self.proc_type_mapping = {
'gauge': self.gauge,
'rate' : self.rate,
'histogram': self.histogram
}
self.connector = init_config.get('connector', 'adodbapi')
        if self.connector.lower() not in self.valid_connectors:
self.log.error("Invalid database connector %s, defaulting to adodbapi" % self.connector)
self.connector = 'adodbapi'
# Pre-process the list of metrics to collect
self.custom_metrics = init_config.get('custom_metrics', [])
for instance in instances:
try:
instance_key = self._conn_key(instance, self.DEFAULT_DB_KEY)
self.do_check[instance_key] = True
# check to see if the database exists before we try any connections to it
with self.open_managed_db_connections(instance, None, db_name=self.DEFAULT_DATABASE):
db_exists, context = self._check_db_exists(instance)
if db_exists:
if instance.get('stored_procedure') is None:
with self.open_managed_db_connections(instance, self.DEFAULT_DB_KEY):
self._make_metric_list_to_collect(instance, self.custom_metrics)
else:
# How much do we care that the DB doesn't exist?
ignore = _is_affirmative(instance.get("ignore_missing_database", False))
if ignore is not None and ignore:
# not much : we expect it. leave checks disabled
self.do_check[instance_key] = False
self.log.warning("Database %s does not exist. Disabling checks for this instance." % (context))
else:
# yes we do. Keep trying
self.log.error("Database %s does not exist. Fix issue and restart agent" % (context))
except SQLConnectionError:
self.log.exception("Skipping SQL Server instance")
continue
def _check_db_exists(self, instance):
"""
Check if the database we're targeting actually exists
If not then we won't do any checks
This allows the same config to be installed on many servers but fail gracefully
"""
dsn, host, username, password, database, driver = self._get_access_info(instance, self.DEFAULT_DB_KEY)
context = "%s - %s" % (host, database)
if self.existing_databases is None:
cursor = self.get_cursor(instance, None, self.DEFAULT_DATABASE)
try:
self.existing_databases = {}
cursor.execute(DATABASE_EXISTS_QUERY)
for row in cursor:
self.existing_databases[row.name] = True
            except Exception as e:
self.log.error("Failed to check if database %s exists: %s" % (database, e))
return False, context
finally:
self.close_cursor(cursor)
return database in self.existing_databases, context
def _make_metric_list_to_collect(self, instance, custom_metrics):
"""
Store the list of metrics to collect by instance_key.
Will also create and cache cursors to query the db.
"""
metrics_to_collect = []
for name, counter_name, instance_name in self.METRICS:
try:
sql_type, base_name = self.get_sql_type(instance, counter_name)
metrics_to_collect.append(self.typed_metric(instance,
name,
counter_name,
base_name,
None,
sql_type,
instance_name,
None))
except SQLConnectionError:
raise
except Exception:
self.log.warning("Can't load the metric %s, ignoring", name, exc_info=True)
continue
# Load any custom metrics from conf.d/sqlserver.yaml
for row in custom_metrics:
user_type = row.get('type')
if user_type is not None and user_type not in VALID_METRIC_TYPES:
self.log.error('%s has an invalid metric type: %s', row['name'], user_type)
sql_type = None
try:
if user_type is None:
sql_type, base_name = self.get_sql_type(instance, row['counter_name'])
except Exception:
self.log.warning("Can't load the metric %s, ignoring", row['name'], exc_info=True)
continue
metrics_to_collect.append(self.typed_metric(instance,
row['name'],
row['counter_name'],
base_name,
user_type,
sql_type,
row.get('instance_name', ''),
row.get('tag_by', None)))
instance_key = self._conn_key(instance, self.DEFAULT_DB_KEY)
self.instances_metrics[instance_key] = metrics_to_collect
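    # Illustrative sketch of a custom metric declared under init_config in
    # conf.d/sqlserver.yaml and consumed above (keys match the row.get() lookups;
    # the counter shown is an assumption, not a required value):
    #
    #   init_config:
    #     custom_metrics:
    #       - name: sqlserver.clr.execution
    #         counter_name: CLR Execution
    #         type: gauge            # optional: gauge, rate or histogram
    #         instance_name: ALL     # optional; pair with tag_by
    #         tag_by: db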
def typed_metric(self, instance, dd_name, sql_name, base_name, user_type, sql_type, instance_name, tag_by):
'''
Create the appropriate SqlServerMetric object, each implementing its method to
fetch the metrics properly.
If a `type` was specified in the config, it is used to report the value
directly fetched from SQLServer. Otherwise, it is decided based on the
sql_type, according to microsoft's documentation.
'''
metric_type_mapping = {
PERF_COUNTER_BULK_COUNT: (self.rate, SqlSimpleMetric),
PERF_COUNTER_LARGE_RAWCOUNT: (self.gauge, SqlSimpleMetric),
PERF_LARGE_RAW_BASE: (self.gauge, SqlSimpleMetric),
PERF_RAW_LARGE_FRACTION: (self.gauge, SqlFractionMetric),
PERF_AVERAGE_BULK: (self.gauge, SqlIncrFractionMetric)
}
if user_type is not None:
# user type overrides any other value
metric_type = getattr(self, user_type)
cls = SqlSimpleMetric
else:
metric_type, cls = metric_type_mapping[sql_type]
return cls(self._get_connector(instance), dd_name, sql_name, base_name,
metric_type, instance_name, tag_by, self.log)
def _get_connector(self, instance):
connector = instance.get('connector', self.connector)
if connector != self.connector:
            if connector.lower() not in self.valid_connectors:
                self.log.warning("Invalid database connector %s, using default %s",
                                 connector, self.connector)
connector = self.connector
else:
self.log.debug("Overriding default connector for %s with %s", instance['host'], connector)
return connector
def _get_access_info(self, instance, db_key, db_name=None):
''' Convenience method to extract info from instance
'''
dsn = instance.get('dsn')
host = instance.get('host')
username = instance.get('username')
password = instance.get('password')
database = instance.get(db_key) if db_name is None else db_name
driver = instance.get('driver')
if not dsn:
if not host:
host = '127.0.0.1,1433'
if not database:
database = self.DEFAULT_DATABASE
if not driver:
driver = self.DEFAULT_DRIVER
return dsn, host, username, password, database, driver
def _conn_key(self, instance, db_key, db_name=None):
''' Return a key to use for the connection cache
'''
dsn, host, username, password, database, driver = self._get_access_info(instance, db_key, db_name)
return '%s:%s:%s:%s:%s:%s' % (dsn, host, username, password, database, driver)
def _conn_string_odbc(self, db_key, instance=None, conn_key=None, db_name=None):
''' Return a connection string to use with odbc
'''
if instance:
dsn, host, username, password, database, driver = self._get_access_info(instance, db_key, db_name)
elif conn_key:
dsn, host, username, password, database, driver = conn_key.split(":")
conn_str = ''
if dsn:
conn_str = 'DSN=%s;' % (dsn)
if driver:
conn_str += 'DRIVER={%s};' % (driver)
if host:
conn_str += 'Server=%s;' % (host)
if database:
conn_str += 'Database=%s;' % (database)
if username:
conn_str += 'UID=%s;' % (username)
self.log.debug("Connection string (before password) %s" , conn_str)
if password:
conn_str += 'PWD=%s;' % (password)
return conn_str
def _conn_string_adodbapi(self, db_key, instance=None, conn_key=None, db_name=None):
''' Return a connection string to use with adodbapi
'''
if instance:
_, host, username, password, database, _ = self._get_access_info(instance, db_key, db_name)
elif conn_key:
_, host, username, password, database, _ = conn_key.split(":")
conn_str = 'Provider=SQLOLEDB;Data Source=%s;Initial Catalog=%s;' \
% (host, database)
if username:
conn_str += 'User ID=%s;' % (username)
if password:
conn_str += 'Password=%s;' % (password)
if not username and not password:
conn_str += 'Integrated Security=SSPI;'
return conn_str
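    # Example output (illustrative, derived from the defaults above): with no dsn,
    # username or password, and host/database left unset, _conn_string_adodbapi yields
    #   'Provider=SQLOLEDB;Data Source=127.0.0.1,1433;Initial Catalog=master;Integrated Security=SSPI;'
    # while _conn_string_odbc yields
    #   'DRIVER={SQL Server};Server=127.0.0.1,1433;Database=master;'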
@contextmanager
def get_managed_cursor(self, instance, db_key, db_name=None):
cursor = self.get_cursor(instance, db_key, db_name)
yield cursor
self.close_cursor(cursor)
def get_cursor(self, instance, db_key, db_name=None):
'''
Return a cursor to execute query against the db
Cursor are cached in the self.connections dict
'''
conn_key = self._conn_key(instance, db_key, db_name)
conn = self.connections[conn_key]['conn']
cursor = conn.cursor()
return cursor
def get_sql_type(self, instance, counter_name):
'''
Return the type of the performance counter so that we can report it to
StackState correctly
If the sql_type is one that needs a base (PERF_RAW_LARGE_FRACTION and
PERF_AVERAGE_BULK), the name of the base counter will also be returned
'''
with self.get_managed_cursor(instance, self.DEFAULT_DB_KEY) as cursor:
cursor.execute(COUNTER_TYPE_QUERY, (counter_name,))
(sql_type,) = cursor.fetchone()
if sql_type == PERF_LARGE_RAW_BASE:
self.log.warning("Metric %s is of type Base and shouldn't be reported this way",
counter_name)
base_name = None
if sql_type in [PERF_AVERAGE_BULK, PERF_RAW_LARGE_FRACTION]:
                # This is an ugly hack. For certain types of metric (PERF_RAW_LARGE_FRACTION
                # and PERF_AVERAGE_BULK), we need two metrics: the metric specified and
                # a base metric to get the ratio. There is no unique schema so we generate
# the possible candidates and we look at which ones exist in the db.
candidates = (counter_name + " base",
counter_name.replace("(ms)", "base"),
counter_name.replace("Avg ", "") + " base"
)
try:
cursor.execute(BASE_NAME_QUERY, candidates)
base_name = cursor.fetchone().counter_name.strip()
self.log.debug("Got base metric: %s for metric: %s", base_name, counter_name)
except Exception as e:
self.log.warning("Could not get counter_name of base for metric: %s", e)
return sql_type, base_name
def check(self, instance):
if self.do_check[self._conn_key(instance, self.DEFAULT_DB_KEY)]:
proc = instance.get('stored_procedure')
if proc is None:
self.do_perf_counter_check(instance)
else:
self.do_stored_procedure_check(instance, proc)
else:
self.log.debug("Skipping check")
def do_perf_counter_check(self, instance):
"""
Fetch the metrics from the sys.dm_os_performance_counters table
"""
custom_tags = instance.get('tags', [])
instance_key = self._conn_key(instance, self.DEFAULT_DB_KEY)
with self.open_managed_db_connections(instance, self.DEFAULT_DB_KEY):
# if the server was down at check __init__ key could be missing.
if instance_key not in self.instances_metrics:
self._make_metric_list_to_collect(instance, self.custom_metrics)
metrics_to_collect = self.instances_metrics[instance_key]
with self.get_managed_cursor(instance, self.DEFAULT_DB_KEY) as cursor:
for metric in metrics_to_collect:
try:
metric.fetch_metric(cursor, custom_tags)
except Exception as e:
self.log.warning("Could not fetch metric %s: %s" % (metric.stackstate_name, e))
def do_stored_procedure_check(self, instance, proc):
"""
Fetch the metrics from the stored proc
"""
guardSql = instance.get('proc_only_if')
if (guardSql and self.proc_check_guard(instance, guardSql)) or not guardSql:
self.open_db_connections(instance, self.DEFAULT_DB_KEY)
cursor = self.get_cursor(instance, self.DEFAULT_DB_KEY)
try:
cursor.callproc(proc)
rows = cursor.fetchall()
for row in rows:
tags = [] if row.tags is None or row.tags == '' else row.tags.split(',')
if row.type in self.proc_type_mapping:
self.proc_type_mapping[row.type](row.metric, row.value, tags)
else:
self.log.warning('%s is not a recognised type from procedure %s, metric %s'
% (row.type, proc, row.metric))
            except Exception as e:
self.log.warning("Could not call procedure %s: %s" % (proc, e))
self.close_cursor(cursor)
self.close_db_connections(instance, self.DEFAULT_DB_KEY)
else:
self.log.info("Skipping call to %s due to only_if" % (proc))
def proc_check_guard(self, instance, sql):
"""
        Check to see if the guard SQL returns a single column containing 0 or 1.
        We return True if 1, else False.
"""
self.open_db_connections(instance, self.PROC_GUARD_DB_KEY)
cursor = self.get_cursor(instance, self.PROC_GUARD_DB_KEY)
should_run = False
try:
cursor.execute(sql, ())
result = cursor.fetchone()
should_run = result[0] == 1
        except Exception as e:
self.log.error("Failed to run proc_only_if sql %s : %s" % (sql, e))
self.close_cursor(cursor)
self.close_db_connections(instance, self.PROC_GUARD_DB_KEY)
return should_run
def close_cursor(self, cursor):
"""
We close the cursor explicitly b/c we had proven memory leaks
We handle any exception from closing, although according to the doc:
"in adodbapi, it is NOT an error to re-close a closed cursor"
"""
try:
cursor.close()
except Exception as e:
self.log.warning("Could not close adodbapi cursor\n{0}".format(e))
def close_db_connections(self, instance, db_key, db_name=None):
"""
We close the db connections explicitly b/c when we don't they keep
locks on the db. This presents as issues such as the SQL Server Agent
being unable to stop.
"""
conn_key = self._conn_key(instance, db_key, db_name)
if conn_key not in self.connections:
return
try:
self.connections[conn_key]['conn'].close()
del self.connections[conn_key]
except Exception as e:
self.log.warning("Could not close adodbapi db connection\n{0}".format(e))
@contextmanager
def open_managed_db_connections(self, instance, db_key, db_name=None):
self.open_db_connections(instance, db_key, db_name)
yield
self.close_db_connections(instance, db_key, db_name)
def open_db_connections(self, instance, db_key, db_name=None):
"""
We open the db connections explicitly, so we can ensure they are open
before we use them, and are closable, once we are finished. Open db
connections keep locks on the db, presenting issues such as the SQL
Server Agent being unable to stop.
"""
conn_key = self._conn_key(instance, db_key, db_name)
timeout = int(instance.get('command_timeout',
self.DEFAULT_COMMAND_TIMEOUT))
dsn, host, username, password, database, driver = self._get_access_info(
instance, db_key, db_name)
service_check_tags = [
'host:%s' % host,
'db:%s' % database
]
try:
if self._get_connector(instance) == 'adodbapi':
cs = self._conn_string_adodbapi(db_key, instance=instance, db_name=db_name)
# autocommit: true disables implicit transaction
rawconn = adodbapi.connect(cs, {'timeout':timeout, 'autocommit':True})
else:
cs = self._conn_string_odbc(db_key, instance=instance, db_name=db_name)
rawconn = pyodbc.connect(cs, timeout=timeout)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,
tags=service_check_tags)
if conn_key not in self.connections:
self.connections[conn_key] = {'conn': rawconn, 'timeout': timeout}
else:
try:
# explicitly trying to avoid leaks...
self.connections[conn_key]['conn'].close()
except Exception as e:
self.log.info("Could not close adodbapi db connection\n{0}".format(e))
self.connections[conn_key]['conn'] = rawconn
except Exception as e:
cx = "%s - %s" % (host, database)
message = "Unable to connect to SQL Server for instance %s." % cx
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=message)
password = instance.get('password')
tracebk = traceback.format_exc()
if password is not None:
tracebk = tracebk.replace(password, "*" * 6)
cxn_failure_exp = SQLConnectionError("%s \n %s" % (message, tracebk))
raise cxn_failure_exp
class SqlServerMetric(object):
'''General class for common methods, should never be instantiated directly
'''
def __init__(self, connector, stackstate_name, sql_name, base_name,
report_function, instance, tag_by, logger):
self.connector = connector
self.stackstate_name = stackstate_name
self.sql_name = sql_name
self.base_name = base_name
self.report_function = report_function
self.instance = instance
self.tag_by = tag_by
self.instances = None
self.past_values = {}
self.log = logger
    def fetch_metric(self, cursor, tags):
        raise NotImplementedError
class SqlSimpleMetric(SqlServerMetric):
def fetch_metric(self, cursor, tags):
query_base = '''
select instance_name, cntr_value
from sys.dm_os_performance_counters
where counter_name = ?
'''
if self.instance == ALL_INSTANCES:
query = query_base + "and instance_name!= '_Total'"
query_content = (self.sql_name,)
else:
query = query_base + "and instance_name=?"
query_content = (self.sql_name, self.instance)
cursor.execute(query, query_content)
rows = cursor.fetchall()
for instance_name, cntr_value in rows:
metric_tags = tags
if self.instance == ALL_INSTANCES:
metric_tags = metric_tags + ['%s:%s' % (self.tag_by, instance_name.strip())]
self.report_function(self.stackstate_name, cntr_value,
tags=metric_tags)
class SqlFractionMetric(SqlServerMetric):
def set_instances(self, cursor):
if self.instance == ALL_INSTANCES:
cursor.execute(INSTANCES_QUERY, (self.sql_name,))
self.instances = [row.instance_name for row in cursor.fetchall()]
else:
self.instances = [self.instance]
def fetch_metric(self, cursor, tags):
'''
        Because we need to query the metrics by matching pairs, we can't query
        all of them together without having to perform some matching based on
        the name afterwards, so instead we query instance by instance.
        We cache the list of instances so that we don't have to look it up every time.
'''
if self.instances is None:
self.set_instances(cursor)
for instance in self.instances:
cursor.execute(VALUE_AND_BASE_QUERY, (self.sql_name, self.base_name, instance))
rows = cursor.fetchall()
if len(rows) != 2:
self.log.warning("Missing counter to compute fraction for "
"metric %s instance %s, skipping", self.sql_name, instance)
continue
if self.connector == 'odbc':
value = rows[0].cntr_value
base = rows[1].cntr_value
else:
value = rows[0, "cntr_value"]
base = rows[1, "cntr_value"]
metric_tags = tags
if self.instance == ALL_INSTANCES:
metric_tags = metric_tags + ['%s:%s' % (self.tag_by, instance.strip())]
self.report_fraction(value, base, metric_tags)
def report_fraction(self, value, base, metric_tags):
try:
result = value / float(base)
self.report_function(self.stackstate_name, result, tags=metric_tags)
except ZeroDivisionError:
self.log.debug("Base value is 0, won't report metric %s for tags %s",
self.stackstate_name, metric_tags)
class SqlIncrFractionMetric(SqlFractionMetric):
def report_fraction(self, value, base, metric_tags):
key = "key:" + "".join(metric_tags)
if key in self.past_values:
old_value, old_base = self.past_values[key]
diff_value = value - old_value
diff_base = base - old_base
try:
result = diff_value / float(diff_base)
self.report_function(self.stackstate_name, result, tags=metric_tags)
except ZeroDivisionError:
self.log.debug("Base value is 0, won't report metric %s for tags %s",
self.stackstate_name, metric_tags)
self.past_values[key] = (value, base)
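    # Worked example (illustrative numbers): for an "average"-style counter pair,
    # two consecutive checks observing (value, base) = (1000, 50) and then (1300, 60)
    # report (1300 - 1000) / float(60 - 50) = 30.0, i.e. the average over the check
    # interval only, whereas SqlFractionMetric would report the cumulative ratio
    # 1300 / 60.0 ~= 21.7.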
|
StackVista/sts-agent-integrations-core
|
sqlserver/check.py
|
Python
|
bsd-3-clause
| 27,601
| 0.002935
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_padcv.py
-------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from __future__ import absolute_import
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .r_li import checkMovingWindow, configFile
def checkParameterValuesBeforeExecuting(alg):
return checkMovingWindow(alg)
def processCommand(alg):
configFile(alg)
|
drnextgis/QGIS
|
python/plugins/processing/algs/grass7/ext/r_li_padcv.py
|
Python
|
gpl-2.0
| 1,324
| 0
|
from setuptools import setup, find_packages
setup(
name='django-test-html-form',
version='0.1',
description="Make your Django HTML form tests more explicit and concise.",
long_description=open('README.rst').read(),
keywords='django test assert',
author='Dan Claudiu Pop',
author_email='dancladiupop@gmail.com',
url='https://github.com/danclaudiupop/assertHtmlForm',
license='BSD License',
packages=find_packages(),
include_package_data=True,
install_requires=[
'beautifulsoup4',
],
)
|
danclaudiupop/django-test-html-form
|
setup.py
|
Python
|
bsd-3-clause
| 546
| 0
|
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from django_messages.models import Message
class SendTestCase(TestCase):
def setUp(self):
self.user1 = User.objects.create_user('user1', 'user1@example.com', '123456')
self.user2 = User.objects.create_user('user2', 'user2@example.com', '123456')
self.msg1 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text', body='Body Text')
self.msg1.save()
def testBasic(self):
self.assertEquals(self.msg1.sender, self.user1)
self.assertEquals(self.msg1.recipient, self.user2)
self.assertEquals(self.msg1.subject, 'Subject Text')
self.assertEquals(self.msg1.body, 'Body Text')
self.assertEquals(self.user1.sent_messages.count(), 1)
self.assertEquals(self.user1.received_messages.count(), 0)
self.assertEquals(self.user2.received_messages.count(), 1)
self.assertEquals(self.user2.sent_messages.count(), 0)
class DeleteTestCase(TestCase):
def setUp(self):
self.user1 = User.objects.create_user('user3', 'user3@example.com', '123456')
self.user2 = User.objects.create_user('user4', 'user4@example.com', '123456')
self.msg1 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text 1', body='Body Text 1')
self.msg2 = Message(sender=self.user1, recipient=self.user2, subject='Subject Text 2', body='Body Text 2')
self.msg1.sender_deleted_at = datetime.datetime.now()
self.msg2.recipient_deleted_at = datetime.datetime.now()
self.msg1.save()
self.msg2.save()
def testBasic(self):
self.assertEquals(Message.objects.outbox_for(self.user1).count(), 1)
self.assertEquals(Message.objects.outbox_for(self.user1)[0].subject, 'Subject Text 2')
        self.assertEquals(Message.objects.inbox_for(self.user2).count(), 1)
        self.assertEquals(Message.objects.inbox_for(self.user2)[0].subject, 'Subject Text 1')
        # undelete
        self.msg1.sender_deleted_at = None
        self.msg2.recipient_deleted_at = None
        self.msg1.save()
        self.msg2.save()
        self.assertEquals(Message.objects.outbox_for(self.user1).count(), 2)
        self.assertEquals(Message.objects.inbox_for(self.user2).count(), 2)
|
mirumee/django-messages
|
django_messages/tests.py
|
Python
|
bsd-3-clause
| 2,362
| 0.008044
|
# -*- coding: utf-8 -*-
"""
Folium
-------
Make beautiful, interactive maps with Python and Leaflet.js
"""
from __future__ import absolute_import
from branca.colormap import StepColormap
from branca.utilities import color_brewer
from .map import LegacyMap, FitBounds
from .features import GeoJson, TopoJson
class Map(LegacyMap):
"""Create a Map with Folium and Leaflet.js
Generate a base map of given width and height with either default
tilesets or a custom tileset URL. The following tilesets are built-in
to Folium. Pass any of the following to the "tiles" keyword:
- "OpenStreetMap"
- "Mapbox Bright" (Limited levels of zoom for free tiles)
- "Mapbox Control Room" (Limited levels of zoom for free tiles)
- "Stamen" (Terrain, Toner, and Watercolor)
- "Cloudmade" (Must pass API key)
- "Mapbox" (Must pass API key)
- "CartoDB" (positron and dark_matter)
You can pass a custom tileset to Folium by passing a Leaflet-style
URL to the tiles parameter:
http://{s}.yourtiles.com/{z}/{x}/{y}.png
Parameters
----------
location: tuple or list, default None
Latitude and Longitude of Map (Northing, Easting).
width: pixel int or percentage string (default: '100%')
Width of the map.
height: pixel int or percentage string (default: '100%')
Height of the map.
tiles: str, default 'OpenStreetMap'
Map tileset to use. Can choose from a list of built-in tiles,
pass a custom URL or pass `None` to create a map without tiles.
API_key: str, default None
API key for Cloudmade or Mapbox tiles.
max_zoom: int, default 18
Maximum zoom depth for the map.
zoom_start: int, default 10
Initial zoom level for the map.
attr: string, default None
Map tile attribution; only required if passing custom tile URL.
detect_retina: bool, default False
If true and user is on a retina display, it will request four
tiles of half the specified size and a bigger zoom level in place
of one to utilize the high resolution.
crs : str, default 'EPSG3857'
Defines coordinate reference systems for projecting geographical points
into pixel (screen) coordinates and back.
You can use Leaflet's values :
* EPSG3857 : The most common CRS for online maps, used by almost all
free and commercial tile providers. Uses Spherical Mercator projection.
Set in by default in Map's crs option.
* EPSG4326 : A common CRS among GIS enthusiasts.
Uses simple Equirectangular projection.
* EPSG3395 : Rarely used by some commercial tile providers.
Uses Elliptical Mercator projection.
* Simple : A simple CRS that maps longitude and latitude into
x and y directly. May be used for maps of flat surfaces
(e.g. game maps). Note that the y axis should still be inverted
(going from bottom to top).
control_scale : bool, default False
Whether to add a control scale on the map.
prefer_canvas : bool, default False
Forces Leaflet to use the Canvas back-end (if available) for
vector layers instead of SVG. This can increase performance
considerably in some cases (e.g. many thousands of circle
markers on the map).
no_touch : bool, default False
Forces Leaflet to not use touch events even if it detects them.
disable_3d : bool, default False
Forces Leaflet to not use hardware-accelerated CSS 3D
transforms for positioning (which may cause glitches in some
rare environments) even if they're supported.
Returns
-------
Folium LegacyMap Object
Examples
--------
>>> map = folium.LegacyMap(location=[45.523, -122.675],
... width=750, height=500)
>>> map = folium.LegacyMap(location=[45.523, -122.675],
tiles='Mapbox Control Room')
>>> map = folium.LegacyMap(location=(45.523, -122.675), max_zoom=20,
tiles='Cloudmade', API_key='YourKey')
>>> map = folium.LegacyMap(location=[45.523, -122.675], zoom_start=2,
tiles=('http://{s}.tiles.mapbox.com/v3/'
'mapbox.control-room/{z}/{x}/{y}.png'),
attr='Mapbox attribution')
"""
def fit_bounds(self, bounds, padding_top_left=None,
padding_bottom_right=None, padding=None, max_zoom=None):
"""Fit the map to contain a bounding box with the
maximum zoom level possible.
Parameters
----------
bounds: list of (latitude, longitude) points
Bounding box specified as two points [southwest, northeast]
padding_top_left: (x, y) point, default None
Padding in the top left corner. Useful if some elements in
the corner, such as controls, might obscure objects you're zooming
to.
padding_bottom_right: (x, y) point, default None
Padding in the bottom right corner.
padding: (x, y) point, default None
Equivalent to setting both top left and bottom right padding to
the same value.
max_zoom: int, default None
Maximum zoom to be used.
Examples
--------
>>> map.fit_bounds([[52.193636, -2.221575], [52.636878, -1.139759]])
"""
self.add_child(FitBounds(bounds,
padding_top_left=padding_top_left,
padding_bottom_right=padding_bottom_right,
padding=padding,
max_zoom=max_zoom,
)
)
def choropleth(self, geo_path=None, geo_str=None, data_out='data.json',
data=None, columns=None, key_on=None, threshold_scale=None,
fill_color='blue', fill_opacity=0.6, line_color='black',
line_weight=1, line_opacity=1, legend_name="",
topojson=None, reset=False, smooth_factor=None,
highlight=None):
"""
Apply a GeoJSON overlay to the map.
Plot a GeoJSON overlay on the base map. There is no requirement
to bind data (passing just a GeoJSON plots a single-color overlay),
but there is a data binding option to map your columnar data to
different feature objects with a color scale.
If data is passed as a Pandas DataFrame, the "columns" and "key-on"
keywords must be included, the first to indicate which DataFrame
columns to use, the second to indicate the layer in the GeoJSON
on which to key the data. The 'columns' keyword does not need to be
passed for a Pandas series.
Colors are generated from color brewer (http://colorbrewer2.org/)
sequential palettes on a D3 threshold scale. The scale defaults to the
following quantiles: [0, 0.5, 0.75, 0.85, 0.9]. A custom scale can be
passed to `threshold_scale` of length <=6, in order to match the
color brewer range.
TopoJSONs can be passed as "geo_path", but the "topojson" keyword must
also be passed with the reference to the topojson objects to convert.
See the topojson.feature method in the TopoJSON API reference:
https://github.com/mbostock/topojson/wiki/API-Reference
Parameters
----------
geo_path: string, default None
URL or File path to your GeoJSON data
geo_str: string, default None
String of GeoJSON, alternative to geo_path
data_out: string, default 'data.json'
Path to write Pandas DataFrame/Series to JSON if binding data
data: Pandas DataFrame or Series, default None
Data to bind to the GeoJSON.
columns: dict or tuple, default None
If the data is a Pandas DataFrame, the columns of data to be bound.
Must pass column 1 as the key, and column 2 the values.
key_on: string, default None
Variable in the GeoJSON file to bind the data to. Must always
            start with 'feature' and be in JavaScript object notation.
Ex: 'feature.id' or 'feature.properties.statename'.
threshold_scale: list, default None
Data range for D3 threshold scale. Defaults to the following range
of quantiles: [0, 0.5, 0.75, 0.85, 0.9], rounded to the nearest
order-of-magnitude integer. Ex: 270 rounds to 200, 5600 to 6000.
fill_color: string, default 'blue'
Area fill color. Can pass a hex code, color name, or if you are
binding data, one of the following color brewer palettes:
'BuGn', 'BuPu', 'GnBu', 'OrRd', 'PuBu', 'PuBuGn', 'PuRd', 'RdPu',
'YlGn', 'YlGnBu', 'YlOrBr', and 'YlOrRd'.
fill_opacity: float, default 0.6
Area fill opacity, range 0-1.
line_color: string, default 'black'
GeoJSON geopath line color.
line_weight: int, default 1
GeoJSON geopath line weight.
line_opacity: float, default 1
GeoJSON geopath line opacity, range 0-1.
legend_name: string, default empty string
Title for data legend.
topojson: string, default None
If using a TopoJSON, passing "objects.yourfeature" to the topojson
keyword argument will enable conversion to GeoJSON.
reset: boolean, default False
Remove all current geoJSON layers, start with new layer
smooth_factor: float, default None
How much to simplify the polyline on each zoom level. More means
better performance and smoother look, and less means more accurate
representation. Leaflet defaults to 1.0.
highlight: boolean, default False
Enable highlight functionality when hovering over a GeoJSON area.
Returns
-------
GeoJSON data layer in obj.template_vars
Examples
--------
>>> m.choropleth(geo_path='us-states.json', line_color='blue',
... line_weight=3)
>>> m.choropleth(geo_path='geo.json', data=df,
... columns=['Data 1', 'Data 2'],
... key_on='feature.properties.myvalue',
... fill_color='PuBu',
... threshold_scale=[0, 20, 30, 40, 50, 60])
>>> m.choropleth(geo_path='countries.json',
... topojson='objects.countries')
>>> m.choropleth(geo_path='geo.json', data=df,
... columns=['Data 1', 'Data 2'],
... key_on='feature.properties.myvalue',
... fill_color='PuBu',
... threshold_scale=[0, 20, 30, 40, 50, 60],
... highlight=True)
"""
if threshold_scale and len(threshold_scale) > 6:
            raise ValueError('threshold_scale must have length <= 6 to match '
                             'the color brewer range.')
if data is not None and not color_brewer(fill_color):
raise ValueError('Please pass a valid color brewer code to '
                             'fill_color. See docstring for valid codes.')
# Create GeoJson object
if geo_path:
geo_data = open(geo_path)
elif geo_str:
geo_data = geo_str
else:
geo_data = {}
# Create color_data dict
if hasattr(data, 'set_index'):
# This is a pd.DataFrame
color_data = data.set_index(columns[0])[columns[1]].to_dict()
elif hasattr(data, 'to_dict'):
# This is a pd.Series
color_data = data.to_dict()
elif data:
color_data = dict(data)
else:
color_data = None
# Compute color_domain
if threshold_scale:
color_domain = list(threshold_scale)
elif color_data:
            # To avoid an explicit pandas dependency; changed default behavior.
data_min = min(color_data.values())
data_max = max(color_data.values())
if data_min == data_max:
data_min = (data_min if data_min < 0 else 0
if data_min > 0 else -1)
data_max = (data_max if data_max > 0 else 0
if data_max < 0 else 1)
data_min, data_max = (1.01*data_min-0.01*data_max,
1.01*data_max-0.01*data_min)
nb_class = 6
color_domain = [data_min+i*(data_max-data_min)*1./nb_class
for i in range(1+nb_class)]
else:
color_domain = None
if color_domain and key_on:
key_on = key_on[8:] if key_on.startswith('feature.') else key_on
color_range = color_brewer(fill_color, n=len(color_domain))
def get_by_key(obj, key):
return (obj.get(key, None) if len(key.split('.')) <= 1 else
get_by_key(obj.get(key.split('.')[0], None),
'.'.join(key.split('.')[1:])))
def color_scale_fun(x):
return color_range[len(
[u for u in color_domain if
get_by_key(x, key_on) in color_data and
u <= color_data[get_by_key(x, key_on)]])]
else:
def color_scale_fun(x):
return fill_color
def style_function(x):
return {
"weight": line_weight,
"opacity": line_opacity,
"color": line_color,
"fillOpacity": fill_opacity,
"fillColor": color_scale_fun(x)
}
def highlight_function(x):
return {
"weight": line_weight + 2,
"fillOpacity": fill_opacity + .2
}
if topojson:
geo_json = TopoJson(
geo_data,
topojson,
style_function=style_function,
smooth_factor=smooth_factor)
else:
geo_json = GeoJson(
geo_data,
style_function=style_function,
smooth_factor=smooth_factor,
highlight_function=highlight_function if highlight else None)
self.add_child(geo_json)
# Create ColorMap.
if color_domain:
brewed = color_brewer(fill_color, n=len(color_domain))
color_scale = StepColormap(
brewed[1:len(color_domain)],
index=color_domain,
vmin=color_domain[0],
vmax=color_domain[-1],
caption=legend_name,
)
self.add_child(color_scale)
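        # Worked example (illustrative numbers): without a threshold_scale, data
        # spanning 0..100 is first padded by 1% on each side (data_min, data_max =
        # -1.0, 101.0) and then split into nb_class = 6 equal bins, so the default
        # color_domain is [-1.0, 16.0, 33.0, 50.0, 67.0, 84.0, 101.0].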
|
shankari/folium
|
folium/folium.py
|
Python
|
mit
| 14,914
| 0
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the zapwallettxes functionality.
- start two iopd nodes
- create two transactions on node 0 - one is confirmed and one is unconfirmed.
- restart node 0 and verify that both the confirmed and the unconfirmed
transactions are still available.
- restart node 0 with zapwallettxes and persistmempool, and verify that both
the confirmed and the unconfirmed transactions are still available.
- restart node 0 with just zapwallettxes and verify that the confirmed
transactions are still available, but that the unconfirmed transaction has
been zapped.
"""
from test_framework.test_framework import IoPTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
wait_until,
)
class ZapWalletTXesTest (IoPTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[1].generate(100)
self.sync_all()
# This transaction will be confirmed
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
self.nodes[0].generate(1)
self.sync_all()
# This transaction will not be confirmed
txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
# Confirmed and unconfirmed transactions are now in the wallet.
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
self.stop_node(0)
self.start_node(0)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
# transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
self.stop_node(0)
self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes, but not persistmempool.
# The unconfirmed transaction is zapped and is no longer in the wallet.
self.stop_node(0)
self.start_node(0, ["-zapwallettxes=2"])
        # tx1 is still available because it was confirmed
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
# This will raise an exception because the unconfirmed transaction has been zapped
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
if __name__ == '__main__':
ZapWalletTXesTest().main()
|
Anfauglith/iop-hd
|
test/functional/zapwallettxes.py
|
Python
|
mit
| 3,234
| 0.002474
|
import sys
import os
import shutil
def import_package(name):
_filepath = os.path.abspath(__file__)
path = backup = os.path.dirname(_filepath)
while os.path.basename(path) != name:
path = os.path.join(path, '..')
path = os.path.abspath(path)
if path != backup:
sys.path.insert(0, path)
module = __import__(name)
return module
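# A minimal usage sketch (the package name 'virtuallinks' is an assumption taken
# from the repository layout, not from this file): the helper walks up from this
# file's directory until it reaches a directory named after the package, puts
# that directory on sys.path and imports the package by name.
#
#     virtuallinks = import_package('virtuallinks')
#     print(virtuallinks.__file__)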
|
ffunenga/virtuallinks
|
tests/core/core.py
|
Python
|
mit
| 375
| 0
|
"""
A module of restricted Boltzmann machine (RBM) modified
from the Deep Learning Tutorials (www.deeplearning.net/tutorial/).
Copyright (c) 2008-2013, Theano Development Team All rights reserved.
Modified by Yifeng Li
CMMT, UBC, Vancouver
Sep 23, 2014
Contact: yifeng.li.cn@gmail.com
"""
from __future__ import division
import time
import math
import numpy
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
import classification as cl
class RBM(object):
"""Restricted Boltzmann Machine (RBM) """
def __init__(self, input=None, n_visible=784, n_hidden=500, \
W=None, hbias=None, vbias=None, numpy_rng=None,
theano_rng=None):
"""
RBM constructor. Defines the parameters of the model along with
basic operations for inferring hidden from visible (and vice-versa),
as well as for performing CD updates.
:param input: None for standalone RBMs or symbolic variable if RBM is
part of a larger graph.
:param n_visible: number of visible units
:param n_hidden: number of hidden units
:param W: None for standalone RBMs or symbolic variable pointing to a
shared weight matrix in case RBM is part of a DBN network; in a DBN,
the weights are shared between RBMs and layers of a MLP
:param hbias: None for standalone RBMs or symbolic variable pointing
to a shared hidden units bias vector in case RBM is part of a
different network
:param vbias: None for standalone RBMs or a symbolic variable
pointing to a shared visible units bias
"""
self.n_visible = n_visible
self.n_hidden = n_hidden
if numpy_rng is None:
# create a number generator
numpy_rng = numpy.random.RandomState(1234)
if theano_rng is None:
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
if W is None:
            # W is initialized with `initial_W` which is uniformly
            # sampled from -4*sqrt(6./(n_visible+n_hidden)) and
            # 4*sqrt(6./(n_hidden+n_visible)); the output of uniform is
            # converted using asarray to dtype theano.config.floatX so
            # that the code is runnable on GPU
initial_W = numpy.asarray(numpy_rng.uniform(
low=-4 * numpy.sqrt(6. / (n_hidden + n_visible)),
high=4 * numpy.sqrt(6. / (n_hidden + n_visible)),
size=(n_visible, n_hidden)),
dtype=theano.config.floatX)
# theano shared variables for weights and biases
W = theano.shared(value=initial_W, name='W', borrow=True)
if hbias is None:
# create shared variable for hidden units bias
hbias = theano.shared(value=numpy.zeros(n_hidden,
dtype=theano.config.floatX),
name='hbias', borrow=True)
if vbias is None:
# create shared variable for visible units bias
vbias = theano.shared(value=numpy.zeros(n_visible,
dtype=theano.config.floatX),
name='vbias', borrow=True)
# initialize input layer for standalone RBM or layer0 of DBN
self.input = input
if not input:
self.input = T.matrix('input')
self.W = W
self.hbias = hbias
self.vbias = vbias
self.theano_rng = theano_rng
# **** WARNING: It is not a good idea to put things in this list
# other than shared variables created in this function.
self.params = [self.W, self.hbias, self.vbias]
def free_energy(self, v_sample):
''' Function to compute the free energy '''
wx_b = T.dot(v_sample, self.W) + self.hbias
vbias_term = T.dot(v_sample, self.vbias)
hidden_term = T.sum(T.log(1 + T.exp(wx_b)), axis=1)
return -hidden_term - vbias_term
def propup(self, vis):
'''This function propagates the visible units activation upwards to
the hidden units
Note that we return also the pre-sigmoid activation of the
layer. As it will turn out later, due to how Theano deals with
optimizations, this symbolic variable will be needed to write
down a more stable computational graph (see details in the
reconstruction cost function)
'''
pre_sigmoid_activation = T.dot(vis, self.W) + self.hbias
return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
def sample_h_given_v(self, v0_sample):
''' This function infers state of hidden units given visible units '''
# compute the activation of the hidden units given a sample of
# the visibles
pre_sigmoid_h1, h1_mean = self.propup(v0_sample)
# get a sample of the hiddens given their activation
# Note that theano_rng.binomial returns a symbolic sample of dtype
# int64 by default. If we want to keep our computations in floatX
# for the GPU we need to specify to return the dtype floatX
h1_sample = self.theano_rng.binomial(size=h1_mean.shape,
n=1, p=h1_mean,
dtype=theano.config.floatX)
return [pre_sigmoid_h1, h1_mean, h1_sample]
def propdown(self, hid):
'''This function propagates the hidden units activation downwards to
the visible units
Note that we return also the pre_sigmoid_activation of the
layer. As it will turn out later, due to how Theano deals with
optimizations, this symbolic variable will be needed to write
down a more stable computational graph (see details in the
reconstruction cost function)
'''
pre_sigmoid_activation = T.dot(hid, self.W.T) + self.vbias
return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
def sample_v_given_h(self, h0_sample):
''' This function infers state of visible units given hidden units '''
# compute the activation of the visible given the hidden sample
pre_sigmoid_v1, v1_mean = self.propdown(h0_sample)
# get a sample of the visible given their activation
# Note that theano_rng.binomial returns a symbolic sample of dtype
# int64 by default. If we want to keep our computations in floatX
# for the GPU we need to specify to return the dtype floatX
v1_sample = self.theano_rng.binomial(size=v1_mean.shape,
n=1, p=v1_mean,
dtype=theano.config.floatX)
return [pre_sigmoid_v1, v1_mean, v1_sample]
def gibbs_hvh(self, h0_sample):
''' This function implements one step of Gibbs sampling,
starting from the hidden state'''
pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v1_sample)
return [pre_sigmoid_v1, v1_mean, v1_sample,
pre_sigmoid_h1, h1_mean, h1_sample]
def gibbs_vhv(self, v0_sample):
''' This function implements one step of Gibbs sampling,
starting from the visible state'''
pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v0_sample)
pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h1_sample)
return [pre_sigmoid_h1, h1_mean, h1_sample,
pre_sigmoid_v1, v1_mean, v1_sample]
def get_cost_updates(self, lr=0.1, persistent=None, k=1):
"""This functions implements one step of CD-k or PCD-k
:param lr: learning rate used to train the RBM
:param persistent: None for CD. For PCD, shared variable
containing old state of Gibbs chain. This must be a shared
variable of size (batch size, number of hidden units).
:param k: number of Gibbs steps to do in CD-k/PCD-k
Returns a proxy for the cost and the updates dictionary. The
dictionary contains the update rules for weights and biases but
also an update of the shared variable used to store the persistent
chain, if one is used.
"""
# compute positive phase
pre_sigmoid_ph, ph_mean, ph_sample = self.sample_h_given_v(self.input)
# decide how to initialize persistent chain:
        # for CD, we use the newly generated hidden sample
# for PCD, we initialize from the old state of the chain
if persistent is None:
chain_start = ph_sample
else:
chain_start = persistent
# perform actual negative phase
# in order to implement CD-k/PCD-k we need to scan over the
# function that implements one gibbs step k times.
# Read Theano tutorial on scan for more information :
# http://deeplearning.net/software/theano/library/scan.html
# the scan will return the entire Gibbs chain
        # updates is a dictionary of updates to the values of shared variables,
        # including model parameters and the persistent chain
[pre_sigmoid_nvs, nv_means, nv_samples,
pre_sigmoid_nhs, nh_means, nh_samples], updates = \
theano.scan(self.gibbs_hvh,
# the None are place holders, saying that
# chain_start is the initial state corresponding to the
# 6th output
outputs_info=[None, None, None, None, None, chain_start],
n_steps=k)
# determine gradients on RBM parameters
        # note that we only need the sample at the end of the chain
chain_end = nv_samples[-1]
cost = T.mean(self.free_energy(self.input)) - T.mean(
self.free_energy(chain_end))
# We must not compute the gradient through the gibbs sampling
gparams = T.grad(cost, self.params, consider_constant=[chain_end])
# constructs the update dictionary
for gparam, param in zip(gparams, self.params):
# make sure that the learning rate is of the right dtype
# update is a dictionary, add the parameter update dictionary items
updates[param] = param - gparam * T.cast(lr,
dtype=theano.config.floatX)
if persistent:
# Note that this works only if persistent is a shared variable
updates[persistent] = nh_samples[-1]
# pseudo-likelihood is a better proxy for PCD
monitoring_cost = self.get_pseudo_likelihood_cost(updates)
else:
# reconstruction cross-entropy is a better proxy for CD
monitoring_cost = self.get_reconstruction_cost(updates,
pre_sigmoid_nvs[-1])
return monitoring_cost, updates
def get_pseudo_likelihood_cost(self, updates):
"""Stochastic approximation to the pseudo-likelihood"""
# index of bit i in expression p(x_i | x_{\i})
bit_i_idx = theano.shared(value=0, name='bit_i_idx')
# binarize the input image by rounding to nearest integer
xi = T.round(self.input)
# calculate free energy for the given bit configuration
fe_xi = self.free_energy(xi)
# flip bit x_i of matrix xi and preserve all other bits x_{\i}
# Equivalent to xi[:,bit_i_idx] = 1-xi[:, bit_i_idx], but assigns
# the result to xi_flip, instead of working in place on xi.
xi_flip = T.set_subtensor(xi[:, bit_i_idx], 1 - xi[:, bit_i_idx])
# calculate free energy with bit flipped
fe_xi_flip = self.free_energy(xi_flip)
# equivalent to e^(-FE(x_i)) / (e^(-FE(x_i)) + e^(-FE(x_{\i})))
cost = T.mean(self.n_visible * T.log(T.nnet.sigmoid(fe_xi_flip -
fe_xi)))
# increment bit_i_idx % number as part of updates
updates[bit_i_idx] = (bit_i_idx + 1) % self.n_visible
return cost
def get_reconstruction_cost(self, updates, pre_sigmoid_nv):
"""Approximation to the reconstruction error
Note that this function requires the pre-sigmoid activation as
input. To understand why this is so you need to understand a
bit about how Theano works. Whenever you compile a Theano
function, the computational graph that you pass as input gets
optimized for speed and stability. This is done by changing
several parts of the subgraphs with others. One such
optimization expresses terms of the form log(sigmoid(x)) in
terms of softplus. We need this optimization for the
        cross-entropy since the sigmoid of numbers larger than 30. (or
        even less than that) turns to 1. and numbers smaller than
        -30. turn to 0, which in turn will force Theano to compute
log(0) and therefore we will get either -inf or NaN as
cost. If the value is expressed in terms of softplus we do not
get this undesirable behaviour. This optimization usually
works fine, but here we have a special case. The sigmoid is
applied inside the scan op, while the log is
outside. Therefore Theano will only see log(scan(..)) instead
of log(sigmoid(..)) and will not apply the wanted
optimization. We can not go and replace the sigmoid in scan
with something else also, because this only needs to be done
on the last step. Therefore the easiest and more efficient way
is to get also the pre-sigmoid activation as an output of
scan, and apply both the log and sigmoid outside scan such
that Theano can catch and optimize the expression.
"""
cross_entropy = T.mean(
T.sum(self.input * T.log(T.nnet.sigmoid(pre_sigmoid_nv)) +
(1 - self.input) * T.log(1 - T.nnet.sigmoid(pre_sigmoid_nv)),
axis=1))
return cross_entropy
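# A minimal numerical sketch of the stability issue described in the docstring
# above (the values are illustrative; the float32/GPU case described there
# breaks down at much smaller magnitudes than float64 does):
#
#     x = numpy.array([-800.0, 0.0, 800.0])
#     numpy.log(1.0 / (1.0 + numpy.exp(-x)))   # [-inf, -0.693, 0.] plus overflow warnings
#     -numpy.logaddexp(0.0, -x)                # [-800., -0.693, -0.] finite everywhere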
def train_model(rng=numpy.random.RandomState(100), train_set_x_org=None, n_hidden=100,
learning_rate=0.1, training_epochs=100, batch_size=100, persistent_chain_k=15):
"""
Train a RBM model given training data.
INPUTS:
rng: numpy random number state.
train_set_x_org: numpy 2d array, each row is a training sample.
n_hidden, int, number of hidden units.
learning_rate: float scalar, the initial learning rate.
training_epochs: int scalar, the maximal number of epochs.
batch_size: int scalar, minibatch size.
persistent_chain_k: length of persistent chain from the last sampling to new sampling.
OUTPUTS:
rbm: object of RBM. The model learned.
mean_hidden: numpy 2d array, each row is a reduced training sample.
training_time: training time.
"""
train_set_x = theano.shared(numpy.asarray(train_set_x_org,dtype=theano.config.floatX),borrow=True)
n_train_batches = int(math.ceil(train_set_x.get_value(borrow=True).shape[0] / batch_size))
# allocate symbolic variables for the data
index = T.lscalar() # index to a [mini]batch
x = T.matrix('x') # the data is presented as rasterized images
# shared variable to reduce the learning rate
learning_rate_shared=theano.shared(learning_rate,name='learn_rate_shared')
# learning_rate_init=T.scalar(name='learning_rate_init',dtype=theano.config.floatX)
# epoch_variable=T.iscalar(name='epoch_variable')
decay_rate=T.scalar(name='decay_rate',dtype=theano.config.floatX)
# compute_learn_rate=theano.function([learning_rate_init,epoch_variable,decay_rate],learning_rate_shared, \
# updates=[(learning_rate_shared,learning_rate_init*decay_rate**(epoch_variable//100))]) # thenao does not support math.pow, instead use T.pow() or a**b
reduce_learning_rate=theano.function([decay_rate],learning_rate_shared,updates=[(learning_rate_shared,learning_rate_shared*decay_rate)])
n_visible=train_set_x_org.shape[1] # number of input features
theano_rng = RandomStreams(rng.randint(2 ** 30))
# initialize storage for the persistent chain (state = hidden
# layer of chain)
persistent_chain = theano.shared(numpy.zeros((batch_size, n_hidden),
dtype=theano.config.floatX),
borrow=True)
# construct the RBM class
rbm = RBM(input=x, n_visible=n_visible,
n_hidden=n_hidden, numpy_rng=rng, theano_rng=theano_rng)
# get the cost and the gradient corresponding to one step of CD-15
cost, updates = rbm.get_cost_updates(lr=learning_rate,persistent=persistent_chain,k=persistent_chain_k)
# it is ok for a theano function to have no output
# the purpose of train_rbm is solely to update the RBM parameters
train_rbm_one_iteration = theano.function([index], cost, updates=updates,
givens={x: train_set_x[index * batch_size:(index + 1) * batch_size]},
name='train_rbm')
# optimization, gradient descent
max_num_epoch_change_learning_rate=100
max_num_epoch_not_improve=2*max_num_epoch_change_learning_rate
max_num_epoch_change_rate=0.8
epoch_change_count=0
best_cost=numpy.inf
# train the model using training set
start_time=time.clock()
for epoch in xrange(training_epochs):
c=[] # costs of all minibatches of this epoch
epoch_change_count=epoch_change_count+1
if epoch_change_count % max_num_epoch_change_learning_rate ==0:
reduce_learning_rate(0.5)
max_num_epoch_change_learning_rate= \
cl.change_max_num_epoch_change_learning_rate(max_num_epoch_change_learning_rate,max_num_epoch_change_rate)
max_num_epoch_not_improve=2*max_num_epoch_change_learning_rate
epoch_change_count=0
for batch_index in xrange(n_train_batches):
c_batch=train_rbm_one_iteration(batch_index)
c.append(c_batch)
this_cost=numpy.mean(c)
        print 'Training epoch: %d, cost: %f' % (epoch,this_cost)
if this_cost<best_cost:
best_cost=this_cost
num_epoch_not_improve=0
if this_cost>=best_cost:
num_epoch_not_improve=num_epoch_not_improve+1
if num_epoch_not_improve>=max_num_epoch_not_improve:
break
end_time=time.clock()
training_time=end_time-start_time
    print 'Training time: %f minutes' %(training_time/60)
# return the trained model and the reduced training set
extracted=rbm.propup(train_set_x)
get_extracted=theano.function([],extracted)
pre_activation,mean_hidden=get_extracted()
return rbm, mean_hidden, training_time
def test_model(model_trained,test_set_x_org=None):
"""
Get the reduced data using the model learned.
INPUTS:
model_trained: object of RBM, RBM model learned.
test_set_x_org: numpy 2d array, each row is a sample.
OUTPUTS:
mean_hidden: numpy 2d array, the reduced data.
"""
test_set_x=theano.shared(numpy.asarray(test_set_x_org,dtype=theano.config.floatX),borrow=True)
extracted=model_trained.propup(test_set_x)
get_extracted=theano.function([],extracted)
pre_activation,mean_hidden=get_extracted()
return mean_hidden
def sample_model(rng,model_trained,test_set_x_org=None,n_chains=20,n_samples=10,sample_gap=1000):
"""
Sample from the trained RBM given some actual examples to initialize the algorithm.
INPUTS:
rng: numpy random number state.
model_trained: object of RBM, RBM model learned.
    test_set_x_org: numpy 2d array, each row is an actual example.
    n_chains: number of Gibbs chains to be sampled independently.
    n_samples: int, number of samples to be taken in each chain.
    A sample is taken every "sample_gap" steps.
sample_gap: int, steps of Gibbs sampling before taking samples.
OUTPUTS:
samples_vis: numpy array of n_samples X n_chains X num_visible_units,
sampled samples.
samples_vis_mf: numpy array of n_samples X n_chains X num_visible_units,
mean fields of sampled samples.
"""
test_set_x=theano.shared(numpy.asarray(test_set_x_org,dtype=theano.config.floatX),borrow=True)
number_of_test_samples = test_set_x.get_value(borrow=True).shape[0]
# pick random test examples, with which to initialize the persistent chain
test_idx = rng.randint(number_of_test_samples - n_chains)
persistent_vis_chain = theano.shared(numpy.asarray(
test_set_x.get_value(borrow=True)[test_idx:test_idx + n_chains],
dtype=theano.config.floatX))
# sampling
[presig_hids, hid_mfs, hid_samples, presig_vis,
vis_mfs, vis_samples], updates = \
theano.scan(model_trained.gibbs_vhv,
outputs_info=[None, None, None, None,
None, persistent_vis_chain],
n_steps=sample_gap)
# add to updates the shared variable that takes care of our persistent
# chain :.
updates.update({persistent_vis_chain: vis_samples[-1]})
# construct the function that implements our persistent chain.
# we generate the "mean field" activations for plotting and the actual
# samples for reinitializing the state of our persistent chain
sample_fn = theano.function([], [vis_mfs[-1], vis_samples[-1]],
updates=updates,
name='sample_fn')
# sample n_samples here
samples_vis=numpy.zeros((n_samples,n_chains,model_trained.n_visible),dtype=test_set_x_org.dtype)
    samples_vis_mf=numpy.zeros_like(samples_vis) # separate buffer; assigning samples_vis directly would alias the two arrays
for idx in xrange(n_samples):
vis_mf, vis_sample = sample_fn()
samples_vis[idx,:,:]=vis_sample
samples_vis_mf[idx,:,:]=vis_mf
return samples_vis, samples_vis_mf
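# A minimal end-to-end usage sketch under assumed toy data (array shapes and
# hyper-parameters below are illustrative, not taken from this module):
#
#     rng = numpy.random.RandomState(100)
#     X = (rng.uniform(size=(500, 784)) < 0.5).astype('float32')   # binary toy data
#     rbm, mean_hidden, t = train_model(rng=rng, train_set_x_org=X, n_hidden=100,
#                                       learning_rate=0.1, training_epochs=10,
#                                       batch_size=100, persistent_chain_k=15)
#     H_test = test_model(rbm, test_set_x_org=X[:100])
#     samples, samples_mf = sample_model(rng, rbm, test_set_x_org=X,
#                                        n_chains=20, n_samples=5, sample_gap=100)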
|
yifeng-li/DECRES
|
rbm.py
|
Python
|
bsd-3-clause
| 22,163
| 0.006723
|
"""Provide common test tools for Z-Wave JS."""
AIR_TEMPERATURE_SENSOR = "sensor.multisensor_6_air_temperature"
HUMIDITY_SENSOR = "sensor.multisensor_6_humidity"
ENERGY_SENSOR = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed_2"
POWER_SENSOR = "sensor.smart_plug_with_two_usb_ports_value_electric_consumed"
SWITCH_ENTITY = "switch.smart_plug_with_two_usb_ports"
LOW_BATTERY_BINARY_SENSOR = "binary_sensor.multisensor_6_low_battery_level"
ENABLED_LEGACY_BINARY_SENSOR = "binary_sensor.z_wave_door_window_sensor_any"
DISABLED_LEGACY_BINARY_SENSOR = "binary_sensor.multisensor_6_any"
NOTIFICATION_MOTION_BINARY_SENSOR = (
"binary_sensor.multisensor_6_home_security_motion_detection"
)
NOTIFICATION_MOTION_SENSOR = "sensor.multisensor_6_home_security_motion_sensor_status"
PROPERTY_DOOR_STATUS_BINARY_SENSOR = (
"binary_sensor.august_smart_lock_pro_3rd_gen_the_current_status_of_the_door"
)
CLIMATE_RADIO_THERMOSTAT_ENTITY = "climate.z_wave_thermostat"
CLIMATE_DANFOSS_LC13_ENTITY = "climate.living_connect_z_thermostat"
CLIMATE_EUROTRONICS_SPIRIT_Z_ENTITY = "climate.thermostatic_valve"
CLIMATE_FLOOR_THERMOSTAT_ENTITY = "climate.floor_thermostat"
CLIMATE_MAIN_HEAT_ACTIONNER = "climate.main_heat_actionner"
BULB_6_MULTI_COLOR_LIGHT_ENTITY = "light.bulb_6_multi_color"
EATON_RF9640_ENTITY = "light.allloaddimmer"
AEON_SMART_SWITCH_LIGHT_ENTITY = "light.smart_switch_6"
ID_LOCK_CONFIG_PARAMETER_SENSOR = (
"sensor.z_wave_module_for_id_lock_150_and_101_config_parameter_door_lock_mode"
)
|
w1ll1am23/home-assistant
|
tests/components/zwave_js/common.py
|
Python
|
apache-2.0
| 1,508
| 0.002653
|
#! /usr/bin/env python
import sys
import PEAT_SA.Core as Core
import Protool
import itertools
def getPathSequence(combinations):
path = []
currentSet = set(combinations[0].split(','))
path.append(combinations[0])
for i in range(1, len(combinations)):
newSet = set(combinations[i].split(','))
newElement = newSet.difference(currentSet)
path.append(list(newElement)[0])
currentSet = newSet
return path
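# Worked example for the helper above (mutation codes are illustrative): given
# observed combinations that grow by one mutation at a time, the returned path
# is the first combination followed by the mutation added at each step, e.g.
#     getPathSequence(['84V', '84V,32F', '84V,32F,10I']) -> ['84V', '32F', '10I']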
def getTypePath(combinations, typeMap):
path = getPathSequence(combinations)
    print 'Mutation accumulation pattern ', path
types = []
for el in path:
try:
types.append(typeMap[el])
except KeyError:
types.append('N/A')
print types
return types
def codesToCombinations(mutationCodes):
'''Converts a mutation code, which involves both chains, to a list of non-chain specific codes
e.g. A84V+B84V+A32F+B32F => 84V, 32F'''
holder = []
for code in mutationCodes:
mutationSet = Core.Data.MutationSet(code)
holder.append(list(set([code[1:] for code in mutationSet.reducedMutationCodes()])))
return holder
def observedPathsForCombination(combination, observedCombinations):
print '\nSearching for observed paths to combination %s' % combination
numberOfMutations = len(combination)
pathways = itertools.permutations(combination, numberOfMutations)
checked = 0
found = 0
observedPathways = []
for pathway in pathways:
#print 'Putative pathway %s' % list(pathway)
parts = []
for j in range(1,numberOfMutations + 1):
observed = False
sub = pathway[:j]
#print '\tChecking subpath %s is observed' % list(sub)
subPerms = itertools.permutations(sub)
            #Check if this sub combination is observed
for subPerm in subPerms:
subPerm = ','.join(subPerm)
if observedCombinations.count(subPerm) == 1:
#print '\tObserved Sub %s!' % subPerm
parts.append(subPerm)
observed = True
break
if observed is False:
break
if observed:
found = found + 1
observedPathways.append(parts)
checked = checked + 1
print '%d putative pathways. %d found\n' % (checked, found)
return observedPathways
def vitalityProfileForPath(path, vitalities, fold):
print 'Vitalities :',
values = []
for combination in path:
print vitalities[combination],
values.append(vitalities[combination])
print '\n',
    print 'Fold :',
folds = []
for combination in path:
print fold[combination],
folds.append(fold[combination])
print '\n'
return values, folds
#Read in types
typeData = Core.Matrix.matrixFromCSVFile(sys.argv[2])
typeIndex = typeData.indexOfColumnWithHeader('Type')
#Get all entries for specified drug
drugName = sys.argv[4]
trimMatrix = Core.Matrix.PEATSAMatrix(rows=[[0]*9], headers=typeData.columnHeaders())
drugNameIndex = typeData.indexOfColumnWithHeader('Drug Name')
for row in typeData:
if row[drugNameIndex] == drugName:
trimMatrix.addRow(row)
#Read in combinations
combinationData = Core.Matrix.matrixFromCSVFile(sys.argv[1])
mutationCodes = combinationData.column(0)
combinations = codesToCombinations(mutationCodes)
print combinations
vitalities = combinationData.columnWithHeader(drugName+'Vitality')
fold = combinationData.columnWithHeader(drugName+'Fold')
pdb = Protool.structureIO()
pdb.readpdb(sys.argv[3])
types = []
combinationStrings = [','.join(combo) for combo in combinations]
#Skip WT
mutations = trimMatrix.columnWithHeader('Mutations')[1:]
mutations = [(el[:-2] + el[-1]) for el in mutations]
typeMap = dict(zip(mutations, trimMatrix.column(typeIndex)))
filteredPaths = []
for combination in combinations:
paths = observedPathsForCombination(combination, combinationStrings)
for path in paths:
accumulationPattern = getPathSequence(path)
if accumulationPattern[-1][:2] == '46' and len(accumulationPattern) > 1:
print 'Found paths ending with mutation to 46'
filteredPaths.append(path)
results = []
for path in filteredPaths:
#typePath = getTypePath(path, typeMap)
profile, foldres = vitalityProfileForPath(path, dict(zip(combinationStrings, vitalities)), dict(zip(combinationStrings, fold)))
if profile[-2:].count('') == 0:
mutation = path[-2]
entry = [mutation, profile[-1] - profile[-2], foldres[-1]/foldres[-2]]
if results.count(entry) == 0:
results.append(entry)
else:
print 'Skipping - Missing data\n'
for m in results:
print m
|
dmnfarrell/peat
|
PEATSA/Tools/HIVTools/CombinationConverter.py
|
Python
|
mit
| 4,305
| 0.029268
|
# Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities and decorators for converting external types into the fqe
intrinsics
"""
#there are two places where access to protected members improves code quality
#pylint: disable=protected-access
from typing import Any, Dict, Tuple, Union, Optional, List
from functools import wraps
from itertools import permutations
import copy
import numpy
from openfermion import FermionOperator
from openfermion.utils import is_hermitian
from openfermion import normal_ordered
from fqe.hamiltonians import hamiltonian
from fqe.hamiltonians import general_hamiltonian
from fqe.hamiltonians import diagonal_hamiltonian
from fqe.hamiltonians import diagonal_coulomb
from fqe.hamiltonians import gso_hamiltonian
from fqe.hamiltonians import restricted_hamiltonian
from fqe.hamiltonians import sparse_hamiltonian
from fqe.hamiltonians import sso_hamiltonian
from fqe.openfermion_utils import largest_operator_index
from fqe.util import validate_tuple, reverse_bubble_list
from fqe.fqe_ops import fqe_ops_utils
def build_hamiltonian(ops: Union[FermionOperator, hamiltonian.Hamiltonian],
norb: int = 0,
conserve_number: bool = True,
e_0: complex = 0. + 0.j) -> 'hamiltonian.Hamiltonian':
"""Build a Hamiltonian object for FQE.
Args:
ops (FermionOperator, hamiltonian.Hamiltonian): input operator as \
FermionOperator. If a Hamiltonian is passed as an argument, \
this function returns as is.
norb (int): the number of orbitals in the system
conserve_number (bool): whether the operator conserves the number
e_0 (complex): the scalar part of the operator
Returns:
(hamiltonian.Hamiltonian): General Hamiltonian that is created from ops
"""
if isinstance(ops, hamiltonian.Hamiltonian):
return ops
if isinstance(ops, tuple):
validate_tuple(ops)
if norb != 0 and ops[0].shape[0] == norb:
return restricted_hamiltonian.RestrictedHamiltonian(ops, e_0=e_0)
else:
return general_hamiltonian.General(ops, e_0=e_0)
if not isinstance(ops, FermionOperator):
raise TypeError('Expected FermionOperator' \
' but received {}.'.format(type(ops)))
assert is_hermitian(ops)
out: Any
if len(ops.terms) <= 2:
out = sparse_hamiltonian.SparseHamiltonian(ops, e_0=e_0)
else:
if not conserve_number:
ops = transform_to_spin_broken(ops)
ops = normal_ordered(ops)
ops_rank, e_0 = split_openfermion_tensor(ops) # type: ignore
if norb == 0:
for term in ops_rank.values():
ablk, bblk = largest_operator_index(term)
norb = max(norb, ablk // 2 + 1, bblk // 2 + 1)
ops_mat = {}
maxrank = 0
for rank, term in ops_rank.items():
index = rank // 2 - 1
ops_mat[index] = fermionops_tomatrix(term, norb)
maxrank = max(index, maxrank)
if len(ops_mat) == 1 and (0 in ops_mat):
out = process_rank2_matrix(ops_mat[0], norb=norb, e_0=e_0)
elif len(ops_mat) == 1 and \
(1 in ops_mat) and \
check_diagonal_coulomb(ops_mat[1]):
out = diagonal_coulomb.DiagonalCoulomb(ops_mat[1], e_0=e_0)
else:
dtypes = [xx.dtype for xx in ops_mat.values()]
dtypes = numpy.unique(dtypes)
assert len(dtypes) == 1
for i in range(maxrank + 1):
if i not in ops_mat:
mat_dim = tuple([2 * norb for _ in range((i + 1) * 2)])
ops_mat[i] = numpy.zeros(mat_dim, dtype=dtypes[0])
ops_mat2 = []
for i in range(maxrank + 1):
ops_mat2.append(ops_mat[i])
out = general_hamiltonian.General(tuple(ops_mat2), e_0=e_0)
out._conserve_number = conserve_number
return out
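# A minimal usage sketch (the operator and orbital count are illustrative
# assumptions, not taken from this module): a Hermitian FermionOperator with at
# most two terms is routed to SparseHamiltonian, while larger operators are
# normal-ordered, split by rank and packed into dense tensors.
#
#     ops = FermionOperator('0^ 0', 1.0) + FermionOperator('1^ 1', 1.0)
#     hamil = build_hamiltonian(ops, norb=2)   # -> sparse_hamiltonian.SparseHamiltonian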
def transform_to_spin_broken(ops: 'FermionOperator') -> 'FermionOperator':
"""Convert a FermionOperator string from number broken to spin broken
operators.
Args:
ops (FermionOperator): input FermionOperator
Returns:
(FermionOperator): transformed FermionOperator to spin broken indexing
"""
newstr = FermionOperator()
for term in ops.terms:
opstr = ''
for element in term:
if element[0] % 2:
if element[1]:
opstr += str(element[0]) + ' '
else:
opstr += str(element[0]) + '^ '
else:
if element[1]:
opstr += str(element[0]) + '^ '
else:
opstr += str(element[0]) + ' '
newstr += FermionOperator(opstr, ops.terms[term])
return newstr
def split_openfermion_tensor(ops: 'FermionOperator'
) -> Tuple[Dict[int, 'FermionOperator'], complex]:
"""Given a string of openfermion operators, split them according to their
rank.
Args:
ops (FermionOperator): a string of OpenFermion Fermion Operators
Returns:
split dict[int] = FermionOperator: a list of Fermion Operators sorted
according to their rank.
"""
e_0 = 0. + 0.j
split: Dict[int, 'FermionOperator'] = {}
for term in ops:
rank = term.many_body_order()
if rank % 2:
raise ValueError('Odd rank term not accepted')
if rank == 0:
e_0 += term.terms[()]
else:
if rank not in split:
split[rank] = term
else:
split[rank] += term
return split, e_0
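# Worked example for the splitter above (the operator is chosen for
# illustration): the scalar part is returned separately and the remaining terms
# are grouped by their many-body order.
#
#     ops = (FermionOperator('', 2.0) + FermionOperator('0^ 0', 1.0)
#            + FermionOperator('0^ 1^ 1 0', 0.5))
#     split, e_0 = split_openfermion_tensor(ops)
#     # e_0 == (2+0j); split[2] holds the one-body term, split[4] the two-body term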
def fermionops_tomatrix(ops: 'FermionOperator', norb: int) -> numpy.ndarray:
"""Convert FermionOperators to a matrix.
Args:
ops (FermionOperator): input FermionOperator from OpenFermion
norb (int): the number of orbitals in the system
Returns:
(numpy.ndarray): resulting matrix
"""
ablk, bblk = largest_operator_index(ops)
if norb <= ablk // 2:
        raise ValueError('Highest alpha index exceeds the number of orbitals')
if norb <= bblk // 2:
        raise ValueError('Highest beta index exceeds the number of orbitals')
rank = ops.many_body_order()
if rank % 2:
raise ValueError('Odd rank operator not supported')
tensor_dim = [norb * 2 for _ in range(rank)]
index_mask = [0 for _ in range(rank)]
index_dict_dagger = [[0, 0] for _ in range(rank // 2)]
index_dict_nondagger = [[0, 0] for _ in range(rank // 2)]
tensor = numpy.zeros(tensor_dim, dtype=numpy.complex128)
for term in ops.terms:
for i in range(rank):
index = term[i][0]
if i < rank // 2:
if not term[i][1]:
                    raise ValueError('Found annihilation operator where ' \
                                     'creation is expected')
elif term[i][1]:
raise ValueError('Found creation operator where ' \
'annihilation is expected')
spin = index % 2
if spin == 1:
ind = (index - 1) // 2 + norb
else:
ind = index // 2
if i < rank // 2:
index_dict_dagger[i][0] = spin
index_dict_dagger[i][1] = ind
else:
index_dict_nondagger[i - rank // 2][0] = spin
index_dict_nondagger[i - rank // 2][1] = ind
parity = reverse_bubble_list(index_dict_dagger)
parity += reverse_bubble_list(index_dict_nondagger)
for i in range(rank):
if i < rank // 2:
index_mask[i] = index_dict_dagger[i][1]
else:
index_mask[i] = index_dict_nondagger[i - rank // 2][1]
tensor[tuple(index_mask)] += (-1)**parity * ops.terms[term]
tensor2 = numpy.zeros_like(tensor)
length = 0
seed = range(rank // 2)
for ip in permutations(seed):
iperm = list(ip)
jperm = copy.deepcopy(iperm)
for j in range(rank // 2):
jperm[j] += rank // 2
tensor2 += tensor.transpose(iperm + jperm)
length += 1
tensor2 /= length
return tensor2
def process_rank2_matrix(mat: numpy.ndarray, norb: int,
e_0: complex = 0. + 0.j) -> 'hamiltonian.Hamiltonian':
"""Look at the structure of the (1, 0) component of the one-body matrix and
determine the symmetries.
Args:
mat (numpy.ndarray): input matrix to be processed
norb (int): the number of orbitals in the system
e_0 (complex): scalar part of the Hamiltonian
Returns:
(Hamiltonian): resulting Hamiltonian
"""
if not numpy.allclose(mat, mat.conj().T):
raise ValueError('Input matrix is not Hermitian')
test = numpy.copy(mat)
numpy.fill_diagonal(test, 0.0)
diagonal = not numpy.any(test)
if diagonal:
return diagonal_hamiltonian.Diagonal(mat.diagonal(), e_0=e_0)
if mat[norb:2 * norb, :norb].any():
return gso_hamiltonian.GSOHamiltonian(tuple([mat]), e_0=e_0)
if numpy.allclose(mat[:norb, :norb], mat[norb:, norb:]):
return restricted_hamiltonian.RestrictedHamiltonian(
(mat[:norb, :norb],), e_0=e_0)
return sso_hamiltonian.SSOHamiltonian(tuple([mat]), e_0=e_0)
def check_diagonal_coulomb(mat: numpy.ndarray) -> bool:
"""Look at the structure of the two-body matrix and determine
if it is diagonal coulomb
Args:
mat (numpy.ndarray): input two-body Hamiltonian elements
Returns:
(bool): whether mat is diagonal Coulomb
"""
dim = mat.shape[0]
assert mat.shape == (dim, dim, dim, dim)
test = numpy.copy(mat).reshape((dim * dim, dim * dim))
numpy.fill_diagonal(test, 0.0)
return not numpy.any(test)
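# Worked example for the check above (a hand-built tensor, for illustration): a
# two-body tensor is diagonal Coulomb when only its mat[i, j, i, j] entries are
# non-zero, i.e. the flattened (dim*dim, dim*dim) matrix is diagonal.
#
#     mat = numpy.zeros((2, 2, 2, 2))
#     mat[0, 1, 0, 1] = 0.5
#     mat[1, 0, 1, 0] = 0.5
#     check_diagonal_coulomb(mat)     # True
#     mat[0, 1, 1, 0] = 0.1
#     check_diagonal_coulomb(mat)     # False: off-diagonal entry breaks it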
def wrap_rdm(rdm):
"""Decorator to convert parameters to `Wavefunction.rdm()` \
to FQE internal classes.
"""
@wraps(rdm)
def symmetry_process(self, string, brawfn=None):
if self.conserve_spin() and not self.conserve_number():
wfn = self._copy_beta_inversion()
else:
wfn = self
if any(char.isdigit() for char in string):
if self.conserve_spin() and not self.conserve_number():
string = fqe_ops_utils.switch_broken_symmetry(string)
return rdm(wfn, string, brawfn=brawfn)
return symmetry_process
def wrap_apply(apply):
"""Decorator to convert parameters to `Wavefunction.apply()` \
to FQE internal classes.
"""
@wraps(apply)
def convert(self, ops: Union['FermionOperator', 'hamiltonian.Hamiltonian']):
""" Converts an FermionOperator to hamiltonian.Hamiltonian
Args:
ops (FermionOperator or Hamiltonian): input operator
"""
hamil = build_hamiltonian(ops,
norb=self.norb(),
conserve_number=self.conserve_number())
return apply(self, hamil)
return convert
def wrap_time_evolve(time_evolve):
"""Decorator to convert parameters to `Wavefunction.time_evolve()` \
to FQE internal classes.
"""
@wraps(time_evolve)
def convert(self,
time: float,
ops: Union['FermionOperator', 'hamiltonian.Hamiltonian'],
inplace: bool = False):
""" Converts an FermionOperator to hamiltonian.Hamiltonian
Args:
time (float): time to be propagated
ops (FermionOperator or Hamiltonian): input operator
"""
hamil = build_hamiltonian(ops,
norb=self.norb(),
conserve_number=self.conserve_number())
return time_evolve(self, time, hamil, inplace)
return convert
def wrap_apply_generated_unitary(apply_generated_unitary):
"""Decorator to convert parameters to \
`Wavefunction.apply_generated_unitary()` to FQE internal classes.
"""
@wraps(apply_generated_unitary)
def convert(self,
time: float,
algo: str,
ops: Union['FermionOperator', 'hamiltonian.Hamiltonian'],
accuracy: float = 0.0,
expansion: int = 30,
spec_lim: Optional[List[float]] = None):
"""Perform the exponentiation of fermionic algebras to the
wavefunction according to the method and accuracy.
Args:
time (float): the final time value to evolve to
algo (string): polynomial expansion algorithm to be used
            ops (FermionOperator or Hamiltonian): the operator used to generate the unitary
accuracy (double): the accuracy to which the system should be evolved
expansion (int): the maximum number of terms in the polynomial expansion
spec_lim (List[float]): spectral range of the Hamiltonian, the length of \
the list should be 2. Optional.
Returns:
            newwfn (Wavefunction): a new initialized wavefunction object
"""
hamil = build_hamiltonian(ops,
norb=self.norb(),
conserve_number=self.conserve_number())
return apply_generated_unitary(self,
time,
algo,
hamil,
accuracy=accuracy,
expansion=expansion,
spec_lim=spec_lim)
return convert
|
quantumlib/OpenFermion-FQE
|
src/fqe/fqe_decorators.py
|
Python
|
apache-2.0
| 14,347
| 0.000767
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ContainerServiceDiagnosticsProfile(Model):
"""Profile for diagnostics on the container service cluster.
:param vm_diagnostics: Profile for diagnostics on the container service
VMs.
:type vm_diagnostics:
~azure.mgmt.containerservice.models.ContainerServiceVMDiagnostics
"""
_validation = {
'vm_diagnostics': {'required': True},
}
_attribute_map = {
'vm_diagnostics': {'key': 'vmDiagnostics', 'type': 'ContainerServiceVMDiagnostics'},
}
def __init__(self, vm_diagnostics):
super(ContainerServiceDiagnosticsProfile, self).__init__()
self.vm_diagnostics = vm_diagnostics
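# A minimal usage sketch (assuming ContainerServiceVMDiagnostics from the same
# generated models package accepts an `enabled` flag; verify against the
# generated model before relying on this):
#
#     vm_diag = ContainerServiceVMDiagnostics(enabled=True)
#     profile = ContainerServiceDiagnosticsProfile(vm_diagnostics=vm_diag)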
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-containerservice/azure/mgmt/containerservice/models/container_service_diagnostics_profile.py
|
Python
|
mit
| 1,170
| 0.000855
|
# Copyright 2017 ContextLabs B.V.
import time
import hashlib
import urllib
import requests
import sawtooth_signing as signing
from base64 import b64decode
from random import randint
from sawtooth_omi.protobuf.work_pb2 import Work
from sawtooth_omi.protobuf.recording_pb2 import Recording
from sawtooth_omi.protobuf.identity_pb2 import IndividualIdentity
from sawtooth_omi.protobuf.identity_pb2 import OrganizationalIdentity
from sawtooth_omi.protobuf.txn_payload_pb2 import OMITransactionPayload
from sawtooth_omi.handler import FAMILY_NAME, OMI_ADDRESS_PREFIX, make_omi_address, _get_address_infix
from sawtooth_omi.handler import WORK, RECORDING, INDIVIDUAL, ORGANIZATION
from sawtooth_sdk.protobuf.batch_pb2 import Batch
from sawtooth_sdk.protobuf.batch_pb2 import BatchHeader
from sawtooth_sdk.protobuf.batch_pb2 import BatchList
from sawtooth_sdk.protobuf.transaction_pb2 import Transaction
from sawtooth_sdk.protobuf.transaction_pb2 import TransactionHeader
TAG_MAP = {
OrganizationalIdentity: ORGANIZATION,
Recording: RECORDING,
Work: WORK,
IndividualIdentity: INDIVIDUAL,
}
def get_object_address(name, tag):
return make_omi_address(name, tag)
def get_type_prefix(tag):
return OMI_ADDRESS_PREFIX + _get_address_infix(tag)
class Cursor:
def __init__(self, endpoint, message_type, count=100):
self.endpoint = endpoint
qs = urllib.parse.parse_qs(urllib.parse.urlparse(endpoint).query)
if 'count' not in qs:
sep = '&' if qs else '?'
self._next = "%s%scount=%d" % (self.endpoint, sep, count)
else:
self._next = self.endpoint
self.message_type = message_type
self.data = []
def _get_page(self, url):
r = requests.get(url)
r.raise_for_status()
result = r.json()
paging = result['paging']
if 'next' in paging:
self._next = paging['next']
else:
self._next = None
self.data.extend(result['data'])
def _xform(self, item):
# item['address']
# item['data']
return self.message_type.FromString(b64decode(item['data']))
def __iter__(self):
return self
def __next__(self):
if not self.data and self._next:
self._get_page(self._next)
if self.data:
return self._xform(self.data.pop(0))
raise StopIteration()
def submit_omi_transaction(base_url, private_key, action, message_type, natural_key_field, omi_obj, additional_inputs=None):
obj = message_type(**omi_obj)
if additional_inputs is None:
additional_inputs = []
public_key_hex = signing.generate_pubkey(private_key)
address = get_object_address(omi_obj[natural_key_field], TAG_MAP[message_type])
data = obj.SerializeToString()
payload = OMITransactionPayload(
action=action,
data=data,
)
payload_bytes = payload.SerializeToString()
payload_sha512 = hashlib.sha512(payload_bytes).hexdigest()
txn_header = TransactionHeader(
batcher_pubkey=public_key_hex,
family_name=FAMILY_NAME,
family_version='1.0',
inputs=[address] + additional_inputs,
outputs=[address],
nonce=str(randint(0, 1000000000)),
payload_encoding='application/protobuf',
payload_sha512=payload_sha512,
signer_pubkey=public_key_hex,
)
txn_header_bytes = txn_header.SerializeToString()
key_handler = signing.secp256k1_signer._decode_privkey(private_key)
# ecdsa_sign automatically generates a SHA-256 hash
txn_signature = key_handler.ecdsa_sign(txn_header_bytes)
txn_signature_bytes = key_handler.ecdsa_serialize_compact(txn_signature)
txn_signature_hex = txn_signature_bytes.hex()
# print([txn_signature_hex])
txn = Transaction(
header=txn_header_bytes,
header_signature=txn_signature_hex,
payload=payload_bytes,
)
batch_header = BatchHeader(
signer_pubkey=public_key_hex,
transaction_ids=[txn.header_signature],
)
batch_header_bytes = batch_header.SerializeToString()
batch_signature = key_handler.ecdsa_sign(batch_header_bytes)
batch_signature_bytes = key_handler.ecdsa_serialize_compact(batch_signature)
batch_signature_hex = batch_signature_bytes.hex()
batch = Batch(
header=batch_header_bytes,
header_signature=batch_signature_hex,
transactions=[txn],
)
batch_list = BatchList(batches=[batch])
batch_bytes = batch_list.SerializeToString()
batch_id = batch_signature_hex
url = "%s/batches" % base_url
headers = {
'Content-Type': 'application/octet-stream',
}
r = requests.post(url, data=batch_bytes, headers=headers)
r.raise_for_status()
link = r.json()['link']
return BatchStatus(batch_id, link)
class BatchStatus:
"""
Provides a function to query for the current status of a submitted transaction.
That is, whether or not the transaction has been committed to the block chain.
"""
def __init__(self, batch_id, status_url):
self.batch_id = batch_id
self.status_url = status_url
def check(self, timeout=5):
"""
Returns the batch status from a transaction submission. The status is one
of ['PENDING', 'COMMITTED', 'INVALID', 'UNKNOWN'].
"""
r = requests.get("%s&wait=%s" % (self.status_url, timeout))
r.raise_for_status()
return r.json()['data'][self.batch_id]
def wait_for_committed(self, timeout=30, check_timeout=5):
        start_time = time.time()
        while True:
            status = self.check(timeout=check_timeout)
            # keep polling only while the batch is still pending and time remains
            if status != "PENDING":
                return status
            if time.time() - start_time >= timeout:
                return status
class OMIClient:
def __init__(self, sawtooth_rest_url, private_key, cursor_count=100):
self.sawtooth_rest_url = sawtooth_rest_url
self.private_key = private_key
self.public_key = signing.generate_pubkey(private_key)
self.cursor_count = cursor_count
def _cursor(self, message_type):
type_prefix = get_type_prefix(TAG_MAP[message_type])
url = "%s/state?address=%s" % (self.sawtooth_rest_url, type_prefix)
return Cursor(
url,
message_type,
count=self.cursor_count
)
def _state_entry(self, message_type, name):
address = get_object_address(name, TAG_MAP[message_type])
url = "%s/state/%s" % (self.sawtooth_rest_url, address)
r = requests.get(url)
r.raise_for_status()
data = r.json()['data']
return message_type.FromString(b64decode(data))
def set_individual(self, individual):
omi_obj = dict(individual)
omi_obj['pubkey'] = self.public_key
return submit_omi_transaction(
base_url=self.sawtooth_rest_url,
private_key=self.private_key,
action='SetIndividualIdentity',
message_type=IndividualIdentity,
natural_key_field='name',
omi_obj=omi_obj,
)
def get_individual(self, name):
return self._state_entry(IndividualIdentity, name)
def get_individuals(self):
return self._cursor(IndividualIdentity)
def set_organization(self, organization):
omi_obj = dict(organization)
omi_obj['pubkey'] = self.public_key
return submit_omi_transaction(
base_url=self.sawtooth_rest_url,
private_key=self.private_key,
action='SetOrganizationalIdentity',
message_type=OrganizationalIdentity,
natural_key_field='name',
omi_obj=omi_obj,
)
def get_organization(self, name):
return self._state_entry(OrganizationalIdentity, name)
def get_organizations(self):
return self._cursor(OrganizationalIdentity)
def set_recording(self, recording):
omi_obj = dict(recording)
omi_obj['registering_pubkey'] = self.public_key
label_name = omi_obj.get('label_name', None)
contributor_splits = omi_obj.get('contributor_splits', [])
derived_work_splits = omi_obj.get('derived_work_splits', [])
derived_recording_splits = omi_obj.get('derived_recording_splits', [])
references = []
if label_name:
references.append(get_object_address(label_name, ORGANIZATION))
for split in contributor_splits:
references.append(get_object_address(split['contributor_name'], INDIVIDUAL))
for split in derived_work_splits:
references.append(get_object_address(split['work_name'], WORK))
for split in derived_recording_splits:
references.append(get_object_address(split['recording_name'], RECORDING))
return submit_omi_transaction(
base_url=self.sawtooth_rest_url,
private_key=self.private_key,
action='SetRecording',
message_type=Recording,
natural_key_field='title',
omi_obj=omi_obj,
additional_inputs=references,
)
def get_recording(self, title):
return self._state_entry(Recording, title)
def get_recordings(self):
return self._cursor(Recording)
def set_work(self, work):
omi_obj = dict(work)
omi_obj['registering_pubkey'] = self.public_key
songwriter_publisher_splits = omi_obj.get('songwriter_publisher_splits', [])
references = []
songwriter_publishers = [split['songwriter_publisher'] for split in songwriter_publisher_splits]
for split in songwriter_publishers:
references.append(get_object_address(split['songwriter_name'], INDIVIDUAL))
references.append(get_object_address(split['publisher_name'], ORGANIZATION))
return submit_omi_transaction(
base_url=self.sawtooth_rest_url,
private_key=self.private_key,
action='SetWork',
message_type=Work,
natural_key_field='title',
omi_obj=omi_obj,
additional_inputs=references,
)
def get_work(self, title):
return self._state_entry(Work, title)
def get_works(self):
return self._cursor(Work)
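# A minimal usage sketch (the REST endpoint and the identity fields other than
# the natural keys 'name'/'title' are illustrative assumptions; the private key
# is expected in the hex form used by sawtooth_signing):
#
#     client = OMIClient('http://localhost:8008', private_key)
#     status = client.set_individual({'name': 'Alice'})
#     print(status.wait_for_committed(timeout=30))   # e.g. 'COMMITTED'
#     for individual in client.get_individuals():
#         print(individual.name)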
|
omi/stl-api-gateway
|
omi_api/client.py
|
Python
|
mit
| 10,377
| 0.001253
|
from .TestContainersDeviceAndManager import TestContainerDeviceDataFlow
from .TestContainersReceivingSerialDataAndObserverPattern import TestContainersReceivingSerialDataAndObserverPattern
|
rCorvidae/OrionPI
|
src/tests/Devices/Containers/__init__.py
|
Python
|
mit
| 188
| 0.010638
|
#===============================================================================
# Copyright (C) 2014-2019 Anton Vorobyov
#
# This file is part of Phobos.
#
# Phobos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Phobos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Phobos. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
from .cached_property import cachedproperty
from .eve_normalize import EveNormalizer
from .resource_browser import ResourceBrowser
from .translator import Translator
|
DarkFenX/Phobos
|
util/__init__.py
|
Python
|
gpl-3.0
| 1,042
| 0.003839
|
#!/usr/bin/env python
# Copyright (C) 2011 Rohan Jain
# Copyright (C) 2011 Alexis Le-Quoc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from sys import version
from os.path import expanduser
import paster
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(name='paster',
version=paster.version,
description='A generic pastebin posting tool',
author='Rohan Jain',
author_email='crodjer@gmail.com',
long_description=open('README.md').read(),
url='https://github.com/crodjer/paster',
packages = ['paster'],
data_files=[(expanduser('~'), ['paster.cfg']),],
license="GPLv3",
platforms=["all"],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Software Development',
'Programming Language :: Python',
'Natural Language :: English',
],
scripts=['pstr'],
)
|
crodjer/paster
|
setup.py
|
Python
|
gpl-3.0
| 2,026
| 0.001974
|
__author__ = 'Arunkumar Eli'
__email__ = "elrarun@gmail.com"
from selenium.webdriver.common.by import By
class DigitalOceanLocators(object):
ACCESS_KEY_INPUT = (By.ID, 'accessKey')
SECRET_KEY_INPUT = (By.ID, 'secretKey')
NEXT_BTN = (By.CSS_SELECTOR, "button.btn.btn-primary")
AVAILABILITY_ZONE = (By.XPATH, "//section[3]/div/div/span")
ZONE_SELECT = (By.ID, "selectedZone")
VPC_RADIO_BTN = (By.XPATH, "//div[3]/div[2]/div/label")
SUBNET_RADIO_BTN = (By.XPATH, "//div[2]/label")
SECURITY_GROUP = (By.XPATH, "///section[5]/div/div/span")
INSTANCE = (By.XPATH, "//section[7]/div/div/span")
ACCOUNT_ACCESS = (By.XPATH, "//section/div/div/span")
STD_RADIO_BTN=(By.XPATH,"//section[5]/div[1]/div[2]/div[2]/div[1]/label/input")
CUSTOM_RADIO_BTN=(By.XPATH,"//section[5]/div[1]/div[2]/div[2]/div[2]/label/input")
SET_INSTANCE_OPTION_BTN = (By.XPATH, "//div[2]/button")
SLIDE_BAR_CLICK_3 = (By.XPATH, "//div[2]/div[3]/div")
HOST_NAME_INPUT = (By.ID, "prefix")
HOST_DESC_INPUT = (By.ID, "description")
HOST_INSTANCE_TYPE_SELECT = (By.ID, "instanceType")
HOST_MEM_SIZE_INPUT = (By.ID, "rootSize")
HOST_CREATE_BTN = (By.XPATH, "//div[2]/button")
|
aruneli/rancher-test
|
ui-selenium-tests/locators/RackspaceLocators.py
|
Python
|
apache-2.0
| 1,212
| 0.006601
|
#!/usr/bin/env python
"""
Script to fetch test status info from the sqlite database. Before using this
script, avocado must be launched with the '--journal' option.
"""
import os
import sys
import sqlite3
import argparse
from avocado.core import data_dir
from dateutil import parser as dateparser
def colour_result(result):
"""Colour result in the test status info"""
colours_map = {"PASS": "\033[92mPASS\033[00m",
"ERROR": "\033[93mERROR\033[00m",
"FAIL": "\033[91mFAIL\033[00m"}
return colours_map.get(result) or result
def summarise_records(records):
"""Summarise test records and print it in cyan"""
num_row = len(records[0])
rows = tuple([("row%s" % x) for x in xrange(num_row)])
records_summary = {}
for rows in records:
records_summary[rows[1]] = records_summary.get(rows[1], 0) + 1
records_summary[rows[4]] = records_summary.get(rows[4], 0) + 1
res = ", ".join("%s=%r" % (
key, val) for (key, val) in records_summary.iteritems())
print "\033[96mSummary: \n" + res + "\033[00m"
def get_total_seconds(td):
""" Alias for get total_seconds in python2.6 """
if hasattr(td, 'total_seconds'):
return td.total_seconds()
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6
def fetch_data(db_file=".journal.sqlite"):
""" Fetch tests status info from journal database"""
records = []
con = sqlite3.connect(db_file)
try:
cur = con.cursor()
cur.execute("select tag, time, action, status from test_journal")
while True:
            # First record contains start info, the second contains end info;
            # merge start info and end info into one record.
data = cur.fetchmany(2)
if not data:
break
tag = data[0][0]
result = "N/A"
status = "Running"
end_time = None
end_str = None
elapsed = None
start_time = dateparser.parse(data[0][1])
start_str = start_time.strftime("%Y-%m-%d %X")
if len(data) > 1:
status = "Finshed"
result = data[1][3]
end_time = dateparser.parse(data[1][1])
time_delta = end_time - start_time
elapsed = get_total_seconds(time_delta)
end_str = end_time.strftime("%Y-%m-%d %X")
record = (tag, status, start_str, end_str, result, elapsed)
records.append(record)
finally:
con.close()
return records
def print_data(records, skip_timestamp=False):
""" Print formated tests status info"""
if not records:
return
if not skip_timestamp:
print "%-40s %-15s %-15s %-15s %-10s %-10s" % (
"CaseName", "Status", "StartTime",
"EndTime", "Result", "TimeElapsed")
else:
print "%-40s %-15s %-10s" % ("CaseName", "Status", "Result")
for row in records:
if not skip_timestamp:
print "%s %s %s %s %s %s" % (
row[0], row[1], row[2], row[3], colour_result(row[4]), row[5])
else:
print "%s %s %s" % (row[0], row[1], colour_result(row[4]))
summarise_records(records)
if __name__ == "__main__":
default_results_dir = os.path.join(data_dir.get_logs_dir(), 'latest')
parser = argparse.ArgumentParser(description="Avocado journal dump tool")
parser.add_argument(
'-d',
'--test-results-dir',
action='store',
default=default_results_dir,
dest='results_dir',
help="avocado test results dir, Default: %s" %
default_results_dir)
parser.add_argument(
'-s',
'--skip-timestamp',
action='store_true',
default=False,
dest='skip_timestamp',
help="skip timestamp output (leaving status and result enabled)")
parser.add_argument(
'-v',
'--version',
action='version',
version='%(prog)s 1.0')
arguments = parser.parse_args()
db_file = os.path.join(arguments.results_dir, '.journal.sqlite')
if not os.path.isfile(db_file):
print "`.journal.sqlite` DB not found in results directory, "
print "Please start avocado with option '--journal'."
parser.print_help()
sys.exit(1)
data = fetch_data(db_file)
print_data(data, arguments.skip_timestamp)
|
CongLi/avocado-vt
|
scripts/scan_results.py
|
Python
|
gpl-2.0
| 4,423
| 0
|
#!/usr/bin/python
import sys
import csv
import lxml.etree as ET
# This script creates a CSV file from an XCCDF file formatted in the
# structure of a STIG. This should enable its ingestion into VMS,
# as well as its comparison with VMS output.
xccdf_ns = "http://checklists.nist.gov/xccdf/1.1"
disa_cciuri = "http://iase.disa.mil/stigs/cci/Pages/index.aspx"
disa_srguri = "http://iase.disa.mil/stigs/srgs/Pages/index.aspx"
def parse_xml_file(xmlfile):
with open(xmlfile, 'r') as xml_file:
filestring = xml_file.read()
tree = ET.fromstring(filestring)
return tree
def reflist(refs):
refstring = ', '.join(refs)
return refstring
def node_to_text(node):
textslist = node.xpath(".//text()")
return ''.join(textslist)
def main():
if len(sys.argv) < 2:
print "Provide an XCCDF file to convert into a CSV file."
sys.exit(1)
xccdffile = sys.argv[1]
xccdftree = parse_xml_file(xccdffile)
rules = xccdftree.findall(".//{%s}Rule" % xccdf_ns)
rulewriter = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
for rule in rules:
cci_refs = [ref.text for ref in rule.findall("{%s}ident[@system='%s']"
% (xccdf_ns, disa_cciuri))]
srg_refs = [ref.text for ref in rule.findall("{%s}ident[@system='%s']"
% (xccdf_ns, disa_srguri))]
title = rule.find("{%s}title" % xccdf_ns).text
description = node_to_text(rule.find("{%s}description" % xccdf_ns))
fixtext = node_to_text(rule.find("{%s}fixtext" % xccdf_ns))
checktext = node_to_text(rule.find(".//{%s}check-content" % xccdf_ns))
row = [reflist(cci_refs), reflist(srg_refs), title, description, fixtext, checktext]
rulewriter.writerow(row)
sys.exit(0)
if __name__ == "__main__":
main()
|
mpreisler/scap-security-guide-debian
|
scap-security-guide-0.1.21/shared/modules/xccdf2csv_stig_module.py
|
Python
|
gpl-2.0
| 1,883
| 0.002124
|
# -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2014-2015 Jérémy Bobbio <lunar@debian.org>
# © 2015 Reiner Herrmann <reiner@reiner-h.de>
# © 2012-2013 Olivier Matz <zer0@droids-corp.org>
# © 2012 Alan De Smet <adesmet@cs.wisc.edu>
# © 2012 Sergey Satskiy <sergey.satskiy@gmail.com>
# © 2012 scito <info@scito.ch>
#
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
#
#
# Most of the code is borrowed from diff2html.py available at:
# http://git.droids-corp.org/?p=diff2html.git
#
# Part of the code is inspired by diff2html.rb from
# Dave Burt <dave (at) burt.id.au> (mainly for html theme)
#
import base64
import codecs
import collections
import contextlib
import hashlib
import html
import io
import logging
import os
import re
import sys
from urllib.parse import urlparse
from diffoscope import VERSION
from diffoscope.config import Config
from diffoscope.diff import SideBySideDiff, DIFFON, DIFFOFF
from ..icon import FAVICON_BASE64
from ..utils import sizeof_fmt, PrintLimitReached, DiffBlockLimitReached, \
Presenter, make_printer, PartialString
from . import templates
# minimum line size, we add a zero-sized breakable space every
# LINESIZE characters
LINESIZE = 20
TABSIZE = 8
# Characters we're willing to word wrap on
WORDBREAK = " \t;.,/):-"
JQUERY_SYSTEM_LOCATIONS = (
'/usr/share/javascript/jquery/jquery.js',
)
logger = logging.getLogger(__name__)
re_anchor_prefix = re.compile(r'^[^A-Za-z]')
re_anchor_suffix = re.compile(r'[^A-Za-z-_:\.]')
def send_and_exhaust(iterator, arg, default):
"""Send a single value to a coroutine, exhaust it, and return the final
element or a default value if it was empty."""
# Python's coroutine syntax is still a bit rough when you want to do
# slightly more complex stuff. Watch this logic closely.
output = default
try:
output = iterator.send(arg)
except StopIteration:
pass
for output in iterator:
pass
return output
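# A minimal sketch of how send_and_exhaust behaves, assuming a toy coroutine
# (the generator below is purely illustrative, not part of diffoscope):
#
#   def gen():
#       got = yield 1
#       yield got + 1
#       yield got + 2
#
#   it = gen(); next(it)            # prime the coroutine; it yields 1
#   send_and_exhaust(it, 10, None)  # sends 10, exhausts it, returns 12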
def md5(s):
return hashlib.md5(s.encode('utf-8')).hexdigest()
def escape_anchor(val):
"""
ID and NAME tokens must begin with a letter ([A-Za-z]) and may be followed
by any number of letters, digits ([0-9]), hyphens ("-"), underscores ("_"),
colons (":"), and periods (".").
"""
for pattern, repl in (
(re_anchor_prefix, 'D'),
(re_anchor_suffix, '-'),
):
val = pattern.sub(repl, val)
return val
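# Illustrative application of the escaping rules documented above:
# escape_anchor("1 lib/foo.so") first maps the leading non-letter to "D" and
# then every remaining disallowed character to "-", giving "D-lib-foo.so".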
def output_diff_path(path):
return ' / '.join(n.source1 for n in path[1:])
def output_anchor(path):
return escape_anchor(output_diff_path(path))
def convert(s, ponct=0, tag=''):
i = 0
t = io.StringIO()
for c in s:
# used by diffs
if c == DIFFON:
t.write('<%s>' % tag)
elif c == DIFFOFF:
t.write('</%s>' % tag)
# special highlighted chars
elif c == "\t" and ponct == 1:
n = TABSIZE - (i % TABSIZE)
if n == 0:
n = TABSIZE
t.write('<span class="diffponct">\xbb</span>'+'\xa0'*(n-1))
elif c == " " and ponct == 1:
t.write('<span class="diffponct">\xb7</span>')
elif c == "\n" and ponct == 1:
t.write('<br/><span class="diffponct">\</span>')
elif ord(c) < 32:
conv = u"\\x%x" % ord(c)
t.write('<em>%s</em>' % conv)
i += len(conv)
else:
t.write(html.escape(c))
i += 1
if WORDBREAK.count(c) == 1:
t.write('\u200b')
i = 0
if i > LINESIZE:
i = 0
t.write('\u200b')
return t.getvalue()
def output_visual(visual, path, indentstr, indentnum):
logger.debug('including image for %s', visual.source)
indent = tuple(indentstr * (indentnum + x) for x in range(3))
anchor = output_anchor(path)
return u"""{0[0]}<div class="difference">
{0[1]}<div class="diffheader">
{0[1]}<div class="diffcontrol">⊟</div>
{0[1]}<div><span class="source">{1}</span>
{0[2]}<a class="anchor" href="#{2}" name="{2}">\xb6</a>
{0[1]}</div>
{0[1]}</div>
{0[1]}<div class="difference"><img src=\"data:{3},{4}\" alt=\"compared images\" /></div>
{0[0]}</div>""".format(indent, html.escape(visual.source), anchor, visual.data_type, visual.content)
def output_node_frame(difference, path, indentstr, indentnum, body):
indent = tuple(indentstr * (indentnum + x) for x in range(3))
anchor = output_anchor(path)
dctrl_class, dctrl = ("diffcontrol", u'⊟') if difference.has_visible_children() else ("diffcontrol-nochildren", u'⊡')
if difference.source1 == difference.source2:
header = u"""{0[1]}<div class="{1}">{2}</div>
{0[1]}<div><span class="diffsize">{3}</span></div>
{0[1]}<div><span class="source">{5}</span>
{0[2]}<a class="anchor" href="#{4}" name="{4}">\xb6</a>
{0[1]}</div>
""".format(indent, dctrl_class, dctrl, sizeof_fmt(difference.size()), anchor,
html.escape(difference.source1))
else:
header = u"""{0[1]}<div class="{1} diffcontrol-double">{2}</div>
{0[1]}<div><span class="diffsize">{3}</span></div>
{0[1]}<div><span class="source">{5}</span> vs.</div>
{0[1]}<div><span class="source">{6}</span>
{0[2]}<a class="anchor" href="#{4}" name="{4}">\xb6</a>
{0[1]}</div>
""".format(indent, dctrl_class, dctrl, sizeof_fmt(difference.size()), anchor,
html.escape(difference.source1),
html.escape(difference.source2))
return PartialString.numl(u"""{0[1]}<div class="diffheader">
{1}{0[1]}</div>
{2}""", 3).pformatl(indent, header, body)
def output_node(ctx, difference, path, indentstr, indentnum):
"""Returns a tuple (parent, continuation) where
- parent is a PartialString representing the body of the node, including
its comments, visuals, unified_diff and headers for its children - but
not the bodies of the children
- continuation is either None or (only in html-dir mode) a function which
when called with a single integer arg, the maximum size to print, will
print any remaining "split" pages for unified_diff up to the given size.
"""
indent = tuple(indentstr * (indentnum + x) for x in range(3))
t, cont = PartialString.cont()
comments = u""
if difference.comments:
comments = u'{0[1]}<div class="comment">\n{1}{0[1]}</div>\n'.format(
indent, "".join(u"{0[2]}{1}<br/>\n".format(indent, html.escape(x)) for x in difference.comments))
visuals = u""
for visual in difference.visuals:
visuals += output_visual(visual, path, indentstr, indentnum+1)
udiff = u""
ud_cont = None
if difference.unified_diff:
ud_cont = HTMLSideBySidePresenter().output_unified_diff(
ctx, difference.unified_diff, difference.has_internal_linenos)
udiff = next(ud_cont)
if isinstance(udiff, PartialString):
ud_cont = ud_cont.send
udiff = udiff.pformatl(PartialString.of(ud_cont))
else:
for _ in ud_cont:
pass # exhaust the iterator, avoids GeneratorExit
ud_cont = None
# PartialString for this node
body = PartialString.numl(u"{0}{1}{2}{-1}", 3, cont).pformatl(comments, visuals, udiff)
if len(path) == 1:
# root node, frame it
body = output_node_frame(difference, path, indentstr, indentnum, body)
t = cont(t, body)
# Add holes for child nodes
for d in difference.details:
child = output_node_frame(d, path + [d], indentstr, indentnum+1, PartialString.of(d))
child = PartialString.numl(u"""{0[1]}<div class="difference">
{1}{0[1]}</div>
{-1}""", 2, cont).pformatl(indent, child)
t = cont(t, child)
assert len(t.holes) >= len(difference.details) + 1 # there might be extra holes for the unified diff continuation
return cont(t, u""), ud_cont
def output_header(css_url, our_css_url=False, icon_url=None):
if css_url:
css_link = u' <link href="%s" type="text/css" rel="stylesheet" />\n' % css_url
else:
css_link = u''
if our_css_url:
css_style = u' <link href="%s" type="text/css" rel="stylesheet" />\n' % our_css_url
else:
css_style = u'<style type="text/css">\n' + templates.STYLES + u'</style>\n'
if icon_url:
favicon = icon_url
else:
favicon = u'data:image/png;base64,' + FAVICON_BASE64
return templates.HEADER % {
'title': html.escape(' '.join(sys.argv)),
'favicon': favicon,
'css_link': css_link,
'css_style': css_style
}
def output_footer(jquery_url=None):
footer = templates.FOOTER % {'version': VERSION}
if jquery_url:
return templates.SCRIPTS % {'jquery_url': html.escape(jquery_url)} + footer
return footer
@contextlib.contextmanager
def file_printer(directory, filename):
with codecs.open(os.path.join(directory, filename), 'w', encoding='utf-8') as f:
yield f.write
@contextlib.contextmanager
def spl_file_printer(directory, filename, accum):
with codecs.open(os.path.join(directory, filename), 'w', encoding='utf-8') as f:
print_func = f.write
def recording_print_func(s):
print_func(s)
recording_print_func.bytes_written += len(s)
accum.bytes_written += len(s)
recording_print_func.bytes_written = 0
yield recording_print_func
class HTMLPrintContext(collections.namedtuple("HTMLPrintContext",
"target single_page jquery_url css_url our_css_url icon_url")):
@property
def directory(self):
return None if self.single_page else self.target
class HTMLSideBySidePresenter(object):
supports_visual_diffs = True
def __init__(self):
self.max_lines = Config().max_diff_block_lines # only for html-dir
self.max_lines_parent = Config().max_page_diff_block_lines
self.max_page_size_child = Config().max_page_size_child
def new_unified_diff(self):
self.spl_rows = 0
self.spl_current_page = 0
self.spl_print_func = None
self.spl_print_ctrl = None
# the below apply to child pages only, the parent page limit works
# differently and is controlled by output_difference later below
self.bytes_max_total = 0
self.bytes_written = 0
self.error_row = None
def output_hunk_header(self, hunk_off1, hunk_size1, hunk_off2, hunk_size2):
self.spl_print_func(u'<tr class="diffhunk"><td colspan="2">Offset %d, %d lines modified</td>' % (hunk_off1, hunk_size1))
self.spl_print_func(u'<td colspan="2">Offset %d, %d lines modified</td></tr>\n' % (hunk_off2, hunk_size2))
def output_line(self, has_internal_linenos, type_name, s1, line1, s2, line2):
self.spl_print_func(u'<tr class="diff%s">' % type_name)
try:
if s1:
if has_internal_linenos:
self.spl_print_func(u'<td colspan="2" class="diffpresent">')
else:
self.spl_print_func(u'<td class="diffline">%d </td>' % line1)
self.spl_print_func(u'<td class="diffpresent">')
self.spl_print_func(convert(s1, ponct=1, tag='del'))
self.spl_print_func(u'</td>')
else:
self.spl_print_func(u'<td colspan="2">\xa0</td>')
if s2:
if has_internal_linenos:
self.spl_print_func(u'<td colspan="2" class="diffpresent">')
else:
self.spl_print_func(u'<td class="diffline">%d </td>' % line2)
self.spl_print_func(u'<td class="diffpresent">')
self.spl_print_func(convert(s2, ponct=1, tag='ins'))
self.spl_print_func(u'</td>')
else:
self.spl_print_func(u'<td colspan="2">\xa0</td>')
finally:
self.spl_print_func(u"</tr>\n")
def spl_print_enter(self, print_context, rotation_params):
# Takes ownership of print_context
self.spl_print_ctrl = print_context.__exit__, rotation_params
self.spl_print_func = print_context.__enter__()
ctx, _ = rotation_params
# Print file and table headers
self.spl_print_func(output_header(ctx.css_url, ctx.our_css_url, ctx.icon_url))
def spl_had_entered_child(self):
return self.spl_print_ctrl and self.spl_print_ctrl[1] and self.spl_current_page > 0
def spl_print_exit(self, *exc_info):
if not self.spl_had_entered_child():
return False
self.spl_print_func(output_footer())
_exit, _ = self.spl_print_ctrl
self.spl_print_func = None
self.spl_print_ctrl = None
return _exit(*exc_info)
def check_limits(self):
if not self.spl_print_ctrl[1]:
# html-dir single output, don't need to rotate
if self.spl_rows >= self.max_lines_parent:
raise DiffBlockLimitReached()
return False
else:
# html-dir output, perhaps need to rotate
if self.spl_rows >= self.max_lines:
raise DiffBlockLimitReached()
if self.spl_current_page == 0: # on parent page
if self.spl_rows < self.max_lines_parent:
return False
logger.debug("new unified-diff subpage, parent page went over %s lines", self.max_lines_parent)
else: # on child page
if self.bytes_max_total and self.bytes_written > self.bytes_max_total:
raise PrintLimitReached()
if self.spl_print_func.bytes_written < self.max_page_size_child:
return False
logger.debug("new unified-diff subpage, previous subpage went over %s bytes", self.max_page_size_child)
return True
def new_child_page(self):
_, rotation_params = self.spl_print_ctrl
ctx, mainname = rotation_params
self.spl_current_page += 1
filename = "%s-%s.html" % (mainname, self.spl_current_page)
if self.spl_current_page > 1:
# previous page was a child, close it
self.spl_print_func(templates.UD_TABLE_FOOTER % {"filename": html.escape(filename), "text": "load diff"})
self.spl_print_func(u"</table>\n")
self.spl_print_exit(None, None, None)
# rotate to the next child page
context = spl_file_printer(ctx.directory, filename, self)
self.spl_print_enter(context, rotation_params)
self.spl_print_func(templates.UD_TABLE_HEADER)
def output_limit_reached(self, limit_type, total, bytes_processed):
logger.debug('%s print limit reached', limit_type)
bytes_left = total - bytes_processed
self.error_row = templates.UD_TABLE_LIMIT_FOOTER % {
"limit_type": limit_type,
"bytes_left": bytes_left,
"bytes_total": total,
"percent": (bytes_left / total) * 100
}
self.spl_print_func(self.error_row)
def output_unified_diff_table(self, unified_diff, has_internal_linenos):
"""Output a unified diff <table> possibly over multiple pages.
It is the caller's responsibility to set up self.spl_* correctly.
Yields None for each extra child page, and then True or False depending
on whether the whole output was truncated.
"""
try:
ydiff = SideBySideDiff(unified_diff)
for t, args in ydiff.items():
if t == "L":
self.output_line(has_internal_linenos, *args)
elif t == "H":
self.output_hunk_header(*args)
elif t == "C":
self.spl_print_func(u'<td colspan="2">%s</td>\n' % args)
else:
raise AssertionError()
self.spl_rows += 1
if not self.check_limits():
continue
self.new_child_page()
new_limit = yield None
if new_limit:
self.bytes_max_total = new_limit
self.bytes_written = 0
self.check_limits()
wrote_all = True
except GeneratorExit:
return
except DiffBlockLimitReached:
self.output_limit_reached("diff block lines", len(unified_diff), ydiff.bytes_processed)
wrote_all = False
except PrintLimitReached:
self.output_limit_reached("report size", len(unified_diff), ydiff.bytes_processed)
wrote_all = False
finally:
# no footer on the last page, just a close tag
self.spl_print_func(u"</table>")
yield wrote_all
def output_unified_diff(self, ctx, unified_diff, has_internal_linenos):
self.new_unified_diff()
rotation_params = None
if ctx.directory:
mainname = md5(unified_diff)
rotation_params = ctx, mainname
try:
udiff = io.StringIO()
udiff.write(templates.UD_TABLE_HEADER)
self.spl_print_func = udiff.write
self.spl_print_ctrl = None, rotation_params
it = self.output_unified_diff_table(unified_diff, has_internal_linenos)
wrote_all = next(it)
if wrote_all is None:
assert self.spl_current_page == 1
# now pause the iteration and wait for consumer to give us a
# size-limit to write the remaining pages with
# exhaust the iterator and save the last item in wrote_all
new_limit = yield PartialString(PartialString.escape(udiff.getvalue()) + u"{0}</table>\n", None)
wrote_all = send_and_exhaust(it, new_limit, wrote_all)
else:
yield udiff.getvalue()
return
except GeneratorExit:
logger.debug("skip extra output for unified diff %s", mainname)
it.close()
self.spl_print_exit(None, None, None)
return
except:
import traceback
traceback.print_exc()
if self.spl_print_exit(*sys.exc_info()) is False:
raise
else:
self.spl_print_exit(None, None, None)
finally:
self.spl_print_ctrl = None
self.spl_print_func = None
truncated = not wrote_all
child_rows_written = self.spl_rows - self.max_lines_parent
if truncated and not child_rows_written:
# if we didn't write any child rows, just output the error message
# on the parent page
parent_last_row = self.error_row
else:
noun = "pieces" if self.spl_current_page > 1 else "piece"
text = "load diff (%s %s%s)" % (self.spl_current_page, noun, (", truncated" if truncated else ""))
parent_last_row = templates.UD_TABLE_FOOTER % {"filename": html.escape("%s-1.html" % mainname), "text": text}
yield self.bytes_written, parent_last_row
class HTMLPresenter(Presenter):
supports_visual_diffs = True
def __init__(self):
self.reset()
def reset(self):
self.report_printed = 0
self.report_limit = Config().max_report_size
@property
def report_remaining(self):
return self.report_limit - self.report_printed
def maybe_print(self, node, printers, outputs, continuations):
output = outputs[node]
node_cont = continuations[node]
if output.holes and set(output.holes) - set(node_cont):
return
# could be slightly more accurate, whatever
est_placeholder_len = max(
len(templates.UD_TABLE_FOOTER),
len(templates.UD_TABLE_LIMIT_FOOTER),
) + 40
est_size = output.size(est_placeholder_len)
results = {}
for cont in node_cont:
remaining = self.report_remaining - est_size
printed, result = cont(remaining)
self.report_printed += printed
results[cont] = result
out = output.format(results)
printer_args = printers[node]
with printer_args[0](*printer_args[1:]) as printer:
printer(out)
self.report_printed += len(out)
del outputs[node]
del printers[node]
del continuations[node]
def output_node_placeholder(self, pagename, lazy_load, size=0):
if lazy_load:
return templates.DIFFNODE_LAZY_LOAD % {
"pagename": pagename,
"pagesize": sizeof_fmt(Config().max_page_size_child),
"size": sizeof_fmt(size),
}
else:
return templates.DIFFNODE_LIMIT
def output_difference(self, ctx, root_difference):
outputs = {} # nodes to their partial output
ancestors = {} # child nodes to ancestor nodes
placeholder_len = len(self.output_node_placeholder(
"XXXXXXXXXXXXXXXX",
not ctx.single_page,
))
continuations = {} # functions to print unified diff continuations (html-dir only)
printers = {} # nodes to their printers
def smallest_first(node, parent_score):
depth = parent_score[0] + 1 if parent_score else 0
parents = parent_score[3] if parent_score else []
# Difference is not comparable so use memory address in event of a tie
return depth, node.size_self(), id(node), parents + [node]
def process_node(node, score):
path = score[3]
diff_path = output_diff_path(path)
pagename = md5(diff_path)
logger.debug('html output for %s', diff_path)
ancestor = ancestors.pop(node, None)
assert ancestor in path or (ancestor is None and node is root_difference)
node_output, node_continuation = output_node(ctx, node, path, " ", len(path)-1)
add_to_existing = False
if ancestor:
page_limit = Config().max_page_size if ancestor is \
root_difference else Config().max_page_size_child
page_current = outputs[ancestor].size(placeholder_len)
report_current = self.report_printed + \
sum(p.size(placeholder_len) for p in outputs.values())
want_to_add = node_output.size(placeholder_len)
logger.debug(
"report size: %s/%s, page size: %s/%s, want to add %s)",
report_current,
self.report_limit,
page_current,
page_limit,
want_to_add,
)
if report_current + want_to_add > self.report_limit:
make_new_subpage = False
elif page_current + want_to_add < page_limit:
add_to_existing = True
else:
make_new_subpage = not ctx.single_page
if add_to_existing:
# under limit, add it to an existing page
outputs[ancestor] = outputs[ancestor].pformat({
node: node_output,
})
stored = ancestor
else:
# over limit (or root), new subpage or continue/break
if ancestor:
placeholder = self.output_node_placeholder(
pagename,
make_new_subpage,
node.size(),
)
outputs[ancestor] = outputs[ancestor].pformat({node: placeholder})
self.maybe_print(ancestor, printers, outputs, continuations)
footer = output_footer()
# we hit a limit, either max-report-size or single-page
if not make_new_subpage:
if outputs:
# True = don't traverse this node's children,
# because they won't be output. However, there are
# holes in other pages, so don't break the loop
# just yet.
return True
# No more holes, don't traverse any more nodes
raise StopIteration
else:
# Unconditionally write the root node regardless of limits
assert node is root_difference
footer = output_footer(ctx.jquery_url)
pagename = "index"
outputs[node] = node_output.frame(
output_header(ctx.css_url, ctx.our_css_url, ctx.icon_url) +
u'<div class="difference">\n', u'</div>\n' + footer)
assert not ctx.single_page or node is root_difference
printers[node] = (make_printer, ctx.target) if ctx.single_page \
else (file_printer, ctx.target, "%s.html" % pagename)
stored = node
for child in node.details:
logger.debug(
"scheduling future html output for: %s",
output_diff_path(path + [child]),
)
ancestors[child] = stored
conts = continuations.setdefault(stored, [])
if node_continuation:
conts.append(node_continuation)
self.maybe_print(stored, printers, outputs, continuations)
nodes = root_difference.traverse_heapq(smallest_first, yield_score=True)
prune_prev_node_descendants = None
while True:
try:
node, score = nodes.send(prune_prev_node_descendants)
prune_prev_node_descendants = process_node(node, score)
except StopIteration:
break
if outputs:
import pprint
pprint.pprint(outputs, indent=4)
assert not outputs
def ensure_jquery(self, jquery_url, basedir, default_override):
if jquery_url is None:
jquery_url = default_override
default_override = None # later, we can detect jquery_url was None
if jquery_url == 'disable' or not jquery_url:
return None
url = urlparse(jquery_url)
if url.scheme or url.netloc:
# remote path
return jquery_url
# local path
if os.path.isabs(url.path):
check_path = url.path
else:
check_path = os.path.join(basedir, url.path)
if os.path.lexists(check_path):
return url.path
for path in JQUERY_SYSTEM_LOCATIONS:
if os.path.exists(path):
os.symlink(path, check_path)
logger.debug('jquery found at %s and symlinked to %s', path, check_path)
return url.path
if default_override is None:
# if no jquery_url was given, and we can't find it, don't use it
return None
logger.warning('--jquery given, but jQuery was not found. Using it regardless.')
logger.debug('Locations searched: %s', ', '.join(JQUERY_SYSTEM_LOCATIONS))
return url.path
def output_html_directory(self, directory, difference, css_url=None, jquery_url=None):
"""
Multi-file presenter. Writes to a directory, and puts large diff tables
into files of their own.
This uses jQuery. By default it uses
/usr/share/javascript/jquery/jquery.js (symlinked, so that you can
still share the result over HTTP). You can also pass --jquery URL to
diffoscope to use a central jQuery copy.
"""
if not os.path.exists(directory):
os.makedirs(directory)
if not os.path.isdir(directory):
raise ValueError("%s is not a directory" % directory)
jquery_url = self.ensure_jquery(jquery_url, directory, "jquery.js")
with open(os.path.join(directory, "common.css"), "w") as fp:
fp.write(templates.STYLES)
with open(os.path.join(directory, "icon.png"), "wb") as fp:
fp.write(base64.b64decode(FAVICON_BASE64))
ctx = HTMLPrintContext(directory, False, jquery_url, css_url, "common.css", "icon.png")
self.output_difference(ctx, difference)
def output_html(self, target, difference, css_url=None, jquery_url=None):
"""
Default presenter, all in one HTML file
"""
jquery_url = self.ensure_jquery(jquery_url, os.getcwd(), None)
ctx = HTMLPrintContext(target, True, jquery_url, css_url, None, None)
self.output_difference(ctx, difference)
@classmethod
def run(cls, data, difference, parsed_args):
cls().output_html(
parsed_args.html_output,
difference,
css_url=parsed_args.css_url,
jquery_url=parsed_args.jquery_url,
)
class HTMLDirectoryPresenter(HTMLPresenter):
@classmethod
def run(cls, data, difference, parsed_args):
cls().output_html_directory(
parsed_args.html_output_directory,
difference,
css_url=parsed_args.css_url,
jquery_url=parsed_args.jquery_url,
)
|
ReproducibleBuilds/diffoscope
|
diffoscope/presenters/html/html.py
|
Python
|
gpl-3.0
| 30,030
| 0.001632
|
# Uncomment to run this module directly. TODO comment out.
#import sys, os
#sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
# End of uncomment.
import unittest
import subprocess
import runserver
from flask import Flask, current_app, jsonify
from views import neo4j_driver
from views import my_patients
from views import session
import helper
import json
class MyPatientsPageTestCase(unittest.TestCase):
def setUp(self):
runserver.app.config['TESTING'] = True
runserver.app.config['DB_NAME_USERS'] = 'test_users'
self.app = runserver.app.test_client()
#helper.create_neo4j_demo_user()
helper.login(self.app)
helper.my_patients_neo4j_data()
def tearDown(self):
self.app.get('/logout', follow_redirects=True)
def test_my_patients_page(self):
page = self.app.get('/my_patients', follow_redirects=True)
assert page.status_code == 200 # NB this test doesn't wait for the data to load.
def test_my_patients_functionality(self):
app = Flask(__name__)
with app.test_request_context():
records = my_patients.get_individuals('demo')
# Here we create the Flask Response object, containing json,
# that the /my_patients page receives. We then test
# that the expected data is available.
data=jsonify(result=records)
assert data.status == '200 OK'
parsed_json = json.loads(data.data)
# First person.
i=0
assert parsed_json['result'][i]['individual'] == 'person2'
assert parsed_json['result'][i]['gender'] == 'F'
for pheno in parsed_json['result'][i]['phenotypes'] :
assert (pheno['name'] == 'Abnormality of the retina' or
pheno['name'] == 'Visual impairment' or
pheno['name'] == 'Macular dystrophy')
assert parsed_json['result'][i]['phenotypeScore'] == 0.69
assert parsed_json['result'][i]['hom_count'] == 1
assert parsed_json['result'][i]['het_count'] == 2
for gene in parsed_json['result'][i]['genes'] :
assert gene == 'RPGR' or gene == 'TTLL5' or gene == 'DRAM2' or gene == 'TRIM32'
# Next person.
i=1
assert parsed_json['result'][i]['individual'] == 'person1'
assert parsed_json['result'][i]['gender'] == 'M'
assert parsed_json['result'][i]['phenotypes'][0]['name'] == 'Visual impairment'
assert parsed_json['result'][i]['phenotypeScore'] == 0.69
assert parsed_json['result'][i]['hom_count'] == 1
assert parsed_json['result'][i]['het_count'] == 1
assert parsed_json['result'][i]['genes'][0] == 'TTLL5'
if __name__ == '__main__':
unittest.main()
|
phenopolis/phenopolis
|
tests/test_my_patients.py
|
Python
|
mit
| 2,878
| 0.009382
|
"""
Course API Serializers. Representing course catalog data
"""
import urllib
from django.core.urlresolvers import reverse
from django.template import defaultfilters
from rest_framework import serializers
from lms.djangoapps.courseware.courses import course_image_url, get_course_about_section
from xmodule.course_module import DEFAULT_START_DATE
class _MediaSerializer(serializers.Serializer): # pylint: disable=abstract-method
"""
Nested serializer to represent a media object.
"""
def __init__(self, uri_parser, *args, **kwargs):
super(_MediaSerializer, self).__init__(*args, **kwargs)
self.uri_parser = uri_parser
uri = serializers.SerializerMethodField(source='*')
def get_uri(self, course):
"""
Get the representation for the media resource's URI
"""
return self.uri_parser(course)
class _CourseApiMediaCollectionSerializer(serializers.Serializer): # pylint: disable=abstract-method
"""
Nested serializer to represent a collection of media objects
"""
course_image = _MediaSerializer(source='*', uri_parser=course_image_url)
class CourseSerializer(serializers.Serializer): # pylint: disable=abstract-method
"""
Serializer for Course objects
"""
course_id = serializers.CharField(source='id', read_only=True)
name = serializers.CharField(source='display_name_with_default')
number = serializers.CharField(source='display_number_with_default')
org = serializers.CharField(source='display_org_with_default')
description = serializers.SerializerMethodField()
media = _CourseApiMediaCollectionSerializer(source='*')
start = serializers.DateTimeField()
start_type = serializers.SerializerMethodField()
start_display = serializers.SerializerMethodField()
end = serializers.DateTimeField()
enrollment_start = serializers.DateTimeField()
enrollment_end = serializers.DateTimeField()
blocks_url = serializers.SerializerMethodField()
def get_start_type(self, course):
"""
Get the representation for SerializerMethodField `start_type`
"""
if course.advertised_start is not None:
return u'string'
elif course.start != DEFAULT_START_DATE:
return u'timestamp'
else:
return u'empty'
def get_start_display(self, course):
"""
Get the representation for SerializerMethodField `start_display`
"""
if course.advertised_start is not None:
return course.advertised_start
elif course.start != DEFAULT_START_DATE:
return defaultfilters.date(course.start, "DATE_FORMAT")
else:
return None
def get_description(self, course):
"""
Get the representation for SerializerMethodField `description`
"""
return get_course_about_section(self.context['request'], course, 'short_description').strip()
def get_blocks_url(self, course):
"""
Get the representation for SerializerMethodField `blocks_url`
"""
base_url = '?'.join([
reverse('blocks_in_course'),
urllib.urlencode({'course_id': course.id}),
])
return self.context['request'].build_absolute_uri(base_url)
|
pomegranited/edx-platform
|
lms/djangoapps/course_api/serializers.py
|
Python
|
agpl-3.0
| 3,293
| 0.001518
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
from odoo import models, fields, api, _
from odoo.exceptions import ValidationError, UserError
from odoo.tools.float_utils import float_split_str
from odoo.tools.misc import mod10r
l10n_ch_ISR_NUMBER_LENGTH = 27
l10n_ch_ISR_NUMBER_ISSUER_LENGTH = 12
class AccountMove(models.Model):
_inherit = 'account.move'
l10n_ch_isr_subscription = fields.Char(compute='_compute_l10n_ch_isr_subscription', help='ISR subscription number identifying your company or your bank to generate ISR.')
l10n_ch_isr_subscription_formatted = fields.Char(compute='_compute_l10n_ch_isr_subscription', help="ISR subscription number identifying your company or your bank, formatted with '-' and without the padding zeros, to generate the ISR report.")
l10n_ch_isr_number = fields.Char(compute='_compute_l10n_ch_isr_number', store=True, help='The reference number associated with this invoice')
l10n_ch_isr_number_spaced = fields.Char(compute='_compute_l10n_ch_isr_number_spaced', help="ISR number split in blocks of 5 characters (right-justified), to generate ISR report.")
l10n_ch_isr_optical_line = fields.Char(compute="_compute_l10n_ch_isr_optical_line", help='Optical reading line, as it will be printed on ISR')
l10n_ch_isr_valid = fields.Boolean(compute='_compute_l10n_ch_isr_valid', help='Boolean value. True iff all the data required to generate the ISR are present')
l10n_ch_isr_sent = fields.Boolean(default=False, help="Boolean value telling whether or not the ISR corresponding to this invoice has already been printed or sent by mail.")
l10n_ch_currency_name = fields.Char(related='currency_id.name', readonly=True, string="Currency Name", help="The name of this invoice's currency") #This field is used in the "invisible" condition field of the 'Print ISR' button.
@api.depends('partner_bank_id.l10n_ch_isr_subscription_eur', 'partner_bank_id.l10n_ch_isr_subscription_chf')
def _compute_l10n_ch_isr_subscription(self):
""" Computes the ISR subscription identifying your company or the bank that allows to generate ISR. And formats it accordingly"""
def _format_isr_subscription(isr_subscription):
#format the isr as per specifications
currency_code = isr_subscription[:2]
middle_part = isr_subscription[2:-1]
trailing_cipher = isr_subscription[-1]
middle_part = re.sub('^0*', '', middle_part)
return currency_code + '-' + middle_part + '-' + trailing_cipher
def _format_isr_subscription_scanline(isr_subscription):
# format the isr for scanline
return isr_subscription[:2] + isr_subscription[2:-1].rjust(6, '0') + isr_subscription[-1:]
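# Worked example with a hypothetical subscription number "01-162-8": after
# stripping the dashes ("011628"), the scanline form pads the middle part to
# six digits, giving "010001628", while the formatted form drops the padding
# zeros again, giving "01-162-8".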
for record in self:
record.l10n_ch_isr_subscription = False
record.l10n_ch_isr_subscription_formatted = False
if record.partner_bank_id:
if record.currency_id.name == 'EUR':
isr_subscription = record.partner_bank_id.l10n_ch_isr_subscription_eur
elif record.currency_id.name == 'CHF':
isr_subscription = record.partner_bank_id.l10n_ch_isr_subscription_chf
else:
#we don't format if in another currency as EUR or CHF
continue
if isr_subscription:
isr_subscription = isr_subscription.replace("-", "") # In case the user put the -
record.l10n_ch_isr_subscription = _format_isr_subscription_scanline(isr_subscription)
record.l10n_ch_isr_subscription_formatted = _format_isr_subscription(isr_subscription)
@api.depends('name', 'partner_bank_id.l10n_ch_postal')
def _compute_l10n_ch_isr_number(self):
""" The ISR reference number is 27 characters long. The first 12 of them
contain the postal account number of this ISR's issuer, removing the zeros
at the beginning and filling the empty places with zeros on the right if it is
too short. The next 14 characters contain an internal reference identifying
the invoice. For this, we use the invoice sequence number, removing each
of its non-digit characters, and pad the unused spaces on the left of
this number with zeros. The last character of the ISR number is the result
of a recursive modulo 10 on its first 26 characters.
"""
for record in self:
if record.name and record.partner_bank_id and record.partner_bank_id.l10n_ch_postal:
invoice_issuer_ref = record.partner_bank_id.l10n_ch_postal.ljust(l10n_ch_ISR_NUMBER_ISSUER_LENGTH, '0')
invoice_ref = re.sub('[^\d]', '', record.name)
#We only keep the last digits of the sequence number if it is too long
invoice_ref = invoice_ref[-l10n_ch_ISR_NUMBER_ISSUER_LENGTH:]
internal_ref = invoice_ref.zfill(l10n_ch_ISR_NUMBER_LENGTH - l10n_ch_ISR_NUMBER_ISSUER_LENGTH - 1) # -1 for mod10r check character
record.l10n_ch_isr_number = mod10r(invoice_issuer_ref + internal_ref)
else:
record.l10n_ch_isr_number = False
@api.depends('l10n_ch_isr_number')
def _compute_l10n_ch_isr_number_spaced(self):
def _space_isr_number(isr_number):
to_treat = isr_number
res = ''
while to_treat:
res = to_treat[-5:] + res
to_treat = to_treat[:-5]
if to_treat:
res = ' ' + res
return res
for record in self:
if record.name and record.partner_bank_id and record.partner_bank_id.l10n_ch_postal:
record.l10n_ch_isr_number_spaced = _space_isr_number(record.l10n_ch_isr_number)
else:
record.l10n_ch_isr_number_spaced = False
@api.depends(
'currency_id.name', 'amount_residual', 'name',
'partner_bank_id.l10n_ch_postal',
'partner_bank_id.l10n_ch_isr_subscription_eur',
'partner_bank_id.l10n_ch_isr_subscription_chf')
def _compute_l10n_ch_isr_optical_line(self):
""" The optical reading line of the ISR looks like this :
left>isr_ref+ bank_ref>
Where:
- left is composed of two ciphers indicating the currency (01 for CHF,
03 for EUR), followed by ten characters containing the total of the
invoice (with the dot between units and cents removed, everything being
right-aligned and empty places filled with zeros). After the total,
left contains a last cipher, which is the result of a recursive modulo
10 function ran over the rest of it.
- isr_ref is the ISR reference number
- bank_ref is the full postal bank code (aka clearing number) of the
bank supporting the ISR (including the zeros).
"""
for record in self:
record.l10n_ch_isr_optical_line = ''
if record.l10n_ch_isr_number and record.l10n_ch_isr_subscription and record.currency_id.name:
#Left part
currency_code = None
if record.currency_id.name == 'CHF':
currency_code = '01'
elif record.currency_id.name == 'EUR':
currency_code = '03'
units, cents = float_split_str(record.amount_residual, 2)
amount_to_display = units + cents
amount_ref = amount_to_display.zfill(10)
left = currency_code + amount_ref
left = mod10r(left)
#Final assembly (the space after the '+' is no typo, it stands in the specs.)
record.l10n_ch_isr_optical_line = left + '>' + record.l10n_ch_isr_number + '+ ' + record.l10n_ch_isr_subscription + '>'
@api.depends(
'move_type', 'name', 'currency_id.name',
'partner_bank_id.l10n_ch_postal',
'partner_bank_id.l10n_ch_isr_subscription_eur',
'partner_bank_id.l10n_ch_isr_subscription_chf')
def _compute_l10n_ch_isr_valid(self):
"""Returns True if all the data required to generate the ISR are present"""
for record in self:
record.l10n_ch_isr_valid = record.move_type == 'out_invoice' and\
record.name and \
record.l10n_ch_isr_subscription and \
record.partner_bank_id.l10n_ch_postal and \
record.l10n_ch_currency_name in ['EUR', 'CHF']
def split_total_amount(self):
""" Splits the total amount of this invoice in two parts, using the dot as
a separator, and taking two precision digits (always displayed).
These two parts are returned as the two elements of a tuple, as strings
to print in the report.
This function is needed on the model, as it must be called in the report
template, which cannot reference static functions
"""
return float_split_str(self.amount_residual, 2)
def isr_print(self):
""" Triggered by the 'Print ISR' button.
"""
self.ensure_one()
if self.l10n_ch_isr_valid:
self.l10n_ch_isr_sent = True
return self.env.ref('l10n_ch.l10n_ch_isr_report').report_action(self)
else:
raise ValidationError(_("""You cannot generate an ISR yet.\n
For this, you need to :\n
- set a valid postal account number (or an IBAN referencing one) for your company\n
- define its bank\n
- associate this bank with a postal reference for the currency used in this invoice\n
- fill the 'bank account' field of the invoice with the postal to be used to receive the related payment. A default account will be automatically set for all invoices created after you defined a postal account for your company."""))
def print_ch_qr_bill(self):
""" Triggered by the 'Print QR-bill' button.
"""
self.ensure_one()
if not self.partner_bank_id._eligible_for_qr_code('ch_qr', self.partner_id, self.currency_id):
raise UserError(_("Cannot generate the QR-bill. Please check you have configured the address of your company and debtor. If you are using a QR-IBAN, also check the invoice's payment reference is a QR reference."))
self.l10n_ch_isr_sent = True
return self.env.ref('l10n_ch.l10n_ch_qr_report').report_action(self)
def action_invoice_sent(self):
# OVERRIDE
rslt = super(AccountMove, self).action_invoice_sent()
if self.l10n_ch_isr_valid:
rslt['context']['l10n_ch_mark_isr_as_sent'] = True
return rslt
@api.returns('mail.message', lambda value: value.id)
def message_post(self, **kwargs):
if self.env.context.get('l10n_ch_mark_isr_as_sent'):
self.filtered(lambda inv: not inv.l10n_ch_isr_sent).write({'l10n_ch_isr_sent': True})
return super(AccountMove, self.with_context(mail_post_autofollow=True)).message_post(**kwargs)
def _get_invoice_reference_ch_invoice(self):
""" This sets ISR reference number which is generated based on customer's `Bank Account` and set it as
`Payment Reference` of the invoice when invoice's journal is using Switzerland's communication standard
"""
self.ensure_one()
return self.l10n_ch_isr_number
def _get_invoice_reference_ch_partner(self):
""" This sets ISR reference number which is generated based on customer's `Bank Account` and set it as
`Payment Reference` of the invoice when invoice's journal is using Switzerland's communication standard
"""
self.ensure_one()
return self.l10n_ch_isr_number
|
ddico/odoo
|
addons/l10n_ch/models/account_invoice.py
|
Python
|
agpl-3.0
| 11,966
| 0.005265
|
from django.contrib import admin
from polls.models import Choice, Poll
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class PollAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question']}),
('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
]
inlines = [ChoiceInline]
list_display = ('question','pub_date')
list_filter = ['pub_date']
search_fields = ['question']
date_hierarchy = 'pub_date'
admin.site.register(Poll, PollAdmin)
|
damiencalloway/djtut
|
mysite/polls/admin.py
|
Python
|
mit
| 570
| 0.014035
|
import json
import sys
import requests
from collections import Counter
from wapy.api import Wapy
from http.server import BaseHTTPRequestHandler, HTTPServer
wapy = Wapy('frt6ajvkqm4aexwjksrukrey')
def removes(yes):
no = ["Walmart.com", ".", ","]
for x in no:
yes = yes.replace(x, '')
return yes
def post_some_dict(dict):
headers = {'Content-type': 'application/json'}
r = requests.post("http://127.0.0.1:5000/search", data=json.dumps(dict), headers=headers)
return r.text
def parse_image(image):
out = json.loads(post_some_dict({"image_url": image}))['titles']
print(out)
#out = [x for x in out if 'walmart' in x]
threshold = len(out)-1
#out = [x[27:-9] for x in out]
#print(out)
large = []
for line in out:
line = line.replace('-', '')
line = removes(line)
line = line.split(' ')
for word in line:
large.append(word)
#print(large)
c = Counter(large).most_common()
keywords = []
for x in c:
if x[1] > threshold:
keywords.append(x[0])
print(keywords)
return ' '.join(keywords)
def parse_wallmart(keywords):
products = wapy.search(' '.join(keywords))
out = {}
out['name'] = products[0].name
out['rating'] = products[0].customer_rating
out['price'] = products[0].sale_price
return json.dumps(out)
class StoreHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
# Serve the most recently uploaded image file back to the client.
with open('/var/www/html/image.jpg', 'rb') as fh:
self.wfile.write(fh.read())
def do_POST(self):
self.send_response(200)
length = self.headers['content-length']
data = self.rfile.read(int(length))
with open('/var/www/html/image.jpg', 'wb') as fh:
fh.write(data)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(parse_wallmart(parse_image('http://45.33.95.66/image.jpg')).encode())
server = HTTPServer(('', 8081), StoreHandler)
server.serve_forever()
|
Pennapps-XV/backend
|
root/parse-server.py
|
Python
|
gpl-3.0
| 2,081
| 0.005766
|
import json
from typing import Union, List, Dict, Any
import torch
from torch.autograd import Variable
from torch.nn.modules import Dropout
import numpy
import h5py
from overrides import overrides
from allennlp.common.file_utils import cached_path
from allennlp.common.checks import ConfigurationError
from allennlp.common import Registrable, Params
from allennlp.modules.elmo_lstm import ElmoLstm
from allennlp.modules.highway import Highway
from allennlp.modules.scalar_mix import ScalarMix
from allennlp.nn.util import remove_sentence_boundaries, add_sentence_boundary_token_ids
from allennlp.data import Vocabulary
from allennlp.data.token_indexers.elmo_indexer import ELMoCharacterMapper
# pylint: disable=attribute-defined-outside-init
@Registrable.register('elmo')
class Elmo(torch.nn.Module, Registrable):
"""
Compute ELMo representations using a pre-trained bidirectional language model.
See "Deep contextualized word representations", Peters et al. for details.
This module takes character id input and computes ``num_output_representations`` different layers
of ELMo representations. Typically ``num_output_representations`` is 1 or 2. For example, in
the case of the SRL model in the above paper, ``num_output_representations=1`` where ELMo was included at
the input token representation layer. In the case of the SQuAD model, ``num_output_representations=2``
as ELMo was also included at the GRU output layer.
In the implementation below, we learn separate scalar weights for each output layer,
but only run the biLM once on each input sequence for efficiency.
Parameters
----------
options_file : ``str``, required.
ELMo JSON options file
weight_file : ``str``, required.
ELMo hdf5 weight file
num_output_representations: ``int``, required.
The number of ELMo representation layers to output.
do_layer_norm : ``bool``, optional, (default=False).
Should we apply layer normalization (passed to ``ScalarMix``)?
dropout : ``float``, optional, (default = 0.5).
The dropout to be applied to the ELMo representations.
"""
def __init__(self,
options_file: str,
weight_file: str,
num_output_representations: int,
do_layer_norm: bool = False,
dropout: float = 0.5) -> None:
super(Elmo, self).__init__()
self._elmo_lstm = _ElmoBiLm(options_file, weight_file)
self._dropout = Dropout(p=dropout)
self._scalar_mixes: Any = []
for k in range(num_output_representations):
scalar_mix = ScalarMix(self._elmo_lstm.num_layers, do_layer_norm=do_layer_norm)
self.add_module('scalar_mix_{}'.format(k), scalar_mix)
self._scalar_mixes.append(scalar_mix)
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor) -> Dict[str, Union[torch.Tensor, List[torch.Tensor]]]:
"""
Parameters
----------
inputs : ``torch.autograd.Variable``
Shape ``(batch_size, timesteps, 50)`` of character ids representing the current batch.
We also accept tensors with additional optional dimensions:
``(batch_size, dim0, dim1, ..., dimn, timesteps, 50)``
Returns
-------
Dict with keys:
``'elmo_representations'``: ``List[torch.autograd.Variable]``
A ``num_output_representations`` list of ELMo representations for the input sequence.
Each representation is shape ``(batch_size, timesteps, embedding_dim)``
``'mask'``: ``torch.autograd.Variable``
Shape ``(batch_size, timesteps)`` long tensor with sequence mask.
"""
# reshape the input if needed
original_shape = inputs.size()
timesteps, num_characters = original_shape[-2:]
if len(original_shape) > 3:
reshaped_inputs = inputs.view(-1, timesteps, num_characters)
else:
reshaped_inputs = inputs
# run the biLM
bilm_output = self._elmo_lstm(reshaped_inputs)
layer_activations = bilm_output['activations']
mask_with_bos_eos = bilm_output['mask']
# compute the elmo representations
representations = []
for scalar_mix in self._scalar_mixes:
representation_with_bos_eos = scalar_mix(layer_activations, mask_with_bos_eos)
representation_without_bos_eos, mask_without_bos_eos = remove_sentence_boundaries(
representation_with_bos_eos, mask_with_bos_eos
)
representations.append(self._dropout(representation_without_bos_eos))
# reshape if necessary
if len(original_shape) > 3:
mask = mask_without_bos_eos.view(original_shape[:-1])
elmo_representations = [representation.view(original_shape[:-1] + (-1, ))
for representation in representations]
else:
mask = mask_without_bos_eos
elmo_representations = representations
return {'elmo_representations': elmo_representations, 'mask': mask}
@classmethod
def from_params(cls, params: Params) -> 'Elmo':
# Add files to archive
params.add_file_to_archive('options_file')
params.add_file_to_archive('weight_file')
options_file = params.pop('options_file')
weight_file = params.pop('weight_file')
num_output_representations = params.pop('num_output_representations')
do_layer_norm = params.pop('do_layer_norm', False)
params.assert_empty(cls.__name__)
return cls(options_file, weight_file, num_output_representations, do_layer_norm)
class _ElmoCharacterEncoder(torch.nn.Module):
"""
Compute context sensitive token representation using pretrained biLM.
This embedder has input character ids of size (batch_size, sequence_length, 50)
and returns (batch_size, sequence_length + 2, embedding_dim), where embedding_dim
is specified in the options file (typically 512).
We add special entries at the beginning and end of each sequence corresponding
to <S> and </S>, the beginning and end of sentence tokens.
Note: this is a lower level class useful for advanced usage. Most users should
use ``ElmoTokenEmbedder`` or ``allennlp.modules.Elmo`` instead.
Parameters
----------
options_file : ``str``
ELMo JSON options file
weight_file : ``str``
ELMo hdf5 weight file
The relevant section of the options file is something like:
.. example-code::
.. code-block:: python
{'char_cnn': {
'activation': 'relu',
'embedding': {'dim': 4},
'filters': [[1, 4], [2, 8], [3, 16], [4, 32], [5, 64]],
'max_characters_per_token': 50,
'n_characters': 262,
'n_highway': 2
}
}
"""
def __init__(self,
options_file: str,
weight_file: str) -> None:
super(_ElmoCharacterEncoder, self).__init__()
with open(cached_path(options_file), 'r') as fin:
self._options = json.load(fin)
self._weight_file = weight_file
self.output_dim = self._options['lstm']['projection_dim']
self._load_weights()
# Cache the arrays for use in forward -- +1 due to masking.
self._beginning_of_sentence_characters = Variable(torch.from_numpy(
numpy.array(ELMoCharacterMapper.beginning_of_sentence_characters) + 1
))
self._end_of_sentence_characters = Variable(torch.from_numpy(
numpy.array(ELMoCharacterMapper.end_of_sentence_characters) + 1
))
def get_output_dim(self):
return self.output_dim
@overrides
def forward(self, inputs: torch.Tensor) -> Dict[str, torch.Tensor]: # pylint: disable=arguments-differ
"""
Compute context insensitive token embeddings for ELMo representations.
Parameters
----------
inputs: ``torch.autograd.Variable``
Shape ``(batch_size, sequence_length, 50)`` of character ids representing the
current batch.
Returns
-------
Dict with keys:
``'token_embedding'``: ``torch.autograd.Variable``
Shape ``(batch_size, sequence_length + 2, embedding_dim)`` tensor with context
insensitive token representations.
``'mask'``: ``torch.autograd.Variable``
Shape ``(batch_size, sequence_length + 2)`` long tensor with sequence mask.
"""
# Add BOS/EOS
mask = ((inputs > 0).long().sum(dim=-1) > 0).long()
character_ids_with_bos_eos, mask_with_bos_eos = add_sentence_boundary_token_ids(
inputs,
mask,
self._beginning_of_sentence_characters,
self._end_of_sentence_characters
)
# the character id embedding
max_chars_per_token = self._options['char_cnn']['max_characters_per_token']
# (batch_size * sequence_length, max_chars_per_token, embed_dim)
character_embedding = torch.nn.functional.embedding(
character_ids_with_bos_eos.view(-1, max_chars_per_token),
self._char_embedding_weights
)
# run convolutions
cnn_options = self._options['char_cnn']
if cnn_options['activation'] == 'tanh':
activation = torch.nn.functional.tanh
elif cnn_options['activation'] == 'relu':
activation = torch.nn.functional.relu
else:
raise ConfigurationError("Unknown activation")
# (batch_size * sequence_length, embed_dim, max_chars_per_token)
character_embedding = torch.transpose(character_embedding, 1, 2)
convs = []
for conv in self._convolutions:
convolved = conv(character_embedding)
# (batch_size * sequence_length, n_filters for this width)
convolved, _ = torch.max(convolved, dim=-1)
convolved = activation(convolved)
convs.append(convolved)
# (batch_size * sequence_length, n_filters)
token_embedding = torch.cat(convs, dim=-1)
# apply the highway layers (batch_size * sequence_length, n_filters)
token_embedding = self._highways(token_embedding)
# final projection (batch_size * sequence_length, embedding_dim)
token_embedding = self._projection(token_embedding)
# reshape to (batch_size, sequence_length, embedding_dim)
batch_size, sequence_length, _ = character_ids_with_bos_eos.size()
return {
'mask': mask_with_bos_eos,
'token_embedding': token_embedding.view(batch_size, sequence_length, -1)
}
def _load_weights(self):
self._load_char_embedding()
self._load_cnn_weights()
self._load_highway()
self._load_projection()
def _load_char_embedding(self):
with h5py.File(cached_path(self._weight_file), 'r') as fin:
char_embed_weights = fin['char_embed'][...]
weights = numpy.zeros(
(char_embed_weights.shape[0] + 1, char_embed_weights.shape[1]),
dtype='float32'
)
weights[1:, :] = char_embed_weights
self._char_embedding_weights = torch.nn.Parameter(
torch.FloatTensor(weights), requires_grad=False
)
def _load_cnn_weights(self):
cnn_options = self._options['char_cnn']
filters = cnn_options['filters']
char_embed_dim = cnn_options['embedding']['dim']
convolutions = []
for i, (width, num) in enumerate(filters):
conv = torch.nn.Conv1d(
in_channels=char_embed_dim,
out_channels=num,
kernel_size=width,
bias=True
)
# load the weights
with h5py.File(cached_path(self._weight_file), 'r') as fin:
weight = fin['CNN']['W_cnn_{}'.format(i)][...]
bias = fin['CNN']['b_cnn_{}'.format(i)][...]
w_reshaped = numpy.transpose(weight.squeeze(axis=0), axes=(2, 1, 0))
if w_reshaped.shape != tuple(conv.weight.data.shape):
raise ValueError("Invalid weight file")
conv.weight.data.copy_(torch.FloatTensor(w_reshaped))
conv.bias.data.copy_(torch.FloatTensor(bias))
conv.weight.requires_grad = False
conv.bias.requires_grad = False
convolutions.append(conv)
self.add_module('char_conv_{}'.format(i), conv)
self._convolutions = convolutions
def _load_highway(self):
# pylint: disable=protected-access
# the highway layers have same dimensionality as the number of cnn filters
cnn_options = self._options['char_cnn']
filters = cnn_options['filters']
n_filters = sum(f[1] for f in filters)
n_highway = cnn_options['n_highway']
# create the layers, and load the weights
self._highways = Highway(n_filters, n_highway, activation=torch.nn.functional.relu)
for k in range(n_highway):
# The AllenNLP highway is one matrix multiplication with concatenation of
# transform and carry weights.
with h5py.File(cached_path(self._weight_file), 'r') as fin:
# The weights are transposed due to multiplication order assumptions in tf
# vs pytorch (tf.matmul(X, W) vs pytorch.matmul(W, X))
w_transform = numpy.transpose(fin['CNN_high_{}'.format(k)]['W_transform'][...])
# -1.0 since AllenNLP is g * x + (1 - g) * f(x) but tf is (1 - g) * x + g * f(x)
w_carry = -1.0 * numpy.transpose(fin['CNN_high_{}'.format(k)]['W_carry'][...])
weight = numpy.concatenate([w_transform, w_carry], axis=0)
self._highways._layers[k].weight.data.copy_(torch.FloatTensor(weight))
self._highways._layers[k].weight.requires_grad = False
b_transform = fin['CNN_high_{}'.format(k)]['b_transform'][...]
b_carry = -1.0 * fin['CNN_high_{}'.format(k)]['b_carry'][...]
bias = numpy.concatenate([b_transform, b_carry], axis=0)
self._highways._layers[k].bias.data.copy_(torch.FloatTensor(bias))
self._highways._layers[k].bias.requires_grad = False
def _load_projection(self):
cnn_options = self._options['char_cnn']
filters = cnn_options['filters']
n_filters = sum(f[1] for f in filters)
self._projection = torch.nn.Linear(n_filters, self.output_dim, bias=True)
with h5py.File(cached_path(self._weight_file), 'r') as fin:
weight = fin['CNN_proj']['W_proj'][...]
bias = fin['CNN_proj']['b_proj'][...]
self._projection.weight.data.copy_(torch.FloatTensor(numpy.transpose(weight)))
self._projection.bias.data.copy_(torch.FloatTensor(bias))
self._projection.weight.requires_grad = False
self._projection.bias.requires_grad = False
@classmethod
def from_params(cls, vocab: Vocabulary, params: Params) -> '_ElmoCharacterEncoder':
# pylint: disable=unused-argument
options_file = params.pop('options_file')
weight_file = params.pop('weight_file')
params.assert_empty(cls.__name__)
return cls(options_file, weight_file)
class _ElmoBiLm(torch.nn.Module):
"""
Run a pre-trained bidirectional language model, outputting the activations at each
layer for weighting together into an ELMo representation (with
``allennlp.modules.seq2seq_encoders.Elmo``). This is a lower level class, useful
for advanced uses, but most users should use ``allennlp.modules.seq2seq_encoders.Elmo``
directly.
Parameters
----------
options_file : ``str``
ELMo JSON options file
weight_file : ``str``
ELMo hdf5 weight file
"""
def __init__(self,
options_file: str,
weight_file: str) -> None:
super(_ElmoBiLm, self).__init__()
self._token_embedder = _ElmoCharacterEncoder(options_file, weight_file)
with open(cached_path(options_file), 'r') as fin:
options = json.load(fin)
if not options['lstm'].get('use_skip_connections'):
raise ConfigurationError('We only support pretrained biLMs with residual connections')
self._elmo_lstm = ElmoLstm(input_size=options['lstm']['projection_dim'],
hidden_size=options['lstm']['projection_dim'],
cell_size=options['lstm']['dim'],
num_layers=options['lstm']['n_layers'],
memory_cell_clip_value=options['lstm']['cell_clip'],
state_projection_clip_value=options['lstm']['proj_clip'])
self._elmo_lstm.load_weights(weight_file)
# Number of representation layers including context independent layer
self.num_layers = options['lstm']['n_layers'] + 1
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor) -> Dict[str, Union[torch.Tensor, List[torch.Tensor]]]:
"""
Parameters
----------
inputs: ``torch.autograd.Variable``
Shape ``(batch_size, timesteps, 50)`` of character ids representing the current batch.
Returns
-------
Dict with keys:
``'activations'``: ``List[torch.autograd.Variable]``
A list of activations at each layer of the network, each of shape
``(batch_size, timesteps + 2, embedding_dim)``
``'mask'``: ``torch.autograd.Variable``
Shape ``(batch_size, timesteps + 2)`` long tensor with sequence mask.
Note that the output tensors all include additional special begin and end of sequence
markers.
"""
token_embedding = self._token_embedder(inputs)
type_representation = token_embedding['token_embedding']
mask = token_embedding['mask']
lstm_outputs = self._elmo_lstm(type_representation, mask)
# Prepare the output. The first layer is duplicated.
output_tensors = [
torch.cat([type_representation, type_representation], dim=-1)
]
for layer_activations in torch.chunk(lstm_outputs, lstm_outputs.size(0), dim=0):
output_tensors.append(layer_activations.squeeze(0))
return {
'activations': output_tensors,
'mask': mask,
}
|
nafitzgerald/allennlp
|
allennlp/modules/elmo.py
|
Python
|
apache-2.0
| 18,830
| 0.002921
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# Author: Surabhi Gupta
import sys
import numpy as np
import matplotlib.pylab as pyl
def analyzeOverlaps(activeCoincsFile, encodingsFile, dataset):
'''Mirror Image Visualization: Shows the encoding space juxtaposed against the
coincidence space. The encoding space is the bottom-up sensory encoding and
the coincidence space depicts the corresponding activation of coincidences in
the SP. Hence, the mirror image visualization is a visual depiction of the
mapping of SP cells to the input representations.
Note:
* The files spBUOut and sensorBUOut are assumed to be in the output format
used for LPF experiment outputs.
* BU outputs for some sample datasets are provided. Specify the name of the
dataset as an option while running this script.
'''
lines = activeCoincsFile.readlines()
inputs = encodingsFile.readlines()
w = len(inputs[0].split(' '))-1
patterns = set([])
encodings = set([])
coincs = [] #The set of all coincidences that have won at least once
reUsedCoincs = []
firstLine = inputs[0].split(' ')
size = int(firstLine.pop(0))
spOutput = np.zeros((len(lines),40))
inputBits = np.zeros((len(lines),w))
print 'Total n:', size
print 'Total number of records in the file:', len(lines), '\n'
print 'w:', w
count = 0
for x in xrange(len(lines)):
inputSpace = [] #Encoded representation for each input
spBUout = [int(z) for z in lines[x].split(' ')]
spBUout.pop(0) #The first element of each row of spBUOut is the size of the SP
temp = set(spBUout)
spOutput[x]=spBUout
input = [int(z) for z in inputs[x].split(' ')]
input.pop(0) #The first element of each row of sensorBUout is the size of the encoding space
tempInput = set(input)
inputBits[x]=input
#Creating the encoding space
for m in xrange(size):
if m in tempInput:
inputSpace.append(m)
else:
inputSpace.append('|') #A non-active bit
repeatedBits = tempInput.intersection(encodings) #Storing the bits that have been previously active
reUsed = temp.intersection(patterns) #Checking if any of the active cells have been previously active
    #Dividing the coincidences into two different categories.
if len(reUsed)==0:
coincs.append((count,temp,repeatedBits,inputSpace, tempInput)) #Pattern no, active cells, repeated bits, encoding (full), encoding (summary)
else:
reUsedCoincs.append((count,temp,repeatedBits,inputSpace, tempInput))
patterns=patterns.union(temp) #Adding the active cells to the set of coincs that have been active at least once
encodings = encodings.union(tempInput)
count +=1
overlap = {}
overlapVal = 0
seen = []
seen = (printOverlaps(coincs, coincs, seen))
print len(seen), 'sets of 40 cells'
seen = printOverlaps(reUsedCoincs, coincs, seen)
Summ=[]
for z in coincs:
c=0
for y in reUsedCoincs:
c += len(z[1].intersection(y[1]))
Summ.append(c)
print 'Sum: ', Summ
for m in xrange(3):
displayLimit = min(51, len(spOutput[m*200:]))
if displayLimit>0:
      drawFile(dataset,
               np.zeros([len(inputBits[:(m+1)*displayLimit]), len(inputBits[:(m+1)*displayLimit])]),
               inputBits[:(m+1)*displayLimit], spOutput[:(m+1)*displayLimit], w, m+1)
else:
print 'No more records to display'
pyl.show()
def drawFile(dataset, matrix, patterns, cells, w, fnum):
'''The similarity of two patterns in the bit-encoding space is displayed alongside
their similarity in the sp-coinc space.'''
score=0
count = 0
assert len(patterns)==len(cells)
for p in xrange(len(patterns)-1):
matrix[p+1:,p] = [len(set(patterns[p]).intersection(set(q)))*100/w for q in patterns[p+1:]]
matrix[p,p+1:] = [len(set(cells[p]).intersection(set(r)))*5/2 for r in cells[p+1:]]
score += sum(abs(np.array(matrix[p+1:,p])-np.array(matrix[p,p+1:])))
count += len(matrix[p+1:,p])
print 'Score', score/count
fig = pyl.figure(figsize = (10,10), num = fnum)
pyl.matshow(matrix, fignum = fnum)
pyl.colorbar()
pyl.title('Coincidence Space', verticalalignment='top', fontsize=12)
pyl.xlabel('The Mirror Image Visualization for '+dataset, fontsize=17)
pyl.ylabel('Encoding space', fontsize=12)
def printOverlaps(comparedTo, coincs, seen):
""" Compare the results and return True if success, False if failure
Parameters:
--------------------------------------------------------------------
coincs: Which cells are we comparing?
comparedTo: The set of 40 cells we being compared to (they have no overlap with seen)
seen: Which of the cells we are comparing to have already been encountered.
This helps glue together the unique and reused coincs
"""
inputOverlap = 0
cellOverlap = 0
for y in comparedTo:
closestInputs = []
closestCells = []
if len(seen)>0:
inputOverlap = max([len(seen[m][1].intersection(y[4])) for m in xrange(len(seen))])
cellOverlap = max([len(seen[m][0].intersection(y[1])) for m in xrange(len(seen))])
for m in xrange( len(seen) ):
if len(seen[m][1].intersection(y[4]))==inputOverlap:
closestInputs.append(seen[m][2])
if len(seen[m][0].intersection(y[1]))==cellOverlap:
closestCells.append(seen[m][2])
seen.append((y[1], y[4], y[0]))
print 'Pattern',y[0]+1,':',' '.join(str(len(z[1].intersection(y[1]))).rjust(2) for z in coincs),'input overlap:', inputOverlap, ';', len(closestInputs), 'closest encodings:',','.join(str(m+1) for m in closestInputs).ljust(15), \
'cell overlap:', cellOverlap, ';', len(closestCells), 'closest set(s):',','.join(str(m+1) for m in closestCells)
return seen
################################################################################################################
if __name__=='__main__':
  if len(sys.argv)<2: # no dataset specified on the command line
print ('Input files required. Read documentation for details.')
else:
dataset = sys.argv[1]
activeCoincsPath = dataset+'/'+dataset+'_spBUOut.txt'
encodingsPath = dataset+'/'+dataset+'_sensorBUOut.txt'
activeCoincsFile=open(activeCoincsPath, 'r')
encodingsFile=open(encodingsPath, 'r')
analyzeOverlaps(activeCoincsFile, encodingsFile, dataset)
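# --- Illustrative sketch (not part of the original script) ---
# The overlap metric used inside drawFile, expressed on its own: the percentage
# of shared active bits between two encodings, relative to the encoding width w.
def _example_overlap_percent(pattern_a, pattern_b, w):
  return len(set(pattern_a).intersection(set(pattern_b))) * 100 / w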
|
tomsilver/nupic
|
examples/opf/tools/MirrorImageViz/mirrorImageViz.py
|
Python
|
gpl-3.0
| 7,336
| 0.023719
|
#
# HotC Server
# CTN2 Jackson
#
import socket
def _recv_data(conn):
data = conn.recv(1024)
command, _, arguments = data.partition(' ')
return command, arguments
def game(conn):
print 'success'
def login_loop(conn):
while True:
command, arguments = _recv_data(conn)
if command == 'login':
username, password = arguments.split()
# check if username and password is correct
with open('login.d', 'r') as f:
logins = eval(f.read())
for k, v in logins.items():
if (k == username) and (v == password):
conn.send('login_success')
return
conn.send('login_failure')
elif command == 'register':
username, password = arguments.split()
# check if username already registered
with open('login.d', 'r') as f:
logins = eval(f.read())
            for k, _ in logins.items():
                if k == username:
                    conn.send('register_failure')
                    break
            else:
                # register new user (only reached when the username was not found above)
                logins[username] = password
                with open('login.d', 'w') as f:
                    f.write(str(logins))
                conn.send('register_success')
def main():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('0.0.0.0', 1337))
sock.listen(5)
while True:
conn, addr = sock.accept()
login_loop(conn)
game(conn)
break
if __name__ == '__main__':
main()
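# --- Illustrative client sketch (not part of the original server) ---
# Shows the wire format login_loop expects: a single space-separated line of
# the form "<command> <username> <password>". Host, port and credentials below
# are assumptions for demonstration only; the port matches the one in main().
def _example_client(host='127.0.0.1', port=1337):
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.connect((host, port))
    client.send('login alice secret')      # or 'register alice secret'
    return client.recv(1024)               # e.g. 'login_success' / 'login_failure'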
|
vesche/HotC
|
old/server_proto.py
|
Python
|
unlicense
| 1,750
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2009 The Caffeine Developers
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from gi.repository import Gtk, GObject, Gio, Notify
import os
import os.path
import commands
import time
import sys
import dbus
import threading
import applicationinstance
import caffeine
import utils
import procmanager
import caffeinelogging as logging
import Xlib.display
#import kaa.metadata
os.chdir(os.path.abspath(os.path.dirname(__file__)))
class Caffeine(GObject.GObject):
def __init__(self):
GObject.GObject.__init__(self)
## object to manage processes to activate for.
self.ProcMan = caffeine.get_ProcManager()
## Status string.
self.status_string = ""
## Makes sure that only one instance of Caffeine is run for
## each user on the system.
self.pid_name = '/tmp/caffeine' + str(os.getuid()) + '.pid'
self.appInstance = applicationinstance.ApplicationInstance( self.pid_name )
## This variable is set to a string describing the type of screensaver and
## powersaving systems used on this computer. It is detected when the user
## first attempts to inhibit the screensaver and powersaving, and can be set
        ## to one of the following values: "Gnome3", "KDE", "XSS+DPMS" or "DPMS".
self.screensaverAndPowersavingType = None
# Set to True when the detection routine is in progress
self.attemptingToDetect = False
self.dbusDetectionTimer = None
self.dbusDetectionFailures = 0
# Set to True when sleep seems to be prevented from the perspective of the user.
# This does not necessarily mean that sleep really is prevented, because the
# detection routine could be in progress.
self.sleepAppearsPrevented = False
# Set to True when sleep mode has been successfully inhibited somehow. This should
# match up with "self.sleepAppearsPrevented" most of the time.
self.sleepIsPrevented = False
self.preventedForProcess = False
self.preventedForQL = False
self.preventedForFlash = False
self.screenSaverCookie = None
self.powerManagementCookie = None
self.timer = None
self.inhibit_id = None
self.note = None
## check for processes to activate for.
id = GObject.timeout_add(10000, self._check_for_process)
settings = Gio.Settings.new(caffeine.BASE_KEY)
## check for Quake Live.
self.ql_id = None
if settings.get_boolean("act-for-quake"):
self.setActivateForQL(True)
## check for Flash video.
self.flash_durations = {}
self.flash_id = None
if settings.get_boolean("act-for-flash"):
self.setActivateForFlash(True)
print self.status_string
def setActivateForFlash(self, do_activate):
## In case caffeine is currently activated for Flash
self._check_for_Flash()
if self.flash_id != None:
GObject.source_remove(self.flash_id)
self.flash_id = None
if do_activate:
self.flash_id = GObject.timeout_add(15000,
self._check_for_Flash)
def _check_for_Flash(self):
class escape(Exception):pass
try:
## look for files opened by flashplayer that begin with 'Flash'
output = commands.getoutput("python flash_detect.py")
if output.startswith("1"):
if self.preventedForFlash:
self.setActivated(False)
self.preventedForFlash = False
self.status_string = ""
raise escape
elif output.startswith("2\n"):
data = output.split("\n")[-1]
logging.error("Exception: " + str(data))
raise escape
if not self.getActivated():
logging.info("Caffeine has detected "+
"that Flash video is playing")
self.status_string = _("Activated for Flash video")
self.setActivated(True)
self.preventedForFlash = True
else:
logging.info("Caffeine has detected "+
"that Flash video is playing but will "+
"NOT activate because Caffeine is already "+
"activated for a different reason.")
return True
except escape:
pass
except Exception, data:
logging.error("Exception: " + str(data))
return True
def setActivateForQL(self, do_activate):
## In case caffeine is currently activated for QL
self._check_for_QL()
if self.ql_id != None:
GObject.source_remove(self.ql_id)
self.ql_id = None
if do_activate:
self.ql_id = GObject.timeout_add(15000, self._check_for_QL)
def _check_for_QL(self):
dsp = None
try:
dsp = Xlib.display.Display()
screen = dsp.screen()
root_win = screen.root
activate = False
## iterate through all of the X windows
for window in root_win.query_tree()._data['children']:
window_name = window.get_wm_name()
width = window.get_geometry()._data["width"]
height = window.get_geometry()._data["height"]
if window_name == "QuakeLive":
activate = True
if self.preventedForQL or not self.getActivated():
self.status_string = _("Activated for Quake Live")
logging.info("Caffeine has detected that 'QuakeLive' is running, and will auto-activate")
self.setActivated(True)
self.preventedForQL = True
if not activate and self.preventedForQL:
logging.info("Caffeine had previously auto-activated for QuakeLive, but it is no longer running; deactivating...")
self.setActivated(False)
except Exception, data:
logging.error("Exception: " + str(data))
finally:
if dsp != None:
dsp.close()
return True
def _check_for_process(self):
activate = False
for proc in self.ProcMan.get_process_list():
if utils.isProcessRunning(proc):
activate = True
if self.preventedForProcess or not self.getActivated():
logging.info("Caffeine has detected that the process '" + proc + "' is running, and will auto-activate")
self.setActivated(True)
self.preventedForProcess = True
else:
logging.info("Caffeine has detected that the process '"+
proc + "' is running, but will NOT auto-activate"+
" as Caffeine has already been activated for a different"+
" reason.")
### No process in the list is running, deactivate.
if not activate and self.preventedForProcess:
logging.info("Caffeine had previously auto-activated for a process, but that process is no longer running; deactivating...")
self.setActivated(False)
return True
def quit(self):
"""Cancels any timer thread running
so the program can quit right away.
"""
if self.timer:
self.timer.cancel()
if self.dbusDetectionTimer:
self.dbusDetectionTimer.cancel()
## The following four methods deal with adding the correct syntax
## for plural forms of time units. For example, 1 minute and 2
## minutes. Will be obsolete once the application is
## internationalized, as not all languages use "s" for plural form.
def _mconcat(self, base, sep, app):
return (base + sep + app if base else app) if app else base
def _spokenConcat(self, ls):
and_str = _(" and ")
txt, n = '', len(ls)
for w in ls[0:n-1]:
txt = self._mconcat(txt, ', ', w)
return self._mconcat(txt, and_str, ls[n-1])
def _pluralize(self, name, time):
names = [_('hour'), _('minute')]
if time < 1:
return ""
if name == "hour":
if time < 2:
return "%d %s" % (time, _("hour"))
if time >= 2:
return "%d %s" % (time, _("hours"))
elif name == "minute":
if time < 2:
return "%d %s" % (time, _("minute"))
if time >= 2:
return "%d %s" % (time, _("minutes"))
def _timeDisplay(self, sec):
hours = sec/3600
minutes = sec/60 % 60
ls = []
ls.append(self._pluralize("hour", hours))
ls.append(self._pluralize("minute", minutes))
string = self._spokenConcat(ls)
if not string:
string = "0 minutes"
return string
def _notify(self, message, icon, title="Caffeine"):
"""Easy way to use pynotify"""
try:
Notify.init("Caffeine")
if self.note:
self.note.update(title, message, icon)
else:
self.note = Notify.Notification(title, message, icon)
## Notify OSD doesn't seem to work when sleep is prevented
if self.screenSaverCookie != None and self.sleepIsPrevented:
self.ssProxy.UnInhibit(self.screenSaverCookie)
self.note.show()
if self.screenSaverCookie != None and self.sleepIsPrevented:
self.screenSaverCookie = self.ssProxy.Inhibit("Caffeine",
"User has requested that Caffeine disable the screen saver")
except Exception, e:
logging.error("Exception occurred:\n" + " " + str(e))
logging.error("Exception occurred attempting to display message:\n" + message)
finally:
return False
def getActivated(self):
return self.sleepAppearsPrevented
def timedActivation(self, time, note=True):
"""Calls toggleActivated after the number of seconds
specified by time has passed.
"""
message = (_("Timed activation set; ")+
_("Caffeine will prevent powersaving for the next ") +
self._timeDisplay(time))
logging.info("Timed activation set for " + self._timeDisplay(time))
if self.status_string == "":
self.status_string = _("Activated for ")+self._timeDisplay(time)
self.emit("activation-toggled", self.getActivated(),
self.status_string)
self.setActivated(True, note)
if note:
self._notify(message, caffeine.FULL_ICON_PATH)
## and deactivate after time has passed.
## Stop already running timer
if self.timer:
logging.info("Previous timed activation cancelled due to a second timed activation request (was set for " +
self._timeDisplay(self.timer.interval) + " or "+
str(time)+" seconds )")
self.timer.cancel()
self.timer = threading.Timer(time, self._deactivate, args=[note])
self.timer.name = "Active"
self.timer.start()
def _deactivate(self, note):
self.timer.name = "Expired"
self.toggleActivated(note=note)
def setActivated(self, activate, note=True):
if self.getActivated() != activate:
self.toggleActivated(note)
def toggleActivated(self, note=True):
"""This function toggles the inhibition of the screensaver and powersaving
        features of the current computer, detecting the type of screensaver and powersaving
in use, if it has not been detected already."""
self.preventedForProcess = False
self.preventedForQL = False
self.preventedForFlash = False
if self.sleepAppearsPrevented:
### sleep prevention was on now turn it off
self.sleepAppearsPrevented = False
logging.info("Caffeine is now dormant; powersaving is re-enabled")
self.status_string = _("Caffeine is dormant; powersaving is enabled")
# If the user clicks on the full coffee-cup to disable
# sleep prevention, it should also
# cancel the timer for timed activation.
if self.timer != None and self.timer.name != "Expired":
message = (_("Timed activation cancelled (was set for ") +
self._timeDisplay(self.timer.interval) + ")")
logging.info("Timed activation cancelled (was set for " +
self._timeDisplay(self.timer.interval) + ")")
if note:
self._notify(message, caffeine.EMPTY_ICON_PATH)
self.timer.cancel()
self.timer = None
elif self.timer != None and self.timer.name == "Expired":
message = (self._timeDisplay(self.timer.interval) +
_(" have elapsed; powersaving is re-enabled"))
logging.info("Timed activation period (" + self._timeDisplay(self.timer.interval) + ") has elapsed")
if note:
self._notify(message, caffeine.EMPTY_ICON_PATH)
self.timer = None
else:
self.sleepAppearsPrevented = True
self._performTogglingActions()
if self.status_string == "":
### Fixes bug #458847.
if self.screensaverAndPowersavingType != None:
self.status_string = (_("Caffeine is preventing powersaving modes and screensaver activation ")+"("+
self.screensaverAndPowersavingType + ")")
self.emit("activation-toggled", self.getActivated(),
self.status_string)
self.status_string = ""
def _detectScreensaverAndPowersavingType(self):
"""This method always runs when the first attempt to inhibit the screensaver and
powersaving is made. It detects what screensaver/powersaving software is running.
After detection is complete, it will finish the inhibiting process."""
logging.info("Attempting to detect screensaver/powersaving type... (" + str(self.dbusDetectionFailures) + " dbus failures so far)")
bus = dbus.SessionBus()
if 'org.gnome.SessionManager' in bus.list_names() and not utils.isProcessRunning("xscreensaver"):
self.screensaverAndPowersavingType = "Gnome3"
elif 'org.freedesktop.ScreenSaver' in bus.list_names() and \
'org.freedesktop.PowerManagement.Inhibit' in bus.list_names():
self.screensaverAndPowersavingType = "KDE"
else:
self.dbusDetectionFailures += 1
if self.dbusDetectionFailures <= 3:
self.dbusDetectionTimer = threading.Timer(10, self._detectScreensaverAndPowersavingType)
self.dbusDetectionTimer.start()
return
else:
# At this point, all attempts to connect to the relevant dbus interfaces have failed.
# This user must be using something other than the Gnome or KDE screensaver programs.
if utils.isProcessRunning("xscreensaver"):
self.screensaverAndPowersavingType = "XSS+DPMS"
else:
self.screensaverAndPowersavingType = "DPMS"
self.attemptingToDetect = False
self.dbusDetectionFailures = 0
self.dbusDetectionTimer = None
logging.info("Successfully detected screensaver and powersaving type: " + str(self.screensaverAndPowersavingType))
if self.sleepAppearsPrevented != self.sleepIsPrevented:
self._performTogglingActions()
def _performTogglingActions(self):
"""This method performs the actions that affect the screensaver and
powersaving."""
if self.screensaverAndPowersavingType == None:
if self.attemptingToDetect == False:
self.attemptingToDetect = True
self._detectScreensaverAndPowersavingType()
return
if self.screensaverAndPowersavingType == "Gnome":
self._toggleGnome()
if self.screensaverAndPowersavingType == "Gnome3":
self._toggleGnome3()
elif self.screensaverAndPowersavingType == "KDE":
self._toggleKDE()
elif self.screensaverAndPowersavingType == "XSS+DPMS":
self._toggleXSSAndDPMS()
elif self.screensaverAndPowersavingType == "DPMS":
self._toggleDPMS()
if self.sleepIsPrevented == False:
logging.info("Caffeine is now preventing powersaving modes"+
" and screensaver activation (" +
self.screensaverAndPowersavingType + ")")
self.sleepIsPrevented = not self.sleepIsPrevented
def _toggleGnome3(self):
"""Toggle the screensaver and powersaving with the interfaces used by Gnome 3."""
self._toggleDPMS()
bus = dbus.SessionBus()
self.susuProxy = bus.get_object('org.gnome.SessionManager', '/org/gnome/SessionManager')
if self.sleepIsPrevented:
if self.screenSaverCookie != None:
self.susuProxy.Uninhibit(self.screenSaverCookie)
else:
self.screenSaverCookie = self.susuProxy.Inhibit("Caffeine",dbus.UInt32(0),
"User has requested that Caffeine disable the screen saver",dbus.UInt32(8))
def _toggleKDE(self):
"""Toggle the screensaver and powersaving with the interfaces used by KDE."""
self._toggleDPMS()
bus = dbus.SessionBus()
self.ssProxy = bus.get_object(
'org.freedesktop.ScreenSaver', '/ScreenSaver')
pmProxy = bus.get_object(
'org.freedesktop.PowerManagement.Inhibit',
'/org/freedesktop/PowerManagement/Inhibit')
if self.sleepIsPrevented:
if self.screenSaverCookie != None:
self.ssProxy.UnInhibit(self.screenSaverCookie)
if self.powerManagementCookie != None:
pmProxy.UnInhibit(self.powerManagementCookie)
else:
self.powerManagementCookie = pmProxy.Inhibit("Caffeine",
"User has requested that Caffeine disable"+
" the powersaving modes")
self.screenSaverCookie = self.ssProxy.Inhibit("Caffeine",
"User has requested that Caffeine disable the screen saver")
def _toggleXSSAndDPMS(self):
self._toggleXSS()
self._toggleDPMS()
def _toggleDPMS(self):
"""Toggle the DPMS powersaving subsystem."""
if self.sleepIsPrevented:
commands.getoutput("xset +dpms")
commands.getoutput("xset s on")
else:
commands.getoutput("xset -dpms")
commands.getoutput("xset s off")
def _toggleXSS(self):
"""Toggle whether XScreensaver is activated (powersaving is unaffected)"""
if self.sleepIsPrevented:
### sleep prevention was on now turn it off
# If the user clicks on the full coffee-cup to disable
# sleep prevention, it should also
# cancel the timer for timed activation.
if self.inhibit_id != None:
GObject.source_remove(self.inhibit_id)
else:
def deactivate():
try:
output = commands.getoutput(
"xscreensaver-command -deactivate")
except Exception, data:
                    logging.error("Exception occurred:\n" + str(data))
return True
# reset the idle timer every 50 seconds.
self.inhibit_id = GObject.timeout_add(50000, deactivate)
## register a signal
GObject.signal_new("activation-toggled", Caffeine,
GObject.SignalFlags.RUN_FIRST, None, [bool, str])
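# --- Illustrative sketch (not part of the original module) ---
# The bare D-Bus Inhibit/Uninhibit pattern that _toggleGnome3 relies on, shown
# outside the class. Flag value 8 asks the GNOME session manager not to mark
# the session idle; the returned cookie is needed to release the inhibit later.
def _example_gnome3_inhibit(reason="Example inhibit"):
    bus = dbus.SessionBus()
    proxy = bus.get_object('org.gnome.SessionManager', '/org/gnome/SessionManager')
    cookie = proxy.Inhibit("Caffeine", dbus.UInt32(0), reason, dbus.UInt32(8))
    return proxy, cookie    # call proxy.Uninhibit(cookie) to re-enable idle handling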
|
ashh87/caffeine
|
caffeine/core.py
|
Python
|
gpl-3.0
| 20,950
| 0.008497
|
# Copyright (C) 2003-2005 Peter J. Verveer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import division, print_function, absolute_import
import math
import numpy
from . import _ni_support
from . import _nd_image
__all__ = ['spline_filter1d', 'spline_filter', 'geometric_transform',
'map_coordinates', 'affine_transform', 'shift', 'zoom', 'rotate']
def _extend_mode_to_code(mode):
mode = _ni_support._extend_mode_to_code(mode)
return mode
def spline_filter1d(input, order=3, axis=-1, output=numpy.float64):
"""
Calculates a one-dimensional spline filter along the given axis.
The lines of the array along the given axis are filtered by a
spline filter. The order of the spline must be >= 2 and <= 5.
Parameters
----------
input : array_like
The input array.
order : int, optional
The order of the spline, default is 3.
axis : int, optional
The axis along which the spline filter is applied. Default is the last
axis.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array. Default is `numpy.float64`.
Returns
-------
spline_filter1d : ndarray or None
The filtered input. If `output` is given as a parameter, None is
returned.
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
output, return_value = _ni_support._get_output(output, input)
if order in [0, 1]:
output[...] = numpy.array(input)
else:
axis = _ni_support._check_axis(axis, input.ndim)
_nd_image.spline_filter1d(input, order, axis, output)
return return_value
def spline_filter(input, order=3, output = numpy.float64):
"""
Multi-dimensional spline filter.
For more details, see `spline_filter1d`.
See Also
--------
spline_filter1d
Notes
-----
The multi-dimensional filter is implemented as a sequence of
one-dimensional spline filters. The intermediate arrays are stored
in the same data type as the output. Therefore, for output types
with a limited precision, the results may be imprecise because
intermediate results may be stored with insufficient precision.
"""
if order < 2 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
output, return_value = _ni_support._get_output(output, input)
if order not in [0, 1] and input.ndim > 0:
for axis in range(input.ndim):
spline_filter1d(input, order, axis, output = output)
input = output
else:
output[...] = input[...]
return return_value
def geometric_transform(input, mapping, output_shape=None,
output=None, order=3,
mode='constant', cval=0.0, prefilter=True,
extra_arguments=(), extra_keywords={}):
"""
    Apply an arbitrary geometric transform.
The given mapping function is used to find, for each point in the
output, the corresponding coordinates in the input. The value of the
input at those coordinates is determined by spline interpolation of
the requested order.
Parameters
----------
input : array_like
The input array.
mapping : callable
A callable object that accepts a tuple of length equal to the output
array rank, and returns the corresponding input coordinates as a tuple
of length equal to the input array rank.
output_shape : tuple of ints
Shape tuple.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : str, optional
Points outside the boundaries of the input are filled according
to the given mode ('constant', 'nearest', 'reflect' or 'wrap').
Default is 'constant'.
cval : scalar, optional
Value used for points outside the boundaries of the input if
``mode='constant'``. Default is 0.0
prefilter : bool, optional
The parameter prefilter determines if the input is pre-filtered with
`spline_filter` before interpolation (necessary for spline
interpolation of order > 1). If False, it is assumed that the input is
already filtered. Default is True.
extra_arguments : tuple, optional
Extra arguments passed to `mapping`.
extra_keywords : dict, optional
Extra keywords passed to `mapping`.
Returns
-------
return_value : ndarray or None
The filtered input. If `output` is given as a parameter, None is
returned.
See Also
--------
map_coordinates, affine_transform, spline_filter1d
Examples
--------
>>> a = np.arange(12.).reshape((4, 3))
>>> def shift_func(output_coords):
... return (output_coords[0] - 0.5, output_coords[1] - 0.5)
...
>>> sp.ndimage.geometric_transform(a, shift_func)
array([[ 0. , 0. , 0. ],
[ 0. , 1.362, 2.738],
[ 0. , 4.812, 6.187],
[ 0. , 8.263, 9.637]])
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
if output_shape is None:
output_shape = input.shape
if input.ndim < 1 or len(output_shape) < 1:
raise RuntimeError('input and output rank must be > 0')
mode = _extend_mode_to_code(mode)
if prefilter and order > 1:
filtered = spline_filter(input, order, output = numpy.float64)
else:
filtered = input
output, return_value = _ni_support._get_output(output, input,
shape=output_shape)
_nd_image.geometric_transform(filtered, mapping, None, None, None,
output, order, mode, cval, extra_arguments, extra_keywords)
return return_value
def map_coordinates(input, coordinates, output=None, order=3,
mode='constant', cval=0.0, prefilter=True):
"""
Map the input array to new coordinates by interpolation.
The array of coordinates is used to find, for each point in the output,
the corresponding coordinates in the input. The value of the input at
those coordinates is determined by spline interpolation of the
requested order.
The shape of the output is derived from that of the coordinate
array by dropping the first axis. The values of the array along
the first axis are the coordinates in the input array at which the
output value is found.
Parameters
----------
input : ndarray
The input array.
coordinates : array_like
The coordinates at which `input` is evaluated.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : str, optional
Points outside the boundaries of the input are filled according
to the given mode ('constant', 'nearest', 'reflect' or 'wrap').
Default is 'constant'.
cval : scalar, optional
Value used for points outside the boundaries of the input if
``mode='constant'``. Default is 0.0
prefilter : bool, optional
The parameter prefilter determines if the input is pre-filtered with
`spline_filter` before interpolation (necessary for spline
interpolation of order > 1). If False, it is assumed that the input is
already filtered. Default is True.
Returns
-------
map_coordinates : ndarray
The result of transforming the input. The shape of the output is
derived from that of `coordinates` by dropping the first axis.
See Also
--------
spline_filter, geometric_transform, scipy.interpolate
Examples
--------
>>> from scipy import ndimage
>>> a = np.arange(12.).reshape((4, 3))
>>> a
array([[ 0., 1., 2.],
[ 3., 4., 5.],
[ 6., 7., 8.],
[ 9., 10., 11.]])
>>> ndimage.map_coordinates(a, [[0.5, 2], [0.5, 1]], order=1)
[ 2. 7.]
Above, the interpolated value of a[0.5, 0.5] gives output[0], while
a[2, 1] is output[1].
>>> inds = np.array([[0.5, 2], [0.5, 4]])
>>> ndimage.map_coordinates(a, inds, order=1, cval=-33.3)
array([ 2. , -33.3])
>>> ndimage.map_coordinates(a, inds, order=1, mode='nearest')
array([ 2., 8.])
>>> ndimage.map_coordinates(a, inds, order=1, cval=0, output=bool)
    array([ True, False], dtype=bool)
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
coordinates = numpy.asarray(coordinates)
if numpy.iscomplexobj(coordinates):
raise TypeError('Complex type not supported')
output_shape = coordinates.shape[1:]
if input.ndim < 1 or len(output_shape) < 1:
raise RuntimeError('input and output rank must be > 0')
if coordinates.shape[0] != input.ndim:
raise RuntimeError('invalid shape for coordinate array')
mode = _extend_mode_to_code(mode)
if prefilter and order > 1:
filtered = spline_filter(input, order, output = numpy.float64)
else:
filtered = input
output, return_value = _ni_support._get_output(output, input,
shape=output_shape)
_nd_image.geometric_transform(filtered, None, coordinates, None, None,
output, order, mode, cval, None, None)
return return_value
def affine_transform(input, matrix, offset=0.0, output_shape=None,
output=None, order=3,
mode='constant', cval=0.0, prefilter=True):
"""
Apply an affine transformation.
The given matrix and offset are used to find for each point in the
output the corresponding coordinates in the input by an affine
transformation. The value of the input at those coordinates is
determined by spline interpolation of the requested order. Points
outside the boundaries of the input are filled according to the given
mode.
Parameters
----------
input : ndarray
The input array.
matrix : ndarray
The matrix must be two-dimensional or can also be given as a
one-dimensional sequence or array. In the latter case, it is assumed
        that the matrix is diagonal. A more efficient algorithm is then
applied that exploits the separability of the problem.
offset : float or sequence, optional
The offset into the array where the transform is applied. If a float,
`offset` is the same for each axis. If a sequence, `offset` should
contain one value for each axis.
output_shape : tuple of ints, optional
Shape tuple.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : str, optional
Points outside the boundaries of the input are filled according
to the given mode ('constant', 'nearest', 'reflect' or 'wrap').
Default is 'constant'.
cval : scalar, optional
Value used for points outside the boundaries of the input if
``mode='constant'``. Default is 0.0
prefilter : bool, optional
The parameter prefilter determines if the input is pre-filtered with
`spline_filter` before interpolation (necessary for spline
interpolation of order > 1). If False, it is assumed that the input is
already filtered. Default is True.
Returns
-------
affine_transform : ndarray or None
The transformed input. If `output` is given as a parameter, None is
returned.
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
if output_shape is None:
output_shape = input.shape
if input.ndim < 1 or len(output_shape) < 1:
raise RuntimeError('input and output rank must be > 0')
mode = _extend_mode_to_code(mode)
if prefilter and order > 1:
filtered = spline_filter(input, order, output = numpy.float64)
else:
filtered = input
output, return_value = _ni_support._get_output(output, input,
shape=output_shape)
matrix = numpy.asarray(matrix, dtype = numpy.float64)
if matrix.ndim not in [1, 2] or matrix.shape[0] < 1:
raise RuntimeError('no proper affine matrix provided')
if matrix.shape[0] != input.ndim:
raise RuntimeError('affine matrix has wrong number of rows')
if matrix.ndim == 2 and matrix.shape[1] != output.ndim:
raise RuntimeError('affine matrix has wrong number of columns')
if not matrix.flags.contiguous:
matrix = matrix.copy()
offset = _ni_support._normalize_sequence(offset, input.ndim)
offset = numpy.asarray(offset, dtype = numpy.float64)
if offset.ndim != 1 or offset.shape[0] < 1:
raise RuntimeError('no proper offset provided')
if not offset.flags.contiguous:
offset = offset.copy()
if matrix.ndim == 1:
_nd_image.zoom_shift(filtered, matrix, offset, output, order,
mode, cval)
else:
_nd_image.geometric_transform(filtered, None, None, matrix, offset,
output, order, mode, cval, None, None)
return return_value
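# --- Illustrative sketch (not part of the original module) ---
# A diagonal (1-D) matrix takes the separable zoom/shift fast path of
# affine_transform: with zero offset, output pixel (i, j) is sampled from input
# position (0.5 * i, 0.5 * j), i.e. a 2x magnified view of the top-left corner.
def _example_affine_diagonal():
    a = numpy.arange(12.).reshape((4, 3))
    return affine_transform(a, [0.5, 0.5], order=1)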
def shift(input, shift, output=None, order=3, mode='constant', cval=0.0,
prefilter=True):
"""
Shift an array.
The array is shifted using spline interpolation of the requested order.
Points outside the boundaries of the input are filled according to the
given mode.
Parameters
----------
input : ndarray
The input array.
    shift : float or sequence
The shift along the axes. If a float, `shift` is the same for each
axis. If a sequence, `shift` should contain one value for each axis.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : str, optional
Points outside the boundaries of the input are filled according
to the given mode ('constant', 'nearest', 'reflect' or 'wrap').
Default is 'constant'.
cval : scalar, optional
Value used for points outside the boundaries of the input if
``mode='constant'``. Default is 0.0
prefilter : bool, optional
The parameter prefilter determines if the input is pre-filtered with
`spline_filter` before interpolation (necessary for spline
interpolation of order > 1). If False, it is assumed that the input is
already filtered. Default is True.
Returns
-------
shift : ndarray or None
The shifted input. If `output` is given as a parameter, None is
returned.
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
if input.ndim < 1:
raise RuntimeError('input and output rank must be > 0')
mode = _extend_mode_to_code(mode)
if prefilter and order > 1:
filtered = spline_filter(input, order, output = numpy.float64)
else:
filtered = input
output, return_value = _ni_support._get_output(output, input)
shift = _ni_support._normalize_sequence(shift, input.ndim)
shift = [-ii for ii in shift]
shift = numpy.asarray(shift, dtype = numpy.float64)
if not shift.flags.contiguous:
shift = shift.copy()
_nd_image.zoom_shift(filtered, None, shift, output, order, mode, cval)
return return_value
def zoom(input, zoom, output=None, order=3, mode='constant', cval=0.0,
prefilter=True):
"""
Zoom an array.
The array is zoomed using spline interpolation of the requested order.
Parameters
----------
input : ndarray
The input array.
    zoom : float or sequence
The zoom factor along the axes. If a float, `zoom` is the same for each
axis. If a sequence, `zoom` should contain one value for each axis.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : str, optional
Points outside the boundaries of the input are filled according
to the given mode ('constant', 'nearest', 'reflect' or 'wrap').
Default is 'constant'.
cval : scalar, optional
Value used for points outside the boundaries of the input if
``mode='constant'``. Default is 0.0
prefilter : bool, optional
The parameter prefilter determines if the input is pre-filtered with
`spline_filter` before interpolation (necessary for spline
interpolation of order > 1). If False, it is assumed that the input is
already filtered. Default is True.
Returns
-------
zoom : ndarray or None
The zoomed input. If `output` is given as a parameter, None is
returned.
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
if input.ndim < 1:
raise RuntimeError('input and output rank must be > 0')
mode = _extend_mode_to_code(mode)
if prefilter and order > 1:
filtered = spline_filter(input, order, output = numpy.float64)
else:
filtered = input
zoom = _ni_support._normalize_sequence(zoom, input.ndim)
output_shape = tuple([int(ii * jj) for ii, jj in zip(input.shape, zoom)])
zoom_div = numpy.array(output_shape, float) - 1
zoom = (numpy.array(input.shape) - 1) / zoom_div
    # Zooming to non-finite values is unpredictable, so just choose
# zoom factor 1 instead
zoom[~numpy.isfinite(zoom)] = 1
output, return_value = _ni_support._get_output(output, input,
shape=output_shape)
zoom = numpy.asarray(zoom, dtype = numpy.float64)
zoom = numpy.ascontiguousarray(zoom)
_nd_image.zoom_shift(filtered, zoom, None, output, order, mode, cval)
return return_value
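# --- Illustrative sketch (not part of the original module) ---
# The output shape of zoom is the input shape scaled by the zoom factor and
# truncated to int, so a (4, 3) array zoomed by 2 becomes (8, 6).
def _example_zoom_shape():
    a = numpy.arange(12.).reshape((4, 3))
    return zoom(a, 2, order=1).shape    # (8, 6)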
def _minmax(coor, minc, maxc):
if coor[0] < minc[0]:
minc[0] = coor[0]
if coor[0] > maxc[0]:
maxc[0] = coor[0]
if coor[1] < minc[1]:
minc[1] = coor[1]
if coor[1] > maxc[1]:
maxc[1] = coor[1]
return minc, maxc
def rotate(input, angle, axes=(1, 0), reshape=True,
output=None, order=3,
mode='constant', cval=0.0, prefilter=True):
"""
Rotate an array.
The array is rotated in the plane defined by the two axes given by the
`axes` parameter using spline interpolation of the requested order.
Parameters
----------
input : ndarray
The input array.
angle : float
The rotation angle in degrees.
axes : tuple of 2 ints, optional
The two axes that define the plane of rotation. Default is the first
two axes.
reshape : bool, optional
If `reshape` is true, the output shape is adapted so that the input
array is contained completely in the output. Default is True.
output : ndarray or dtype, optional
The array in which to place the output, or the dtype of the returned
array.
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
mode : str, optional
Points outside the boundaries of the input are filled according
to the given mode ('constant', 'nearest', 'reflect' or 'wrap').
Default is 'constant'.
cval : scalar, optional
Value used for points outside the boundaries of the input if
``mode='constant'``. Default is 0.0
prefilter : bool, optional
The parameter prefilter determines if the input is pre-filtered with
`spline_filter` before interpolation (necessary for spline
interpolation of order > 1). If False, it is assumed that the input is
already filtered. Default is True.
Returns
-------
rotate : ndarray or None
The rotated input. If `output` is given as a parameter, None is
returned.
"""
input = numpy.asarray(input)
axes = list(axes)
rank = input.ndim
if axes[0] < 0:
axes[0] += rank
if axes[1] < 0:
axes[1] += rank
if axes[0] < 0 or axes[1] < 0 or axes[0] > rank or axes[1] > rank:
raise RuntimeError('invalid rotation plane specified')
if axes[0] > axes[1]:
axes = axes[1], axes[0]
angle = numpy.pi / 180 * angle
m11 = math.cos(angle)
m12 = math.sin(angle)
m21 = -math.sin(angle)
m22 = math.cos(angle)
matrix = numpy.array([[m11, m12],
[m21, m22]], dtype = numpy.float64)
iy = input.shape[axes[0]]
ix = input.shape[axes[1]]
if reshape:
mtrx = numpy.array([[ m11, -m21],
[-m12, m22]], dtype = numpy.float64)
minc = [0, 0]
maxc = [0, 0]
coor = numpy.dot(mtrx, [0, ix])
minc, maxc = _minmax(coor, minc, maxc)
coor = numpy.dot(mtrx, [iy, 0])
minc, maxc = _minmax(coor, minc, maxc)
coor = numpy.dot(mtrx, [iy, ix])
minc, maxc = _minmax(coor, minc, maxc)
oy = int(maxc[0] - minc[0] + 0.5)
ox = int(maxc[1] - minc[1] + 0.5)
else:
oy = input.shape[axes[0]]
ox = input.shape[axes[1]]
offset = numpy.zeros((2,), dtype = numpy.float64)
offset[0] = float(oy) / 2.0 - 0.5
offset[1] = float(ox) / 2.0 - 0.5
offset = numpy.dot(matrix, offset)
tmp = numpy.zeros((2,), dtype = numpy.float64)
tmp[0] = float(iy) / 2.0 - 0.5
tmp[1] = float(ix) / 2.0 - 0.5
offset = tmp - offset
output_shape = list(input.shape)
output_shape[axes[0]] = oy
output_shape[axes[1]] = ox
output_shape = tuple(output_shape)
output, return_value = _ni_support._get_output(output, input,
shape=output_shape)
if input.ndim <= 2:
affine_transform(input, matrix, offset, output_shape, output,
order, mode, cval, prefilter)
else:
coordinates = []
size = numpy.product(input.shape,axis=0)
size //= input.shape[axes[0]]
size //= input.shape[axes[1]]
for ii in range(input.ndim):
if ii not in axes:
coordinates.append(0)
else:
coordinates.append(slice(None, None, None))
iter_axes = list(range(input.ndim))
iter_axes.reverse()
iter_axes.remove(axes[0])
iter_axes.remove(axes[1])
os = (output_shape[axes[0]], output_shape[axes[1]])
for ii in range(size):
ia = input[tuple(coordinates)]
oa = output[tuple(coordinates)]
affine_transform(ia, matrix, offset, os, oa, order, mode,
cval, prefilter)
for jj in iter_axes:
if coordinates[jj] < input.shape[jj] - 1:
coordinates[jj] += 1
break
else:
coordinates[jj] = 0
return return_value
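# --- Illustrative sketch (not part of the original module) ---
# With reshape=True (the default) the output of rotate grows so the whole
# rotated input fits; with reshape=False the output keeps the input shape and
# points falling outside the input are filled according to `mode`.
def _example_rotate_shapes():
    a = numpy.arange(12.).reshape((4, 3))
    grown = rotate(a, 45)                     # output shape is larger than (4, 3)
    clipped = rotate(a, 45, reshape=False)    # output shape stays (4, 3)
    return grown.shape, clipped.shape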
|
sargas/scipy
|
scipy/ndimage/interpolation.py
|
Python
|
bsd-3-clause
| 25,990
| 0.001578
|
import pygame
from PacManMap import *
class MakeGraph:
def __init__(self):
self.shortest_path_from_one_to_other = {}
self.nodes = self.find_nodes()
def get_shortest_path(self):
return self.shortest_path_from_one_to_other
def get_nodes(self):
return self.nodes
def find_nodes(self):
nodes = []
for row_n in range(1, len(Map) - 1):
for col_n in range(2, len(Map[0]) - 1):
if (Map[row_n][col_n] != 0 and Map[row_n][col_n + 1] != 0 and
Map[row_n][col_n - 1] != 0):
if ((row_n > 0 and Map[row_n - 1][col_n] != 0) or
(row_n < len(Map[0]) - 2 and Map[row_n + 1][col_n] != 0)):
nodes.append((row_n, col_n))
Map[row_n][col_n] = 3
Map1 = list(zip(*Map))
for row_n in range(1, len(Map1) - 1):
for col_n in range(2, len(Map1[0]) - 1):
if (Map1[row_n][col_n] != 0 and Map1[row_n][col_n + 1] != 0 and
Map1[row_n][col_n - 1] != 0):
if ((row_n > 0 and Map1[row_n - 1][col_n] != 0) or
(row_n < len(Map1[0]) - 2 and Map1[row_n + 1][col_n] != 0)):
nodes.append((col_n, row_n))
Map[col_n][row_n] = 3
return nodes
def is_p_vertex(self, vertex):
if ((vertex[0] < 0 or vertex[0] >= len(Map)) or
(vertex[1] < 0 or vertex[1] >= len(Map[0]))):
return False
if Map[vertex[0]][vertex[1]] == 0:
return False
return True
def bfs(self, vertex):
Path_all_in_Matrix = {}
Path_all_in_Matrix[vertex] = vertex
Path_to_Nodes = {}
Path_to_Nodes[vertex] = vertex
queue = [vertex]
Visited = [vertex]
all_Nodes = self.find_nodes()
all_Nodes.remove(vertex)
while queue != []:
new_v = queue.pop(0)
new_v_adj = [(new_v[0] - 1, new_v[1]),
(new_v[0] + 1, new_v[1]),
(new_v[0], new_v[1] - 1),
(new_v[0], new_v[1] + 1)]
if new_v in all_Nodes:
full_path = [new_v]
temp_v = new_v
while Path_all_in_Matrix[temp_v] != vertex:
full_path.append(Path_all_in_Matrix[temp_v])
temp_v = Path_all_in_Matrix[temp_v]
full_path.reverse()
temp_full = []
for i in full_path:
if i in all_Nodes:
temp_full.append(i)
break
temp_full.append(i)
Path_to_Nodes[new_v] = temp_full
all_Nodes.remove(new_v)
for v_adj in new_v_adj:
if self.is_p_vertex(v_adj) and v_adj not in Visited:
queue.append(v_adj)
Path_all_in_Matrix[v_adj] = new_v
Visited.append(v_adj)
return Path_to_Nodes
def make_all_paths(self):
all_Nodes = self.find_nodes()
for node in all_Nodes:
self.shortest_path_from_one_to_other[node] = self.bfs(node)
return self.shortest_path_from_one_to_other
def draw_shortest_path(self, screen, v1, v2):
if not self.shortest_path_from_one_to_other:
self.make_all_paths()
        l = self.shortest_path_from_one_to_other[v1][v2]
        full = list(l)  # copy so the cached path list is not mutated by `full += l` below
while l[-1] != v2:
print(l)
l = self.shortest_path_from_one_to_other[full[-1]][v2]
full += l
# print (full)
for node in full:
# print(node)
pygame.draw.rect(screen, (0, 255, 0),
(node[1] * MOVE, node[0] * MOVE, 23, 23))
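# --- Illustrative sketch (not part of the original module) ---
# The parent-pointer backtracking used inside MakeGraph.bfs, on its own: BFS
# records each vertex's predecessor, and a path is rebuilt by walking the
# predecessors back to the start and reversing. `parents` maps vertex -> predecessor.
def _example_backtrack(parents, start, goal):
    path = [goal]
    while path[-1] != start:
        path.append(parents[path[-1]])
    path.reverse()
    return path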
|
Yordan92/Pac-man-multiplayer
|
MakeGraph.py
|
Python
|
gpl-3.0
| 3,015
| 0.038143
|
def cyclegesture2():
##for x in range(5):
welcome()
sleep(1)
relax()
sleep(2)
fingerright()
sleep(1)
isitaball()
sleep(2)
removeleftarm()
sleep(2)
handdown()
sleep(1)
fullspeed()
i01.giving()
sleep(5)
removeleftarm()
sleep(4)
takeball()
sleep(1)
surrender()
sleep(6)
isitaball()
sleep(6)
dropit()
sleep(2)
removeleftarm()
sleep(5)
relax()
    sleep(1)  # delay in seconds (value assumed)
fullspeed()
sleep(5)
madeby()
relax()
sleep(5)
i01.disable()
|
MyRobotLab/pyrobotlab
|
home/kwatters/harry/gestures/cyclegesture2.py
|
Python
|
apache-2.0
| 481
| 0.079002
|
import os
from setuptools import setup, find_packages
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
from watermarker import __version__
setup(
name='django-watermark',
version=__version__,
packages=find_packages(exclude=['example']),
include_package_data=True,
license='BSD License',
description="Quick and efficient way to apply watermarks to images in Django.",
long_description=README,
keywords='django, watermark, image, photo, logo',
url='http://github.com/bashu/django-watermark/',
author='Josh VanderLinden',
author_email='codekoala@gmail.com',
maintainer='Basil Shubin',
maintainer_email='basil.shubin@gmail.com',
install_requires=[
'django>=1.4',
'django-appconf',
'pillow',
'six',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Artistic Software',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics'
],
zip_safe=False
)
|
lzanuz/django-watermark
|
setup.py
|
Python
|
bsd-3-clause
| 1,660
| 0.001205
|
# Copyright (c) 2015-2020 Contributors as noted in the AUTHORS file
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# System imports
import json
import logging
import re
import uuid
from threading import Event
# Third-party imports
from pyre import Pyre
# Local imports
from ..tools import zmq, green # , spy_call, w_spy_call, spy_object
logger = logging.getLogger(__name__)
class PyreNode(Pyre):
def __init__(self, *args, **kwargs):
# spy_object(self, class_=Pyre, except_=['name', 'uuid'], with_caller=False)
# spy_call(self.__init__, args, kwargs, with_caller=False); print
self._name = None
self._uuid = None
super(self.__class__, self).__init__(*args, **kwargs)
self.request_results = {} # TODO: Fuse the two dicts
self.request_events = {}
self.poller = zmq.Poller()
self.poller.register(self.inbox, zmq.POLLIN)
self.join('SURVEY')
def run(self):
self.task = green.spawn(self._run, 100)
def _run(self, timeout=None):
self._running = True
self.start()
while self._running:
try:
# logger.debug('Polling')
items = dict(self.poller.poll(timeout))
# logger.debug('polled out: %s, %s', len(items), items)
while len(items) > 0:
for fd, ev in items.items():
if (self.inbox == fd) and (ev == zmq.POLLIN):
self._process_message()
# logger.debug('quick polling')
items = dict(self.poller.poll(0))
# logger.debug('qpoll: %s, %s', len(items), items)
except (KeyboardInterrupt, SystemExit):
logger.debug('(%s) KeyboardInterrupt or SystemExit', self.name())
break
logger.debug('(%s) Exiting loop and stopping', self.name())
self.stop()
def _process_message(self):
logger.debug('(%s) processing message', self.name())
msg = self.recv()
logger.debug('(%s) received stuff: %s', self.name(), msg)
msg_type = msg.pop(0)
logger.debug('(%s) msg_type: %s', self.name(), msg_type)
peer_id = uuid.UUID(bytes=msg.pop(0))
logger.debug('(%s) peer_id: %s', self.name(), peer_id)
peer_name = msg.pop(0)
logger.debug('(%s) peer_name: %s', self.name(), peer_name)
if msg_type == b'ENTER':
self.on_peer_enter(peer_id, peer_name, msg)
elif msg_type == b'EXIT':
self.on_peer_exit(peer_id, peer_name, msg)
elif msg_type == b'SHOUT':
self.on_peer_shout(peer_id, peer_name, msg)
elif msg_type == b'WHISPER':
self.on_peer_whisper(peer_id, peer_name, msg)
def on_peer_enter(self, peer_id, peer_name, msg):
logger.debug('(%s) ZRE ENTER: %s, %s', self.name(), peer_name, peer_id)
pub_endpoint = self.get_peer_endpoint(peer_id, 'pub')
rpc_endpoint = self.get_peer_endpoint(peer_id, 'rpc')
self.on_new_peer(peer_id, peer_name, pub_endpoint, rpc_endpoint)
def on_new_peer(self, peer_id, peer_name, pub_endpoint, rpc_endpoint):
pass
def on_peer_exit(self, peer_id, peer_name, msg):
logger.debug('(%s) ZRE EXIT: %s, %s', self.name(), peer_name, peer_id)
self.on_peer_gone(peer_id, peer_name)
def on_peer_gone(self, peer_id, peer_name):
pass
def on_peer_shout(self, peer_id, peer_name, msg):
group = msg.pop(0)
data = msg.pop(0)
logger.debug('(%s) ZRE SHOUT: %s, %s > (%s) %s',
self.name(), peer_name, peer_id, group, data)
if group == b'SURVEY':
self.on_survey(peer_id, peer_name, json.loads(data))
elif group == b'EVENT':
self.on_event(peer_id, peer_name, json.loads(data))
def on_survey(self, peer_id, peer_name, request):
pass
def on_event(self, peer_id, peer_name, request):
pass
def on_peer_whisper(self, peer_id, peer_name, msg):
logger.debug('(%s) ZRE WHISPER: %s, %s > %s', self.name(), peer_name, peer_id, msg)
reply = json.loads(msg[0])
if reply['req_id'] in self.request_results:
logger.debug('(%s) Received reply from %s: %s', self.name(), peer_name, reply['data'])
self.request_results[reply['req_id']].append((peer_name, reply['data']))
ev, limit_peers = self.request_events[reply['req_id']]
if limit_peers and (len(self.request_results[reply['req_id']]) >= limit_peers):
ev.set()
green.sleep(0) # Yield
else:
logger.warning(
'(%s) Discarding reply from %s because the request ID is unknown',
self.name(), peer_name
)
def get_peer_endpoint(self, peer, prefix):
pyre_endpoint = self.peer_address(peer)
ip = re.search('.*://(.*):.*', pyre_endpoint).group(1)
return '%s://%s:%s' % (
self.peer_header_value(peer, prefix + '_proto'),
ip,
self.peer_header_value(peer, prefix + '_port')
)
def join_event(self):
self.join('EVENT')
def leave_event(self):
self.leave('EVENT')
def send_survey(self, request, timeout, limit_peers):
# request['req_id'] = ('%x' % randint(0, 0xFFFFFFFF)).encode()
self.request_results[request['req_id']] = []
ev = Event()
self.request_events[request['req_id']] = (ev, limit_peers)
self.shout('SURVEY', json.dumps(request).encode())
ev.wait(timeout)
result = self.request_results[request['req_id']]
del self.request_results[request['req_id']]
del self.request_events[request['req_id']]
return result
def send_event(self, request):
self.shout('EVENT', json.dumps(request).encode())
def reply_survey(self, peer_id, reply):
self.whisper(peer_id, json.dumps(reply).encode())
def shutdown(self):
self._running = False
def name(self):
if self._name is None:
# f = w_spy_call(super(self.__class__, self).name, with_caller=False)
f = super(self.__class__, self).name
self._name = f()
return self._name
def uuid(self):
if self._uuid is None:
# f = w_spy_call(super(self.__class__, self).uuid, with_caller=False)
f = super(self.__class__, self).uuid
self._uuid = f()
return self._uuid
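# --- Illustrative sketch (not part of the original module) ---
# The survey round-trip in miniature: the surveyor shouts a JSON request that
# carries a 'req_id', peers whisper back replies echoing that 'req_id', and
# send_survey() collects them until `limit_peers` replies arrive or the timeout
# expires. The payload below is an assumption for demonstration only.
def _example_survey(node, limit_peers=1):
    request = {'req_id': uuid.uuid4().hex, 'data': 'ping'}
    return node.send_survey(request, timeout=2.0, limit_peers=limit_peers)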
|
Alidron/alidron-isac
|
isac/transport/pyre_node.py
|
Python
|
mpl-2.0
| 6,714
| 0.00134
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import json
import os
import six
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.client import session as session_lib
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import template
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import adam
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import momentum
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import training_util
from tensorflow.python.training.checkpointable import base
from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
class NonLayerCheckpointable(tracking.Checkpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
self.a_variable = checkpointable_utils.add_variable(
self, name="a_variable", shape=[])
# pylint: disable=not-callable
class MyModel(training.Model):
"""A concrete Model for testing."""
def __init__(self):
super(MyModel, self).__init__()
self._named_dense = core.Dense(1, use_bias=True)
self._second = core.Dense(1, use_bias=False)
# We can still track Checkpointables which aren't Layers.
self._non_layer = NonLayerCheckpointable()
def call(self, values):
ret = self._second(self._named_dense(values))
return ret
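# Note on naming: attribute names on MyModel become path components in
# object-based checkpoint keys. For example, when an instance is attached to a
# Checkpoint as `model=...`, the kernel of `_named_dense` is saved under
# "model/_named_dense/kernel/.ATTRIBUTES/VARIABLE_VALUE", which is what
# CheckpointingTests.testNamingWithOptimizer below asserts.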
class InterfaceTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testAddVariable(self):
obj = NonLayerCheckpointable()
with self.assertRaisesRegexp(ValueError, "do not specify shape"):
checkpointable_utils.add_variable(
obj, name="shape_specified_twice", shape=[], initializer=1)
constant_initializer = checkpointable_utils.add_variable(
obj, name="constant_initializer", initializer=1)
with variable_scope.variable_scope("some_variable_scope"):
ones_initializer = checkpointable_utils.add_variable(
obj,
name="ones_initializer",
shape=[2],
initializer=init_ops.ones_initializer(dtype=dtypes.float32))
bare_initializer = checkpointable_utils.add_variable(
obj,
name="bare_initializer",
shape=[2, 2],
dtype=dtypes.float64,
initializer=init_ops.zeros_initializer)
# Even in graph mode, there are no naming conflicts between objects, only
# naming conflicts within an object.
other_duplicate = resource_variable_ops.ResourceVariable(
name="duplicate", initial_value=1.)
duplicate = checkpointable_utils.add_variable(
obj, name="duplicate", shape=[])
with self.assertRaisesRegexp(ValueError, "'duplicate'.*already declared"):
checkpointable_utils.add_variable(obj, name="duplicate", shape=[])
self.evaluate(checkpointable_utils.gather_initializers(obj))
self.assertEqual("constant_initializer:0", constant_initializer.name)
self.assertEqual(1, self.evaluate(constant_initializer))
self.assertEqual("some_variable_scope/ones_initializer:0",
ones_initializer.name)
self.assertAllEqual([1, 1], self.evaluate(ones_initializer))
self.assertAllEqual([[0., 0.],
[0., 0.]], self.evaluate(bare_initializer))
self.assertEqual("a_variable:0", obj.a_variable.name)
self.assertEqual("duplicate:0", other_duplicate.name)
if context.executing_eagerly():
# When executing eagerly, there's no uniquification of variable names. The
# checkpoint name will be the same.
self.assertEqual("duplicate:0", duplicate.name)
else:
# The .name attribute may be globally influenced, but the checkpoint name
# won't be (tested below).
self.assertEqual("duplicate_1:0", duplicate.name)
named_variables, _, _ = checkpointable_utils._serialize_object_graph(
obj, saveables_cache=None)
expected_checkpoint_names = (
"a_variable/.ATTRIBUTES/VARIABLE_VALUE",
"bare_initializer/.ATTRIBUTES/VARIABLE_VALUE",
"constant_initializer/.ATTRIBUTES/VARIABLE_VALUE",
"duplicate/.ATTRIBUTES/VARIABLE_VALUE",
"ones_initializer/.ATTRIBUTES/VARIABLE_VALUE",
)
six.assertCountEqual(
self, expected_checkpoint_names, [v.name for v in named_variables])
def testInitNotCalled(self):
class NoInit(tracking.Checkpointable):
def __init__(self):
pass
# __init__ for Checkpointable will be called implicitly.
checkpointable_utils.add_variable(NoInit(), "var", shape=[])
def testShapeDtype(self):
root = tracking.Checkpointable()
v1 = checkpointable_utils.add_variable(
root, name="v1", initializer=3., dtype=dtypes.float64)
self.assertEqual(dtypes.float64, v1.dtype)
v2 = checkpointable_utils.add_variable(
root,
name="v2",
shape=[3],
initializer=init_ops.ones_initializer,
dtype=dtypes.float64)
self.assertEqual(dtypes.float64, v2.dtype)
self.assertAllEqual([1., 1., 1.], self.evaluate(v2))
def testObjectMetadata(self):
with context.eager_mode():
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
dense = core.Dense(1)
checkpoint = checkpointable_utils.Checkpoint(dense=dense)
dense(constant_op.constant([[1.]]))
save_path = checkpoint.save(checkpoint_prefix)
objects = checkpointable_utils.object_metadata(save_path)
all_variable_names = []
for obj in objects.nodes:
for attribute in obj.attributes:
all_variable_names.append(attribute.full_name)
self.assertIn("dense/kernel", all_variable_names)
def testNotCheckpointable(self):
class CallsFunctionalStuff(
tracking.NotCheckpointable, tracking.Checkpointable):
pass
test_dir = self.get_temp_dir()
prefix = os.path.join(test_dir, "ckpt")
checkpoint = checkpointable_utils.Checkpoint(x=CallsFunctionalStuff())
with self.assertRaises(NotImplementedError):
checkpoint.save(prefix)
class CallsFunctionalStuffOtherMRO(
tracking.Checkpointable, tracking.NotCheckpointable):
pass
checkpoint_reversed = checkpointable_utils.Checkpoint(
x=CallsFunctionalStuffOtherMRO())
with self.assertRaises(NotImplementedError):
checkpoint_reversed.save(prefix)
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def test_object_graph_no_attributes(self):
root = tracking.Checkpointable()
root.v = resource_variable_ops.ResourceVariable(1.)
root.opt = momentum.MomentumOptimizer(0.01, 0.5)
root.opt.minimize(root.v.read_value)
object_graph = checkpointable_utils.make_object_graph_without_attributes(
root)
# Four objects: Root, v, opt, and a slot variable for v
self.assertEqual(4, len(object_graph.nodes))
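# The next two helpers exercise a custom SaveableObject: _MirroringSaveable
# serializes a single tensor read from the primary variable and, on restore,
# assigns that value to both the primary and the mirrored variable.
# _OwnsMirroredVariables returns it from _gather_saveables_for_checkpoint
# (keyed by base.VARIABLE_VALUE_KEY), so the object-based saver uses it in
# place of the default per-variable saveable.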
class _MirroringSaveable(saver_lib.BaseSaverBuilder.SaveableObject):
def __init__(self, primary_variable, mirrored_variable, name):
self._primary_variable = primary_variable
self._mirrored_variable = mirrored_variable
tensor = self._primary_variable.read_value()
spec = saver_lib.BaseSaverBuilder.SaveSpec(
tensor=tensor,
slice_spec="",
name=name)
super(_MirroringSaveable, self).__init__(
tensor, [spec], name)
def restore(self, restored_tensors, restored_shapes):
"""Restore the same value into both variables."""
tensor, = restored_tensors
return control_flow_ops.group(
self._primary_variable.assign(tensor),
self._mirrored_variable.assign(tensor))
class _OwnsMirroredVariables(base.CheckpointableBase):
"""A Checkpointable object which returns a more complex SaveableObject."""
def __init__(self):
self.non_dep_variable = variable_scope.get_variable(
name="non_dep_variable", initializer=6., use_resource=True)
self.mirrored = variable_scope.get_variable(
name="mirrored", initializer=15., use_resource=True)
def _gather_saveables_for_checkpoint(self):
def _saveable_factory(name=self.non_dep_variable.name):
return _MirroringSaveable(
primary_variable=self.non_dep_variable,
mirrored_variable=self.mirrored,
name=name)
return {base.VARIABLE_VALUE_KEY: _saveable_factory}
# The Saver sorts by name before parsing, so we need a name property.
@property
def name(self):
return self.non_dep_variable.name
class CheckpointingTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testNamingWithOptimizer(self):
input_value = constant_op.constant([[3.]])
model = MyModel()
# A nuisance Model using the same optimizer. Its slot variables should not
# go in the checkpoint, since it is never depended on.
other_model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
optimizer_step = training_util.get_or_create_global_step()
root_checkpointable = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model, optimizer_step=optimizer_step)
if context.executing_eagerly():
optimizer.minimize(
lambda: model(input_value),
global_step=optimizer_step)
optimizer.minimize(
lambda: other_model(input_value),
global_step=optimizer_step)
else:
train_op = optimizer.minimize(
model(input_value), global_step=optimizer_step)
optimizer.minimize(
other_model(input_value),
global_step=optimizer_step)
self.evaluate(checkpointable_utils.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
named_variables, serialized_graph, _ = (
checkpointable_utils._serialize_object_graph(
root_checkpointable, saveables_cache=None))
expected_checkpoint_names = (
# Created in the root node, so no prefix.
"optimizer_step",
"model/_second/kernel",
"model/_named_dense/kernel",
"model/_named_dense/bias",
# non-Layer dependency of the model
"model/_non_layer/a_variable",
# The optimizer creates two non-slot variables
"optimizer/beta1_power",
"optimizer/beta2_power",
# Slot variables
"model/_second/kernel/.OPTIMIZER_SLOT/optimizer/m",
"model/_second/kernel/.OPTIMIZER_SLOT/optimizer/v",
"model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m",
"model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/v",
"model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/m",
"model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/v",
)
suffix = "/.ATTRIBUTES/VARIABLE_VALUE"
expected_checkpoint_names = [
name + suffix for name in expected_checkpoint_names]
# The Dense layers also save get_config() JSON
expected_checkpoint_names.extend(
["model/_second/.ATTRIBUTES/OBJECT_CONFIG_JSON",
"model/_named_dense/.ATTRIBUTES/OBJECT_CONFIG_JSON"])
named_variables = {v.name: v for v in named_variables}
six.assertCountEqual(self, expected_checkpoint_names,
named_variables.keys())
# Check that we've mapped to the right variable objects (not exhaustive)
self.assertEqual(
"global_step",
named_variables["optimizer_step" + suffix].full_name)
self.assertEqual(
"my_model/dense_1/kernel",
named_variables["model/_second/kernel" + suffix].full_name)
self.assertEqual(
"my_model/dense/kernel",
named_variables["model/_named_dense/kernel" + suffix].full_name)
self.assertEqual(
"beta1_power",
named_variables["optimizer/beta1_power" + suffix].full_name)
self.assertEqual(
"beta2_power",
named_variables["optimizer/beta2_power" + suffix].full_name)
# Spot check the generated protocol buffers.
self.assertEqual("optimizer",
serialized_graph.nodes[0].children[1].local_name)
optimizer_node = serialized_graph.nodes[serialized_graph.nodes[0].children[
1].node_id]
self.assertEqual("beta1_power",
optimizer_node.children[0].local_name)
self.assertEqual("beta1_power",
serialized_graph.nodes[optimizer_node.children[0].node_id]
.attributes[0].full_name)
self.assertEqual(
"my_model/dense/kernel",
serialized_graph.nodes[optimizer_node.slot_variables[0]
.original_variable_node_id]
.attributes[0].full_name)
# We strip off the :0 suffix, as variable.name-based saving does.
self.assertEqual(
"my_model/dense/kernel/Adam",
serialized_graph.nodes[optimizer_node.slot_variables[0]
.slot_variable_node_id]
.attributes[0].full_name)
self.assertEqual(
"my_model/dense/kernel/Adam:0",
optimizer.get_slot(
var=model._named_dense.kernel,
name="m").name)
self.assertEqual(
"model/_named_dense/kernel" + suffix,
serialized_graph.nodes[
optimizer_node.slot_variables[0]
.original_variable_node_id].attributes[0].checkpoint_key)
self.assertEqual("m", optimizer_node.slot_variables[0].slot_name)
self.assertEqual(
"model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m" + suffix,
serialized_graph.nodes[
optimizer_node.slot_variables[0]
.slot_variable_node_id].attributes[0].checkpoint_key)
@test_util.run_in_graph_and_eager_modes
def testMoreComplexSaveableReturned(self):
v = _OwnsMirroredVariables()
checkpoint = checkpointable_utils.Checkpoint(v=v)
test_dir = self.get_temp_dir()
prefix = os.path.join(test_dir, "ckpt")
self.evaluate(v.non_dep_variable.assign(42.))
save_path = checkpoint.save(prefix)
self.evaluate(v.non_dep_variable.assign(43.))
self.evaluate(v.mirrored.assign(44.))
checkpoint.restore(save_path).assert_consumed().initialize_or_restore()
self.assertEqual(42., self.evaluate(v.non_dep_variable))
self.assertEqual(42., self.evaluate(v.mirrored))
self.evaluate(v.non_dep_variable.assign(44.))
save_path = checkpoint.save(prefix)
self.evaluate(v.non_dep_variable.assign(45.))
checkpoint.restore(save_path).assert_consumed().initialize_or_restore()
self.assertEqual(44., self.evaluate(v.non_dep_variable))
self.assertEqual(44., self.evaluate(v.mirrored))
@test_util.run_in_graph_and_eager_modes
def testMoreComplexSaveableReturnedWithGlobalName(self):
# The same object can also be saved using the name-based saver.
v = _OwnsMirroredVariables()
saver = saver_lib.Saver(var_list=[v])
test_dir = self.get_temp_dir()
prefix = os.path.join(test_dir, "ckpt")
with self.cached_session() as sess:
self.evaluate(v.non_dep_variable.assign(42.))
save_path = saver.save(sess, prefix)
self.evaluate(v.non_dep_variable.assign(43.))
self.evaluate(v.mirrored.assign(44.))
saver.restore(sess, save_path)
self.assertEqual(42., self.evaluate(v.non_dep_variable))
self.assertEqual(42., self.evaluate(v.mirrored))
@test_util.run_in_graph_and_eager_modes
def testSaveRestore(self):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root_checkpointable = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model)
input_value = constant_op.constant([[3.]])
if context.executing_eagerly():
optimizer.minimize(
lambda: model(input_value))
else:
train_op = optimizer.minimize(model(input_value))
# TODO(allenl): Make initialization more pleasant when graph building.
root_checkpointable.save_counter # pylint: disable=pointless-statement
self.evaluate(checkpointable_utils.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
prefix = os.path.join(self.get_temp_dir(), "ckpt")
self.evaluate(state_ops.assign(model._named_dense.variables[1], [42.]))
m_bias_slot = optimizer.get_slot(model._named_dense.variables[1], "m")
self.evaluate(state_ops.assign(m_bias_slot, [1.5]))
save_path = root_checkpointable.save(file_prefix=prefix)
self.evaluate(state_ops.assign(model._named_dense.variables[1], [43.]))
self.evaluate(state_ops.assign(root_checkpointable.save_counter, 3))
optimizer_variables = self.evaluate(optimizer.variables())
self.evaluate(state_ops.assign(m_bias_slot, [-2.]))
# Immediate restoration
status = root_checkpointable.restore(save_path=save_path).assert_consumed()
status.run_restore_ops()
self.assertAllEqual([42.], self.evaluate(model._named_dense.variables[1]))
self.assertAllEqual(1, self.evaluate(root_checkpointable.save_counter))
self.assertAllEqual([1.5], self.evaluate(m_bias_slot))
if not context.executing_eagerly():
return # Restore-on-create is only supported when executing eagerly
on_create_model = MyModel()
on_create_optimizer = adam.AdamOptimizer(
0.001,
        # Preserve beta1_power and beta2_power when applying gradients so we can
# test that they've been restored correctly.
beta1=1.0, beta2=1.0)
on_create_root = checkpointable_utils.Checkpoint(
optimizer=on_create_optimizer, model=on_create_model)
# Deferred restoration
status = on_create_root.restore(save_path=save_path)
status.assert_nontrivial_match()
status.assert_existing_objects_matched()
with self.assertRaises(AssertionError):
status.assert_consumed()
on_create_model(constant_op.constant([[3.]])) # create variables
self.assertAllEqual(1, self.evaluate(on_create_root.save_counter))
self.assertAllEqual([42.],
self.evaluate(
on_create_model._named_dense.variables[1]))
on_create_m_bias_slot = on_create_optimizer.get_slot(
on_create_model._named_dense.variables[1], "m")
status.assert_existing_objects_matched()
with self.assertRaises(AssertionError):
status.assert_consumed()
# Optimizer slot variables are created when the original variable is
# restored.
self.assertAllEqual([1.5], self.evaluate(on_create_m_bias_slot))
self.assertAllEqual(optimizer_variables[2:],
self.evaluate(on_create_optimizer.variables()))
dummy_var = resource_variable_ops.ResourceVariable([1.])
on_create_optimizer.minimize(loss=dummy_var.read_value)
status.assert_existing_objects_matched()
status.assert_consumed()
beta1_power, beta2_power = on_create_optimizer._get_beta_accumulators()
self.assertAllEqual(optimizer_variables[0], self.evaluate(beta1_power))
self.assertAllEqual(optimizer_variables[1], self.evaluate(beta2_power))
# TODO(allenl): Debug garbage created by this test in python3.
def testDeferredRestorationUsageEager(self):
"""An idiomatic eager execution example."""
num_training_steps = 10
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model,
optimizer_step=training_util.get_or_create_global_step())
root.restore(checkpoint_management.latest_checkpoint(
checkpoint_directory))
for _ in range(num_training_steps):
# TODO(allenl): Use a Dataset and serialize/checkpoint it.
input_value = constant_op.constant([[3.]])
optimizer.minimize(
lambda: model(input_value), # pylint: disable=cell-var-from-loop
global_step=root.optimizer_step)
root.save(file_prefix=checkpoint_prefix)
self.assertEqual((training_continuation + 1) * num_training_steps,
root.optimizer_step.numpy())
def testUsageGraph(self):
"""Expected usage when graph building."""
with context.graph_mode():
num_training_steps = 10
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
with ops.Graph().as_default():
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
input_value = constant_op.constant([[3.]])
train_op = optimizer.minimize(
model(input_value),
global_step=root.global_step)
checkpoint_path = checkpoint_management.latest_checkpoint(
checkpoint_directory)
with self.session(graph=ops.get_default_graph()) as session:
status = root.restore(save_path=checkpoint_path)
status.initialize_or_restore(session=session)
if checkpoint_path is None:
self.assertEqual(0, training_continuation)
with self.assertRaises(AssertionError):
status.assert_consumed()
with self.assertRaises(AssertionError):
status.assert_existing_objects_matched()
else:
status.assert_consumed()
status.assert_existing_objects_matched()
for _ in range(num_training_steps):
session.run(train_op)
root.save(file_prefix=checkpoint_prefix, session=session)
self.assertEqual((training_continuation + 1) * num_training_steps,
session.run(root.global_step))
self.assertEqual(training_continuation + 1,
session.run(root.save_counter))
@test_util.run_in_graph_and_eager_modes
def testAgnosticUsage(self):
"""Graph/eager agnostic usage."""
# Does create garbage when executing eagerly due to ops.Graph() creation.
num_training_steps = 10
checkpoint_directory = self.get_temp_dir()
for training_continuation in range(3):
with test_util.device(use_gpu=True):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
manager = checkpoint_management.CheckpointManager(
root, checkpoint_directory, max_to_keep=1)
status = root.restore(save_path=manager.latest_checkpoint)
input_value = constant_op.constant([[3.]])
train_fn = functools.partial(
optimizer.minimize,
functools.partial(model, input_value),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(self.evaluate, train_fn())
status.initialize_or_restore()
for _ in range(num_training_steps):
train_fn()
manager.save()
self.assertEqual((training_continuation + 1) * num_training_steps,
self.evaluate(root.global_step))
self.assertEqual(training_continuation + 1,
self.evaluate(root.save_counter))
@test_util.run_in_graph_and_eager_modes
def testFreezing(self):
with self.cached_session(use_gpu=True) as session:
# Save an object-based checkpoint using a frozen saver
directory = self.get_temp_dir()
prefix = os.path.join(directory, "ckpt")
v = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
checkpoint = checkpointable_utils.Checkpoint(v=v)
self.evaluate(v.assign(3))
# Create the save counter so assert_consumed doesn't complain about it not
# existing in the checkpoint on restore.
self.evaluate(checkpoint.save_counter.assign(12))
saver = checkpointable_utils.frozen_saver(checkpoint)
save_path = saver.save(session, prefix)
self.evaluate(v.assign(10))
# Use the frozen saver to restore the same object graph
saver.restore(session, save_path)
self.assertEqual(3, self.evaluate(v))
# Restore using another frozen saver on an identical object graph
del v, checkpoint, saver
v = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
checkpoint = checkpointable_utils.Checkpoint(v=v)
saver = checkpointable_utils.frozen_saver(checkpoint)
saver.restore(session, save_path)
self.assertEqual(3, self.evaluate(v))
# Restore as an object-based checkpoint
del v, checkpoint, saver
checkpoint = checkpointable_utils.Checkpoint()
status = checkpoint.restore(save_path)
v = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
if context.executing_eagerly():
self.assertEqual(12, self.evaluate(checkpoint.save_counter))
self.assertEqual(0, self.evaluate(v))
checkpoint.v = v
status.assert_consumed().run_restore_ops()
self.assertEqual(3, self.evaluate(v))
self.assertEqual(12, self.evaluate(checkpoint.save_counter))
@test_util.run_in_graph_and_eager_modes
def testCustomNumbering(self):
directory = self.get_temp_dir()
prefix = os.path.join(directory, "ckpt")
step = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
checkpoint = checkpointable_utils.Checkpoint(step=step)
self.evaluate(step.initializer)
for i in range(5):
path = checkpoint.write("%s-%d" % (prefix, self.evaluate(step)))
expected_suffix = "-%d" % (2 * i,)
if not path.endswith(expected_suffix):
self.fail("%s should have suffix %s" % (path, expected_suffix))
self.evaluate(step.assign_add(2))
# pylint: disable=cell-var-from-loop
@test_util.run_in_graph_and_eager_modes
def testWithDefun(self):
num_training_steps = 2
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
for training_continuation in range(3):
with test_util.device(use_gpu=True):
model = MyModel()
# Don't actually train so we can test variable values
optimizer = adam.AdamOptimizer(0.)
root = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
checkpoint_path = checkpoint_management.latest_checkpoint(
checkpoint_directory)
status = root.restore(save_path=checkpoint_path)
def train_fn():
@def_function.function
def _call_model(x):
return model(x)
with backprop.GradientTape() as tape:
loss = _call_model(constant_op.constant([[3.]]))
gradients = tape.gradient(loss, model.variables)
return optimizer.apply_gradients(zip(gradients, model.variables),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(
self.evaluate, train_fn())
status.initialize_or_restore()
for _ in range(num_training_steps):
train_fn()
if training_continuation > 0:
status.assert_consumed()
self.assertAllClose([[42.]], self.evaluate(model.variables[0]))
else:
self.evaluate(model.variables[0].assign([[42.]]))
root.save(file_prefix=checkpoint_prefix)
self.assertEqual((training_continuation + 1) * num_training_steps,
self.evaluate(root.global_step))
self.assertEqual(training_continuation + 1,
self.evaluate(root.save_counter))
# pylint: enable=cell-var-from-loop
def _get_checkpoint_name(self, name):
root = tracking.Checkpointable()
checkpointable_utils.add_variable(
root, name=name, shape=[1, 2], dtype=dtypes.float64)
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
root, saveables_cache=None)
with ops.name_scope("root/" + named_variable.name):
pass # Make sure we can use this as an op name if we prefix it.
return named_variable.name
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testVariableNameEscaping(self):
suffix = "/.ATTRIBUTES/VARIABLE_VALUE"
self.assertEqual(r"a.Sb.Sc" + suffix, self._get_checkpoint_name(r"a/b/c"))
self.assertEqual(r"b" + suffix, self._get_checkpoint_name(r"b"))
self.assertEqual(r"c.S" + suffix, self._get_checkpoint_name(r"c/"))
self.assertEqual(r"d.S..S" + suffix, self._get_checkpoint_name(r"d/.S"))
self.assertEqual(r"d.S..ATTRIBUTES.Sf" + suffix,
self._get_checkpoint_name(r"d/.ATTRIBUTES/f"))
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testNumberedPath(self):
root = tracking.Checkpointable()
leaf = tracking.Checkpointable()
root.leaf = leaf
checkpointable_utils.add_variable(leaf, name="v", shape=[])
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
root, saveables_cache=None)
self.assertEqual(r"leaf/v/.ATTRIBUTES/VARIABLE_VALUE", named_variable.name)
@test_util.run_in_graph_and_eager_modes
def testLocalNameValidation(self):
root = tracking.Checkpointable()
leaf = tracking.Checkpointable()
# Dots are escaped, which avoids conflicts with reserved names.
root._track_checkpointable(leaf, name=".ATTRIBUTES")
checkpointable_utils.add_variable(checkpointable=leaf, name="a", shape=[])
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
root, saveables_cache=None)
self.assertEqual("..ATTRIBUTES/a/.ATTRIBUTES/VARIABLE_VALUE",
named_variable.name)
def testAnonymousVarsInInit(self):
class Model(training.Model):
def __init__(self):
super(Model, self).__init__()
self.w = resource_variable_ops.ResourceVariable(0.0)
self.b = resource_variable_ops.ResourceVariable(0.0)
self.vars = [self.w, self.b]
def call(self, x):
return x * self.w + self.b
with context.eager_mode():
model = Model()
optimizer = adam.AdamOptimizer(learning_rate=0.05)
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
checkpoint = checkpointable_utils.Checkpoint(
model=model, optimizer=optimizer)
for _ in range(2):
checkpoint.save(checkpoint_prefix)
with backprop.GradientTape() as tape:
loss = (constant_op.constant(1.)
- model(constant_op.constant(1.))) ** 2
grad = tape.gradient(loss, model.vars)
optimizer.apply_gradients(
[(g, v) for g, v in zip(grad, model.vars)])
@test_util.run_in_graph_and_eager_modes
def testLateDependencyTracking(self):
class Dependency(tracking.Checkpointable):
def build(self):
self.var = checkpointable_utils.add_variable(
self, "var", initializer=0.)
class LateDependencies(tracking.Checkpointable):
def add_dep(self):
self.dep = Dependency()
self.dep.build()
original = LateDependencies()
original.add_dep()
self.evaluate(state_ops.assign(original.dep.var, 123.))
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = checkpointable_utils.CheckpointableSaver(
original).save(checkpoint_prefix)
load_into = LateDependencies()
status = checkpointable_utils.CheckpointableSaver(
load_into).restore(save_path)
status.assert_existing_objects_matched()
with self.assertRaises(AssertionError):
status.assert_consumed()
load_into.add_dep()
status.assert_consumed()
status.assert_existing_objects_matched().run_restore_ops()
self.assertEqual(123., self.evaluate(load_into.dep.var))
@test_util.run_in_graph_and_eager_modes
def testDepAfterVar(self):
class Dependency(tracking.Checkpointable):
def build(self):
self.var = checkpointable_utils.add_variable(
self, "var", initializer=0.)
class DepAfterVar(tracking.Checkpointable):
def add_dep(self):
dep = Dependency()
dep.build()
self.dep = dep
dep_after_var = DepAfterVar()
dep_after_var.add_dep()
self.evaluate(state_ops.assign(dep_after_var.dep.var, -14.))
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = checkpointable_utils.CheckpointableSaver(dep_after_var).save(
checkpoint_prefix)
loaded_dep_after_var = DepAfterVar()
status = checkpointable_utils.CheckpointableSaver(
loaded_dep_after_var).restore(save_path)
loaded_dep_after_var.add_dep()
status.assert_consumed()
status.run_restore_ops()
self.assertEqual(-14., self.evaluate(loaded_dep_after_var.dep.var))
@test_util.run_in_graph_and_eager_modes
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
root = tracking.Checkpointable()
root.var = checkpointable_utils.add_variable(
root, name="var", initializer=0.)
optimizer = adam.AdamOptimizer(0.1)
if context.executing_eagerly():
optimizer.minimize(root.var.read_value)
else:
train_op = optimizer.minimize(root.var)
# Note that `optimizer` has not been added as a dependency of
# `root`. Create a one-off grouping so that slot variables for `root.var`
# get initialized too.
self.evaluate(checkpointable_utils.gather_initializers(
checkpointable_utils.Checkpoint(root=root, optimizer=optimizer)))
self.evaluate(train_op)
self.evaluate(state_ops.assign(root.var, 12.))
no_slots_path = checkpointable_utils.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "no_slots"))
root.optimizer = optimizer
self.evaluate(state_ops.assign(root.var, 13.))
self.evaluate(state_ops.assign(optimizer.get_slot(name="m", var=root.var),
14.))
slots_path = checkpointable_utils.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
new_root = tracking.Checkpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
slot_status = checkpointable_utils.CheckpointableSaver(
new_root).restore(slots_path)
no_slot_status = checkpointable_utils.CheckpointableSaver(
new_root).restore(no_slots_path)
with self.assertRaises(AssertionError):
no_slot_status.assert_consumed()
new_root.var = checkpointable_utils.add_variable(
new_root, name="var", shape=[])
no_slot_status.assert_consumed()
no_slot_status.run_restore_ops()
self.assertEqual(12., self.evaluate(new_root.var))
new_root.optimizer = adam.AdamOptimizer(0.1)
slot_status.assert_existing_objects_matched()
with self.assertRaisesRegexp(AssertionError, "beta1_power"):
slot_status.assert_consumed()
self.assertEqual(12., self.evaluate(new_root.var))
if context.executing_eagerly():
# Slot variables are only created with restoring initializers when
# executing eagerly.
self.assertEqual(14., self.evaluate(
new_root.optimizer.get_slot(name="m", var=new_root.var)))
else:
self.assertIs(new_root.optimizer.get_slot(name="m", var=new_root.var),
None)
if context.executing_eagerly():
new_root.optimizer.minimize(new_root.var.read_value)
else:
train_op = new_root.optimizer.minimize(new_root.var)
# The slot variable now exists; restore() didn't create it, but we should
# now have a restore op for it.
slot_status.run_restore_ops()
self.assertEqual(14., self.evaluate(
new_root.optimizer.get_slot(name="m", var=new_root.var)))
self.evaluate(train_op)
slot_status.assert_consumed()
@test_util.run_in_graph_and_eager_modes
def testOverlappingRestores(self):
checkpoint_directory = self.get_temp_dir()
save_root = tracking.Checkpointable()
save_root.dep = tracking.Checkpointable()
save_root.dep.var = checkpointable_utils.add_variable(
save_root.dep, name="var", initializer=0.)
self.evaluate(state_ops.assign(save_root.dep.var, 12.))
saver = checkpointable_utils.CheckpointableSaver(save_root)
first_path = saver.save(os.path.join(checkpoint_directory, "first"))
self.evaluate(state_ops.assign(save_root.dep.var, 13.))
second_path = saver.save(os.path.join(checkpoint_directory, "second"))
first_root = tracking.Checkpointable()
second_root = tracking.Checkpointable()
first_status = checkpointable_utils.CheckpointableSaver(
first_root).restore(first_path)
second_status = checkpointable_utils.CheckpointableSaver(
second_root).restore(second_path)
load_dep = tracking.Checkpointable()
load_dep.var = checkpointable_utils.add_variable(
load_dep, name="var", shape=[])
first_root.dep = load_dep
first_status.assert_consumed()
first_status.run_restore_ops()
self.assertEqual(12., self.evaluate(load_dep.var))
second_root.dep = load_dep
second_status.assert_consumed()
second_status.run_restore_ops()
self.assertEqual(13., self.evaluate(load_dep.var))
# Try again with the order of the restore() reversed. The last restore
# determines the final value.
first_root = tracking.Checkpointable()
second_root = tracking.Checkpointable()
second_status = checkpointable_utils.CheckpointableSaver(
second_root).restore(second_path)
first_status = checkpointable_utils.CheckpointableSaver(
first_root).restore(first_path)
load_dep = tracking.Checkpointable()
load_dep.var = checkpointable_utils.add_variable(
load_dep, name="var", shape=[])
first_root.dep = load_dep
first_status.assert_consumed()
first_status.run_restore_ops()
self.assertEqual(12., self.evaluate(load_dep.var))
second_root.dep = load_dep
second_status.assert_consumed()
second_status.run_restore_ops()
self.assertEqual(12., self.evaluate(load_dep.var))
@test_util.run_in_graph_and_eager_modes
def testAmbiguousLoad(self):
# Not OK to split one checkpoint object into two
checkpoint_directory = self.get_temp_dir()
save_root = tracking.Checkpointable()
save_root.dep_one = tracking.Checkpointable()
save_root.dep_two = tracking.Checkpointable()
dep_three = tracking.Checkpointable()
save_root.dep_one.dep_three = dep_three
save_root.dep_two.dep_three = dep_three
checkpointable_utils.add_variable(dep_three, name="var", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(save_root))
save_path = checkpointable_utils.CheckpointableSaver(save_root).save(
os.path.join(checkpoint_directory, "ckpt"))
load_root = tracking.Checkpointable()
status = checkpointable_utils.CheckpointableSaver(load_root).restore(
save_path)
load_root.dep_one = tracking.Checkpointable()
load_root.dep_two = tracking.Checkpointable()
load_root.dep_one.dep_three = tracking.Checkpointable()
load_root.dep_two.dep_three = tracking.Checkpointable()
checkpointable_utils.add_variable(
load_root.dep_one.dep_three, name="var", initializer=0.)
with self.assertRaises(AssertionError):
status.assert_consumed()
with self.assertRaises(AssertionError):
status.assert_existing_objects_matched()
@test_util.run_in_graph_and_eager_modes
def testObjectsCombined(self):
# Currently fine to load two checkpoint objects into one Python object
checkpoint_directory = self.get_temp_dir()
save_root = tracking.Checkpointable()
save_root.dep_one = tracking.Checkpointable()
save_root.dep_two = tracking.Checkpointable()
checkpointable_utils.add_variable(
save_root.dep_one, name="var1", initializer=32., dtype=dtypes.float64)
checkpointable_utils.add_variable(
save_root.dep_two, name="var2", initializer=64., dtype=dtypes.float64)
self.evaluate(checkpointable_utils.gather_initializers(save_root))
save_path = checkpointable_utils.CheckpointableSaver(save_root).save(
os.path.join(checkpoint_directory, "ckpt"))
load_root = tracking.Checkpointable()
load_root.dep_one = tracking.Checkpointable()
load_root.dep_two = load_root.dep_one
v1 = checkpointable_utils.add_variable(
load_root.dep_one, name="var1", shape=[], dtype=dtypes.float64)
v2 = checkpointable_utils.add_variable(
load_root.dep_one, name="var2", shape=[], dtype=dtypes.float64)
status = checkpointable_utils.CheckpointableSaver(load_root).restore(
save_path).assert_consumed().assert_existing_objects_matched()
status.run_restore_ops()
self.assertEqual(32., self.evaluate(v1))
self.assertEqual(64., self.evaluate(v2))
@test_util.run_in_graph_and_eager_modes
def testDependencyLoop(self):
# Note: this test creates garbage during eager execution because it
# purposefully creates a reference cycle.
first = tracking.Checkpointable()
second = tracking.Checkpointable()
first.second = second
second.first = first
first.v = checkpointable_utils.add_variable(
first, "v1", initializer=[3., 1., 4.])
second.v = checkpointable_utils.add_variable(
second, "v2", initializer=[1., 1., 2., 3.])
self.evaluate(checkpointable_utils.gather_initializers(first))
checkpoint_directory = self.get_temp_dir()
save_path = checkpointable_utils.CheckpointableSaver(first).save(
os.path.join(checkpoint_directory, "ckpt"))
# Test deferred loading
first_load = tracking.Checkpointable()
status = checkpointable_utils.CheckpointableSaver(
first_load).restore(save_path)
second_load = tracking.Checkpointable()
first_load.second = second_load
second_load.first = first_load
with self.assertRaises(AssertionError):
status.assert_consumed()
first_load.v = checkpointable_utils.add_variable(
first_load, "v1", shape=[3])
second_load.v = checkpointable_utils.add_variable(
second_load, "v2", shape=[4])
status.assert_consumed()
status.run_restore_ops()
self.assertAllEqual([3., 1., 4.], self.evaluate(first_load.v))
self.assertAllEqual([1., 1., 2., 3.], self.evaluate(second_load.v))
# Test loading when variables have already been created
self.evaluate(first_load.v.assign([2., 7., 1.]))
self.assertAllEqual([2., 7., 1.], self.evaluate(first_load.v))
self.evaluate(second_load.v.assign([2., 7., 1., 8.]))
self.assertAllEqual([2., 7., 1., 8.], self.evaluate(second_load.v))
status = checkpointable_utils.CheckpointableSaver(first_load).restore(
save_path).assert_consumed()
status.run_restore_ops()
self.assertAllEqual([3., 1., 4.], self.evaluate(first_load.v))
self.assertAllEqual([1., 1., 2., 3.], self.evaluate(second_load.v))
@test_util.run_in_graph_and_eager_modes
def testRestoreOnAssign(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
first = tracking.Checkpointable()
first.var1 = variables.Variable(0., name="outside_var")
first.var2 = variables.Variable(0., name="blah")
self.evaluate(first.var1.assign(4.))
self.evaluate(first.var2.assign(8.))
save_path = checkpointable_utils.CheckpointableSaver(first).save(
checkpoint_prefix)
second = tracking.Checkpointable()
second.var2 = variables.Variable(0., name="blah")
status = checkpointable_utils.CheckpointableSaver(
second).restore(save_path)
recreated_var1 = variables.Variable(0., name="outside_var")
status.run_restore_ops()
self.assertEqual(8., self.evaluate(second.var2))
self.evaluate(recreated_var1.assign(-2.))
self.assertEqual(-2., self.evaluate(recreated_var1))
second.var1 = recreated_var1
status.run_restore_ops()
self.assertEqual(4., self.evaluate(recreated_var1))
def testManySavesGraph(self):
"""Saves after the first should not modify the graph."""
with context.graph_mode():
graph = ops.Graph()
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
self.evaluate(checkpointable_utils.gather_initializers(obj))
saver = checkpointable_utils.CheckpointableSaver(obj)
saver.save(checkpoint_prefix)
before_ops = graph.get_operations()
saver.save(checkpoint_prefix)
self.assertEqual(before_ops, graph.get_operations())
@test_util.run_in_graph_and_eager_modes
def testCheckpointState(self):
# No checkpoints are deleted by default
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(obj))
saver = checkpointable_utils.Checkpoint(obj=obj)
for _ in range(10):
saver.save(checkpoint_prefix)
expected_filenames = ["checkpoint"]
for checkpoint_number in range(1, 11):
expected_filenames.append("ckpt-%d.index" % (checkpoint_number,))
expected_filenames.append(
"ckpt-%d.data-00000-of-00001" % (checkpoint_number,))
six.assertCountEqual(
self,
expected_filenames,
os.listdir(checkpoint_directory))
@test_util.run_in_graph_and_eager_modes
def testCheckpointStateChangingVarList(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(obj))
checkpoint = checkpointable_utils.Checkpoint(obj=obj)
looped_variables = []
for iteration in range(10):
new_variable = resource_variable_ops.ResourceVariable(iteration)
self.evaluate(new_variable.initializer)
setattr(checkpoint, "var_%d" % iteration, new_variable)
checkpoint.save(checkpoint_prefix)
looped_variables.append(new_variable)
expected_filenames = ["checkpoint"]
# We've copied the saver each time, but checkpoint management should still
# be consistent. Nothing gets deleted.
for checkpoint_number in range(1, 11):
expected_filenames.append("ckpt-%d.index" % (checkpoint_number,))
expected_filenames.append(
"ckpt-%d.data-00000-of-00001" % (checkpoint_number,))
six.assertCountEqual(
self,
expected_filenames,
os.listdir(checkpoint_directory))
self.assertEqual(
checkpoint_prefix + "-10",
checkpoint_management.latest_checkpoint(checkpoint_directory))
# The checkpoint list only contains the most recent checkpoint, but they're
# all on disk. This means we won't eventually run into proto size limits.
self.assertEqual(
[checkpoint_prefix + "-10"],
(checkpoint_management.get_checkpoint_state(checkpoint_directory)
.all_model_checkpoint_paths))
for v in looped_variables:
self.evaluate(v.assign(314))
checkpoint.restore(checkpoint_prefix + "-6").run_restore_ops()
self.assertEqual(314, self.evaluate(checkpoint.var_9))
self.assertEqual(314, self.evaluate(checkpoint.var_8))
self.assertEqual(314, self.evaluate(checkpoint.var_6))
self.assertEqual(5, self.evaluate(checkpoint.var_5))
self.assertEqual(1, self.evaluate(checkpoint.var_1))
self.assertEqual(0, self.evaluate(checkpoint.var_0))
checkpoint.restore(checkpoint_prefix + "-10").run_restore_ops()
self.assertEqual(9, self.evaluate(checkpoint.var_9))
self.assertEqual(8, self.evaluate(checkpoint.var_8))
self.assertEqual(1, self.evaluate(checkpoint.var_1))
self.assertEqual(0, self.evaluate(checkpoint.var_0))
def testManyRestoresGraph(self):
"""Restores after the first should not modify the graph."""
with context.graph_mode():
graph = ops.Graph()
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
self.evaluate(checkpointable_utils.gather_initializers(obj))
saver = checkpointable_utils.CheckpointableSaver(obj)
save_path = saver.save(checkpoint_prefix)
saver.restore(save_path)
before_ops = graph.get_operations()
saver.restore(save_path)
self.assertEqual(before_ops, graph.get_operations())
def testMultipleGraphsNonSlotVariables(self):
with context.graph_mode():
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
optimizer = adam.AdamOptimizer(0.001)
# Construct a model in one graph
first_graph = ops.Graph()
first_session = session_lib.Session(graph=first_graph)
with first_graph.as_default(), first_session.as_default():
first_variable = resource_variable_ops.ResourceVariable([1.])
first_root_checkpointable = checkpointable_utils.Checkpoint(
optimizer=optimizer, variable=first_variable)
train_op = optimizer.minimize(first_variable.read_value)
self.evaluate(checkpointable_utils.gather_initializers(
first_root_checkpointable))
self.evaluate(train_op)
self.evaluate(first_variable.assign([1.]))
self.evaluate(optimizer.get_slot(
var=first_variable, name="m").assign([2.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(3.))
# Save and load in a second graph
second_graph = ops.Graph()
with second_graph.as_default(), session_lib.Session(graph=second_graph):
second_variable = resource_variable_ops.ResourceVariable([1.])
second_root_checkpointable = checkpointable_utils.Checkpoint(
optimizer=optimizer, variable=second_variable)
train_op = optimizer.minimize(second_variable.read_value)
second_root_checkpointable.restore(None).initialize_or_restore()
self.evaluate(train_op)
self.evaluate(second_variable.assign([4.]))
self.evaluate(optimizer.get_slot(
var=second_variable, name="m").assign([5.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(6.))
save_path = second_root_checkpointable.save(checkpoint_prefix)
self.evaluate(second_variable.assign([7.]))
self.evaluate(optimizer.get_slot(
var=second_variable, name="m").assign([8.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.assertAllEqual(6., self.evaluate(beta1_power))
status = second_root_checkpointable.restore(save_path)
status.assert_consumed().run_restore_ops()
self.assertAllEqual([4.], self.evaluate(second_variable))
self.assertAllEqual([5.], self.evaluate(optimizer.get_slot(
var=second_variable, name="m")))
beta1_power, _ = optimizer._get_beta_accumulators()
self.assertAllEqual(6., self.evaluate(beta1_power))
# Check that the first graph is unmolested
with first_graph.as_default(), first_session.as_default():
self.assertAllEqual([1.], self.evaluate(first_variable))
self.assertAllEqual([2.], self.evaluate(optimizer.get_slot(
var=first_variable, name="m")))
beta1_power, _ = optimizer._get_beta_accumulators()
self.assertAllEqual(3., self.evaluate(beta1_power))
@test_util.run_in_graph_and_eager_modes
def test_sequential(self):
model = sequential.Sequential()
checkpoint = checkpointable_utils.Checkpoint(model=model)
model.add(core.Dense(4))
second_dense = core.Dense(5)
model.add(second_dense)
model(constant_op.constant([[1.]]))
checkpoint.restore(None).initialize_or_restore()
self.evaluate(second_dense.bias.assign(
constant_op.constant([1., 2., 3., 4., 5.])))
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = checkpoint.save(checkpoint_prefix)
self.evaluate(second_dense.bias.assign(
constant_op.constant([5., 6., 7., 8., 9.])))
checkpoint.restore(save_path).assert_consumed().run_restore_ops()
self.assertAllEqual([1., 2., 3., 4., 5.], self.evaluate(second_dense.bias))
deferred_sequential = sequential.Sequential()
deferred_sequential_checkpoint = checkpointable_utils.Checkpoint(
model=deferred_sequential)
status = deferred_sequential_checkpoint.restore(save_path)
deferred_sequential.add(core.Dense(4))
deferred_sequential(constant_op.constant([[1.]]))
deferred_second_dense = core.Dense(5)
deferred_sequential.add(deferred_second_dense)
deferred_sequential(constant_op.constant([[1.]]))
status.run_restore_ops()
self.assertAllEqual([1., 2., 3., 4., 5.],
self.evaluate(deferred_second_dense.bias))
@test_util.run_in_graph_and_eager_modes
def test_initialize_if_not_restoring(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
optimizer_only_prefix = os.path.join(checkpoint_directory, "opt")
with test_util.device(use_gpu=True):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = checkpointable_utils.Checkpoint(
model=model, # Do not save the optimizer with the checkpoint.
global_step=training_util.get_or_create_global_step())
optimizer_checkpoint = checkpointable_utils.Checkpoint(
optimizer=optimizer)
checkpoint_path = checkpoint_management.latest_checkpoint(
checkpoint_directory)
status = root.restore(save_path=checkpoint_path)
input_value = constant_op.constant([[3.]])
train_fn = functools.partial(
optimizer.minimize,
functools.partial(model, input_value),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(self.evaluate, train_fn())
status.initialize_or_restore()
self.evaluate([v.initializer for v in optimizer.variables()])
train_fn()
model_save_path = root.save(file_prefix=checkpoint_prefix)
self.evaluate(optimizer.variables()[0].assign(42.))
optimizer_save_path = optimizer_checkpoint.save(optimizer_only_prefix)
# Restore into a graph with the optimizer
with test_util.device(use_gpu=True):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
root = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
status = root.restore(save_path=model_save_path)
input_value = constant_op.constant([[3.]])
train_fn = functools.partial(
optimizer.minimize,
functools.partial(model, input_value),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(self.evaluate, train_fn())
status.initialize_or_restore()
train_fn()
with self.assertRaises(AssertionError):
status.assert_existing_objects_matched()
with self.assertRaises(AssertionError):
status.assert_consumed()
# Make sure initialization doesn't clobber later restores
with test_util.device(use_gpu=True):
model = MyModel()
optimizer = adam.AdamOptimizer(0.001, beta1=1.0)
root = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model,
global_step=training_util.get_or_create_global_step())
opt_root = checkpointable_utils.Checkpoint(
optimizer=optimizer)
status = root.restore(save_path=model_save_path)
init_only_optimizer_status = opt_root.restore(save_path=None)
optimizer_status = opt_root.restore(save_path=optimizer_save_path)
input_value = constant_op.constant([[3.]])
train_fn = functools.partial(
optimizer.minimize,
functools.partial(model, input_value),
global_step=root.global_step)
if not context.executing_eagerly():
train_fn = functools.partial(self.evaluate, train_fn())
optimizer_status.run_restore_ops()
status.initialize_or_restore()
init_only_optimizer_status.initialize_or_restore()
train_fn()
self.assertEqual(42., self.evaluate(optimizer.variables()[0]))
@test_util.run_in_graph_and_eager_modes
def test_restore_after_adding_empty_checkpointable_data_structure(self):
model = NonLayerCheckpointable()
checkpoint = checkpointable_utils.Checkpoint(model=model)
checkpoint.restore(None).initialize_or_restore()
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = checkpoint.save(checkpoint_prefix)
del model, checkpoint
model = NonLayerCheckpointable()
model.dict = {"a": 1}
model.list = {"b": 1}
checkpoint = checkpointable_utils.Checkpoint(model=model)
load_status = checkpoint.restore(save_path)
load_status.assert_existing_objects_matched().run_restore_ops()
class _ManualScope(tracking.Checkpointable):
def __call__(self):
with variable_scope.variable_scope("ManualScope") as vs:
self.variable_scope = vs
with checkpointable_utils.capture_dependencies(template=self):
return self._build()
def _build(self):
return variable_scope.get_variable(name="in_manual_scope", shape=[])
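# _ManualScope wraps _build() in checkpointable_utils.capture_dependencies so
# that the variable created inside it is recorded as a checkpoint dependency of
# the scope object itself; TemplateTests below relies on this to find
# "in_manual_scope" via manual_scope._checkpoint_dependencies and to see
# "ManualScope" listed as a dependency of the loaded template.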
class TemplateTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_checkpointable_save_restore(self):
def _templated():
v = variable_scope.get_variable(
"v", shape=[1], initializer=init_ops.zeros_initializer(),
use_resource=True)
v2 = variable_scope.get_variable(
"v2", shape=[1], initializer=init_ops.zeros_initializer(),
use_resource=True)
manual = _ManualScope()
return v, v + 1., v2, manual, manual()
save_template = template.make_template("s1", _templated)
v1_save, _, v2_save, manual_scope, manual_scope_v = save_template()
six.assertCountEqual(
self,
[v1_save, v2_save, manual_scope, manual_scope_v, save_template],
checkpointable_utils.list_objects(save_template))
manual_dep, = manual_scope._checkpoint_dependencies
self.assertEqual("in_manual_scope", manual_dep.name)
self.assertIs(manual_scope_v, manual_dep.ref)
optimizer = adam.AdamOptimizer(0.0)
save_root = checkpointable_utils.Checkpoint(
my_template=save_template, optimizer=optimizer)
optimizer.minimize(v1_save.read_value)
self.evaluate([v.initializer for v in save_template.variables])
self.evaluate([v.initializer for v in optimizer.variables()])
self.evaluate(v1_save.assign([12.]))
self.evaluate(v2_save.assign([14.]))
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = save_root.save(checkpoint_prefix)
load_template = template.make_template("s2", _templated)
load_optimizer = adam.AdamOptimizer(0.0)
load_root = checkpointable_utils.Checkpoint(
my_template=load_template, optimizer=load_optimizer)
status = load_root.restore(save_path)
var, var_plus_one, var2, _, _ = load_template()
load_optimizer.minimize(var.read_value)
self.assertEqual(3, len(load_template._checkpoint_dependencies))
self.assertEqual("v", load_template._checkpoint_dependencies[0].name)
self.assertEqual("v2", load_template._checkpoint_dependencies[1].name)
self.assertEqual("ManualScope",
load_template._checkpoint_dependencies[2].name)
status.assert_consumed().run_restore_ops()
self.assertAllEqual([12.], self.evaluate(var))
self.assertAllEqual([13.], self.evaluate(var_plus_one))
self.assertAllEqual([14.], self.evaluate(var2))
@test_util.run_in_graph_and_eager_modes
def test_checkpointable_save_restore_nested(self):
def _inner_template():
v = variable_scope.get_variable(
"v", shape=[1], initializer=init_ops.zeros_initializer())
return v
def _outer_template():
first_inner = template.make_template("i1", _inner_template)
second_inner = template.make_template("i2", _inner_template)
v1 = first_inner()
v2 = second_inner()
v3 = second_inner()
return (first_inner, second_inner), (v1, v2, v3)
with variable_scope.variable_scope("ignored"):
save_template = template.make_template("s1", _outer_template)
save_root = checkpointable_utils.Checkpoint(my_template=save_template)
(inner_template_one, inner_template_two), _ = save_template()
self.evaluate(inner_template_one.variables[0].assign([20.]))
self.evaluate(inner_template_two.variables[0].assign([25.]))
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
save_path = save_root.save(checkpoint_prefix)
load_template = template.make_template("s2", _outer_template)
load_root = checkpointable_utils.Checkpoint(my_template=load_template)
status = load_root.restore(save_path)
(inner_template_one, inner_template_two), (v1, v2, v3) = load_template()
outer_template_dependencies = load_root.my_template._checkpoint_dependencies
self.assertEqual(2, len(outer_template_dependencies))
self.assertEqual("i1", outer_template_dependencies[0].name)
self.assertIs(inner_template_one, outer_template_dependencies[0].ref)
self.assertEqual("i2", outer_template_dependencies[1].name)
self.assertIs(inner_template_two, outer_template_dependencies[1].ref)
self.assertEqual(1, len(inner_template_one._checkpoint_dependencies))
self.assertEqual("v", inner_template_one._checkpoint_dependencies[0].name)
self.assertEqual(1, len(inner_template_two._checkpoint_dependencies))
self.assertEqual("v", inner_template_two._checkpoint_dependencies[0].name)
status.assert_consumed().run_restore_ops()
self.assertAllEqual([20.], self.evaluate(v1))
self.assertAllEqual([25.], self.evaluate(v2))
self.assertAllEqual([25.], self.evaluate(v3))
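# CheckpointCompatibilityTests covers loading name-based saver_lib.Saver
# checkpoints through the object-based API and round-tripping between graph and
# eager modes: _set_sentinels overwrites a regular variable, an optimizer slot,
# and a non-slot optimizer variable with sentinel values, and _check_sentinels
# verifies that a restore brings back the originally saved values for all three.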
class CheckpointCompatibilityTests(test.TestCase):
def _initialized_model(self):
input_value = constant_op.constant([[3.]])
model = MyModel()
optimizer = adam.AdamOptimizer(0.001)
optimizer_step = training_util.get_or_create_global_step()
root_checkpointable = checkpointable_utils.Checkpoint(
optimizer=optimizer, model=model, optimizer_step=optimizer_step)
train_op = optimizer.minimize(
functools.partial(model, input_value),
global_step=optimizer_step)
self.evaluate(checkpointable_utils.gather_initializers(
root_checkpointable))
self.evaluate(train_op)
# A regular variable, a slot variable, and a non-slot Optimizer variable
# with known values to check when loading.
self.evaluate(model._named_dense.bias.assign([1.]))
self.evaluate(optimizer.get_slot(
var=model._named_dense.bias, name="m").assign([2.]))
beta1_power, _ = optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(3.))
return root_checkpointable
def _set_sentinels(self, root_checkpointable):
self.evaluate(root_checkpointable.model._named_dense.bias.assign([101.]))
self.evaluate(
root_checkpointable.optimizer.get_slot(
var=root_checkpointable.model._named_dense.bias, name="m")
.assign([102.]))
beta1_power, _ = root_checkpointable.optimizer._get_beta_accumulators()
self.evaluate(beta1_power.assign(103.))
def _check_sentinels(self, root_checkpointable):
self.assertAllEqual(
[1.], self.evaluate(root_checkpointable.model._named_dense.bias))
self.assertAllEqual([2.], self.evaluate(
root_checkpointable.optimizer.get_slot(
var=root_checkpointable.model._named_dense.bias, name="m")))
beta1_power, _ = root_checkpointable.optimizer._get_beta_accumulators()
self.assertAllEqual(3., self.evaluate(beta1_power))
def _write_name_based_checkpoint(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
with context.graph_mode():
save_graph = ops.Graph()
with save_graph.as_default(), self.session(
graph=save_graph) as session:
root = self._initialized_model()
name_saver = saver_lib.Saver()
return name_saver.save(
sess=session, save_path=checkpoint_prefix,
global_step=root.optimizer_step)
@test_util.run_in_graph_and_eager_modes
def testLoadFromNameBasedSaver(self):
"""Save a name-based checkpoint, load it using the object-based API."""
with test_util.device(use_gpu=True):
save_path = self._write_name_based_checkpoint()
root = self._initialized_model()
self._set_sentinels(root)
with self.assertRaises(AssertionError):
self._check_sentinels(root)
object_saver = checkpointable_utils.CheckpointableSaver(root)
self._set_sentinels(root)
status = object_saver.restore(save_path)
if context.executing_eagerly():
self._check_sentinels(root)
if context.executing_eagerly():
with self.assertRaisesRegexp(AssertionError, "OBJECT_CONFIG_JSON"):
status.assert_consumed()
with self.assertRaisesRegexp(AssertionError, "OBJECT_CONFIG_JSON"):
status.assert_existing_objects_matched()
with self.assertRaisesRegexp(AssertionError, "OBJECT_CONFIG_JSON"):
status.assert_nontrivial_match()
else:
# When graph building, we haven't read any keys, so we don't know
# whether the restore will be complete.
with self.assertRaisesRegexp(AssertionError, "not restored"):
status.assert_consumed()
with self.assertRaisesRegexp(AssertionError, "not restored"):
status.assert_existing_objects_matched()
with self.assertRaisesRegexp(AssertionError, "not restored"):
status.assert_nontrivial_match()
status.run_restore_ops()
self._check_sentinels(root)
self._set_sentinels(root)
status = object_saver.restore(save_path)
status.initialize_or_restore()
self._check_sentinels(root)
# Check that there is no error when keys are missing from the name-based
# checkpoint.
root.not_in_name_checkpoint = resource_variable_ops.ResourceVariable([1.])
status = object_saver.restore(save_path)
with self.assertRaises(AssertionError):
status.assert_existing_objects_matched()
def testSaveGraphLoadEager(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
with context.graph_mode():
save_graph = ops.Graph()
with save_graph.as_default(), self.session(
graph=save_graph) as session:
root = self._initialized_model()
save_path = root.save(session=session, file_prefix=checkpoint_prefix)
with context.eager_mode():
root = self._initialized_model()
self._set_sentinels(root)
root.restore(save_path).assert_consumed()
self._check_sentinels(root)
def testSaveEagerLoadGraph(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
with context.eager_mode():
root = self._initialized_model()
save_path = root.save(file_prefix=checkpoint_prefix)
with context.graph_mode():
save_graph = ops.Graph()
with save_graph.as_default(), self.session(
graph=save_graph):
root = self._initialized_model()
self._set_sentinels(root)
root.restore(save_path).assert_consumed().run_restore_ops()
self._check_sentinels(root)
class PythonMetadataTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testSaveLoad(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
dense = core.Dense(1)
checkpoint = checkpointable_utils.Checkpoint(dense=dense)
dense(constant_op.constant([[1.]]))
checkpoint.restore(None).initialize_or_restore()
save_path = checkpoint.save(checkpoint_prefix)
def _get_dense_node_from_object_graph(object_graph_proto):
root_node = object_graph_proto.nodes[0]
for child in root_node.children:
if child.local_name == "dense":
break
else:
raise AssertionError(
"Expected a 'dense' dependency of root, didn't find one.")
dense_node = object_graph_proto.nodes[child.node_id] # pylint: disable=undefined-loop-variable
self.assertEqual(1, len(dense_node.attributes))
reader = pywrap_tensorflow.NewCheckpointReader(save_path)
layer_json = reader.get_tensor(dense_node.attributes[0].checkpoint_key)
return json.loads(layer_json.decode("utf-8"))
layer_data = _get_dense_node_from_object_graph(
checkpointable_utils.object_metadata(save_path))
self.assertEqual("Dense", layer_data["class_name"])
self.assertEqual(1, layer_data["config"]["units"])
# Check that no new ops are added to the graph the second time we save.
ops.get_default_graph().finalize()
dense.units = 42
save_path = checkpoint.save(checkpoint_prefix)
layer_data = _get_dense_node_from_object_graph(
checkpointable_utils.object_metadata(save_path))
self.assertEqual("Dense", layer_data["class_name"])
self.assertEqual(42, layer_data["config"]["units"])
if __name__ == "__main__":
test.main()
|
hehongliang/tensorflow
|
tensorflow/python/training/checkpointable/util_test.py
|
Python
|
apache-2.0
| 72,153
| 0.005502
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import account_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sysadminmatmoz/ingadhoc
|
account_invoice_commercial/__init__.py
|
Python
|
agpl-3.0
| 366
| 0
|
#!python3
from setuptools import setup
from irsdk import VERSION
setup(
name='pyirsdk',
version=VERSION,
description='Python 3 implementation of iRacing SDK',
author='Mihail Latyshov',
author_email='kutu182@gmail.com',
url='https://github.com/kutu/pyirsdk',
py_modules=['irsdk'],
license='MIT',
platforms=['win64'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.7',
'Topic :: Utilities',
],
entry_points={
'console_scripts': ['irsdk = irsdk:main'],
},
install_requires=[
'PyYAML >= 5.3',
],
)
|
kutu/pyirsdk
|
setup.py
|
Python
|
mit
| 783
| 0
|
#!/usr/bin/env python
# This file is part of tcollector.
# Copyright (C) 2013 The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details. You should have received a copy
# of the GNU Lesser General Public License along with this program. If not,
# see <http://www.gnu.org/licenses/>.
#
# Written by Mark Smith <mark@qq.is>.
#
"""A collector to gather statistics from a Riak node.
The following all have tags of 'type' which can be 'get' or 'put'. Latency
is measured in fractional seconds. All latency values are calculated over the
last 60 seconds and are moving values.
- riak.vnode.requests
- riak.node.requests
- riak.node.latency.mean
- riak.node.latency.median
- riak.node.latency.95th
- riak.node.latency.99th
- riak.node.latency.100th
These metrics have no tags and are global:
- riak.memory.total
- riak.memory.allocated
- riak.executing_mappers
- riak.sys_process_count
- riak.read_repairs
- riak.connections
- riak.connected_nodes
"""
import json
import os
import sys
import time
from collectors.etc import riak_conf
from collectors.lib import utils
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
CONFIG = riak_conf.get_default_config()
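# Maps keys from Riak's /stats JSON to (OpenTSDB metric suffix, tag string) pairs.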
MAP = {
'vnode_gets_total': ('vnode.requests', 'type=get'),
'vnode_puts_total': ('vnode.requests', 'type=put'),
'vnode_gets': ('vnode.requests.last.minute', 'type=get'),
'vnode_puts': ('vnode.requests.last.minute', 'type=put'),
'vnode_index_reads': ('vnode.indexing', 'type=read'),
'vnode_index_writes': ('vnode.indexing', 'type=write'),
'vnode_index_deletes': ('vnode.indexing', 'type=delete'),
'vnode_index_writes_postings': ('vnode.index.posting', 'type=write'),
'vnode_index_deletes_postings': ('vnode.index.posting', 'type=delete'),
'node_gets_total': ('node.requests', 'type=get'),
'node_puts_total': ('node.requests', 'type=put'),
'node_gets': ('node.requests.last.minute', 'type=get'),
'node_puts': ('node.requests.last.minute', 'type=put'),
'node_get_fsm_active': ('node.active.fsm', 'type=get'),
'node_put_fsm_active': ('node.active.fsm', 'type=put'),
'node_get_fsm_time_mean': ('node.latency.mean', 'type=get'),
'node_get_fsm_time_median': ('node.latency.median', 'type=get'),
'node_get_fsm_time_95': ('node.latency.95th', 'type=get'),
'node_get_fsm_time_99': ('node.latency.99th', 'type=get'),
'node_get_fsm_time_100': ('node.latency.100th', 'type=get'),
'node_put_fsm_time_mean': ('node.latency.mean', 'type=put'),
'node_put_fsm_time_median': ('node.latency.median', 'type=put'),
'node_put_fsm_time_95': ('node.latency.95th', 'type=put'),
'node_put_fsm_time_99': ('node.latency.99th', 'type=put'),
'node_put_fsm_time_100': ('node.latency.100th', 'type=put'),
'node_get_fsm_rejected': ('node.rejected.fsm', 'type=get'),
'node_put_fsm_rejected': ('node.rejected.fsm', 'type=put'),
'node_get_fsm_siblings_mean': ('node.siblings.mean', ''),
'node_get_fsm_siblings_median': ('node.siblings.median', ''),
'node_get_fsm_siblings_95': ('node.siblings.95th', ''),
'node_get_fsm_siblings_99': ('node.siblings.99th', ''),
'node_get_fsm_siblings_100': ('node.siblings.100th', ''),
'node_get_fsm_objsize_mean': ('node.object.size.mean', ''),
'node_get_fsm_objsize_median': ('node.object.size.median', ''),
'node_get_fsm_objsize_95': ('node.object.size.95th', ''),
'node_get_fsm_objsize_99': ('node.object.size.99th', ''),
'node_get_fsm_objsize_100': ('node.object.size.100th', ''),
'pbc_connects_total': ('connections', ''),
'pbc_active': ('pbc.active', ''),
'read_repairs_total': ('read_repairs', ''),
'sys_process_count': ('sys_process_count', ''),
'executing_mappers': ('executing_mappers', ''),
'mem_allocated': ('memory.allocated', ''),
'mem_total': ('memory.total', ''),
'memory_processes_used': ('memory.erlang', ''),
'index_fsm_active': ('index.active.fsm', ''),
'list_fsm_active': ('key.listing.active', ''),
'cpu_nprocs': ('os.processes', '')
#connected_nodes is calculated
}
def main():
"""Main loop"""
# don't run if we're not a riak node
if not os.path.exists("/usr/lib/riak"):
sys.exit(13)
utils.drop_privileges()
sys.stdin.close()
interval = 15
def print_stat(metric, value, tags=""):
if value is not None:
print("riak.%s %d %s %s" % (metric, ts, value, tags))
while True:
ts = int(time.time())
req = urlopen(CONFIG['stats_endpoint'])
if req is not None:
obj = json.loads(req.read())
for key in obj:
if key not in MAP:
continue
# this is a hack, but Riak reports latencies in microseconds. they're fairly useless
# to our human operators, so we're going to convert them to seconds.
if 'latency' in MAP[key][0]:
obj[key] = obj[key] / 1000000.0
print_stat(MAP[key][0], obj[key], MAP[key][1])
if 'connected_nodes' in obj:
print_stat('connected_nodes', len(obj['connected_nodes']), '')
req.close()
sys.stdout.flush()
time.sleep(interval)
if __name__ == "__main__":
sys.exit(main())
|
OpenTSDB/tcollector
|
collectors/0/riak.py
|
Python
|
lgpl-3.0
| 5,780
| 0.000519
|
# -*- coding: utf-8 -*-
#
# http://www.privacyidea.org
# (c) cornelius kölbel, privacyidea.org
#
# 2018-11-21 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Remove the audit log based statistics
# 2016-12-20 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Restrict download to certain time
# 2015-07-16 Cornelius Kölbel, <cornelius.koelbel@netknights.it>
# Add statistics endpoint
# 2015-01-20 Cornelius Kölbel, <cornelius@privacyidea.org>
# Complete rewrite during flask migration
# Try to provide REST API
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__doc__="""This is the audit REST API that can be used to search the audit log.
It only provides the method
GET /audit
"""
from flask import (Blueprint, request, current_app, stream_with_context)
from .lib.utils import (send_result, send_file)
from ..api.lib.prepolicy import (prepolicy, check_base_action, auditlog_age,
allowed_audit_realm, hide_audit_columns)
from ..api.auth import admin_required
from ..lib.policy import ACTION
from flask import g
import logging
from ..lib.audit import search, getAudit
from privacyidea.lib.utils import parse_timedelta
log = logging.getLogger(__name__)
audit_blueprint = Blueprint('audit_blueprint', __name__)
@audit_blueprint.route('/', methods=['GET'])
@prepolicy(check_base_action, request, ACTION.AUDIT)
@prepolicy(allowed_audit_realm, request, ACTION.AUDIT)
@prepolicy(auditlog_age, request)
@prepolicy(hide_audit_columns, request)
def search_audit():
"""
return a paginated list of audit entries.
Params can be passed as key-value-pairs.
:httpparam timelimit: A timelimit, that limits the recent audit entries.
This param gets overwritten by a policy auditlog_age. Can be 1d, 1m, 1h.
**Example request**:
.. sourcecode:: http
GET /audit?realm=realm1 HTTP/1.1
Host: example.com
Accept: application/json
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"id": 1,
"jsonrpc": "2.0",
"result": {
"status": true,
"value": [
{
"serial": "....",
"missing_line": "..."
}
]
},
"version": "privacyIDEA unknown"
}
"""
audit_dict = search(current_app.config, request.all_data)
g.audit_object.log({'success': True})
return send_result(audit_dict)
@audit_blueprint.route('/<csvfile>', methods=['GET'])
@prepolicy(check_base_action, request, ACTION.AUDIT_DOWNLOAD)
@prepolicy(auditlog_age, request)
@admin_required
def download_csv(csvfile=None):
"""
Download the audit entry as CSV file.
Params can be passed as key-value-pairs.
**Example request**:
.. sourcecode:: http
GET /audit/audit.csv?realm=realm1 HTTP/1.1
Host: example.com
Accept: text/csv
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: text/csv
{
"id": 1,
"jsonrpc": "2.0",
"result": {
"status": true,
"value": [
{
"serial": "....",
"missing_line": "..."
}
]
},
"version": "privacyIDEA unknown"
}
"""
audit = getAudit(current_app.config)
g.audit_object.log({'success': True})
param = request.all_data
if "timelimit" in param:
timelimit = parse_timedelta(param["timelimit"])
del param["timelimit"]
else:
timelimit = None
return send_file(stream_with_context(audit.csv_generator(param=param,
timelimit=timelimit)),
csvfile)
|
privacyidea/privacyidea
|
privacyidea/api/audit.py
|
Python
|
agpl-3.0
| 4,483
| 0.002903
|
"""
/******************************************************************************
This source file is part of the Avogadro project.
Copyright 2013 Kitware, Inc.
This source code is released under the New BSD License, (the "License").
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/
"""
import argparse
import json
import sys
def getMetaData():
metaData = {}
metaData['inputFormat'] = 'xyz'
metaData['outputFormat'] = 'xyz'
metaData['operations'] = ['read', 'write']
metaData['identifier'] = 'ZYX Example Format'
metaData['name'] = 'ZYX'
metaData['description'] = "Mostly useless file format that reads xyz-style " +\
"files with reversed coordinates. Demonstrates " +\
"the implementation of a user-scripted file format."
metaData['fileExtensions'] = ['zyx']
metaData['mimeTypes'] = ['chemical/x-zyx']
return metaData
def write():
result = ""
# Just copy the first two lines: numAtoms and comment/title
result += sys.stdin.readline()
result += sys.stdin.readline()
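# Reverse the coordinate order on each atom line (x y z -> z y x),
# keeping the element symbol and any trailing fields.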
for line in sys.stdin:
words = line.split()
result += '%-3s %9.5f %9.5f %9.5f' %\
(words[0], float(words[3]), float(words[2]), float(words[1]))
if len(words) > 4:
result += ' ' + ' '.join(words[4:])
result += '\n'
return result
def read():
result = ""
# Just copy the first two lines: numAtoms and comment/title
result += sys.stdin.readline()
result += sys.stdin.readline()
for line in sys.stdin:
words = line.split()
result += '%-3s %9.5f %9.5f %9.5f' %\
(words[0], float(words[3]), float(words[2]), float(words[1]))
if len(words) > 4:
result += ' ' + ' '.join(words[4:])
result += '\n'
return result
if __name__ == "__main__":
parser = argparse.ArgumentParser('Example file format script.')
parser.add_argument('--metadata', action='store_true')
parser.add_argument('--read', action='store_true')
parser.add_argument('--write', action='store_true')
parser.add_argument('--display-name', action='store_true')
parser.add_argument('--lang', nargs='?', default='en')
args = vars(parser.parse_args())
if args['metadata']:
print(json.dumps(getMetaData()))
elif args['display_name']:
print(getMetaData()['name'])
elif args['read']:
print(read())
elif args['write']:
print(write())
|
OpenChemistry/avogadrolibs
|
avogadro/qtplugins/scriptfileformats/formatScripts/zyx.py
|
Python
|
bsd-3-clause
| 2,841
| 0.001408
|
import random
from subprocess import call
import yaml
with open('./venues.yml') as f:
venues = yaml.load(f)
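# Pick a random venue, print it, and open its URL (uses the macOS 'open' command).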
venue = random.choice(venues)
print(venue['name'])
print(venue['url'])
call(['open', venue['url']])
|
miiila/hungry-in-karlin
|
decide.py
|
Python
|
mit
| 218
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-07-19 14:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('query_designer', '0012_query_dataset_query'),
]
operations = [
migrations.RemoveField(
model_name='query',
name='dataset_query',
),
]
|
dipapaspyros/bdo_platform
|
query_designer/migrations/0013_remove_query_dataset_query.py
|
Python
|
mit
| 405
| 0
|
from widgets import messagebox as msg
class QtBaseException(Exception):
"""
Custom Exception base class used to handle exception with our on subset of options
"""
def __init__(self, message, displayPopup=False, *args):
"""initializes the exception, use cause to display the cause of the exception
:param message: The exception to display
:param cause: the cause of the error eg. a variable/class etc
:param args: std Exception args
"""
self.message = message
if displayPopup:
self.showDialog()
super(self.__class__, self).__init__(message, *args)
def showDialog(self):
messageBox = msg.MessageBox()
messageBox.setText(self.message)
messageBox.exec_()
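# Illustrative usage (assumes a Qt application context; message text is made up):
# raise QtBaseException("Failed to load rig", displayPopup=True)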
|
dsparrow27/zoocore
|
zoo/libs/pyqt/errors.py
|
Python
|
gpl-3.0
| 777
| 0.002574
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Requirements for Ruby codegen."""
from artman.tasks.requirements import task_requirement_base
class RubyFormatRequirements(task_requirement_base.TaskRequirementBase):
@classmethod
def require(cls):
return ['rubocop']
@classmethod
def install(cls):
# Intentionally do nothing
pass
class RakeRequirements(task_requirement_base.TaskRequirementBase):
@classmethod
def require(cls):
return ['rake']
@classmethod
def install(cls):
# Intentionally do nothing
pass
|
ethanbao/artman
|
artman/tasks/requirements/ruby_requirements.py
|
Python
|
apache-2.0
| 1,142
| 0
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test running pivxd with the -rpcbind and -rpcallowip options."""
import sys
from test_framework.netutil import addr_to_hex, all_interfaces, get_bind_addrs, test_ipv6_local
from test_framework.test_framework import PivxTestFramework, SkipTest
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
get_datadir_path,
get_rpc_proxy,
rpc_port,
rpc_url
)
class RPCBindTest(PivxTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.bind_to_localhost_only = False
self.num_nodes = 1
def setup_network(self):
self.add_nodes(self.num_nodes, None)
def add_options(self, parser):
parser.add_option("--ipv4", action='store_true', dest="run_ipv4", help="Run ipv4 tests only", default=False)
parser.add_option("--ipv6", action='store_true', dest="run_ipv6", help="Run ipv6 tests only", default=False)
parser.add_option("--nonloopback", action='store_true', dest="run_nonloopback", help="Run non-loopback tests only", default=False)
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
self.log.info("Bind test for %s" % str(addresses))
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
self.nodes[0].rpchost = connect_to
self.start_node(0, base_args + binds)
pid = self.nodes[0].process.pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
self.stop_nodes()
def run_allowip_test(self, allow_ips, rpchost, rpcport):
'''
Start a node with rpcallow IP, and request getnetworkinfo
at a non-localhost IP.
'''
self.log.info("Allow IP test for %s:%d" % (rpchost, rpcport))
node_args = \
['-disablewallet', '-nolisten'] + \
['-rpcallowip='+x for x in allow_ips] + \
['-rpcbind='+addr for addr in ['127.0.0.1', "%s:%d" % (rpchost, rpcport)]] # Bind to localhost as well so start_nodes doesn't hang
self.nodes[0].rpchost = None
self.start_nodes([node_args])
# connect to node through non-loopback interface
node = get_rpc_proxy(rpc_url(get_datadir_path(self.options.tmpdir, 0), 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir)
node.getnetworkinfo()
self.stop_nodes()
def run_test(self):
# due to OS-specific network stats queries, this test works only on Linux
if sum([self.options.run_ipv4, self.options.run_ipv6, self.options.run_nonloopback]) > 1:
raise AssertionError("Only one of --ipv4, --ipv6 and --nonloopback can be set")
self.log.info("Check for linux")
if not sys.platform.startswith('linux'):
raise SkipTest("This test can only be run on linux.")
self.log.info("Check for ipv6")
have_ipv6 = test_ipv6_local()
if not have_ipv6 and not (self.options.run_ipv4 or self.options.run_nonloopback):
raise SkipTest("This test requires ipv6 support.")
self.log.info("Check for non-loopback interface")
self.non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
self.non_loopback_ip = ip
break
if self.non_loopback_ip is None and self.options.run_nonloopback:
raise SkipTest("This test requires a non-loopback ip address.")
self.defaultport = rpc_port(0)
if not self.options.run_nonloopback:
self._run_loopback_tests()
if not self.options.run_ipv4 and not self.options.run_ipv6:
self._run_nonloopback_tests()
def _run_loopback_tests(self):
if self.options.run_ipv4:
# check only IPv4 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', self.defaultport)])
# check only IPv4 localhost (explicit) with alternative port
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
else:
# check default without rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(None, '127.0.0.1', [],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
# check default with rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
# check only IPv6 localhost (explicit)
self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
[('::1', self.defaultport)])
# check both IPv4 and IPv6 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
def _run_nonloopback_tests(self):
self.log.info("Using interface %s for testing" % self.non_loopback_ip)
# check only non-loopback interface
self.run_bind_test([self.non_loopback_ip], self.non_loopback_ip, [self.non_loopback_ip],
[(self.non_loopback_ip, self.defaultport)])
# Check that with invalid rpcallowip, we are denied
self.run_allowip_test([self.non_loopback_ip], self.non_loopback_ip, self.defaultport)
assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], self.non_loopback_ip, self.defaultport)
if __name__ == '__main__':
RPCBindTest().main()
|
PIVX-Project/PIVX
|
test/functional/rpc_bind.py
|
Python
|
mit
| 6,476
| 0.004324
|
# -*- coding: utf-8 -*-
import os.path
files = os.listdir(os.path.dirname(__file__))
__all__ = [filename[:-3] for filename in files if not filename.startswith('__') and filename.endswith('.py')]
|
repotvsupertuga/tvsupertuga.repository
|
script.module.streamtvsupertuga/lib/resources/lib/sources/en_torrents/__init__.py
|
Python
|
gpl-2.0
| 199
| 0.01005
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" tests for supporting multiple NIC's in advanced zone with security groups in cloudstack 4.14.0.0
"""
# Import Local Modules
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase, unittest
from marvin.sshClient import SshClient
from marvin.lib.utils import (validateList,
cleanup_resources,
get_host_credentials,
get_process_status,
execute_command_in_host,
random_gen)
from marvin.lib.base import (PhysicalNetwork,
Account,
Host,
TrafficType,
Domain,
Network,
NetworkOffering,
VirtualMachine,
ServiceOffering,
Zone,
NIC,
SecurityGroup)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_virtual_machines,
list_routers,
list_hosts,
get_free_vlan)
from marvin.codes import (PASS, FAILED)
import logging
import random
import time
class TestMultipleNicSupport(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(
TestMultipleNicSupport,
cls).getClsTestClient()
cls.apiclient = cls.testClient.getApiClient()
cls.testdata = cls.testClient.getParsedTestDataConfig()
cls.services = cls.testClient.getParsedTestDataConfig()
zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
cls.zone = Zone(zone.__dict__)
cls._cleanup = []
cls.skip = False
if str(cls.zone.securitygroupsenabled) != "True":
cls.skip = True
return
cls.logger = logging.getLogger("TestMultipleNicSupport")
cls.stream_handler = logging.StreamHandler()
cls.logger.setLevel(logging.DEBUG)
cls.logger.addHandler(cls.stream_handler)
# Get Domain and templates
cls.domain = get_domain(cls.apiclient)
cls.services['mode'] = cls.zone.networktype
cls.template = get_template(cls.apiclient, cls.zone.id, hypervisor="KVM")
if cls.template == FAILED:
cls.skip = True
return
# Create new domain, account, network and VM
cls.user_domain = Domain.create(
cls.apiclient,
services=cls.testdata["acl"]["domain2"],
parentdomainid=cls.domain.id)
# Create account
cls.account1 = Account.create(
cls.apiclient,
cls.testdata["acl"]["accountD2"],
admin=True,
domainid=cls.user_domain.id
)
# Create small service offering
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.testdata["service_offerings"]["small"]
)
cls._cleanup.append(cls.service_offering)
cls.services["network"]["zoneid"] = cls.zone.id
cls.network_offering = NetworkOffering.create(
cls.apiclient,
cls.services["network_offering"],
)
# Enable Network offering
cls.network_offering.update(cls.apiclient, state='Enabled')
cls._cleanup.append(cls.network_offering)
cls.testdata["virtual_machine"]["zoneid"] = cls.zone.id
cls.testdata["virtual_machine"]["template"] = cls.template.id
if cls.zone.securitygroupsenabled:
# Enable networking for reaching the VM through SSH
security_group = SecurityGroup.create(
cls.apiclient,
cls.testdata["security_group"],
account=cls.account1.name,
domainid=cls.account1.domainid
)
# Authorize Security group to SSH to VM
ingress_rule = security_group.authorize(
cls.apiclient,
cls.testdata["ingress_rule"],
account=cls.account1.name,
domainid=cls.account1.domainid
)
# Authorize Security group to SSH to VM
ingress_rule2 = security_group.authorize(
cls.apiclient,
cls.testdata["ingress_rule_ICMP"],
account=cls.account1.name,
domainid=cls.account1.domainid
)
cls.testdata["shared_network_offering_sg"]["specifyVlan"] = 'True'
cls.testdata["shared_network_offering_sg"]["specifyIpRanges"] = 'True'
cls.shared_network_offering = NetworkOffering.create(
cls.apiclient,
cls.testdata["shared_network_offering_sg"],
conservemode=False
)
NetworkOffering.update(
cls.shared_network_offering,
cls.apiclient,
id=cls.shared_network_offering.id,
state="enabled"
)
physical_network, vlan = get_free_vlan(cls.apiclient, cls.zone.id)
cls.testdata["shared_network_sg"]["physicalnetworkid"] = physical_network.id
random_subnet_number = random.randrange(90, 99)
cls.testdata["shared_network_sg"]["name"] = "Shared-Network-SG-Test-vlan" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["displaytext"] = "Shared-Network-SG-Test-vlan" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["vlan"] = "vlan://" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["startip"] = "192.168." + str(random_subnet_number) + ".240"
cls.testdata["shared_network_sg"]["endip"] = "192.168." + str(random_subnet_number) + ".250"
cls.testdata["shared_network_sg"]["gateway"] = "192.168." + str(random_subnet_number) + ".254"
cls.network1 = Network.create(
cls.apiclient,
cls.testdata["shared_network_sg"],
networkofferingid=cls.shared_network_offering.id,
zoneid=cls.zone.id,
accountid=cls.account1.name,
domainid=cls.account1.domainid
)
random_subnet_number = random.randrange(100, 110)
cls.testdata["shared_network_sg"]["name"] = "Shared-Network-SG-Test-vlan" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["displaytext"] = "Shared-Network-SG-Test-vlan" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["vlan"] = "vlan://" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["startip"] = "192.168." + str(random_subnet_number) + ".240"
cls.testdata["shared_network_sg"]["endip"] = "192.168." + str(random_subnet_number) + ".250"
cls.testdata["shared_network_sg"]["gateway"] = "192.168." + str(random_subnet_number) + ".254"
cls.network2 = Network.create(
cls.apiclient,
cls.testdata["shared_network_sg"],
networkofferingid=cls.shared_network_offering.id,
zoneid=cls.zone.id,
accountid=cls.account1.name,
domainid=cls.account1.domainid
)
random_subnet_number = random.randrange(111, 120)
cls.testdata["shared_network_sg"]["name"] = "Shared-Network-SG-Test-vlan" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["displaytext"] = "Shared-Network-SG-Test-vlan" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["vlan"] = "vlan://" + str(random_subnet_number)
cls.testdata["shared_network_sg"]["startip"] = "192.168." + str(random_subnet_number) + ".240"
cls.testdata["shared_network_sg"]["endip"] = "192.168." + str(random_subnet_number) + ".250"
cls.testdata["shared_network_sg"]["gateway"] = "192.168." + str(random_subnet_number) + ".254"
cls.network3 = Network.create(
cls.apiclient,
cls.testdata["shared_network_sg"],
networkofferingid=cls.shared_network_offering.id,
zoneid=cls.zone.id,
accountid=cls.account1.name,
domainid=cls.account1.domainid
)
try:
cls.virtual_machine1 = VirtualMachine.create(
cls.apiclient,
cls.testdata["virtual_machine"],
accountid=cls.account1.name,
domainid=cls.account1.domainid,
serviceofferingid=cls.service_offering.id,
templateid=cls.template.id,
securitygroupids=[security_group.id],
networkids=cls.network1.id
)
for nic in cls.virtual_machine1.nic:
if nic.isdefault:
cls.virtual_machine1.ssh_ip = nic.ipaddress
cls.virtual_machine1.default_network_id = nic.networkid
break
except Exception as e:
cls.fail("Exception while deploying virtual machine: %s" % e)
try:
cls.virtual_machine2 = VirtualMachine.create(
cls.apiclient,
cls.testdata["virtual_machine"],
accountid=cls.account1.name,
domainid=cls.account1.domainid,
serviceofferingid=cls.service_offering.id,
templateid=cls.template.id,
securitygroupids=[security_group.id],
networkids=[str(cls.network1.id), str(cls.network2.id)]
)
for nic in cls.virtual_machine2.nic:
if nic.isdefault:
cls.virtual_machine2.ssh_ip = nic.ipaddress
cls.virtual_machine2.default_network_id = nic.networkid
break
except Exception as e:
cls.fail("Exception while deploying virtual machine: %s" % e)
cls._cleanup.append(cls.virtual_machine1)
cls._cleanup.append(cls.virtual_machine2)
cls._cleanup.append(cls.network1)
cls._cleanup.append(cls.network2)
cls._cleanup.append(cls.network3)
cls._cleanup.append(cls.shared_network_offering)
if cls.zone.securitygroupsenabled:
cls._cleanup.append(security_group)
cls._cleanup.append(cls.account1)
cls._cleanup.append(cls.user_domain)
@classmethod
def tearDownClass(self):
try:
cleanup_resources(self.apiclient, self._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
if self.skip:
self.skipTest("Test can be run only on advanced zone and KVM hypervisor")
self.apiclient = self.testClient.getApiClient()
self.cleanup = []
return
def tearDown(self):
try:
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def verify_network_rules(self, vm_id):
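# Runs the security_group.py helper on the VM's KVM host to check that the
# iptables/ebtables rules for each NIC (and its secondary IPs) are correct.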
virtual_machine = VirtualMachine.list(
self.apiclient,
id=vm_id
)
vm = virtual_machine[0]
hosts = list_hosts(
self.apiclient,
id=vm.hostid
)
host = hosts[0]
if host.hypervisor.lower() not in "kvm":
return
host.user, host.password = get_host_credentials(self.config, host.ipaddress)
for nic in vm.nic:
secips = ""
if len(nic.secondaryip) > 0:
for secip in nic.secondaryip:
secips += secip.ipaddress + ";"
command="/usr/share/cloudstack-common/scripts/vm/network/security_group.py verify_network_rules --vmname %s --vmip %s --vmmac %s --nicsecips '%s'" % (vm.instancename, nic.ipaddress, nic.macaddress, secips)
self.logger.debug("Executing command '%s' in host %s" % (command, host.ipaddress))
result=execute_command_in_host(host.ipaddress, 22,
host.user,
host.password,
command)
if len(result) > 0:
self.fail("The iptables/ebtables rules for nic %s on vm %s on host %s are not correct" %(nic.ipaddress, vm.instancename, host.name))
@attr(tags=["adeancedsg"], required_hardware="false")
def test_01_create_vm_with_multiple_nics(self):
"""Create Vm with multiple NIC's
Steps:
# 1. Create more than 1 isolated or shared network
# 2. Create a vm and select more than 1 network while deploying
# 3. Vm is deployed successfully with 1 nic from each network
# 4. All the vm's should be pingable
:return:
"""
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id
)
self.assertEqual(
len(virtual_machine), 1,
"Virtual Machine create with 2 NIC's failed")
nicIdInVm = virtual_machine[0].nic[0]
self.assertIsNotNone(nicIdInVm, "NIC 1 not found in Virtual Machine")
nicIdInVm = virtual_machine[0].nic[1]
self.assertIsNotNone(nicIdInVm, "NIC 2 not found in Virtual Machine")
self.verify_network_rules(self.virtual_machine2.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_02_add_nic_to_vm(self):
"""Create VM with single NIC and then add additional NIC
Steps:
# 1. Create a VM by selecting one default NIC
# 2. Create few more isolated or shared networks
# 3. Add extra NIC's to the vm from the newly created networks
# 4. The deployed VM should have extra nic's added in the above
# step without any fail
# 5. The IP's of the extra NIC's should be pingable
:return:
"""
self.virtual_machine1.add_nic(self.apiclient, self.network2.id)
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine1.id
)
nicIdInVm = virtual_machine[0].nic[1]
self.assertIsNotNone(nicIdInVm, "Second NIC not found")
self.verify_network_rules(self.virtual_machine1.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_03_add_ip_to_default_nic(self):
""" Add secondary IP's to the VM
Steps:
# 1. Create a VM with more than 1 NIC
# 2) Navigate to Instances->NIC->Edit Secondary IP's
# ->Acquire new Secondary IP
# 3) Add as many secondary Ip as possible to the VM
# 4) Configure the secondary IP's by referring to "Configure
# the secondary IP's" in the "Action Item" section
:return:
"""
ipaddress = NIC.addIp(
self.apiclient,
id=self.virtual_machine2.nic[0].id
)
self.assertIsNotNone(
ipaddress,
"Unable to add secondary IP to the default NIC")
self.verify_network_rules(self.virtual_machine2.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_04_add_ip_to_remaining_nics(self):
""" Add secondary IP's to remaining NIC's
Steps:
# 1) Create a VM with more than 1 NIC
# 2)Navigate to Instances-NIC's->Edit Secondary IP's
# ->Acquire new Secondary IP
# 3) Add secondary IP to all the NIC's of the VM
# 4) Configure the secondary IP's by referring to "Configure the
# secondary IP's" in the "Action Item" section
:return:
"""
self.virtual_machine1.add_nic(self.apiclient, self.network3.id)
vms = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine1.id
)
self.assertIsNotNone(
vms[0].nic[2],
"Third NIC is not added successfully to the VM")
vms1_nic1_id = vms[0].nic[1]['id']
vms1_nic2_id = vms[0].nic[2]['id']
ipaddress21 = NIC.addIp(
self.apiclient,
id=vms1_nic1_id
)
ipaddress22 = NIC.addIp(
self.apiclient,
id=vms1_nic1_id
)
self.assertIsNotNone(
ipaddress21,
"Unable to add first secondary IP to the second nic")
self.assertIsNotNone(
ipaddress22,
"Unable to add second secondary IP to second NIC")
ipaddress31 = NIC.addIp(
self.apiclient,
id=vms1_nic2_id
)
ipaddress32 = NIC.addIp(
self.apiclient,
id=vms1_nic2_id
)
self.assertIsNotNone(
ipaddress31,
"Unable to add first secondary IP to third NIC")
self.assertIsNotNone(
ipaddress32,
"Unable to add second secondary IP to third NIC")
self.verify_network_rules(self.virtual_machine1.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_05_stop_start_vm_with_multiple_nic(self):
""" Stop and Start a VM with Multple NIC
Steps:
# 1) Create a Vm with multiple NIC's
# 2) Configure secondary IP's on the VM
# 3) Try to stop/start the VM
# 4) Ping the IP's of the vm
# 5) Remove Secondary IP from one of the NIC
:return:
"""
ipaddress1 = NIC.addIp(
self.apiclient,
id=self.virtual_machine2.nic[0].id
)
ipaddress2 = NIC.addIp(
self.apiclient,
id=self.virtual_machine2.nic[1].id
)
# Stop the VM with multiple NIC's
self.virtual_machine2.stop(self.apiclient)
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id
)
self.assertEqual(
virtual_machine[0]['state'], 'Stopped',
"Could not stop the VM with multiple NIC's")
if virtual_machine[0]['state'] == 'Stopped':
# If stopped then try to start the VM
self.virtual_machine2.start(self.apiclient)
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id
)
self.assertEqual(
virtual_machine[0]['state'], 'Running',
"Could not start the VM with multiple NIC's")
self.verify_network_rules(self.virtual_machine2.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_06_migrate_vm_with_multiple_nic(self):
""" Migrate a VM with Multple NIC
Steps:
# 1) Create a Vm with multiple NIC's
# 2) Configure secondary IP's on the VM
# 3) Try to stop/start the VM
# 4) Ping the IP's of the vm
:return:
"""
# Skipping adding Secondary IP to NIC since its already
# done in the previous test cases
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine1.id
)
old_host_id = virtual_machine[0]['hostid']
try:
hosts = Host.list(
self.apiclient,
virtualmachineid=self.virtual_machine1.id,
listall=True)
self.assertEqual(
validateList(hosts)[0],
PASS,
"hosts list validation failed")
# Get a host which is not already assigned to VM
for host in hosts:
if host.id == old_host_id:
continue
else:
host_id = host.id
break
self.virtual_machine1.migrate(self.apiclient, host_id)
except Exception as e:
self.fail("Exception occured: %s" % e)
# List the vm again
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine1.id)
new_host_id = virtual_machine[0]['hostid']
self.assertNotEqual(
old_host_id, new_host_id,
"Migration of VM to new host failed"
)
self.verify_network_rules(self.virtual_machine1.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_07_remove_secondary_ip_from_nic(self):
""" Remove secondary IP from any NIC
Steps:
# 1) Navigate to Instances
# 2) Select any vm
# 3) NIC's ->Edit secondary IP's->Release IP
# 4) The secondary IP should be successfully removed
"""
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id)
# Check which NIC is having secondary IP
secondary_ips = virtual_machine[0].nic[1].secondaryip
for secondary_ip in secondary_ips:
NIC.removeIp(self.apiclient, ipaddressid=secondary_ip['id'])
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id
)
self.assertFalse(
virtual_machine[0].nic[1].secondaryip,
'Failed to remove secondary IP')
self.verify_network_rules(self.virtual_machine2.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_08_remove_nic_from_vm(self):
""" Remove NIC from VM
Steps:
# 1) Navigate to Instances->select any vm->NIC's->NIC 2
# ->Click on "X" button to remove the second NIC
# 2) Remove other NIC's as well from the VM
# 3) All the NIC's should be successfully removed from the VM
:return:
"""
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id)
for nic in virtual_machine[0].nic:
if nic.isdefault:
continue
self.virtual_machine2.remove_nic(self.apiclient, nic.id)
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine2.id)
self.assertEqual(
len(virtual_machine[0].nic), 1,
"Failed to remove all the nics from the virtual machine")
self.verify_network_rules(self.virtual_machine2.id)
@attr(tags=["advancedsg"], required_hardware="false")
def test_09_reboot_vm_with_multiple_nic(self):
""" Reboot a VM with Multple NIC
Steps:
# 1) Create a Vm with multiple NIC's
# 2) Configure secondary IP's on the VM
# 3) Try to reboot the VM
# 4) Ping the IP's of the vm
:return:
"""
# Skipping adding Secondary IP to NIC since its already
# done in the previous test cases
virtual_machine = VirtualMachine.list(
self.apiclient,
id=self.virtual_machine1.id
)
try:
self.virtual_machine1.reboot(self.apiclient)
except Exception as e:
self.fail("Exception occured: %s" % e)
self.verify_network_rules(self.virtual_machine1.id)
|
GabrielBrascher/cloudstack
|
test/integration/component/test_multiple_nic_support.py
|
Python
|
apache-2.0
| 24,109
| 0.00141
|
def test_gen_generate_returns_generated_value():
from papylon.gen import Gen
def gen():
while True:
yield 1
sut = Gen(gen)
actual = sut.generate()
assert actual == 1
def test_such_that_returns_new_ranged_gen_instance():
from papylon.gen import choose
gen = choose(-20, 20)
new_gen = gen.such_that(lambda x: 0 <= x <= 20)
actual = new_gen.generate()
assert 0 <= actual <= 20
def test_such_that_returns_no_hit_gen_and_raise_stop_generation_when_generate_called():
from papylon.gen import choose, StopGeneration
gen = choose(-30, 30)
new_gen = gen.such_that(lambda x: 31 <= x)
try:
new_gen.generate()
except StopGeneration:
assert True
return
assert False
def test_when_one_of_takes_a_gen_list_then_returns_one_of_the_gen_instance_in_the_list():
from papylon.gen import one_of, constant
sut = one_of(list(map(constant, [1, 4, 9])))
actual = sut.generate()
assert actual in [1, 4, 9]
def test_when_choose_takes_a_string_argument_as_min_value_then_raises_type_error():
from papylon.gen import choose
try:
choose("1", 2)
except TypeError:
assert True
return
assert False
def test_when_choose_takes_a_list_argument_as_max_value_then_raises_type_error():
from papylon.gen import choose
try:
choose(1, [2])
except TypeError:
assert True
return
assert False
def test_when_choose_takes_arguments_where_min_value_is_greater_than_max_value_then_raises_value_error():
from papylon.gen import choose
try:
choose(3, 2.0)
except ValueError:
assert True
return
assert False
def test_when_choose_takes_arguments_where_min_value_is_equal_to_max_value_then_raises_value_error():
from papylon.gen import choose
try:
choose(-1, -1)
except ValueError:
assert True
return
assert False
def test_when_choose_takes_arguments_where_min_value_is_float_then_returns_gen_instance_which_generates_float_value():
from papylon.gen import choose
sut = choose(-2.0, 2)
actual = sut.generate()
assert type(actual) == float
assert -2.0 <= actual <= 2.0
def test_when_choose_takes_arguments_where_max_value_is_float_then_returns_gen_instance_which_generates_float_value():
from papylon.gen import choose
sut = choose(-5, 10.0)
actual = sut.generate()
assert type(actual) == float
assert -5.0 <= actual <= 10.0
def test_when_choose_takes_arguments_both_of_which_are_int_then_returns_gen_instance_which_generates_int_value():
from papylon.gen import choose
sut = choose(-50, 50)
actual = sut.generate()
assert type(actual) == int
assert -50 <= actual <= 50
def test_when_frequency_runs_10000_times_then_its_choices_should_be_satisfied_with_accuracy_ge94_percents():
from papylon.gen import frequency, constant
weighted_gens = [(5, constant(1)), (3, constant(10)), (2, constant(100))]
count_1, count_10, count_100 = 0, 0, 0
parameter = 10000
for i in range(parameter):
sut = frequency(weighted_gens)
value = sut.generate()
if value == 1:
count_1 += 1
elif value == 10:
count_10 += 1
elif value == 100:
count_100 += 1
else:
assert False
def assert_frequency(actual, param, weight, accuracy):
return actual >= param * weight * accuracy
assuring_accuracy = 0.94
assert assert_frequency(count_1, parameter, 0.5, assuring_accuracy)
assert assert_frequency(count_10, parameter, 0.3, assuring_accuracy)
assert assert_frequency(count_100, parameter, 0.2, assuring_accuracy)
def test_map_should_create_new_gen_instance_with_mapper_function():
from papylon.gen import choose
gen = choose(1, 10)
new_gen = gen.map(lambda x: x * 2)
generated_by_new_gen = new_gen.generate()
assert type(generated_by_new_gen) == int
assert generated_by_new_gen in range(2, 21, 2)
generated_by_gen = gen.generate()
assert type(generated_by_gen) == int
assert generated_by_gen in range(1, 11)
def test_given_a_value_v_when_constant_v_then_returns_gen_instance_which_generates_only_v():
from papylon.gen import constant
value = 6
sut = constant(value)
count = 0
trial = 10
for i in range(trial):
result = sut.generate()
if result == value:
count += 1
assert count == trial
|
Gab-km/papylon
|
tests/test_gen.py
|
Python
|
mit
| 4,520
| 0.002434
|
from __future__ import print_function
import os
import time
import subprocess
from colorama import Fore, Style
from watchdog.events import (
FileSystemEventHandler, FileModifiedEvent, FileCreatedEvent,
FileMovedEvent, FileDeletedEvent)
from watchdog.observers import Observer
from watchdog.observers.polling import PollingObserver
from .spooler import EventSpooler
EVENT_NAMES = {
FileModifiedEvent: 'modified',
FileCreatedEvent: 'created',
FileMovedEvent: 'moved',
FileDeletedEvent: 'deleted',
}
WATCHED_EVENTS = list(EVENT_NAMES)
DEFAULT_EXTENSIONS = ['.py']
CLEAR_COMMAND = 'cls' if os.name == 'nt' else 'clear'
BEEP_CHARACTER = '\a'
STYLE_NORMAL = Fore.RESET
STYLE_HIGHLIGHT = Fore.CYAN + Style.NORMAL + Style.BRIGHT
class ChangeHandler(FileSystemEventHandler):
"""Listens for changes to files and re-runs tests after each change."""
def __init__(self, auto_clear=False, beep_on_failure=True,
onpass=None, onfail=None, beforerun=None, extensions=[],
args=None, spool=True, verbose=False, quiet=False):
super(ChangeHandler, self).__init__()
self.auto_clear = auto_clear
self.beep_on_failure = beep_on_failure
self.onpass = onpass
self.onfail = onfail
self.beforerun = beforerun
self.extensions = extensions or DEFAULT_EXTENSIONS
self.args = args or []
self.spooler = None
if spool:
self.spooler = EventSpooler(0.2, self.on_queued_events)
self.verbose = verbose
self.quiet = quiet
def on_queued_events(self, events):
summary = []
for event in events:
paths = [event.src_path]
if isinstance(event, FileMovedEvent):
paths.append(event.dest_path)
event_name = EVENT_NAMES[type(event)]
paths = tuple(map(os.path.relpath, paths))
if any(os.path.splitext(path)[1].lower() in self.extensions
for path in paths):
summary.append((event_name, paths))
if summary:
self.run(sorted(set(summary)))
def on_any_event(self, event):
if isinstance(event, tuple(WATCHED_EVENTS)):
if self.spooler is not None:
self.spooler.enqueue(event)
else:
self.on_queued_events([event])
def run(self, summary=None):
"""Called when a file is changed to re-run the tests with py.test."""
if self.auto_clear:
subprocess.call(CLEAR_COMMAND, shell=True)
command = ' '.join(['py.test'] + self.args)
if summary and not self.auto_clear:
print()
if not self.quiet:
highlight = lambda arg: STYLE_HIGHLIGHT + arg + STYLE_NORMAL
msg = 'Running: {}'.format(highlight(command))
if summary:
if self.verbose:
file_lines = [' {:9s}'.format(event_name + ':') + ' ' +
' -> '.join(map(highlight, paths))
for event_name, paths in summary]
msg = ('Changes detected in files:\n{}\n\nRerunning: {}'
.format('\n'.join(file_lines), highlight(command)))
else:
msg = ('Changes detected, rerunning: {}'
.format(highlight(command)))
print(STYLE_NORMAL + msg + Fore.RESET + Style.NORMAL)
if self.beforerun:
os.system(self.beforerun)
# subprocess.mswindows only exists on Python 2; use os.name for Python 3 compatibility.
exit_code = subprocess.call(['py.test'] + self.args,
shell=(os.name == 'nt'))
passed = exit_code == 0
# Beep if failed
if not passed and self.beep_on_failure:
print(BEEP_CHARACTER, end='')
# Run custom commands
if passed and self.onpass:
os.system(self.onpass)
elif not passed and self.onfail:
os.system(self.onfail)
def watch(directories=[], ignore=[], auto_clear=False, beep_on_failure=True,
onpass=None, onfail=None, beforerun=None, poll=False, extensions=[],
args=[], spool=True, verbose=False, quiet=False):
if not directories:
directories = ['.']
directories = [os.path.abspath(directory) for directory in directories]
for directory in directories:
if not os.path.isdir(directory):
raise ValueError('Directory not found: ' + directory)
if ignore:
recursive_dirs, non_recursive_dirs = split_recursive(
directories, ignore)
else:
recursive_dirs = directories
non_recursive_dirs = []
# Initial run
event_handler = ChangeHandler(auto_clear, beep_on_failure,
onpass, onfail, beforerun, extensions, args,
spool, verbose, quiet)
event_handler.run()
# Setup watchdog
observer = PollingObserver() if poll else Observer()
for directory in recursive_dirs:
observer.schedule(event_handler, path=directory, recursive=True)
for directory in non_recursive_dirs:
observer.schedule(event_handler, path=directory, recursive=False)
# Watch and run tests until interrupted by user
try:
observer.start()
while True:
time.sleep(1)
observer.join()
except KeyboardInterrupt:
observer.stop()
def samepath(left, right):
return (os.path.abspath(os.path.normcase(left)) ==
os.path.abspath(os.path.normcase(right)))
def split_recursive(directories, ignore):
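# Split the watched directories into those that can be observed recursively and
# those containing ignored subdirectories, which are watched non-recursively
# (their non-ignored children are added back individually).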
non_recursive_dirs = []
recursive_dirs = []
for directory in directories:
subdirs = [os.path.join(directory, d)
for d in os.listdir(directory)
if os.path.isdir(os.path.join(directory, d))]
filtered = [subdir for subdir in subdirs
if not any(samepath(os.path.join(directory, d), subdir)
for d in ignore)]
if len(subdirs) == len(filtered):
recursive_dirs.append(directory)
else:
non_recursive_dirs.append(directory)
recursive_dirs.extend(filtered)
return sorted(set(recursive_dirs)), sorted(set(non_recursive_dirs))
|
ColtonProvias/pytest-watch
|
pytest_watch/watcher.py
|
Python
|
mit
| 6,256
| 0.00016
|