| repo_name (string, 5-92 chars) | path (string, 4-221 chars) | copies (19 classes) | size (string, 4-6 chars) | content (string, 766-896k chars) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 32-997) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5-13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
whiterabbitengine/fifeplusplus
|
tests/extension_tests/modelview_tests.py
|
1
|
2883
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from extension_test_utils import *
from loaders import *
import time
class TestModelView(unittest.TestCase):
def setUp(self):
self.engine = getEngine()
self.model = self.engine.getModel()
self.metamodel = self.model.getMetaModel()
loadMapFile("content/maps/new_official_map.xml", self.engine)
self.map = self.model.getMaps("id", "OfficialMap")[0]
self.elevation = self.map.getElevations("id", "OfficialMapElevation")[0]
self.layer = self.elevation.getLayers("id", "OfficialMapTileLayer")[0]
imgid = self.layer.getInstances()[0].getObject().get2dGfxVisual().getStaticImageIndexByAngle(0)
img = self.engine.getImagePool().getImage(imgid)
self.screen_cell_w = img.getWidth()
self.screen_cell_h = img.getHeight()
self.camloc = fife.Location(self.layer)
self.camloc.setLayerCoordinates(fife.ModelCoordinate(5,0))
def tearDown(self):
del self.engine
def testModelView(self):
cam = self.engine.getView().addCamera()
cam.setCellImageDimensions(self.screen_cell_w, self.screen_cell_h)
cam.setRotation(45)
cam.setTilt(40)
cam.setLocation(self.camloc)
rb = self.engine.getRenderBackend()
viewport = fife.Rect(0, 0, rb.getScreenWidth(), rb.getScreenHeight())
cam.setViewPort(viewport)
self.engine.getView().resetRenderers()
self.engine.initializePumping()
for count in range(10):
self.engine.pump()
#time.sleep(0.03)
c = self.camloc.getExactLayerCoordinates()
c.x += 0.50
c = self.camloc.setExactLayerCoordinates(c)
cam.setLocation(self.camloc)
self.engine.finalizePumping()
# Removed from the test set for now due to the switch to the new directory structure (content moved to clients).
# To be reconsidered whether this test should be taken back into use.
TEST_CLASSES = []
if __name__ == '__main__':
unittest.main()
|
lgpl-2.1
| -7,791,245,036,448,926,000
| 33.592593
| 97
| 0.673257
| false
| 3.302405
| true
| false
| false
|
sergei-maertens/bfeu.net
|
src/bfeu/conf/settings.py
|
1
|
8042
|
import os
import django.conf.global_settings as DEFAULT_SETTINGS
# Automatically figure out the ROOT_DIR and PROJECT_DIR.
DJANGO_PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
ROOT_DIR = os.path.abspath(os.path.join(DJANGO_PROJECT_DIR, os.path.pardir, os.path.pardir))
#
# Standard Django settings.
#
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Amsterdam'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = os.path.join(ROOT_DIR, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(ROOT_DIR, 'static')
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(DJANGO_PROJECT_DIR, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'u_$=j1yn4iil1e6y358std&4h1t!57m9ddwr#4069_!4%2p$su'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# External middleware.
'maintenancemode.middleware.MaintenanceModeMiddleware',
'axes.middleware.FailedLoginMiddleware'
]
ROOT_URLCONF = 'bfeu.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'bfeu.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(DJANGO_PROJECT_DIR, 'templates'),
)
FIXTURE_DIRS = (
os.path.join(DJANGO_PROJECT_DIR, 'fixtures'),
)
INSTALLED_APPS = [
# Note: contenttypes should be first, see Django ticket #10827
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Optional applications.
'django.contrib.admin',
#'django.contrib.humanize',
#'django.contrib.sitemaps',
# External applications.
'axes',
'south',
'compressor',
# Project applications.
'bfeu.tournaments'
]
LOGGING_DIR = os.path.join(ROOT_DIR, 'log')
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s %(levelname)s %(name)s %(module)s %(process)d %(thread)d %(message)s'
},
'timestamped': {
'format': '%(asctime)s %(levelname)s %(name)s %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
'performance': {
'format': '%(asctime)s %(process)d | %(thread)d | %(message)s',
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'timestamped'
},
'django': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(LOGGING_DIR, 'django.log'),
'formatter': 'verbose'
},
'project': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(LOGGING_DIR, 'bfeu.log'),
'formatter': 'verbose'
},
'performance': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': os.path.join(LOGGING_DIR, 'performance.log'),
'formatter': 'performance'
},
},
'loggers': {
'bfeu': {
'handlers': ['project'],
'level': 'INFO',
'propagate': True,
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
#
# Additional Django settings
# Enable these when using HTTPS
#
SESSION_COOKIE_NAME = 'bfeusessionid'
# SESSION_COOKIE_SECURE = True
# SESSION_COOKIE_HTTPONLY = True
# CSRF_COOKIE_SECURE = True
# X_FRAME_OPTIONS = 'DENY'
#
# Django-axes
#
AXES_LOGIN_FAILURE_LIMIT = 3 # Default: 3
AXES_LOCK_OUT_AT_FAILURE = True # Default: True
AXES_USE_USER_AGENT = False # Default: False
|
mit
| -1,098,656,257,339,263,600
| 30.912698
| 127
| 0.642502
| false
| 3.653794
| false
| false
| false
|
wmttom/rehichao
|
rehichao.py
|
1
|
1326
|
# -*- coding: utf-8 -*-
from intstr import IntStr
from redis import StrictRedis
redis_keyer = IntStr(
'!"#$&()+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz{|}~'
)
REDIS_KEY_ID = 'RedisKeyId'
REDIS_KEY = 'RedisKey'
REDIS_ID_KEY = 'RedisIdKey'
_EXIST = set()
class RedisKey(object):
def __init__(self, REDIS_CONFIG):
self.redis = StrictRedis(*REDIS_CONFIG)
def __getattr__(self, attr):
def _(name=''):
return self(attr, name)
return _
def __call__(self, attr, name=''):
key = attr+name
redis = self.redis
if key in _EXIST:
print 'REDIS KEY IS ALREADY DEFINED %s !!!'%key
_EXIST.add(key)
if redis:
_key = redis.hget(REDIS_KEY, key)
if _key is None:
id = redis.incr(REDIS_KEY_ID)
_key = redis_keyer.encode(id)
if name and "%" in name:
_key = _key+"'"+name
p = redis.pipeline()
p.hset(REDIS_KEY, key, _key)
p.hset(REDIS_ID_KEY, _key, key)
p.execute()
return self.redis, _key
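# Usage sketch (editor's illustration; the connection tuple below is
# hypothetical, not from the original file). RedisKey maps long, readable key
# names to short generated keys, kept in the REDIS_KEY / REDIS_ID_KEY lookup
# hashes, e.g.:
#     keys = RedisKey(('localhost', 6379))
#     redis, short_key = keys.user_profile('42')  # registers "user_profile42"
# The call returns the StrictRedis client together with the short key.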
if __name__ == "__main__":
import sys
if sys.getdefaultencoding() == 'ascii':
reload(sys)
sys.setdefaultencoding('utf-8')
|
apache-2.0
| 6,936,459,794,541,497,000
| 25.52
| 90
| 0.520362
| false
| 3.444156
| false
| false
| false
|
terrai/rastercube
|
rastercube/jgrid/utils.py
|
1
|
10119
|
"""
Utility functions related to jGrid2
"""
import gdal
import numpy as np
import numpy.ma as ma
import rastercube.imutils as imutils
import rastercube.gdal_utils as gdal_utils
def fracs_for_poly_bbox_xy(header, polygon_xy):
"""
Returns fractions covered by the given polygon. This is based on the
polygon's bounding box.
"""
assert np.all([header.in_bounds_xy(p) for p in polygon_xy]), \
"Polygon not contained in jgrid"
xy_from, xy_to = polygon_xy.min(axis=0), polygon_xy.max(axis=0)
return header.fracs_for_rect_xy(xy_from, xy_to)
def fracs_for_poly_bbox_latlng(header, polygon_latlng):
poly_xy = np.array([header.latlng2xy(p) for p in polygon_latlng])
return fracs_for_poly_bbox_xy(header, poly_xy)
def load_poly_xy_from_jgrid(header, polygon_xy, **kwargs):
"""
Given a header and a polygon (*assumed* to be fully contained in the
jgrid), returns the jgrid data for the polygon's bounding box, a rasterized
polygon mask of the same shape, and the bounding box origin (xy_from); only
pixels inside the polygon are set in the mask.
"""
assert np.all([header.in_bounds_xy(p) for p in polygon_xy]), \
"Polygon not contained in jgrid"
xy_from, xy_to = polygon_xy.min(axis=0), polygon_xy.max(axis=0)
ndvi_data = header.load_slice_xy(xy_from, xy_to, **kwargs)
poly_mask = imutils.rasterize_poly(polygon_xy - xy_from, ndvi_data.shape)
return ndvi_data, poly_mask, xy_from
def load_poly_latlng_from_jgrid(header, polygon_latlng, **kwargs):
"""
Like `load_poly_xy_from_jgrid`, but the polygon is given in latlng
"""
poly_xy = np.array([header.latlng2xy(p) for p in polygon_latlng])
return load_poly_xy_from_jgrid(header, poly_xy, **kwargs)
def load_poly_latlng_from_multi_jgrids(headers, polygon, **kwargs):
"""
Given a set of jgrid headers, loads the given polygon from all grids
and reprojects each of them onto the first one.
Returns:
xy_from: A single xy_from
Followed by a list of data/mask pairs :
data0, mask0, data1, mask1, data2, mask2, ...
"""
header0 = headers[0]
data0, mask0, xy_from0 = load_poly_latlng_from_jgrid(header0, polygon,
**kwargs)
retval = [xy_from0, data0, mask0]
for _h in headers[1:]:
_data, _mask, _xy_from = load_poly_latlng_from_jgrid(_h, polygon,
**kwargs)
# only reproject if needed
if (not _h.spatialref.IsSame(header0.spatialref)) or \
(_h.geot != header0.geot):
_data, _mask = reproject_jgrid_on_jgrid(
header0, xy_from0, data0.shape,
_h, _xy_from, _data, _mask
)
retval.append(_data)
retval.append(_mask)
return retval
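# Unpacking sketch (editor's illustration; ndvi_header and qa_header are
# hypothetical). With two headers, the flat return value unpacks as:
#     xy_from, data0, mask0, data1, mask1 = \
#         load_poly_latlng_from_multi_jgrids([ndvi_header, qa_header], polygon)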
def poly_latlng_for_frac(header, frac_num):
"""
Returns the latlng polygon corresponding to a given fraction
"""
poly = [
header.xy2latlng((header.x_start(frac_num),
header.y_start(frac_num))),
header.xy2latlng((header.x_end(frac_num),
header.y_start(frac_num))),
header.xy2latlng((header.x_end(frac_num),
header.y_end(frac_num))),
header.xy2latlng((header.x_start(frac_num),
header.y_end(frac_num)))
]
return np.array(poly)
def headers_are_same_geogrid(header1, header2):
"""
Given two headers, verify that they are in the same projection with the
same geotransform and the same fraction sizes
"""
return header1.spatialref.IsSame(header2.spatialref) and \
(header1.geot == header2.geot) and \
header1.width == header2.width and \
header1.height == header2.height and \
header1.frac_width == header2.frac_width and \
header1.frac_height == header2.frac_height
def load_frac_from_multi_jgrids(headers, frac_num, **kwargs):
"""
Given a set of jgrid headers and a frac_num in headers[0], loads the
corresponding area from all headers
Returns:
xy_from: A single xy_from
Followed by a list of data/mask pairs :
data0, mask0, data1, mask1, data2, mask2, ...
"""
header0 = headers[0]
xy_from0 = (header0.x_start(frac_num), header0.y_start(frac_num))
data0 = header0.load_frac_by_num(frac_num, **kwargs)
mask0 = np.ones((data0.shape[0], data0.shape[1]), dtype=np.bool)
frac_poly = poly_latlng_for_frac(header0, frac_num)
retval = [xy_from0, data0, mask0]
for _h in headers[1:]:
if headers_are_same_geogrid(header0, _h):
print 'Headers in same geogrid'
_data = _h.load_frac_by_num(frac_num, **kwargs)
_mask = np.ones((_data.shape[0], _data.shape[1]), dtype=np.bool)
else:
_data, _mask, _xy_from = load_poly_latlng_from_jgrid(
_h, frac_poly, **kwargs)
_data, _mask = reproject_jgrid_on_jgrid(
header0, xy_from0, data0.shape,
_h, _xy_from, _data, _mask
)
retval.append(_data)
retval.append(_mask)
return retval
def latlng_for_grid(header, xy_from, shape):
"""
For each point in the grid, computes its latlng coordinates, returning
a (shape[0], shape[1], 2) array
"""
yx = np.indices(shape)
yx[0] += xy_from[1]
yx[1] += xy_from[0]
latlng = [header.xy2latlng((x, y))
for y, x in zip(yx[0].reshape(-1), yx[1].reshape(-1))]
return np.array(latlng).reshape(shape[0], shape[1], 2)
def slice_and_reproject_to_grid(header, xy_from, grid_shape, src_ds,
interpolation='near'):
"""
Helper function which takes a jgrid slice (so Header, xy_from, grid_shape)
and a GDAL dataset and slice/reprojects the GDAL dataset to the jgrid
slice.
This is typically useful to reproject some arbitrary TIFF file on some
part of the NDVI worldgrid.
Args:
header: A jgrid3.Header
xy_from: the (x, y) at which the subgrid starts in the given header
grid_shape: the (height, width) of the subgrid
src_ds: The source GDAL dataset to reproject
interpolation: The resampling mode: one of 'near', 'mode', 'average'
Returns:
A masked array containing the reprojected values
"""
# https://jgomezdans.github.io/gdal_notes/reprojection.html
# http://www.gdal.org/gdalwarper_8h.html#ad36462e8d5d34642df7f9ea1cfc2fec4
src_wkt = src_ds.GetProjectionRef()
nbands = src_ds.RasterCount
src_dtype = src_ds.GetRasterBand(1).DataType
# print 'src dtype : %s' % gdal.GetDataTypeName(src_dtype)
mem_drv = gdal.GetDriverByName('MEM')
dst_ds = mem_drv.Create('', grid_shape[1], grid_shape[0], nbands,
src_dtype)
dst_geo = header.geot_for_xyfrom(xy_from)
dst_ds.SetGeoTransform(dst_geo)
dst_ds.SetProjection(header.spatialref.ExportToWkt())
# NoData handling when using ReprojectImage with a MEM target ds is
# a bit tricky. See those discussions :
# https://trac.osgeo.org/gdal/ticket/6404
# http://gis.stackexchange.com/q/158503
# We have to fill each band with the nodata value before doing the
# reprojectimage because the bands are initialized with 0
ndv = None
for i in range(1, nbands + 1):
src_b = src_ds.GetRasterBand(i)
if ndv is not None and not np.isnan(ndv):
assert src_b.GetNoDataValue() == ndv, \
"All bands of the source dataset should have the same NODATA"
else:
ndv = src_b.GetNoDataValue()
dst_b = dst_ds.GetRasterBand(i)
if ndv is not None:
dst_b.SetNoDataValue(ndv)
dst_b.Fill(ndv)
if interpolation == 'near':
gdal_mode = gdal.GRA_NearestNeighbour
elif interpolation == 'mode':
gdal_mode = gdal.GRA_Mode
elif interpolation == 'average':
gdal_mode = gdal.GRA_Average
else:
raise ValueError("Invalid interpolation mode %s" % interpolation)
res = gdal.ReprojectImage(
src_ds,
dst_ds,
src_ds.GetProjectionRef(),
dst_ds.GetProjectionRef(),
gdal_mode
)
assert res == 0, 'Error reprojecting, res=%d' % res
dst_arr = dst_ds.ReadAsArray()
# GDAL ReadAsArray returns (bands, height, width) but we expect
# (height, width, bands)
if len(dst_arr.shape) == 3:
dst_arr = dst_arr.transpose(1, 2, 0)
# TODO: This assumes that the no data value is the same for all bands
if ndv is not None:
dst_arr = ma.masked_where(dst_arr == ndv, dst_arr)
else:
dst_arr = ma.asarray(dst_arr)
return dst_arr
def gdal_ds_from_jgrid_slice(header, xy_from, data):
"""
Returns a GDAL in-memory dataset that maps the provided jgrid slice.
Note that the dataset only keeps a reference to the data array.
"""
ds = gdal_utils.gdal_ds_from_array(data)
ds.SetGeoTransform(header.geot_for_xyfrom(xy_from))
ds.SetProjection(header.spatialref.ExportToWkt())
return ds
def reproject_jgrid_on_jgrid(target_header, target_xy_from, target_shape,
src_header, src_xy_from, src_data, src_mask):
"""
Reproject a source jgrid on a target jgrid
"""
data_ds = gdal_ds_from_jgrid_slice(src_header, src_xy_from, src_data)
# This requires a mask copy because GDAL doesn't support bool
# Also, GDAL ignores 0 during reproject, so add 1 to the mask here
src_mask = src_mask.astype(np.uint8) + 1
mask_ds = gdal_ds_from_jgrid_slice(src_header, src_xy_from, src_mask)
new_data = slice_and_reproject_to_grid(target_header, target_xy_from,
target_shape, data_ds)
new_mask = slice_and_reproject_to_grid(target_header, target_xy_from,
target_shape, mask_ds)
# recover the boolean mask
new_mask = new_mask > 1
return new_data, new_mask
|
mit
| 5,451,843,363,966,093,000
| 35.139286
| 78
| 0.615278
| false
| 3.357332
| false
| false
| false
|
gfyoung/pandas
|
pandas/tests/strings/test_find_replace.py
|
1
|
20811
|
from datetime import datetime
import re
import numpy as np
import pytest
import pandas as pd
from pandas import Index, Series, _testing as tm
def test_contains():
values = np.array(
["foo", np.nan, "fooommm__foo", "mmm_", "foommm[_]+bar"], dtype=np.object_
)
values = Series(values)
pat = "mmm[_]+"
result = values.str.contains(pat)
expected = Series(np.array([False, np.nan, True, True, False], dtype=np.object_))
tm.assert_series_equal(result, expected)
result = values.str.contains(pat, regex=False)
expected = Series(np.array([False, np.nan, False, False, True], dtype=np.object_))
tm.assert_series_equal(result, expected)
values = Series(np.array(["foo", "xyz", "fooommm__foo", "mmm_"], dtype=object))
result = values.str.contains(pat)
expected = Series(np.array([False, False, True, True]))
assert result.dtype == np.bool_
tm.assert_series_equal(result, expected)
# case insensitive using regex
values = Series(np.array(["Foo", "xYz", "fOOomMm__fOo", "MMM_"], dtype=object))
result = values.str.contains("FOO|mmm", case=False)
expected = Series(np.array([True, False, True, True]))
tm.assert_series_equal(result, expected)
# case insensitive without regex
result = Series(values).str.contains("foo", regex=False, case=False)
expected = Series(np.array([True, False, True, False]))
tm.assert_series_equal(result, expected)
# mixed
mixed = Series(
np.array(
["a", np.nan, "b", True, datetime.today(), "foo", None, 1, 2.0],
dtype=object,
)
)
rs = mixed.str.contains("o")
xp = Series(
np.array(
[False, np.nan, False, np.nan, np.nan, True, np.nan, np.nan, np.nan],
dtype=np.object_,
)
)
tm.assert_series_equal(rs, xp)
rs = mixed.str.contains("o")
xp = Series([False, np.nan, False, np.nan, np.nan, True, np.nan, np.nan, np.nan])
assert isinstance(rs, Series)
tm.assert_series_equal(rs, xp)
# unicode
values = Series(np.array(["foo", np.nan, "fooommm__foo", "mmm_"], dtype=np.object_))
pat = "mmm[_]+"
result = values.str.contains(pat)
expected = Series(np.array([False, np.nan, True, True], dtype=np.object_))
tm.assert_series_equal(result, expected)
result = values.str.contains(pat, na=False)
expected = Series(np.array([False, False, True, True]))
tm.assert_series_equal(result, expected)
values = Series(np.array(["foo", "xyz", "fooommm__foo", "mmm_"], dtype=np.object_))
result = values.str.contains(pat)
expected = Series(np.array([False, False, True, True]))
assert result.dtype == np.bool_
tm.assert_series_equal(result, expected)
def test_contains_for_object_category():
# gh 22158
# na for category
values = Series(["a", "b", "c", "a", np.nan], dtype="category")
result = values.str.contains("a", na=True)
expected = Series([True, False, False, True, True])
tm.assert_series_equal(result, expected)
result = values.str.contains("a", na=False)
expected = Series([True, False, False, True, False])
tm.assert_series_equal(result, expected)
# na for objects
values = Series(["a", "b", "c", "a", np.nan])
result = values.str.contains("a", na=True)
expected = Series([True, False, False, True, True])
tm.assert_series_equal(result, expected)
result = values.str.contains("a", na=False)
expected = Series([True, False, False, True, False])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("dtype", [None, "category"])
@pytest.mark.parametrize("null_value", [None, np.nan, pd.NA])
@pytest.mark.parametrize("na", [True, False])
def test_startswith(dtype, null_value, na):
# add category dtype parametrizations for GH-36241
values = Series(
["om", null_value, "foo_nom", "nom", "bar_foo", null_value, "foo"],
dtype=dtype,
)
result = values.str.startswith("foo")
exp = Series([False, np.nan, True, False, False, np.nan, True])
tm.assert_series_equal(result, exp)
result = values.str.startswith("foo", na=na)
exp = Series([False, na, True, False, False, na, True])
tm.assert_series_equal(result, exp)
# mixed
mixed = np.array(
["a", np.nan, "b", True, datetime.today(), "foo", None, 1, 2.0],
dtype=np.object_,
)
rs = Series(mixed).str.startswith("f")
xp = Series([False, np.nan, False, np.nan, np.nan, True, np.nan, np.nan, np.nan])
tm.assert_series_equal(rs, xp)
@pytest.mark.parametrize("dtype", [None, "category"])
@pytest.mark.parametrize("null_value", [None, np.nan, pd.NA])
@pytest.mark.parametrize("na", [True, False])
def test_endswith(dtype, null_value, na):
# add category dtype parametrizations for GH-36241
values = Series(
["om", null_value, "foo_nom", "nom", "bar_foo", null_value, "foo"],
dtype=dtype,
)
result = values.str.endswith("foo")
exp = Series([False, np.nan, False, False, True, np.nan, True])
tm.assert_series_equal(result, exp)
result = values.str.endswith("foo", na=na)
exp = Series([False, na, False, False, True, na, True])
tm.assert_series_equal(result, exp)
# mixed
mixed = np.array(
["a", np.nan, "b", True, datetime.today(), "foo", None, 1, 2.0],
dtype=object,
)
rs = Series(mixed).str.endswith("f")
xp = Series([False, np.nan, False, np.nan, np.nan, False, np.nan, np.nan, np.nan])
tm.assert_series_equal(rs, xp)
def test_replace():
values = Series(["fooBAD__barBAD", np.nan])
result = values.str.replace("BAD[_]*", "", regex=True)
exp = Series(["foobar", np.nan])
tm.assert_series_equal(result, exp)
result = values.str.replace("BAD[_]*", "", n=1, regex=True)
exp = Series(["foobarBAD", np.nan])
tm.assert_series_equal(result, exp)
# mixed
mixed = Series(
["aBAD", np.nan, "bBAD", True, datetime.today(), "fooBAD", None, 1, 2.0]
)
rs = Series(mixed).str.replace("BAD[_]*", "", regex=True)
xp = Series(["a", np.nan, "b", np.nan, np.nan, "foo", np.nan, np.nan, np.nan])
assert isinstance(rs, Series)
tm.assert_almost_equal(rs, xp)
# flags + unicode
values = Series([b"abcd,\xc3\xa0".decode("utf-8")])
exp = Series([b"abcd, \xc3\xa0".decode("utf-8")])
result = values.str.replace(r"(?<=\w),(?=\w)", ", ", flags=re.UNICODE, regex=True)
tm.assert_series_equal(result, exp)
# GH 13438
msg = "repl must be a string or callable"
for klass in (Series, Index):
for repl in (None, 3, {"a": "b"}):
for data in (["a", "b", None], ["a", "b", "c", "ad"]):
values = klass(data)
with pytest.raises(TypeError, match=msg):
values.str.replace("a", repl)
def test_replace_callable():
# GH 15055
values = Series(["fooBAD__barBAD", np.nan])
# test with callable
repl = lambda m: m.group(0).swapcase()
result = values.str.replace("[a-z][A-Z]{2}", repl, n=2, regex=True)
exp = Series(["foObaD__baRbaD", np.nan])
tm.assert_series_equal(result, exp)
# test with wrong number of arguments, raising an error
p_err = (
r"((takes)|(missing)) (?(2)from \d+ to )?\d+ "
r"(?(3)required )positional arguments?"
)
repl = lambda: None
with pytest.raises(TypeError, match=p_err):
values.str.replace("a", repl)
repl = lambda m, x: None
with pytest.raises(TypeError, match=p_err):
values.str.replace("a", repl)
repl = lambda m, x, y=None: None
with pytest.raises(TypeError, match=p_err):
values.str.replace("a", repl)
# test regex named groups
values = Series(["Foo Bar Baz", np.nan])
pat = r"(?P<first>\w+) (?P<middle>\w+) (?P<last>\w+)"
repl = lambda m: m.group("middle").swapcase()
result = values.str.replace(pat, repl, regex=True)
exp = Series(["bAR", np.nan])
tm.assert_series_equal(result, exp)
def test_replace_compiled_regex():
# GH 15446
values = Series(["fooBAD__barBAD", np.nan])
# test with compiled regex
pat = re.compile(r"BAD_*")
result = values.str.replace(pat, "", regex=True)
exp = Series(["foobar", np.nan])
tm.assert_series_equal(result, exp)
result = values.str.replace(pat, "", n=1, regex=True)
exp = Series(["foobarBAD", np.nan])
tm.assert_series_equal(result, exp)
# mixed
mixed = Series(
["aBAD", np.nan, "bBAD", True, datetime.today(), "fooBAD", None, 1, 2.0]
)
rs = Series(mixed).str.replace(pat, "", regex=True)
xp = Series(["a", np.nan, "b", np.nan, np.nan, "foo", np.nan, np.nan, np.nan])
assert isinstance(rs, Series)
tm.assert_almost_equal(rs, xp)
# flags + unicode
values = Series([b"abcd,\xc3\xa0".decode("utf-8")])
exp = Series([b"abcd, \xc3\xa0".decode("utf-8")])
pat = re.compile(r"(?<=\w),(?=\w)", flags=re.UNICODE)
result = values.str.replace(pat, ", ")
tm.assert_series_equal(result, exp)
# case and flags provided to str.replace will have no effect
# and will produce warnings
values = Series(["fooBAD__barBAD__bad", np.nan])
pat = re.compile(r"BAD_*")
with pytest.raises(ValueError, match="case and flags cannot be"):
result = values.str.replace(pat, "", flags=re.IGNORECASE)
with pytest.raises(ValueError, match="case and flags cannot be"):
result = values.str.replace(pat, "", case=False)
with pytest.raises(ValueError, match="case and flags cannot be"):
result = values.str.replace(pat, "", case=True)
# test with callable
values = Series(["fooBAD__barBAD", np.nan])
repl = lambda m: m.group(0).swapcase()
pat = re.compile("[a-z][A-Z]{2}")
result = values.str.replace(pat, repl, n=2)
exp = Series(["foObaD__baRbaD", np.nan])
tm.assert_series_equal(result, exp)
def test_replace_literal():
# GH16808 literal replace (regex=False vs regex=True)
values = Series(["f.o", "foo", np.nan])
exp = Series(["bao", "bao", np.nan])
result = values.str.replace("f.", "ba", regex=True)
tm.assert_series_equal(result, exp)
exp = Series(["bao", "foo", np.nan])
result = values.str.replace("f.", "ba", regex=False)
tm.assert_series_equal(result, exp)
# Cannot do a literal replace if given a callable repl or compiled
# pattern
callable_repl = lambda m: m.group(0).swapcase()
compiled_pat = re.compile("[a-z][A-Z]{2}")
msg = "Cannot use a callable replacement when regex=False"
with pytest.raises(ValueError, match=msg):
values.str.replace("abc", callable_repl, regex=False)
msg = "Cannot use a compiled regex as replacement pattern with regex=False"
with pytest.raises(ValueError, match=msg):
values.str.replace(compiled_pat, "", regex=False)
def test_match():
# New match behavior introduced in 0.13
values = Series(["fooBAD__barBAD", np.nan, "foo"])
result = values.str.match(".*(BAD[_]+).*(BAD)")
exp = Series([True, np.nan, False])
tm.assert_series_equal(result, exp)
values = Series(["fooBAD__barBAD", "BAD_BADleroybrown", np.nan, "foo"])
result = values.str.match(".*BAD[_]+.*BAD")
exp = Series([True, True, np.nan, False])
tm.assert_series_equal(result, exp)
# mixed
mixed = Series(
[
"aBAD_BAD",
np.nan,
"BAD_b_BAD",
True,
datetime.today(),
"foo",
None,
1,
2.0,
]
)
rs = Series(mixed).str.match(".*(BAD[_]+).*(BAD)")
xp = Series([True, np.nan, True, np.nan, np.nan, False, np.nan, np.nan, np.nan])
assert isinstance(rs, Series)
tm.assert_series_equal(rs, xp)
# na GH #6609
res = Series(["a", 0, np.nan]).str.match("a", na=False)
exp = Series([True, False, False])
tm.assert_series_equal(exp, res)
res = Series(["a", 0, np.nan]).str.match("a")
exp = Series([True, np.nan, np.nan])
tm.assert_series_equal(exp, res)
values = Series(["ab", "AB", "abc", "ABC"])
result = values.str.match("ab", case=False)
expected = Series([True, True, True, True])
tm.assert_series_equal(result, expected)
def test_fullmatch():
# GH 32806
values = Series(["fooBAD__barBAD", "BAD_BADleroybrown", np.nan, "foo"])
result = values.str.fullmatch(".*BAD[_]+.*BAD")
exp = Series([True, False, np.nan, False])
tm.assert_series_equal(result, exp)
# Make sure that the new string arrays work
string_values = Series(
["fooBAD__barBAD", "BAD_BADleroybrown", np.nan, "foo"], dtype="string"
)
result = string_values.str.fullmatch(".*BAD[_]+.*BAD")
# Result is nullable boolean with StringDtype
string_exp = Series([True, False, np.nan, False], dtype="boolean")
tm.assert_series_equal(result, string_exp)
values = Series(["ab", "AB", "abc", "ABC"])
result = values.str.fullmatch("ab", case=False)
expected = Series([True, True, False, False])
tm.assert_series_equal(result, expected)
def test_findall():
values = Series(["fooBAD__barBAD", np.nan, "foo", "BAD"])
result = values.str.findall("BAD[_]*")
exp = Series([["BAD__", "BAD"], np.nan, [], ["BAD"]])
tm.assert_almost_equal(result, exp)
# mixed
mixed = Series(
[
"fooBAD__barBAD",
np.nan,
"foo",
True,
datetime.today(),
"BAD",
None,
1,
2.0,
]
)
rs = Series(mixed).str.findall("BAD[_]*")
xp = Series(
[
["BAD__", "BAD"],
np.nan,
[],
np.nan,
np.nan,
["BAD"],
np.nan,
np.nan,
np.nan,
]
)
assert isinstance(rs, Series)
tm.assert_almost_equal(rs, xp)
def test_find():
values = Series(["ABCDEFG", "BCDEFEF", "DEFGHIJEF", "EFGHEF", "XXXX"])
result = values.str.find("EF")
tm.assert_series_equal(result, Series([4, 3, 1, 0, -1]))
expected = np.array([v.find("EF") for v in values.values], dtype=np.int64)
tm.assert_numpy_array_equal(result.values, expected)
result = values.str.rfind("EF")
tm.assert_series_equal(result, Series([4, 5, 7, 4, -1]))
expected = np.array([v.rfind("EF") for v in values.values], dtype=np.int64)
tm.assert_numpy_array_equal(result.values, expected)
result = values.str.find("EF", 3)
tm.assert_series_equal(result, Series([4, 3, 7, 4, -1]))
expected = np.array([v.find("EF", 3) for v in values.values], dtype=np.int64)
tm.assert_numpy_array_equal(result.values, expected)
result = values.str.rfind("EF", 3)
tm.assert_series_equal(result, Series([4, 5, 7, 4, -1]))
expected = np.array([v.rfind("EF", 3) for v in values.values], dtype=np.int64)
tm.assert_numpy_array_equal(result.values, expected)
result = values.str.find("EF", 3, 6)
tm.assert_series_equal(result, Series([4, 3, -1, 4, -1]))
expected = np.array([v.find("EF", 3, 6) for v in values.values], dtype=np.int64)
tm.assert_numpy_array_equal(result.values, expected)
result = values.str.rfind("EF", 3, 6)
tm.assert_series_equal(result, Series([4, 3, -1, 4, -1]))
expected = np.array([v.rfind("EF", 3, 6) for v in values.values], dtype=np.int64)
tm.assert_numpy_array_equal(result.values, expected)
with pytest.raises(TypeError, match="expected a string object, not int"):
result = values.str.find(0)
with pytest.raises(TypeError, match="expected a string object, not int"):
result = values.str.rfind(0)
def test_find_nan():
values = Series(["ABCDEFG", np.nan, "DEFGHIJEF", np.nan, "XXXX"])
result = values.str.find("EF")
tm.assert_series_equal(result, Series([4, np.nan, 1, np.nan, -1]))
result = values.str.rfind("EF")
tm.assert_series_equal(result, Series([4, np.nan, 7, np.nan, -1]))
result = values.str.find("EF", 3)
tm.assert_series_equal(result, Series([4, np.nan, 7, np.nan, -1]))
result = values.str.rfind("EF", 3)
tm.assert_series_equal(result, Series([4, np.nan, 7, np.nan, -1]))
result = values.str.find("EF", 3, 6)
tm.assert_series_equal(result, Series([4, np.nan, -1, np.nan, -1]))
result = values.str.rfind("EF", 3, 6)
tm.assert_series_equal(result, Series([4, np.nan, -1, np.nan, -1]))
def test_translate():
def _check(result, expected):
if isinstance(result, Series):
tm.assert_series_equal(result, expected)
else:
tm.assert_index_equal(result, expected)
for klass in [Series, Index]:
s = klass(["abcdefg", "abcc", "cdddfg", "cdefggg"])
table = str.maketrans("abc", "cde")
result = s.str.translate(table)
expected = klass(["cdedefg", "cdee", "edddfg", "edefggg"])
_check(result, expected)
# Series with non-string values
s = Series(["a", "b", "c", 1.2])
expected = Series(["c", "d", "e", np.nan])
result = s.str.translate(table)
tm.assert_series_equal(result, expected)
def test_contains_moar():
# PR #1179
s = Series(["A", "B", "C", "Aaba", "Baca", "", np.nan, "CABA", "dog", "cat"])
result = s.str.contains("a")
expected = Series(
[False, False, False, True, True, False, np.nan, False, False, True]
)
tm.assert_series_equal(result, expected)
result = s.str.contains("a", case=False)
expected = Series(
[True, False, False, True, True, False, np.nan, True, False, True]
)
tm.assert_series_equal(result, expected)
result = s.str.contains("Aa")
expected = Series(
[False, False, False, True, False, False, np.nan, False, False, False]
)
tm.assert_series_equal(result, expected)
result = s.str.contains("ba")
expected = Series(
[False, False, False, True, False, False, np.nan, False, False, False]
)
tm.assert_series_equal(result, expected)
result = s.str.contains("ba", case=False)
expected = Series(
[False, False, False, True, True, False, np.nan, True, False, False]
)
tm.assert_series_equal(result, expected)
def test_contains_nan():
# PR #14171
s = Series([np.nan, np.nan, np.nan], dtype=np.object_)
result = s.str.contains("foo", na=False)
expected = Series([False, False, False], dtype=np.bool_)
tm.assert_series_equal(result, expected)
result = s.str.contains("foo", na=True)
expected = Series([True, True, True], dtype=np.bool_)
tm.assert_series_equal(result, expected)
result = s.str.contains("foo", na="foo")
expected = Series(["foo", "foo", "foo"], dtype=np.object_)
tm.assert_series_equal(result, expected)
result = s.str.contains("foo")
expected = Series([np.nan, np.nan, np.nan], dtype=np.object_)
tm.assert_series_equal(result, expected)
def test_replace_moar():
# PR #1179
s = Series(["A", "B", "C", "Aaba", "Baca", "", np.nan, "CABA", "dog", "cat"])
result = s.str.replace("A", "YYY")
expected = Series(
["YYY", "B", "C", "YYYaba", "Baca", "", np.nan, "CYYYBYYY", "dog", "cat"]
)
tm.assert_series_equal(result, expected)
result = s.str.replace("A", "YYY", case=False)
expected = Series(
[
"YYY",
"B",
"C",
"YYYYYYbYYY",
"BYYYcYYY",
"",
np.nan,
"CYYYBYYY",
"dog",
"cYYYt",
]
)
tm.assert_series_equal(result, expected)
result = s.str.replace("^.a|dog", "XX-XX ", case=False, regex=True)
expected = Series(
[
"A",
"B",
"C",
"XX-XX ba",
"XX-XX ca",
"",
np.nan,
"XX-XX BA",
"XX-XX ",
"XX-XX t",
]
)
tm.assert_series_equal(result, expected)
def test_match_findall_flags():
data = {
"Dave": "dave@google.com",
"Steve": "steve@gmail.com",
"Rob": "rob@gmail.com",
"Wes": np.nan,
}
data = Series(data)
pat = r"([A-Z0-9._%+-]+)@([A-Z0-9.-]+)\.([A-Z]{2,4})"
result = data.str.extract(pat, flags=re.IGNORECASE, expand=True)
assert result.iloc[0].tolist() == ["dave", "google", "com"]
result = data.str.match(pat, flags=re.IGNORECASE)
assert result[0]
result = data.str.fullmatch(pat, flags=re.IGNORECASE)
assert result[0]
result = data.str.findall(pat, flags=re.IGNORECASE)
assert result[0][0] == ("dave", "google", "com")
result = data.str.count(pat, flags=re.IGNORECASE)
assert result[0] == 1
with tm.assert_produces_warning(UserWarning):
result = data.str.contains(pat, flags=re.IGNORECASE)
assert result[0]
|
bsd-3-clause
| 6,857,426,806,594,123,000
| 31.824921
| 88
| 0.587478
| false
| 3.169027
| true
| false
| false
|
Rosuav/shed
|
BL1_find_items.py
|
1
|
17462
|
import argparse
import os.path
import struct
import inspect
from dataclasses import dataclass # ImportError? Upgrade to Python 3.7 or pip install dataclasses
class FunctionArg:
def __init__(self, desc="keyword", other_args=0):
self.desc = desc
self.functions = {}
self.other_args = other_args # Number of args given to the function that aren't from the cmdline
def __repr__(self): return self.desc
def __call__(self, func_or_arg):
if isinstance(func_or_arg, str):
# We've been given a command-line argument (argparse mode).
fn, *args = func_or_arg.split(":")
if fn not in self.functions:
raise argparse.ArgumentTypeError("Unrecognized %r - valid: %s"
% (fn, ', '.join(sorted(self.functions))))
func = self.functions[fn]
max = func.__code__.co_argcount - self.other_args
min = max - len(func.__defaults__ or ())
if func.__code__.co_flags & inspect.CO_VARARGS:
max = float("inf")
if min == max != len(args):
# Special case some messages for readability
if min == 0:
raise argparse.ArgumentTypeError("%s does not take arguments" % fn)
raise argparse.ArgumentTypeError("%s%s requires exactly %d arg%s" %
(fn, ":X" * min, min, "s" * (min!=1)))
if len(args) < min:
raise argparse.ArgumentTypeError("%s requires at least %d arg%s" % (fn, min, "s" * (min!=1)))
if len(args) > max:
raise argparse.ArgumentTypeError("%s requires at most %d arg%s" % (fn, max, "s" * (max!=1)))
return func, args
# Else assume we've been given a function to retain (decorator mode)
self.functions[func_or_arg.__name__] = func_or_arg
return func_or_arg
loot_filter = FunctionArg("filter", 1)
@loot_filter
def type(item, type): return type in item.type
del type # I want the filter to be called type, but not to override type()
@loot_filter
def eq(item, slot="any"):
if slot == "any": return item.slot > 0 # By default, show everything that's equipped.
return int(slot) == item.slot # Or say "eq:3" to select item in equip slot 3.
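# Usage sketch (editor's note): FunctionArg doubles as a decorator (registering
# functions by name) and as an argparse type (parsing "name:arg1:arg2" strings),
# so the filters above can be combined on the command line, e.g.
# "-l eq type:Shield" keeps only equipped shields. Argument counts are checked
# against each function's signature, minus the other_args passed internally.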
synthesizer = FunctionArg("synth", 1)
@synthesizer
def money(savefile): savefile.money += 5000000
@synthesizer
def burnammo(savefile):
for ammo in savefile.ammo:
if ammo.amount > 10: ammo.amount -= 1.0
@synthesizer
def fix_prison_jump(savefile):
"""Fix a weird glitch where you can't get into the Knoxx DLC prison
Marks the first goal (finding the spot to jump) as done. The glitch
had the first goal not done but the second done, and the mission
wouldn't progress.
"""
for block in savefile.missions:
for mission in block.missions:
if mission.progress == 1:
if mission.mission == "dlc3_MainMissions.MainMissions.M_dlc3_PrisonInfiltrate":
mission.goals[0] = ('None', 1)
print(mission)
@synthesizer
def create_shields(savefile):
"""Synthesize a bunch of similar shields to compare Quality values"""
for quality in range(6):
savefile.items.append(Item(
grade="gd_itemgrades.Gear.ItemGrade_Gear_Shield",
type='gd_shields.A_Item.Item_Shield',
pieces=[
"gd_shields.Body.body3b_power",
"gd_shields.LeftSide.leftside4",
"gd_shields.RightSide.rightside4",
"gd_shields.ManufacturerMaterials.Material_Torgue_3",
],
mfg='gd_manufacturers.Manufacturers.Torgue',
prefix="gd_shields.Prefix.Prefix_Max4_Impenetrable",
title="gd_shields.Title.Title_Torgue3_MachoShield",
unknown=1, quality=quality, level=0, slot=0, junk=0, locked=0,
))
@synthesizer
def create_class_mods(savefile, who):
# TODO: Deduplicate
if who.casefold() == "brick":
for quality in range(3, 6):
savefile.items.append(Item(
grade="gd_itemgrades.Gear.ItemGrade_Gear_ComDeck_Brick",
type='gd_CommandDecks.A_Item.Item_CommandDeck_Brick',
pieces=[
"gd_CommandDecks.Body_Brick.Brick_Warmonger",
"gd_CommandDecks.LeftSide.leftside6b",
"gd_CommandDecks.RightSide.rightside6",
"gd_CommandDecks.ManufacturerMaterials.Material_Torgue_2",
],
mfg='gd_manufacturers.Manufacturers.Torgue',
prefix="gd_CommandDecks.Prefix.Prefix_Brick_Warmonger",
title="gd_CommandDecks.Title.Title_ComDeckBrick",
unknown=1, quality=quality, level=0, slot=0, junk=0, locked=0,
))
elif who.casefold() == "lilith":
for quality in range(3, 6):
savefile.items.append(Item(
grade="gd_itemgrades.Gear.ItemGrade_Gear_ComDeck_Lilith",
type='gd_CommandDecks.A_Item.Item_CommandDeck_Lilith',
pieces=[
"gd_CommandDecks.Body_Lilith.Lilith_Mercenary",
"gd_CommandDecks.LeftSide.leftside6",
"gd_CommandDecks.RightSide.rightside6",
"gd_CommandDecks.ManufacturerMaterials.Material_Dahl_3",
],
mfg='gd_manufacturers.Manufacturers.Dahl',
prefix="gd_CommandDecks.Prefix.Prefix_Lilith_Mercenary",
title="gd_CommandDecks.Title.Title_ComDeckLilith",
unknown=1, quality=quality, level=0, slot=0, junk=0, locked=0,
))
else: raise ValueError("Dunno who you want class mods for - %r" % who)
@synthesizer
def create_cmod_variants(savefile):
import itertools
for quality, left, mfg, mat in itertools.product(range(3, 6), ["leftside6", "leftside6c"], ["Pangolin", "Maliwan"], "23"):
savefile.items.append(Item(
grade="gd_itemgrades.Gear.ItemGrade_Gear_ComDeck_Mordecai",
type='gd_CommandDecks.A_Item.Item_CommandDeck_Mordecai',
pieces=[
"gd_CommandDecks.Body_Mordecai.Mordecai_Survivor",
"gd_CommandDecks.LeftSide." + left,
"gd_CommandDecks.RightSide.rightside6",
"gd_CommandDecks.ManufacturerMaterials.Material_%s_%s" % (mfg, mat),
],
mfg='gd_manufacturers.Manufacturers.' + mfg,
prefix="gd_CommandDecks.Prefix.Prefix_Mordecai_Survivor",
title="gd_CommandDecks.Title.Title_ComDeckMordecai",
unknown=1, quality=quality, level=0, slot=0, junk=0, locked=0,
))
@synthesizer
def boost_weapons(savefile):
newweaps = []
for weapon in savefile.weapons:
if weapon.slot:
for quality in range(weapon.quality, 6):
newweap = Weapon(**vars(weapon))
newweap.quality = quality
newweap.slot = 0
# print(newweap)
newweaps.append(newweap)
savefile.weapons.extend(newweaps) # Don't change the list while we're iterating over it
class Consumable:
"""Like a bytes/str object but can be consumed a few bytes/chars at a time"""
def __init__(self, data):
self.data = data
self.eaten = 0
self.left = len(data)
def get(self, num):
"""Destructively read the next num bytes/chars of data"""
if num > self.left: raise ValueError("Out of data!")
ret = self.data[self.eaten : self.eaten + num]
self.eaten += num
self.left -= num
return ret
# Read integers, and some length-preceded string formats, assuming we have
# a collection of bytes here. Don't call these if the original data was text.
def int(self, size=4, order="little"): return int.from_bytes(self.get(size), order)
def hollerith(self, size=4, order="little"): return self.get(self.int(size, order))
def str(self): return self.hollerith().rstrip(b"\x00").decode("ascii")
def __len__(self): return self.left
def peek(self): return self.data[self.eaten:] # Doubles as "convert to bytes/str"
@classmethod
def from_bits(cls, data):
"""Create a bitfield consumable from packed eight-bit data"""
return cls(''.join(format(x, "08b") for x in data))
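# A minimal usage sketch of Consumable (editor's illustration; _demo_consumable
# is a hypothetical helper, not part of the original file). It walks through a
# Hollerith byte string (32-bit little-endian length, then payload) and an int.
def _demo_consumable():
    data = Consumable(b"\x03\x00\x00\x00abc" + b"\x2a\x00\x00\x00")
    assert data.hollerith() == b"abc"  # length prefix 3, then the payload
    assert data.int() == 42  # plain 32-bit little-endian integer
    assert len(data) == 0  # everything has been consumed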
class SaveFileFormatError(Exception): pass
def decode_dataclass(data, typ):
if hasattr(typ, "__dataclass_fields__"):
values = {}
for field in typ.__dataclass_fields__.values():
values[field.name] = decode_dataclass(data, field.type)
return typ(**values)
if isinstance(typ, list):
return [decode_dataclass(data, typ[0]) for _ in range(data.int())]
if isinstance(typ, tuple):
return tuple(decode_dataclass(data, t) for t in typ)
if isinstance(typ, int):
return data.get(typ)
if isinstance(typ, bytes):
ret = data.get(len(typ))
assert ret == typ
return ret
if typ is int:
return data.int()
if isinstance(typ, range):
# Bounded integer
l = len(typ)
ret = data.int(1 if l <= 256 else 2 if l <= 65536 else 4)
# TODO: Support signed numbers eg range(-128, 127)
assert ret in typ
return ret
if typ is bytes:
return data.hollerith()
if typ is str:
return data.str()
if typ is float:
return struct.unpack("f", data.get(4))[0]
if typ is print:
print(data.peek()[:16], len(data))
return None
raise TypeError("need to implement: %r %r" % (type(typ), typ))
def encode_dataclass(data, typ):
if hasattr(typ, "__dataclass_fields__"):
ret = []
for field in typ.__dataclass_fields__.values():
ret.append(encode_dataclass(getattr(data, field.name), field.type))
return b"".join(ret)
if isinstance(typ, list):
return encode_dataclass(len(data), int) + b"".join(encode_dataclass(val, typ[0]) for val in data)
if isinstance(typ, tuple):
return b"".join(encode_dataclass(val, t) for val, t in zip(data, typ))
if isinstance(typ, int):
assert len(data) == typ
return data
if isinstance(typ, bytes):
assert data == typ
return data
if typ is int:
return data.to_bytes(4, "little")
if isinstance(typ, range):
# Bounded integer
l = len(typ)
assert data in typ
# TODO as above, signed integers
return data.to_bytes(1 if l <= 256 else 2 if l <= 65536 else 4, "little")
if typ is bytes:
return encode_dataclass(len(data), int) + data
if typ is str:
return encode_dataclass(data.encode("ascii") + b"\x00", bytes)
if typ is float:
return struct.pack("f", data)
if typ is print:
return b""
raise TypeError("need to implement: %r %r" % (type(typ), typ))
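# Round-trip property (editor's note, grounded in the assertion inside
# parse_savefile below): for a well-formed save,
#     encode_dataclass(decode_dataclass(Consumable(raw), Savefile), Savefile) == raw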
# For anyone reading this file to try to understand the save file format:
# Firstly, be sure to also read the WillowTree# source code, which is more
# comprehensive but less comprehensible than this - you can find it at
# http://willowtree.sourceforge.net. Everything in here came either from my
# own explorations with a hex editor or from reading the WillowTree# source.
# Secondly, these classes represent different structures within the file;
# fields are laid out sequentially with no padding.
# Annotation Meaning
# int 32-bit unsigned integer
# float 32-bit IEEE binary floating-point
# bytes Hollerith byte string consisting of a 32-bit length followed
# by that many bytes of raw data
# str Hollerith text string: 32-bit length, that many bytes of ASCII
# data, then b"\0" (included in the length)
# b"..." Exactly those bytes. Used for signatures etc.
# range(N) Integer within the given range, taking up the minimum space
# (so a range(65536) is a 16-bit integer)
# AnyClassName One instance of the named class (potentially recursive)
# (x,y,z) The given values in that exact order. Identical in the file to
# having the same three annotations separately identified.
# [x] Hollerith array: 32-bit length, then that many instances of
# whatever is in the list (so [int] would make an array of ints).
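# Worked example (editor's illustration): under this scheme a field annotated
# [int] holding [1, 2] is stored as a 32-bit count followed by the elements,
#   02 00 00 00  01 00 00 00  02 00 00 00
# and a str "hi" is stored as 03 00 00 00 68 69 00 (the length counts the NUL).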
@dataclass
class BankString:
mask: 1 # Always seems to use a mask of 32 or 0 (b" " or b"\0")
# Segments are usually just normal strings (complete with their NUL
# termination included in the length), but can have a length of zero.
segments: (bytes,)*6
def __repr__(self):
return repr(".".join(s.rstrip(b"\x00").decode("ascii") for s in self.segments if s))
@dataclass
class Skill:
name: str
level: int
progress: int # Possibly progress to next level?? Applies only to proficiencies.
state: int # Always either -1 or 1
@dataclass
class Ammo:
cat: str
pool: str
amount: float # WHY??? Ammo regen maybe???
capacity: int # 0 = base capacity, 1 = first upgrade, etc
@dataclass
class Item: # Can't find item level
grade: str
type: str
pieces: (str,) * 4
mfg: str
prefix: str
title: str
unknown: int
quality: range(65536)
level: range(65536)
slot: int # 1 if equipped or 0 for backpack
junk: int
locked: int
@dataclass
class Weapon:
grade: str
mfg: str
type: str
pieces: (str,) * 8
material: str
prefix: str
title: str
ammo: int
quality: range(65536)
level: range(65536)
slot: int # 1-4 or 0 for backpack
junk: int
locked: int
@dataclass
class BankItem: # Bank items have things in a different order. Weird.
type: str
grade: str
mfg: str
pieces: (str,) * 4
prefix: str
title: str
@dataclass
class Mission:
mission: str
progress: int # 1 = active, 2 = complete, 4 = turned in. Not sure if bitwise or enumeration.
unknown: (int, int)
goals: [(str, int)] # Always 0 of these for done missions
@dataclass
class MissionBlock:
id: int # Sequentially numbered blocks
current_mission: str # I think? Maybe?
missions: [Mission]
@dataclass
class Challenges:
outer_length: b"\x43\x05\0\0" # Length of this entire structure (not counting itself)
id: b"\3\0\0\0"
inner_length: b"\x3b\x05\0\0" # Length of the rest of the structure. Yes, exactly 8 less than outer_length.
@dataclass
class Challenge:
id: range(65536)
type: range(256) # Either 1 or 5, usually 1
value: int
count: b"\xbf\0" # Number of entries - it's 16-bit but otherwise same as saying [Challenge]
challenges: (Challenge,) * 191
@dataclass
class Savefile:
sig: b"WSG" # If it's not, this might be an Xbox save file
ver: b"\2\0\0\0" # If it's not, this might be a big-endian PS3 save file
type: b"PLYR"
revision: int
cls: str
level: int
xp: int
zeroes1: bytes(8) # Unspent skill points?
money: int
finished_once: int # 1 if you've finished the first playthrough
skills: [Skill]
vehicle_info: (int,) * 4 # Vehicle info
ammo: [Ammo]
items: [Item]
backpacksize: int
weaponslots: int
weapons: [Weapon]
challenges: Challenges
fasttravels: [str] # Doesn't include DLCs that have yet to be tagged up
last_location: str # You'll spawn at this location
zeroes4: bytes(12)
unknown7: int
zeroes5: bytes(4)
savefile_index: int # Possibly needs to correspond to the file name??
unknown8: b"\x27\0\0\0"
unknown8a: int # Higher on more-experienced players, up to 45 on completion of main plot
missions: [MissionBlock]
playtime: int
timestamp: str # Last saved? I think?
name: str
colours: (int, int, int)
enhancedblock: 0x55 # ???
unknown10: int
promocodes: [int]
promocodes_new: [int]
echo_recordings: [(int, [(str, int, int)])] # No idea what the ints mean, probably flags about having heard them or something
dlc_block_len: int # Total length of all the DLC blocks (up to just before zeroes6)
bank_sig: b"\x34\x12\x21\x43"
bank_block_len: int # == 5 + len(encoded(bank_weapons))
unknown12: b"\x02"
bank_capacity: int
bank_weapons: [(1, BankString, BankString, BankString, int, (BankString,)*11, bytes(7), 5, int)]
unknown13: 42
dlc_items: [Item] # DLC-only items??
dlc_weapons: [Weapon] # Ditto
unknown99: (int,) * 6
zeroes6: bytes(80)
def parse_savefile(fn):
with open(fn, "rb") as f: data = Consumable(f.read())
savefile = decode_dataclass(data, Savefile)
assert savefile.last_location in savefile.fasttravels
print("%s (level %d %s, $%d)" % (savefile.name, savefile.level, savefile.cls.split("_")[-1], savefile.money))
if args.loot_filter is not None:
for weapon in sorted(savefile.weapons + savefile.dlc_weapons, key=lambda w: w.slot or 5):
for filter, filterargs in args.loot_filter:
if not filter(weapon, *filterargs): break
else:
print("%d: [%d-%d] %s %s" % (weapon.slot, weapon.level, weapon.quality, weapon.prefix.split(".")[-1], weapon.title.split(".")[-1]))
for item in sorted(savefile.items + savefile.dlc_items, key=lambda w: w.slot or 5):
for filter, filterargs in args.loot_filter:
if not filter(item, *filterargs): break
else:
print("%d: [%d-%d] %s %s" % (item.slot, item.level, item.quality, item.prefix.split(".")[-1], item.title.split(".")[-1]))
# print(", ".join(hex(x) for x in savefile.unknown13))
# print(*savefile.bank_weapons, sep="\n")
print(savefile.bank_block_len, savefile.unknown12, savefile.bank_capacity)
print(savefile.bank_weapons)
assert len(data) == 0
assert encode_dataclass(savefile, Savefile) == data.data
if args.synth is not None:
savefile.name = "PATCHED"
for synth, synthargs in args.synth: synth(savefile, *synthargs)
synthesized = encode_dataclass(savefile, Savefile)
with open(os.path.basename(fn), "wb") as f: f.write(synthesized)
return ""
def main(args):
# TODO: Support the non-GOTY version too?
# TODO: Locate paths case insensitively in case there's differences
# GOTY non-enhanced: /steam/steamapps/compatdata/8980/pfx/drive_c/users/steamuser/My Documents/my games/borderlands/savedata
dir = os.path.expanduser(args.path + "/steam/steamapps/compatdata/729040/pfx/drive_c/users/steamuser/My Documents/My Games/Borderlands Game of the Year/Binaries/SaveData")
for fn in sorted(os.listdir(dir)):
if not fn.endswith(".sav"): continue
print(fn, end="... ")
try: print(parse_savefile(os.path.join(dir, fn)))
except SaveFileFormatError as e: print(e.args[0])
print()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Borderlands 1 save file reader")
parser.add_argument("--path", help="Set path to Steam", default="~/.steam")
# parser.add_argument("--pieces", help="Show the individual pieces inside weapons/items", action="store_true")
# parser.add_argument("--raw", help="Show the raw details of weapons/items (spammy - use loot filters)", action="store_true")
parser.add_argument("--synth", help="Synthesize a modified save file", type=synthesizer, nargs="*")
parser.add_argument("-l", "--loot-filter", help="Show loot, optionally filtered to only what's interesting", type=loot_filter, nargs="*")
# parser.add_argument("-f", "--file", help="Process only one save file")
args = parser.parse_args()
print(args)
main(args)
|
mit
| 1,392,985,254,175,123,700
| 35.684874
| 172
| 0.70181
| false
| 2.863562
| false
| false
| false
|
j-be/laundry-dudes
|
server/laundrydude-server.py
|
1
|
5181
|
#!flask/bin/python
import datetime
import time
import smtplib
from email.mime.text import MIMEText
import domain
from sqlobject import SQLObjectNotFound
from flask import Flask, request, jsonify, abort
LED_THRESHOLD = 600
data_types = None
app = Flask(__name__)
washer_state = None
def _getTimeOfDay(dt):
return "%02d.%02d, %02d:%02d" % (dt.day, dt.month, dt.hour, dt.minute)
def changeState(new_state):
global washer_state
if new_state == washer_state.value:
return
washer_state = domain.State(value=new_state)
if new_state == 3:
user = getCurrentUser()
if user is not None:
sendMail(user.email, "[LaundryDude] Washer almost done...",
"Hi %s,\n\nthe machine is spin-drying - your laundry should be "
"done soon.\n\nKind regards,\nyour LaundryDudes" % user.name)
if new_state == 4:
user = getCurrentUser()
if user is not None:
sendMail(user.email, "[LaundryDude] Washer done!",
"Hi %s,\n\nyour laundry is done!\n\nKind regards,\n"
"your LaundryDudes" % user.name)
def sendMail(to, subject, text):
fromAddress = 'laundry.dude.notificator@gmail.com'
msg = MIMEText(text)
msg['Subject'] = subject
msg['From'] = fromAddress
msg['To'] = to
# Send the mail
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
    # NOTE: 'password' is not defined anywhere in this file (it was likely
    # redacted); it must be supplied before sendMail() can succeed.
    server.login(fromAddress, password)
server.sendmail(fromAddress, to, msg.as_string())
server.quit()
def getNextReservation():
try:
now = time.time()
select_clause = 'start > %s' % now
reservation = domain.Reservation.select(select_clause).orderBy('start').limit(1)
reservation = reservation.getOne()
return reservation
except SQLObjectNotFound:
return None
@app.route('/laundrydude/')
def index():
return app.send_static_file('index.html')
@app.route('/laundrydude/<path:file_name>')
def static_html_proxy(file_name):
return app.send_static_file(file_name)
@app.route('/laundrydude/css/<path:path>')
def static_css_proxy(path):
return app.send_static_file('css/' + path)
@app.route('/laundrydude/js/<path:path>')
def static_js_proxy(path):
return app.send_static_file('js/' + path)
@app.route('/laundrydude/img/<path:path>')
def static_img_proxy(path):
print path
return app.send_static_file('img/' + path)
@app.route('/laundrydude/api/data')
def get_data():
values = {}
for data_type in data_types.keys():
domain_cls = data_types[data_type]
values[data_type] = [(_getTimeOfDay(row.timestamp), row.value)
for row in domain_cls.select()]
return jsonify(values), 200
def getCurrentUser():
try:
rfid_tag = domain.RfidCard.select().orderBy('-id').limit(1).getOne().value
return domain.User.select('rfid == "' + rfid_tag + '"').getOne()
except SQLObjectNotFound:
return None
@app.route('/laundrydude/api/last-data')
def get_last_data():
values = {}
for data_type in ['h', 's', 't']:
domain_cls = data_types[data_type]
last_row = domain_cls.select().orderBy('-id').limit(1).getOne()
values[data_type] = (_getTimeOfDay(last_row.timestamp), last_row.value)
user = getCurrentUser()
if user:
values['u'] = user.name
reservation = getNextReservation()
if reservation:
reservation_info = {}
reservation_info['user'] = reservation.user
reservation_info['start'] = _getTimeOfDay(
datetime.datetime.fromtimestamp(reservation.start))
reservation_info['startTs'] = reservation.start
values['r'] = reservation_info
return jsonify(values), 200
@app.route('/laundrydude/api/blocker')
def get_blocker_state():
return "b=0", 200
@app.route('/laundrydude/api/clear')
def clear_db():
print "Clearing DB..."
for cls in data_types.values():
cls.deleteMany('id=id')
return jsonify({'e': 0}), 200
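# Inferred washer states (an assumption based on the transitions below and the
# notification texts in changeState(); not documented in the original code):
#   0 = idle, 1 = panel LED on / cycle started, 2 = LED off / washing,
#   3 = spin-drying ("almost done" mail), 4 = LED on again / done ("done" mail).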
def handleStateChange(key, value):
if key == "l":
if value > LED_THRESHOLD and washer_state.value == 0:
changeState(1)
if value <= LED_THRESHOLD and washer_state.value == 1:
changeState(2)
        if value > LED_THRESHOLD and washer_state.value > 1 and washer_state.value != 4:
changeState(4)
if value <= LED_THRESHOLD and washer_state.value == 4:
changeState(0)
elif key == "a":
if abs(value) - 1190 > 300 and washer_state.value == 2:
changeState(3)
@app.route('/laundrydude/api/data', methods=['POST'])
def save_data():
global washer_state
data_dict = request.form
if not data_dict:
abort(400)
for data_type in data_dict.keys():
if data_type == "r":
value = data_dict[data_type].strip()
else:
value = float(data_dict[data_type].strip())
data_types[data_type](value=value)
handleStateChange(data_type, value)
return jsonify({"e": 0}), 201
@app.route('/laundrydude/api/reservation', methods=['POST'])
def save_reservation():
start_time = time.mktime(time.strptime(
request.json['start'], "%Y-%m-%dT%H:%M:%S.000Z"))
print domain.Reservation(
user=request.json['title'],
start=start_time + 3600)
return jsonify({"e": 0}), 201
@app.route('/laundrydude/api/reservation', methods=['GET'])
def get_reservations():
return jsonify(domain.sqlresultToDictList(domain.Reservation.select())), 200
if __name__ == '__main__':
data_types = domain.createDb()
washer_state = domain.State(value=0)
sendMail('juriberlanda@hotmail.com', '[LaundryDudes] Started', '')
app.run(host='0.0.0.0', debug=True)
|
mit
| 107,147,871,397,105,260
| 25.569231
| 82
| 0.691565
| false
| 2.805089
| false
| false
| false
|
jut-io/jut-python-tools
|
jut/api/integrations.py
|
1
|
1068
|
"""
jut integrations api
"""
from jut import defaults
from jut.api import deployments, data_engine
def get_webhook_url(deployment_name,
space='default',
data_source='webhook',
token_manager=None,
app_url=defaults.APP_URL,
**fields):
"""
return the webhook URL for posting webhook data to
"""
import_url = data_engine.get_import_data_url(deployment_name,
app_url=app_url,
token_manager=token_manager)
api_key = deployments.get_apikey(deployment_name,
token_manager=token_manager,
app_url=app_url)
fields_string = '&'.join(['%s=%s' % (key, value)
for (key, value) in fields.items()])
    return '%s/api/v1/import/webhook/?space=%s&data_source=%s&apikey=%s&%s' % \
(import_url, space, data_source, api_key, fields_string)
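# A minimal usage sketch (the deployment name, token manager and the extra
# 'tag' field below are hypothetical, not part of this module):
#
#   url = get_webhook_url('my-deployment',
#                         space='default',
#                         data_source='github',
#                         token_manager=token_manager,
#                         tag='ci-builds')
#   # POST webhook JSON payloads to `url`.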
|
mit
| 4,401,881,427,402,692,000
| 29.514286
| 80
| 0.4897
| false
| 4.25498
| false
| false
| false
|
simright/flask-security
|
flask_security/decorators.py
|
1
|
8042
|
# -*- coding: utf-8 -*-
"""
flask_security.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~
Flask-Security decorators module
:copyright: (c) 2012 by Matt Wright.
:license: MIT, see LICENSE for more details.
"""
import re
from collections import namedtuple
from functools import wraps
from flask import (abort, current_app, Response, request,
url_for, redirect, _request_ctx_stack)
from flask_login import current_user, login_required # pragma: no flakes
from flask_principal import RoleNeed, Permission, Identity, identity_changed
from werkzeug.local import LocalProxy
from werkzeug.routing import BuildError
from . import utils
# Convenient references
_security = LocalProxy(lambda: current_app.extensions['security'])
_default_unauthorized_html = """
<h1>Unauthorized</h1>
<p>The server could not verify that you are authorized to access the URL
requested. You either supplied the wrong credentials (e.g. a bad password),
or your browser doesn't understand how to supply the credentials required.</p>
"""
BasicAuth = namedtuple('BasicAuth', 'username, password')
def _get_unauthorized_response(text=None, headers=None):
text = text or _default_unauthorized_html
headers = headers or {}
return Response(text, 401, headers)
def _get_unauthorized_view():
view = utils.get_url(utils.config_value('UNAUTHORIZED_VIEW'))
if view:
if callable(view):
view = view()
else:
try:
view = url_for(view)
except BuildError:
view = None
utils.do_flash(*utils.get_message('UNAUTHORIZED'))
return redirect(view or request.referrer or '/')
abort(403)
def _check_token():
header_key = _security.token_authentication_header
args_key = _security.token_authentication_key
header_token = request.headers.get(header_key, None)
token = request.args.get(args_key, header_token)
if request.get_json(silent=True):
if not isinstance(request.json, list):
token = request.json.get(args_key, token)
user = _security.login_manager.token_callback(token)
if user and user.is_authenticated:
app = current_app._get_current_object()
_request_ctx_stack.top.user = user
identity_changed.send(app, identity=Identity(user.id))
return True
return False
def _check_http_auth():
auth = request.authorization or BasicAuth(username=None, password=None)
user = _security.datastore.find_user(email=auth.username)
if user and utils.verify_and_update_password(auth.password, user):
_security.datastore.commit()
app = current_app._get_current_object()
_request_ctx_stack.top.user = user
identity_changed.send(app, identity=Identity(user.id))
return True
return False
def http_auth_required(realm):
"""Decorator that protects endpoints using Basic HTTP authentication.
The username should be set to the user's email address.
:param realm: optional realm name"""
def decorator(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
if _check_http_auth():
return fn(*args, **kwargs)
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
r = _security.default_http_auth_realm if callable(realm) else realm
h = {'WWW-Authenticate': 'Basic realm="%s"' % r}
return _get_unauthorized_response(headers=h)
return wrapper
if callable(realm):
return decorator(realm)
return decorator
def auth_token_required(fn):
"""Decorator that protects endpoints using token authentication. The token
should be added to the request by the client by using a query string
variable with a name equal to the configuration value of
`SECURITY_TOKEN_AUTHENTICATION_KEY` or in a request header named that of
the configuration value of `SECURITY_TOKEN_AUTHENTICATION_HEADER`
"""
@wraps(fn)
def decorated(*args, **kwargs):
if _check_token():
return fn(*args, **kwargs)
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
return _get_unauthorized_response()
return decorated
def auth_required(*auth_methods):
"""
    Decorator that protects endpoints through multiple mechanisms
Example::
@app.route('/dashboard')
@auth_required('token', 'session')
def dashboard():
return 'Dashboard'
:param auth_methods: Specified mechanisms.
"""
login_mechanisms = {
'token': lambda: _check_token(),
'basic': lambda: _check_http_auth(),
'session': lambda: current_user.is_authenticated
}
def wrapper(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
h = {}
mechanisms = [(method, login_mechanisms.get(method)) for method in auth_methods]
for method, mechanism in mechanisms:
if mechanism and mechanism():
return fn(*args, **kwargs)
elif method == 'basic':
r = _security.default_http_auth_realm
h['WWW-Authenticate'] = 'Basic realm="%s"' % r
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
return _get_unauthorized_response(headers=h)
return decorated_view
return wrapper
def roles_required(*roles):
"""Decorator which specifies that a user must have all the specified roles.
Example::
@app.route('/dashboard')
@roles_required('admin', 'editor')
def dashboard():
return 'Dashboard'
The current user must have both the `admin` role and `editor` role in order
to view the page.
:param args: The required roles.
"""
def wrapper(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
perms = [Permission(RoleNeed(role)) for role in roles]
for perm in perms:
if not perm.can():
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
return _get_unauthorized_view()
return fn(*args, **kwargs)
return decorated_view
return wrapper
def roles_accepted(*roles):
"""Decorator which specifies that a user must have at least one of the
specified roles. Example::
@app.route('/create_post')
@roles_accepted('editor', 'author')
def create_post():
return 'Create Post'
The current user must have either the `editor` role or `author` role in
order to view the page.
:param args: The possible roles.
"""
def wrapper(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
perm = Permission(*[RoleNeed(role) for role in roles])
if perm.can():
return fn(*args, **kwargs)
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
return _get_unauthorized_view()
return decorated_view
return wrapper
def anonymous_user_required(f):
@wraps(f)
def wrapper(*args, **kwargs):
if current_user.is_authenticated:
return redirect(utils.get_url(_security.post_login_view))
return f(*args, **kwargs)
return wrapper
def verify_pag_source(func):
"""验证页面来源"""
@wraps(func)
def wrapper(*args, **kwargs):
regx = re.compile(r"https?://www.simright.(com|io)/phone_register")
refer = str(request.referrer)
rest = re.match(regx, refer)
if rest is not None:
return func(*args, **kwargs)
else:
raise Exception("Please request in a legal way")
return wrapper
|
mit
| 5,692,127,315,478,371,000
| 30.490196
| 92
| 0.615318
| false
| 4.219653
| false
| false
| false
|
arantes555/oblivious-movie-gharial
|
config.py
|
1
|
1105
|
import os
from sys import maxsize
# Minimum relevance (in per cent of the total amount of documents) to accept a classifier
MIN_RELEVANCE = 0.001
# Max amount of reviews to retrieve
MAX_REVIEWS = maxsize
# Max amount of movies to analyze
MOVIES_TO_ANALYZE = 1500
# Movies to classify after the model is trained
MOVIES_TO_CLASSIFY = 100
READ_ALL_THEN_SHUFFLE = True
# Maximum amount of words in the dictionary
MAX_FEATURES = 1000
# Amount of topics to extract, keep it relatively low
N_TOPICS = 20
# Amount of words to display for each topic, doesn't affect anything except printing
N_TOP_WORDS = 15
# Parameter that controls spareness
BETA = 1e+2
AMAZON_REVIEWS_FILE = './resources/Movies_and_TV_5.json'
METADATA_FILE = './resources/meta_Movies_and_TV.json'
LANGUAGE_STOP_WORDS_PATH = './resources/stopwords/english'
PROJECT_STOP_WORDS_PATH = './resources/movies_stopwords'
NLTK_DATA_DIR = os.path.abspath('./resources/nltk_data/')
os.environ['NLTK_DATA'] = NLTK_DATA_DIR
# Amount of parallel jobs the computer can take (core amount x2 with hyper-threading)
N_JOBS = 8
FULL_TOPICS = True
|
mit
| 660,543,326,210,007,400
| 23.555556
| 89
| 0.751131
| false
| 3.130312
| false
| false
| false
|
alexoneill/py3status
|
py3status/modules/github.py
|
1
|
9985
|
# -*- coding: utf-8 -*-
"""
Display Github notifications and issue/pull requests for a repo.
To check notifications a Github `username` and `personal access token` are
required. You can create a personal access token at
https://github.com/settings/tokens The only `scope` needed is `notifications`,
which provides readonly access to notifications.
The Github API is rate limited so setting `cache_timeout` too small may cause
issues; see https://developer.github.com/v3/#rate-limiting for details.
Configuration parameters:
auth_token: Github personal access token, needed to check notifications
see above.
(default None)
button_action: Button that when clicked opens the Github notification page
        if there are unread notifications, else the project page for the
        repository if there is one (otherwise the github home page).
        Setting to `None` disables.
(default 3)
button_refresh: Button that when clicked refreshes module.
Setting to `None` disables.
(default 2)
cache_timeout: How often we refresh this module in seconds
(default 60)
format: Format of output
*(default '{repo} {issues}/{pull_requests}{notifications}'
if username and auth_token provided else
'{repo} {issues}/{pull_requests}')*
format_notifications: Format of `{notification}` status placeholder.
(default ' N{notifications_count}')
notifications: Type of notifications can be `all` for all notifications or
`repo` to only get notifications for the repo specified. If repo is
not provided then all notifications will be checked.
(default 'all')
repo: Github repo to check
(default 'ultrabug/py3status')
url_api: Change only if using Enterprise Github, example https://github.domain.com/api/v3.
(default 'https://api.github.com')
url_base: Change only if using Enterprise Github, example https://github.domain.com.
(default 'https://github.com')
username: Github username, needed to check notifications.
(default None)
Format placeholders:
{issues} Number of open issues.
{notifications} Notifications. If no notifications this will be empty.
{notifications_count} Number of notifications. This is also the __Only__
placeholder available to `format_notifications`.
{pull_requests} Number of open pull requests
{repo} short name of the repository being checked. eg py3status
{repo_full} full name of the repository being checked. eg ultrabug/py3status
Examples:
```
# set github access credentials
github {
auth_token = '40_char_hex_access_token'
username = 'my_username'
}
# just check for any notifications
github {
auth_token = '40_char_hex_access_token'
username = 'my_username'
format = 'Github {notifications_count}'
}
```
@author tobes
SAMPLE OUTPUT
{'full_text': 'py3status 34/24'}
notification
{'full_text': 'py3status 34/24 N3', 'urgent': True}
"""
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class Py3status:
"""
"""
# available configuration parameters
auth_token = None
button_action = 3
button_refresh = 2
cache_timeout = 60
format = None
format_notifications = ' N{notifications_count}'
notifications = 'all'
repo = 'ultrabug/py3status'
url_api = 'https://api.github.com'
url_base = 'https://github.com'
username = None
def post_config_hook(self):
self.first = True
self.notification_warning = False
self.repo_warning = False
self._issues = '?'
self._pulls = '?'
self._notify = '?'
# remove a trailing slash in the urls
self.url_api = self.url_api.strip('/')
self.url_base = self.url_base.strip('/')
def _init(self):
# Set format if user has not configured it.
if not self.format:
if self.username and self.auth_token:
# include notifications
self.format = '{repo} {issues}/{pull_requests}{notifications}'
else:
self.format = '{repo} {issues}/{pull_requests}'
def _github_count(self, url):
"""
Get counts for requests that return 'total_count' in the json response.
"""
if self.first:
return '?'
url = self.url_api + url + '&per_page=1'
# if we have authentication details use them as we get better
# rate-limiting.
if self.username and self.auth_token:
auth = (self.username, self.auth_token)
else:
auth = None
try:
info = self.py3.request(url, timeout=10, auth=auth)
except (self.py3.RequestException):
return
if info and info.status_code == 200:
            return int(info.json()['total_count'])
if info.status_code == 422:
if not self.repo_warning:
self.py3.notify_user('Github repo cannot be found.')
self.repo_warning = True
return '?'
def _notifications(self):
"""
Get the number of unread notifications.
"""
if not self.username or not self.auth_token:
if not self.notification_warning:
self.py3.notify_user('Github module needs username and '
'auth_token to check notifications.')
self.notification_warning = True
return '?'
if self.first:
return '?'
if self.notifications == 'all' or not self.repo:
url = self.url_api + '/notifications'
else:
url = self.url_api + '/repos/' + self.repo + '/notifications'
url += '?per_page=100'
try:
info = self.py3.request(url, timeout=10,
auth=(self.username, self.auth_token))
except (self.py3.RequestException):
return
if info.status_code == 200:
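            # GitHub paginates notifications (100 per page here). If a Link
            # header with rel="last" is present, use its page number to compute
            # the total: (full pages * per-page count) + the length of the last
            # page; otherwise a single page holds everything.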
links = info.headers.get('Link')
if not links:
return len(info.json())
last_page = 1
for link in links.split(','):
if 'rel="last"' in link:
last_url = link[link.find('<') + 1:link.find('>')]
parsed = urlparse.urlparse(last_url)
last_page = int(urlparse.parse_qs(parsed.query)['page'][0])
if last_page == 1:
return len(info.json())
try:
last_page_info = self.py3.request(last_url, timeout=10,
auth=(self.username, self.auth_token))
except self.py3.RequestException:
return
return len(info.json()) * (last_page - 1) + len(last_page_info.json())
if info.status_code == 404:
if not self.repo_warning:
self.py3.notify_user('Github repo cannot be found.')
self.repo_warning = True
def github(self):
if self.first:
self._init()
status = {}
urgent = False
# issues
if self.repo and self.py3.format_contains(self.format, 'issues'):
url = '/search/issues?q=state:open+type:issue+repo:' + self.repo
self._issues = self._github_count(url) or self._issues
status['issues'] = self._issues
# pull requests
if self.repo and self.py3.format_contains(self.format, 'pull_requests'):
url = '/search/issues?q=state:open+type:pr+repo:' + self.repo
self._pulls = self._github_count(url) or self._pulls
status['pull_requests'] = self._pulls
# notifications
if self.py3.format_contains(self.format, 'notifications*'):
count = self._notifications()
# if we don't have a notification count, then use the last value
# that we did have.
if count is None:
count = self._notify
self._notify = count
if count and count != '?':
notify = self.py3.safe_format(
self.format_notifications,
{'notifications_count': count})
urgent = True
else:
notify = ''
status['notifications'] = notify
status['notifications_count'] = count
# repo
try:
status['repo'] = self.repo.split('/')[1]
except IndexError:
status['repo'] = 'Error'
status['repo_full'] = self.repo
if self.first:
cached_until = 0
self.first = False
else:
cached_until = self.py3.time_in(self.cache_timeout)
return {
'full_text': self.py3.safe_format(self.format, status),
'cached_until': cached_until,
'urgent': urgent
}
def on_click(self, event):
button = event['button']
if button == self.button_action:
# open github in browser
if self._notify and self._notify != '?':
# open github notifications page
url = self.url_base + '/notifications'
else:
if self.notifications == 'all' and not self.repo:
# open github.com if there are no unread notifications and no repo
url = self.url_base
else:
# open repo page if there are no unread notifications
url = self.url_base + '/' + self.repo
# open url in default browser
self.py3.command_run('xdg-open {}'.format(url))
self.py3.prevent_refresh()
elif button != self.button_refresh:
# only refresh the module if needed
self.py3.prevent_refresh()
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
bsd-3-clause
| 7,620,734,784,100,960,000
| 35.441606
| 94
| 0.576565
| false
| 4.254367
| false
| false
| false
|
slachiewicz/teryt2osm
|
teryt2osm/reporting.py
|
1
|
8621
|
# vi: encoding=utf-8
# teryt2osm - tool to merge TERYT data with OSM maps
# Copyright (C) 2009 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Status and error reporting facilities.
"""
__version__ = "$Revision$"
import sys
import os
import codecs
import xml.etree.cElementTree as ElementTree
class Error(Exception):
pass
class ProgressError(Error):
pass
class ChannelError(Error):
pass
class Channel(object):
def __init__(self, name, location = None):
"""Create channel. `name` is base channel name, `location`
is a location data (list of one to three of [wojewodztwo, powiat,
gmina])"""
self.name = name
if location is None:
location = []
self.location = location
self.level = len(location)
if self.level > 3:
raise ChannelError, "Channel too deep. Maximum depth level is 3"
if self.level:
if "." in location or ".." in location:
raise ValueError, "Forbidden entries in channel location!"
location = [ l.replace("/", "_").replace("\\", "_") for l in location ]
self.directory = os.path.join("reports", os.path.join(*location))
else:
self.directory = "reports"
if not os.path.exists(self.directory):
os.makedirs(self.directory)
if name in (".", ".."):
raise ValueError, "Forbidden channel name!"
name = name.replace("/", "_").replace("\\", "_")
self.log_file = codecs.open( os.path.join(self.directory, name + ".txt"), "w", "utf-8" )
self.counter = 0
self.quiet = False
self.split_level = 0
self.subchannels = {}
self.map_file = None
def __del__(self):
self.close()
def close(self):
if self.log_file:
self.log_file.close()
self.log_file = None
if self.map_file:
self.close_map_file()
def close_map_file(self):
self.map_file.write("</osm>\n")
self.map_file.close()
self.map_file = None
def set_mapping(self, value):
if not self.map_file:
if value:
self.map_file = file( os.path.join(self.directory,
self.name + ".osm"), "w" )
self.map_file.write('<osm generator="teryt2osm" version="0.6">\n')
elif not value:
self.close_map_file()
for subch in self.subchannels.values():
subch.mapping = value
def get_mapping(self):
if self.map_file:
return True
else:
return False
mapping = property(get_mapping, set_mapping)
def emit(self, msg, location):
self.log_file.write(u"%s\n" % (msg,))
if self.map_file and isinstance(location, OSM_Place):
self.map_file.write(
ElementTree.tostring(location.element, "utf-8"))
if not self.split_level:
return
try:
if self.level == 0:
loc_obj = location.wojewodztwo
elif self.level == 1:
loc_obj = location.powiat
elif self.level == 2:
loc_obj = location.gmina
        except (AttributeError, KeyError):
loc_obj = None
if loc_obj:
loc_name = loc_obj.name
split_level = self.split_level - 1
else:
loc_name = u"_brak"
split_level = 0
if loc_name in self.subchannels:
subchannel = self.subchannels[loc_name]
else:
subchannel = Channel(self.name, self.location + [loc_name])
if self.mapping:
subchannel.mapping = True
self.subchannels[loc_name] = subchannel
subchannel.split_level = split_level
subchannel.emit(msg, location)
def __repr__(self):
return "<Channel %i %r quiet=%r>" % (id(self), self.name, self.quiet)
class Reporting(object):
instance = None
def _init(self, logging = True):
global OSM_Place
from teryt2osm.osm_places import OSM_Place
self.logging = logging
self.progress_total = None
self.progress_step = None
self.progress_value = None
self.need_eol = False
self.channels = {}
if not os.path.exists("reports"):
os.mkdir("reports")
self.log_file = codecs.open( os.path.join("reports", "log.txt"), "w", "utf-8" )
def __del__(self):
self.close()
def close(self):
if self.need_eol:
print >>sys.stderr
if self.log_file:
self.log_file.close()
if Reporting.instance is self:
Reporting.instance = None
for channel in self.channels.values():
channel.close()
self.channels = {}
def __new__(cls, logging = True):
if cls.instance is None:
cls.instance = object.__new__(cls)
cls.instance._init(logging = logging)
return cls.instance
def get_channel(self, name):
if name in self.channels:
return self.channels[name]
channel = Channel(name)
self.channels[name] = channel
return channel
def config_channel(self, name, quiet = None, mapping = None, split_level = None):
channel = self.get_channel(name)
if quiet is not None:
channel.quiet = quiet
if mapping is not None:
channel.mapping = mapping
if split_level is not None:
channel.split_level = split_level
def log(self, msg):
if self.logging:
print >> self.log_file, msg
def print_msg(self, msg):
if self.need_eol:
print >>sys.stderr, u"\n%s" % (msg,)
self.need_eol = False
else:
print >>sys.stderr, msg
def output_msg(self, channel_name, msg, location = None):
"""Output a single message via channel 'channel'."""
channel = self.get_channel(channel_name)
if not channel.quiet:
self.print_msg(msg)
self.log(msg)
if self.logging:
channel.emit(msg, location)
def progress_start(self, msg, total, step = 1):
"""Start progrss reporting.
:Parameters:
- `total`: total number of progrss point
- `step`: percentage step when progress counter should be updated
"""
if self.progress_total:
raise ProgressError, u"Progress reporting already started."
self.progress_total = total
self.progress_step = max(int(total * step / 100), 1)
self.progress_value = 0
self.progress_msg = msg
self.log(u"%s… rozpoczęte" % (msg,))
sys.stderr.write(u"\r%s… " % (msg,))
sys.stderr.flush()
self.need_eol = True
def progress(self, increment = None, value = None):
if self.progress_total is None:
raise ProgressError, u"Progress reporting not started."
if not self.progress_total:
return
if increment is not None:
self.progress_value += increment
elif value is not None:
self.progress_value = value
else:
self.progress_value += 1
if self.progress_value % self.progress_step:
return
sys.stderr.write(u"\r%s… %2i%% " % (self.progress_msg,
self.progress_value * 100 / self.progress_total))
sys.stderr.flush()
self.need_eol = True
def progress_stop(self):
if self.progress_total is None:
raise ProgressError, u"Progress reporting not started."
print >>sys.stderr, "\r%s 100%% " % (self.progress_msg,)
self.progress_total = None
self.progress_step = None
self.progress_value = None
self.need_eol = False
self.log(u"%s… zakończone" % self.progress_msg)
|
gpl-2.0
| -6,645,002,260,093,666,000
| 33.035573
| 96
| 0.569272
| false
| 3.944572
| false
| false
| false
|
zjuchenyuan/BioWeb
|
Lib/Bio/SVDSuperimposer/__init__.py
|
1
|
5375
|
# Copyright (C) 2002, Thomas Hamelryck (thamelry@vub.ac.be)
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Align on protein structure onto another using SVD alignment.
SVDSuperimposer finds the best rotation and translation to put
two point sets on top of each other (minimizing the RMSD). This is
eg. useful to superimpose crystal structures. SVD stands for singular
value decomposition, which is used in the algorithm.
"""
from __future__ import print_function
from numpy import dot, transpose, sqrt, array
from numpy.linalg import svd, det
class SVDSuperimposer(object):
"""Class to run SVD alignment,
SVDSuperimposer finds the best rotation and translation to put
two point sets on top of each other (minimizing the RMSD). This is
eg. useful to superimpose crystal structures.
SVD stands for Singular Value Decomposition, which is used to calculate
the superposition.
Reference:
Matrix computations, 2nd ed. Golub, G. & Van Loan, CF., The Johns
Hopkins University Press, Baltimore, 1989
"""
def __init__(self):
self._clear()
# Private methods
def _clear(self):
self.reference_coords = None
self.coords = None
self.transformed_coords = None
self.rot = None
self.tran = None
self.rms = None
self.init_rms = None
def _rms(self, coords1, coords2):
"""Return rms deviations between coords1 and coords2."""
diff = coords1 - coords2
l = coords1.shape[0]
return sqrt(sum(sum(diff * diff)) / l)
# Public methods
def set(self, reference_coords, coords):
"""Set the coordinates to be superimposed.
coords will be put on top of reference_coords.
- reference_coords: an NxDIM array
- coords: an NxDIM array
DIM is the dimension of the points, N is the number
of points to be superimposed.
"""
# clear everything from previous runs
self._clear()
        # store coordinates
self.reference_coords = reference_coords
self.coords = coords
n = reference_coords.shape
m = coords.shape
if n != m or not(n[1] == m[1] == 3):
raise Exception("Coordinate number/dimension mismatch.")
self.n = n[0]
def run(self):
"""Superimpose the coordinate sets."""
if self.coords is None or self.reference_coords is None:
raise Exception("No coordinates set.")
coords = self.coords
reference_coords = self.reference_coords
# center on centroid
av1 = sum(coords) / self.n
av2 = sum(reference_coords) / self.n
coords = coords - av1
reference_coords = reference_coords - av2
# correlation matrix
a = dot(transpose(coords), reference_coords)
u, d, vt = svd(a)
self.rot = transpose(dot(transpose(vt), transpose(u)))
# check if we have found a reflection
if det(self.rot) < 0:
vt[2] = -vt[2]
self.rot = transpose(dot(transpose(vt), transpose(u)))
self.tran = av2 - dot(av1, self.rot)
def get_transformed(self):
"""Get the transformed coordinate set."""
if self.coords is None or self.reference_coords is None:
raise Exception("No coordinates set.")
if self.rot is None:
raise Exception("Nothing superimposed yet.")
if self.transformed_coords is None:
self.transformed_coords = dot(self.coords, self.rot) + self.tran
return self.transformed_coords
def get_rotran(self):
"""Right multiplying rotation matrix and translation."""
if self.rot is None:
raise Exception("Nothing superimposed yet.")
return self.rot, self.tran
def get_init_rms(self):
"""Root mean square deviation of untransformed coordinates."""
if self.coords is None:
raise Exception("No coordinates set yet.")
if self.init_rms is None:
self.init_rms = self._rms(self.coords, self.reference_coords)
return self.init_rms
def get_rms(self):
"""Root mean square deviation of superimposed coordinates."""
if self.rms is None:
transformed_coords = self.get_transformed()
self.rms = self._rms(transformed_coords, self.reference_coords)
return self.rms
if __name__ == "__main__":
# start with two coordinate sets (Nx3 arrays - float)
x = array([[51.65, -1.90, 50.07],
[50.40, -1.23, 50.65],
[50.68, -0.04, 51.54],
[50.22, -0.02, 52.85]], 'f')
y = array([[51.30, -2.99, 46.54],
[51.09, -1.88, 47.58],
[52.36, -1.20, 48.03],
[52.71, -1.18, 49.38]], 'f')
# start!
sup = SVDSuperimposer()
# set the coords
# y will be rotated and translated on x
sup.set(x, y)
# do the lsq fit
sup.run()
# get the rmsd
rms = sup.get_rms()
# get rotation (right multiplying!) and the translation
rot, tran = sup.get_rotran()
# rotate y on x
y_on_x1 = dot(y, rot) + tran
# same thing
y_on_x2 = sup.get_transformed()
print(y_on_x1)
print("")
print(y_on_x2)
print("")
print("%.2f" % rms)
|
mit
| -6,332,052,390,746,206,000
| 30.432749
| 76
| 0.610977
| false
| 3.748257
| false
| false
| false
|
jiadaizhao/LeetCode
|
0201-0300/0269-Alien Dictionary/0269-Alien Dictionary.py
|
1
|
1091
|
import collections
class Solution:
def alienOrder(self, words: List[str]) -> str:
letters = set()
prev = ''
graph = collections.defaultdict(set)
for word in words:
for c in word:
letters.add(c)
for a, b in zip(prev, word):
if a != b:
graph[a].add(b)
break
else:
if len(prev) > len(word):
return ''
prev = word
degrees = collections.Counter()
for v in graph.values():
for c in v:
degrees[c] += 1
Q = collections.deque()
result = []
for c in letters:
if degrees[c] == 0:
Q.append(c)
result.append(c)
while Q:
c = Q.popleft()
for n in graph[c]:
degrees[n] -= 1
if degrees[n] == 0:
Q.append(n)
result.append(n)
return ''.join(result) if len(result) == len(letters) else ''
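# Example: the classic test case for this problem.
#   Solution().alienOrder(["wrt", "wrf", "er", "ett", "rttt"])  # -> "wertf"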
|
mit
| -724,017,005,088,899,200
| 25.609756
| 69
| 0.409716
| false
| 4.417004
| false
| false
| false
|
fevxie/odoo-infrastructure
|
infrastructure/models/environment.py
|
1
|
9464
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
import string
from openerp import models, fields, api, _
from openerp.exceptions import Warning
from .server import custom_sudo as sudo
from fabric.contrib.files import exists
import os
class environment(models.Model):
""""""
_name = 'infrastructure.environment'
_description = 'environment'
_order = 'number'
_inherit = ['ir.needaction_mixin', 'mail.thread']
_states_ = [
        # State machine states
('draft', 'Draft'),
('active', 'Active'),
('inactive', 'Inactive'),
('cancel', 'Cancel'),
]
@api.model
def get_odoo_version(self):
return self.env['infrastructure.odoo_version'].search([], limit=1)
number = fields.Integer(
string='Number',
required=True,
readonly=True,
states={'draft': [('readonly', False)]},
)
name = fields.Char(
string='Name',
readonly=True,
required=True,
size=16,
states={'draft': [('readonly', False)]},
)
type = fields.Selection([
(u'docker', u'Docker'),
],
string='Type',
readonly=True,
required=True,
states={'draft': [('readonly', False)]},
default='docker'
)
description = fields.Char(
string='Description'
)
partner_id = fields.Many2one(
'res.partner',
string='Partner',
required=True,
readonly=True,
states={'draft': [('readonly', False)]},
)
odoo_version_id = fields.Many2one(
'infrastructure.odoo_version',
string='Odoo Version',
required=True,
readonly=True,
default=get_odoo_version,
states={'draft': [('readonly', False)]},
)
note = fields.Html(
string='Note'
)
color = fields.Integer(
string='Color Index',
compute='get_color',
)
state = fields.Selection(
_states_,
string="State",
default='draft',
)
server_id = fields.Many2one(
'infrastructure.server',
string='Server',
ondelete='cascade',
required=True,
readonly=True,
states={'draft': [('readonly', False)]},
)
instance_ids = fields.One2many(
'infrastructure.instance',
'environment_id',
string='Instances',
context={'from_environment': True},
domain=[('state', '!=', 'cancel')],
)
path = fields.Char(
string='Path',
readonly=True,
required=True,
states={'draft': [('readonly', False)]},
)
instance_count = fields.Integer(
string='# Instances',
compute='_get_instances'
)
database_ids = fields.One2many(
'infrastructure.database',
'environment_id',
string='Databases',
domain=[('state', '!=', 'cancel')],
)
database_count = fields.Integer(
string='# Databases',
compute='_get_databases'
)
@api.one
@api.depends('state')
def get_color(self):
color = 4
if self.state == 'draft':
color = 7
elif self.state == 'cancel':
color = 1
elif self.state == 'inactive':
color = 3
self.color = color
@api.one
@api.depends('database_ids')
def _get_databases(self):
self.database_count = len(self.database_ids)
@api.one
@api.depends('instance_ids')
def _get_instances(self):
self.instance_count = len(self.instance_ids)
@api.one
@api.constrains('number')
def _check_number(self):
if not self.number or self.number < 10 or self.number > 99:
raise Warning(_('Number should be between 10 and 99'))
@api.one
def unlink(self):
if self.state not in ('draft', 'cancel'):
raise Warning(
                _('You cannot delete an environment which is not \
                draft or cancelled.'))
return super(environment, self).unlink()
@api.onchange('server_id')
def _get_number(self):
environments = self.search(
[('server_id', '=', self.server_id.id)],
order='number desc')
if self.server_id.server_use_type:
self.partner_id = self.server_id.used_by_id
self.number = environments and environments[0].number + 1 or 10
    # TODO: if we end up not using the suffix, delete the commented-out code below
@api.onchange('partner_id')
# @api.onchange('partner_id', 'odoo_version_id')
def _get_name(self):
name = False
if self.partner_id:
# if self.partner_id and self.odoo_version_id:
name = self.partner_id.commercial_partner_id.name
# partner_name = self.partner_id.commercial_partner_id.name
# sufix = self.odoo_version_id.sufix
# name = '%s-%s' % (partner_name, sufix)
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
name = ''.join(c for c in name if c in valid_chars)
name = name.replace(' ', '').replace('.', '').lower()
self.name = name
@api.onchange('name', 'server_id')
def _get_path(self):
path = False
if self.server_id.base_path and self.name:
path = os.path.join(self.server_id.base_path, self.name)
self.path = path
@api.one
def make_env_paths(self):
self.server_id.get_env()
if exists(self.path, use_sudo=True):
raise Warning(_("Folder '%s' already exists") %
(self.path))
sudo('mkdir -p ' + self.path)
@api.multi
def create_environment(self):
self.make_env_paths()
self.signal_workflow('sgn_to_active')
@api.one
def check_to_inactive(self):
for instance in self.instance_ids:
if instance.service_type != 'no_service':
raise Warning(_(
                    'To set an environment as inactive you should set all '
                    'of its instances to Service Type "No Service", and '
                    'preferably stop all of them'))
return True
@api.multi
def delete(self):
if self.instance_ids:
raise Warning(_(
'You can not delete an environment that has instances'))
self.server_id.get_env()
paths = [self.path]
for path in paths:
sudo('rm -f -r ' + path)
self.signal_workflow('sgn_cancel')
@api.multi
def action_wfk_set_draft(self):
self.write({'state': 'draft'})
self.delete_workflow()
self.create_workflow()
return True
_sql_constraints = [
('name_uniq', 'unique(name, server_id)',
'Name must be unique per server!'),
('path_uniq', 'unique(path, server_id)',
'Path must be unique per server!'),
('sources_number', 'unique(number, server_id)',
'Number must be unique per server!'),
]
@api.multi
def action_view_instances(self):
'''
        This function returns an action that displays a form or tree view
'''
self.ensure_one()
instances = self.instance_ids.search(
[('environment_id', 'in', self.ids)])
action = self.env['ir.model.data'].xmlid_to_object(
'infrastructure.action_infrastructure_instance_instances')
if not action:
return False
res = action.read()[0]
if len(self) == 1:
res['context'] = {
'default_environment_id': self.id,
'search_default_environment_id': self.id,
'search_default_not_cancel': 1,
}
if not len(instances.ids) > 1:
form_view_id = self.env['ir.model.data'].xmlid_to_res_id(
'infrastructure.view_infrastructure_instance_form')
res['views'] = [(form_view_id, 'form')]
# if 1 then we send res_id, if 0 open a new form view
res['res_id'] = instances and instances.ids[0] or False
return res
@api.multi
def action_view_databases(self):
'''
        This function returns an action that displays a form or tree view
'''
self.ensure_one()
databases = self.database_ids.search(
[('environment_id', 'in', self.ids)])
action = self.env['ir.model.data'].xmlid_to_object(
'infrastructure.action_infrastructure_database_databases')
if not action:
return False
res = action.read()[0]
if len(self) == 1:
res['context'] = {
'default_server_id': self.id,
'search_default_environment_id': self.id,
'search_default_not_cancel': 1,
}
if not len(databases.ids) > 1:
form_view_id = self.env['ir.model.data'].xmlid_to_res_id(
'infrastructure.view_infrastructure_database_form')
res['views'] = [(form_view_id, 'form')]
# if 1 then we send res_id, if 0 open a new form view
res['res_id'] = databases and databases.ids[0] or False
return res
|
agpl-3.0
| -3,885,337,799,388,930,600
| 31.190476
| 78
| 0.533707
| false
| 4.061803
| false
| false
| false
|
basmot/futsal_management
|
base/models/person_address.py
|
1
|
1675
|
##############################################################################
#
# Copyright 2015-2016 Bastien Mottiaux
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
from django.db import models
from django.contrib import admin
from base.models import person
class PersonAddressAdmin(admin.ModelAdmin):
list_display = ('person', 'label', 'location', 'postal_code', 'city', 'country')
    fieldsets = ((None, {'fields': ('person', 'label', 'location', 'postal_code', 'city', 'country')}),)
class PersonAddress(models.Model):
person = models.ForeignKey('Person')
label = models.CharField(max_length=20)
location = models.CharField(max_length=255)
postal_code = models.CharField(max_length=20)
city = models.CharField(max_length=255)
country = models.CharField(max_length=255)
    @staticmethod
    def find_by_person(a_person):
        """ Return a queryset of the addresses of a person (empty if there are none).
        :param a_person: An instance of the class base.models.Person
        """
return PersonAddress.objects.filter(person=a_person)
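# A minimal usage sketch (the person instance is hypothetical):
#   addresses = PersonAddress.find_by_person(a_person)
#   for address in addresses:
#       print(address.city)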
|
apache-2.0
| -7,179,946,004,434,599,000
| 38.880952
| 104
| 0.647761
| false
| 4.156328
| false
| false
| false
|
Zuckonit/devent
|
devent/event.py
|
1
|
1423
|
#!/usr/bin/env python
# encoding: utf-8
"""
Use a global dict to store the events,
and provide wrappers for event-related operations here.
~~~~~~~~
event.py
"""
from gevent.event import AsyncResult
from .errors import (
EventKeyAlreadyExisted,
EventKeyTypeError,
)
__all__ = [
'set_event',
'get_event',
'init_event',
'register_event'
]
EVENT_ITEMS = {}
def set_event(name, value):
"""
@name string, event name
@value any, event value
@return boolean the set status
"""
if not isinstance(name, basestring):
raise EventKeyTypeError
global EVENT_ITEMS
if EVENT_ITEMS.has_key(name):
EVENT_ITEMS[name].set(value)
return True
return False
def register_event(name, value=None):
if not isinstance(name, basestring):
raise EventKeyTypeError
global EVENT_ITEMS
if EVENT_ITEMS.has_key(name):
raise EventKeyAlreadyExisted
EVENT_ITEMS[name] = AsyncResult()
if value is not None:
set_event(name, value)
def get_event(name):
global EVENT_ITEMS
return EVENT_ITEMS.get(name, None)
def init_event(name):
"""
@name string event name
"""
if not isinstance(name, basestring):
raise EventKeyTypeError
global EVENT_ITEMS
EVENT_ITEMS[name] = AsyncResult()
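# A fuller sketch (assumes a running gevent hub; names are illustrative):
# one greenlet blocks on the AsyncResult until another greenlet sets it.
#
#   import gevent
#   register_event('ready')
#   waiter = gevent.spawn(lambda: get_event('ready').get())
#   set_event('ready', 42)
#   waiter.join()  # waiter.value == 42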
if __name__ == '__main__':
register_event('What', 1)
print get_event('What')
|
mit
| 4,389,554,712,849,286,000
| 19.623188
| 49
| 0.624034
| false
| 3.620865
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py
|
1
|
22010
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=invalid-overridden-method
import asyncio
import sys
from io import BytesIO
from itertools import islice
import warnings
from typing import AsyncIterator
from aiohttp import ClientPayloadError
from azure.core.exceptions import HttpResponseError, ServiceResponseError
from .._shared.encryption import decrypt_blob
from .._shared.request_handlers import validate_and_format_range_headers
from .._shared.response_handlers import process_storage_error, parse_length_from_content_range
from .._deserialize import get_page_ranges_result
from .._download import process_range_and_offset, _ChunkDownloader
async def process_content(data, start_offset, end_offset, encryption):
if data is None:
raise ValueError("Response cannot be None.")
content = data.response.body()
if encryption.get('key') is not None or encryption.get('resolver') is not None:
try:
return decrypt_blob(
encryption.get('required'),
encryption.get('key'),
encryption.get('resolver'),
content,
start_offset,
end_offset,
data.response.headers)
except Exception as error:
raise HttpResponseError(
message="Decryption failed.",
response=data.response,
error=error)
return content
class _AsyncChunkDownloader(_ChunkDownloader):
def __init__(self, **kwargs):
super(_AsyncChunkDownloader, self).__init__(**kwargs)
self.stream_lock = asyncio.Lock() if kwargs.get('parallel') else None
self.progress_lock = asyncio.Lock() if kwargs.get('parallel') else None
async def process_chunk(self, chunk_start):
chunk_start, chunk_end = self._calculate_range(chunk_start)
chunk_data = await self._download_chunk(chunk_start, chunk_end - 1)
length = chunk_end - chunk_start
if length > 0:
await self._write_to_stream(chunk_data, chunk_start)
await self._update_progress(length)
async def yield_chunk(self, chunk_start):
chunk_start, chunk_end = self._calculate_range(chunk_start)
return await self._download_chunk(chunk_start, chunk_end - 1)
async def _update_progress(self, length):
if self.progress_lock:
async with self.progress_lock: # pylint: disable=not-async-context-manager
self.progress_total += length
else:
self.progress_total += length
async def _write_to_stream(self, chunk_data, chunk_start):
if self.stream_lock:
async with self.stream_lock: # pylint: disable=not-async-context-manager
self.stream.seek(self.stream_start + (chunk_start - self.start_index))
self.stream.write(chunk_data)
else:
self.stream.write(chunk_data)
async def _download_chunk(self, chunk_start, chunk_end):
download_range, offset = process_range_and_offset(
chunk_start, chunk_end, chunk_end, self.encryption_options)
# No need to download the empty chunk from server if there's no data in the chunk to be downloaded.
# Do optimize and create empty chunk locally if condition is met.
if self._do_optimize(download_range[0], download_range[1]):
chunk_data = b"\x00" * self.chunk_size
else:
range_header, range_validation = validate_and_format_range_headers(
download_range[0],
download_range[1],
check_content_md5=self.validate_content
)
retry_active = True
retry_total = 3
while retry_active:
try:
_, response = await self.client.download(
range=range_header,
range_get_content_md5=range_validation,
validate_content=self.validate_content,
data_stream_total=self.total_size,
download_stream_current=self.progress_total,
**self.request_options
)
retry_active = False
except HttpResponseError as error:
process_storage_error(error)
except ClientPayloadError as error:
retry_total -= 1
if retry_total <= 0:
raise ServiceResponseError(error, error=error)
await asyncio.sleep(1)
chunk_data = await process_content(response, offset[0], offset[1], self.encryption_options)
# This makes sure that if_match is set so that we can validate
# that subsequent downloads are to an unmodified blob
if self.request_options.get('modified_access_conditions'):
self.request_options['modified_access_conditions'].if_match = response.properties.etag
return chunk_data
class _AsyncChunkIterator(object):
"""Async iterator for chunks in blob download stream."""
def __init__(self, size, content, downloader, chunk_size):
self.size = size
self._chunk_size = chunk_size
self._current_content = content
self._iter_downloader = downloader
self._iter_chunks = None
self._complete = (size == 0)
def __len__(self):
return self.size
def __iter__(self):
raise TypeError("Async stream must be iterated asynchronously.")
def __aiter__(self):
return self
async def __anext__(self):
"""Iterate through responses."""
if self._complete:
raise StopAsyncIteration("Download complete")
if not self._iter_downloader:
# cut the data obtained from initial GET into chunks
if len(self._current_content) > self._chunk_size:
return self._get_chunk_data()
self._complete = True
return self._current_content
if not self._iter_chunks:
self._iter_chunks = self._iter_downloader.get_chunk_offsets()
# initial GET result still has more than _chunk_size bytes of data
if len(self._current_content) >= self._chunk_size:
return self._get_chunk_data()
try:
chunk = next(self._iter_chunks)
self._current_content += await self._iter_downloader.yield_chunk(chunk)
except StopIteration:
self._complete = True
# it's likely that there some data left in self._current_content
if self._current_content:
return self._current_content
raise StopAsyncIteration("Download complete")
return self._get_chunk_data()
def _get_chunk_data(self):
chunk_data = self._current_content[: self._chunk_size]
self._current_content = self._current_content[self._chunk_size:]
return chunk_data
class StorageStreamDownloader(object): # pylint: disable=too-many-instance-attributes
"""A streaming object to download from Azure Storage.
:ivar str name:
The name of the blob being downloaded.
:ivar str container:
The name of the container where the blob is.
:ivar ~azure.storage.blob.BlobProperties properties:
The properties of the blob being downloaded. If only a range of the data is being
downloaded, this will be reflected in the properties.
:ivar int size:
        The size of the total data in the stream. This will be the byte range if specified,
otherwise the total size of the blob.
"""
def __init__(
self,
clients=None,
config=None,
start_range=None,
end_range=None,
validate_content=None,
encryption_options=None,
max_concurrency=1,
name=None,
container=None,
encoding=None,
**kwargs
):
self.name = name
self.container = container
self.properties = None
self.size = None
self._clients = clients
self._config = config
self._start_range = start_range
self._end_range = end_range
self._max_concurrency = max_concurrency
self._encoding = encoding
self._validate_content = validate_content
self._encryption_options = encryption_options or {}
self._request_options = kwargs
self._location_mode = None
self._download_complete = False
self._current_content = None
self._file_size = None
self._non_empty_ranges = None
self._response = None
# The service only provides transactional MD5s for chunks under 4MB.
# If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first
# chunk so a transactional MD5 can be retrieved.
self._first_get_size = self._config.max_single_get_size if not self._validate_content \
else self._config.max_chunk_get_size
initial_request_start = self._start_range if self._start_range is not None else 0
if self._end_range is not None and self._end_range - self._start_range < self._first_get_size:
initial_request_end = self._end_range
else:
initial_request_end = initial_request_start + self._first_get_size - 1
self._initial_range, self._initial_offset = process_range_and_offset(
initial_request_start, initial_request_end, self._end_range, self._encryption_options
)
def __len__(self):
return self.size
async def _setup(self):
self._response = await self._initial_request()
self.properties = self._response.properties
self.properties.name = self.name
self.properties.container = self.container
# Set the content length to the download size instead of the size of
# the last range
self.properties.size = self.size
# Overwrite the content range to the user requested range
self.properties.content_range = 'bytes {0}-{1}/{2}'.format(
self._start_range,
self._end_range,
self._file_size
)
# Overwrite the content MD5 as it is the MD5 for the last range instead
# of the stored MD5
# TODO: Set to the stored MD5 when the service returns this
self.properties.content_md5 = None
if self.size == 0:
self._current_content = b""
else:
self._current_content = await process_content(
self._response,
self._initial_offset[0],
self._initial_offset[1],
self._encryption_options
)
async def _initial_request(self):
range_header, range_validation = validate_and_format_range_headers(
self._initial_range[0],
self._initial_range[1],
start_range_required=False,
end_range_required=False,
check_content_md5=self._validate_content)
retry_active = True
retry_total = 3
while retry_active:
try:
location_mode, response = await self._clients.blob.download(
range=range_header,
range_get_content_md5=range_validation,
validate_content=self._validate_content,
data_stream_total=None,
download_stream_current=0,
**self._request_options)
# Check the location we read from to ensure we use the same one
# for subsequent requests.
self._location_mode = location_mode
# Parse the total file size and adjust the download size if ranges
# were specified
self._file_size = parse_length_from_content_range(response.properties.content_range)
if self._end_range is not None:
# Use the length unless it is over the end of the file
self.size = min(self._file_size, self._end_range - self._start_range + 1)
elif self._start_range is not None:
self.size = self._file_size - self._start_range
else:
self.size = self._file_size
retry_active = False
except HttpResponseError as error:
if self._start_range is None and error.response.status_code == 416:
# Get range will fail on an empty file. If the user did not
# request a range, do a regular get request in order to get
# any properties.
try:
_, response = await self._clients.blob.download(
validate_content=self._validate_content,
data_stream_total=0,
download_stream_current=0,
**self._request_options)
retry_active = False
except HttpResponseError as error:
process_storage_error(error)
# Set the download size to empty
self.size = 0
self._file_size = 0
else:
process_storage_error(error)
except ClientPayloadError as error:
retry_total -= 1
if retry_total <= 0:
raise ServiceResponseError(error, error=error)
await asyncio.sleep(1)
# get page ranges to optimize downloading sparse page blob
if response.properties.blob_type == 'PageBlob':
try:
page_ranges = await self._clients.page_blob.get_page_ranges()
self._non_empty_ranges = get_page_ranges_result(page_ranges)[0]
except HttpResponseError:
pass
# If the file is small, the download is complete at this point.
# If file size is large, download the rest of the file in chunks.
if response.properties.size != self.size:
if self._request_options.get('modified_access_conditions'):
self._request_options['modified_access_conditions'].if_match = response.properties.etag
else:
self._download_complete = True
return response
def chunks(self):
# type: () -> AsyncIterator[bytes]
"""Iterate over chunks in the download stream.
:rtype: AsyncIterator[bytes]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_hello_world_async.py
:start-after: [START download_a_blob_in_chunk]
:end-before: [END download_a_blob_in_chunk]
:language: python
:dedent: 16
:caption: Download a blob using chunks().
"""
if self.size == 0 or self._download_complete:
iter_downloader = None
else:
data_end = self._file_size
if self._end_range is not None:
# Use the length unless it is over the end of the file
data_end = min(self._file_size, self._end_range + 1)
iter_downloader = _AsyncChunkDownloader(
client=self._clients.blob,
non_empty_ranges=self._non_empty_ranges,
total_size=self.size,
chunk_size=self._config.max_chunk_get_size,
current_progress=self._first_get_size,
start_range=self._initial_range[1] + 1, # Start where the first download ended
end_range=data_end,
stream=None,
parallel=False,
validate_content=self._validate_content,
encryption_options=self._encryption_options,
use_location=self._location_mode,
**self._request_options)
return _AsyncChunkIterator(
size=self.size,
content=self._current_content,
downloader=iter_downloader,
chunk_size=self._config.max_chunk_get_size)
async def readall(self):
"""Download the contents of this blob.
This operation is blocking until all data is downloaded.
:rtype: bytes or str
"""
stream = BytesIO()
await self.readinto(stream)
data = stream.getvalue()
if self._encoding:
return data.decode(self._encoding)
return data
async def content_as_bytes(self, max_concurrency=1):
"""Download the contents of this file.
This operation is blocking until all data is downloaded.
:keyword int max_concurrency:
The number of parallel connections with which to download.
:rtype: bytes
"""
warnings.warn(
"content_as_bytes is deprecated, use readall instead",
DeprecationWarning
)
self._max_concurrency = max_concurrency
return await self.readall()
async def content_as_text(self, max_concurrency=1, encoding="UTF-8"):
"""Download the contents of this blob, and decode as text.
This operation is blocking until all data is downloaded.
:param int max_concurrency:
The number of parallel connections with which to download.
:param str encoding:
            Text encoding to decode the downloaded bytes. Default is UTF-8.
:rtype: str
"""
warnings.warn(
"content_as_text is deprecated, use readall instead",
DeprecationWarning
)
self._max_concurrency = max_concurrency
self._encoding = encoding
return await self.readall()
async def readinto(self, stream):
"""Download the contents of this blob to a stream.
:param stream:
The stream to download to. This can be an open file-handle,
or any writable stream. The stream must be seekable if the download
uses more than one parallel connection.
:returns: The number of bytes read.
:rtype: int
"""
# the stream must be seekable if parallel download is required
parallel = self._max_concurrency > 1
if parallel:
error_message = "Target stream handle must be seekable."
if sys.version_info >= (3,) and not stream.seekable():
raise ValueError(error_message)
try:
stream.seek(stream.tell())
except (NotImplementedError, AttributeError):
raise ValueError(error_message)
# Write the content to the user stream
stream.write(self._current_content)
if self._download_complete:
return self.size
data_end = self._file_size
if self._end_range is not None:
# Use the length unless it is over the end of the file
data_end = min(self._file_size, self._end_range + 1)
downloader = _AsyncChunkDownloader(
client=self._clients.blob,
non_empty_ranges=self._non_empty_ranges,
total_size=self.size,
chunk_size=self._config.max_chunk_get_size,
current_progress=self._first_get_size,
start_range=self._initial_range[1] + 1, # start where the first download ended
end_range=data_end,
stream=stream,
parallel=parallel,
validate_content=self._validate_content,
encryption_options=self._encryption_options,
use_location=self._location_mode,
**self._request_options)
dl_tasks = downloader.get_chunk_offsets()
running_futures = [
asyncio.ensure_future(downloader.process_chunk(d))
for d in islice(dl_tasks, 0, self._max_concurrency)
]
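        # Keep at most max_concurrency chunk downloads in flight: whenever one
        # completes, surface any error it raised, then schedule the next chunk
        # in its place until the offset generator is exhausted.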
while running_futures:
# Wait for some download to finish before adding a new one
done, running_futures = await asyncio.wait(
running_futures, return_when=asyncio.FIRST_COMPLETED)
try:
for task in done:
task.result()
except HttpResponseError as error:
process_storage_error(error)
try:
next_chunk = next(dl_tasks)
except StopIteration:
break
else:
running_futures.add(asyncio.ensure_future(downloader.process_chunk(next_chunk)))
if running_futures:
# Wait for the remaining downloads to finish
done, _running_futures = await asyncio.wait(running_futures)
try:
for task in done:
task.result()
except HttpResponseError as error:
process_storage_error(error)
return self.size
async def download_to_stream(self, stream, max_concurrency=1):
"""Download the contents of this blob to a stream.
:param stream:
The stream to download to. This can be an open file-handle,
or any writable stream. The stream must be seekable if the download
uses more than one parallel connection.
:param int max_concurrency:
The number of parallel connections with which to download.
:returns: The properties of the downloaded blob.
:rtype: Any
"""
warnings.warn(
"download_to_stream is deprecated, use readinto instead",
DeprecationWarning
)
self._max_concurrency = max_concurrency
await self.readinto(stream)
return self.properties
|
mit
| -8,986,826,406,296,550,000
| 39.23766
| 107
| 0.581645
| false
| 4.631734
| true
| false
| false
|
Naereen/mazhe
|
phystricksExoXLVL.py
|
1
|
1188
|
from phystricks import *
def ExoXLVL():
pspict,fig = SinglePicture("ExoXLVL")
x=var('x')
dist=0.1
l=2.5
C1=Rectangle( Point(-l,l),Point(-dist,dist) )
C2=Rectangle( Point(0,0),Point(l,l) )
C3=Rectangle( Point(0,0),Point(-l,-l) )
C4=Rectangle( Point(dist,-dist),Point(l,-l) )
C1.parameters.color="blue"
C2.parameters.color="red"
C3.parameters.color="cyan"
C4.parameters.color="green"
C1.parameters.style="dashed"
C2.parameters.style=C1.parameters.style
    C3.parameters.style=C1.parameters.style
C4.parameters.style=C1.parameters.style
a1=C1.center()
a1.parameters.symbol=""
a1.put_mark(0,0,"\( xy\)",automatic_place=pspict)
a2=C2.center()
a2.parameters.symbol=""
a2.put_mark(0,0,"\( x-y\)",automatic_place=pspict)
a3=C3.center()
a3.parameters.symbol=""
a3.put_mark(0,0,"\( x^2y\)",automatic_place=pspict)
a4=C4.center()
a4.parameters.symbol=""
a4.put_mark(0,0,"\( x+y\)",automatic_place=pspict)
pspict.axes.no_graduation()
pspict.DrawGraphs(C1,C2,C3,C4,a1,a2,a3,a4)
pspict.DrawDefaultAxes()
pspict.dilatation(1)
fig.conclude()
fig.write_the_file()
|
gpl-3.0
| 8,272,567,932,617,199,000
| 26
| 55
| 0.637205
| false
| 2.501053
| false
| false
| false
|
chaincoin/chaincoin
|
contrib/devtools/update-translations.py
|
1
|
8664
|
#!/usr/bin/env python3
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
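# Typical invocation (assumed; requires the transifex client `tx` on PATH):
#
#     cd <repository root>
#     contrib/devtools/update-translations.py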
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'chaincoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
# Regexp to check for Chaincoin addresses
ADDRESS_REGEXP = re.compile('([C2]|chc)[a-zA-Z0-9]{30,}')
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
sys.exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
sys.exit(1)
def find_format_specifiers(s, errors):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
try:
specifiers.append(s[percent+1])
pos = percent+2
except IndexError:
errors.append("Failed to parse specifier: %s'" % (sanitize_string(s)))
# just jump over and move on
pos = percent+1
return specifiers
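# Illustrative behaviour (worked example of mine, not from the upstream script):
#   find_format_specifiers("loaded %d of %s", []) -> ['d', 's']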
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# If both numeric format specifiers and "others" are used, assume we're dealing
# with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
# only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
# any kind of escaping that would be necessary for strprintf. Without this, this function
# would wrongly detect '%)' as a printf format specifier.
if numeric:
other = []
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric),other
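# Sketch of the resulting split (worked examples, not upstream-provided):
#   split_format_specifiers(['1', '2'])  -> ({'1', '2'}, [])      # pure Qt
#   split_format_specifiers(['s', 'd'])  -> (set(), ['s', 'd'])   # pure strprintf
#   split_format_specifiers(['1', ')'])  -> ({'1'}, [])           # ')' dropped: Qt message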
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source, errors))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation, errors))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
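# e.g. remove_invalid_characters(b'foo\x00bar\x1f') == b'foobar'  (worked example)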
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disabled by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def contains_bitcoin_addr(text, errors):
if text is not None and ADDRESS_REGEXP.search(text) is not None:
errors.append('Translation "%s" contains a chaincoin address. This will be removed.' % (text))
return True
return False
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood, only 'utf-8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus) and not contains_bitcoin_addr(translation, errors)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
        # if diff reduction is requested, replace some XML to 'sanitize' it to Qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
fetch_all_translations()
postprocess_translations()
|
mit
| 1,825,707,706,652,677,600
| 38.381818
| 140
| 0.622807
| false
| 4.151414
| false
| false
| false
|
rosalindfdt/huzzahbadge
|
huzzah/register/adafruit_register/i2c_bcd_alarm.py
|
1
|
6391
|
# The MIT License (MIT)
#
# Copyright (c) 2016 Scott Shawcroft for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import time
def _bcd2bin(value):
"""Convert binary coded decimal to Binary
Arguments:
value - the BCD value to convert to binary (required, no default)
"""
return value - 6 * (value >> 4)
def _bin2bcd(value):
"""Convert a binary value to binary coded decimal.
Arguments:
value - the binary value to convert to BCD. (required, no default)
"""
return value + 6 * (value // 10)
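# Worked round-trip examples (mine, for illustration):
#   _bcd2bin(0x59) == 59        # 0x59 -> 89 - 6*5 = 59
#   _bin2bcd(59)   == 0x59      # 59 + 6*5 = 89 = 0x59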
ALARM_COMPONENT_DISABLED = 0x80
FREQUENCY = ["secondly", "minutely", "hourly", "daily", "weekly", "monthly"]
class BCDAlarmTimeRegister:
"""
Alarm date and time register using binary coded decimal structure.
The byte order of the registers must* be: [second], minute, hour, day,
weekday. Each byte must also have a high enable bit where 1 is disabled and
0 is enabled.
* If weekday_shared is True, then weekday and day share a register.
* If has_seconds is True, then there is a seconds register.
Values are a tuple of (`time.struct_time`, `str`) where the struct represents
a date and time that would alarm. The string is the frequency:
* "secondly", once a second (only if alarm has_seconds)
* "minutely", once a minute when seconds match (if alarm doesn't seconds then when seconds = 0)
* "hourly", once an hour when `tm_min` and `tm_sec` match
* "daily", once a day when `tm_hour`, `tm_min` and `tm_sec` match
* "weekly", once a week when `tm_wday`, `tm_hour`, `tm_min`, `tm_sec` match
* "monthly", once a month when `tm_mday`, `tm_hour`, `tm_min`, `tm_sec` match
:param int register_address: The register address to start the read
:param bool has_seconds: True if the alarm can happen minutely.
:param bool weekday_shared: True if weekday and day share the same register
:param int weekday_start: 0 or 1 depending on the RTC's representation of the first day of the week (Monday)
"""
# Defaults are based on alarm1 of the DS3231.
def __init__(self, register_address, has_seconds=True, weekday_shared=True, weekday_start=1):
buffer_size = 5
if weekday_shared:
buffer_size -= 1
if has_seconds:
buffer_size += 1
self.has_seconds = has_seconds
self.buffer = bytearray(buffer_size)
self.buffer[0] = register_address
self.weekday_shared = weekday_shared
self.weekday_start = weekday_start
def __get__(self, obj, objtype=None):
# Read the alarm register.
with obj.i2c_device:
obj.i2c_device.write(self.buffer, end=1, stop=False)
obj.i2c_device.readinto(self.buffer, start=1)
frequency = None
i = 1
seconds = 0
if self.has_seconds:
if (self.buffer[1] & 0x80) != 0:
frequency = "secondly"
else:
frequency = "minutely"
seconds = _bcd2bin(self.buffer[1] & 0x7f)
i = 2
minute = 0
if (self.buffer[i] & 0x80) == 0:
frequency = "hourly"
minute = _bcd2bin(self.buffer[i] & 0x7f)
hour = 0
if (self.buffer[i + 1] & 0x80) == 0:
frequency = "daily"
hour = _bcd2bin(self.buffer[i + 1] & 0x7f)
mday = None
wday = None
if (self.buffer[i + 2] & 0x80) == 0:
# day of the month
if not self.weekday_shared or (self.buffer[i + 2] & 0x40) == 0:
frequency = "monthly"
mday = _bcd2bin(self.buffer[i + 2] & 0x3f)
else: # weekday
frequency = "weekly"
wday = _bcd2bin(self.buffer[i + 2] & 0x3f) - self.weekday_start
# weekday
if not self.weekday_shared and (self.buffer[i + 3] & 0x80) == 0:
frequency = "monthly"
mday = _bcd2bin(self.buffer[i + 3] & 0x7f)
if mday is not None:
wday = (mday - 2) % 7
elif wday is not None:
mday = wday + 2
else:
# Jan 1, 2017 was a Sunday (6)
wday = 6
mday = 1
return (time.struct_time((2017, 1, mday, hour, minute, seconds, wday, mday, -1)), frequency)
def __set__(self, obj, value):
# Turn all components off by default.
for i in range(len(self.buffer) - 1):
self.buffer[i + 1] = ALARM_COMPONENT_DISABLED
frequency = FREQUENCY.index(value[1])
# i is the index of the minute byte
i = 2 if self.has_seconds else 1
if frequency > 0 and self.has_seconds: # minutely at least
self.buffer[1] = _bin2bcd(value[0].tm_sec)
if frequency > 1: # hourly at least
self.buffer[i] = _bin2bcd(value[0].tm_min)
if frequency > 2: # daily at least
self.buffer[i + 1] = _bin2bcd(value[0].tm_hour)
if value[1] == "weekly":
if self.weekday_shared:
self.buffer[i + 2] = _bin2bcd(value[0].tm_wday + self.weekday_start) | 0x40
else:
self.buffer[i + 3] = _bin2bcd(value[0].tm_wday + self.weekday_start)
elif value[1] == "monthly":
self.buffer[i + 2] = _bin2bcd(value[0].tm_mday)
with obj.i2c_device:
obj.i2c_device.write(self.buffer)
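# A minimal usage sketch (assumed driver code, not part of this module): a
# DS3231-style driver could declare
#
#     alarm1 = BCDAlarmTimeRegister(0x07)
#
# and a weekly alarm for Mondays at 07:30:00 would then be set with
#
#     rtc.alarm1 = (time.struct_time((2017, 1, 2, 7, 30, 0, 0, 2, -1)), "weekly")
#
# For "weekly" only tm_wday, tm_hour, tm_min and tm_sec are consulted; the
# remaining struct_time fields are placeholders.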
|
artistic-2.0
| 7,971,070,526,889,591,000
| 37.733333
| 112
| 0.610703
| false
| 3.596511
| false
| false
| false
|
RincewindWizzard/django_digisys
|
django_digisys/settings.py
|
1
|
2561
|
"""
Django settings for django_digisys project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-$1-^^%upj_+svh@k)tfns5z&dt*)2vnufrlee@347e8!k6j=5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'suit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'uebungen',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'django_digisys.urls'
WSGI_APPLICATION = 'django_digisys.wsgi.application'
# django suit
SUIT_CONFIG = {
'ADMIN_NAME': 'DigiSys Admin',
'MENU': (
# Keep original label and models
'sites',
{ 'app': 'auth' },
{ 'app': 'uebungen' },
{ 'label': 'Export', 'url': '/digisys/', 'icon': 'icon-hand-right' },
)
}
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP
TEMPLATE_CONTEXT_PROCESSORS = TCP + (
'django.core.context_processors.request',
)
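# django-suit's admin templates expect the request object in template context,
# which is why 'django.core.context_processors.request' is appended above
# (assumption based on the suit setup instructions).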
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'de-De'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DATE_FORMAT = 'j F Y'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
gpl-2.0
| 5,638,799,035,028,847,000
| 23.625
| 77
| 0.696212
| false
| 3.237674
| false
| false
| false
|
sbuss/TigerShark
|
tigershark/parsers/M278_4010_X094_27_A1.py
|
1
|
129238
|
#
# Generated by TigerShark.tools.convertPyX12 on 2012-07-10 16:29:58.981434
#
from tigershark.X12.parse import Message, Loop, Segment, Composite, Element, Properties
parsed_278_HEADER = Loop( u'HEADER', Properties(looptype=u'wrapper',repeat=u'1',pos=u'015',req_sit=u'R',desc=u'Table 1 - Header'),
Segment( u'BHT', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'020',desc=u'Beginning of Hierarchical Transaction'),
Element( u'BHT01', Properties(desc=u'Hierarchical Structure Code', req_sit=u'R', data_type=(u'ID',u'4',u'4'), position=1,
codes=[u'0078'] ) ),
Element( u'BHT02', Properties(desc=u'Transaction Set Purpose Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=2,
codes=[u'11'] ) ),
Element( u'BHT03', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=3,
codes=[] ) ),
Element( u'BHT04', Properties(desc=u'Date', req_sit=u'R', data_type=(u'DT',u'8',u'8'), position=4,
codes=[] ) ),
Element( u'BHT05', Properties(desc=u'Time', req_sit=u'R', data_type=(u'TM',u'4',u'8'), position=5,
codes=[] ) ),
Element( u'BHT06', Properties(desc=u'Transaction Type Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=6,
codes=[u'18', u'19', u'AT'] ) ),
),
)
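# Each parsed_278_* object below mirrors one table/loop of the ASC X12 278
# (X094A1) implementation guide: Loop -> Segment -> Element/Composite, with
# Properties carrying usage (req_sit), repeat counts, positions and code lists.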
parsed_278_2010A = Loop( u'2010A', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'R',desc=u'Utilization Management Organization (UMO) Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Utilization Management Organization (UMO) Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'X3'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'24', u'34', u'46', u'PI', u'XV', u'XX'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'R', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'PER', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'220',desc=u'Utilization Management Organization (UMO) Contact Information'),
Element( u'PER01', Properties(desc=u'Contact Function Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'IC'] ) ),
Element( u'PER02', Properties(desc=u'Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=2,
codes=[] ) ),
Element( u'PER03', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'EM', u'FX', u'TE'] ) ),
Element( u'PER04', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=4,
codes=[] ) ),
Element( u'PER05', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER06', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=6,
codes=[] ) ),
Element( u'PER07', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=7,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER08', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=8,
codes=[] ) ),
Element( u'PER09', Properties(desc=u'Contact Inquiry Reference', req_sit=u'N', data_type=(u'AN',u'1',u'20'), position=9,
codes=[] ) ),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'230',desc=u'Utilization Management Organization (UMO) Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'04', u'41', u'42', u'79', u'80', u'T4'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'N', u'P', u'Y'] ) ),
),
)
parsed_278_2010B = Loop( u'2010B', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'R',desc=u'Requester Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Requester Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1P', u'FA'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'24', u'34', u'46', u'XX'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'R', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'REF', Properties(syntax='',req_sit=u'S',repeat=u'8',pos=u'180',desc=u'Requester Supplemental Identification'),
Element( u'REF01', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1G', u'1J', u'CT', u'EI', u'N5', u'N7', u'SY', u'ZH'] ) ),
Element( u'REF02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'REF03', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=3,
codes=[] ) ),
Composite( u'C040', Properties(req_sit=u'N',refdes='',seq=u'04',desc=u'Reference Identifier'),
),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'230',desc=u'Requester Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'35', u'41', u'43', u'44', u'45', u'46', u'47', u'49', u'50', u'51', u'79', u'97'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N', u'R'] ) ),
),
Segment( u'PRV', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'240',desc=u'Requester Provider Information'),
Element( u'PRV01', Properties(desc=u'Provider Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=1,
codes=[u'AD', u'AS', u'AT', u'CO', u'CV', u'OP', u'OR', u'OT', u'PC', u'PE', u'RF'] ) ),
Element( u'PRV02', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'ZZ'] ) ),
Element( u'PRV03', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=3,
codes=[] ) ),
Element( u'PRV04', Properties(desc=u'State or Province Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=4,
codes=[] ) ),
Composite( u'C035', Properties(req_sit=u'N',refdes='',seq=u'05',desc=u'Provider Specialty Information'),
),
Element( u'PRV06', Properties(desc=u'Provider Organization Code', req_sit=u'N', data_type=(u'ID',u'3',u'3'), position=6,
codes=[] ) ),
),
)
parsed_278_2010CA = Loop( u'2010CA', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'R',desc=u'Subscriber Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Subscriber Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'IL'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'MI', u'ZZ'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'R', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'REF', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'180',desc=u'Subscriber Supplemental Identification'),
Element( u'REF01', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1L', u'1W', u'6P', u'A6', u'EJ', u'F6', u'HJ', u'IG', u'N6', u'NQ', u'SY'] ) ),
Element( u'REF02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'REF03', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=3,
codes=[] ) ),
Composite( u'C040', Properties(req_sit=u'N',refdes='',seq=u'04',desc=u'Reference Identifier'),
),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'230',desc=u'Subscriber Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'15', u'58', u'64', u'65', u'66', u'67', u'68', u'71', u'72', u'73', u'74', u'75', u'76', u'77', u'78', u'79', u'95'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N'] ) ),
),
Segment( u'DMG', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'250',desc=u'Subscriber Demographic Information'),
Element( u'DMG01', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'D8'] ) ),
Element( u'DMG02', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=2,
codes=[] ) ),
Element( u'DMG03', Properties(desc=u'Gender Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=3,
codes=[u'F', u'M', u'U'] ) ),
Element( u'DMG04', Properties(desc=u'Marital Status Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=4,
codes=[] ) ),
Element( u'DMG05', Properties(desc=u'Race or Ethnicity Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=5,
codes=[] ) ),
Element( u'DMG06', Properties(desc=u'Citizenship Status Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=6,
codes=[] ) ),
Element( u'DMG07', Properties(desc=u'Country Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=7,
codes=[] ) ),
Element( u'DMG08', Properties(desc=u'Basis of Verification Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=8,
codes=[] ) ),
Element( u'DMG09', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=9,
codes=[] ) ),
),
)
parsed_278_2010CB = Loop( u'2010CB', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'S',desc=u'Additional Patient Information Contact Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Additional Patient Information Contact Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1P', u'2B', u'ABG', u'FA', u'PR', u'X3'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'24', u'34', u'46', u'PI', u'XV', u'XX'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'N3', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'200',desc=u'Additional Patient Information Contact Address'),
Element( u'N301', Properties(desc=u'Address Information', req_sit=u'R', data_type=(u'AN',u'1',u'55'), position=1,
codes=[] ) ),
Element( u'N302', Properties(desc=u'Address Information', req_sit=u'S', data_type=(u'AN',u'1',u'55'), position=2,
codes=[] ) ),
),
Segment( u'N4', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'210',desc=u'Additional Patient Information Contact City/State/Zip Code'),
Element( u'N401', Properties(desc=u'City Name', req_sit=u'S', data_type=(u'AN',u'2',u'30'), position=1,
codes=[] ) ),
Element( u'N402', Properties(desc=u'State or Province Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'N403', Properties(desc=u'Postal Code', req_sit=u'S', data_type=(u'ID',u'3',u'15'), position=3,
codes=[] ) ),
Element( u'N404', Properties(desc=u'Country Code', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'N405', Properties(desc=u'Location Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'B1', u'DP'] ) ),
Element( u'N406', Properties(desc=u'Location Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Segment( u'PER', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'220',desc=u'Additional Patient Information Contact Information'),
Element( u'PER01', Properties(desc=u'Contact Function Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'IC'] ) ),
Element( u'PER02', Properties(desc=u'Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=2,
codes=[] ) ),
Element( u'PER03', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'EM', u'FX', u'TE'] ) ),
Element( u'PER04', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=4,
codes=[] ) ),
Element( u'PER05', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER06', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=6,
codes=[] ) ),
Element( u'PER07', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=7,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER08', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=8,
codes=[] ) ),
Element( u'PER09', Properties(desc=u'Contact Inquiry Reference', req_sit=u'N', data_type=(u'AN',u'1',u'20'), position=9,
codes=[] ) ),
),
)
parsed_278_2010DA = Loop( u'2010DA', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'R',desc=u'Dependent Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Dependent Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'QC'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'MI', u'ZZ'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'REF', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'180',desc=u'Dependent Supplemental Identification'),
Element( u'REF01', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'A6', u'EJ', u'SY'] ) ),
Element( u'REF02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'REF03', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=3,
codes=[] ) ),
Composite( u'C040', Properties(req_sit=u'N',refdes='',seq=u'04',desc=u'Reference Identifier'),
),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'230',desc=u'Dependent Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'15', u'33', u'58', u'64', u'65', u'66', u'67', u'68', u'71', u'77', u'95'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N'] ) ),
),
Segment( u'DMG', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'250',desc=u'Dependent Demographic Information'),
Element( u'DMG01', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'D8'] ) ),
Element( u'DMG02', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=2,
codes=[] ) ),
Element( u'DMG03', Properties(desc=u'Gender Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=3,
codes=[u'F', u'M', u'U'] ) ),
Element( u'DMG04', Properties(desc=u'Marital Status Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=4,
codes=[] ) ),
Element( u'DMG05', Properties(desc=u'Race or Ethnicity Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=5,
codes=[] ) ),
Element( u'DMG06', Properties(desc=u'Citizenship Status Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=6,
codes=[] ) ),
Element( u'DMG07', Properties(desc=u'Country Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=7,
codes=[] ) ),
Element( u'DMG08', Properties(desc=u'Basis of Verification Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=8,
codes=[] ) ),
Element( u'DMG09', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=9,
codes=[] ) ),
),
Segment( u'INS', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'260',desc=u'Dependent Relationship'),
Element( u'INS01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N'] ) ),
Element( u'INS02', Properties(desc=u'Individual Relationship Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=2,
codes=[u'01', u'04', u'05', u'07', u'09', u'10', u'15', u'17', u'19', u'20', u'21', u'22', u'23', u'24', u'29', u'32', u'33', u'34', u'39', u'40', u'41', u'43', u'53', u'G8'] ) ),
Element( u'INS03', Properties(desc=u'Maintenance Type Code', req_sit=u'N', data_type=(u'ID',u'3',u'3'), position=3,
codes=[] ) ),
Element( u'INS04', Properties(desc=u'Maintenance Reason Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'INS05', Properties(desc=u'Benefit Status Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=5,
codes=[] ) ),
Element( u'INS06', Properties(desc=u'Medicare Plan Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=6,
codes=[] ) ),
Element( u'INS07', Properties(desc=u'Consolidated Omnibus Budget Reconciliation Act (COBRA) Qualifying', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=7,
codes=[] ) ),
Element( u'INS08', Properties(desc=u'Employment Status Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=8,
codes=[] ) ),
Element( u'INS09', Properties(desc=u'Student Status Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=9,
codes=[] ) ),
Element( u'INS10', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=10,
codes=[] ) ),
Element( u'INS11', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
Element( u'INS12', Properties(desc=u'Date Time Period', req_sit=u'N', data_type=(u'AN',u'1',u'35'), position=12,
codes=[] ) ),
Element( u'INS13', Properties(desc=u'Confidentiality Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=13,
codes=[] ) ),
Element( u'INS14', Properties(desc=u'City Name', req_sit=u'N', data_type=(u'AN',u'2',u'30'), position=14,
codes=[] ) ),
Element( u'INS15', Properties(desc=u'State or Province Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=15,
codes=[] ) ),
Element( u'INS16', Properties(desc=u'Country Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=16,
codes=[] ) ),
Element( u'INS17', Properties(desc=u'Number', req_sit=u'S', data_type=(u'N0',u'1',u'9'), position=17,
codes=[] ) ),
),
)
parsed_278_2010DB = Loop( u'2010DB', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'S',desc=u'Additional Patient Information Contact Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Additional Patient Information Contact Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1P', u'2B', u'ABG', u'FA', u'PR', u'X3'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'24', u'34', u'46', u'PI', u'XV', u'XX'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'N3', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'200',desc=u'Additional Patient Information Contact Address'),
Element( u'N301', Properties(desc=u'Address Information', req_sit=u'R', data_type=(u'AN',u'1',u'55'), position=1,
codes=[] ) ),
Element( u'N302', Properties(desc=u'Address Information', req_sit=u'S', data_type=(u'AN',u'1',u'55'), position=2,
codes=[] ) ),
),
Segment( u'N4', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'210',desc=u'Additional Patient Information Contact City/State/Zip Code'),
Element( u'N401', Properties(desc=u'City Name', req_sit=u'S', data_type=(u'AN',u'2',u'30'), position=1,
codes=[] ) ),
Element( u'N402', Properties(desc=u'State or Province Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'N403', Properties(desc=u'Postal Code', req_sit=u'S', data_type=(u'ID',u'3',u'15'), position=3,
codes=[] ) ),
Element( u'N404', Properties(desc=u'Country Code', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'N405', Properties(desc=u'Location Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'B1', u'DP'] ) ),
Element( u'N406', Properties(desc=u'Location Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Segment( u'PER', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'220',desc=u'Additional Patient Information Contact Information'),
Element( u'PER01', Properties(desc=u'Contact Function Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'IC'] ) ),
Element( u'PER02', Properties(desc=u'Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=2,
codes=[] ) ),
Element( u'PER03', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'EM', u'FX', u'TE'] ) ),
Element( u'PER04', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=4,
codes=[] ) ),
Element( u'PER05', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER06', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=6,
codes=[] ) ),
Element( u'PER07', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=7,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER08', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=8,
codes=[] ) ),
Element( u'PER09', Properties(desc=u'Contact Inquiry Reference', req_sit=u'N', data_type=(u'AN',u'1',u'20'), position=9,
codes=[] ) ),
),
)
parsed_278_2010E = Loop( u'2010E', Properties(looptype='',repeat=u'3',pos=u'170',req_sit=u'R',desc=u'Service Provider Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'170',desc=u'Service Provider Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1T', u'FA', u'SJ'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'24', u'34', u'46', u'XX'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'REF', Properties(syntax='',req_sit=u'S',repeat=u'7',pos=u'180',desc=u'Service Provider Supplemental Identification'),
Element( u'REF01', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1G', u'1J', u'EI', u'N5', u'N7', u'SY', u'ZH'] ) ),
Element( u'REF02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'REF03', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=3,
codes=[] ) ),
Composite( u'C040', Properties(req_sit=u'N',refdes='',seq=u'04',desc=u'Reference Identifier'),
),
),
Segment( u'N3', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'200',desc=u'Service Provider Address'),
Element( u'N301', Properties(desc=u'Address Information', req_sit=u'R', data_type=(u'AN',u'1',u'55'), position=1,
codes=[] ) ),
Element( u'N302', Properties(desc=u'Address Information', req_sit=u'S', data_type=(u'AN',u'1',u'55'), position=2,
codes=[] ) ),
),
Segment( u'N4', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'210',desc=u'Service Provider City/State/ZIP Code'),
Element( u'N401', Properties(desc=u'City Name', req_sit=u'S', data_type=(u'AN',u'2',u'30'), position=1,
codes=[] ) ),
Element( u'N402', Properties(desc=u'State or Province Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'N403', Properties(desc=u'Postal Code', req_sit=u'S', data_type=(u'ID',u'3',u'15'), position=3,
codes=[] ) ),
Element( u'N404', Properties(desc=u'Country Code', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'N405', Properties(desc=u'Location Qualifier', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=5,
codes=[] ) ),
Element( u'N406', Properties(desc=u'Location Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Segment( u'PER', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'220',desc=u'Service Provider Contact Information'),
Element( u'PER01', Properties(desc=u'Contact Function Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'IC'] ) ),
Element( u'PER02', Properties(desc=u'Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=2,
codes=[] ) ),
Element( u'PER03', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'EM', u'FX', u'TE'] ) ),
Element( u'PER04', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=4,
codes=[] ) ),
Element( u'PER05', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER06', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=6,
codes=[] ) ),
Element( u'PER07', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=7,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER08', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=8,
codes=[] ) ),
Element( u'PER09', Properties(desc=u'Contact Inquiry Reference', req_sit=u'N', data_type=(u'AN',u'1',u'20'), position=9,
codes=[] ) ),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'230',desc=u'Service Provider Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'15', u'33', u'35', u'41', u'43', u'44', u'45', u'46', u'47', u'49', u'51', u'52', u'79', u'97'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N'] ) ),
),
Segment( u'PRV', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'240',desc=u'Service Provider Information'),
Element( u'PRV01', Properties(desc=u'Provider Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=1,
codes=[u'AD', u'AS', u'AT', u'CO', u'CV', u'OP', u'OR', u'OT', u'PC', u'PE'] ) ),
Element( u'PRV02', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'ZZ'] ) ),
Element( u'PRV03', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=3,
codes=[] ) ),
Element( u'PRV04', Properties(desc=u'State or Province Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=4,
codes=[] ) ),
Composite( u'C035', Properties(req_sit=u'N',refdes='',seq=u'05',desc=u'Provider Specialty Information'),
),
Element( u'PRV06', Properties(desc=u'Provider Organization Code', req_sit=u'N', data_type=(u'ID',u'3',u'3'), position=6,
codes=[] ) ),
),
)
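# Loop 2010F -- Additional Service Information Contact Name (situational,
# repeat 1). Identifies the contact for paperwork referenced by a PWK
# segment: NM1 name, N3/N4 address, and PER communication numbers. The
# reading of the Properties fields here is inferred from their use
# throughout this generated map: req_sit appears to encode the X12 usage
# flags (R = required, S = situational, N = not used) and data_type is a
# (type, min length, max length) tuple.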
parsed_278_2010F = Loop( u'2010F', Properties(looptype='',repeat=u'1',pos=u'170',req_sit=u'S',desc=u'Additional Service Information Contact Name'),
Segment( u'NM1', Properties(syntax='',req_sit=u'S',repeat=u'>1',pos=u'170',desc=u'Additional Service Information Contact Name'),
Element( u'NM101', Properties(desc=u'Entity Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'1P', u'2B', u'ABG', u'FA', u'PR', u'X3'] ) ),
Element( u'NM102', Properties(desc=u'Entity Type Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2'] ) ),
Element( u'NM103', Properties(desc=u'Name Last or Organization Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=3,
codes=[] ) ),
Element( u'NM104', Properties(desc=u'Name First', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'NM105', Properties(desc=u'Name Middle', req_sit=u'S', data_type=(u'AN',u'1',u'25'), position=5,
codes=[] ) ),
Element( u'NM106', Properties(desc=u'Name Prefix', req_sit=u'N', data_type=(u'AN',u'1',u'10'), position=6,
codes=[] ) ),
Element( u'NM107', Properties(desc=u'Name Suffix', req_sit=u'S', data_type=(u'AN',u'1',u'10'), position=7,
codes=[] ) ),
Element( u'NM108', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=8,
codes=[u'24', u'34', u'46', u'PI', u'XV', u'XX'] ) ),
Element( u'NM109', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=9,
codes=[] ) ),
Element( u'NM110', Properties(desc=u'Entity Relationship Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'NM111', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=11,
codes=[] ) ),
),
Segment( u'N3', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'200',desc=u'Additional Service Information Contact Address'),
Element( u'N301', Properties(desc=u'Address Information', req_sit=u'R', data_type=(u'AN',u'1',u'55'), position=1,
codes=[] ) ),
Element( u'N302', Properties(desc=u'Address Information', req_sit=u'S', data_type=(u'AN',u'1',u'55'), position=2,
codes=[] ) ),
),
Segment( u'N4', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'210',desc=u'Additional Service Information Contact City/State/Zip Code'),
Element( u'N401', Properties(desc=u'City Name', req_sit=u'S', data_type=(u'AN',u'2',u'30'), position=1,
codes=[] ) ),
Element( u'N402', Properties(desc=u'State or Province Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'N403', Properties(desc=u'Postal Code', req_sit=u'S', data_type=(u'ID',u'3',u'15'), position=3,
codes=[] ) ),
Element( u'N404', Properties(desc=u'Country Code', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'N405', Properties(desc=u'Location Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'B1', u'DP'] ) ),
Element( u'N406', Properties(desc=u'Location Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Segment( u'PER', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'220',desc=u'Additional Service Information Contact Information'),
Element( u'PER01', Properties(desc=u'Contact Function Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'IC'] ) ),
Element( u'PER02', Properties(desc=u'Name', req_sit=u'S', data_type=(u'AN',u'1',u'60'), position=2,
codes=[] ) ),
Element( u'PER03', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'EM', u'FX', u'TE'] ) ),
Element( u'PER04', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=4,
codes=[] ) ),
Element( u'PER05', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER06', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=6,
codes=[] ) ),
Element( u'PER07', Properties(desc=u'Communication Number Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=7,
codes=[u'EM', u'EX', u'FX', u'TE'] ) ),
Element( u'PER08', Properties(desc=u'Communication Number', req_sit=u'S', data_type=(u'AN',u'1',u'256'), position=8,
codes=[] ) ),
Element( u'PER09', Properties(desc=u'Contact Inquiry Reference', req_sit=u'N', data_type=(u'AN',u'1',u'20'), position=9,
codes=[] ) ),
),
)
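# Loop 2000F -- Service Level (required, repeat >1). One HL per requested
# service, carrying TRN trace numbers, the UM review request, the HCR
# review outcome, DTP event dates, HI procedure codes, and service detail
# segments (HSD, CL1, CR1/CR2/CR5/CR6, PWK, MSG), with the 2010F contact
# loop nested at the end.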
parsed_278_2000F = Loop( u'2000F', Properties(looptype='',repeat=u'>1',pos=u'180',req_sit=u'R',desc=u'Service Level'),
Segment( u'HL', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Service Level'),
Element( u'HL01', Properties(desc=u'Hierarchical ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=1,
codes=[] ) ),
Element( u'HL02', Properties(desc=u'Hierarchical Parent ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=2,
codes=[] ) ),
Element( u'HL03', Properties(desc=u'Hierarchical Level Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'SS'] ) ),
Element( u'HL04', Properties(desc=u'Hierarchical Child Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'0'] ) ),
),
Segment( u'TRN', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'020',desc=u'Service Trace Number'),
Element( u'TRN01', Properties(desc=u'Trace Type Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=1,
codes=[u'1', u'2'] ) ),
Element( u'TRN02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'TRN03', Properties(desc=u'Originating Company Identifier', req_sit=u'R', data_type=(u'AN',u'10',u'10'), position=3,
codes=[] ) ),
Element( u'TRN04', Properties(desc=u'Reference Identification', req_sit=u'S', data_type=(u'AN',u'1',u'50'), position=4,
codes=[] ) ),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'030',desc=u'Service Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'15', u'33', u'52', u'57', u'60', u'61', u'62', u'T5'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N'] ) ),
),
Segment( u'UM', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'040',desc=u'Health Care Services Review Information'),
Element( u'UM01', Properties(desc=u'Request Category Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=1,
codes=[u'AR', u'HS', u'SC'] ) ),
Element( u'UM02', Properties(desc=u'Certification Type Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=2,
codes=[u'1', u'2', u'3', u'4', u'I', u'R', u'S'] ) ),
Element( u'UM03', Properties(desc=u'Service Type Code', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'12', u'14', u'15', u'16', u'17', u'18', u'20', u'21', u'23', u'24', u'25', u'26', u'27', u'28', u'33', u'34', u'35', u'36', u'37', u'38', u'39', u'40', u'42', u'44', u'45', u'46', u'48', u'50', u'51', u'52', u'53', u'54', u'56', u'57', u'58', u'59', u'61', u'62', u'63', u'64', u'65', u'67', u'68', u'69', u'70', u'71', u'72', u'73', u'74', u'75', u'76', u'77', u'78', u'79', u'80', u'82', u'83', u'84', u'85', u'86', u'93', u'94', u'95', u'98', u'99', u'A0', u'A1', u'A2', u'A3', u'A4', u'A6', u'A7', u'A8', u'A9', u'AB', u'AC', u'AD', u'AE', u'AF', u'AG', u'AI', u'AJ', u'AK', u'AL', u'AR', u'BB', u'BC', u'BD', u'BE', u'BF', u'BG', u'BS'] ) ),
Composite( u'C023', Properties(req_sit=u'S',refdes='',seq=u'04',desc=u'Health Care Service Location Information'),
Element( u'UM04-01', Properties(desc=u'Facility Code Value', req_sit=u'R', data_type=(u'AN',u'1',u'2'), position=0,
codes=[] ) ),
Element( u'UM04-02', Properties(desc=u'Facility Code Qualifier', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=1,
codes=[u'A', u'B'] ) ),
Element( u'UM04-03', Properties(desc=u'Claim Frequency Type Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=2,
codes=[] ) ),
),
Composite( u'C024', Properties(req_sit=u'N',refdes='',seq=u'05',desc=u'Related Causes Information'),
),
Element( u'UM06', Properties(desc=u'Level of Service Code', req_sit=u'S', data_type=(u'ID',u'1',u'3'), position=6,
codes=[u'03', u'U'] ) ),
Element( u'UM07', Properties(desc=u'Current Health Condition Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=7,
codes=[] ) ),
Element( u'UM08', Properties(desc=u'Prognosis Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=8,
codes=[] ) ),
Element( u'UM09', Properties(desc=u'Release of Information Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=9,
codes=[] ) ),
Element( u'UM10', Properties(desc=u'Delay Reason Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=10,
codes=[] ) ),
),
Segment( u'HCR', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'050',desc=u'Health Care Services Review'),
Element( u'HCR01', Properties(desc=u'Action Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=1,
codes=[u'A1', u'A3', u'A4', u'A6', u'CT', u'NA'] ) ),
Element( u'HCR02', Properties(desc=u'Reference Identification', req_sit=u'S', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'HCR03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'35', u'36', u'37', u'41', u'53', u'69', u'70', u'82', u'83', u'84', u'85', u'86', u'87', u'88', u'89', u'90', u'91', u'92', u'96', u'98', u'E8'] ) ),
Element( u'HCR04', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'N', u'Y'] ) ),
),
Segment( u'REF', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'060',desc=u'Previous Certification Identification'),
Element( u'REF01', Properties(desc=u'Reference Identification Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=1,
codes=[u'BB'] ) ),
Element( u'REF02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'REF03', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=3,
codes=[] ) ),
Composite( u'C040', Properties(req_sit=u'N',refdes='',seq=u'04',desc=u'Reference Identifier'),
),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Service Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'472'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Admission Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'435'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Discharge Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'096'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Surgery Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'456'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Certification Issue Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'102'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Certification Expiration Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'036'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Certification Effective Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'007'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
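# The HI segment below carries up to twelve procedure codes, one C022
# composite each (qualifier, industry code, optional date span, monetary
# amount, quantity, version). Only the first composite (seq 01) is
# required; the rest are situational.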
Segment( u'HI', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'080',desc=u'Procedures'),
Composite( u'C022', Properties(req_sit=u'R',refdes='',seq=u'01',desc=u'Procedure Code 1'),
Element( u'HI01-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI01-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI01-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI01-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI01-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI01-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI01-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'02',desc=u'Procedure Code 2'),
Element( u'HI02-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI02-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI02-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI02-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI02-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI02-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI02-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'03',desc=u'Procedure Code 3'),
Element( u'HI03-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI03-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI03-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI03-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI03-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI03-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI03-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'04',desc=u'Procedure Code 4'),
Element( u'HI04-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI04-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI04-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI04-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI04-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI04-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI04-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'05',desc=u'Procedure Code 5'),
Element( u'HI05-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI05-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI05-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI05-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI05-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI05-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI05-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'06',desc=u'Procedure Code 6'),
Element( u'HI06-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI06-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI06-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI06-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI06-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI06-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI06-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'07',desc=u'Procedure Code 7'),
Element( u'HI07-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI07-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI07-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI07-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI07-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI07-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI07-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'08',desc=u'Procedure Code 8'),
Element( u'HI08-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI08-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI08-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI08-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI08-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI08-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI08-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'09',desc=u'Procedure Code 9'),
Element( u'HI09-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI09-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI09-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI09-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI09-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI09-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI09-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'10',desc=u'Procedure Code 10'),
Element( u'HI10-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI10-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI10-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI10-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI10-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI10-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI10-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'11',desc=u'Procedure Code 11'),
Element( u'HI11-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI11-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI11-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI11-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI11-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI11-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI11-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'12',desc=u'Procedure Code 12'),
Element( u'HI12-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'ABR', u'BO', u'BQ', u'JP', u'LOI', u'NDC', u'ZZ'] ) ),
Element( u'HI12-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI12-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8', u'RD8'] ) ),
Element( u'HI12-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI12-05', Properties(desc=u'Monetary Amount', req_sit=u'S', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI12-06', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI12-07', Properties(desc=u'Version Identifier', req_sit=u'S', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
),
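# HSD expresses the requested delivery pattern for the service: a quantity
# with its unit of measure, a sampling modulus, a time period, and the X12
# ship/delivery calendar and time pattern codes enumerated in HSD07/HSD08.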
Segment( u'HSD', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'090',desc=u'Health Care Services Delivery'),
Element( u'HSD01', Properties(desc=u'Quantity Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'DY', u'FL', u'HS', u'MN', u'VS'] ) ),
Element( u'HSD02', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=2,
codes=[] ) ),
Element( u'HSD03', Properties(desc=u'Unit or Basis for Measurement Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'DA', u'MO', u'WK'] ) ),
Element( u'HSD04', Properties(desc=u'Sample Selection Modulus', req_sit=u'S', data_type=(u'R',u'1',u'6'), position=4,
codes=[] ) ),
Element( u'HSD05', Properties(desc=u'Time Period Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'6', u'7', u'21', u'26', u'27', u'34', u'35'] ) ),
Element( u'HSD06', Properties(desc=u'Number of Periods', req_sit=u'S', data_type=(u'N0',u'1',u'3'), position=6,
codes=[] ) ),
Element( u'HSD07', Properties(desc=u'Ship/Delivery or Calendar Pattern Code', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=7,
codes=[u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'A', u'B', u'C', u'D', u'E', u'F', u'G', u'H', u'J', u'K', u'L', u'M', u'N', u'O', u'P', u'Q', u'R', u'S', u'SA', u'SB', u'SC', u'SD', u'SG', u'SL', u'SP', u'SX', u'SY', u'SZ', u'T', u'U', u'V', u'W', u'X', u'Y'] ) ),
Element( u'HSD08', Properties(desc=u'Ship/Delivery Pattern Time Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=8,
codes=[u'A', u'B', u'C', u'D', u'E', u'F', u'G', u'Y'] ) ),
),
Segment( u'CL1', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'110',desc=u'Institutional Claim Code'),
Element( u'CL101', Properties(desc=u'Admission Type Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=1,
codes=[] ) ),
Element( u'CL102', Properties(desc=u'Admission Source Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=2,
codes=[] ) ),
Element( u'CL103', Properties(desc=u'Patient Status Code', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=3,
codes=[] ) ),
Element( u'CL104', Properties(desc=u'Nursing Home Residential Status Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9'] ) ),
),
Segment( u'CR1', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'120',desc=u'Ambulance Transport Information'),
Element( u'CR101', Properties(desc=u'Unit or Basis for Measurement Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=1,
codes=[] ) ),
Element( u'CR102', Properties(desc=u'Weight', req_sit=u'N', data_type=(u'R',u'1',u'10'), position=2,
codes=[] ) ),
Element( u'CR103', Properties(desc=u'Ambulance Transport Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=3,
codes=[u'I', u'R', u'T', u'X'] ) ),
Element( u'CR104', Properties(desc=u'Ambulance Transport Reason Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=4,
codes=[] ) ),
Element( u'CR105', Properties(desc=u'Unit or Basis for Measurement Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'DH', u'DK'] ) ),
Element( u'CR106', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=6,
codes=[] ) ),
Element( u'CR107', Properties(desc=u'Address Information', req_sit=u'S', data_type=(u'AN',u'1',u'55'), position=7,
codes=[] ) ),
Element( u'CR108', Properties(desc=u'Address Information', req_sit=u'S', data_type=(u'AN',u'1',u'55'), position=8,
codes=[] ) ),
Element( u'CR109', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=9,
codes=[] ) ),
Element( u'CR110', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=10,
codes=[] ) ),
),
Segment( u'CR2', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'130',desc=u'Spinal Manipulation Service Information'),
Element( u'CR201', Properties(desc=u'Count', req_sit=u'S', data_type=(u'N0',u'1',u'9'), position=1,
codes=[] ) ),
Element( u'CR202', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=2,
codes=[] ) ),
Element( u'CR203', Properties(desc=u'Subluxation Level Code', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=3,
codes=[u'C1', u'C2', u'C3', u'C4', u'C5', u'C6', u'C7', u'CO', u'IL', u'L1', u'L2', u'L3', u'L4', u'L5', u'OC', u'SA', u'T1', u'T10', u'T11', u'T12', u'T2', u'T3', u'T4', u'T5', u'T6', u'T7', u'T8', u'T9'] ) ),
Element( u'CR204', Properties(desc=u'Subluxation Level Code', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=4,
codes=[u'C1', u'C2', u'C3', u'C4', u'C5', u'C6', u'C7', u'CO', u'IL', u'L1', u'L2', u'L3', u'L4', u'L5', u'OC', u'SA', u'T1', u'T10', u'T11', u'T12', u'T2', u'T3', u'T4', u'T5', u'T6', u'T7', u'T8', u'T9'] ) ),
Element( u'CR205', Properties(desc=u'Unit or Basis for Measurement Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'DA', u'MO', u'WK', u'YR'] ) ),
Element( u'CR206', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=6,
codes=[] ) ),
Element( u'CR207', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=7,
codes=[] ) ),
Element( u'CR208', Properties(desc=u'Nature of Condition Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=8,
codes=[] ) ),
Element( u'CR209', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=9,
codes=[] ) ),
Element( u'CR210', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=10,
codes=[] ) ),
Element( u'CR211', Properties(desc=u'Description', req_sit=u'N', data_type=(u'AN',u'1',u'80'), position=11,
codes=[] ) ),
Element( u'CR212', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=12,
codes=[] ) ),
),
Segment( u'CR5', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'140',desc=u'Home Oxygen Therapy Information'),
Element( u'CR501', Properties(desc=u'Certification Type Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=1,
codes=[] ) ),
Element( u'CR502', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=2,
codes=[] ) ),
Element( u'CR503', Properties(desc=u'Oxygen Equipment Type Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=3,
codes=[u'A', u'B', u'C', u'D', u'E', u'O'] ) ),
Element( u'CR504', Properties(desc=u'Oxygen Equipment Type Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'A', u'B', u'C', u'D', u'E', u'O'] ) ),
Element( u'CR505', Properties(desc=u'Description', req_sit=u'S', data_type=(u'AN',u'1',u'80'), position=5,
codes=[] ) ),
Element( u'CR506', Properties(desc=u'Quantity', req_sit=u'R', data_type=(u'R',u'1',u'15'), position=6,
codes=[] ) ),
Element( u'CR507', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=7,
codes=[] ) ),
Element( u'CR508', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=8,
codes=[] ) ),
Element( u'CR509', Properties(desc=u'Description', req_sit=u'S', data_type=(u'AN',u'1',u'80'), position=9,
codes=[] ) ),
Element( u'CR510', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=10,
codes=[] ) ),
Element( u'CR511', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=11,
codes=[] ) ),
Element( u'CR512', Properties(desc=u'Oxygen Test Condition Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=12,
codes=[] ) ),
Element( u'CR513', Properties(desc=u'Oxygen Test Findings Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=13,
codes=[] ) ),
Element( u'CR514', Properties(desc=u'Oxygen Test Findings Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=14,
codes=[] ) ),
Element( u'CR515', Properties(desc=u'Oxygen Test Findings Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=15,
codes=[] ) ),
Element( u'CR516', Properties(desc=u'Quantity', req_sit=u'S', data_type=(u'R',u'1',u'15'), position=16,
codes=[] ) ),
Element( u'CR517', Properties(desc=u'Oxygen Delivery System Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=17,
codes=[u'A', u'B', u'C', u'D', u'E'] ) ),
Element( u'CR518', Properties(desc=u'Oxygen Equipment Type Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=18,
codes=[u'A', u'B', u'C', u'D', u'E', u'O'] ) ),
),
Segment( u'CR6', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'150',desc=u'Home Health Care Information'),
Element( u'CR601', Properties(desc=u'Prognosis Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8'] ) ),
Element( u'CR602', Properties(desc=u'Date', req_sit=u'R', data_type=(u'DT',u'8',u'8'), position=2,
codes=[] ) ),
Element( u'CR603', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=3,
codes=[u'RD8'] ) ),
Element( u'CR604', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=4,
codes=[] ) ),
Element( u'CR605', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=5,
codes=[] ) ),
Element( u'CR606', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=6,
codes=[] ) ),
Element( u'CR607', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=7,
codes=[u'N', u'U', u'Y'] ) ),
Element( u'CR608', Properties(desc=u'Certification Type Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=8,
codes=[u'1', u'2', u'3', u'4', u'I', u'R', u'S'] ) ),
Element( u'CR609', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=9,
codes=[] ) ),
Element( u'CR610', Properties(desc=u'Product/Service ID Qualifier', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=10,
codes=[] ) ),
Element( u'CR611', Properties(desc=u'Medical Code Value', req_sit=u'N', data_type=(u'AN',u'1',u'15'), position=11,
codes=[] ) ),
Element( u'CR612', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=12,
codes=[] ) ),
Element( u'CR613', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=13,
codes=[] ) ),
Element( u'CR614', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=14,
codes=[] ) ),
Element( u'CR615', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=15,
codes=[] ) ),
Element( u'CR616', Properties(desc=u'Date Time Period', req_sit=u'N', data_type=(u'AN',u'1',u'35'), position=16,
codes=[] ) ),
Element( u'CR617', Properties(desc=u'Patient Location Code', req_sit=u'N', data_type=(u'ID',u'1',u'1'), position=17,
codes=[] ) ),
Element( u'CR618', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=18,
codes=[] ) ),
Element( u'CR619', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=19,
codes=[] ) ),
Element( u'CR620', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=20,
codes=[] ) ),
Element( u'CR621', Properties(desc=u'Date', req_sit=u'N', data_type=(u'DT',u'8',u'8'), position=21,
codes=[] ) ),
),
Segment( u'PWK', Properties(syntax='',req_sit=u'S',repeat=u'10',pos=u'155',desc=u'Additional Service Information'),
Element( u'PWK01', Properties(desc=u'Report Type Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'03', u'04', u'05', u'06', u'07', u'08', u'09', u'10', u'11', u'13', u'15', u'21', u'48', u'55', u'59', u'77', u'A3', u'A4', u'AM', u'AS', u'AT', u'B2', u'B3', u'BR', u'BS', u'BT', u'CB', u'CK', u'D2', u'DA', u'DB', u'DG', u'DJ', u'DS', u'FM', u'HC', u'HR', u'I5', u'IR', u'LA', u'M1', u'NN', u'OB', u'OC', u'OD', u'OE', u'OX', u'P4', u'P5', u'P6', u'P7', u'PE', u'PN', u'PO', u'PQ', u'PY', u'PZ', u'QC', u'QR', u'RB', u'RR', u'RT', u'RX', u'SG', u'V5', u'XP'] ) ),
Element( u'PWK02', Properties(desc=u'Report Transmission Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=2,
codes=[u'BM', u'EL', u'EM', u'FX', u'VO'] ) ),
Element( u'PWK03', Properties(desc=u'Report Copies Needed', req_sit=u'N', data_type=(u'N0',u'1',u'2'), position=3,
codes=[] ) ),
Element( u'PWK04', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'PWK05', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'AC'] ) ),
Element( u'PWK06', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=6,
codes=[] ) ),
Element( u'PWK07', Properties(desc=u'Description', req_sit=u'S', data_type=(u'AN',u'1',u'80'), position=7,
codes=[] ) ),
Composite( u'C002', Properties(req_sit=u'N',refdes='',seq=u'08',desc=u'Actions Indicated'),
),
Element( u'PWK09', Properties(desc=u'Request Category Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=9,
codes=[] ) ),
),
Segment( u'MSG', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'160',desc=u'Message Text'),
Element( u'MSG01', Properties(desc=u'Free-form Message Text', req_sit=u'R', data_type=(u'AN',u'1',u'264'), position=1,
codes=[] ) ),
Element( u'MSG02', Properties(desc=u'Printer Carriage Control Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'MSG03', Properties(desc=u'Number', req_sit=u'N', data_type=(u'N0',u'1',u'9'), position=3,
codes=[] ) ),
),
parsed_278_2010F,
)
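# Loop 2000E -- Service Provider Level (required, repeat >1). Its HL uses
# level code '19' with child code '1'; the 2010E provider name loop and
# the 2000F service loops are nested beneath it.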
parsed_278_2000E = Loop( u'2000E', Properties(looptype='',repeat=u'>1',pos=u'180',req_sit=u'R',desc=u'Service Provider Level'),
Segment( u'HL', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Service Provider Level'),
Element( u'HL01', Properties(desc=u'Hierarchical ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=1,
codes=[] ) ),
Element( u'HL02', Properties(desc=u'Hierarchical Parent ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=2,
codes=[] ) ),
Element( u'HL03', Properties(desc=u'Hierarchical Level Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'19'] ) ),
Element( u'HL04', Properties(desc=u'Hierarchical Child Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'1'] ) ),
),
Segment( u'MSG', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'160',desc=u'Message Text'),
Element( u'MSG01', Properties(desc=u'Free-form Message Text', req_sit=u'R', data_type=(u'AN',u'1',u'264'), position=1,
codes=[] ) ),
Element( u'MSG02', Properties(desc=u'Printer Carriage Control Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'MSG03', Properties(desc=u'Number', req_sit=u'N', data_type=(u'N0',u'1',u'9'), position=3,
codes=[] ) ),
),
parsed_278_2010E,
parsed_278_2000F,
)
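# Loop 2000D -- Dependent Level (situational, repeat 1). Its HL uses level
# code '23'; it carries the patient event TRN trace numbers, AAA request
# validation, event DTP dates (accident, last menstrual period, estimated
# date of birth, onset of symptoms), the HI diagnosis segment, PWK
# paperwork, and the nested 2010DA/2010DB name loops plus the 2000E
# provider loops.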
parsed_278_2000D = Loop( u'2000D', Properties(looptype='',repeat=u'1',pos=u'180',req_sit=u'S',desc=u'Dependent Level'),
Segment( u'HL', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Dependent Level'),
Element( u'HL01', Properties(desc=u'Hierarchical ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=1,
codes=[] ) ),
Element( u'HL02', Properties(desc=u'Hierarchical Parent ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=2,
codes=[] ) ),
Element( u'HL03', Properties(desc=u'Hierarchical Level Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'23'] ) ),
Element( u'HL04', Properties(desc=u'Hierarchical Child Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'1'] ) ),
),
Segment( u'TRN', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'020',desc=u'Patient Event Tracking Number'),
Element( u'TRN01', Properties(desc=u'Trace Type Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=1,
codes=[u'1', u'2'] ) ),
Element( u'TRN02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'TRN03', Properties(desc=u'Originating Company Identifier', req_sit=u'R', data_type=(u'AN',u'10',u'10'), position=3,
codes=[] ) ),
Element( u'TRN04', Properties(desc=u'Reference Identification', req_sit=u'S', data_type=(u'AN',u'1',u'50'), position=4,
codes=[] ) ),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'030',desc=u'Dependent Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'15', u'33', u'56'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N'] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Accident Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'439'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Last Menstrual Period Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'484'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Estimated Date of Birth'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'ABC'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Onset of Current Symptoms or Illness Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'431'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
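# As with the procedure HI above, the dependent diagnosis HI repeats the
# C022 composite up to twelve times; note that the qualifier code list
# narrows after the first two entries (BF/BJ/BK/LOI for seq 01,
# BF/BJ/LOI for seq 02, then BF/LOI only).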
Segment( u'HI', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'080',desc=u'Dependent Diagnosis'),
Composite( u'C022', Properties(req_sit=u'R',refdes='',seq=u'01',desc=u'Diagnosis 1'),
Element( u'HI01-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'BJ', u'BK', u'LOI'] ) ),
Element( u'HI01-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI01-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI01-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI01-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI01-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI01-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'02',desc=u'Diagnosis 2'),
Element( u'HI02-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'BJ', u'LOI'] ) ),
Element( u'HI02-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI02-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI02-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI02-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI02-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI02-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'03',desc=u'Diagnosis 3'),
Element( u'HI03-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI03-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI03-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI03-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI03-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI03-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI03-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'04',desc=u'Diagnosis 4'),
Element( u'HI04-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI04-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI04-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI04-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI04-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI04-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI04-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'05',desc=u'Diagnosis 5'),
Element( u'HI05-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI05-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI05-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI05-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI05-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI05-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI05-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'06',desc=u'Diagnosis 6'),
Element( u'HI06-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI06-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI06-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI06-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI06-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI06-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI06-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'07',desc=u'Diagnosis 7'),
Element( u'HI07-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI07-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI07-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI07-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI07-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI07-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI07-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'08',desc=u'Diagnosis 8'),
Element( u'HI08-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI08-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI08-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI08-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI08-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI08-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI08-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'09',desc=u'Diagnosis 9'),
Element( u'HI09-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI09-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI09-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI09-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI09-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI09-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI09-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'10',desc=u'Diagnosis 10'),
Element( u'HI10-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI10-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI10-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI10-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI10-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI10-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI10-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'11',desc=u'Diagnosis 11'),
Element( u'HI11-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI11-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI11-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI11-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI11-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI11-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI11-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'12',desc=u'Diagnosis 12'),
Element( u'HI12-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI12-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI12-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI12-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI12-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI12-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI12-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
),
Segment( u'PWK', Properties(syntax='',req_sit=u'S',repeat=u'10',pos=u'155',desc=u'Additional Patient Information'),
Element( u'PWK01', Properties(desc=u'Report Type Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'03', u'04', u'05', u'06', u'07', u'08', u'09', u'10', u'11', u'13', u'15', u'21', u'48', u'55', u'59', u'77', u'A3', u'A4', u'AM', u'AS', u'AT', u'B2', u'B3', u'BR', u'BS', u'BT', u'CB', u'CK', u'D2', u'DA', u'DB', u'DG', u'DJ', u'DS', u'FM', u'HC', u'HR', u'I5', u'IR', u'LA', u'M1', u'NN', u'OB', u'OC', u'OD', u'OE', u'OX', u'P4', u'P5', u'P6', u'P7', u'PE', u'PN', u'PO', u'PQ', u'PY', u'PZ', u'QC', u'QR', u'RB', u'RR', u'RT', u'RX', u'SG', u'V5', u'XP'] ) ),
Element( u'PWK02', Properties(desc=u'Report Transmission Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=2,
codes=[u'BM', u'EL', u'EM', u'FX', u'VO'] ) ),
Element( u'PWK03', Properties(desc=u'Report Copies Needed', req_sit=u'N', data_type=(u'N0',u'1',u'2'), position=3,
codes=[] ) ),
Element( u'PWK04', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'PWK05', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'AC'] ) ),
Element( u'PWK06', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=6,
codes=[] ) ),
Element( u'PWK07', Properties(desc=u'Description', req_sit=u'S', data_type=(u'AN',u'1',u'80'), position=7,
codes=[] ) ),
Composite( u'C002', Properties(req_sit=u'N',refdes='',seq=u'08',desc=u'Actions Indicated'),
),
Element( u'PWK09', Properties(desc=u'Request Category Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=9,
codes=[] ) ),
),
parsed_278_2010DA,
parsed_278_2010DB,
parsed_278_2000E,
)
parsed_278_2000C = Loop( u'2000C', Properties(looptype='',repeat=u'1',pos=u'180',req_sit=u'R',desc=u'Subscriber Level'),
Segment( u'HL', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Subscriber Level'),
Element( u'HL01', Properties(desc=u'Hierarchical ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=1,
codes=[] ) ),
Element( u'HL02', Properties(desc=u'Hierarchical Parent ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=2,
codes=[] ) ),
Element( u'HL03', Properties(desc=u'Hierarchical Level Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'22'] ) ),
Element( u'HL04', Properties(desc=u'Hierarchical Child Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'1'] ) ),
),
Segment( u'TRN', Properties(syntax='',req_sit=u'S',repeat=u'3',pos=u'020',desc=u'Patient Event Tracking Number'),
Element( u'TRN01', Properties(desc=u'Trace Type Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=1,
codes=[u'1', u'2'] ) ),
Element( u'TRN02', Properties(desc=u'Reference Identification', req_sit=u'R', data_type=(u'AN',u'1',u'50'), position=2,
codes=[] ) ),
Element( u'TRN03', Properties(desc=u'Originating Company Identifier', req_sit=u'R', data_type=(u'AN',u'10',u'10'), position=3,
codes=[] ) ),
Element( u'TRN04', Properties(desc=u'Reference Identification', req_sit=u'S', data_type=(u'AN',u'1',u'50'), position=4,
codes=[] ) ),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'030',desc=u'Subscriber Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'S', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'15', u'33', u'56'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'S', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N'] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Accident Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'439'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Last Menstrual Period Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'484'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Estimated Date of Birth'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'ABC'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'DTP', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'070',desc=u'Onset of Current Symptoms or Illness Date'),
Element( u'DTP01', Properties(desc=u'Date/Time Qualifier', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'431'] ) ),
Element( u'DTP02', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'R', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'DTP03', Properties(desc=u'Date Time Period', req_sit=u'R', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
),
Segment( u'HI', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'080',desc=u'Subscriber Diagnosis'),
Composite( u'C022', Properties(req_sit=u'R',refdes='',seq=u'01',desc=u'Diagnosis 1'),
Element( u'HI01-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'BJ', u'BK', u'LOI'] ) ),
Element( u'HI01-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI01-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI01-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI01-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI01-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI01-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'02',desc=u'Diagnosis 2'),
Element( u'HI02-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'BJ', u'LOI'] ) ),
Element( u'HI02-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI02-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI02-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI02-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI02-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI02-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'03',desc=u'Diagnosis 3'),
Element( u'HI03-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI03-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI03-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI03-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI03-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI03-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI03-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'04',desc=u'Diagnosis 4'),
Element( u'HI04-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI04-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI04-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI04-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI04-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI04-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI04-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'05',desc=u'Diagnosis 5'),
Element( u'HI05-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI05-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI05-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI05-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI05-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI05-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI05-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'06',desc=u'Diagnosis 6'),
Element( u'HI06-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI06-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI06-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI06-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI06-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI06-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI06-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'07',desc=u'Diagnosis 7'),
Element( u'HI07-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI07-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI07-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI07-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI07-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI07-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI07-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'08',desc=u'Diagnosis 8'),
Element( u'HI08-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI08-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI08-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI08-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI08-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI08-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI08-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'09',desc=u'Diagnosis 9'),
Element( u'HI09-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI09-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI09-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI09-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI09-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI09-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI09-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'10',desc=u'Diagnosis 10'),
Element( u'HI10-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI10-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI10-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI10-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI10-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI10-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI10-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'11',desc=u'Diagnosis 11'),
Element( u'HI11-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI11-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI11-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI11-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI11-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI11-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI11-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
Composite( u'C022', Properties(req_sit=u'S',refdes='',seq=u'12',desc=u'Diagnosis 12'),
Element( u'HI12-01', Properties(desc=u'Code List Qualifier Code', req_sit=u'R', data_type=(u'ID',u'1',u'3'), position=0,
codes=[u'BF', u'LOI'] ) ),
Element( u'HI12-02', Properties(desc=u'Industry Code', req_sit=u'R', data_type=(u'AN',u'1',u'30'), position=1,
codes=[] ) ),
Element( u'HI12-03', Properties(desc=u'Date Time Period Format Qualifier', req_sit=u'S', data_type=(u'ID',u'2',u'3'), position=2,
codes=[u'D8'] ) ),
Element( u'HI12-04', Properties(desc=u'Date Time Period', req_sit=u'S', data_type=(u'AN',u'1',u'35'), position=3,
codes=[] ) ),
Element( u'HI12-05', Properties(desc=u'Monetary Amount', req_sit=u'N', data_type=(u'R',u'1',u'18'), position=4,
codes=[] ) ),
Element( u'HI12-06', Properties(desc=u'Quantity', req_sit=u'N', data_type=(u'R',u'1',u'15'), position=5,
codes=[] ) ),
Element( u'HI12-07', Properties(desc=u'Version Identifier', req_sit=u'N', data_type=(u'AN',u'1',u'30'), position=6,
codes=[] ) ),
),
),
Segment( u'PWK', Properties(syntax='',req_sit=u'S',repeat=u'10',pos=u'155',desc=u'Additional Patient Information'),
Element( u'PWK01', Properties(desc=u'Report Type Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'03', u'04', u'05', u'06', u'07', u'08', u'09', u'10', u'11', u'13', u'15', u'21', u'48', u'55', u'59', u'77', u'A3', u'A4', u'AM', u'AS', u'AT', u'B2', u'B3', u'BR', u'BS', u'BT', u'CB', u'CK', u'D2', u'DA', u'DB', u'DG', u'DJ', u'DS', u'FM', u'HC', u'HR', u'I5', u'IR', u'LA', u'M1', u'NN', u'OB', u'OC', u'OD', u'OE', u'OX', u'P4', u'P5', u'P6', u'P7', u'PE', u'PN', u'PO', u'PQ', u'PY', u'PZ', u'QC', u'QR', u'RB', u'RR', u'RT', u'RX', u'SG', u'V5', u'XP'] ) ),
Element( u'PWK02', Properties(desc=u'Report Transmission Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=2,
codes=[u'BM', u'EL', u'EM', u'FX', u'VO'] ) ),
Element( u'PWK03', Properties(desc=u'Report Copies Needed', req_sit=u'N', data_type=(u'N0',u'1',u'2'), position=3,
codes=[] ) ),
Element( u'PWK04', Properties(desc=u'Entity Identifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'3'), position=4,
codes=[] ) ),
Element( u'PWK05', Properties(desc=u'Identification Code Qualifier', req_sit=u'S', data_type=(u'ID',u'1',u'2'), position=5,
codes=[u'AC'] ) ),
Element( u'PWK06', Properties(desc=u'Identification Code', req_sit=u'S', data_type=(u'AN',u'2',u'80'), position=6,
codes=[] ) ),
Element( u'PWK07', Properties(desc=u'Description', req_sit=u'S', data_type=(u'AN',u'1',u'80'), position=7,
codes=[] ) ),
Composite( u'C002', Properties(req_sit=u'N',refdes='',seq=u'08',desc=u'Actions Indicated'),
),
Element( u'PWK09', Properties(desc=u'Request Category Code', req_sit=u'N', data_type=(u'ID',u'1',u'2'), position=9,
codes=[] ) ),
),
parsed_278_2010CA,
parsed_278_2010CB,
parsed_278_2000D,
)
parsed_278_2000B = Loop( u'2000B', Properties(looptype='',repeat=u'1',pos=u'180',req_sit=u'R',desc=u'Requester Level'),
Segment( u'HL', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Requester Level'),
Element( u'HL01', Properties(desc=u'Hierarchical ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=1,
codes=[] ) ),
Element( u'HL02', Properties(desc=u'Hierarchical Parent ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=2,
codes=[] ) ),
Element( u'HL03', Properties(desc=u'Hierarchical Level Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'21'] ) ),
Element( u'HL04', Properties(desc=u'Hierarchical Child Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'1'] ) ),
),
parsed_278_2010B,
parsed_278_2000C,
)
parsed_278_2000A = Loop( u'2000A', Properties(looptype='',repeat=u'1',pos=u'010',req_sit=u'R',desc=u'Utilization Management Organization (UMO) Level'),
Segment( u'HL', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Utilization Management Organization (UMO) Level'),
Element( u'HL01', Properties(desc=u'Hierarchical ID Number', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=1,
codes=[] ) ),
Element( u'HL02', Properties(desc=u'Hierarchical Parent ID Number', req_sit=u'N', data_type=(u'AN',u'1',u'12'), position=2,
codes=[] ) ),
Element( u'HL03', Properties(desc=u'Hierarchical Level Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=3,
codes=[u'20'] ) ),
Element( u'HL04', Properties(desc=u'Hierarchical Child Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'1'] ) ),
),
Segment( u'AAA', Properties(syntax='',req_sit=u'S',repeat=u'9',pos=u'030',desc=u'Request Validation'),
Element( u'AAA01', Properties(desc=u'Yes/No Condition or Response Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=1,
codes=[u'N', u'Y'] ) ),
Element( u'AAA02', Properties(desc=u'Agency Qualifier Code', req_sit=u'N', data_type=(u'ID',u'2',u'2'), position=2,
codes=[] ) ),
Element( u'AAA03', Properties(desc=u'Reject Reason Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'04', u'41', u'42', u'79'] ) ),
Element( u'AAA04', Properties(desc=u'Follow-up Action Code', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'C', u'N', u'P', u'Y'] ) ),
),
parsed_278_2010A,
parsed_278_2000B,
)
parsed_278_DETAIL = Loop( u'DETAIL', Properties(looptype=u'wrapper',repeat=u'>1',pos=u'020',req_sit=u'S',desc=u'Table 2 - Detail'),
parsed_278_2000A,
)
parsed_278_ST_LOOP = Loop( u'ST_LOOP', Properties(looptype=u'explicit',repeat=u'>1',pos=u'020',req_sit=u'R',desc=u'Transaction Set Header'),
Segment( u'ST', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Transaction Set Header'),
Element( u'ST01', Properties(desc=u'Transaction Set Identifier Code', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=1,
codes=[u'278'] ) ),
Element( u'ST02', Properties(desc=u'Transaction Set Control Number', req_sit=u'R', data_type=(u'AN',u'4',u'9'), position=2,
codes=[] ) ),
),
parsed_278_HEADER,
parsed_278_DETAIL,
Segment( u'SE', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'280',desc=u'Transaction Set Trailer'),
Element( u'SE01', Properties(desc=u'Number of Included Segments', req_sit=u'R', data_type=(u'N0',u'1',u'10'), position=1,
codes=[] ) ),
Element( u'SE02', Properties(desc=u'Transaction Set Control Number', req_sit=u'R', data_type=(u'AN',u'4',u'9'), position=2,
codes=[] ) ),
),
)
parsed_278_GS_LOOP = Loop( u'GS_LOOP', Properties(looptype=u'explicit',repeat=u'>1',pos=u'020',req_sit=u'R',desc=u'Functional Group Header'),
Segment( u'GS', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Functional Group Header'),
Element( u'GS01', Properties(desc=u'Functional Identifier Code', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'HI'] ) ),
Element( u'GS02', Properties(desc=u'Application Senders Code', req_sit=u'R', data_type=(u'AN',u'2',u'15'), position=2,
codes=[] ) ),
Element( u'GS03', Properties(desc=u'124', req_sit=u'R', data_type=(u'AN',u'2',u'15'), position=3,
codes=[] ) ),
Element( u'GS04', Properties(desc=u'Date', req_sit=u'R', data_type=(u'DT',u'8',u'8'), position=4,
codes=[] ) ),
Element( u'GS05', Properties(desc=u'Time', req_sit=u'R', data_type=(u'TM',u'4',u'8'), position=5,
codes=[] ) ),
Element( u'GS06', Properties(desc=u'Group Control Number', req_sit=u'R', data_type=(u'N0',u'1',u'9'), position=6,
codes=[] ) ),
Element( u'GS07', Properties(desc=u'Responsible Agency Code', req_sit=u'R', data_type=(u'ID',u'1',u'2'), position=7,
codes=[u'X'] ) ),
Element( u'GS08', Properties(desc=u'Version / Release / Industry Identifier Code', req_sit=u'R', data_type=(u'AN',u'1',u'12'), position=8,
codes=[u'004010X094A1'] ) ),
),
parsed_278_ST_LOOP,
Segment( u'GE', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'030',desc=u'Functional Group Trailer'),
Element( u'GE01', Properties(desc=u'97', req_sit=u'R', data_type=(u'N0',u'1',u'6'), position=1,
codes=[] ) ),
Element( u'GE02', Properties(desc=u'Group Control Number', req_sit=u'R', data_type=(u'N0',u'1',u'9'), position=2,
codes=[] ) ),
),
)
parsed_278_ISA_LOOP = Loop( u'ISA_LOOP', Properties(looptype=u'explicit',repeat=u'>1',pos=u'001',req_sit=u'R',desc=u'Interchange Control Header'),
Segment( u'ISA', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'010',desc=u'Interchange Control Header'),
Element( u'ISA01', Properties(desc=u'I01', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=1,
codes=[u'00', u'03'] ) ),
Element( u'ISA02', Properties(desc=u'I02', req_sit=u'R', data_type=(u'AN',u'10',u'10'), position=2,
codes=[] ) ),
Element( u'ISA03', Properties(desc=u'I03', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=3,
codes=[u'00', u'01'] ) ),
Element( u'ISA04', Properties(desc=u'I04', req_sit=u'R', data_type=(u'AN',u'10',u'10'), position=4,
codes=[] ) ),
Element( u'ISA05', Properties(desc=u'I05', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=5,
codes=[u'01', u'14', u'20', u'27', u'28', u'29', u'30', u'33', u'ZZ'] ) ),
Element( u'ISA06', Properties(desc=u'I06', req_sit=u'R', data_type=(u'AN',u'15',u'15'), position=6,
codes=[] ) ),
Element( u'ISA07', Properties(desc=u'I05', req_sit=u'R', data_type=(u'ID',u'2',u'2'), position=7,
codes=[u'01', u'14', u'20', u'27', u'28', u'29', u'30', u'33', u'ZZ'] ) ),
Element( u'ISA08', Properties(desc=u'I07', req_sit=u'R', data_type=(u'AN',u'15',u'15'), position=8,
codes=[] ) ),
Element( u'ISA09', Properties(desc=u'I08', req_sit=u'R', data_type=(u'DT',u'6',u'6'), position=9,
codes=[] ) ),
Element( u'ISA10', Properties(desc=u'I09', req_sit=u'R', data_type=(u'TM',u'4',u'4'), position=10,
codes=[] ) ),
Element( u'ISA11', Properties(desc=u'I10', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=11,
codes=[u'U'] ) ),
Element( u'ISA12', Properties(desc=u'I11', req_sit=u'R', data_type=(u'ID',u'5',u'5'), position=12,
codes=[u'00401'] ) ),
Element( u'ISA13', Properties(desc=u'I12', req_sit=u'R', data_type=(u'N0',u'9',u'9'), position=13,
codes=[] ) ),
Element( u'ISA14', Properties(desc=u'I13', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=14,
codes=[u'0', u'1'] ) ),
Element( u'ISA15', Properties(desc=u'I14', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=15,
codes=[u'P', u'T'] ) ),
Element( u'ISA16', Properties(desc=u'I15', req_sit=u'R', data_type=(u'AN',u'1',u'1'), position=16,
codes=[] ) ),
),
parsed_278_GS_LOOP,
Segment( u'TA1', Properties(syntax='',req_sit=u'S',repeat=u'1',pos=u'020',desc=u'Interchange Acknowledgement'),
Element( u'TA101', Properties(desc=u'I12', req_sit=u'R', data_type=(u'N0',u'9',u'9'), position=1,
codes=[] ) ),
Element( u'TA102', Properties(desc=u'I08', req_sit=u'R', data_type=(u'DT',u'6',u'6'), position=2,
codes=[] ) ),
Element( u'TA103', Properties(desc=u'I09', req_sit=u'R', data_type=(u'TM',u'4',u'4'), position=3,
codes=[] ) ),
Element( u'TA104', Properties(desc=u'I17', req_sit=u'R', data_type=(u'ID',u'1',u'1'), position=4,
codes=[u'A', u'E', u'R'] ) ),
Element( u'TA105', Properties(desc=u'I18', req_sit=u'R', data_type=(u'ID',u'3',u'3'), position=5,
codes=[u'000', u'001', u'002', u'003', u'004', u'005', u'006', u'007', u'008', u'009', u'010', u'011', u'012', u'013', u'014', u'015', u'016', u'017', u'018', u'019', u'020', u'021', u'022', u'023', u'024', u'025', u'026', u'027', u'028', u'029', u'030', u'031'] ) ),
),
Segment( u'IEA', Properties(syntax='',req_sit=u'R',repeat=u'1',pos=u'030',desc=u'Interchange Control Trailer'),
Element( u'IEA01', Properties(desc=u'I16', req_sit=u'R', data_type=(u'N0',u'1',u'5'), position=1,
codes=[] ) ),
Element( u'IEA02', Properties(desc=u'I12', req_sit=u'R', data_type=(u'N0',u'9',u'9'), position=2,
codes=[] ) ),
),
)
parsed_278 = Message( u'278', Properties(desc=u'HIPAA Health Care Services Review: Response X094A1-278'),
parsed_278_ISA_LOOP,
)
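# --- Illustrative traversal sketch (added commentary, not generated) -------
# The Message/Loop/Segment/Composite/Element instances above form a tree
# describing the 278 interchange.  The walker below is a minimal sketch; it
# assumes each container exposes its children via a 'children' attribute,
# which is an assumption about the map API, not a guarantee.
def _print_map(node, depth=0):
    """Recursively print the node types of a parsed transaction map."""
    print('  ' * depth + type(node).__name__)
    for child in getattr(node, 'children', []):
        _print_map(child, depth + 1)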
|
bsd-3-clause
| 3,310,170,976,286,571,500
| 69.009751
| 707
| 0.610316
| false
| 2.496195
| false
| false
| false
|
chrislit/abydos
|
abydos/distance/_yjhhr.py
|
1
|
5217
|
# Copyright 2019-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.distance._yjhhr.
YJHHR distance
"""
from typing import Any, Counter as TCounter, Optional, Sequence, Set, Union
from ._token_distance import _TokenDistance
from ..tokenizer import _Tokenizer
__all__ = ['YJHHR']
class YJHHR(_TokenDistance):
r"""YJHHR distance.
For two sets X and Y and a parameter p, YJHHR distance
:cite:`Yang:2016` is
.. math::
dist_{YJHHR_p}(X, Y) =
\sqrt[p]{|X \setminus Y|^p + |Y \setminus X|^p}
In :ref:`2x2 confusion table terms <confusion_table>`, where a+b+c+d=n,
this is
.. math::
dist_{YJHHR} =
\sqrt[p]{b^p + c^p}
.. versionadded:: 0.4.0
"""
def __init__(
self,
pval: int = 1,
alphabet: Optional[
Union[TCounter[str], Sequence[str], Set[str], int]
] = None,
tokenizer: Optional[_Tokenizer] = None,
intersection_type: str = 'crisp',
**kwargs: Any
) -> None:
"""Initialize YJHHR instance.
Parameters
----------
pval : int
The :math:`p`-value of the :math:`L^p`-space
alphabet : Counter, collection, int, or None
This represents the alphabet of possible tokens.
See :ref:`alphabet <alphabet>` description in
:py:class:`_TokenDistance` for details.
tokenizer : _Tokenizer
A tokenizer instance from the :py:mod:`abydos.tokenizer` package
intersection_type : str
Specifies the intersection type, and set type as a result:
See :ref:`intersection_type <intersection_type>` description in
:py:class:`_TokenDistance` for details.
**kwargs
Arbitrary keyword arguments
Other Parameters
----------------
qval : int
The length of each q-gram. Using this parameter and tokenizer=None
will cause the instance to use the QGram tokenizer with this
q value.
metric : _Distance
A string distance measure class for use in the ``soft`` and
``fuzzy`` variants.
threshold : float
A threshold value, similarities above which are counted as
members of the intersection for the ``fuzzy`` variant.
.. versionadded:: 0.4.0
"""
super(YJHHR, self).__init__(
alphabet=alphabet,
tokenizer=tokenizer,
intersection_type=intersection_type,
**kwargs
)
self.set_params(pval=pval)
def dist_abs(self, src: str, tar: str) -> float:
"""Return the YJHHR distance of two strings.
Parameters
----------
src : str
Source string (or QGrams/Counter objects) for comparison
tar : str
Target string (or QGrams/Counter objects) for comparison
Returns
-------
float
YJHHR distance
Examples
--------
>>> cmp = YJHHR()
>>> cmp.dist_abs('cat', 'hat')
4.0
>>> cmp.dist_abs('Niall', 'Neil')
7.0
>>> cmp.dist_abs('aluminum', 'Catalan')
15.0
>>> cmp.dist_abs('ATCG', 'TAGC')
10.0
.. versionadded:: 0.4.0
"""
if src == tar:
return 0.0
self._tokenize(src, tar)
b = self._src_only_card() ** self.params['pval']
c = self._tar_only_card() ** self.params['pval']
return float(round((b + c) ** (1 / self.params['pval']), 14))
def dist(self, src: str, tar: str) -> float:
"""Return the normalized YJHHR distance of two strings.
Parameters
----------
src : str
Source string (or QGrams/Counter objects) for comparison
tar : str
Target string (or QGrams/Counter objects) for comparison
Returns
-------
float
normalized YJHHR distance
Examples
--------
>>> cmp = YJHHR()
>>> cmp.dist('cat', 'hat')
0.6666666666666666
>>> cmp.dist('Niall', 'Neil')
0.7777777777777778
>>> cmp.dist('aluminum', 'Catalan')
0.9375
>>> cmp.dist('ATCG', 'TAGC')
1.0
.. versionadded:: 0.4.0
"""
distance = self.dist_abs(src, tar)
union = self._union_card()
if union == 0:
return 0.0
return distance / union
if __name__ == '__main__':
import doctest
doctest.testmod()
|
gpl-3.0
| 6,945,651,139,748,284,000
| 26.75
| 78
| 0.553575
| false
| 3.997701
| false
| false
| false
|
lovelysystems/pyjamas
|
examples/libtest/BuiltinTest.py
|
1
|
4923
|
from UnitTest import UnitTest
try:
builtin_value = builtin.value
except:
builtin_value = None
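# The try/except above records whether 'builtin' was already loaded before
# this module ran; the unreachable import below apparently exercises the
# pyjamas compiler's handling of dead import statements.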
if False:
import builtin
import builtin
class Foo:
pass
class BuiltinTest(UnitTest):
def testMinMax(self):
self.assertEqual(max(1,2,3,4), 4)
self.assertEqual(min(1,2,3,4), 1)
self.assertEqual(max([1,2,3,4]), 4)
self.assertEqual(min([1,2,3,4]), 1)
self.assertTrue(max([5,3,4],[6,1,2]) == [6,1,2] , "max([5,3,4],[6,1,2])")
self.assertTrue(min([5,3,4],[6,1,2]) == [5,3,4] , "min([5,3,4],[6,1,2])")
def testInt(self):
self.assertEqual(int("5"), 5)
self.assertEqual(int("09"), 9)
self.assertEqual(6, 6)
try:
int('not int')
self.fail("No int() argument error raised")
except ValueError, e:
self.assertEqual(e[0], "invalid literal for int() with base 10: 'not int'")
try:
int(1, 10)
self.fail("No int() argument error raised")
except TypeError, e:
self.assertEqual(e[0], "int() can't convert non-string with explicit base")
def testOrdChr(self):
for i in range(256):
self.assertEqual(ord(chr(i)), i)
def testMod(self):
self.assertEqual(12 % 5, 2)
def testPower(self):
self.assertEqual(3 ** 4, 81)
def testPowerfunc(self):
self.assertEqual(pow(10, 3), 1000)
self.assertEqual(pow(10, 3, 7), 6)
def testHex(self):
self.assertEqual(hex(23), '0x17')
try:
h = hex(23.2)
self.fail("No hex() argument error raised")
except TypeError, why:
self.assertEqual(why.args[0], "hex() argument can't be converted to hex")
def testOct(self):
self.assertEqual(oct(23), '027')
try:
o = oct(23.2)
self.fail("No oct() argument error raised")
except TypeError, why:
self.assertEqual(str(why), "oct() argument can't be converted to oct")
def testRound(self):
self.assertEqual(round(13.12345), 13.0)
self.assertEqual(round(13.12345, 3), 13.123)
self.assertEqual(round(-13.12345), -13.0)
self.assertEqual(round(-13.12345, 3), -13.123)
self.assertEqual(round(13.62345), 14.0)
self.assertEqual(round(13.62345, 3), 13.623)
self.assertEqual(round(-13.62345), -14.0)
self.assertEqual(round(-13.62345, 3), -13.623)
def testDivmod(self):
test_set = [(14, 3, 4, 2),
(14.1, 3, 4.0, 2.1),
(14.1, 3.1, 4.0, 1.7),
]
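        # divmod(x, y) returns (x // y, x % y); the remainder is checked
        # with a tolerance because the floating-point cases are inexact.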
for x, y, p, q in test_set:
d = divmod(x,y)
self.assertEqual(d[0], p)
self.assertEqual(abs(d[1] - q) < 0.00001, True)
def testFloorDiv(self):
self.assertEqual(1, 4//3)
self.assertEqual(1, 5//3)
self.assertEqual(2, 6//3)
def testAll(self):
self.assertEqual(all([True, 1, 'a']), True)
self.assertEqual(all([True, 1, None, 'a']), False)
self.assertEqual(all([True, 1, '', 'a']), False)
self.assertEqual(all([True, 1, False, 'a']), False)
def testAny(self):
self.assertEqual(any([True, 1, 'a']), True)
self.assertEqual(any([True, 1, None, 'a']), True)
self.assertEqual(any([True, 1, '', 'a']), True)
self.assertEqual(any([True, 1, False, 'a']), True)
self.assertEqual(any([False, '', None]), False)
def testRepr(self):
l1 = [1,2,3]
l2 = ["a", "b", "c"]
t1 = (4,5,6,7)
t2 = ("aa", "bb")
d1 = {'a': 1, "b": "B"}
d2 = {1: l1, 2: l2, 3: t1, 4: t2, 5:d1}
self.assertEqual(repr(l1), '[1, 2, 3]')
self.assertEqual(repr(l2), "['a', 'b', 'c']")
self.assertEqual(repr(t1), '(4, 5, 6, 7)')
self.assertEqual(repr(t2), "('aa', 'bb')")
self.assertEqual(repr(d1), "{'a': 1, 'b': 'B'}")
self.assertEqual(repr(d2), "{1: [1, 2, 3], 2: ['a', 'b', 'c'], 3: (4, 5, 6, 7), 4: ('aa', 'bb'), 5: {'a': 1, 'b': 'B'}}")
def testIsInstance(self):
s = 'hello'
self.assertTrue(isinstance(s, str), "s is a string")
self.assertFalse(isinstance(s, int), "s is a string not an integer")
s = 1
self.assertFalse(isinstance(s, str), "s is an integer not a string")
self.assertTrue(isinstance(s, int), "s is an integer")
def testImport(self):
self.assertEqual(builtin_value, None, "The builtin is loaded before import!")
try:
self.assertEqual(builtin.value, builtin.get_value())
except:
self.fail("Import failed for builtin")
def testBitOperations(self):
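        # Precedence note: '+' and '-' bind tighter than '<<', which binds
        # tighter than '&', so the unparenthesized forms group as
        # 1 << (2 - 1) and 1 & (3 + 1).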
self.assertEqual(1 << 2 - 1, 2, "shift error 1")
self.assertEqual((1 << 2) - 1, 3, "shift error 2")
self.assertEqual(1 & 3 + 1, 0, "and error 1")
self.assertEqual((1 & 3) + 1, 2, "and error 2")
|
apache-2.0
| 842,479,220,760,072,800
| 33.1875
| 129
| 0.530977
| false
| 3.174081
| true
| false
| false
|
levilucio/SyVOLT
|
ExFamToPerson/contracts/HPos_ChildSchool_ConnectedLHS.py
|
1
|
11680
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HPos_ChildSchool_ConnectedLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HPos_ChildSchool_ConnectedLHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HPos_ChildSchool_ConnectedLHS, self).__init__(name='HPos_ChildSchool_ConnectedLHS', num_nodes=0, edges=[])
# Set the graph attributes
self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Pos_ChildSchool')
# Set the node attributes
# match class Child() node
self.add_node()
self.vs[0]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["MT_dirty__"] = False
self.vs[0]["mm__"] = """MT_pre__Child"""
self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class School() node
self.add_node()
self.vs[1]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["MT_dirty__"] = False
self.vs[1]["mm__"] = """MT_pre__School"""
self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class Service() node
self.add_node()
self.vs[2]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[2]["MT_label__"] = """3"""
self.vs[2]["MT_dirty__"] = False
self.vs[2]["mm__"] = """MT_pre__Service"""
self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# Nodes that represent the edges of the property.
# match association Child--goesTo-->School node
self.add_node()
self.vs[3]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "goesTo"
"""
self.vs[3]["MT_label__"] = """4"""
self.vs[3]["MT_subtypes__"] = []
self.vs[3]["MT_dirty__"] = False
self.vs[3]["mm__"] = """MT_pre__directLink_S"""
self.vs[3]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'assoc3')
# match association School--special-->Service node
self.add_node()
self.vs[4]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "special"
"""
self.vs[4]["MT_label__"] = """5"""
self.vs[4]["MT_subtypes__"] = []
self.vs[4]["MT_dirty__"] = False
self.vs[4]["mm__"] = """MT_pre__directLink_S"""
self.vs[4]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'assoc4')
# Add the edges
self.add_edges([
(0,3), # match_class Child() -> association goesTo
(3,1), # association goesTo -> match_class School()
(1,4), # match_class School() -> association special
(4,2) # association special -> match_class Service()
])
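        # The edges above reify the matched pattern as a path:
        #   Child --goesTo--> School --special--> Service
        # with the two association links represented by nodes 3 and 4.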
# Add the attribute equations
self["equations"] = []
def eval_attr11(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr12(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr13(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
def eval_attr14(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "goesTo"
def eval_attr15(self, attr_value, this):
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return attr_value == "special"
def constraint(self, PreNode, graph):
"""
Executable constraint code.
@param PreNode: Function taking an integer as parameter
and returns the node corresponding to that label.
"""
#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
|
mit
| -8,973,953,865,711,570,000
| 50.681416
| 128
| 0.469863
| false
| 5.149912
| false
| false
| false
|
leedsEM/movement
|
reorder4LMBFGS.py
|
1
|
2635
|
#!/usr/bin/python
import glob
import os
import sys
## find the labels and identify the right columns; put them in labelsdic
#
vers = "0.3"
# vers 0.3 updated to output in fortran formatted numbers
print "**** reorder starfile for individual particle correction using LMBFGS v {0}".format(vers)
### ---- function: reorder the starfile -----------
def reorder_starfile(filename):
relionfile = open(filename, "r")
odata = relionfile.readlines()
data = []
for i in odata:
if len(i.split()) > 3:
data.append(i)
# get the column number
for i in odata:
if '_rlnImageName' in i:
colnum = int(i.split('#')[-1]) -1
labelsdic = {}
for i in data:
if i.split()[colnum].split("/")[-1] in labelsdic.keys():
labelsdic[i.split()[colnum].split("/")[-1]].append(i.split())
if i.split()[colnum].split("/")[-1] not in labelsdic.keys():
labelsdic[i.split()[colnum].split("/")[-1]] = [i.split()]
## write new header
output = open("{0}_LMBFGS.star".format(filename.split('.')[0]),"w")
for i in odata:
if len(i.split()) < 3:
output.write(i)
# write the particles
for key in sorted(labelsdic):
for line in labelsdic[key]:
output.write("\n")
print line
for i in line:
if is_number(i):
count = len(i.split('.'))
if count > 1:
i = float(i)
if len(str(i).split('.')[0]) > 5:
output.write("{0:.6e} ".format(i))
else:
output.write("{0:12.6f} ".format(i))
else:
output.write("{0: 12d} ".format(int(i)))
else:
output.write("{0} ".format(i))
return("{0}_LMBFGS.star".format(filename.split('.')[0]))
#-----------------------------------------------------------------------
#------- function test if string is a number --------------------------#
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
#-----------------------------------------------------------------------
## get the relion file and read the data
files = glob.glob(raw_input("star files search string: ") or "*.star")
assert len(files) >= 1, "no files found"
for i in files:
print i
go = raw_input("Do it? (Y/N)")
if go in ("Y","y","yes","YES","Yes"):
for each in files:
newfile = reorder_starfile(each)
print each," --> ",newfile
|
gpl-2.0
| 8,108,566,278,060,007,000
| 30
| 97
| 0.476281
| false
| 3.85798
| false
| false
| false
|
chjost/analysis-code
|
analysis/fitresults.py
|
1
|
12123
|
# a class to contain the fit results
import numpy as np
from analyze_fcts import calc_error
from ensemble import LatticeEnsemble
from fit import genfit, genfit_comb, set_fit_interval
from fit import fit as fit1
from plot import genplot, genplot_comb
from input_output import write_fitresults, read_fitresults
from module_global import multiprocess
# this function circumvents the problem that only top-level functions
# of a module can be pickled, which the multiprocessing module requires
def fitting_func(args, kwargs):
    return fit1(*args, **kwargs)
class FitResults(object):
"""class to hold fit results.
Nothing is immutable, so be careful!
"""
def _depth(self, var):
return isinstance(var, list) and max(map(self._depth, var)) + 1
def __init__(self, ensemble, label):
if not isinstance(ensemble, LatticeEnsemble):
raise TypeError("FitResults expected LatticeEnsemble, got %s" %
(type(ensemble)))
# always needed
self.ensemble = ensemble
self.name = ensemble.get_data("name")
self.label = label
self.depth = 0
self.fitranges = []
self.verbose = False
self.old_data = None
self.data = None
self.fitfunc = None
# needed for combined fit
self.combfit = False
self.par = None
self.par_index = None
# results
self.res = None
self.pvals = None
self.chi2 = None
@classmethod
def combined_fit(cls, prev_fr, label, fitrange, par_index=0):
"""Initialize a new combined fit using the results of
prev_fr.
"""
tmp = cls(prev_fr.ensemble, label)
tmp.prepare_combined_fit(fitrange, prev_fr.fitranges[0],
prev_fr.data, par_index)
return tmp
@classmethod
def from_file(cls, ensemble, label, filename):
"""Initialize a fit from a file."""
        raise NotImplementedError()
        # Unreached sketch of the intended behaviour:
        tmp = cls(ensemble, label)
        res = read_fitresults(filename)
        if len(res) == 4:
            tmp.set_results(res)
        elif len(res) == 5:
            tmp.set_results_comb(res)
        else:
            raise RuntimeError("Cannot make sense of initializing the fit"
                               " from a file")
        return tmp
    def toggle_verbose(self):
        self.verbose = not self.verbose
def add_fitrange(self, fitrange):
"""Add a fitrange for fitting a single fit"""
if len(self.fitranges) == 0:
self.fitranges.append(np.asarray(fitrange))
else:
raise RuntimeError("%s already has a fitrange, cannot add another")
def add_fitranges(self, fitrange_data, fitrange_par):
"""Add two fitrange for fitting a combined fit.
Args:
fitint_data: List of intervals for the fit of the functions.
fitint_par: List of intervals for the varying parameter
"""
if len(self.fitranges) == 0:
self.fitranges.append(np.asarray(fitrange_data))
self.fitranges.append(np.asarray(fitrange_par))
self.combfit = True
        else:
            raise RuntimeError("%s already has a fitrange, cannot add"
                               " another" % repr(self))
    def set_fitrange(self, _data, lo, up, skip=2):
        """Set the fit interval from the data."""
        self.data = np.atleast_3d(_data)
        self.add_fitrange(set_fit_interval(_data, lo, up, skip))
def add_par(self, par, par_index=0):
"""Add parameters for a combined fit and the index needed."""
self.par = par
self.par_index = par_index
self.combfit = True
def use_old_data(self, old_data):
"""Reuse the data located at 'old_data' if possible"""
self.old_data = old_data
    def prepare_fit(self, fitrange, old_data=None):
        """Set everything needed for a fit."""
        self.combfit = False
        self.add_fitrange(fitrange)
        self.use_old_data(old_data)
    def prepare_combined_fit(self, fitrange_data, fitrange_par, par,
                             par_index=0, old_data=None):
        """Set everything needed for a combined fit."""
        self.combfit = True
        self.add_fitranges(fitrange_data, fitrange_par)
        self.add_par(par, par_index)
        self.use_old_data(old_data)
def do_fit(self, _data, fitfunc, start_params):
if self.data is not None:
if not (self.data==_data).all():
raise RuntimeError("Fitresult has already data which is" +
"compatible with new data")
else:
self.data = np.atleast_3d(_data)
# init variables
nboot = self.data.shape[0]
T2 = self.data.shape[1]
ncorr = self.data.shape[2]
npar = len(start_params)
ninter = [len(fitint) for fitint in self.fitranges[0]]
# set fit data
        tlist = np.linspace(0., float(T2), T2, endpoint=False)
# initialize empty arrays
self.res = []
self.chi2 = []
        self.pvals = []
func_args = []
func_kwargs = []
# initialize array for every principal correlator
for _l in range(ncorr):
self.res.append(np.zeros((nboot, npar, ninter[_l])))
self.chi2.append(np.zeros((nboot, ninter[_l])))
            self.pvals.append(np.zeros((nboot, ninter[_l])))
        def ffunc(args, kwargs):
            # 'args' is a tuple and 'kwargs' a dict built below; unpack them
            # for the underlying fit routine (assumed signature).
            return fit1(fitfunc, *args, **kwargs)
for _l in range(ncorr):
# setup
#mdata, ddata = calc_error(data[:,:,_l])
for _i in range(ninter[_l]):
lo, up = self.fitranges[0][_l][_i]
if self.verbose:
print("Interval [%d, %d]" % (lo, up))
print("correlator %d" % _l)
# fit the energy and print information
if self.verbose:
print("fitting correlation function")
print(tlist[lo:up+1])
func_args.append((tlist[lo:up+1], self.data[:,lo:up+1,_l], start_params))
y=len(func_kwargs)
func_kwargs.append({"num":y, "verbose":False})
#res[_l][:,:,_i], chi2[_l][:,_i], pval[_l][:,_i] = fitting(fitfunc,
# tlist[lo:up+1], data[:,lo:up+1,_l], start_params, verbose=False)
#if verbose:
# print("p-value %.7lf\nChi^2/dof %.7lf\nresults:"
# % (pval[_l][ 0, _i], chi2[_l][0,_i]/( (up - lo + 1) -
# len(start_params))))
# for p in enumerate(res[_l][0,:,_i]):
# print("\tpar %d = %lf" % p)
# print(" ")
#for a, b in zip(func_args, func_kwargs):
# print(a, b)
#fit1(*(func_args[0]), **(func_kwargs[0]))
multiprocess(ffunc, func_args, func_kwargs)
return
def fit(self, _data, fitfunc, start_params):
"""Fit the data using the fitfunction.
Args:
_data: The correlation functions.
fitfunc: The function to fit to the data.
start_params: The starting parameters for the fit function.
"""
if self.verbose:
print("fitting %s '%s'"% (self.name, self.label))
self.fitfunc = fitfunc
self.data = _data
if self.combfit:
# sanity checks
            if len(self.fitranges) != 2:
                raise RuntimeError("%s needs 2 fitranges for combined fit" %
                                   repr(self))
            if self.par is None:
                raise RuntimeError("%s needs parameter data for combined"
                                   " fit" % repr(self))
# fit
            myargs = [_data, self.fitranges[0], self.fitranges[1], fitfunc,
                      start_params, self.par]
mykwargs = {"par_index": self.par_index, "olddata": self.old_data,
"verbose": self.verbose}
self.res, self.chi2, self.pvals = genfit_comb(*myargs, **mykwargs)
else:
myargs = [_data, self.fitranges[0], fitfunc, start_params]
mykwargs = {"olddata": self.old_data, "verbose": self.verbose}
self.res, self.chi2, self.pvals = genfit(*myargs, **mykwargs)
self.depth = self._depth(self.res)
def set_results(self, res):
"""Set results when reading from file."""
self.res, self.chi2, self.pvals = res[:3]
self.add_fitrange(res[3])
def set_results_comb(self, res):
"""Set results when reading from file."""
self.res, self.chi2, self.pvals = res[:3]
self.add_fitranges(res[3], res[4])
self.combfit = True
def get_results(self):
"""Returns the fit results, the $\chi^2$ values and the p-values."""
return self.res, self.chi2, self.pvals, self.fitranges[0]
def save(self, filename):
"""save data to disk."""
if self.verbose:
print("saving %s '%s'"% (self.name, self.label))
if self.combfit:
write_fitresults(filename, self.fitranges[0], self.res,
self.chi2, self.pvals, self.fitranges[1],
self.verbose)
else:
write_fitresults(filename, self.fitranges[0], self.res,
self.chi2, self.pvals, self.verbose)
def save2(self, filename):
"""Save class to disk."""
if self.combfit:
raise NotImplementedError()
dic = {'fi0' : self.fitranges[0]}
dic.update({'fi1' : self.fitranges[1]})
dic.update({'pi%02d' % i: p for (i, p) in enumerate(self.res)})
dic.update({'ch%02d' % i: p for (i, p) in enumerate(self.chi2)})
dic.update({'pv%02d' % i: p for (i, p) in enumerate(self.pvals)})
dic.update({'data': self.data})
dic.update({'par': self.par})
np.savez(filename, **dic)
else:
dic = {'fi0' : self.fitranges[0]}
dic.update({'pi%02d' % i: p for (i, p) in enumerate(self.res)})
dic.update({'ch%02d' % i: p for (i, p) in enumerate(self.chi2)})
dic.update({'pv%02d' % i: p for (i, p) in enumerate(self.pvals)})
dic.update({'data': self.data})
np.savez(filename, **dic)
def plot(self, label, path="./plots/", plotlabel="corr"):
"""Plot data.
label: Labels for the title and the axis.
path: Path to the saving place of the plot.
plotlabel: Label for the plot file.
"""
if self.verbose:
print("plotting %s '%s'"% (self.name, self.label))
if self.combfit:
myargs = [self.data, self.pvals, self.fitranges[0],
self.fitranges[1], self.fitfunc, self.res, self.par,
self.ensemble.get_data("tmin"), self.name,
self.ensemble.get_data("d"), label]
mykwargs = {"path": path, "plotlabel": plotlabel,
"verbose":self.verbose, "par_par_index": self.par_index}
genplot_comb(*myargs, **mykwargs)
else:
myargs = [self.data, self.res, self.pvals, self.fitranges[0],
self.fitfunc, self.ensemble.get_data("tmin"), self.name,
self.ensemble.get_data("d"), label]
mykwargs = {"path": path, "plotlabel": plotlabel,
"verbose":self.verbose}
genplot(*myargs, **mykwargs)
def __str__(self):
restring = "FitResult %s '%s' with depth %d" % (self.name, self.label,
self.depth)
if self.data is not None:
restring = "\n".join((restring,"Data:\n"))
for key in self.data:
restring = "".join((restring, "\t%s: " % (str(key)),
str(self.data[key]), "\n"))
else:
restring = "".join((restring, "\n"))
return restring
def __repr__(self):
return "[ FitResult %s '%s' with depth %d]" % (self.name, self.label,
self.depth)
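# --- Hedged usage sketch (not part of the original module): the FitResult
# constructor is defined earlier in this file and is assumed here to take a
# name, a label and an ensemble object; `my_fitranges`, `corr_data` and
# `two_exp` are hypothetical placeholders.
# fitres = FitResult("C2", "pion", ensemble)
# fitres.prepare_fit(my_fitranges)
# fitres.fit(corr_data, two_exp, [1.0, 0.5])
# fitres.save("./fits/pion_fit.npz")
# fitres.plot(("t/a", "C(t)"), path="./plots/", plotlabel="pion")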
|
gpl-3.0
| -143,162,281,004,933,150
| 38.23301
| 89
| 0.541203
| false
| 3.625299
| false
| false
| false
|
guzmonne/meraki_api
|
meraki_api/networks.py
|
1
|
4729
|
"""
Meraki Networks API Resource
"""
import urllib
from .meraki_api_resource import MerakiAPIResource
from .devices import Devices
from .ssids import SSIDs
from .site_to_site_vpn import SiteToSiteVPN
from .phone_contacts import PhoneContacts
from .sm import SM
from .static_routes import StaticRoutes
from .vlans import VLANs
from .utils import clean
from .clients import Clients
class Networks(MerakiAPIResource):
""" Meraki API Networks resource. """
resource = "networks"
parameters = ["name", "timeZone", "tags", "type"]
traffic_parameters = ["timespan", "deviceType"]
air_marshal_parameters = ["timespan"]
bind_parameters = ["configurationTemplateId", "autoBind"]
clients_parameters = ["id_or_mac_or_ip"]
events_parameters = ["productType", "includedEventTypes", "excludedEventTypes", "deviceMac", "deviceSerial", "deviceName", "clientIp", "clientMac", "clientName", "smDeviceMac", "smDeviceName", "perPage", "startingAfter", "endingBefore"]
def __init__(self, key, prefix=None, resource_id=None):
MerakiAPIResource.__init__(self, key, prefix, resource_id)
def check_timespan(self, query):
""" Checks if the query object has the timespan value configured. """
if query is None or query.get("timespan") is None:
raise ValueError("You must set the timespan query value.")
def static_routes(self, static_route_id=None):
""" Returns the Networks Static Routes API Resource. """
self.check_for_resource_id()
return StaticRoutes(self.key, self.endpoint(), static_route_id)
def devices(self, serial=None):
""" Returns the Networks Devices API Resource. """
self.check_for_resource_id()
return Devices(self.key, self.endpoint(), serial)
def ssids(self, ssid_id=None):
""" Returns the Network SSIDs API Resource."""
self.check_for_resource_id()
return SSIDs(self.key, self.endpoint(), ssid_id)
def site_to_site_vpn(self, site_to_site_vpn_id=None):
""" Returns site-to-site VPN settings API Resource. """
self.check_for_resource_id()
return SiteToSiteVPN(self.key, self.endpoint(), site_to_site_vpn_id)
def vlans(self, vlan_id=None):
""" Returns VLANs VPN settings API Resource. """
self.check_for_resource_id()
return VLANs(self.key, self.endpoint(), vlan_id)
def sm(self):
""" Returns Network SM API Resource. """
self.check_for_resource_id()
return SM(self.key, self.endpoint())
def traffic(self, query):
"""
The traffic analysis data for this network. Traffic Analysis with
Hostname Visibility must be enabled on the network.
"""
self.check_for_resource_id()
self.check_timespan(query)
query = clean(query, self.traffic_parameters)
return self.get("/traffic?" + urllib.parse.urlencode(query))
def bind(self, data):
""" Binds template to network. """
self.check_for_resource_id()
data = clean(data, self.bind_parameters)
return self.post("/bind", data)
def unbind(self):
""" Unbind template from network. """
self.check_for_resource_id()
return self.post("/unbind")
def access_policies(self):
""" List the access policies (MS). """
self.check_for_resource_id()
return self.get("/accessPolicies")
def air_marshal(self, query):
""" Air marshal scan results from a network. """
self.check_timespan(query)
self.check_for_resource_id()
query = clean(query, self.air_marshal_parameters)
return self.get("/airMarshal?" + urllib.parse.urlencode(query))
def phone_contacts(self, phone_contact_id=None):
""" List the phone contacts in a network. """
self.check_for_resource_id()
return PhoneContacts(self.key, self.endpoint(), phone_contact_id)
def phone_numbers(self):
""" List all the phone numbers in a network. """
self.check_for_resource_id()
return self.get("/phoneNumbers")
def available_phone_numbers(self):
""" List all the available phone numbers in a network. """
self.check_for_resource_id()
return self.get("/phoneNumbers/available")
def clients(self, clients_id=None):
""" Returns the Clients API Resource. """
self.check_for_resource_id()
return Clients(self.key, self.endpoint(), clients_id)
def events(self, query):
""" Returns the Clients API Resource. """
self.check_for_resource_id()
query = clean(query, self.events_parameters)
return self.get("/events?" + urllib.parse.urlencode(query))
|
mit
| -4,322,985,330,267,941,400
| 35.376923
| 244
| 0.642842
| false
| 3.866721
| false
| false
| false
|
bjuvensjo/scripts
|
vang/misc/basic.py
|
1
|
1626
|
#!/usr/bin/env python3
from argparse import ArgumentParser
from base64 import encodebytes
from os import environ, name, system
from sys import argv
def get_basic_auth(username, password):
"""Returns basic authentication.
Args:
username (str): the username
password (str): the password
Return:
base 64 encoded Authorization header value
>>> get_basic_auth("foo", "bar")
'Basic Zm9vOmJhcg=='
"""
auth = f"{username}:{password}"
return f"Basic {encodebytes(auth.encode()).decode('UTF-8').strip()}"
def get_basic_auth_header(username, password):
return f"Authorization: {get_basic_auth(username, password)}"
def parse_args(args):
parser = ArgumentParser(
description='Prints the basic authentication header and copies it to the clipboard')
# parser.add_argument(
# '-u', '--username', help='Username', default=environ['U'])
# parser.add_argument(
# '-p', '--password', help='Password', default=environ['P'])
parser.add_argument(
'-u', '--username', help='Username')
parser.add_argument(
'-p', '--password', help='Password')
return parser.parse_args(args)
def main(username, password):
basic_auth_header = get_basic_auth_header(username, password)
if name == 'posix':
system(f"echo '{basic_auth_header}\c' | pbcopy")
print(f"'{basic_auth_header}' copied to clipboard")
else:
print(basic_auth_header)
if __name__ == '__main__': # pragma: no cover
main(**parse_args(argv[1:]).__dict__)
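# --- Hedged CLI example (illustrative; output follows from the doctest above):
#   $ python3 basic.py -u foo -p bar
#   Authorization: Basic Zm9vOmJhcg==
# On posix systems the header is additionally copied to the clipboard via pbcopy.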
|
apache-2.0
| 8,338,868,900,330,906,000
| 29.111111
| 80
| 0.641451
| false
| 3.799065
| false
| false
| false
|
qisanstudio/qsapp-express
|
src/express/panel/account.py
|
1
|
2438
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask import request, url_for, flash, redirect
from flask.ext.admin import expose
from flask.ext.admin.babel import gettext
from flask.ext.admin.actions import action
from studio.core.engines import db
from express.models.account import (RoleModel, PrivilegeModel,
AccountModel, EmailModel)
from express.panel.base import BaseView
class Role(BaseView):
perm = 'role'
column_list = ['id', 'title']
column_default_sort = ('id', True)
def __init__(self, **kwargs):
super(Role, self).__init__(RoleModel, db.session, **kwargs)
def create_form(self, obj=None):
form = super(Role, self).create_form(obj=obj)
delattr(form, 'accounts')
return form
def edit_form(self, obj=None):
form = super(Role, self).edit_form(obj=obj)
delattr(form, 'accounts')
return form
class Privilege(BaseView):
perm = 'role'
column_list = ['id', 'code', 'description', 'date_created']
column_default_sort = ('date_created', True)
def __init__(self, **kwargs):
super(Privilege, self).__init__(PrivilegeModel, db.session, **kwargs)
def create_form(self, obj=None):
form = super(Privilege, self).create_form(obj=obj)
delattr(form, 'date_created')
return form
def edit_form(self, obj=None):
form = super(Privilege, self).edit_form(obj=obj)
delattr(form, 'date_created')
return form
class Account(BaseView):
perm = 'account'
can_create = False
column_list = ['uid', 'nickname', 'date_created']
column_default_sort = ('date_created', True)
def __init__(self, **kwargs):
super(Account, self).__init__(AccountModel, db.session, **kwargs)
def edit_form(self, obj=None):
form = super(Account, self).edit_form(obj=obj)
delattr(form, 'addresses')
delattr(form, 'bills')
delattr(form, 'date_created')
return form
class Email(BaseView):
perm = 'account'
can_create = False
can_edit = False
column_list = ['uid', 'email', 'date_last_signed_in', 'date_created']
column_default_sort = ('date_last_signed_in', True)
def __init__(self, **kwargs):
super(Email, self).__init__(EmailModel, db.session, **kwargs)
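# --- Hedged wiring sketch (assumption: a Flask-Admin ``Admin`` instance named
# ``admin`` is created elsewhere in the application):
# admin.add_view(Role(name='Roles'))
# admin.add_view(Privilege(name='Privileges'))
# admin.add_view(Account(name='Accounts'))
# admin.add_view(Email(name='Emails'))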
|
mit
| -9,158,380,060,821,169,000
| 26.682353
| 77
| 0.597621
| false
| 3.705167
| false
| false
| false
|
tcpcloud/openvstorage
|
webapps/api/backend/views/tasks.py
|
1
|
2521
|
# Copyright 2014 Open vStorage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for working with celery tasks
"""
from rest_framework import status, viewsets
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import link
from backend.decorators import required_roles, load, log
from celery.task.control import inspect
from ovs.celery_run import celery
class TaskViewSet(viewsets.ViewSet):
"""
Information about celery tasks
"""
permission_classes = (IsAuthenticated,)
prefix = r'tasks'
base_name = 'tasks'
@log()
@required_roles(['read'])
@load()
def list(self):
"""
Overview of active, scheduled, reserved and revoked tasks
"""
inspector = inspect()
data = {'active' : inspector.active(),
'scheduled': inspector.scheduled(),
'reserved' : inspector.reserved(),
'revoked' : inspector.revoked()}
return Response(data, status=status.HTTP_200_OK)
@log()
@required_roles(['read'])
@load()
def retrieve(self, pk):
"""
Load information about a given task
"""
result = celery.AsyncResult(pk)
if result.successful():
result_data = result.result
else:
result_data = str(result.result) if result.result is not None else None
data = {'id' : result.id,
'status' : result.status,
'successful': result.successful(),
'failed' : result.failed(),
'ready' : result.ready(),
'result' : result_data}
return Response(data, status=status.HTTP_200_OK)
@link()
@log()
@required_roles(['read'])
@load()
def get(self, pk):
"""
Gets a given task's result
"""
result = celery.AsyncResult(pk)
return Response(result.get(), status=status.HTTP_200_OK)
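# --- Hedged registration sketch (assumption: a DRF router is configured in
# the project's urls module):
# from rest_framework.routers import DefaultRouter
# router = DefaultRouter()
# router.register(TaskViewSet.prefix, TaskViewSet, base_name=TaskViewSet.base_name)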
|
apache-2.0
| -2,150,323,591,975,315,000
| 30.911392
| 83
| 0.623165
| false
| 4.215719
| false
| false
| false
|
gregunz/ada2017
|
project/src/fetch_source_country.py
|
1
|
11025
|
# -*- coding: utf-8 -*-
import csv
import os
import re
import pandas as pd
import requests
from bs4 import BeautifulSoup
def get_all_newspapers_to_country_dict(v2=True):
"""Get the country associated to each newspapers url in a dict following the format: {'Clean URL' : 'Country name'}
This function is not sufficient on its own to get the country of every newspaper, please see get_countries_for_dataframe in data_cleaning.py
"""
def clean_url(url):
""" This function clean a url.
For example https://example.com will be returned as example.com
Keyword arguments:
url -- The url
"""
url_pair = re.findall(r'\b(?!www\.)([a-zA-Z0-9-]+(\.[a-z]+)+)', url.lower())
if (url_pair == []): # If it is not a url
return url
else:
return url_pair[0][0]
if v2:
columns = ['Country name', 'Newspaper Name', 'Newspaper Url']
df = pd.DataFrame(columns=columns)
if not os.path.isfile('../data/locations/clean_url_to_country_v2.csv'):
if not os.path.isfile('../data/locations/brute_newspapers_to_country_v2.csv'):
base_url = 'http://www.abyznewslinks.com/'
def get_newspapers(url, country_name):
df = pd.DataFrame(columns=columns)
r = requests.get(url)
# Find newspaper links
soup = BeautifulSoup(r.text, 'html.parser')
divs = soup.find_all('div')[3:]
for div in divs:
news_links = div.find_all('a')
for a_news in news_links:
a_news_link = a_news.get('href')
if a_news_link is not None:
# Check whether it links to a page of the website with more newspapers instead of a newspaper webstes
if a_news_link[-3:] == 'htm' and a_news_link[:3] != 'htt' and a_news_link[:3] != 'www':
df = df.append(get_newspapers(base_url + a_news.get('href'), country_name))
else:
newspaper_name = a_news.text
newspaper_url = a_news.get('href')
news_df = pd.DataFrame([[country_name, newspaper_name, newspaper_url]],
columns=columns)
df = df.append(news_df)
return df
r = requests.get(base_url + 'allco.htm')
soup = BeautifulSoup(r.text, 'html.parser')
countries = soup.find_all('table')[5].find_all('a')
for a in countries:
# Get specific page
country_name = a.text
df = df.append(get_newspapers(base_url + a.get('href'), country_name))
df.to_csv('../data/locations/brute_newspapers_to_country_v2.csv', index=False)
df = df.drop_duplicates(subset=['Newspaper Url'], keep='first')
df.to_csv('../data/locations/no_duplicate_brute_newspapers_to_country_v2.csv', index=False)
else:
if not os.path.isfile('../data/locations/no_duplicate_brute_newspapers_to_country_v2.csv'):
df = pd.read_csv('../data/locations/brute_newspapers_to_country_v2.csv')
df = df.drop_duplicates(subset=['Newspaper Url'], keep='first')
df.to_csv('../data/locations/no_duplicate_brute_newspapers_to_country_v2.csv', index=False)
else:
df = pd.read_csv('../data/locations/no_duplicate_brute_newspapers_to_country_v2.csv')
df['Clean URL'] = df['Newspaper Url'].apply(lambda x: clean_url(x))
df.to_csv('../data/locations/clean_url_to_country_v2.csv', index=False)
else:
df = pd.read_csv('../data/locations/clean_url_to_country_v2.csv')
return df[["Clean URL", "Country name"]].set_index("Clean URL").to_dict().get("Country name")
else:
# If the file does not exist, fetch everything (takes ~ 16 hours)
if not os.path.isfile('../data/locations/clean_url_to_country.csv'):
# First get newspaper per country as referenced in https://www.thepaperboy.com/newspapers-by-country.cfm
if not os.path.isfile('../data/locations/countries_news.csv'):
# Get country specific page and get all the newspapers
def get_country_newspaper(country_name, url):
df = pd.DataFrame(columns=columns)
r = requests.get(url)
soup = BeautifulSoup(r.text, 'html.parser')
newspapers = soup.find_all('td', align='left')[1:]
for n in newspapers:
newspaper_name = n.find('strong').text
# Get the newspaper specific page
r = requests.get(base_url + n.find('a').get('href'))
soup = BeautifulSoup(r.text, 'html.parser')
n_url = soup.find_all('h1')
newspaper_url = n_url[0].find('a').get('href') # Newspaper url
news_df = pd.DataFrame([[country_name, newspaper_name, newspaper_url]], columns=columns)
df = df.append(news_df)
return df
r = requests.get('https://www.thepaperboy.com/newspapers-by-country.cfm')
soup = BeautifulSoup(r.text, 'html.parser')
base_url = 'https://www.thepaperboy.com'
country_links = soup.find_all('a', class_='mediumlink')
columns = ['Country name', 'Newspaper Name', 'Newspaper Url']
df = pd.DataFrame(columns=columns)
for a in country_links:
country_name = a.text.split(sep='(')[0]
if country_name[0] == ' ':
country_name = country_name[1:]
if country_name[-1] == ' ':
country_name = country_name[:-1]
# United states has a different structure
if country_name != 'United States':
df = df.append(get_country_newspaper(country_name, base_url + a.get('href')))
else:
r = requests.get('https://www.thepaperboy.com/united-states/newspapers/country.cfm')
soup = BeautifulSoup(r.text, 'html.parser')
us_states = soup.find_all('a', class_='mediumlink')
for state in us_states:
df = df.append(get_country_newspaper(country_name, base_url + state.get('href')))
df.to_csv('../data/locations/countries_news.csv')
original_websites_df = df
else:
original_websites_df = pd.read_csv('../data/locations/countries_news.csv')
def get_new_url(base_url):
""" This function try to get a redirection url and return it otherwise return the base url
"""
try:
response = requests.get(base_url, timeout=5)
if response.history:
return response.url
else:
return base_url
except:
return base_url
# As the website is not up to date, we get the new address of the newspapers which changed their address over time (But keep both for older news!). Basically we follow the previous link and get if there is a redirection
if not os.path.isfile('../data/locations/revisited_website_url.csv'):
for _, row in original_websites_df.iterrows():
new_url = get_new_url(row['Newspaper Url'])
df = pd.DataFrame({'Country name': [row['Country name']], 'New URL': [new_url]})
with open('../data/locations/revisited_website_url.csv', 'a') as f:
df.to_csv(f, header=False)
redirected_websites_df = pd.read_csv('../data/locations/revisited_website_url.csv',
names=['Country name', 'New URL'])
original_websites_df['Clean URL'] = original_websites_df['Newspaper Url'].apply(lambda x: clean_url(x))
original_websites_df.to_csv('../data/locations/original_clean_url_to_country.csv', index=False)
redirected_websites_df['Clean URL'] = redirected_websites_df['New URL'].apply(lambda x: clean_url(x))
redirected_websites_df.to_csv('../data/locations/redirected_clean_url_to_country.csv', index=False)
websites_df = original_websites_df[["Clean URL", "Country name"]].append(
redirected_websites_df[["Clean URL", "Country name"]])
websites_df.to_csv('../data/locations/clean_url_to_country.csv', index=False)
return websites_df[["Clean URL", "Country name"]].set_index("Clean URL").to_dict().get("Country name")
else:
return pd.read_csv('../data/locations/clean_url_to_country.csv')[["Clean URL", "Country name"]].set_index(
"Clean URL").to_dict().get("Country name")
def get_tld_to_country_dict():
"""Get the country associated to a top level domain in the formt: {'TLD' : 'Country name'}
This function is not sufficient on its own to get the country of every newspaper, please see get_countries_for_dataframe in data_cleaning.py
"""
if not os.path.isfile('../data/locations/top_level_domain_to_country.csv'):
r = requests.get('https://raw.githubusercontent.com/mledoze/countries/master/countries.json')
# Get the mapping
countries = [(c['name']['common'], c['tld']) for c in r.json()]
tld_to_country = {}
for c in countries:
for domain in c[1]:
if not domain in tld_to_country:
tld_to_country[domain] = c[0]
tld_to_country['.us'] = 'United States' # hardcoded otherwise it's not the good value
with open('../data/locations/top_level_domain_to_country.csv', 'w', encoding='utf-8') as csv_file:
writer = csv.writer(csv_file)
writer.writerow(['tld', 'Country name'])
for key, value in tld_to_country.items():
writer.writerow([key, value])
return pd.read_csv('../data/locations/top_level_domain_to_country.csv').set_index('tld').to_dict().get(
'Country name')
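# --- Hedged usage sketch (illustrative; the URL is a hypothetical example):
# url_to_country = get_all_newspapers_to_country_dict()
# tld_to_country = get_tld_to_country_dict()
# country = url_to_country.get("lemonde.fr") or tld_to_country.get(".fr")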
|
mit
| 7,856,140,663,845,053,000
| 49.27907
| 231
| 0.524444
| false
| 4.019322
| false
| false
| false
|
ksetyadi/Sahana-Eden
|
models/assess.py
|
1
|
92989
|
# -*- coding: utf-8 -*-
""" Assessment - Model
@author: Fran Boon
@author: Dominic König
@author: Michael Howden
This module currently contains 2 types of Assessments
* Flexible Impact Assessments
* Rapid Assessment Tool (from ECB: http://www.ecbproject.org/page/48)
@ToDo Validation similar to shn_sitrep_school_report_onvalidation()
http://bazaar.launchpad.net/~flavour/sahana-eden/trunk/annotate/head:/models/sitrep.py#L99
"""
module = "assess"
if deployment_settings.has_module(module):
# ---------------------------------------------------------------------
# Flexible Impact Assessments
# ---------------------------------------------------------------------
# Assessment
resourcename = "assess"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("datetime", "datetime"),
location_id(),
organisation_id(),
person_id("assessor_person_id"),
comments(),
ireport_id(), # Assessment can be linked to an Incident Report
migrate=migrate, *s3_meta_fields()
)
table.datetime.label = T("Date & Time")
table.datetime.default = request.utcnow
table.assessor_person_id.label = T("Assessor")
if auth.is_logged_in():
table.assessor_person_id.default = shn_get_db_field_value(db = db,
table = "pr_person",
field = "pe_id",
look_up = session.auth.user.person_uuid,
look_up_field = "uuid"
)
assess_id = S3ReusableField("assess_id", table,
requires = IS_NULL_OR(IS_ONE_OF(db, "assess_assess.id", "%(id)s")),
represent = lambda id: id,
label = T("Assessment"),
ondelete = "RESTRICT"
)
# CRUD strings
ADD_ASSESSMENT = T("Add Assessment")
LIST_ASSESSMENTS = T("List Assessments")
s3.crud_strings[tablename] = Storage(
title_create = ADD_ASSESSMENT,
title_display = T("Assessment Details"),
title_list = LIST_ASSESSMENTS,
title_update = T("Edit Assessment"),
title_search = T("Search Assessments"),
subtitle_create = T("Add New Assessment"),
subtitle_list = T("Assessments"),
label_list_button = LIST_ASSESSMENTS,
label_create_button = ADD_ASSESSMENT,
label_delete_button = T("Delete Assessment"),
msg_record_created = T("Assessment added"),
msg_record_modified = T("Assessment updated"),
msg_record_deleted = T("Assessment deleted"),
msg_list_empty = T("No Assessments currently registered"))
# assess_assess as component of org_organisation
s3xrc.model.add_component(module, resourcename,
multiple=True,
joinby=dict(org_organisation="organisation_id")
)
#==============================================================================
# Baseline Type
resourcename = "baseline_type"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("name", length=128, notnull=True, unique=True),
migrate=migrate, *s3_meta_fields()
)
# CRUD strings
ADD_BASELINE_TYPE = T("Add Baseline Type")
LIST_BASELINE_TYPE = T("List Baseline Types")
s3.crud_strings[tablename] = Storage(
title_create = ADD_BASELINE_TYPE,
title_display = T("Baseline Type Details"),
title_list = LIST_BASELINE_TYPE,
title_update = T("Edit Baseline Type"),
title_search = T("Search Baseline Type"),
subtitle_create = T("Add New Baseline Type"),
subtitle_list = T("Baseline Types"),
label_list_button = LIST_BASELINE_TYPE,
label_create_button = ADD_BASELINE_TYPE,
label_delete_button = T("Delete Baseline Type"),
msg_record_created = T("Baseline Type added"),
msg_record_modified = T("Baseline Type updated"),
msg_record_deleted = T("Baseline Type deleted"),
msg_list_empty = T("No Baseline Types currently registered"))
def baseline_type_comment():
if auth.has_membership(auth.id_group("Administrator")):
return DIV(A(ADD_BASELINE_TYPE,
_class="colorbox",
_href=URL(r=request, c="assess", f="baseline_type", args="create", vars=dict(format="popup")),
_target="top",
_title=ADD_BASELINE_TYPE
)
)
else:
return None
baseline_type_id = S3ReusableField("baseline_type_id", db.assess_baseline_type, sortby="name",
requires = IS_NULL_OR(IS_ONE_OF(db, "assess_baseline_type.id","%(name)s", sort=True)),
represent = lambda id: shn_get_db_field_value(db = db,
table = "assess_baseline_type",
field = "name",
look_up = id),
label = T("Baseline Type"),
comment = baseline_type_comment(),
ondelete = "RESTRICT"
)
#==============================================================================
# Baseline
resourcename = "baseline"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assess_id(),
baseline_type_id(),
Field("value", "double"),
comments(),
migrate=migrate, *s3_meta_fields()
)
# Hide FK fields in forms
table.assess_id.readable = table.assess_id.writable = False
# CRUD strings
ADD_BASELINE = T("Add Baseline")
LIST_BASELINE = T("List Baselines")
s3.crud_strings[tablename] = Storage(
title_create = ADD_BASELINE,
title_display = T("Impact Baselines"),
title_list = LIST_BASELINE,
title_update = T("Edit Baseline"),
title_search = T("Search Baselines"),
subtitle_create = T("Add New Baseline"),
subtitle_list = T("Baselines"),
label_list_button = LIST_BASELINE,
label_create_button = ADD_BASELINE,
label_delete_button = T("Delete Baseline"),
msg_record_created = T("Baseline added"),
msg_record_modified = T("Baseline updated"),
msg_record_deleted = T("Baseline deleted"),
msg_list_empty = T("No Baselines currently registered"))
# Baseline as component of assessments
s3xrc.model.add_component(module, resourcename,
multiple=True,
joinby=dict(assess_assess="assess_id"),
deletable=True,
editable=True)
#==============================================================================
# Summary
resourcename = "summary"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assess_id(),
cluster_id(),
#Field("value", "double"),
Field("value", "integer",
default = 0),
comments(),
migrate=migrate, *s3_meta_fields()
)
# Hide FK fields in forms
table.assess_id.readable = table.assess_id.writable = False
assess_severity_opts = {
0: T("Low"),
1: T("Medium"),
2: T("High"),
3: T("Very High"),
}
table.value.label = T("Severity")
table.value.requires = IS_EMPTY_OR(IS_IN_SET(assess_severity_opts))
table.value.widget = SQLFORM.widgets.radio.widget
assess_colour_opts = {
0:"green",
1:"yellow",
2:"orange",
3:"red"
}
def shn_assess_severity_represent(value):
if value is not None:
return IMG( _src="/%s/static/img/%s_circle_16px.png" % (request.application, assess_colour_opts[value]),
_alt= value,
_align="middle"
)
else:
return NONE
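# Hedged illustration: value 0 renders a green circle image, value 3 a red
# one (via assess_colour_opts); a missing value falls back to NONE.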
table.value.represent = shn_assess_severity_represent
# CRUD strings
ADD_ASSESS_SUMMARY = T("Add Assessment Summary")
LIST_ASSESS_SUMMARY = T("List Assessment Summaries")
s3.crud_strings[tablename] = Storage(
title_create = ADD_ASSESS_SUMMARY,
title_display = T("Impact Assessment Summaries"),
title_list = LIST_ASSESS_SUMMARY,
title_update = T("Edit Assessment Summary"),
title_search = T("Search Assessment Summaries"),
subtitle_create = T("Add New Assessment Summary"),
subtitle_list = T("Assessment Summaries"),
label_list_button = LIST_ASSESS_SUMMARY,
label_create_button = ADD_ASSESS_SUMMARY,
label_delete_button = T("Delete Assessment Summary"),
msg_record_created = T("Assessment Summary added"),
msg_record_modified = T("Assessment Summary updated"),
msg_record_deleted = T("Assessment Summary deleted"),
msg_list_empty = T("No Assessment Summaries currently registered"))
# Summary as component of assessments
s3xrc.model.add_component(module, resourcename,
multiple=True,
joinby=dict(assess_assess="assess_id"),
deletable=True,
editable=True)
#==============================================================================
# Rapid Assessment Tool
#==============================================================================
# Section CRUD strings
ADD_SECTION = T("Add Section")
LIST_SECTIONS = T("List Sections")
rat_section_crud_strings = Storage(
title_create = ADD_SECTION,
title_display = T("Section Details"),
title_list = LIST_SECTIONS,
title_update = "",
title_search = T("Search Sections"),
subtitle_create = "",
subtitle_list = T("Sections"),
label_list_button = LIST_SECTIONS,
label_create_button = ADD_SECTION,
label_delete_button = T("Delete Section"),
msg_record_created = T("Section updated"),
msg_record_modified = T("Section updated"),
msg_record_deleted = T("Section deleted"),
msg_list_empty = T("No Sections currently registered"))
# -------------------------------------------------------------------------
# Common options
rat_walking_time_opts = {
1: T("0-15 minutes"),
2: T("15-30 minutes"),
3: T("30-60 minutes"),
4: T("over one hour"),
999: NOT_APPLICABLE
}
# -------------------------------------------------------------------------
# Helper functions
def shn_rat_represent_multiple(set, opt):
""" Represent an IS_IN_SET with multiple=True as
comma-separated list of options
@param set: the options set as dict
@param opt: the selected option(s)
"""
if isinstance(opt, (list, tuple)):
opts = opt
vals = [str(set.get(o, o)) for o in opts]
#elif isinstance(opt, basestring):
# opts = opt.split("|")
# vals = [str(set.get(int(o), o)) for o in opts if o]
elif isinstance(opt, int):
opts = [opt]
vals = [str(set.get(opt, opt))]
else:
return T("None")
if len(opts) > 1:
vals = ", ".join(vals)
else:
vals = len(vals) and vals[0] or ""
return vals
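# Hedged illustration (hypothetical option set): with set = {1: T("Child"),
# 2: T("Adult male")}, shn_rat_represent_multiple(set, [1, 2]) yields
# "Child, Adult male" and shn_rat_represent_multiple(set, 1) yields "Child".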
def shn_rat_label_and_tooltip(field, label, tooltip, multiple=False):
""" Add label and tooltip to a field """
field.label = T(label)
if multiple:
field.comment = DIV("(%s)" % T("Select all that apply"),
DIV(_class="tooltip",
_title="%s|%s" % (T(label), T(tooltip))))
else:
field.comment = DIV(DIV(_class="tooltip",
_title="%s|%s" % (T(label), T(tooltip))))
rat_interview_location_opts = {
1:T("Village"),
2:T("Urban area"),
3:T("Collective center"),
4:T("Informal camp"),
5:T("Formal camp"),
6:T("School"),
7:T("Mosque"),
8:T("Church"),
99:T("Other")
}
rat_interviewee_opts = {
1:T("Male"),
2:T("Female"),
3:T("Village Leader"),
4:T("Informal Leader"),
5:T("Community Member"),
6:T("Religious Leader"),
7:T("Police"),
8:T("Healthcare Worker"),
9:T("School Teacher"),
10:T("Womens Focus Groups"),
11:T("Child (< 18 yrs)"),
99:T("Other")
}
rat_accessibility_opts = {
1:T("2x4 Car"),
2:T("4x4 Car"),
3:T("Truck"),
4:T("Motorcycle"),
5:T("Boat"),
6:T("Walking Only"),
7:T("No access at all"),
99:T("Other")
}
# Main Resource -----------------------------------------------------------
# contains Section 1: Identification Information
#
resourcename = "rat"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
Field("date", "date"),
location_id(),
staff_id(),
staff_id("staff2_id", label = T("Staff 2")),
Field("interview_location", "list:integer"),
Field("interviewee", "list:integer"),
Field("accessibility", "integer"),
comments(),
document_id(),
shelter_id(),
migrate=migrate, *s3_meta_fields())
table.date.requires = [IS_DATE(), IS_NOT_EMPTY()]
table.date.default = datetime.datetime.today()
table.interview_location.label = T("Interview taking place at")
table.interview_location.requires = IS_NULL_OR(IS_IN_SET(rat_interview_location_opts, multiple=True, zero=None))
table.interview_location.represent = lambda opt, set=rat_interview_location_opts: \
shn_rat_represent_multiple(set, opt)
table.interview_location.comment = "(" + T("Select all that apply") + ")"
#table.interview_location.widget = SQLFORM.widgets.checkboxes.widget
table.interviewee.label = T("Person interviewed")
table.interviewee.requires = IS_NULL_OR(IS_IN_SET(rat_interviewee_opts, multiple=True, zero=None))
table.interviewee.represent = lambda opt, set=rat_interviewee_opts: \
shn_rat_represent_multiple(set, opt)
table.interviewee.comment = "(" + T("Select all that apply") + ")"
#table.interviewee.widget = SQLFORM.widgets.checkboxes.widget
table.accessibility.requires = IS_NULL_OR(IS_IN_SET(rat_accessibility_opts, zero=None))
table.accessibility.represent = lambda opt: rat_accessibility_opts.get(opt, opt)
table.accessibility.label = T("Accessibility of Affected Location")
# CRUD strings
ADD_ASSESSMENT = T("Add Assessment")
LIST_ASSESSMENTS = T("List Assessments")
s3.crud_strings[tablename] = Storage(
title_create = ADD_ASSESSMENT,
title_display = T("Assessment Details"),
title_list = LIST_ASSESSMENTS,
title_update = T("Edit Assessment"),
title_search = T("Search Assessments"),
subtitle_create = T("Add New Assessment"),
subtitle_list = T("Assessments"),
label_list_button = LIST_ASSESSMENTS,
label_create_button = ADD_ASSESSMENT,
label_delete_button = T("Delete Assessment"),
msg_record_created = T("Assessment added"),
msg_record_modified = T("Assessment updated"),
msg_record_deleted = T("Assessment deleted"),
msg_list_empty = T("No Assessments currently registered"))
# -------------------------------------------------------------------------
def rat_assessment_onaccept(form):
id = form.vars.get("id", None)
if id:
for x in xrange(2, 10):
section = "assess_section%s" % x
set = db(db[section].assessment_id == id)
record = set.select(db[section].id, limitby=(0, 1)).first()
if not record:
db[section].insert(assessment_id=id)
# -------------------------------------------------------------------------
def shn_rat_represent(id):
""" Represent assessment as string """
table = db.assess_rat
row = db(table.id == id).select(table.date,
table.staff_id,
table.staff2_id,
table.location_id,
limitby = (0, 1)).first()
if row:
date = row.date and str(row.date) or ""
location = row.location_id and shn_gis_location_represent(row.location_id) or ""
table = db.org_staff
org = ["", ""]
i = 0
for staff_id in [row.staff_id, row.staff2_id]:
i += 1
if staff_id:
staff = db(table.id == staff_id).select(table.organisation_id,
limitby=(0, 1)).first()
if staff:
org[i] = shn_organisation_represent(staff.organisation_id)
assessment_represent = XML("<div>%s %s, %s %s</div>" % (location, org[0], org[1], date))
else:
assessment_represent = "-"
return assessment_represent
# -------------------------------------------------------------------------
# re-usable field
assessment_id = S3ReusableField("assessment_id", table,
requires = IS_NULL_OR(IS_ONE_OF(db, "assess_rat.id", shn_rat_represent, orderby="assess_rat.id")),
represent = lambda id: shn_rat_represent(id),
label = T("Rapid Assessment"),
comment = A(ADD_ASSESSMENT, _class="colorbox", _href=URL(r=request, c="assess", f="rat", args="create", vars=dict(format="popup")), _target="top", _title=ADD_ASSESSMENT),
ondelete = "RESTRICT")
# Assessment as component of doc_document and cr_shelter.
# RAT has components itself, so best not to constrain within the parent resource tabs
# - therefore disable the listadd & jump out of the tabs for Create/Update
s3xrc.model.add_component(module, resourcename,
multiple=True,
joinby=dict(cr_shelter="shelter_id", doc_document="document_id"))
s3xrc.model.configure(table,
listadd=False,
onaccept=lambda form: rat_assessment_onaccept(form))
# Section 2: Demographic --------------------------------------------------
resourcename = "section2"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("population_total", "integer"),
Field("households_total", "integer"),
Field("population_affected", "integer"),
Field("households_affected", "integer"),
Field("male_05", "double"),
Field("male_612", "double"),
Field("male_1317", "double"),
Field("male_1825", "double"),
Field("male_2660", "double"),
Field("male_61", "double"),
Field("female_05", "double"),
Field("female_612", "double"),
Field("female_1317", "double"),
Field("female_1825", "double"),
Field("female_2660", "double"),
Field("female_61", "double"),
Field("dead_women", "integer"),
Field("dead_men", "integer"),
Field("dead_girl", "integer"),
Field("dead_boy", "integer"),
Field("injured_women", "integer"),
Field("injured_men", "integer"),
Field("injured_girl", "integer"),
Field("injured_boy", "integer"),
Field("missing_women", "integer"),
Field("missing_men", "integer"),
Field("missing_girl", "integer"),
Field("missing_boy", "integer"),
Field("household_head_elderly", "integer"),
Field("household_head_female", "integer"),
Field("household_head_child", "integer"),
Field("disabled_physical", "integer"),
Field("disabled_mental", "integer"),
Field("pregnant", "integer"),
Field("lactating", "integer"),
Field("minorities", "integer"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
table.population_affected.label = T("Estimated # of people who are affected by the emergency")
table.population_affected.comment = T("people")
table.households_affected.label = T("Estimated # of households who are affected by the emergency")
table.households_affected.comment = T("households")
table.population_total.label = T("Total population of site visited")
table.population_total.comment = T("people")
table.households_total.label = T("Total # of households of site visited")
table.households_total.comment = T("households")
table.male_05.label = T("Number/Percentage of affected population that is Male & Aged 0-5")
table.male_612.label = T("Number/Percentage of affected population that is Male & Aged 6-12")
table.male_1317.label = T("Number/Percentage of affected population that is Male & Aged 13-17")
table.male_1825.label = T("Number/Percentage of affected population that is Male & Aged 18-25")
table.male_2660.label = T("Number/Percentage of affected population that is Male & Aged 26-60")
table.male_61.label = T("Number/Percentage of affected population that is Male & Aged 61+")
table.female_05.label = T("Number/Percentage of affected population that is Female & Aged 0-5")
table.female_612.label = T("Number/Percentage of affected population that is Female & Aged 6-12")
table.female_1317.label = T("Number/Percentage of affected population that is Female & Aged 13-17")
table.female_1825.label = T("Number/Percentage of affected population that is Female & Aged 18-25")
table.female_2660.label = T("Number/Percentage of affected population that is Female & Aged 26-60")
table.female_61.label = T("Number/Percentage of affected population that is Female & Aged 61+")
table.dead_women.label = T("How many Women (18 yrs+) are Dead due to the crisis")
table.dead_women.comment = T("people")
table.dead_men.label = T("How many Men (18 yrs+) are Dead due to the crisis")
table.dead_men.comment = T("people")
table.dead_girl.label = T("How many Girls (0-17 yrs) are Dead due to the crisis")
table.dead_girl.comment = T("people")
table.dead_boy.label = T("How many Boys (0-17 yrs) are Dead due to the crisis")
table.dead_boy.comment = T("people")
table.missing_women.label = T("How many Women (18 yrs+) are Missing due to the crisis")
table.missing_women.comment = T("people")
table.missing_men.label = T("How many Men (18 yrs+) are Missing due to the crisis")
table.missing_men.comment = T("people")
table.missing_girl.label = T("How many Girls (0-17 yrs) are Missing due to the crisis")
table.missing_girl.comment = T("people")
table.missing_boy.label = T("How many Boys (0-17 yrs) are Missing due to the crisis")
table.missing_boy.comment = T("people")
table.injured_women.label = T("How many Women (18 yrs+) are Injured due to the crisis")
table.injured_women.comment = T("people")
table.injured_men.label = T("How many Men (18 yrs+) are Injured due to the crisis")
table.injured_men.comment = T("people")
table.injured_girl.label = T("How many Girls (0-17 yrs) are Injured due to the crisis")
table.injured_girl.comment = T("people")
table.injured_boy.label = T("How many Boys (0-17 yrs) are Injured due to the crisis")
table.injured_boy.comment = T("people")
table.household_head_elderly.label = T("Elderly person headed households (>60 yrs)")
table.household_head_elderly.comment = T("households")
table.household_head_female.label = T("Female headed households")
table.household_head_female.comment = T("households")
table.household_head_child.label = T("Child headed households (<18 yrs)")
table.household_head_child.comment = T("households")
table.disabled_physical.label = T("Persons with disability (physical)")
table.disabled_physical.comment = T("people")
table.disabled_mental.label = T("Persons with disability (mental)")
table.disabled_mental.comment = T("people")
table.pregnant.label = T("Pregnant women")
table.pregnant.comment = T("people")
table.lactating.label = T("Lactating women")
table.lactating.comment = T("people")
table.minorities.label = T("Migrants or ethnic minorities")
table.minorities.comment = T("people")
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 3: Shelter & Essential NFIs -------------------------------------
rat_houses_salvmat_types = {
1: T("Wooden plank"),
2: T("Zinc roof"),
3: T("Bricks"),
4: T("Wooden poles"),
5: T("Door frame"),
6: T("Window frame"),
7: T("Roof tile"),
999: NOT_APPLICABLE
}
rat_water_container_types = {
1: T("Jerry can"),
2: T("Bucket"),
3: T("Water gallon"),
99: T("Other (specify)")
}
resourcename = "section3"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("houses_total", "integer"),
Field("houses_destroyed", "integer"),
Field("houses_damaged", "integer"),
Field("houses_salvmat", "list:integer"),
Field("water_containers_available", "boolean"),
Field("water_containers_sufficient", "boolean"),
Field("water_containers_types", "list:integer"),
Field("water_containers_types_other"),
Field("cooking_equipment_available", "boolean"),
Field("sanitation_items_available", "boolean"),
Field("sanitation_items_available_women", "boolean"),
Field("bedding_materials_available", "boolean"),
Field("clothing_sets_available", "boolean"),
Field("nfi_assistance_available", "boolean"),
Field("kits_hygiene_received", "boolean"),
Field("kits_hygiene_source"),
Field("kits_household_received", "boolean"),
Field("kits_household_source"),
Field("kits_dwelling_received", "boolean"),
Field("kits_dwelling_source"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
table.houses_total.label = T("Total number of houses in the area")
table.houses_total.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 99999999))
shn_rat_label_and_tooltip(table.houses_destroyed,
"Number of houses destroyed/uninhabitable",
"How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?")
table.houses_destroyed.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 99999999))
shn_rat_label_and_tooltip(table.houses_damaged,
"Number of houses damaged, but usable",
"How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?")
table.houses_damaged.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 99999999))
shn_rat_label_and_tooltip(table.houses_salvmat,
"Salvage material usable from destroyed houses",
"What type of salvage material can be used from destroyed houses?",
multiple=True)
table.houses_salvmat.requires = IS_NULL_OR(IS_IN_SET(rat_houses_salvmat_types, multiple=True, zero=None))
table.houses_salvmat.represent = lambda opt, set=rat_houses_salvmat_types: \
shn_rat_represent_multiple(set, opt)
shn_rat_label_and_tooltip(table.water_containers_available,
"Water storage containers available for HH",
"Do households have household water storage containers?")
shn_rat_label_and_tooltip(table.water_containers_sufficient,
"Water storage containers sufficient per HH",
"Do households each have at least 2 containers (10-20 litres each) to hold water?")
shn_rat_label_and_tooltip(table.water_containers_types,
"Types of water storage containers available",
"What types of household water storage containers are available?",
multiple=True)
table.water_containers_types.requires = IS_EMPTY_OR(IS_IN_SET(rat_water_container_types, zero=None, multiple=True))
table.water_containers_types.represent = lambda opt, set=rat_water_container_types: \
shn_rat_represent_multiple(set, opt)
table.water_containers_types_other.label = T("Other types of water storage containers")
shn_rat_label_and_tooltip(table.cooking_equipment_available,
"Appropriate cooking equipment/materials in HH",
"Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?")
shn_rat_label_and_tooltip(table.sanitation_items_available,
"Reliable access to sanitation/hygiene items",
"Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?")
shn_rat_label_and_tooltip(table.sanitation_items_available_women,
"Easy access to sanitation items for women/girls",
"Do women and girls have easy access to sanitary materials?")
shn_rat_label_and_tooltip(table.bedding_materials_available,
"Bedding materials available",
"Do households have bedding materials available (tarps, plastic mats, blankets)?")
shn_rat_label_and_tooltip(table.clothing_sets_available,
"Appropriate clothing available",
"Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?")
shn_rat_label_and_tooltip(table.nfi_assistance_available,
"Shelter/NFI assistance received/expected",
"Have households received any shelter/NFI assistance or is assistance expected in the coming days?")
table.kits_hygiene_received.label = T("Hygiene kits received")
table.kits_hygiene_source.label = T("Hygiene kits, source")
table.kits_household_received.label = T("Household kits received")
table.kits_household_source.label = T("Household kits, source")
table.kits_dwelling_received.label = T("Family tarpaulins received")
table.kits_dwelling_source.label = T("Family tarpaulins, source")
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 4 - Water and Sanitation ----------------------------------------
rat_water_source_types = {
1: T("PDAM"),
2: T("Dug Well"),
3: T("Spring"),
4: T("River"),
5: T("Other Faucet/Piped Water"),
99: T("Other (describe)"),
999: NOT_APPLICABLE
}
rat_water_coll_person_opts = {
1: T("Child"),
2: T("Adult male"),
3: T("Adult female"),
4: T("Older person (>60 yrs)"),
999: NOT_APPLICABLE
}
rat_defec_place_types = {
1: T("open defecation"),
2: T("pit"),
3: T("latrines"),
4: T("river"),
99: T("other")
}
rat_defec_place_animals_opts = {
1: T("enclosed area"),
2: T("within human habitat"),
999: NOT_APPLICABLE
}
rat_latrine_types = {
1: T("flush latrine with septic tank"),
2: T("pit latrine"),
999: NOT_APPLICABLE
}
resourcename = "section4"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("water_source_pre_disaster_type", "integer"),
Field("water_source_pre_disaster_description"),
Field("dwater_source_type", "integer"),
Field("dwater_source_description"),
Field("dwater_reserve"),
Field("swater_source_type", "integer"),
Field("swater_source_description"),
Field("swater_reserve"),
Field("water_coll_time", "integer"),
Field("water_coll_safe", "boolean"),
Field("water_coll_safety_problems"),
Field("water_coll_person", "integer"),
Field("defec_place_type"),
Field("defec_place_description"),
Field("defec_place_distance", "integer"),
Field("defec_place_animals", "integer"),
Field("close_industry", "boolean"),
Field("waste_disposal"),
Field("latrines_number", "integer"),
Field("latrines_type", "integer"),
Field("latrines_separation", "boolean"),
Field("latrines_distance", "integer"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
table.water_source_pre_disaster_type.label = T("Type of water source before the disaster")
table.water_source_pre_disaster_type.requires = IS_EMPTY_OR(IS_IN_SET(rat_water_source_types, zero=None))
table.water_source_pre_disaster_type.represent = lambda opt: rat_water_source_types.get(opt, UNKNOWN_OPT)
table.water_source_pre_disaster_description.label = T("Description of water source before the disaster")
shn_rat_label_and_tooltip(table.dwater_source_type,
"Current type of source for drinking water",
"What is your major source of drinking water?")
table.dwater_source_type.requires = IS_EMPTY_OR(IS_IN_SET(rat_water_source_types, zero=None))
table.dwater_source_type.represent = lambda opt: rat_water_source_types.get(opt, UNKNOWN_OPT)
table.dwater_source_description.label = T("Description of drinking water source")
shn_rat_label_and_tooltip(table.dwater_reserve,
"How long will this water resource last?",
"Specify the minimum sustainability in weeks or days.")
shn_rat_label_and_tooltip(table.swater_source_type,
"Current type of source for sanitary water",
"What is your major source of clean water for daily use (ex: washing, cooking, bathing)?")
table.swater_source_type.requires = IS_EMPTY_OR(IS_IN_SET(rat_water_source_types, zero=None))
table.swater_source_type.represent = lambda opt: rat_water_source_types.get(opt, UNKNOWN_OPT)
table.swater_source_description.label = T("Description of sanitary water source")
shn_rat_label_and_tooltip(table.swater_reserve,
"How long will this water resource last?",
"Specify the minimum sustainability in weeks or days.")
shn_rat_label_and_tooltip(table.water_coll_time,
"Time needed to collect water",
"How long does it take you to reach the available water resources? Specify the time required to go there and back, including queuing time, by foot.")
table.water_coll_time.requires = IS_EMPTY_OR(IS_IN_SET(rat_walking_time_opts, zero=None))
table.water_coll_time.represent = lambda opt: rat_walking_time_opts.get(opt, UNKNOWN_OPT)
table.water_coll_safe.label = T("Is it safe to collect water?")
table.water_coll_safe.default = True
table.water_coll_safety_problems.label = T("If no, specify why")
table.water_coll_person.label = T("Who usually collects water for the family?")
table.water_coll_person.requires = IS_EMPTY_OR(IS_IN_SET(rat_water_coll_person_opts, zero=None))
table.water_coll_person.represent = lambda opt: rat_water_coll_person_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.defec_place_type,
"Type of place for defecation",
"Where do the majority of people defecate?",
multiple=True)
table.defec_place_type.requires = IS_EMPTY_OR(IS_IN_SET(rat_defec_place_types, zero=None, multiple=True))
table.defec_place_type.represent = lambda opt, set=rat_defec_place_types: \
shn_rat_represent_multiple(set, opt)
table.defec_place_description.label = T("Description of defecation area")
table.defec_place_distance.label = T("Distance between defecation area and water source")
table.defec_place_distance.comment = T("meters")
table.defec_place_animals.label = T("Defecation area for animals")
table.defec_place_animals.requires = IS_EMPTY_OR(IS_IN_SET(rat_defec_place_animals_opts, zero = None))
table.defec_place_animals.represent = lambda opt: rat_defec_place_animals_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.close_industry,
"Industry close to village/camp",
"Is there any industrial or agro-chemical production close to the affected area/village?")
shn_rat_label_and_tooltip(table.waste_disposal,
"Place for solid waste disposal",
"Where is solid waste disposed in the village/camp?")
shn_rat_label_and_tooltip(table.latrines_number,
"Number of latrines",
"How many latrines are available in the village/IDP centre/Camp?")
table.latrines_number.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.latrines_type,
"Type of latrines",
"What type of latrines are available in the village/IDP centre/Camp?")
table.latrines_type.requires = IS_EMPTY_OR(IS_IN_SET(rat_latrine_types, zero=None))
table.latrines_type.represent = lambda opt: rat_latrine_types.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.latrines_separation,
"Separate latrines for women and men",
"Are there separate latrines for women and men available?")
shn_rat_label_and_tooltip(table.latrines_distance,
"Distance between shelter and latrines",
"Distance between latrines and temporary shelter in meters")
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 5 - Health ------------------------------------------------------
rat_health_services_types = {
1: T("Community Health Center"),
2: T("Hospital")
}
rat_health_problems_opts = {
1: T("Respiratory Infections"),
2: T("Diarrhea"),
3: T("Dehydration"),
99: T("Other (specify)")
}
rat_infant_nutrition_alternative_opts = {
1: T("Porridge"),
2: T("Banana"),
3: T("Instant Porridge"),
4: T("Air tajin"),
99: T("Other (specify)")
}
resourcename = "section5"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("health_services_pre_disaster", "boolean"),
Field("medical_supplies_pre_disaster", "boolean"),
Field("health_services_post_disaster", "boolean"),
Field("medical_supplies_post_disaster", "boolean"),
Field("medical_supplies_reserve", "integer"),
Field("health_services_available_types", "list:integer"),
Field("staff_number_doctors", "integer"),
Field("staff_number_nurses", "integer"),
Field("staff_number_midwives", "integer"),
Field("health_service_walking_time", "integer"),
Field("health_problems_adults", "list:integer"),
Field("health_problems_adults_other"),
Field("health_problems_children", "list:integer"),
Field("health_problems_children_other"),
Field("chronical_illness_cases", "boolean"),
Field("chronical_illness_children", "boolean"),
Field("chronical_illness_elderly", "boolean"),
Field("chronical_care_sufficient", "boolean"),
Field("malnutrition_present_pre_disaster", "boolean"),
Field("mmd_present_pre_disaster", "boolean"),
Field("breast_milk_substitutes_pre_disaster", "boolean"),
Field("breast_milk_substitutes_post_disaster", "boolean"),
Field("infant_nutrition_alternative", "list:integer"),
Field("infant_nutrition_alternative_other"),
Field("u5_diarrhea", "boolean"),
Field("u5_diarrhea_rate_48h", "integer"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
shn_rat_label_and_tooltip(table.health_services_pre_disaster,
"Health services functioning prior to disaster",
"Were there health services functioning for the community prior to the disaster?")
shn_rat_label_and_tooltip(table.medical_supplies_pre_disaster,
"Basic medical supplies available prior to disaster",
"Were basic medical supplies available for health services prior to the disaster?")
shn_rat_label_and_tooltip(table.health_services_post_disaster,
"Health services functioning since disaster",
"Are there health services functioning for the community since the disaster?")
shn_rat_label_and_tooltip(table.medical_supplies_post_disaster,
"Basic medical supplies available since disaster",
"Are basic medical supplies available for health services since the disaster?")
table.medical_supplies_reserve.label = T("How many days will the supplies last?")
shn_rat_label_and_tooltip(table.health_services_available_types,
"Types of health services available",
"What types of health services are still functioning in the affected area?",
multiple=True)
table.health_services_available_types.requires = IS_EMPTY_OR(IS_IN_SET(rat_health_services_types,
zero=None, multiple=True))
table.health_services_available_types.represent = lambda opt: \
    shn_rat_represent_multiple(rat_health_services_types, opt)
shn_rat_label_and_tooltip(table.staff_number_doctors,
"Number of doctors actively working",
"How many doctors in the health centers are still actively working?")
table.staff_number_doctors.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.staff_number_nurses,
"Number of nurses actively working",
"How many nurses in the health centers are still actively working?")
table.staff_number_nurses.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.staff_number_midwives,
"Number of midwives actively working",
"How many midwives in the health centers are still actively working?")
table.staff_number_midwives.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.health_service_walking_time,
"Walking time to the health service",
"How long does it take you to walk to the health service?")
table.health_service_walking_time.requires = IS_EMPTY_OR(IS_IN_SET(rat_walking_time_opts, zero=None))
table.health_service_walking_time.represent = lambda opt: rat_walking_time_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.health_problems_adults,
"Current type of health problems, adults",
"What types of health problems do people currently have?",
multiple=True)
table.health_problems_adults.requires = IS_EMPTY_OR(IS_IN_SET(rat_health_problems_opts, zero=None, multiple=True))
table.health_problems_adults.represent = lambda opt, set=rat_health_problems_opts: \
shn_rat_represent_multiple(set, opt)
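# Illustration (assumed behaviour -- shn_rat_represent_multiple is defined
# elsewhere): for a stored value covering options 1 and 2 it would render
# something like "Respiratory Infections, Diarrhea".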
table.health_problems_adults_other.label = T("Other current health problems, adults")
shn_rat_label_and_tooltip(table.health_problems_children,
"Current type of health problems, children",
"What types of health problems do children currently have?",
multiple=True)
table.health_problems_children.requires = IS_EMPTY_OR(IS_IN_SET(rat_health_problems_opts, zero=None, multiple=True))
table.health_problems_children.represent = lambda opt, set=rat_health_problems_opts: \
shn_rat_represent_multiple(set, opt)
table.health_problems_children_other.label = T("Other current health problems, children")
shn_rat_label_and_tooltip(table.chronical_illness_cases,
    "People with chronic illnesses",
    "Are there people with chronic illnesses in your community?")
shn_rat_label_and_tooltip(table.chronical_illness_children,
    "Children with chronic illnesses",
    "Are there children with chronic illnesses in your community?")
shn_rat_label_and_tooltip(table.chronical_illness_elderly,
    "Older people with chronic illnesses",
    "Are there older people with chronic illnesses in your community?")
shn_rat_label_and_tooltip(table.chronical_care_sufficient,
"Sufficient care/assistance for chronically ill",
"Are the chronically ill receiving sufficient care and assistance?")
shn_rat_label_and_tooltip(table.malnutrition_present_pre_disaster,
"Malnutrition present prior to disaster",
"Were there cases of malnutrition in this area prior to the disaster?")
shn_rat_label_and_tooltip(table.mmd_present_pre_disaster,
"Micronutrient malnutrition prior to disaster",
"Were there reports or evidence of outbreaks of any micronutrient malnutrition disorders before the emergency?")
shn_rat_label_and_tooltip(table.breast_milk_substitutes_pre_disaster,
"Breast milk substitutes used prior to disaster",
"Were breast milk substitutes used prior to the disaster?")
shn_rat_label_and_tooltip(table.breast_milk_substitutes_post_disaster,
"Breast milk substitutes in use since disaster",
"Are breast milk substitutes being used here since the disaster?")
shn_rat_label_and_tooltip(table.infant_nutrition_alternative,
"Alternative infant nutrition in use",
"Babies who are not being breastfed, what are they being fed on?",
multiple=True)
table.infant_nutrition_alternative.requires = \
IS_EMPTY_OR(IS_IN_SET(rat_infant_nutrition_alternative_opts, zero=None, multiple=True))
table.infant_nutrition_alternative.represent = lambda opt, set=rat_infant_nutrition_alternative_opts: \
shn_rat_represent_multiple(set, opt)
table.infant_nutrition_alternative_other.label = T("Other alternative infant nutrition in use")
shn_rat_label_and_tooltip(table.u5_diarrhea,
"Diarrhea among children under 5",
"Are there cases of diarrhea among children under the age of 5?")
shn_rat_label_and_tooltip(table.u5_diarrhea_rate_48h,
"Approx. number of cases/48h",
"Approximately how many children under 5 with diarrhea in the past 48 hours?"),
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 6 - Nutrition/Food Security -------------------------------------
rat_main_dish_types = {
1: T("Rice"),
2: T("Noodles"),
3: T("Biscuits"),
4: T("Corn"),
5: T("Wheat"),
6: T("Cassava"),
7: T("Cooking Oil")
}
rat_side_dish_types = {
1: T("Salted Fish"),
2: T("Canned Fish"),
3: T("Chicken"),
4: T("Eggs"),
99: T("Other (specify)")
}
rat_food_stock_reserve_opts = {
1: T("1-3 days"),
2: T("4-7 days"),
3: T("8-14 days")
}
rat_food_source_types = {
    1: T("Local market"),
    2: T("Field cultivation"),
    3: T("Food stall"),
    4: T("Animal husbandry"),
    5: T("Raising poultry"),
    99: T("Other (specify)")
}
resourcename = "section6"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("food_stocks_main_dishes", "list:integer"),
Field("food_stocks_side_dishes", "list:integer"),
Field("food_stocks_other_side_dishes"),
Field("food_stocks_reserve", "integer"),
Field("food_sources", "list:integer"),
Field("food_sources_other"),
Field("food_sources_disruption", "boolean"),
Field("food_sources_disruption_details"),
Field("food_assistance_available", "boolean"),
Field("food_assistance_details", "text"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
shn_rat_label_and_tooltip(table.food_stocks_main_dishes,
"Existing food stocks, main dishes",
"What food stocks exist? (main dishes)",
multiple=True)
table.food_stocks_main_dishes.requires = IS_EMPTY_OR(IS_IN_SET(rat_main_dish_types, zero=None, multiple=True))
table.food_stocks_main_dishes.represent = lambda opt, set=rat_main_dish_types: \
shn_rat_represent_multiple(set, opt)
shn_rat_label_and_tooltip(table.food_stocks_side_dishes,
"Existing food stocks, side dishes",
"What food stocks exist? (side dishes)",
multiple=True)
table.food_stocks_side_dishes.requires = IS_EMPTY_OR(IS_IN_SET(rat_side_dish_types, zero=None, multiple=True))
table.food_stocks_side_dishes.represent = lambda opt, set=rat_side_dish_types: \
shn_rat_represent_multiple(set, opt)
table.food_stocks_other_side_dishes.label = T("Other side dishes in stock")
table.food_stocks_reserve.label = T("How long will the food last?")
table.food_stocks_reserve.requires = IS_EMPTY_OR(IS_IN_SET(rat_food_stock_reserve_opts, zero=None))
table.food_stocks_reserve.represent = lambda opt: rat_food_stock_reserve_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.food_sources,
"Usual food sources in the area",
"What are the people's normal ways to obtain food in this area?",
multiple=True)
table.food_sources.requires = IS_EMPTY_OR(IS_IN_SET(rat_food_source_types, zero=None, multiple=True))
table.food_sources.represent = lambda opt, set=rat_food_source_types: \
shn_rat_represent_multiple(set, opt)
table.food_sources_other.label = T("Other ways to obtain food")
shn_rat_label_and_tooltip(table.food_sources_disruption,
"Normal food sources disrupted",
"Have normal food sources been disrupted?")
table.food_sources_disruption_details.label = T("If yes, which and how")
shn_rat_label_and_tooltip(table.food_assistance_available,
"Food assistance available/expected",
"Have the people received or are you expecting any medical or food assistance in the coming days?")
table.food_assistance_details.label = T("If yes, specify what and by whom")
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 7 - Livelihood --------------------------------------------------
rat_income_source_opts = {
1: T("Agriculture"),
2: T("Fishing"),
3: T("Poultry"),
4: T("Casual Labor"),
5: T("Small Trade"),
6: T("Other")
}
rat_expense_types = {
1: T("Education"),
2: T("Health"),
3: T("Food"),
4: T("Hygiene"),
5: T("Shelter"),
6: T("Clothing"),
7: T("Funeral"),
8: T("Alcohol"),
99: T("Other (specify)")
}
rat_cash_source_opts = {
1: T("Family/friends"),
2: T("Government"),
3: T("Bank/micro finance"),
4: T("Humanitarian NGO"),
99: T("Other (specify)")
}
rat_ranking_opts = xrange(1,7)
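# assistance priorities below are ranked on a 1-6 scale (presumably
# 1 = highest priority; the source does not say explicitly)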
resourcename = "section7"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("income_sources_pre_disaster", "list:integer"),
Field("income_sources_post_disaster", "list:integer"),
Field("main_expenses", "list:integer"),
Field("main_expenses_other"),
Field("business_damaged", "boolean"),
Field("business_cash_available", "boolean"),
Field("business_cash_source", "list:integer"),
Field("rank_reconstruction_assistance", "integer"),
Field("rank_farmland_fishing_assistance", "integer"),
Field("rank_poultry_restocking", "integer"),
Field("rank_health_care_assistance", "integer"),
Field("rank_transportation_assistance", "integer"),
Field("other_assistance_needed"),
Field("rank_other_assistance", "integer"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
shn_rat_label_and_tooltip(table.income_sources_pre_disaster,
"Main income sources before disaster",
"What were your main sources of income before the disaster?",
multiple=True)
table.income_sources_pre_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_income_source_opts, zero=None, multiple=True))
table.income_sources_pre_disaster.represent = lambda opt, set=rat_income_source_opts: \
shn_rat_represent_multiple(set, opt)
shn_rat_label_and_tooltip(table.income_sources_post_disaster,
"Current main income sources",
"What are your main sources of income now?",
multiple=True)
table.income_sources_post_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_income_source_opts, zero=None, multiple=True))
table.income_sources_post_disaster.represent = lambda opt, set=rat_income_source_opts: \
shn_rat_represent_multiple(set, opt)
shn_rat_label_and_tooltip(table.main_expenses,
"Current major expenses",
"What do you spend most of your income on now?",
multiple=True)
table.main_expenses.requires = IS_EMPTY_OR(IS_IN_SET(rat_expense_types, zero=None, multiple=True))
table.main_expenses.represent = lambda opt, set=rat_expense_types: \
shn_rat_represent_multiple(set, opt)
table.main_expenses_other.label = T("Other major expenses")
shn_rat_label_and_tooltip(table.business_damaged,
"Business damaged",
"Has your business been damaged in the course of the disaster?")
shn_rat_label_and_tooltip(table.business_cash_available,
"Cash available to restart business",
"Do you have access to cash to restart your business?")
shn_rat_label_and_tooltip(table.business_cash_source,
"Main cash source",
"What are your main sources of cash to restart your business?")
table.business_cash_source.requires = IS_EMPTY_OR(IS_IN_SET(rat_cash_source_opts, zero=None, multiple=True))
table.business_cash_source.represent = lambda opt, set=rat_cash_source_opts: \
shn_rat_represent_multiple(set, opt)
shn_rat_label_and_tooltip(table.rank_reconstruction_assistance,
"Immediate reconstruction assistance, Rank",
"Assistance for immediate repair/reconstruction of houses")
table.rank_reconstruction_assistance.requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))
table.rank_farmland_fishing_assistance.label = T("Farmland/fishing material assistance, Rank")
table.rank_farmland_fishing_assistance.requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))
table.rank_poultry_restocking.label = T("Poultry restocking, Rank")
table.rank_poultry_restocking.requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))
table.rank_health_care_assistance.label = T("Health care assistance, Rank")
table.rank_health_care_assistance.requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))
table.rank_transportation_assistance.label = T("Transportation assistance, Rank")
table.rank_transportation_assistance.requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))
table.other_assistance_needed.label = T("Other assistance needed")
table.rank_other_assistance.label = T("Other assistance, Rank")
table.rank_other_assistance.requires = IS_EMPTY_OR(IS_IN_SET(rat_ranking_opts, zero=None))
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 8 - Education ---------------------------------------------------
rat_schools_salvmat_types = {
1: T("Wooden plank"),
2: T("Zinc roof"),
3: T("Bricks"),
4: T("Wooden poles"),
5: T("Door frame"),
6: T("Window frame"),
7: T("Roof tile"),
999: NOT_APPLICABLE
}
rat_alternative_study_places = {
1: T("Community Centre"),
2: T("Church"),
3: T("Mosque"),
4: T("Open area"),
5: T("Government building"),
6: T("Other (specify)"),
999: NOT_APPLICABLE
}
rat_school_attendance_barriers_opts = {
1: T("School used for other purpose"),
2: T("School destroyed"),
3: T("Lack of school uniform"),
4: T("Lack of transport to school"),
5: T("Children not enrolled in new school"),
6: T("School heavily damaged"),
7: T("Desire to remain with family"),
8: T("Lack of supplies at school"),
9: T("Displaced"),
10: T("Other (specify)"),
999: NOT_APPLICABLE
}
resourcename = "section8"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("schools_total", "integer"),
Field("schools_public", "integer"),
Field("schools_private", "integer"),
Field("schools_religious", "integer"),
Field("schools_destroyed", "integer"),
Field("schools_damaged", "integer"),
Field("schools_salvmat", "list:integer"),
Field("alternative_study_places_available", "boolean"),
Field("alternative_study_places_number", "integer"),
Field("alternative_study_places", "list:integer"),
Field("alternative_study_places_other"),
Field("schools_open_pre_disaster", "integer"),
Field("schools_open_post_disaster", "integer"),
Field("teachers_active_pre_disaster", "integer"),
Field("teachers_affected_by_disaster", "integer"),
Field("children_0612_female", "integer"),
Field("children_0612_male", "integer"),
Field("children_0612_not_in_school_female", "integer"),
Field("children_0612_not_in_school_male", "integer"),
Field("children_1318_female", "integer"),
Field("children_1318_male", "integer"),
Field("children_1318_not_in_school_female", "integer"),
Field("children_1318_not_in_school_male", "integer"),
Field("school_attendance_barriers", "list:integer"),
Field("school_attendance_barriers_other"),
Field("school_assistance_available", "boolean"),
Field("school_assistance_tents_available", "boolean"),
Field("school_assistence_tents_source"),
Field("school_assistance_materials_available", "boolean"),
Field("school_assistance_materials_source"),
Field("school_assistance_other_available", "boolean"),
Field("school_assistance_other"),
Field("school_assistance_other_source"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
# @todo: onvalidation!
table.schools_total.label = T("Total number of schools in affected area")
table.schools_total.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
table.schools_public.label = T("Number of public schools")
table.schools_public.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
table.schools_private.label = T("Number of private schools")
table.schools_private.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
table.schools_religious.label = T("Number of religious schools")
table.schools_religious.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.schools_destroyed,
"Number of schools destroyed/uninhabitable",
"uninhabitable = foundation and structure destroyed")
table.schools_destroyed.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.schools_damaged,
"Number of schools damaged but usable",
"windows broken, cracks in walls, roof slightly damaged")
table.schools_damaged.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.schools_salvmat,
"Salvage material usable from destroyed schools",
"What type of salvage material can be used from destroyed schools?",
multiple=True)
table.schools_salvmat.requires = IS_EMPTY_OR(IS_IN_SET(rat_schools_salvmat_types, zero=None, multiple=True))
table.schools_salvmat.represent = lambda opt, set=rat_schools_salvmat_types: \
shn_rat_represent_multiple(set, opt)
shn_rat_label_and_tooltip(table.alternative_study_places_available,
"Alternative places for studying available",
"Are there alternative places for studying?")
table.alternative_study_places_number.label = T("Number of alternative places for studying")
table.alternative_study_places_number.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.alternative_study_places,
"Alternative places for studying",
"Where are the alternative places for studying?",
multiple=True)
table.alternative_study_places.requires = IS_EMPTY_OR(IS_IN_SET(rat_alternative_study_places,
zero=None, multiple=True))
table.alternative_study_places.represent = lambda opt, set=rat_alternative_study_places: \
shn_rat_represent_multiple(set, opt)
table.alternative_study_places_other.label = T("Other alternative places for study")
shn_rat_label_and_tooltip(table.schools_open_pre_disaster,
"Number of schools open before disaster",
"How many primary/secondary schools were opening prior to the disaster?")
table.schools_open_pre_disaster.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.schools_open_post_disaster,
"Number of schools open now",
"How many of the primary/secondary schools are now open and running a regular schedule of class?")
table.schools_open_post_disaster.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.teachers_active_pre_disaster,
"Number of teachers before disaster",
"How many teachers worked in the schools prior to the disaster?")
table.teachers_active_pre_disaster.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.teachers_affected_by_disaster,
"Number of teachers affected by disaster",
"How many teachers have been affected by the disaster (affected = unable to work)?")
table.teachers_affected_by_disaster.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_0612_female,
"Girls 6-12 yrs in affected area",
"How many primary school age girls (6-12) are in the affected area?")
table.children_0612_female.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_0612_male,
"Boys 6-12 yrs in affected area",
"How many primary school age boys (6-12) are in the affected area?")
table.children_0612_male.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_0612_not_in_school_female,
"Girls 6-12 yrs not attending school",
"How many of the primary school age girls (6-12) in the area are not attending school?")
table.children_0612_not_in_school_female.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_0612_not_in_school_male,
"Boys 6-12 yrs not attending school",
"How many of the primary school age boys (6-12) in the area are not attending school?")
table.children_0612_not_in_school_male.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_1318_female,
"Girls 13-18 yrs in affected area",
"How many secondary school age girls (13-18) are in the affected area?")
table.children_1318_female.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_1318_male,
"Boys 13-18 yrs in affected area",
"How many secondary school age boys (13-18) are in the affected area?")
table.children_1318_male.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_1318_not_in_school_female,
"Girls 13-18 yrs not attending school",
"How many of the secondary school age girls (13-18) in the area are not attending school?")
table.children_1318_not_in_school_female.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.children_1318_not_in_school_male,
"Boys 13-18 yrs not attending school",
"How many of the secondary school age boys (13-18) in the area are not attending school?")
table.children_1318_not_in_school_male.requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, 999999))
shn_rat_label_and_tooltip(table.school_attendance_barriers,
"Factors affecting school attendance",
"What are the factors affecting school attendance?",
multiple=True)
table.school_attendance_barriers.requires = IS_EMPTY_OR(IS_IN_SET(rat_school_attendance_barriers_opts,
zero=None, multiple=True))
table.school_attendance_barriers.represent = lambda opt, set=rat_school_attendance_barriers_opts: \
shn_rat_represent_multiple(set, opt)
table.school_attendance_barriers_other.label = T("Other factors affecting school attendance")
shn_rat_label_and_tooltip(table.school_assistance_available,
"School assistance received/expected",
"Have schools received or are expecting to receive any assistance?")
table.school_assistance_tents_available.label = T("School tents received")
table.school_assistence_tents_source.label = T("School tents, source")
table.school_assistance_materials_available.label = T("Education materials received")
table.school_assistance_materials_source.label = T("Education materials, source")
table.school_assistance_other_available.label = T("Other school assistance received")
table.school_assistance_other.label = T("Other school assistance, details")
table.school_assistance_other_source.label = T("Other school assistance, source")
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# Section 9 - Protection --------------------------------------------------
rat_fuzzy_quantity_opts = {
1: T("None"),
2: T("Few"),
3: T("Some"),
4: T("Many")
}
rat_quantity_opts = {
1: "1-10",
2: "11-50",
3: "51-100",
4: "100+"
}
rat_child_activity_opts = {
1: T("Playing"),
2: T("Domestic chores"),
3: T("School/studying"),
4: T("Doing nothing (no structured activity)"),
5: T("Working or other to provide money/food"),
99: T("Other (specify)")
}
rat_child_activity_post_disaster_opts = rat_child_activity_opts.copy()
rat_child_activity_post_disaster_opts.update({
6: T("Disaster clean-up/repairs")
})
resourcename = "section9"
tablename = "%s_%s" % (module, resourcename)
table = db.define_table(tablename,
assessment_id(),
Field("vulnerable_groups_safe_env", "boolean"),
Field("safety_children_women_affected", "boolean"),
Field("sec_incidents", "boolean"),
Field("sec_incidents_gbv", "boolean"),
Field("sec_current_needs"),
Field("children_separated", "integer"),
Field("children_separated_origin"),
Field("children_missing", "integer"),
Field("children_orphaned", "integer"),
Field("children_unattended", "integer"),
Field("children_disappeared", "integer"),
Field("children_evacuated", "integer"),
Field("children_evacuated_to"),
Field("children_with_older_caregivers", "integer"),
Field("children_in_disabled_homes", "boolean"),
Field("children_in_orphanages", "boolean"),
Field("children_in_boarding_schools", "boolean"),
Field("children_in_juvenile_detention", "boolean"),
Field("children_in_adult_prisons", "boolean"),
Field("people_in_adult_prisons", "boolean"),
Field("people_in_care_homes", "boolean"),
Field("people_in_institutions_est_total", "integer"),
Field("staff_in_institutions_present", "boolean"),
Field("adequate_food_water_in_institutions", "boolean"),
Field("child_activities_u12f_pre_disaster", "list:integer"),
Field("child_activities_u12f_pre_disaster_other"),
Field("child_activities_u12m_pre_disaster", "list:integer"),
Field("child_activities_u12m_pre_disaster_other"),
Field("child_activities_o12f_pre_disaster", "list:integer"),
Field("child_activities_o12f_pre_disaster_other"),
Field("child_activities_o12m_pre_disaster", "list:integer"),
Field("child_activities_o12m_pre_disaster_other"),
Field("child_activities_u12f_post_disaster", "list:integer"),
Field("child_activities_u12f_post_disaster_other"),
Field("child_activities_u12m_post_disaster", "list:integer"),
Field("child_activities_u12m_post_disaster_other"),
Field("child_activities_o12f_post_disaster", "list:integer"),
Field("child_activities_o12f_post_disaster_other"),
Field("child_activities_o12m_post_disaster", "list:integer"),
Field("child_activities_o12m_post_disaster_other"),
Field("coping_activities_elderly", "boolean"),
Field("coping_activities_women", "boolean"),
Field("coping_activities_disabled", "boolean"),
Field("coping_activities_minorities", "boolean"),
Field("coping_activities_adolescent", "boolean"),
Field("current_general_needs", "text"),
comments(),
migrate=migrate, *s3_meta_fields())
table.assessment_id.readable = table.assessment_id.writable = False
shn_rat_label_and_tooltip(table.vulnerable_groups_safe_env,
"Safe environment for vulnerable groups",
"Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?")
shn_rat_label_and_tooltip(table.safety_children_women_affected,
"Safety of children and women affected by disaster",
"Has the safety and security of women and children in your community changed since the emergency?")
shn_rat_label_and_tooltip(table.sec_incidents,
"Known incidents of violence since disaster",
"Do you know of any incidents of violence?")
shn_rat_label_and_tooltip(table.sec_incidents_gbv,
"Known incidents of violence against women/girls",
"Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?")
shn_rat_label_and_tooltip(table.sec_current_needs,
"Needs to reduce vulnerability to violence",
"What should be done to reduce women and children's vulnerability to violence?")
shn_rat_label_and_tooltip(table.children_separated,
"Children separated from their parents/caregivers",
"Do you know of children separated from their parents or caregivers?")
table.children_separated.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_separated.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_separated_origin,
"Origin of the separated children",
"Where are the separated children originally from?")
shn_rat_label_and_tooltip(table.children_missing,
"Parents/Caregivers missing children",
"Do you know of parents/caregivers missing children?")
table.children_missing.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_missing.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_orphaned,
"Children orphaned by the disaster",
"Do you know of children that have been orphaned by the disaster?")
table.children_orphaned.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_orphaned.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_evacuated,
"Children that have been sent to safe places",
"Do you know of children that have been sent to safe places?")
table.children_evacuated.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_evacuated.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_evacuated_to,
"Places the children have been sent to",
"Where have the children been sent?")
shn_rat_label_and_tooltip(table.children_unattended,
"Children living on their own (without adults)",
"Do you know of children living on their own (without adults)?")
table.children_unattended.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_unattended.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_disappeared,
"Children who have disappeared since the disaster",
"Do you know of children that have disappeared without explanation in the period since the disaster?")
table.children_disappeared.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_disappeared.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_with_older_caregivers,
"Older people as primary caregivers of children",
"Do you know of older people who are primary caregivers of children?")
table.children_with_older_caregivers.requires = IS_EMPTY_OR(IS_IN_SET(rat_fuzzy_quantity_opts, zero=None))
table.children_with_older_caregivers.represent = lambda opt: rat_fuzzy_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.children_in_disabled_homes,
"Children in homes for disabled children",
"Are there children living in homes for disabled children in this area?")
shn_rat_label_and_tooltip(table.children_in_orphanages,
"Children in orphanages",
"Are there children living in orphanages in this area?")
shn_rat_label_and_tooltip(table.children_in_boarding_schools,
"Children in boarding schools",
"Are there children living in boarding schools in this area?")
shn_rat_label_and_tooltip(table.children_in_juvenile_detention,
"Children in juvenile detention",
"Are there children living in juvenile detention in this area?")
shn_rat_label_and_tooltip(table.children_in_adult_prisons,
"Children in adult prisons",
"Are there children living in adult prisons in this area?")
shn_rat_label_and_tooltip(table.people_in_adult_prisons,
"Adults in prisons",
"Are there adults living in prisons in this area?")
shn_rat_label_and_tooltip(table.people_in_care_homes,
"Older people in care homes",
"Are there older people living in care homes in this area?")
shn_rat_label_and_tooltip(table.people_in_institutions_est_total,
"Estimated total number of people in institutions",
"What is the estimated total number of people in all of these institutions?")
table.people_in_institutions_est_total.requires = IS_EMPTY_OR(IS_IN_SET(rat_quantity_opts, zero=None))
table.people_in_institutions_est_total.represent = lambda opt: rat_quantity_opts.get(opt, UNKNOWN_OPT)
shn_rat_label_and_tooltip(table.staff_in_institutions_present,
"Staff present and caring for residents",
"Are there staff present and caring for the residents in these institutions?")
shn_rat_label_and_tooltip(table.adequate_food_water_in_institutions,
"Adequate food and water available",
"Is adequate food and water available for these institutions?")
shn_rat_label_and_tooltip(table.child_activities_u12f_pre_disaster,
"Activities of girls <12yrs before disaster",
"How did girls <12yrs spend most of their time prior to the disaster?",
multiple=True)
table.child_activities_u12f_pre_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None, multiple=True))
table.child_activities_u12f_pre_disaster.represent = lambda opt, set=rat_child_activity_opts: \
shn_rat_represent_multiple(set, opt)
table.child_activities_u12f_pre_disaster_other.label = T("Other activities of girls<12yrs before disaster")
shn_rat_label_and_tooltip(table.child_activities_u12m_pre_disaster,
"Activities of boys <12yrs before disaster",
"How did boys <12yrs spend most of their time prior to the disaster?",
multiple=True)
table.child_activities_u12m_pre_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None, multiple=True))
table.child_activities_u12m_pre_disaster.represent = lambda opt, set=rat_child_activity_opts: \
shn_rat_represent_multiple(set, opt)
table.child_activities_u12m_pre_disaster_other.label = T("Other activities of boys <12yrs before disaster")
shn_rat_label_and_tooltip(table.child_activities_o12f_pre_disaster,
"Activities of girls 13-17yrs before disaster",
"How did boys girls 13-17yrs spend most of their time prior to the disaster?",
multiple=True)
table.child_activities_o12f_pre_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None, multiple=True))
table.child_activities_o12f_pre_disaster.represent = lambda opt, set=rat_child_activity_opts: \
shn_rat_represent_multiple(set, opt)
table.child_activities_o12f_pre_disaster_other.label = T("Other activities of girls 13-17yrs before disaster")
shn_rat_label_and_tooltip(table.child_activities_o12m_pre_disaster,
"Activities of boys 13-17yrs before disaster",
"How did boys 13-17yrs spend most of their time prior to the disaster?",
multiple=True)
table.child_activities_o12m_pre_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_opts,
zero=None, multiple=True))
table.child_activities_o12m_pre_disaster.represent = lambda opt, set=rat_child_activity_opts: \
shn_rat_represent_multiple(set, opt)
table.child_activities_o12m_pre_disaster_other.label = T("Other activities of boys 13-17yrs before disaster")
shn_rat_label_and_tooltip(table.child_activities_u12f_post_disaster,
"Activities of girls <12yrs now",
"How do girls <12yrs spend most of their time now?",
multiple=True)
# bind the post-disaster option set (includes "Disaster clean-up/repairs");
# the original bound the pre-disaster set, leaving rat_child_activity_post_disaster_opts unused
table.child_activities_u12f_post_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
    zero=None, multiple=True))
table.child_activities_u12f_post_disaster.represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
    shn_rat_represent_multiple(set, opt)
table.child_activities_u12f_post_disaster_other.label = T("Other activities of girls<12yrs")
shn_rat_label_and_tooltip(table.child_activities_u12m_post_disaster,
"Activities of boys <12yrs now",
"How do boys <12yrs spend most of their time now?",
multiple=True)
table.child_activities_u12m_post_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
    zero=None, multiple=True))
table.child_activities_u12m_post_disaster.represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
    shn_rat_represent_multiple(set, opt)
table.child_activities_u12m_post_disaster_other.label = T("Other activities of boys <12yrs")
shn_rat_label_and_tooltip(table.child_activities_o12f_post_disaster,
"Activities of girls 13-17yrs now",
"How do girls 13-17yrs spend most of their time now?",
multiple=True)
table.child_activities_o12f_post_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
    zero=None, multiple=True))
table.child_activities_o12f_post_disaster.represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
    shn_rat_represent_multiple(set, opt)
table.child_activities_o12f_post_disaster_other.label = T("Other activities of girls 13-17yrs")
shn_rat_label_and_tooltip(table.child_activities_o12m_post_disaster,
"Activities of boys 13-17yrs now",
"How do boys 13-17yrs spend most of their time now?",
multiple=True)
table.child_activities_o12m_post_disaster.requires = IS_EMPTY_OR(IS_IN_SET(rat_child_activity_post_disaster_opts,
    zero=None, multiple=True))
table.child_activities_o12m_post_disaster.represent = lambda opt, set=rat_child_activity_post_disaster_opts: \
    shn_rat_represent_multiple(set, opt)
table.child_activities_o12m_post_disaster_other.label = T("Other activities of boys 13-17yrs")
shn_rat_label_and_tooltip(table.coping_activities_elderly,
"Older people participating in coping activities",
"Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")
shn_rat_label_and_tooltip(table.coping_activities_women,
"Women participating in coping activities",
"Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")
shn_rat_label_and_tooltip(table.coping_activities_disabled,
"Disabled participating in coping activities",
"Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")
shn_rat_label_and_tooltip(table.coping_activities_minorities,
"Minorities participating in coping activities",
"Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)")
shn_rat_label_and_tooltip(table.coping_activities_adolescent,
    "Adolescents participating in coping activities",
    "Do adolescents and youth in your community participate in activities that help them cope with the disaster? (e.g. meetings, religious activities, volunteering in the community clean-up, etc.)")
shn_rat_label_and_tooltip(table.current_general_needs,
"Current greatest needs of vulnerable groups",
"In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?")
# CRUD strings
s3.crud_strings[tablename] = rat_section_crud_strings
s3xrc.model.add_component(module, resourcename,
multiple = False,
joinby = dict(assess_rat="assessment_id"))
s3xrc.model.configure(table, deletable=False)
# -----------------------------------------------------------------------------
def shn_rat_summary(r, **attr):
""" Aggregate reports """
if r.name == "rat":
if r.representation == "html":
return dict()
elif r.representation == "xls":
return None
else:
# Other formats?
raise HTTP(501, body=BADFORMAT)
else:
raise HTTP(501, body=BADMETHOD)
s3xrc.model.set_method(module, "rat",
method="summary",
action=shn_rat_summary)
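# With the method registered above, aggregate reports should be reachable at
# a URL of the form .../assess/rat/summary (sketch -- the exact routing
# depends on the S3 request dispatcher).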
# -----------------------------------------------------------------------------
# END
|
mit
| -4,170,479,478,176,753,000
| 49.84199
| 206
| 0.582585
| false
| 3.836655
| false
| false
| false
|
HybridF5/jacket
|
jacket/api/compute/openstack/compute/legacy_v2/contrib/certificates.py
|
1
|
2777
|
# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from jacket.api.compute.openstack import extensions
import jacket.compute.cert.rpcapi
from jacket.compute import exception
from jacket.i18n import _
authorize = extensions.extension_authorizer('compute', 'certificates')
def _translate_certificate_view(certificate, private_key=None):
return {
'data': certificate,
'private_key': private_key,
}
class CertificatesController(object):
"""The x509 Certificates API controller for the OpenStack API."""
def __init__(self):
self.cert_rpcapi = jacket.compute.cert.rpcapi.CertAPI()
super(CertificatesController, self).__init__()
def show(self, req, id):
"""Return certificate information."""
context = req.environ['compute.context']
authorize(context)
if id != 'root':
msg = _("Only root certificate can be retrieved.")
raise webob.exc.HTTPNotImplemented(explanation=msg)
try:
cert = self.cert_rpcapi.fetch_ca(context,
project_id=context.project_id)
except exception.CryptoCAFileNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
return {'certificate': _translate_certificate_view(cert)}
def create(self, req, body=None):
"""Create a certificate."""
context = req.environ['compute.context']
authorize(context)
pk, cert = self.cert_rpcapi.generate_x509_cert(context,
user_id=context.user_id, project_id=context.project_id)
return {'certificate': _translate_certificate_view(cert, pk)}
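    # Example response shape for show()/create() (values are placeholders):
    #   {'certificate': {'data': '<PEM cert>', 'private_key': '<PEM key>'}}
    # (show() returns private_key=None, since only the CA certificate is fetched.)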
class Certificates(extensions.ExtensionDescriptor):
"""Certificates support."""
name = "Certificates"
alias = "os-certificates"
namespace = ("http://docs.openstack.org/compute/ext/"
"certificates/api/v1.1")
updated = "2012-01-19T00:00:00Z"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-certificates',
CertificatesController(),
member_actions={})
resources.append(res)
return resources
|
apache-2.0
| 5,370,336,433,982,505,000
| 34.151899
| 78
| 0.658264
| false
| 4.213961
| false
| false
| false
|
LaserTron/web-crs
|
control.py
|
1
|
8891
|
import web
import gradebook
import hashlib
import time
#
# Requires a DB with the tables actually used below:
#   'students'    (username, section, password)
#   'instructors' (username, session, password)
#   'sections'    (name, instructor, session)
#   'sessions'    (name, page, state, ultimatum)
# (delUser still references a legacy 'Users' table.)
def sha1digest(s):
salt = "boy this is salty frdew34567uhygfrer6uhgfrtyuhijhbgftrdfg"
ho = hashlib.sha1(s+salt)
return ho.hexdigest()
ctrl = web.database(dbn="sqlite",db="control.db")
ctrl.ctx.db.text_factory = str  # make the sqlite3 driver return bytestrings instead of unicode objects
def isInTable(table,col,entry):
wherestring = '{0}=\"{1}\"'.format(col,entry)
res = ctrl.select(table,where=wherestring)
return bool(res)
def getEntry(table,col,key,ID):
"""
returns value of column from corresponding key/ID. Returns
only one entry.
"""
wherestring = "{0}=\"{1}\"".format(key,ID)
bob = ctrl.select(table,where=wherestring,what=col)
if bool(bob): #Calling bool(bob) depletes the iterator
bob = ctrl.select(table,where=wherestring,what=col)
return bob[0][col]
else:
return None
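# NOTE: these helpers interpolate values straight into SQL strings, so they
# are only safe for trusted input.  Usage sketch (hypothetical row):
#   isInTable("students", "username", "alice")              # -> True
#   getEntry("students", "section", "username", "alice")    # -> "A1"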
def isStudent(user):
return isInTable("students","username",user)
def isInstructor(user):
return isInTable("instructors","username",user)
def getPassHash(user):
"""
Returns the hash of the user's password or returns false if
the user doesn't exist.
"""
emp = lambda x: x==None or x=="" or x.isspace()
if isStudent(user):
paswd = getEntry("students","password","username",user)
if emp(paswd):
return None
else:
return paswd
elif isInstructor(user):
paswd = getEntry("instructors","password","username",user)
if emp(paswd):
return None
else:
return paswd
else:
return False
def setPassword(user,paswd):
"""
Stores a hash of the user's password. Returns false if
the user is not found.
"""
passhash = sha1digest(paswd)
sqldic={}
sqldic['where']="username = \"{0}\"".format(user)
sqldic['password']=passhash
if isStudent(user):
ctrl.update("students",**sqldic)
elif isInstructor(user):
ctrl.update("instructors",**sqldic)
else:
return False
def clearPassword(user):
"""
Deletes the password
"""
sqldic={}
sqldic['where']="username = \"{0}\"".format(user)
sqldic['password']=None
if isStudent(user):
ctrl.update("students",**sqldic)
elif isInstructor(user):
ctrl.update("instructors",**sqldic)
else:
return False
def validatePassword(user,pashash):
return pashash == getPassHash(user)
def addInstructor(user):
user=user.strip()
res = isInTable('instructors','username',user)
if not res:
ctrl.insert('instructors', username=user)
# def getUserSection(user):
# wherestring="username=\"{0}\"".format(user)
# bob=ctrl.select('Users', where=wherestring, what='section')
# return bob[0]['section']
def delUser(user):  # TODO: update -- still targets the legacy 'Users' table
wherestring="username=\"{0}\"".format(user)
ctrl.delete('Users',where=wherestring)
def assignInstructor(instr,section):
wherestring = 'name=\'{0}\''.format(section)
ctrl.update("sections", where=wherestring, instructor=instr)
def assignSession(qu,section):
wherestring = 'name=\'{0}\''.format(section)
ctrl.update("sections", where=wherestring, session=qu)
def getSections():
"""
Returns list of section names
"""
bob= ctrl.select('sections',what='name')
output = []
for i in bob:
output.append(i['name'])
return output
def getAssignedQuiz(sec):
"""
Returns the quiz currently assigned to a section
"""
return getEntry('sections','session','name',sec)
def getInstrSections(instr):
"""
Returns list of sections assigned to an instructor
"""
wherestring = 'instructor = \"{0}\"'.format(instr)
bob= ctrl.select('sections',what='name',where=wherestring)
output = []
for i in bob:
output.append(i['name'])
return output
def addSection(nam):
nam=nam.strip()
res = isInTable('sections','name', nam)
if not res:#no user there
ctrl.insert('sections', name = nam)
def addStudent(user,sec):#adds a student
user=user.strip()
res = isInTable('students','username', user)
if not res:#no user there
ctrl.insert('students',username = user, section = sec)
sec = sec.strip()#ensures section is added as well, if necessary
res = isInTable('sections','name',sec)
if not res:
addSection(sec)
def populateSections():
"""
This method populates the section list from the student roster.
"""
#ISSUE this is a hack
stus = ctrl.select("students")
for i in stus:
addStudent(i["username"],i["section"])
def getStudentsBySec(section):
"""
Returns the list of student usernames in a given section
"""
wherestring = "section = \"{0}\"".format(section)
students = ctrl.select("students",where=wherestring)
output = []
for i in students:
output.append(i['username'])
return output
def sessionAdd(sesname):
"""
Adds an entry to the sessions table with initialized states
"""
if isInTable("sessions","name",sesname):
return None
ctrl.insert("sessions",name = sesname, page=0, state="init")
def getInstrSession(instr):
return getEntry("instructors","session","username",instr)
def setInstrSession(instr,session):
wherestring = "username = \"{0}\"".format(instr)
sqldic={'where':wherestring,'session':session}
ctrl.update("instructors",**sqldic)
def getSessionSection(instr):
sess = getInstrSession(instr)
return getEntry("sections","name","session",sess)
def getSessionStudents(instr):
sec = getSessionSection(instr)
return getStudentsBySec(sec)
def getSessionPage(session):
return getEntry("sessions","page","name",session)
def getSessionState(session):
return getEntry("sessions","state","name",session)
def setSessionState(session,state):
sqldic={
"where" : "name = \"{0}\"".format(session),
"state": state
}
ctrl.update("sessions",**sqldic)
def getStudentSession(user):
sec = getEntry("students","section","username",user)
return getEntry("sections","session","name",sec)
def getStudentState(student):
sess = getStudentSession(student)
return getEntry("sessions","state","name",sess)
def getStudentPage(user):
return getSessionPage(getStudentSession(user))
def getUserSession(user):
if isStudent(user):
return getStudentSession(user)
if isInstructor(user):
return getInstrSession(user)
def getUserPage(user):
sess = getUserSession(user)
return getSessionPage(sess)
def getUserState(user):
sess = getUserSession(user)
return getSessionState(sess)
def updateEntry(table,col,key,ID,newvalue):
"""
Enters newvalue in the column corresponding to the given key/ID pair
"""
wherestring = "{0} = \"{1}\"".format(key,ID)
sqldict={"where":wherestring, col:newvalue}
ctrl.update(table,**sqldict) #**converts dict to keywords
def getQuizLength(session):
"""
Returns the number of questions in the quiz assigned to
session
"""
quizstr = gradebook.getSessionQuestions(session)
quizli = quizstr.split(',')
return len(quizli)
def advanceSession(session):
"""
Increments the question number. Sets the session to finished if
finished.
"""
length = getQuizLength(session)
curpage = getSessionPage(session)
if curpage >= length-1:
wherestring = "name = \"{0}\"".format(session)
sqldict={"where":wherestring,"state":"finished","page":curpage+1}
ctrl.update("sessions",**sqldict)
return False
else:
sqldict={
"where" : "name = \"{0}\"".format(session),
"page" : curpage+1,
"state" : "init"
}
ctrl.update("sessions",**sqldict)
return True
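# Lifecycle sketch: a session starts at page 0 in state "init"; each call to
# advanceSession() bumps the page and resets the state to "init" until the
# last question, after which the state becomes "finished".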
def setUltimatum(instr,duration):
"""
    Set a countdown deadline for the instructor's current session: store
    now + duration (seconds) and switch the session state to "ultimatum".
"""
sess = getInstrSession(instr)
now = time.time()
then = now+duration+1
sqldic={
"where":"name = \"{0}\"".format(sess),
"ultimatum":then,
"state":"ultimatum",
}
ctrl.update("sessions",**sqldic)
def giveTimeLeft(user):
"""
Computes the time left in the ultimatum. If negative,
sets session to closed. Otherwise returns the string representation of
the number of seconds remaining.
"""
if isInstructor(user):
sess = getInstrSession(user)
else:
sess = getStudentSession(user)
timeup = getEntry("sessions","ultimatum","name",sess)
now = time.time()
left = int(timeup-now)
if left < -1:
setSessionState(sess,"closed")
return "closed"
return str(left)
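# Example (hypothetical names): after setUltimatum("prof", 30), repeated
# calls to giveTimeLeft("prof") count down from roughly "30" and return
# "closed" once the deadline has passed.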
|
lgpl-3.0
| 3,575,754,073,379,911,000
| 27.225397
| 79
| 0.637611
| false
| 3.62898
| false
| false
| false
|
kmunve/processgpr
|
core/picks.py
|
1
|
1331
|
'''
The I{PICKS} class handles picked data.
Created on 20.10.2010
@author: Karsten Mueller
'''
class PICKS():
def __init__(self):
self.trace = []
self.twt = []
self.x = []
self.y = []
self.elev = []
self.tell = 0 # counts the picks
self.pltid = 0 # controls if picks are plotted
def append(self, trace, twt, x, y, elev):
self.trace.append(trace)
self.twt.append(twt)
self.x.append(x)
self.y.append(y)
self.elev.append(elev)
self.tell += 1
def save(self, file):
fid = open(file, 'w')
fid.write("#trace, twt, x, y, elev\n")
for n in range(self.tell):
fid.write('%i, %f, %f, %f, %f\n' % (self.trace[n], self.twt[n],
self.x[n], self.y[n],
self.elev[n]))
fid.close()
def load(self, file):
        from numpy import loadtxt, dtype  # builtin int/float suffice for the dtype fields
dt = dtype([('trace', int), ('twt', float),
('x', float), ('y', float), ('elev', float)])
data = loadtxt(file, dtype=dt, delimiter=',')
self.trace = data['trace']
self.twt = data['twt']
self.x = data['x']
self.y = data['y']
self.elev = data['elev']
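# Usage sketch (hypothetical values):
#   p = PICKS()
#   p.append(10, 120.5, 432100.0, 7654321.0, 98.7)
#   p.save("horizon.txt")          # writes "#trace, twt, x, y, elev" + one row per pick
#   q = PICKS(); q.load("horizon.txt")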
|
gpl-2.0
| -3,223,778,338,680,800,000
| 27.340426
| 75
| 0.464313
| false
| 3.33584
| false
| false
| false
|
ReCodEx/monitor
|
monitor/zeromq_connection.py
|
1
|
3194
|
#!/usr/bin/env python3
"""
Handle zeromq socket.
"""
import zmq
import json
class ServerConnection:
"""
    Class responsible for creating a zeromq socket (server) and receiving
    multipart messages from connected clients.  The first frame after the
    sender identity is a channel <ID>; the remaining frames are packed into
    a JSON message that is forwarded to websocket clients subscribed to
    channel <ID>.
"""
def __init__(self, address, port, logger):
"""
Initialize new instance with given address and port.
:param address: String representation of IP address
to listen to or a hostname.
:param port: String port where to listen.
:param logger: System logger
"""
self._logger = logger
context = zmq.Context()
self._receiver = context.socket(zmq.ROUTER)
self._receiver.setsockopt(zmq.IDENTITY, b"recodex-monitor")
address = "tcp://{}:{}".format(address, port)
self._receiver.bind(address)
self._logger.info("zeromq server initialized at {}".format(address))
def start(self, message_callback):
"""
Start receiving messages from underlying zeromq socket.
        :param message_callback: Function to be called when a new message arrives.
            This function should not block for long.  It takes two string
            parameters: the id of the stream and the text of the message.
        :return: True if exited normally (by "exit" message with ID 0), False if
            a socket error occurred.
"""
while True:
# try to receive a message
try:
message = self._receiver.recv_multipart()
self._logger.debug("zeromq server: got message '{}'".format(message))
except Exception as e:
self._logger.error("zeromq server: socket error: {}".format(e))
return False
# split given message
try:
"""
decode the message with following parts:
0 - zeromq identity of sender
1 - byte array with channel id
2 - byte array with message command
3 - byte array with message task_id - only for TASK command
4 - byte array with message task_state - only for TASK command
"""
decoded_message = [item.decode() for item in message[1:]]
client_id = decoded_message[0]
keys = ["command", "task_id", "task_state"]
data = json.dumps(dict(zip(keys, decoded_message[1:])), sort_keys=True)
except ValueError:
continue
if client_id == "0" and data == '{"command": "exit"}':
self._logger.info("zeromq server: got shutdown command")
break
# call registered callback with given data
message_callback(client_id, data)
# after last message (command FINISHED) send also poison pill
# to close listening sockets
if decoded_message[1] == "FINISHED":
message_callback(client_id, None)
return True
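# Client-side sketch (not part of this module; the port and payload values
# are assumptions):
#   import zmq
#   sock = zmq.Context().socket(zmq.DEALER)
#   sock.connect("tcp://127.0.0.1:5555")
#   sock.send_multipart([b"channel-1", b"TASK", b"task-42", b"COMPLETED"])
# start() would then invoke the callback with ("channel-1",
# '{"command": "TASK", "task_id": "task-42", "task_state": "COMPLETED"}').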
|
mit
| 1,098,159,602,580,343,600
| 39.948718
| 87
| 0.576706
| false
| 4.752976
| false
| false
| false
|
any1m1c/ipc20161
|
lista4/ipc_lista4.19.py
|
1
|
1551
|
# TEAM 2
#
# Ana Beatriz Frota - 1615310027
#
# Kylciane Cristiny Lopes Freitas - 1615310052
# Question 19
votos = [0, 0, 0, 0, 0, 0]
sistemas =["1- Windows Server", "2- Unix", "3- Linux","4- Netware","5- Mac OS","6- Outro","0- Sair da enquete"]
continua = True
total = 0
porc_votos = []
while(continua):
print("Qual o melhor sistema operacional para uso em servidores ?")
for i in range(len(sistemas)-1):
print(sistemas[i])
escolha = int(raw_input())
if(escolha == 1):
votos[escolha - 1] += 1
elif(escolha == 2):
votos[escolha - 1] += 1
elif(escolha == 3):
votos[escolha - 1] += 1
elif(escolha == 4):
votos[escolha - 1] += 1
elif(escolha == 5):
votos[escolha - 1] += 1
elif(escolha == 6):
votos[escolha - 1] += 1
elif(escolha == 0):
continua = False
else:
print("digite um valor válido !")
for i in range(len(votos)):
    total += votos[i]
if total == 0:
    total = 1  # avoid ZeroDivisionError when the poll is closed with no votes
for i in range(len(votos)):
    x = round(votos[i] * 100.0 / total, 1)  # float division so percentages are not truncated
    porc_votos.append(x)
print("Sistema Operacional\t Votos\t %")
print("------------------\t -----\t -\n")
mensagem =''
espaco1 = 19
espaco2 = 5
espaco = ' '
for i in range(len(votos)):
campo1 = espaco * (espaco1 - len(sistemas[i]))
campo2 = espaco * (espaco2 - len(str(votos[i])))
mensagem = sistemas[i] + campo1 +"\t" + str(votos[i])+ campo2 + "\t" + str(porc_votos[i])
print(mensagem)
print("\n------------------\t -----\t -")
print("Total:" + "\t"+ str(total))
|
apache-2.0
| -2,458,113,463,606,470,700
| 21.794118
| 111
| 0.53871
| false
| 2.516234
| false
| false
| false
|
TristanCacqueray/tbac-reg
|
main/tools/src/scripts/tbac_get_attr.py
|
1
|
1404
|
#!/usr/bin/python -OO
# -*- coding: utf8 -*-
############################################################################
# (c) 2005-2010 freenode#rsbac
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################
""" Filename: main/tools/src/scripts/tbac_get_attr.py
Project: tbac-reg
Last update: 2009/06/11
Purpose: Get attr command line tools
"""
import tbac
def main():
import sys
if len(sys.argv) < 3:
print "usage: %s flags|range file/dirname(s)"
return -1
for path in sys.argv[2:]:
if sys.argv[1].lower() == "flags":
val = tbac.get_flags(path)
else:
val = tbac.get_range(path)
print "%s: returned '%s'" % (path, val)
if __name__ == "__main__":
main()
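# Invocation sketch (hypothetical path): `tbac_get_attr.py flags /secure/data`
# prints something like "/secure/data: returned '<flags>'".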
|
gpl-2.0
| 8,475,136,004,425,136,000
| 32.428571
| 76
| 0.626781
| false
| 3.6
| false
| false
| false
|
auready/django
|
django/contrib/gis/feeds.py
|
1
|
5765
|
from django.contrib.syndication.views import Feed as BaseFeed
from django.utils.feedgenerator import Atom1Feed, Rss201rev2Feed
class GeoFeedMixin:
"""
This mixin provides the necessary routines for SyndicationFeed subclasses
to produce simple GeoRSS or W3C Geo elements.
"""
def georss_coords(self, coords):
"""
        In GeoRSS, coordinate pairs are ordered lat/lon and separated by a
        single space.  Given a sequence of coordinate pairs, return their
        GeoRSS string representation.
"""
return ' '.join('%f %f' % (coord[1], coord[0]) for coord in coords)
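    # e.g. georss_coords([(-104.99, 39.74)]) -> '39.740000 -104.990000'
    # (each (lon, lat) pair is swapped into lat/lon order; values illustrative)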
def add_georss_point(self, handler, coords, w3c_geo=False):
"""
Adds a GeoRSS point with the given coords using the given handler.
Handles the differences between simple GeoRSS and the more popular
W3C Geo specification.
"""
if w3c_geo:
lon, lat = coords[:2]
handler.addQuickElement('geo:lat', '%f' % lat)
handler.addQuickElement('geo:lon', '%f' % lon)
else:
handler.addQuickElement('georss:point', self.georss_coords((coords,)))
def add_georss_element(self, handler, item, w3c_geo=False):
"""
This routine adds a GeoRSS XML element using the given item and handler.
"""
# Getting the Geometry object.
geom = item.get('geometry')
if geom is not None:
if isinstance(geom, (list, tuple)):
# Special case if a tuple/list was passed in. The tuple may be
# a point or a box
box_coords = None
if isinstance(geom[0], (list, tuple)):
# Box: ( (X0, Y0), (X1, Y1) )
if len(geom) == 2:
box_coords = geom
else:
                        raise ValueError('There should be only two sets of coordinates.')
else:
if len(geom) == 2:
# Point: (X, Y)
self.add_georss_point(handler, geom, w3c_geo=w3c_geo)
elif len(geom) == 4:
# Box: (X0, Y0, X1, Y1)
box_coords = (geom[:2], geom[2:])
else:
                        raise ValueError('There should be only 2 or 4 numeric elements.')
# If a GeoRSS box was given via tuple.
if box_coords is not None:
if w3c_geo:
raise ValueError('Cannot use simple GeoRSS box in W3C Geo feeds.')
handler.addQuickElement('georss:box', self.georss_coords(box_coords))
else:
# Getting the lower-case geometry type.
gtype = str(geom.geom_type).lower()
if gtype == 'point':
self.add_georss_point(handler, geom.coords, w3c_geo=w3c_geo)
else:
if w3c_geo:
raise ValueError('W3C Geo only supports Point geometries.')
# For formatting consistent w/the GeoRSS simple standard:
# http://georss.org/1.0#simple
if gtype in ('linestring', 'linearring'):
handler.addQuickElement('georss:line', self.georss_coords(geom.coords))
elif gtype in ('polygon',):
# Only support the exterior ring.
handler.addQuickElement('georss:polygon', self.georss_coords(geom[0].coords))
else:
raise ValueError('Geometry type "%s" not supported.' % geom.geom_type)
# ### SyndicationFeed subclasses ###
class GeoRSSFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super().rss_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
self.add_georss_element(handler, item)
def add_root_elements(self, handler):
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed)
class GeoAtom1Feed(Atom1Feed, GeoFeedMixin):
def root_attributes(self):
attrs = super().root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
self.add_georss_element(handler, item)
def add_root_elements(self, handler):
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed)
class W3CGeoFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super().rss_attributes()
attrs['xmlns:geo'] = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
self.add_georss_element(handler, item, w3c_geo=True)
def add_root_elements(self, handler):
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed, w3c_geo=True)
# ### Feed subclass ###
class Feed(BaseFeed):
"""
This is a subclass of the `Feed` from `django.contrib.syndication`.
    This allows users to define `geometry(obj)` and/or `item_geometry(item)`
    methods on their own subclasses so that geo-referenced information may be
    placed in the feed.
"""
feed_type = GeoRSSFeed
def feed_extra_kwargs(self, obj):
return {'geometry': self._get_dynamic_attr('geometry', obj)}
def item_extra_kwargs(self, item):
return {'geometry': self._get_dynamic_attr('item_geometry', item)}
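# --- Editor's hedged usage sketch (addition, not part of the Django module) ---
# A minimal Feed subclass wiring the item_geometry() hook described above; the
# City model and its fields are hypothetical stand-ins for a GeoDjango model.
class ExampleCityFeed(Feed):
    title = "Recently added cities"
    link = "/cities/"
    def items(self):
        return City.objects.order_by('-created')[:10]  # hypothetical model
    def item_geometry(self, item):
        # May return a GEOS geometry or a plain (x, y) tuple.
        return item.location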
|
bsd-3-clause
| -6,600,715,152,181,236,000
| 39.598592
| 101
| 0.57294
| false
| 4.077086
| false
| false
| false
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py
|
1
|
3598
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .linked_service import LinkedService
class AmazonRedshiftLinkedService(LinkedService):
"""Linked service for Amazon Redshift.
:param additional_properties: Unmatched properties from the message are
     deserialized to this collection
:type additional_properties: dict[str, object]
:param connect_via: The integration runtime reference.
:type connect_via:
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param type: Constant filled by server.
:type type: str
:param server: The name of the Amazon Redshift server. Type: string (or
Expression with resultType string).
:type server: object
:param username: The username of the Amazon Redshift source. Type: string
(or Expression with resultType string).
:type username: object
:param password: The password of the Amazon Redshift source.
:type password: ~azure.mgmt.datafactory.models.SecureString
:param database: The database name of the Amazon Redshift source. Type:
string (or Expression with resultType string).
:type database: object
:param port: The TCP port number that the Amazon Redshift server uses to
listen for client connections. The default value is 5439. Type: integer
(or Expression with resultType integer).
:type port: object
:param encrypted_credential: The encrypted credential used for
authentication. Credentials are encrypted using the integration runtime
credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object
"""
_validation = {
'type': {'required': True},
'server': {'required': True},
'database': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'server': {'key': 'typeProperties.server', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecureString'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, username=None, password=None, port=None, encrypted_credential=None):
super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
self.server = server
self.username = username
self.password = password
self.database = database
self.port = port
self.encrypted_credential = encrypted_credential
self.type = 'AmazonRedshift'
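# --- Editor's hedged usage sketch (addition, not part of the generated model) ---
# Constructing the linked service with the two required arguments; all values
# are illustrative.
def example_linked_service():
    return AmazonRedshiftLinkedService(
        server='redshift.example.com',
        database='analytics',
        username='report_user',
        port=5439,
    )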
|
mit
| -5,348,006,601,177,244,000
| 46.342105
| 173
| 0.660367
| false
| 4.520101
| false
| false
| false
|
boshmaf/sypy
|
sypy/results.py
|
1
|
2512
|
# SyPy: A Python framework for evaluating graph-based Sybil detection
# algorithms in social and information networks.
#
# Copyright (C) 2013 Yazan Boshmaf
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Results:
def __init__(self, detector):
self.nodes = detector.network.graph.nodes()
self.honests_predicted = detector.honests_predicted
self.honests_truth = detector.honests_truth
self.confusion_matrix = self.__compute_confusion_matrix()
def __compute_confusion_matrix(self):
N = len(self.honests_truth)
P = len(self.nodes) - N
TN = len(
set.intersection(
set(self.honests_truth),
set(self.honests_predicted)
)
)
FN = len(
set.intersection(
set(self.honests_predicted),
(set(self.nodes) - set(self.honests_truth))
)
)
TP = len(
set.intersection(
(set(self.nodes) - set(self.honests_truth)),
(set(self.nodes) - set(self.honests_predicted))
)
)
FP = len(
set.intersection(
set(self.honests_truth),
(set(self.nodes) - set(self.honests_predicted))
)
)
confusion_matrix = {
"N": N,
"P": P,
"TN": TN,
"FN": FN,
"TP": TP,
"FP": FP
}
return confusion_matrix
def accuracy(self):
cm = self.confusion_matrix
return (cm["TP"] + cm["TN"])/(float)(cm["P"] + cm["N"])
def sensitivity(self):
cm = self.confusion_matrix
return cm["TP"]/(float)(cm["TP"]+cm["FN"])
def specificity(self):
cm = self.confusion_matrix
return cm["TN"]/(float)(cm["FP"]+cm["TN"])
|
gpl-3.0
| -5,480,218,016,483,117,000
| 30.4
| 74
| 0.556927
| false
| 3.800303
| false
| false
| false
|
bmaggard/luigi
|
luigi/contrib/bigquery.py
|
1
|
18778
|
# -*- coding: utf-8 -*-
#
# Copyright 2015 Twitter Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections
import logging
import luigi.target
import time
logger = logging.getLogger('luigi-interface')
try:
import httplib2
import oauth2client
from googleapiclient import discovery
from googleapiclient import http
except ImportError:
logger.warning('Bigquery module imported, but google-api-python-client is '
'not installed. Any bigquery task will fail')
class CreateDisposition(object):
CREATE_IF_NEEDED = 'CREATE_IF_NEEDED'
CREATE_NEVER = 'CREATE_NEVER'
class WriteDisposition(object):
WRITE_TRUNCATE = 'WRITE_TRUNCATE'
WRITE_APPEND = 'WRITE_APPEND'
WRITE_EMPTY = 'WRITE_EMPTY'
class QueryMode(object):
INTERACTIVE = 'INTERACTIVE'
BATCH = 'BATCH'
class SourceFormat(object):
CSV = 'CSV'
DATASTORE_BACKUP = 'DATASTORE_BACKUP'
NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
BQDataset = collections.namedtuple('BQDataset', 'project_id dataset_id')
class BQTable(collections.namedtuple('BQTable', 'project_id dataset_id table_id')):
@property
def dataset(self):
return BQDataset(project_id=self.project_id, dataset_id=self.dataset_id)
@property
def uri(self):
return "bq://" + self.project_id + "/" + \
self.dataset.dataset_id + "/" + self.table_id
class BigqueryClient(object):
"""A client for Google BigQuery.
For details of how authentication and the descriptor work, see the
documentation for the GCS client. The descriptor URL for BigQuery is
https://www.googleapis.com/discovery/v1/apis/bigquery/v2/rest
"""
def __init__(self, oauth_credentials=None, descriptor='', http_=None):
http_ = http_ or httplib2.Http()
if not oauth_credentials:
oauth_credentials = oauth2client.client.GoogleCredentials.get_application_default()
if descriptor:
self.client = discovery.build_from_document(descriptor, credentials=oauth_credentials, http=http_)
else:
self.client = discovery.build('bigquery', 'v2', credentials=oauth_credentials, http=http_)
def dataset_exists(self, dataset):
"""Returns whether the given dataset exists.
:param dataset:
:type dataset: BQDataset
"""
try:
self.client.datasets().get(projectId=dataset.project_id,
datasetId=dataset.dataset_id).execute()
except http.HttpError as ex:
if ex.resp.status == 404:
return False
raise
return True
def table_exists(self, table):
"""Returns whether the given table exists.
:param table:
:type table: BQTable
"""
if not self.dataset_exists(table.dataset):
return False
try:
self.client.tables().get(projectId=table.project_id,
datasetId=table.dataset_id,
tableId=table.table_id).execute()
except http.HttpError as ex:
if ex.resp.status == 404:
return False
raise
return True
def make_dataset(self, dataset, raise_if_exists=False, body={}):
"""Creates a new dataset with the default permissions.
:param dataset:
:type dataset: BQDataset
:param raise_if_exists: whether to raise an exception if the dataset already exists.
:raises luigi.target.FileAlreadyExists: if raise_if_exists=True and the dataset exists
"""
try:
self.client.datasets().insert(projectId=dataset.project_id, body=dict(
{'id': '{}:{}'.format(dataset.project_id, dataset.dataset_id)}, **body)).execute()
except http.HttpError as ex:
if ex.resp.status == 409:
if raise_if_exists:
raise luigi.target.FileAlreadyExists()
else:
raise
def delete_dataset(self, dataset, delete_nonempty=True):
"""Deletes a dataset (and optionally any tables in it), if it exists.
:param dataset:
:type dataset: BQDataset
:param delete_nonempty: if true, will delete any tables before deleting the dataset
"""
if not self.dataset_exists(dataset):
return
self.client.datasets().delete(projectId=dataset.project_id,
datasetId=dataset.dataset_id,
deleteContents=delete_nonempty).execute()
def delete_table(self, table):
"""Deletes a table, if it exists.
:param table:
:type table: BQTable
"""
if not self.table_exists(table):
return
self.client.tables().delete(projectId=table.project_id,
datasetId=table.dataset_id,
tableId=table.table_id).execute()
def list_datasets(self, project_id):
"""Returns the list of datasets in a given project.
:param project_id:
:type project_id: str
"""
request = self.client.datasets().list(projectId=project_id,
maxResults=1000)
response = request.execute()
while response is not None:
for ds in response.get('datasets', []):
yield ds['datasetReference']['datasetId']
request = self.client.datasets().list_next(request, response)
if request is None:
break
response = request.execute()
def list_tables(self, dataset):
"""Returns the list of tables in a given dataset.
:param dataset:
:type dataset: BQDataset
"""
request = self.client.tables().list(projectId=dataset.project_id,
datasetId=dataset.dataset_id,
maxResults=1000)
response = request.execute()
while response is not None:
for t in response.get('tables', []):
yield t['tableReference']['tableId']
request = self.client.tables().list_next(request, response)
if request is None:
break
response = request.execute()
def get_view(self, table):
"""Returns the SQL query for a view, or None if it doesn't exist or is not a view.
:param table: The table containing the view.
:type table: BQTable
"""
request = self.client.tables().get(projectId=table.project_id,
datasetId=table.dataset_id,
tableId=table.table_id)
try:
response = request.execute()
except http.HttpError as ex:
if ex.resp.status == 404:
return None
raise
return response['view']['query'] if 'view' in response else None
def update_view(self, table, view):
"""Updates the SQL query for a view.
If the output table exists, it is replaced with the supplied view query. Otherwise a new
table is created with this view.
:param table: The table to contain the view.
:type table: BQTable
:param view: The SQL query for the view.
:type view: str
"""
body = {
'tableReference': {
'projectId': table.project_id,
'datasetId': table.dataset_id,
'tableId': table.table_id
},
'view': {
'query': view
}
}
if self.table_exists(table):
self.client.tables().update(projectId=table.project_id,
datasetId=table.dataset_id,
tableId=table.table_id,
body=body).execute()
else:
self.client.tables().insert(projectId=table.project_id,
datasetId=table.dataset_id,
body=body).execute()
def run_job(self, project_id, body, dataset=None):
"""Runs a bigquery "job". See the documentation for the format of body.
.. note::
You probably don't need to use this directly. Use the tasks defined below.
:param dataset:
:type dataset: BQDataset
"""
if dataset and not self.dataset_exists(dataset):
self.make_dataset(dataset)
new_job = self.client.jobs().insert(projectId=project_id, body=body).execute()
job_id = new_job['jobReference']['jobId']
logger.info('Started import job %s:%s', project_id, job_id)
while True:
status = self.client.jobs().get(projectId=project_id, jobId=job_id).execute()
if status['status']['state'] == 'DONE':
if status['status'].get('errors'):
raise Exception('Bigquery job failed: {}'.format(status['status']['errors']))
return
logger.info('Waiting for job %s:%s to complete...', project_id, job_id)
time.sleep(5.0)
def copy(self,
source_table,
dest_table,
create_disposition=CreateDisposition.CREATE_IF_NEEDED,
write_disposition=WriteDisposition.WRITE_TRUNCATE):
"""Copies (or appends) a table to another table.
:param source_table:
:type source_table: BQTable
:param dest_table:
:type dest_table: BQTable
:param create_disposition: whether to create the table if needed
:type create_disposition: CreateDisposition
:param write_disposition: whether to append/truncate/fail if the table exists
:type write_disposition: WriteDisposition
"""
job = {
"projectId": dest_table.project_id,
"configuration": {
"copy": {
"sourceTable": {
"projectId": source_table.project_id,
"datasetId": source_table.dataset_id,
"tableId": source_table.table_id,
},
"destinationTable": {
"projectId": dest_table.project_id,
"datasetId": dest_table.dataset_id,
"tableId": dest_table.table_id,
},
"createDisposition": create_disposition,
"writeDisposition": write_disposition,
}
}
}
self.run_job(dest_table.project_id, job, dataset=dest_table.dataset)
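# --- Editor's hedged usage sketch (addition, not part of the upstream module) ---
# Copying one table onto another with the client above; the project, dataset
# and table names are illustrative, and the call needs valid Google credentials.
def _example_copy_table():
    client = BigqueryClient()
    source = BQTable(project_id='my-project', dataset_id='staging', table_id='events')
    dest = BQTable(project_id='my-project', dataset_id='prod', table_id='events')
    client.copy(source, dest, write_disposition=WriteDisposition.WRITE_TRUNCATE)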
class BigqueryTarget(luigi.target.Target):
def __init__(self, project_id, dataset_id, table_id, client=None):
self.table = BQTable(project_id=project_id, dataset_id=dataset_id, table_id=table_id)
self.client = client or BigqueryClient()
@classmethod
def from_bqtable(cls, table, client=None):
"""A constructor that takes a :py:class:`BQTable`.
:param table:
:type table: BQTable
"""
return cls(table.project_id, table.dataset_id, table.table_id, client=client)
def exists(self):
return self.client.table_exists(self.table)
def __str__(self):
return str(self.table)
class MixinBigqueryBulkComplete(object):
"""
Allows to efficiently check if a range of BigqueryTargets are complete.
This enables scheduling tasks with luigi range tools.
If you implement a custom Luigi task with a BigqueryTarget output, make sure to also inherit
from this mixin to enable range support.
"""
@classmethod
def bulk_complete(cls, parameter_tuples):
if len(parameter_tuples) < 1:
return
# Instantiate the tasks to inspect them
tasks_with_params = [(cls(p), p) for p in parameter_tuples]
# Grab the set of BigQuery datasets we are interested in
datasets = set([t.output().table.dataset for t, p in tasks_with_params])
logger.info('Checking datasets %s for available tables', datasets)
# Query the available tables for all datasets
client = tasks_with_params[0][0].output().client
available_datasets = filter(client.dataset_exists, datasets)
available_tables = {d: set(client.list_tables(d)) for d in available_datasets}
# Return parameter_tuples belonging to available tables
for t, p in tasks_with_params:
table = t.output().table
if table.table_id in available_tables.get(table.dataset, []):
yield p
class BigqueryLoadTask(MixinBigqueryBulkComplete, luigi.Task):
"""Load data into bigquery from GCS."""
@property
def source_format(self):
"""The source format to use (see :py:class:`SourceFormat`)."""
return SourceFormat.NEWLINE_DELIMITED_JSON
@property
def write_disposition(self):
"""What to do if the table already exists. By default this will fail the job.
See :py:class:`WriteDisposition`"""
return WriteDisposition.WRITE_EMPTY
@property
def schema(self):
"""Schema in the format defined at https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.schema.
If the value is falsy, it is omitted and inferred by bigquery, which only works for CSV inputs."""
return []
@property
def max_bad_records(self):
return 0
def source_uris(self):
"""Source data which should be in GCS."""
return [x.path for x in luigi.task.flatten(self.input())]
def run(self):
output = self.output()
assert isinstance(output, BigqueryTarget), 'Output should be a bigquery target, not %s' % (output)
bq_client = output.client
source_uris = self.source_uris()
assert all(x.startswith('gs://') for x in source_uris)
job = {
'projectId': output.table.project_id,
'configuration': {
'load': {
'destinationTable': {
'projectId': output.table.project_id,
'datasetId': output.table.dataset_id,
'tableId': output.table.table_id,
},
'sourceFormat': self.source_format,
'writeDisposition': self.write_disposition,
'sourceUris': source_uris,
'maxBadRecords': self.max_bad_records,
}
}
}
if self.schema:
job['configuration']['load']['schema'] = {'fields': self.schema}
bq_client.run_job(output.table.project_id, job, dataset=output.table.dataset)
class BigqueryRunQueryTask(MixinBigqueryBulkComplete, luigi.Task):
@property
def write_disposition(self):
"""What to do if the table already exists. By default this will fail the job.
See :py:class:`WriteDisposition`"""
return WriteDisposition.WRITE_TRUNCATE
@property
def create_disposition(self):
"""Whether to create the table or not. See :py:class:`CreateDisposition`"""
return CreateDisposition.CREATE_IF_NEEDED
@property
def query(self):
"""The query, in text form."""
raise NotImplementedError()
@property
def query_mode(self):
"""The query mode. See :py:class:`QueryMode`."""
return QueryMode.INTERACTIVE
def run(self):
output = self.output()
assert isinstance(output, BigqueryTarget), 'Output should be a bigquery target, not %s' % (output)
query = self.query
assert query, 'No query was provided'
bq_client = output.client
logger.info('Launching Query')
logger.info('Query destination: %s (%s)', output, self.write_disposition)
logger.info('Query SQL: %s', query)
job = {
'projectId': output.table.project_id,
'configuration': {
'query': {
'query': query,
'priority': self.query_mode,
'destinationTable': {
'projectId': output.table.project_id,
'datasetId': output.table.dataset_id,
'tableId': output.table.table_id,
},
'allowLargeResults': True,
'createDisposition': self.create_disposition,
'writeDisposition': self.write_disposition,
}
}
}
bq_client.run_job(output.table.project_id, job, dataset=output.table.dataset)
class BigqueryCreateViewTask(luigi.Task):
"""
Creates (or updates) a view in BigQuery.
The output of this task needs to be a BigQueryTarget.
Instances of this class should specify the view SQL in the view property.
If a view already exist in BigQuery at output(), it will be updated.
"""
@property
def view(self):
"""The SQL query for the view, in text form."""
raise NotImplementedError()
def complete(self):
output = self.output()
assert isinstance(output, BigqueryTarget), 'Output must be a bigquery target, not %s' % (output)
if not output.exists():
return False
existing_view = output.client.get_view(output.table)
return existing_view == self.view
def run(self):
output = self.output()
assert isinstance(output, BigqueryTarget), 'Output must be a bigquery target, not %s' % (output)
view = self.view
assert view, 'No view was provided'
logger.info('Create view')
logger.info('Destination: %s', output)
logger.info('View SQL: %s', view)
output.client.update_view(output.table, view)
class ExternalBigqueryTask(MixinBigqueryBulkComplete, luigi.ExternalTask):
"""
An external task for a BigQuery target.
"""
pass
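# --- Editor's hedged usage sketch (addition, not part of the upstream module) ---
# A minimal BigqueryLoadTask subclass; identifiers are hypothetical, requires()
# must yield tasks whose outputs are gs:// targets, and constructing the
# BigqueryTarget needs valid Google credentials at runtime.
class _ExampleLoadTask(BigqueryLoadTask):
    @property
    def schema(self):
        return [
            {'name': 'user_id', 'type': 'STRING', 'mode': 'REQUIRED'},
            {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'},
        ]
    def requires(self):
        # Project-specific: a task whose output() is a GCS target (gs:// path).
        raise NotImplementedError('depends on a project-specific GCS task')
    def output(self):
        return BigqueryTarget('my-project', 'my_dataset', 'my_table')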
|
apache-2.0
| 4,507,529,935,092,633,600
| 32.834234
| 126
| 0.578017
| false
| 4.424599
| false
| false
| false
|
gjhiggins/elixir
|
elixir/collection.py
|
1
|
4508
|
'''
Default entity collection implementation
'''
import sys
import re
class BaseCollection(list):
def __init__(self, entities=None):
list.__init__(self)
if entities is not None:
self.extend(entities)
def extend(self, entities):
for e in entities:
self.append(e)
def clear(self):
del self[:]
def resolve_absolute(self, key, full_path, entity=None, root=None):
if root is None:
root = entity._descriptor.resolve_root
if root:
full_path = '%s.%s' % (root, full_path)
module_path, classname = full_path.rsplit('.', 1)
module = sys.modules[module_path]
res = getattr(module, classname, None)
if res is None:
if entity is not None:
raise Exception("Couldn't resolve target '%s' <%s> in '%s'!"
% (key, full_path, entity.__name__))
else:
raise Exception("Couldn't resolve target '%s' <%s>!"
% (key, full_path))
return res
def __getattr__(self, key):
return self.resolve(key)
# default entity collection
class GlobalEntityCollection(BaseCollection):
def __init__(self, entities=None):
# _entities is a dict of entities keyed on their name.
self._entities = {}
super(GlobalEntityCollection, self).__init__(entities)
def append(self, entity):
'''
Add an entity to the collection.
'''
        super(GlobalEntityCollection, self).append(entity)
existing_entities = self._entities.setdefault(entity.__name__, [])
existing_entities.append(entity)
def resolve(self, key, entity=None):
'''
Resolve a key to an Entity. The optional `entity` argument is the
"source" entity when resolving relationship targets.
'''
# Do we have a fully qualified entity name?
if '.' in key:
return self.resolve_absolute(key, key, entity)
else:
# Otherwise we look in the entities of this collection
res = self._entities.get(key, None)
if res is None:
if entity:
raise Exception("Couldn't resolve target '%s' in '%s'"
% (key, entity.__name__))
else:
raise Exception("This collection does not contain any "
"entity corresponding to the key '%s'!"
% key)
elif len(res) > 1:
raise Exception("'%s' resolves to several entities, you should"
" use the full path (including the full module"
" name) to that entity." % key)
else:
return res[0]
def clear(self):
self._entities = {}
super(GlobalEntityCollection, self).clear()
# backward compatible name
EntityCollection = GlobalEntityCollection
_leading_dots = re.compile('^([.]*).*$')
class RelativeEntityCollection(BaseCollection):
# the entity=None does not make any sense with a relative entity collection
def resolve(self, key, entity):
'''
        Resolve a key to an Entity. The `entity` argument is the "source"
        entity when resolving relationship targets; unlike in the global
        collection, it is required here.
'''
full_path = key
if '.' not in key or key.startswith('.'):
# relative target
# any leading dot is stripped and with each dot removed,
# the entity_module is stripped of one more chunk (starting with
# the last one).
num_dots = _leading_dots.match(full_path).end(1)
full_path = full_path[num_dots:]
chunks = entity.__module__.split('.')
chunkstokeep = len(chunks) - num_dots
if chunkstokeep < 0:
raise Exception("Couldn't resolve relative target "
"'%s' relative to '%s'" % (
key, entity.__module__))
entity_module = '.'.join(chunks[:chunkstokeep])
            if entity_module and entity_module != '__main__':
full_path = '%s.%s' % (entity_module, full_path)
root = ''
else:
root = None
return self.resolve_absolute(key, full_path, entity, root=root)
def __getattr__(self, key):
raise NotImplementedError
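# --- Editor's hedged usage sketch (addition, not part of the upstream module) ---
# Registering two dummy "entities" (any class with a __name__ works for the
# name-keyed lookup) and resolving one back by its short name.
def _example_resolution():
    class Person(object):
        pass
    class Address(object):
        pass
    collection = GlobalEntityCollection()
    collection.append(Person)
    collection.append(Address)
    assert collection.resolve('Person') is Person
    return collection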
|
mit
| -6,948,553,760,769,409,000
| 34.21875
| 79
| 0.537045
| false
| 4.671503
| false
| false
| false
|
a1ezzz/wasp-general
|
wasp_general/os/linux/lvm.py
|
1
|
15583
|
# -*- coding: utf-8 -*-
# wasp_general/os/linux/lvm.py
#
# Copyright (C) 2017 the wasp-general authors and contributors
# <see AUTHORS file>
#
# This file is part of wasp-general.
#
# Wasp-general is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Wasp-general is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with wasp-general. If not, see <http://www.gnu.org/licenses/>.
# TODO: document the code
# TODO: write tests for the code
# noinspection PyUnresolvedReferences
from wasp_general.version import __author__, __version__, __credits__, __license__, __copyright__, __email__
# noinspection PyUnresolvedReferences
from wasp_general.version import __status__
import subprocess
import os
import math
from wasp_general.verify import verify_type, verify_value
from wasp_general.os.linux.mounts import WMountPoint
class WLVMInfoCommand:
""" This is a helper, with which it is easier to call for pvdisplay, vgdisplay or lvdisplay program.
This class uses subprocess.check_output method for a program calling. And when non-zero code is returned by
the program, an subprocess.CalledProcessError exception is raised. There is a timeout for a program to be
complete. If a program wasn't completed for that period of time, subprocess.TimeoutExpired exception is
raised
"""
__lvm_cmd_default_timeout__ = 3
""" Default timeout for command to process
"""
@verify_type(command=str, fields_count=int, cmd_timeout=(int, float, None), sudo=bool)
@verify_value(cmd_timeout=lambda x: x is None or x > 0)
def __init__(self, command, fields_count, cmd_timeout=None, sudo=False):
""" Create new command
:param command: program to execute
:param fields_count: fields in a program output
:param cmd_timeout: timeout for a program (if it is None - then default value is used)
:param sudo: flag - whether to run this program with sudo or not
"""
self.__command = command
self.__fields_count = fields_count
self.__cmd_timeout = cmd_timeout if cmd_timeout is not None else self.__lvm_cmd_default_timeout__
self.__sudo = sudo
def command(self):
""" Return target program
:return: str
"""
return self.__command
def fields_count(self):
""" Return number of fields in a program output
:return: int
"""
return self.__fields_count
def cmd_timeout(self):
""" Timeout for a program to complete
:return: int, float
"""
return self.__cmd_timeout
def sudo(self):
""" Return 'sudo' flag (whether to run this program with sudo or not)
:return: bool
"""
return self.__sudo
@verify_type(name=(str, None))
def lvm_info(self, name=None):
""" Call a program
		:param name: if specified, the program will return information for that lvm-entity only;
		otherwise all available entries are returned
:return: tuple of str (fields)
"""
cmd = [] if self.sudo() is False else ['sudo']
cmd.extend([self.command(), '-c'])
if name is not None:
cmd.append(name)
output = subprocess.check_output(cmd, timeout=self.cmd_timeout())
output = output.decode()
result = []
fields_count = self.fields_count()
for line in output.split('\n'):
line = line.strip()
fields = line.split(':')
if len(fields) == fields_count:
result.append(fields)
if name is not None and len(result) != 1:
raise RuntimeError('Unable to parse command result')
return tuple(result)
class WLVMInfo:
""" Basic class for actual LVM information. This class creates :class:`.WLVMInfoCommand` object
which may be called on an object creation (it depends on constructor parameters)
"""
__lvm_info_cmd_timeout__ = 3
""" Timeout for a program to complete
"""
@verify_type('paranoid', command=str, fields_count=int, sudo=bool)
@verify_type(lvm_entity=(str, tuple, list, set))
@verify_value(lvm_entity=lambda x: len(x) > 0 if isinstance(x, str) else True)
def __init__(self, command, fields_count, lvm_entity, sudo=False):
""" Create new info-object
:param command: same as command in :meth:`.WLVMInfoCommand.__init__`
:param fields_count: same as fields_count in :meth:`.WLVMInfoCommand.__init__`
:param lvm_entity: if this is a list/tuple/set - then it is a collection of fields (collection length \
must be the same as 'fields_count'). If it is a string, then command is executed to get corresponding \
fields
:param sudo: same as sudo in :meth:`.WLVMInfoCommand.__init__`
"""
self.__lvm_command = WLVMInfoCommand(
command, fields_count, cmd_timeout=self.__class__.__lvm_info_cmd_timeout__, sudo=sudo
)
if isinstance(lvm_entity, (tuple, list, set)) is True:
if len(lvm_entity) != fields_count:
raise ValueError(
'Invalid lvm entity fields count: %i (expected: %i)' %
(len(lvm_entity), fields_count)
)
self.__lvm_entity = tuple(lvm_entity)
else:
			self.__lvm_entity = tuple(self.lvm_command().lvm_info(lvm_entity)[0])
def lvm_command(self):
""" Return LVM-command object
:return: WLVMInfoCommand
"""
return self.__lvm_command
def lvm_entity(self):
""" Return object fields
:return: tuple of str (fields)
"""
return self.__lvm_entity
class WPhysicalVolume(WLVMInfo):
""" Class represent a physical volume
"""
@verify_type('paranoid', physical_volume=(str, tuple, list, set), sudo=bool)
@verify_value('paranoid', physical_volume=lambda x: len(x) > 0 if isinstance(x, str) else True)
def __init__(self, physical_volume, sudo=False):
""" Create new physical volume descriptor
:param physical_volume: same as 'lvm_entity' in :meth:`.WLVMInfo.__init__`
:param sudo: same as 'sudo' in :meth:`.WLVMInfo.__init__`
"""
WLVMInfo.__init__(self, 'pvdisplay', 12, physical_volume, sudo=sudo)
def all(self):
""" Return every physical volume in the system
:return: tuple of WPhysicalVolume
"""
return tuple([WPhysicalVolume(x) for x in self.lvm_command().lvm_info()])
def device_name(self):
""" Return physical volume device name
:return: str
"""
return self.lvm_entity()[0]
def volume_group(self):
""" Return related volume group name (may be empty string if this volume is not allocated to any)
:return: str
"""
return self.lvm_entity()[1]
def sectors_count(self):
""" Return physical volume size in sectors
:return: int
"""
return int(self.lvm_entity()[2])
def extent_size(self):
""" Return physical extent size in kilobytes (may have 0 value if this volume is not allocated to any)
:return: int
"""
return int(self.lvm_entity()[7])
def total_extents(self):
""" Return total number of physical extents (may have 0 value if this volume is not allocated to any)
:return: int
"""
return int(self.lvm_entity()[8])
def free_extents(self):
""" Return free number of physical extents (may have 0 value if this volume is not allocated to any)
:return: int
"""
return int(self.lvm_entity()[9])
def allocated_extents(self):
""" Return allocated number of physical extents (may have 0 value if this volume is not allocated to \
any)
:return: int
"""
return int(self.lvm_entity()[10])
def uuid(self):
""" Return physical volume UUID
:return: str
"""
return self.lvm_entity()[11]
class WVolumeGroup(WLVMInfo):
""" Class represent a volume group
"""
@verify_type('paranoid', volume_group=(str, tuple, list, set), sudo=bool)
@verify_value('paranoid', volume_group=lambda x: len(x) > 0 if isinstance(x, str) else True)
def __init__(self, volume_group, sudo=False):
""" Create new volume group descriptor
:param volume_group: same as 'lvm_entity' in :meth:`.WLVMInfo.__init__`
:param sudo: same as 'sudo' in :meth:`.WLVMInfo.__init__`
"""
WLVMInfo.__init__(self, 'vgdisplay', 17, volume_group, sudo=sudo)
def all(self):
""" Return every volume group in the system
:return: tuple of WVolumeGroup
"""
return tuple([WVolumeGroup(x) for x in self.lvm_command().lvm_info()])
def group_name(self):
""" Return volume group name
:return: str
"""
return self.lvm_entity()[0]
def group_access(self):
""" Return volume group access
:return: str
"""
return self.lvm_entity()[1]
def maximum_logical_volumes(self):
""" Return maximum number of logical volumes (0 - for unlimited)
:return: int
"""
return int(self.lvm_entity()[4])
def logical_volumes(self):
""" Return current number of logical volumes
:return: int
"""
return int(self.lvm_entity()[5])
def opened_logical_volumes(self):
""" Return open count of all logical volumes in this volume group
:return: int
"""
return int(self.lvm_entity()[6])
def maximum_physical_volumes(self):
""" Return maximum number of physical volumes (0 - for unlimited)
:return: int
"""
return int(self.lvm_entity()[8])
def physical_volumes(self):
""" Return current number of physical volumes
:return: int
"""
return int(self.lvm_entity()[9])
def actual_volumes(self):
""" Return actual number of physical volumes
:return: int
"""
return int(self.lvm_entity()[10])
def size(self):
""" Return size of volume group in kilobytes
:return: int
"""
return int(self.lvm_entity()[11])
def extent_size(self):
""" Return physical extent size in kilobytes
:return: int
"""
return int(self.lvm_entity()[12])
def total_extents(self):
""" Return total number of physical extents for this volume group
:return: int
"""
return int(self.lvm_entity()[13])
def allocated_extents(self):
""" Return allocated number of physical extents for this volume group
:return: int
"""
return int(self.lvm_entity()[14])
def free_extents(self):
""" Return free number of physical extents for this volume group
:return: int
"""
return int(self.lvm_entity()[15])
def uuid(self):
""" Return UUID of volume group
:return: str
"""
return self.lvm_entity()[16]
class WLogicalVolume(WLVMInfo):
""" Class represent a logical volume
"""
__lvm_snapshot_create_cmd_timeout__ = 3
""" Timeout for snapshot creation command to complete
"""
__lvm_snapshot_remove_cmd_timeout__ = 3
""" Timeout for snapshot removing command to complete
"""
__lvm_snapshot_check_cmd_timeout__ = 3
""" Timeout for snapshot checking (getting parameters) command to complete
"""
__snapshot_maximum_allocation__ = 99.9
""" Maximum space usage for snapshot, till that value snapshot is treated as valid
"""
@verify_type('paranoid', logical_volume=(str, tuple, list, set), sudo=bool)
@verify_value('paranoid', logical_volume=lambda x: len(x) > 0 if isinstance(x, str) else True)
def __init__(self, logical_volume, sudo=False):
""" Create new logical volume descriptor
:param logical_volume: same as 'lvm_entity' in :meth:`.WLVMInfo.__init__`
:param sudo: same as 'sudo' in :meth:`.WLVMInfo.__init__`
"""
WLVMInfo.__init__(self, 'lvdisplay', 13, logical_volume, sudo=sudo)
def all(self):
""" Return every logical volume in the system
:return: tuple of WLogicalVolume
"""
return tuple([WLogicalVolume(x) for x in self.lvm_command().lvm_info()])
def volume_path(self):
""" Return logical volume path
:return: str
"""
return self.lvm_entity()[0]
def volume_name(self):
""" Return logical volume name
:return: str
"""
return os.path.basename(self.volume_path())
def volume_group_name(self):
""" Return volume group name
:return: str
"""
return self.lvm_entity()[1]
def volume_group(self):
""" Return volume group
:return: WVolumeGroup
"""
return WVolumeGroup(self.volume_group_name(), sudo=self.lvm_command().sudo())
def sectors_count(self):
""" Return logical volume size in sectors
:return: int
"""
return int(self.lvm_entity()[6])
def extents_count(self):
""" Return current logical extents associated to logical volume
:return: int
"""
return int(self.lvm_entity()[7])
def device_number(self):
""" Return tuple of major and minor device number of logical volume
:return: tuple of int
"""
return int(self.lvm_entity()[11]), int(self.lvm_entity()[12])
def uuid(self):
""" Return UUID of logical volume
:return: str
"""
uuid_file = '/sys/block/%s/dm/uuid' % os.path.basename(os.path.realpath(self.volume_path()))
lv_uuid = open(uuid_file).read().strip()
if lv_uuid.startswith('LVM-') is True:
return lv_uuid[4:]
return lv_uuid
@verify_type(snapshot_size=(int, float), snapshot_suffix=str)
@verify_value(snapshot_size=lambda x: x > 0, snapshot_suffix=lambda x: len(x) > 0)
def create_snapshot(self, snapshot_size, snapshot_suffix):
""" Create snapshot for this logical volume.
:param snapshot_size: size of newly created snapshot volume. This size is a fraction of the source \
logical volume space (of this logical volume)
:param snapshot_suffix: suffix for logical volume name (base part is the same as the original volume \
name)
:return: WLogicalVolume
"""
size_extent = math.ceil(self.extents_count() * snapshot_size)
size_kb = self.volume_group().extent_size() * size_extent
snapshot_name = self.volume_name() + snapshot_suffix
lvcreate_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
lvcreate_cmd.extend([
'lvcreate', '-L', '%iK' % size_kb, '-s', '-n', snapshot_name, '-p', 'r', self.volume_path()
])
subprocess.check_output(lvcreate_cmd, timeout=self.__class__.__lvm_snapshot_create_cmd_timeout__)
return WLogicalVolume(self.volume_path() + snapshot_suffix, sudo=self.lvm_command().sudo())
def remove_volume(self):
""" Remove this volume
:return: None
"""
lvremove_cmd = ['sudo'] if self.lvm_command().sudo() is True else []
lvremove_cmd.extend(['lvremove', '-f', self.volume_path()])
subprocess.check_output(lvremove_cmd, timeout=self.__class__.__lvm_snapshot_remove_cmd_timeout__)
def snapshot_allocation(self):
""" Return allocated size (fraction of total snapshot volume space). If this is not a snapshot volume,
		then a RuntimeError exception is raised.
:return: float
"""
check_cmd = ['lvs', self.volume_path(), '-o', 'snap_percent', '--noheadings']
output = subprocess.check_output(check_cmd, timeout=self.__class__.__lvm_snapshot_check_cmd_timeout__)
output = output.decode().strip()
if len(output) == 0:
raise RuntimeError('Unable to check general logical volume')
return float(output.replace(',', '.', 1))
def snapshot_corrupted(self):
""" Check if this snapshot volume is corrupted or not
:return: bool (True if corrupted, False - otherwise)
"""
return self.snapshot_allocation() > self.__class__.__snapshot_maximum_allocation__
@classmethod
@verify_type('paranoid', file_path=str, sudo=bool)
@verify_value('paranoid', file_path=lambda x: len(x) > 0)
def logical_volume(cls, file_path, sudo=False):
""" Return logical volume that stores the given path
:param file_path: target path to search
:param sudo: same as 'sudo' in :meth:`.WLogicalVolume.__init__`
:return: WLogicalVolume or None (if file path is outside current mount points)
"""
mp = WMountPoint.mount_point(file_path)
if mp is not None:
name_file = '/sys/block/%s/dm/name' % mp.device_name()
if os.path.exists(name_file):
lv_path = '/dev/mapper/%s' % open(name_file).read().strip()
return WLogicalVolume(lv_path, sudo=sudo)
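# --- Editor's hedged usage sketch (addition, not part of the upstream module) ---
# Looking up the logical volume behind a path and creating a 10% snapshot;
# this only works on a Linux host with LVM and, depending on setup, sudo rights.
def _example_snapshot(path='/var/backups', sudo=True):
	lv = WLogicalVolume.logical_volume(path, sudo=sudo)
	if lv is None:
		return None  # the path does not live on an LVM logical volume
	snapshot = lv.create_snapshot(0.1, '-backup-snapshot')
	try:
		return snapshot.snapshot_allocation()
	finally:
		snapshot.remove_volume()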
|
lgpl-3.0
| -1,874,695,065,266,353,400
| 28.072761
| 108
| 0.687416
| false
| 3.305685
| false
| false
| false
|
mountainpenguin/BySH
|
server/lib/tornado/web.py
|
1
|
85300
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
import uuid
from lib.tornado import escape
from lib.tornado import httputil
from lib.tornado import locale
from lib.tornado.log import access_log, app_log, gen_log
from lib.tornado import stack_context
from lib.tornado import template
from lib.tornado.escape import utf8, _unicode
from lib.tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
class RequestHandler(object):
"""Subclass this class and define `get()` or `post()` to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable ``SUPPORTED_METHODS`` in your
`RequestHandler` subclass.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
"OPTIONS")
_template_loaders = {} # {path: template.BaseLoader}
_template_loader_lock = threading.Lock()
_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
def __init__(self, application, request, **kwargs):
super(RequestHandler, self).__init__()
self.application = application
self.request = request
self._headers_written = False
self._finished = False
self._auto_finish = True
self._transforms = None # will be set in _execute
self.path_args = None
self.path_kwargs = None
self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
application.ui_methods.items())
# UIModules are available as both `modules` and `_modules` in the
# template namespace. Historically only `modules` was available
# but could be clobbered by user additions to the namespace.
# The template {% module %} directive looks in `_modules` to avoid
# possible conflicts.
self.ui["_modules"] = ObjectDict((n, self._ui_module(n, m)) for n, m in
application.ui_modules.items())
self.ui["modules"] = self.ui["_modules"]
self.clear()
# Check since connection is not available in WSGI
if getattr(self.request, "connection", None):
self.request.connection.set_close_callback(
self.on_connection_close)
self.initialize(**kwargs)
def initialize(self):
"""Hook for subclass initialization.
A dictionary passed as the third argument of a url spec will be
supplied as keyword arguments to initialize().
Example::
class ProfileHandler(RequestHandler):
def initialize(self, database):
self.database = database
def get(self, username):
...
app = Application([
(r'/user/(.*)', ProfileHandler, dict(database=database)),
])
"""
pass
@property
def settings(self):
"""An alias for `self.application.settings <Application.settings>`."""
return self.application.settings
def head(self, *args, **kwargs):
raise HTTPError(405)
def get(self, *args, **kwargs):
raise HTTPError(405)
def post(self, *args, **kwargs):
raise HTTPError(405)
def delete(self, *args, **kwargs):
raise HTTPError(405)
def patch(self, *args, **kwargs):
raise HTTPError(405)
def put(self, *args, **kwargs):
raise HTTPError(405)
def options(self, *args, **kwargs):
raise HTTPError(405)
def prepare(self):
"""Called at the beginning of a request before `get`/`post`/etc.
Override this method to perform common initialization regardless
of the request method.
"""
pass
def on_finish(self):
"""Called after the end of a request.
Override this method to perform cleanup, logging, etc.
This method is a counterpart to `prepare`. ``on_finish`` may
not produce any output, as it is called after the response
has been sent to the client.
"""
pass
def on_connection_close(self):
"""Called in async handlers if the client closed the connection.
Override this to clean up resources associated with
long-lived connections. Note that this method is called only if
the connection was closed during asynchronous processing; if you
need to do cleanup after every request override `on_finish`
instead.
Proxies may keep a connection open for a time (perhaps
indefinitely) after the client has gone away, so this method
may not be called promptly after the end user closes their
connection.
"""
pass
def clear(self):
"""Resets all headers and content for this response."""
self._headers = httputil.HTTPHeaders({
"Server": "TornadoServer/%s" % tornado.version,
"Content-Type": "text/html; charset=UTF-8",
"Date": httputil.format_timestamp(time.gmtime()),
})
self.set_default_headers()
if not self.request.supports_http_1_1():
if self.request.headers.get("Connection") == "Keep-Alive":
self.set_header("Connection", "Keep-Alive")
self._write_buffer = []
self._status_code = 200
self._reason = httputil.responses[200]
def set_default_headers(self):
"""Override this to set HTTP headers at the beginning of the request.
For example, this is the place to set a custom ``Server`` header.
Note that setting such headers in the normal flow of request
processing may not do what you want, since headers may be reset
during error handling.
"""
pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
def get_status(self):
"""Returns the status code for our response."""
return self._status_code
def set_header(self, name, value):
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
HTTP specification. If the value is not a string, we convert it to
a string. All header values are then encoded as UTF-8.
"""
self._headers[name] = self._convert_header_value(value)
def add_header(self, name, value):
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
to return multiple values for the same header.
"""
self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if len(value) > 4000 or re.search(br"[\x00-\x1f]", value):
raise ValueError("Unsafe header value %r", value)
return value
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we throw an HTTP 400 exception if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise HTTPError(400, "Missing argument %s" % name)
return default
return args[-1]
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = []
for v in self.request.arguments.get(name, []):
v = self.decode_argument(v, name=name)
if isinstance(v, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
v = RequestHandler._remove_control_chars_regex.sub(" ", v)
if strip:
v = v.strip()
values.append(v)
return values
def decode_argument(self, value, name=None):
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
By default, this method decodes the argument as utf-8 and returns
a unicode string, but this may be overridden in subclasses.
This method is used as a filter for both `get_argument()` and for
values extracted from the url and passed to `get()`/`post()`/etc.
The name of the argument is provided if known, but may be None
(e.g. for unnamed groups in the url regex).
"""
return _unicode(value)
@property
def cookies(self):
"""An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`."""
return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
# The cookie library only accepts type str, in both python 2 and 3
name = escape.native_str(name)
value = escape.native_str(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = Cookie.SimpleCookie()
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
morsel = self._new_cookie[name]
if domain:
morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
morsel["expires"] = httputil.format_timestamp(expires)
if path:
morsel["path"] = path
for k, v in kwargs.items():
if k == 'max_age':
k = 'max-age'
morsel[k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
def clear_all_cookies(self):
"""Deletes all the cookies the user sent with this request."""
for name in self.request.cookies:
self.clear_cookie(name)
def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
"""Signs and timestamps a cookie so it cannot be forged.
You must specify the ``cookie_secret`` setting in your Application
to use this method. It should be a long, random sequence of bytes
to be used as the HMAC secret for the signature.
To read a cookie set with this method, use `get_secure_cookie()`.
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
Secure cookies may contain arbitrary byte values, not just unicode
strings (unlike regular cookies)
"""
self.set_cookie(name, self.create_signed_value(name, value),
expires_days=expires_days, **kwargs)
def create_signed_value(self, name, value):
"""Signs and timestamps a string so it cannot be forged.
Normally used via set_secure_cookie, but provided as a separate
method for non-cookie uses. To decode a value not stored
as a cookie use the optional value argument to get_secure_cookie.
"""
self.require_setting("cookie_secret", "secure cookies")
return create_signed_value(self.application.settings["cookie_secret"],
name, value)
def get_secure_cookie(self, name, value=None, max_age_days=31):
"""Returns the given signed cookie if it validates, or None.
The decoded cookie value is returned as a byte string (unlike
`get_cookie`).
"""
self.require_setting("cookie_secret", "secure cookies")
if value is None:
value = self.get_cookie(name)
return decode_signed_value(self.application.settings["cookie_secret"],
name, value, max_age_days=max_age_days)
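    # --- Editor's hedged usage sketch (comment-only; not part of Tornado) ---
    # Typical pairing of the two methods above inside a handler's get():
    #     user_id = self.get_secure_cookie("user_id")   # bytes or None
    #     if user_id is None:
    #         self.set_secure_cookie("user_id", "42")
    # The application must be constructed with a ``cookie_secret`` setting for
    # either call to work.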
def redirect(self, url, permanent=False, status=None):
"""Sends a redirect to the given (optionally relative) URL.
If the ``status`` argument is specified, that value is used as the
HTTP status code; otherwise either 301 (permanent) or 302
(temporary) is chosen based on the ``permanent`` argument.
The default is 302 (temporary).
"""
if self._headers_written:
raise Exception("Cannot redirect after headers have been written")
if status is None:
status = 301 if permanent else 302
else:
assert isinstance(status, int) and 300 <= status <= 399
self.set_status(status)
# Remove whitespace
url = re.sub(br"[\x00-\x20]+", "", utf8(url))
self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
url))
self.finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be ``application/json``.
(if you want to send JSON as a different ``Content-Type``, call
set_header *after* calling write()).
Note that lists are not converted to JSON because of a potential
cross-site security vulnerability. All JSON output should be
wrapped in a dictionary. More details at
http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx
"""
if self._finished:
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
chunk = utf8(chunk)
self._write_buffer.append(chunk)
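    # Illustrative sketch (not part of Tornado): per the docstring above,
    # writing a dict serializes it with escape.json_encode and marks the
    # response as JSON. Handler name is hypothetical.
    #
    #     class StatusHandler(RequestHandler):
    #         def get(self):
    #             # Sends {"status": "ok"} with
    #             # Content-Type: application/json; charset=UTF-8
    #             self.write({"status": "ok"})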
def render(self, template_name, **kwargs):
"""Renders the template with the given arguments as the response."""
html = self.render_string(template_name, **kwargs)
# Insert the additional JS and CSS added by the modules on the page
js_embed = []
js_files = []
css_embed = []
css_files = []
html_heads = []
html_bodies = []
for module in getattr(self, "_active_modules", {}).values():
embed_part = module.embedded_javascript()
if embed_part:
js_embed.append(utf8(embed_part))
file_part = module.javascript_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
js_files.append(file_part)
else:
js_files.extend(file_part)
embed_part = module.embedded_css()
if embed_part:
css_embed.append(utf8(embed_part))
file_part = module.css_files()
if file_part:
if isinstance(file_part, (unicode_type, bytes_type)):
css_files.append(file_part)
else:
css_files.extend(file_part)
head_part = module.html_head()
if head_part:
html_heads.append(utf8(head_part))
body_part = module.html_body()
if body_part:
html_bodies.append(utf8(body_part))
def is_absolute(path):
return any(path.startswith(x) for x in ["/", "http:", "https:"])
if js_files:
# Maintain order of JavaScript files given by modules
paths = []
unique_paths = set()
for path in js_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
js = ''.join('<script src="' + escape.xhtml_escape(p) +
'" type="text/javascript"></script>'
for p in paths)
sloc = html.rindex(b'</body>')
html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
if js_embed:
js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
b'\n'.join(js_embed) + b'\n//]]>\n</script>'
sloc = html.rindex(b'</body>')
html = html[:sloc] + js + b'\n' + html[sloc:]
if css_files:
paths = []
unique_paths = set()
for path in css_files:
if not is_absolute(path):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths)
hloc = html.index(b'</head>')
html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
if css_embed:
css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
b'\n</style>'
hloc = html.index(b'</head>')
html = html[:hloc] + css + b'\n' + html[hloc:]
if html_heads:
hloc = html.index(b'</head>')
html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
if html_bodies:
hloc = html.index(b'</body>')
html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
self.finish(html)
def render_string(self, template_name, **kwargs):
"""Generate the given template with the given arguments.
We return the generated byte string (in utf8). To generate and
write a template as a response, use render() above.
"""
# If no template_path is specified, use the path of the calling file
template_path = self.get_template_path()
if not template_path:
frame = sys._getframe(0)
web_file = frame.f_code.co_filename
while frame.f_code.co_filename == web_file:
frame = frame.f_back
template_path = os.path.dirname(frame.f_code.co_filename)
with RequestHandler._template_loader_lock:
if template_path not in RequestHandler._template_loaders:
loader = self.create_template_loader(template_path)
RequestHandler._template_loaders[template_path] = loader
else:
loader = RequestHandler._template_loaders[template_path]
t = loader.load(template_name)
namespace = self.get_template_namespace()
namespace.update(kwargs)
return t.generate(**namespace)
def get_template_namespace(self):
"""Returns a dictionary to be used as the default template namespace.
May be overridden by subclasses to add or modify values.
The results of this method will be combined with additional
defaults in the `tornado.template` module and keyword arguments
to `render` or `render_string`.
"""
namespace = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.reverse_url
)
namespace.update(self.ui)
return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
def flush(self, include_footers=False, callback=None):
"""Flushes the current output buffer to the network.
The ``callback`` argument, if given, can be used for flow control:
it will be run when all flushed data has been written to the socket.
Note that only one flush callback can be outstanding at a time;
if another flush occurs before the previous flush's callback
has been run, the previous callback will be discarded.
"""
if self.application._wsgi:
raise Exception("WSGI applications do not support flush()")
chunk = b"".join(self._write_buffer)
self._write_buffer = []
if not self._headers_written:
self._headers_written = True
for transform in self._transforms:
self._status_code, self._headers, chunk = \
transform.transform_first_chunk(
self._status_code, self._headers, chunk, include_footers)
headers = self._generate_headers()
else:
for transform in self._transforms:
chunk = transform.transform_chunk(chunk, include_footers)
headers = b""
# Ignore the chunk and only write the headers for HEAD requests
if self.request.method == "HEAD":
if headers:
self.request.write(headers, callback=callback)
return
self.request.write(headers + chunk, callback=callback)
def finish(self, chunk=None):
"""Finishes this response, ending the HTTP request."""
if self._finished:
raise RuntimeError("finish() called twice. May be caused "
"by using async operations without the "
"@asynchronous decorator.")
if chunk is not None:
self.write(chunk)
# Automatically support ETags and add the Content-Length header if
# we have not flushed any content yet.
if not self._headers_written:
if (self._status_code == 200 and
self.request.method in ("GET", "HEAD") and
"Etag" not in self._headers):
etag = self.compute_etag()
if etag is not None:
self.set_header("Etag", etag)
inm = self.request.headers.get("If-None-Match")
if inm and inm.find(etag) != -1:
self._write_buffer = []
self.set_status(304)
if self._status_code == 304:
assert not self._write_buffer, "Cannot send body with 304"
self._clear_headers_for_304()
elif "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
if hasattr(self.request, "connection"):
# Now that the request is finished, clear the callback we
# set on the IOStream (which would otherwise prevent the
# garbage collection of the RequestHandler when there
# are keepalive connections)
self.request.connection.stream.set_close_callback(None)
if not self.application._wsgi:
self.flush(include_footers=True)
self.request.finish()
self._log()
self._finished = True
self.on_finish()
def send_error(self, status_code=500, **kwargs):
"""Sends the given HTTP error code to the browser.
If `flush()` has already been called, it is not possible to send
an error, so this method will simply terminate the response.
If output has been written but not yet flushed, it will be discarded
and replaced with the error page.
Override `write_error()` to customize the error page that is returned.
Additional keyword arguments are passed through to `write_error`.
"""
if self._headers_written:
gen_log.error("Cannot send error response after headers written")
if not self._finished:
self.finish()
return
self.clear()
reason = None
if 'exc_info' in kwargs:
exception = kwargs['exc_info'][1]
if isinstance(exception, HTTPError) and exception.reason:
reason = exception.reason
self.set_status(status_code, reason=reason)
try:
self.write_error(status_code, **kwargs)
except Exception:
app_log.error("Uncaught exception in write_error", exc_info=True)
if not self._finished:
self.finish()
def write_error(self, status_code, **kwargs):
"""Override to implement custom error pages.
``write_error`` may call `write`, `render`, `set_header`, etc
to produce output as usual.
If this error was caused by an uncaught exception (including
HTTPError), an ``exc_info`` triple will be available as
``kwargs["exc_info"]``. Note that this exception may not be
the "current" exception for purposes of methods like
``sys.exc_info()`` or ``traceback.format_exc``.
For historical reasons, if a method ``get_error_html`` exists,
it will be used instead of the default ``write_error`` implementation.
``get_error_html`` returned a string instead of producing output
normally, and had different semantics for exception handling.
Users of ``get_error_html`` are encouraged to convert their code
to override ``write_error`` instead.
"""
if hasattr(self, 'get_error_html'):
if 'exc_info' in kwargs:
exc_info = kwargs.pop('exc_info')
kwargs['exception'] = exc_info[1]
try:
# Put the traceback into sys.exc_info()
raise_exc_info(exc_info)
except Exception:
self.finish(self.get_error_html(status_code, **kwargs))
else:
self.finish(self.get_error_html(status_code, **kwargs))
return
if self.settings.get("debug") and "exc_info" in kwargs:
# in debug mode, try to send a traceback
self.set_header('Content-Type', 'text/plain')
for line in traceback.format_exception(*kwargs["exc_info"]):
self.write(line)
self.finish()
else:
self.finish("<html><title>%(code)d: %(message)s</title>"
"<body>%(code)d: %(message)s</body></html>" % {
"code": status_code,
"message": self._reason,
})
@property
def locale(self):
"""The local for the current session.
Determined by either `get_user_locale`, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or `get_browser_locale`, which uses the ``Accept-Language``
header.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale()
if not self._locale:
self._locale = self.get_browser_locale()
assert self._locale
return self._locale
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
If None is returned, we fall back to `get_browser_locale()`.
This method should return a `tornado.locale.Locale` object,
most likely obtained via a call like ``tornado.locale.get("en")``
"""
return None
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from ``Accept-Language`` header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
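    # Illustrative sketch (not part of Tornado): for the header
    # "Accept-Language: da, en-gb;q=0.8, en;q=0.7" the loop above scores
    # da=1.0, en-gb=0.8 and en=0.7, sorts by score, and then asks
    # tornado.locale for the closest supported match:
    #
    #     codes = ["da", "en-gb", "en"]   # sorted by q-value, descending
    #     best = locale.get(*codes)       # falls back through the list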
@property
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
return None
def get_login_url(self):
"""Override to customize the login URL based on the request.
By default, we use the ``login_url`` application setting.
"""
self.require_setting("login_url", "@tornado.web.authenticated")
return self.application.settings["login_url"]
def get_template_path(self):
"""Override to customize template path for each handler.
By default, we use the ``template_path`` application setting.
Return None to load templates relative to the calling file.
"""
return self.application.settings.get("template_path")
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf")
if not token:
token = binascii.b2a_hex(uuid.uuid4().bytes)
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", token, expires_days=expires_days)
self._xsrf_token = token
return self._xsrf_token
def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
To prevent cross-site request forgery, we set an ``_xsrf``
cookie and include the same value as a non-cookie
field with all ``POST`` requests. If the two do not match, we
reject the form submission as a potential forgery.
The ``_xsrf`` value may be set as either a form field named ``_xsrf``
or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
(the latter is accepted for compatibility with Django).
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
Prior to release 1.1.1, this check was ignored if the HTTP header
``X-Requested-With: XMLHTTPRequest`` was present. This exception
has been shown to be insecure and has been removed. For more
information please see
http://www.djangoproject.com/weblog/2011/feb/08/security/
http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
"""
token = (self.get_argument("_xsrf", None) or
self.request.headers.get("X-Xsrftoken") or
self.request.headers.get("X-Csrftoken"))
if not token:
raise HTTPError(403, "'_xsrf' argument missing from POST")
if self.xsrf_token != token:
raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
"""An HTML ``<input/>`` element to be included with all POST forms.
It defines the ``_xsrf`` input value, which we check on all POST
requests to prevent cross-site request forgery. If you have set
the ``xsrf_cookies`` application setting, you must include this
HTML within all of your HTML forms.
In a template, this method should be called with ``{% module
xsrf_form_html() %}``
See `check_xsrf_cookie()` above for more information.
"""
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
def static_url(self, path, include_host=None):
"""Returns a static URL for the given relative static file path.
This method requires you set the ``static_path`` setting in your
application (which specifies the root directory of your static
files).
We append ``?v=<signature>`` to the returned URL, which makes our
static file handler set an infinite expiration header on the
returned content. The signature is based on the content of the
file.
By default this method returns URLs relative to the current
host, but if ``include_host`` is true the URL returned will be
absolute. If this handler has an ``include_host`` attribute,
that value will be used as the default for all `static_url`
calls that do not pass ``include_host`` as a keyword argument.
"""
self.require_setting("static_path", "static_url")
static_handler_class = self.settings.get(
"static_handler_class", StaticFileHandler)
if include_host is None:
include_host = getattr(self, "include_host", False)
if include_host:
base = self.request.protocol + "://" + self.request.host
else:
base = ""
return base + static_handler_class.make_static_url(self.settings, path)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception as e:
if self._headers_written:
app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name):
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
def reverse_url(self, name, *args):
"""Alias for `Application.reverse_url`."""
return self.application.reverse_url(name, *args)
def compute_etag(self):
"""Computes the etag header to be used for this request.
May be overridden to provide custom etag implementations,
or may return None to disable tornado's default etag support.
"""
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
return '"%s"' % hasher.hexdigest()
def _stack_context_handle_exception(self, type, value, traceback):
try:
# For historical reasons _handle_request_exception only takes
# the exception value instead of the full triple,
# so re-raise the exception to ensure that it's in
# sys.exc_info()
raise_exc_info((type, value, traceback))
except Exception:
self._handle_request_exception(value)
return True
def _execute(self, transforms, *args, **kwargs):
"""Executes this request with the given output transforms."""
self._transforms = transforms
try:
if self.request.method not in self.SUPPORTED_METHODS:
raise HTTPError(405)
self.path_args = [self.decode_argument(arg) for arg in args]
self.path_kwargs = dict((k, self.decode_argument(v, name=k))
for (k, v) in kwargs.items())
# If XSRF cookies are turned on, reject form submissions without
# the proper cookie
if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
self.application.settings.get("xsrf_cookies"):
self.check_xsrf_cookie()
self.prepare()
if not self._finished:
getattr(self, self.request.method.lower())(
*self.path_args, **self.path_kwargs)
if self._auto_finish and not self._finished:
self.finish()
except Exception as e:
self._handle_request_exception(e)
def _generate_headers(self):
reason = self._reason
lines = [utf8(self.request.version + " " +
str(self._status_code) +
" " + reason)]
lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
if hasattr(self, "_new_cookie"):
for cookie in self._new_cookie.values():
lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
return b"\r\n".join(lines) + b"\r\n\r\n"
def _log(self):
"""Logs the current request.
Sort of deprecated since this functionality was moved to the
Application, but left in place for the benefit of existing apps
that have overridden this method.
"""
self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
if isinstance(e, HTTPError):
if e.log_message:
format = "%d %s: " + e.log_message
args = [e.status_code, self._request_summary()] + list(e.args)
gen_log.warning(format, *args)
if e.status_code not in httputil.responses and not e.reason:
gen_log.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exc_info=sys.exc_info())
else:
self.send_error(e.status_code, exc_info=sys.exc_info())
else:
app_log.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=True)
self.send_error(500, exc_info=sys.exc_info())
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
"""Wrap request handler methods with this if they are asynchronous.
If this decorator is given, the response is not finished when the
method returns. It is up to the request handler to call
`self.finish() <RequestHandler.finish>` to finish the HTTP
request. Without this decorator, the request is automatically
finished when the ``get()`` or ``post()`` method returns. Example::
class MyRequestHandler(web.RequestHandler):
@web.asynchronous
def get(self):
http = httpclient.AsyncHTTPClient()
http.fetch("http://friendfeed.com/", self._on_download)
def _on_download(self, response):
self.write("Downloaded!")
self.finish()
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.application._wsgi:
raise Exception("@asynchronous is not supported for WSGI apps")
self._auto_finish = False
with stack_context.ExceptionStackContext(
self._stack_context_handle_exception):
return method(self, *args, **kwargs)
return wrapper
def removeslash(method):
"""Use this decorator to remove trailing slashes from the request path.
For example, a request to ``/foo/`` would redirect to ``/foo`` with this
decorator. Your request handler mapping should use a regular expression
    like ``r'/foo/*'`` in conjunction with this decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path.rstrip("/")
if uri: # don't try to redirect '/' to ''
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri, permanent=True)
return
else:
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
def addslash(method):
"""Use this decorator to add a missing trailing slash to the request path.
For example, a request to ``/foo`` would redirect to ``/foo/`` with this
decorator. Your request handler mapping should use a regular expression
    like ``r'/foo/?'`` in conjunction with this decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path + "/"
if self.request.query:
uri += "?" + self.request.query
self.redirect(uri, permanent=True)
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
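# Illustrative sketch (not part of Tornado): pairing the decorators above with
# the URL patterns their docstrings recommend. Handler name is hypothetical.
#
#     class ListingHandler(RequestHandler):
#         @addslash            # GET /files -> 301 redirect to /files/
#         def get(self):
#             self.write("listing")
#
#     application = Application([(r"/files/?", ListingHandler)])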
class Application(object):
"""A collection of request handlers that make up a web application.
Instances of this class are callable and can be passed directly to
HTTPServer to serve the application::
application = web.Application([
(r"/", MainPageHandler),
])
http_server = httpserver.HTTPServer(application)
http_server.listen(8080)
ioloop.IOLoop.instance().start()
The constructor for this class takes in a list of `URLSpec` objects
or (regexp, request_class) tuples. When we receive requests, we
iterate over the list in order and instantiate an instance of the
first request class whose regexp matches the request path.
Each tuple can contain an optional third element, which should be
a dictionary if it is present. That dictionary is passed as
    keyword arguments to the constructor of the handler. This pattern
is used for the `StaticFileHandler` in this example (note that a
`StaticFileHandler` can be installed automatically with the
static_path setting described below)::
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
We support virtual hosts with the `add_handlers` method, which takes in
a host regular expression as the first argument::
application.add_handlers(r"www\.myhost\.com", [
(r"/article/([0-9]+)", ArticleHandler),
])
You can serve static files by sending the ``static_path`` setting
as a keyword argument. We will serve those files from the
``/static/`` URI (this is configurable with the
``static_url_prefix`` setting), and we will serve ``/favicon.ico``
and ``/robots.txt`` from the same directory. A custom subclass of
`StaticFileHandler` can be specified with the
``static_handler_class`` setting.
"""
def __init__(self, handlers=None, default_host="", transforms=None,
wsgi=False, **settings):
if transforms is None:
self.transforms = []
if settings.get("gzip"):
self.transforms.append(GZipContentEncoding)
self.transforms.append(ChunkedTransferEncoding)
else:
self.transforms = transforms
self.handlers = []
self.named_handlers = {}
self.default_host = default_host
self.settings = settings
self.ui_modules = {'linkify': _linkify,
'xsrf_form_html': _xsrf_form_html,
'Template': TemplateModule,
}
self.ui_methods = {}
self._wsgi = wsgi
self._load_ui_modules(settings.get("ui_modules", {}))
self._load_ui_methods(settings.get("ui_methods", {}))
if self.settings.get("static_path"):
path = self.settings["static_path"]
handlers = list(handlers or [])
static_url_prefix = settings.get("static_url_prefix",
"/static/")
static_handler_class = settings.get("static_handler_class",
StaticFileHandler)
static_handler_args = settings.get("static_handler_args", {})
static_handler_args['path'] = path
for pattern in [re.escape(static_url_prefix) + r"(.*)",
r"/(favicon\.ico)", r"/(robots\.txt)"]:
handlers.insert(0, (pattern, static_handler_class,
static_handler_args))
if handlers:
self.add_handlers(".*$", handlers)
# Automatically reload modified modules
if self.settings.get("debug") and not wsgi:
from lib.tornado import autoreload
autoreload.start()
def listen(self, port, address="", **kwargs):
"""Starts an HTTP server for this application on the given port.
This is a convenience alias for creating an `.HTTPServer`
object and calling its listen method. Keyword arguments not
supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
`.HTTPServer` constructor. For advanced uses
(e.g. multi-process mode), do not use this method; create an
`.HTTPServer` and call its
`.TCPServer.bind`/`.TCPServer.start` methods directly.
Note that after calling this method you still need to call
``IOLoop.instance().start()`` to start the server.
"""
# import is here rather than top level because HTTPServer
# is not importable on appengine
from lib.tornado.httpserver import HTTPServer
server = HTTPServer(self, **kwargs)
server.listen(port, address)
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list.
Host patterns are processed sequentially in the order they were
added. All matching patterns will be considered.
"""
if not host_pattern.endswith("$"):
host_pattern += "$"
handlers = []
# The handlers with the wildcard host_pattern are a special
# case - they're added in the constructor but should have lower
# precedence than the more-precise handlers added later.
# If a wildcard handler group exists, it should always be last
# in the list, so insert new groups just before it.
if self.handlers and self.handlers[-1][0].pattern == '.*$':
self.handlers.insert(-1, (re.compile(host_pattern), handlers))
else:
self.handlers.append((re.compile(host_pattern), handlers))
for spec in host_handlers:
if isinstance(spec, type(())):
assert len(spec) in (2, 3)
pattern = spec[0]
handler = spec[1]
if isinstance(handler, str):
# import the Module and instantiate the class
# Must be a fully qualified name (module.ClassName)
handler = import_object(handler)
if len(spec) == 3:
kwargs = spec[2]
else:
kwargs = {}
spec = URLSpec(pattern, handler, kwargs)
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
app_log.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
def add_transform(self, transform_class):
self.transforms.append(transform_class)
def _get_host_handlers(self, request):
host = request.host.lower().split(':')[0]
matches = []
for pattern, handlers in self.handlers:
if pattern.match(host):
matches.extend(handlers)
# Look for default host if not behind load balancer (for debugging)
if not matches and "X-Real-Ip" not in request.headers:
for pattern, handlers in self.handlers:
if pattern.match(self.default_host):
matches.extend(handlers)
return matches or None
def _load_ui_methods(self, methods):
if isinstance(methods, types.ModuleType):
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
for m in methods:
self._load_ui_methods(m)
else:
for name, fn in methods.items():
if not name.startswith("_") and hasattr(fn, "__call__") \
and name[0].lower() == name[0]:
self.ui_methods[name] = fn
def _load_ui_modules(self, modules):
if isinstance(modules, types.ModuleType):
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
for m in modules:
self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.items():
try:
if issubclass(cls, UIModule):
self.ui_modules[name] = cls
except TypeError:
pass
def __call__(self, request):
"""Called by HTTPServer to execute the request."""
transforms = [t(request) for t in self.transforms]
handler = None
args = []
kwargs = {}
handlers = self._get_host_handlers(request)
if not handlers:
handler = RedirectHandler(
self, request, url="http://" + self.default_host + "/")
else:
for spec in handlers:
match = spec.regex.match(request.path)
if match:
handler = spec.handler_class(self, request, **spec.kwargs)
if spec.regex.groups:
# None-safe wrapper around url_unescape to handle
# unmatched optional groups correctly
def unquote(s):
if s is None:
return s
return escape.url_unescape(s, encoding=None)
# Pass matched groups to the handler. Since
# match.groups() includes both named and unnamed groups,
# we want to use either groups or groupdict but not both.
# Note that args are passed as bytes so the handler can
# decide what encoding to use.
if spec.regex.groupindex:
kwargs = dict(
(str(k), unquote(v))
for (k, v) in match.groupdict().items())
else:
args = [unquote(s) for s in match.groups()]
break
if not handler:
handler = ErrorHandler(self, request, status_code=404)
# In debug mode, re-compile templates and reload static files on every
# request so you don't need to restart to see changes
if self.settings.get("debug"):
with RequestHandler._template_loader_lock:
for loader in RequestHandler._template_loaders.values():
loader.reset()
StaticFileHandler.reset()
handler._execute(transforms, *args, **kwargs)
return handler
def reverse_url(self, name, *args):
"""Returns a URL path for handler named ``name``
The handler must be added to the application as a named `URLSpec`.
Args will be substituted for capturing groups in the `URLSpec` regex.
They will be converted to strings if necessary, encoded as utf8,
and url-escaped.
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
raise KeyError("%s not found in named urls" % name)
def log_request(self, handler):
"""Writes a completed HTTP request to the logs.
By default writes to the python root logger. To change
this behavior either subclass Application and override this method,
or pass a function in the application settings dictionary as
``log_function``.
"""
if "log_function" in self.settings:
self.settings["log_function"](handler)
return
if handler.get_status() < 400:
log_method = access_log.info
elif handler.get_status() < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time)
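# Illustrative sketch (not part of Tornado): replacing the default request log
# via the ``log_function`` setting read above. The function receives the
# finished handler; the names below are hypothetical.
#
#     import logging
#
#     def log_as_json(handler):
#         logging.getLogger("access").info(
#             '{"status": %d, "uri": "%s", "ms": %.2f}',
#             handler.get_status(), handler.request.uri,
#             1000.0 * handler.request.request_time())
#
#     application = Application(handlers, log_function=log_as_json)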
class HTTPError(Exception):
"""An exception that will turn into an HTTP error response.
Raising an `HTTPError` is a convenient alternative to calling
`RequestHandler.send_error` since it automatically ends the
current function.
:arg int status_code: HTTP status code. Must be listed in
`httplib.responses <http.client.responses>` unless the ``reason``
keyword argument is given.
:arg string log_message: Message to be written to the log for this error
(will not be shown to the user unless the `Application` is in debug
mode). May contain ``%s``-style placeholders, which will be filled
in with remaining positional parameters.
:arg string reason: Keyword-only argument. The HTTP "reason" phrase
to pass in the status line along with ``status_code``. Normally
        determined automatically from ``status_code``, but can be set
        explicitly when a non-standard numeric code is used.
"""
def __init__(self, status_code, log_message=None, *args, **kwargs):
self.status_code = status_code
self.log_message = log_message
self.args = args
self.reason = kwargs.get('reason', None)
def __str__(self):
message = "HTTP %d: %s" % (
self.status_code,
self.reason or httputil.responses.get(self.status_code, 'Unknown'))
if self.log_message:
return message + " (" + (self.log_message % self.args) + ")"
else:
return message
class ErrorHandler(RequestHandler):
"""Generates an error response with ``status_code`` for all requests."""
def initialize(self, status_code):
self.set_status(status_code)
def prepare(self):
raise HTTPError(self._status_code)
def check_xsrf_cookie(self):
# POSTs to an ErrorHandler don't actually have side effects,
# so we don't need to check the xsrf token. This allows POSTs
# to the wrong url to return a 404 instead of 403.
pass
class RedirectHandler(RequestHandler):
"""Redirects the client to the given URL for all GET requests.
You should provide the keyword argument ``url`` to the handler, e.g.::
application = web.Application([
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
])
"""
def initialize(self, url, permanent=True):
self._url = url
self._permanent = permanent
def get(self):
self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
To map a path to this handler for a static data directory ``/var/www``,
you would add a line to your application like::
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The local root directory of the content should be passed as the ``path``
argument to the handler.
To support aggressive browser caching, if the argument ``v`` is given
with the path, we set an infinite HTTP expiration header. So, if you
want browsers to cache a file indefinitely, send them to, e.g.,
``/static/images/myimage.png?v=xxx``. Override `get_cache_time` method for
more fine-grained cache control.
"""
CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
_static_hashes = {}
_lock = threading.Lock() # protects _static_hashes
def initialize(self, path, default_filename=None):
self.root = os.path.abspath(path) + os.path.sep
self.default_filename = default_filename
@classmethod
def reset(cls):
with cls._lock:
cls._static_hashes = {}
def head(self, path):
self.get(path, include_body=False)
def get(self, path, include_body=True):
path = self.parse_url_path(path)
abspath = os.path.abspath(os.path.join(self.root, path))
# os.path.abspath strips a trailing /
# it needs to be temporarily added back for requests to root/
if not (abspath + os.path.sep).startswith(self.root):
raise HTTPError(403, "%s is not in root static directory", path)
if os.path.isdir(abspath) and self.default_filename is not None:
# need to look at the request.path here for when path is empty
# but there is some prefix to the path that was already
# trimmed by the routing
if not self.request.path.endswith("/"):
self.redirect(self.request.path + "/")
return
abspath = os.path.join(abspath, self.default_filename)
if not os.path.exists(abspath):
raise HTTPError(404)
if not os.path.isfile(abspath):
raise HTTPError(403, "%s is not a file", path)
stat_result = os.stat(abspath)
modified = datetime.datetime.fromtimestamp(stat_result[stat.ST_MTIME])
self.set_header("Last-Modified", modified)
mime_type, encoding = mimetypes.guess_type(abspath)
if mime_type:
self.set_header("Content-Type", mime_type)
cache_time = self.get_cache_time(path, modified, mime_type)
if cache_time > 0:
self.set_header("Expires", datetime.datetime.utcnow() +
datetime.timedelta(seconds=cache_time))
self.set_header("Cache-Control", "max-age=" + str(cache_time))
self.set_extra_headers(path)
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if_since = datetime.datetime.fromtimestamp(time.mktime(date_tuple))
if if_since >= modified:
self.set_status(304)
return
with open(abspath, "rb") as file:
data = file.read()
if include_body:
self.write(data)
else:
assert self.request.method == "HEAD"
self.set_header("Content-Length", len(data))
def set_extra_headers(self, path):
"""For subclass to add extra headers to the response"""
pass
def get_cache_time(self, path, modified, mime_type):
"""Override to customize cache control behavior.
Return a positive number of seconds to make the result
cacheable for that amount of time or 0 to mark resource as
cacheable for an unspecified amount of time (subject to
browser heuristics).
By default returns cache expiry of 10 years for resources requested
with ``v`` argument.
"""
return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
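    # Illustrative sketch (not part of Tornado): a subclass that caches
    # unversioned assets for one hour instead of leaving expiry to browser
    # heuristics. Class name is hypothetical.
    #
    #     class CachingStaticHandler(StaticFileHandler):
    #         def get_cache_time(self, path, modified, mime_type):
    #             if "v" in self.request.arguments:
    #                 return self.CACHE_MAX_AGE
    #             return 3600  # one hour for everything else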
@classmethod
def make_static_url(cls, settings, path):
"""Constructs a versioned url for the given path.
This method may be overridden in subclasses (but note that it is
a class method rather than an instance method).
``settings`` is the `Application.settings` dictionary. ``path``
is the static path being requested. The url returned should be
relative to the current host.
"""
static_url_prefix = settings.get('static_url_prefix', '/static/')
version_hash = cls.get_version(settings, path)
if version_hash:
return static_url_prefix + path + "?v=" + version_hash
return static_url_prefix + path
@classmethod
def get_version(cls, settings, path):
"""Generate the version string to be used in static URLs.
This method may be overridden in subclasses (but note that it
is a class method rather than a static method). The default
implementation uses a hash of the file's contents.
``settings`` is the `Application.settings` dictionary and ``path``
is the relative location of the requested asset on the filesystem.
The returned value should be a string, or ``None`` if no version
could be determined.
"""
abs_path = os.path.join(settings["static_path"], path)
with cls._lock:
hashes = cls._static_hashes
if abs_path not in hashes:
try:
f = open(abs_path, "rb")
hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
f.close()
except Exception:
gen_log.error("Could not open static file %r", path)
hashes[abs_path] = None
hsh = hashes.get(abs_path)
if hsh:
return hsh[:5]
return None
def parse_url_path(self, url_path):
"""Converts a static URL path into a filesystem path.
``url_path`` is the path component of the URL with
``static_url_prefix`` removed. The return value should be
filesystem path relative to ``static_path``.
"""
if os.path.sep != "/":
url_path = url_path.replace("/", os.path.sep)
return url_path
class FallbackHandler(RequestHandler):
"""A `RequestHandler` that wraps another HTTP server callback.
The fallback is a callable object that accepts an
`~.httpserver.HTTPRequest`, such as an `Application` or
`tornado.wsgi.WSGIContainer`. This is most useful to use both
Tornado ``RequestHandlers`` and WSGI in the same server. Typical
usage::
wsgi_app = tornado.wsgi.WSGIContainer(
django.core.handlers.wsgi.WSGIHandler())
application = tornado.web.Application([
(r"/foo", FooHandler),
(r".*", FallbackHandler, dict(fallback=wsgi_app),
])
"""
def initialize(self, fallback):
self.fallback = fallback
def prepare(self):
self.fallback(self.request)
self._finished = True
class OutputTransform(object):
"""A transform modifies the result of an HTTP request (e.g., GZip encoding)
A new transform instance is created for every request. See the
ChunkedTransferEncoding example below if you want to implement a
new Transform.
"""
def __init__(self, request):
pass
def transform_first_chunk(self, status_code, headers, chunk, finishing):
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
return chunk
class GZipContentEncoding(OutputTransform):
"""Applies the gzip content encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
"""
CONTENT_TYPES = set([
"text/plain", "text/html", "text/css", "text/xml", "application/javascript",
"application/x-javascript", "application/xml", "application/atom+xml",
"text/javascript", "application/json", "application/xhtml+xml"])
MIN_LENGTH = 5
def __init__(self, request):
self._gzipping = request.supports_http_1_1() and \
"gzip" in request.headers.get("Accept-Encoding", "")
def transform_first_chunk(self, status_code, headers, chunk, finishing):
if 'Vary' in headers:
headers['Vary'] += b', Accept-Encoding'
else:
headers['Vary'] = b'Accept-Encoding'
if self._gzipping:
ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
self._gzipping = (ctype in self.CONTENT_TYPES) and \
(not finishing or len(chunk) >= self.MIN_LENGTH) and \
(finishing or "Content-Length" not in headers) and \
("Content-Encoding" not in headers)
if self._gzipping:
headers["Content-Encoding"] = "gzip"
self._gzip_value = BytesIO()
self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
chunk = self.transform_chunk(chunk, finishing)
if "Content-Length" in headers:
headers["Content-Length"] = str(len(chunk))
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
if self._gzipping:
self._gzip_file.write(chunk)
if finishing:
self._gzip_file.close()
else:
self._gzip_file.flush()
chunk = self._gzip_value.getvalue()
self._gzip_value.truncate(0)
self._gzip_value.seek(0)
return chunk
class ChunkedTransferEncoding(OutputTransform):
"""Applies the chunked transfer encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
"""
def __init__(self, request):
self._chunking = request.supports_http_1_1()
def transform_first_chunk(self, status_code, headers, chunk, finishing):
# 304 responses have no body (not even a zero-length body), and so
# should not have either Content-Length or Transfer-Encoding headers.
if self._chunking and status_code != 304:
# No need to chunk the output if a Content-Length is specified
if "Content-Length" in headers or "Transfer-Encoding" in headers:
self._chunking = False
else:
headers["Transfer-Encoding"] = "chunked"
chunk = self.transform_chunk(chunk, finishing)
return status_code, headers, chunk
def transform_chunk(self, block, finishing):
if self._chunking:
# Don't write out empty chunks because that means END-OF-STREAM
# with chunked encoding
if block:
block = utf8("%x" % len(block)) + b"\r\n" + block + b"\r\n"
if finishing:
block += b"0\r\n\r\n"
return block
def authenticated(method):
"""Decorate methods with this to require that the user be logged in.
If the user is not logged in, they will be redirected to the configured
`login url <RequestHandler.get_login_url>`.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.current_user:
if self.request.method in ("GET", "HEAD"):
url = self.get_login_url()
if "?" not in url:
if urlparse.urlsplit(url).scheme:
# if login url is absolute, make next absolute too
next_url = self.request.full_url()
else:
next_url = self.request.uri
url += "?" + urlencode(dict(next=next_url))
self.redirect(url)
return
raise HTTPError(403)
return method(self, *args, **kwargs)
return wrapper
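# Illustrative sketch (not part of Tornado): a login-protected handler using
# the decorator above. Assumes a ``login_url`` application setting and a
# "user" secure cookie; the names are hypothetical.
#
#     class ProfileHandler(RequestHandler):
#         def get_current_user(self):
#             return self.get_secure_cookie("user")
#
#         @authenticated
#         def get(self):
#             self.write(b"hello " + self.current_user)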
class UIModule(object):
"""A re-usable, modular UI unit on a page.
UI modules often execute additional queries, and they can include
additional CSS and JavaScript that will be included in the output
page, which is automatically inserted on page render.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.ui = handler.ui
self.current_user = handler.current_user
self.locale = handler.locale
def render(self, *args, **kwargs):
"""Overridden in subclasses to return this module's output."""
raise NotImplementedError()
def embedded_javascript(self):
"""Returns a JavaScript string that will be embedded in the page."""
return None
def javascript_files(self):
"""Returns a list of JavaScript files required by this module."""
return None
def embedded_css(self):
"""Returns a CSS string that will be embedded in the page."""
return None
def css_files(self):
"""Returns a list of CSS files required by this module."""
return None
def html_head(self):
"""Returns a CSS string that will be put in the <head/> element"""
return None
def html_body(self):
"""Returns an HTML string that will be put in the <body/> element"""
return None
def render_string(self, path, **kwargs):
"""Renders a template and returns it as a string."""
return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
def render(self, text, **kwargs):
return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
def render(self):
return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
"""UIModule that simply renders the given template.
{% module Template("foo.html") %} is similar to {% include "foo.html" %},
but the module version gets its own namespace (with kwargs passed to
Template()) instead of inheriting the outer template's namespace.
Templates rendered through this module also get access to UIModule's
automatic javascript/css features. Simply call set_resources
inside the template and give it keyword arguments corresponding to
the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
Note that these resources are output once per template file, not once
per instantiation of the template, so they must not depend on
any arguments to the template.
"""
def __init__(self, handler):
super(TemplateModule, self).__init__(handler)
# keep resources in both a list and a dict to preserve order
self._resource_list = []
self._resource_dict = {}
def render(self, path, **kwargs):
def set_resources(**kwargs):
if path not in self._resource_dict:
self._resource_list.append(kwargs)
self._resource_dict[path] = kwargs
else:
if self._resource_dict[path] != kwargs:
raise ValueError("set_resources called with different "
"resources for the same template")
return ""
return self.render_string(path, set_resources=set_resources,
**kwargs)
def _get_resources(self, key):
return (r[key] for r in self._resource_list if key in r)
def embedded_javascript(self):
return "\n".join(self._get_resources("embedded_javascript"))
def javascript_files(self):
result = []
for f in self._get_resources("javascript_files"):
if isinstance(f, (unicode_type, bytes_type)):
result.append(f)
else:
result.extend(f)
return result
def embedded_css(self):
return "\n".join(self._get_resources("embedded_css"))
def css_files(self):
result = []
for f in self._get_resources("css_files"):
if isinstance(f, (unicode_type, bytes_type)):
result.append(f)
else:
result.extend(f)
return result
def html_head(self):
return "".join(self._get_resources("html_head"))
def html_body(self):
return "".join(self._get_resources("html_body"))
class URLSpec(object):
"""Specifies mappings between URLs and handlers."""
def __init__(self, pattern, handler_class, kwargs=None, name=None):
"""Parameters:
* ``pattern``: Regular expression to be matched. Any groups
in the regex will be passed in to the handler's get/post/etc
methods as arguments.
* ``handler_class``: `RequestHandler` subclass to be invoked.
* ``kwargs`` (optional): A dictionary of additional arguments
to be passed to the handler's constructor.
* ``name`` (optional): A name for this handler. Used by
`Application.reverse_url`.
"""
if not pattern.endswith('$'):
pattern += '$'
self.regex = re.compile(pattern)
assert len(self.regex.groupindex) in (0, self.regex.groups), \
("groups in url regexes must either be all named or all "
"positional: %r" % self.regex.pattern)
self.handler_class = handler_class
self.kwargs = kwargs or {}
self.name = name
self._path, self._group_count = self._find_groups()
def __repr__(self):
return '%s(%r, %s, kwargs=%r, name=%r)' % \
(self.__class__.__name__, self.regex.pattern,
self.handler_class, self.kwargs, self.name)
def _find_groups(self):
"""Returns a tuple (reverse string, group count) for a url.
For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
would return ('/%s/%s/', 2).
"""
pattern = self.regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
if pattern.endswith('$'):
pattern = pattern[:-1]
if self.regex.groups != pattern.count('('):
# The pattern is too complicated for our simplistic matching,
# so we can't support reversing it.
return (None, None)
pieces = []
for fragment in pattern.split('('):
if ')' in fragment:
paren_loc = fragment.index(')')
if paren_loc >= 0:
pieces.append('%s' + fragment[paren_loc + 1:])
else:
pieces.append(fragment)
return (''.join(pieces), self.regex.groups)
def reverse(self, *args):
assert self._path is not None, \
"Cannot reverse url regex " + self.regex.pattern
assert len(args) == self._group_count, "required number of arguments "\
"not found"
if not len(args):
return self._path
converted_args = []
for a in args:
if not isinstance(a, (unicode_type, bytes_type)):
a = str(a)
converted_args.append(escape.url_escape(utf8(a)))
return self._path % tuple(converted_args)
url = URLSpec
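# Illustrative sketch (not part of Tornado): a named URLSpec and reverse_url.
# For the pattern below _find_groups yields ('/article/%s/', 1), so
# reverse("42") returns "/article/42/". Handler name is hypothetical.
#
#     application = Application([
#         url(r"/article/([0-9]+)/", ArticleHandler, name="article"),
#     ])
#     # inside a handler: self.reverse_url("article", 42) == "/article/42/"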
if hasattr(hmac, 'compare_digest'): # python 3.3
_time_independent_equals = hmac.compare_digest
else:
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
if isinstance(a[0], int): # python3 byte strings
for x, y in zip(a, b):
result |= x ^ y
else: # python2
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
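# Illustrative note (not part of Tornado): unlike ``a == b``, the fallback
# above touches every byte, so the comparison time does not leak how many
# leading bytes of an HMAC signature were guessed correctly:
#
#     _time_independent_equals(b"abcd", b"abzd")   # False, constant time
#     _time_independent_equals(b"abcd", b"abcd")   # True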
def create_signed_value(secret, name, value):
timestamp = utf8(str(int(time.time())))
value = base64.b64encode(utf8(value))
signature = _create_signature(secret, name, value, timestamp)
value = b"|".join([value, timestamp, signature])
return value
def decode_signed_value(secret, name, value, max_age_days=31):
if not value:
return None
parts = utf8(value).split(b"|")
if len(parts) != 3:
return None
signature = _create_signature(secret, name, parts[0], parts[1])
if not _time_independent_equals(parts[2], signature):
gen_log.warning("Invalid cookie signature %r", value)
return None
timestamp = int(parts[1])
if timestamp < time.time() - max_age_days * 86400:
gen_log.warning("Expired cookie %r", value)
return None
if timestamp > time.time() + 31 * 86400:
# _cookie_signature does not hash a delimiter between the
# parts of the cookie, so an attacker could transfer trailing
# digits from the payload to the timestamp without altering the
# signature. For backwards compatibility, sanity-check timestamp
# here instead of modifying _cookie_signature.
gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
return None
if parts[1].startswith(b"0"):
gen_log.warning("Tampered cookie %r", value)
return None
try:
return base64.b64decode(parts[0])
except Exception:
return None
def _create_signature(secret, *parts):
hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
for part in parts:
hash.update(utf8(part))
return utf8(hash.hexdigest())
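# Illustrative sketch (not part of Tornado): round-tripping a signed value
# with the helpers above, outside of any cookie machinery.
#
#     secret = "a-long-random-secret"
#     signed = create_signed_value(secret, "session", "alice")
#     # signed looks like b"YWxpY2U=|<unix-time>|<40-hex-hmac>"
#     assert decode_signed_value(secret, "session", signed) == b"alice"
#     assert decode_signed_value("wrong", "session", signed) is None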
|
gpl-3.0
| 7,279,264,925,478,021,000
| 38.822596
| 96
| 0.590973
| false
| 4.360718
| false
| false
| false
|
black-silence/PlanetNomadsSavegameEditor
|
PlanetNomads/Savegame.py
|
1
|
30585
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sqlite3
import xml.etree.ElementTree as ETree
import re
import random
import zipfile
import os
import atexit
from math import sqrt
from collections import OrderedDict
class Savegame:
def __init__(self):
self.filename = ""
self.temp_extracted_file = ""
self.loaded = False
self.dbconnector = None
self.db = None
self.__machines = []
self.settings = None
atexit.register(self.cleanup)
def __del__(self):
self.cleanup()
def cleanup(self):
if self.db:
self.db.close()
self.db = None
os.remove(self.temp_extracted_file)
def load(self, filename):
self.filename = filename
with zipfile.ZipFile(filename, "r") as myzip:
self.temp_extracted_file = myzip.extract("_working.db", "PNSE_extract")
self.dbconnector = sqlite3.connect(self.temp_extracted_file)
self.db = self.dbconnector.cursor()
self.db.row_factory = sqlite3.Row
self.loaded = True
self.reset()
def reset(self):
self.__machines = []
def get_name(self):
if not self.loaded:
raise ValueError("No file loaded")
self.db.execute("select value from simple_storage where key = 'game_name'")
return self.db.fetchone()["value"]
def teleport_player(self, x, y, z):
self.db.execute("select value from simple_storage where key = 'playerData'")
player_data = self.db.fetchone()["value"]
lines = player_data.split("\n")
for key, line in enumerate(lines):
if line.startswith("PL"):
continue
current_position = line.split(" ")
current_position[0] = "{:0.3f}".format(x)
current_position[1] = "{:0.3f}".format(y)
current_position[2] = "{:0.3f}".format(z)
lines[key] = " ".join(current_position)
player_data = "\n".join(lines)
self.db.execute("update simple_storage set value = ? where key = 'playerData'", (player_data,))
self.on_save()
return True
def get_player_position(self):
self.db.execute("select value from simple_storage where key = 'playerData'")
player_data = self.db.fetchone()["value"]
lines = player_data.split("\n")
for key, line in enumerate(lines):
if line.startswith("PL"):
continue
return [float(x) for x in line.split(" ")[:3]]
raise IOError("Player data not found in simple_storage")
def get_setting(self, name):
if not self.settings:
self.db.execute("select value from simple_storage where key='advanced_settings'")
try:
self.settings = ETree.fromstring(self.db.fetchone()["value"])
except TypeError:
# Old games don't have advanced settings in simple storage
return None
for tag in self.settings:
if tag.tag == name:
return tag.text
return None
@property
def machines(self):
if not self.__machines:
self.__load_machines()
return self.__machines
def __load_machines(self):
self.db.execute("select * from machine")
for row in self.db.fetchall():
self.__machines.append(Machine(row, self.db))
self.db.execute("select * from active_blocks")
active_block_data = self.db.fetchall()
for m in self.__machines:
m.set_active_blocks(active_block_data)
def on_save(self):
self.dbconnector.commit()
self.write_zip()
def save(self):
for m in self.__machines:
if not m.is_changed():
continue
data = '<?xml version="1.0" encoding="utf-8"?>' + m.get_xml_string()
update = (data, m.transform, m.identifier)
self.db.execute("update machine set data = ?, transform = ? where id = ?", update)
# write changed active blocks too, required for pushing stuff around
active_blocks = m.get_changed_active_blocks()
for b in active_blocks:
update = (active_blocks[b].get_xml_string(), b)
self.db.execute("update active_blocks set data = ? where id = ?", update)
self.on_save()
def write_zip(self):
# PN uses deflate so to be safe this is the mode we want to use
with zipfile.ZipFile(self.filename, "w", zipfile.ZIP_DEFLATED) as myzip:
myzip.write(os.path.join("PNSE_extract", "_working.db"), "_working.db")
def unlock_recipes(self):
unlock_string = "PL1\n" + "_".join([str(i) for i in range(1, 100)])
self.db.execute("update simple_storage set value = ? where key = 'playerTechnology'", (unlock_string,))
affected = self.db.rowcount
self.on_save()
return affected > 0
def debug(self):
print("Debug info")
print('Name: {}'.format(self.get_name()))
print("Number of machines: {}".format(len(self.machines)))
def get_planet_size(self):
radius = self.get_setting("PlanetRadius")
if radius:
return int(radius)
# Old games had 10k, even older games may have 16k. Not important enough to calculate it.
return 10000
def get_player_inventory(self):
inventory = Container(self.db, self.on_save)
if not inventory.load(0):
return
return inventory
def create_north_pole_beacon(self):
"""Create a solar beacon with navigation C on at the north pole."""
self.create_beacon(0, self.get_planet_size(), 0)
def create_south_pole_beacon(self):
"""Create a solar beacon with navigation C on at the south pole."""
self.create_beacon(0, -1 * self.get_planet_size(), 0, rot_z=-180)
def create_gps_beacons(self):
self.create_beacon(0, self.get_planet_size(), 0) # North pole
self.create_beacon(self.get_planet_size(), 0, 0, rot_z=90)
self.create_beacon(0, 0, self.get_planet_size(), rot_z=90)
def create_beacon(self, x, y, z, rot_x=0, rot_y=0, rot_z=0):
self.db.execute("select max(id) as mx from active_blocks")
next_active_id = int(self.db.fetchone()["mx"]) + 1
xml = '<ActiveBlock xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' \
'xmlns:xsd="http://www.w3.org/2001/XMLSchema" ID="{}" Type_ID="56" Container_ID="-1" Name="">' \
'<Module ID="0" Type="SwitchModule"><Prop key="TurnState"><value xsi:type="xsd:int">1</value></Prop></Module>' \
'<Module ID="1" Type="PowerIn" />' \
'<Module ID="2" Type="PositionModule"><Prop key="BasePosition"><value xsi:type="xsd:string">{:0.0f};{:0.0f};{:0.0f}</value></Prop></Module>' \
'<Module ID="3" Type="PowerOut"><Prop key="PowerState"><value xsi:type="xsd:int">0</value></Prop></Module>' \
'<Module ID="4" Type="SwitchModule"><Prop key="TurnState"><value xsi:type="xsd:int">0</value></Prop></Module>' \
'<Module ID="5" Type="SensorModule" />' \
'<Module ID="6" Type="RenameModule" />' \
'<Module ID="7" Type="ConnectPowerInOutModule" />' \
'<Module ID="8" Type="NavigationModule"><Prop key="Icon"><value xsi:type="xsd:int">2</value></Prop><Prop key="TurnState"><value xsi:type="xsd:int">1</value></Prop></Module>' \
'</ActiveBlock>'.format(next_active_id, x, y, z)
sql = "INSERT INTO active_blocks (id, type_id, data, container_id) VALUES (?, 56, ?, -1)"
self.db.execute(sql, (next_active_id, xml))
sql = 'INSERT INTO machine (id, data, transform) VALUES (?, ?, ' \
'"{:0.0f} {:0.0f} {:0.0f} {:0.0f} {:0.0f} {:0.0f}")'.format(x, y, z, rot_x, rot_y, rot_z)
machine_id = random.Random().randint(1000000, 10000000) # Is there a system behind the ID?
xml = '<?xml version="1.0" encoding="utf-8"?>\n' \
'<MachineSaveData xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">\n' \
'<Grid ID="{}">\n' \
'<BasePosition X="{:0.0f}" Y="{:0.0f}" Z="{:0.0f}" />' \
'<BaseRotation X="{:0.0f}" Y="{:0.0f}" Z="{:0.0f}" />' \
'<Blocks>\n' \
'<Block ID="56" Health="80" Weld="80" Ground="true" ActiveID="{}">' \
'<Pos x="0" y="0" z="0" /><Rot v="0" /><Col r="0" g="0" b="0" />' \
'</Block>\n' \
'</Blocks>\n</Grid>\n</MachineSaveData>\n'.format(machine_id, x, y, z, rot_x, rot_y, rot_z, next_active_id)
self.db.execute(sql, (machine_id, xml))
# Solar beacon is self powered
sql = 'INSERT INTO activeblocks_connector_power (block_id_1, module_id_1, block_id_2, module_id_2, power) ' \
'VALUES (?, 3, ?, 1, 20)'
self.db.execute(sql, (next_active_id, next_active_id))
# No idea what this does
sql = 'INSERT INTO machine_rtree_rowid (rowid, nodeno) VALUES (?, 1)'
self.db.execute(sql, (machine_id,))
# Insert into machine_rtree seems unhealthy
self.on_save()
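# Illustrative usage sketch (editor-added). The enclosing save-file class is
# defined above this excerpt and its name is not visible here, so an instance
# is passed in ready-made; the file path is a placeholder.
def _save_editor_example(save):
    save.load("MySave.zip")
    print("Editing save: {}".format(save.get_name()))
    # Drop the player 50 m above the north pole, then unlock all recipes.
    save.teleport_player(0.0, save.get_planet_size() + 50.0, 0.0)
    save.unlock_recipes()
    save.create_north_pole_beacon()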
class Container:
"""0-based, player inventory = index 0
contents is 0-based, serialized json-like
first item is probably a version
v:1,0:{package:com.planetnomads, id:59, count:1, props:},1:{...},
"""
stacks = {}
size = 0
db_key = None
def __init__(self, db, save_callback):
self.db = db
self.save_callback = save_callback
def load(self, key):
"""Load container from db
:return bool
"""
sql = "select * from containers where id = ?"
self.db.execute(sql, (key,))
row = self.db.fetchone()
if not row:
return False
self.size = row["size"]
self.stacks = ContentParser.parse_item_stack(row["content"])
self.db_key = key
return True
def save(self):
sorted_keys = sorted(self.stacks)
s = []
for key in sorted_keys:
s.append("{}:{}".format(key, self.stacks[key].get_db_string()))
sql = "update containers set content = ? where id = ?"
self.db.execute(sql, ("v:1," + ",".join(s) + ",", self.db_key))
self.save_callback()
return True
def get_stacks(self):
return self.stacks
def add_stack(self, item, count):
if len(self.stacks) >= self.size:
return False
for i in range(self.size):
stack = self.stacks.get(i, None)
if stack:
continue # skip all stacks that are occupied
self.stacks[i] = Stack(item, count=count)
return True
def __str__(self):
return "Container with {} slots, {} slots used".format(self.size, len(self.stacks))
class ContentParser:
"""
Content is 0-based, serialized json-like. The number shows the slot in the container, empty slots are skipped.
~0.6.8 added a version number as first item
Example: v:1,0:{package:com.planetnomads, id:59, count:1, props:},10:{...},
"""
@staticmethod
def parse_item_stack(content):
# TODO check version number
start = content.find(",")
content = content[start + 1:] # Remove version number because it breaks my nice regexes
regex_val = re.compile(r"[, {](\w+):([^,}]*)[,}]")
regex_slot = re.compile(r"^(\d+):{")
parts = re.split(r"(?<=}),(?=\d+:{|$)", content)
result = {}
for part in parts:
if part == "":
continue
m = regex_slot.match(part)
if m:
key = int(m.group(1))
else:
continue
vars = {}
m = regex_val.findall(part)
if m:
for k, v in m:
if k == "id":
item_id = int(v)
elif k == "count":
vars[k] = int(v)
else:
vars[k] = v
item = Item(item_id)
stack = Stack(item, **vars)
result[key] = stack
return result
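def _content_parser_example():
    # Minimal illustrative sketch (editor-added), using the serialized format
    # documented in the ContentParser docstring above.
    raw = "v:1,0:{package:com.planetnomads, id:59, count:1, props:},"
    stacks = ContentParser.parse_item_stack(raw)
    assert str(stacks[0]) == "Stack of 1 Silver"  # item id 59 maps to Silver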
class Stack:
def __init__(self, item, count=1, package="com.planetnomads", props="False", infinityCount="False"):
self.item = item
self.count = count
self.package = package
self.props = props
self.infinity_count = infinityCount
def get_item_name(self):
return self.item.get_name()
def get_count(self):
return self.count
def get_db_string(self):
start = "{"
end = "}"
data = "package:{}, id:{}, count:{}, infinityCount:{}, props:{}".format(self.package, self.item.item_type,
self.count, self.infinity_count,
self.props)
return start + data + end
def __str__(self):
return "Stack of {} {}".format(self.get_count(), self.item.get_name())
class Item:
names = {
33: "Battery",
49: "Carbon",
51: "Aluminium",
52: "Silicium",
56: "Iron",
57: "Titanium",
58: "Gold",
59: "Silver",
60: "Cobalt",
61: "Uranium",
62: "Xaenite",
63: "Enriched Uranium",
64: "Deuterium",
65: "Xaenite Rod",
67: "Plating",
68: "Composite Plating",
69: "Basic Frame",
70: "Reinforced Frame",
72: "Glass Components",
73: "Standard Electronics",
74: "SuperConductive Electronics",
75: "Quantum Electronics",
76: "Standard Mechanical Components",
77: "SuperAlloy Mechanical",
78: "Composite Parts",
79: "Advanced Composite Parts",
80: "Fabric Mk1",
81: "Fabric Mk2",
82: "ALM",
83: "Advanced ALM",
84: "Super ALM",
86: "Fruitage",
87: "Dirty Water",
88: "Herbs",
89: "Raw Meat",
90: "Purified Water",
91: "Electrolytes Water",
92: "Nutrition Capsules",
93: "Super Food",
95: "Bandages",
96: "Stimulation Injection",
108: "Exploration Suit Mk2",
109: "Exploration Suit Mk3",
110: "Exploration Suit Mk4",
112: "Jetpack Mk2",
113: "Jetpack Mk3",
114: "Jetpack Mk4",
116: "MultiTool Mk2",
117: "MultiTool Mk3",
118: "MultiTool Mk4",
392745: "Biomass Container",
9550358: "Seeds",
11691828: "Sleeping Bag",
}
def __init__(self, item_type: int):
self.item_type = item_type
def get_name(self):
if self.item_type in self.names:
return self.names[self.item_type]
return "unknown item type {}".format(self.item_type)
class Machine:
"""
0 16000 0 0 0 0 = north pole at sea level
0 -16000 0 0 0 180 = south pole at sea level, "upside down"
planet diameter is 32km
"""
def __init__(self, db_data, db):
self.identifier = db_data['id']
self.xml = db_data['data']
self.transform = db_data['transform']
self.loaded = False
self.grid = [] # Only one grid per machine
self.changed = False
self.active_block_ids = []
self.db = db
self.name = None
self.type = None
root = ETree.fromstring(self.xml)
for node in root:
if node.tag == "Grid":
self.grid.append(Grid(node))
else:
raise IOError("Unexpected element %s in machine" % node.tag)
self.active_block_ids = self.grid[0].get_active_block_ids()
self.active_block_data = {}
@property
def grids(self):
return self.grid
def set_active_blocks(self, data):
for row in data:
if row["id"] not in self.active_block_ids:
continue
self.active_block_data[row["id"]] = ActiveBlock(row["data"])
def randomize_color(self):
for g in self.grids:
g.randomize_color()
self.changed = True
def set_color(self, color, replace=None):
for g in self.grids:
g.set_color(color, replace)
self.changed = True
def get_xml_string(self):
"""Save the current machine, replaces original xml"""
xml = ETree.Element("MachineSaveData")
for g in self.grid:
g.build_xml(xml)
return ETree.tostring(xml, "unicode")
def is_changed(self):
return self.changed
def get_changed_active_blocks(self):
result = {}
for aid in self.active_block_data:
active_block = self.active_block_data[aid]
if active_block.changed:
result[aid] = active_block
return result
def __str__(self):
return "Machine {} ({})".format(
self.get_name_or_id(),
self.get_type()
)
def is_grounded(self):
for g in self.grids: # TODO only 1 grid per machine now
if g.is_grounded():
return True
return False
def teleport(self, distance: int, target):
"""Teleport machine over/under the target."""
rot_x, rot_y, rot_z = self.get_rotation()
(x, y, z) = self.get_coordinates()
(target_x, target_y, target_z) = target.get_coordinates()
distance_to_planet_center = sqrt(target_x ** 2 + target_y ** 2 + target_z ** 2)
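        # Scale the target's position vector radially: moving `distance` units
        # farther from the planet centre puts the machine directly above (or
        # below, for a negative distance) the target.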
factor = 1 + distance / distance_to_planet_center
target_x2 = target_x * factor # TODO use np
target_y2 = target_y * factor
target_z2 = target_z * factor
self.transform = "{:0.3f} {:0.3f} {:0.3f} {} {} {}".format(target_x2, target_y2, target_z2, rot_x, rot_y, rot_z)
# Use the exact difference to move subgrids, this is important or the object will disappear
difference = (target_x2 - x, target_y2 - y, target_z2 - z)
for g in self.grid:
g.move_by(difference, self.active_block_data)
self.changed = True
def get_rotation(self):
"""Get rotation as tuple of string"""
        (x, y, z, rot_x, rot_y, rot_z) = self.transform.split(" ")
        return rot_x, rot_y, rot_z
def get_coordinates(self):
"""Get coords as tuple of x, y, z"""
        (x, y, z, rot_x, rot_y, rot_z) = self.transform.split(" ")
        return [float(i) for i in (x, y, z)]
def get_name_or_id(self):
n = self.get_name()
if n:
return n
return self.identifier
def get_type(self):
if self.type:
return self.type
self.type = "Construct"
if not self.is_grounded():
if self.has_cockpit():
self.type = "Vehicle"
return "Vehicle"
# If it has no cockpit it's random scattered blocks
return "Construct"
if self.has_generator():
self.type = "Base"
return "Base"
return "Construct"
def get_name(self):
if self.name is not None:
return self.name
for g in self.grids:
name = g.get_name(self.active_block_data)
if name:
self.name = name
return name
self.name = ""
return ""
def has_cockpit(self):
return self.grids[0].has_cockpit()
def has_generator(self):
return self.grids[0].has_generator()
class XmlNode:
"""Basic XML node"""
def __init__(self, node):
self.type = node.tag
self._attribs = OrderedDict()
self._children = []
for a in node.attrib:
self._attribs[a] = node.attrib[a]
expected_children = self.get_expected_children_types()
for item in node:
if item.tag in expected_children:
self._children.append(globals()[item.tag](item)) # Create object from class name
else:
print("Unexpected children type %s" % item.tag)
def get_attribs(self):
"""Get attributes in the original order, much easier to diff xml this way"""
return self._attribs
def get_children(self):
return self._children
def build_xml(self, xml):
sub = ETree.SubElement(xml, self.type, self.get_attribs())
for c in self._children:
c.build_xml(sub)
def get_expected_children_types(self):
return []
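# MachineNode implements duck-typed tree queries: each operation is attempted
# on every child, and AttributeError is silently swallowed for node types
# that do not support it.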
class MachineNode(XmlNode):
def get_active_block_ids(self):
res = []
for c in self._children:
try:
res.extend(c.get_active_block_ids())
except AttributeError:
pass # Class doesn't have active blocks
return res
def is_grounded(self):
for c in self.get_children():
try:
if c.is_grounded():
return True
except AttributeError:
pass
return False
def has_cockpit(self):
for x in self.get_children():
try:
if x.has_cockpit():
return True
except AttributeError:
pass
return False
def has_generator(self):
for c in self.get_children():
try:
if c.has_generator():
return True
except AttributeError:
pass
return False
def has_hoverjack(self):
for c in self.get_children():
try:
if c.has_hoverjack():
return True
except AttributeError:
pass
return False
def get_name(self, active_block_data):
for c in self.get_children():
try:
name = c.get_name(active_block_data)
if name != "":
return name
except AttributeError:
pass
return ""
def get_expected_children_types(self):
return ['Grid']
def move_by(self, vector, active_block_data):
for c in self._children:
try:
c.move_by(vector, active_block_data)
except AttributeError:
pass
def randomize_color(self):
for c in self._children:
try:
c.randomize_color()
except AttributeError:
pass
def set_color(self, color, replace):
for c in self._children:
try:
c.set_color(color, replace)
except AttributeError:
pass
class Blocks(MachineNode):
def get_expected_children_types(self):
return ['Block']
class BasePosition(XmlNode):
def move_by(self, vector, active_blocks):
x = float(self._attribs["X"])
y = float(self._attribs["Y"])
z = float(self._attribs["Z"])
self._attribs["X"] = "{:0.5f}".format(x + vector[0])
self._attribs["Y"] = "{:0.5f}".format(y + vector[1])
self._attribs["Z"] = "{:0.5f}".format(z + vector[2])
class BaseRotation(XmlNode):
pass
class BaseBounds(XmlNode):
def move_by(self, vector, active_blocks):
x = float(self._attribs["MinX"])
y = float(self._attribs["MinY"])
z = float(self._attribs["MinZ"])
self._attribs["MinX"] = "{:0.5f}".format(x + vector[0])
self._attribs["MinY"] = "{:0.5f}".format(y + vector[1])
self._attribs["MinZ"] = "{:0.5f}".format(z + vector[2])
x = float(self._attribs["MaxX"])
y = float(self._attribs["MaxY"])
z = float(self._attribs["MaxZ"])
self._attribs["MaxX"] = "{:0.5f}".format(x + vector[0])
self._attribs["MaxY"] = "{:0.5f}".format(y + vector[1])
self._attribs["MaxZ"] = "{:0.5f}".format(z + vector[2])
class DistancePhysicsFreezeData(XmlNode):
pass
class Pos(XmlNode):
pass
class Rot(XmlNode):
pass
class Col(XmlNode):
def randomize_color(self):
self._attribs["r"] = str(random.randrange(0, 255))
self._attribs["g"] = str(random.randrange(0, 255))
self._attribs["b"] = str(random.randrange(0, 255))
def set_color(self, color, replace):
if replace:
if int(self._attribs["r"]) != replace[0]:
return
if int(self._attribs["g"]) != replace[1]:
return
if int(self._attribs["b"]) != replace[2]:
return
self._attribs["r"] = str(int(color[0]))
self._attribs["g"] = str(int(color[1]))
self._attribs["b"] = str(int(color[2]))
class Grid(MachineNode):
"""Every machine has 1 Grid which contains 1 Blocks"""
def get_expected_children_types(self):
return ['Blocks', 'BasePosition', 'BaseRotation', 'BaseBounds', 'DistancePhysicsFreezeData']
class SubGrid(Grid):
pass
class ActiveBlock:
def __init__(self, xml):
self.root = ETree.fromstring(xml)
self.name = self.root.attrib.get("Name", "")
self.changed = False
def get_xml_string(self):
return ETree.tostring(self.root, "unicode")
def get_name(self):
return self.name
def move_by(self, vector):
for node in self.root:
if node.tag != "Module":
continue
if node.attrib["Type"] != "PositionModule":
continue
position = node[0][0].text
x, y, z = [float(i) for i in position.split(";")]
node[0][0].text = "{:0.3f};{:0.3f};{:0.3f}".format(x + vector[0], y + vector[1], z + vector[2])
self.changed = True
class Block(MachineNode):
types = {
1: "Full Armor Block",
2: "Corner Armor Block",
3: "Compact Battery Rack",
4: "Cockpit 2x3",
5: "Reinforced Wall",
6: "Armor Corner Slope - Inverted",
7: "Armor Corner Slope - Long Inverted",
8: "Armor Corner Slope",
9: "Armor Slope Long",
10: "Armor Slope Corner (Long)",
11: "Armor Slope",
#12 active
13: "Conveyor L-Section",
14: "Conveyor",
15: "Conveyor T-Section",
16: "Conveyor X-Section",
        #17 probably active block
18: "Wheel",
19: "Compact Container",
20: "Bio Generator",
21: "Reinforced Wall with Light",
22: "Reinforced Wall - Short",
23: "Reinforced Wall Corner",
24: "Reinforced Wall Outer Corner",
25: "Base Foundation (double height)",
26: "Raised Floor",
#27
28: "Compact Medbay",
29: "Medium Refinery",
#30
#31
32: "Reinforced Wall with Door",
33: "Ceiling Panel",
34: "Suspension",
        #35 probably active
36: "Jack tool",
37: "Hover Jack",
38: "Railing",
39: "Short Railing",
40: "Stairs",
41: "Beacon",
42: "Uranium Generator",
43: "Ceiling Light",
44: "Indoor Light",
45: "Search Light - Front Mount",
46: "Search Light - Top Mount",
47: "Large Container",
48: "Fence",
49: "Fence Corner",
50: "Ramp",
51: "Inner Wall with Doors",
52: "Reinforced Wall Exterior/Interior Joint",
53: "Short inner wall",
54: "Inner Wall",
55: "Windowed Outer Wall",
56: "Solar Beacon",
57: "Escape pod",
61: "Base Foundation",
64: "Emergency 3D printer",
66: "Hinge",
68: "Rotating Plate",
71: "Item Dispenser",
73: "Mining Machine",
76: "Medium Armory",
78: "Medium Medbay",
79: "Escape Pod (broken)", # 3k health
80: "Radar", # 300 health
81: "Winch",
82: "Winch Shackle",
83: "Thruster", # 300 health
84: "Tank", # 250 health
85: "Big Tank", # 750 health
86: "Sloped Arc Corner",
87: "Corner Arc",
88: "Arc Block",
#89
90: "Wreck Container",
91: "Wreck Beacon",
92: "Cockpit 3x3",
93: "Rounded Cockpit 2x3",
94: "Buggy Wheel",
95: "Mobile Base Wheel",
96: "Large Suspension",
97: "Rounded Cockpit 3x3",
98: "Switchboard",
100: "Hover Pad",
101: "Floating Foundation",
114: "Air Blade",
126: "Glassed Cockpit 3x3",
}
def is_grounded(self):
return "Ground" in self._attribs and self._attribs["Ground"] == "true"
def get_active_block_id(self):
if "ActiveID" in self._attribs:
return int(self._attribs["ActiveID"])
return None
def get_active_block_ids(self):
result = []
active_id = self.get_active_block_id()
if active_id:
result.append(active_id)
result.extend(super(Block, self).get_active_block_ids())
return result
def get_active_block(self, active_blocks):
aid = self.get_active_block_id()
        if aid:
            if aid in active_blocks:
                return active_blocks[aid]
            # Avoid crash if active block did not load. Why is it missing though?
            print("Active block %i not found" % aid)
return None
def get_name(self, active_blocks):
active_block = self.get_active_block(active_blocks) # type: ActiveBlock
if active_block:
name = active_block.get_name()
if name:
return name
return super().get_name(active_blocks)
def has_cockpit(self):
if self._attribs["ID"] in ("4", "92", "93", "97", "126"):
return True
return super().has_cockpit()
def has_generator(self):
if self._attribs["ID"] in ("20", "42"):
return True
return super().has_generator()
def has_hoverjack(self):
if self._attribs["ID"] == "37":
return True
return super().has_hoverjack()
def get_expected_children_types(self):
return ['Pos', 'Col', 'Rot', 'SubGrid']
def move_by(self, vector, active_blocks):
super().move_by(vector, active_blocks)
active_block = self.get_active_block(active_blocks)
if not active_block:
return
active_block.move_by(vector)
|
mit
| -1,921,983,318,273,434,600
| 32.064865
| 189
| 0.534935
| false
| 3.63804
| false
| false
| false
|
sidorov-si/TADStates
|
calc_enr.py
|
1
|
6053
|
#!/usr/bin/env python
"""
Calculate enrichment of regions with states using ChromHMM.
Usage:
calc_enr.py (-r <regions_file> | -R <directory_with_regions_files>) (-s <segmentation_directory> | -S <directory_with_segmentation_directories>) -c <ChromHMM_directory> -o <output_directory>
Options:
-h --help Show this screen.
--version Show version.
-r <regions_file> BED file with regions to calc enrichments for.
-R <directory_with_regions_files> Directory with BED files containing regions to calc enrichments for.
-s <segmentation_directory> Directory with segmentation produced by ChromHMM.
-S <directory_with_segmentation_directories> Directory with directories containing segmentations.
-c <ChromHMM_directory> ChromHMM directory.
-o <output_directory> Output directory name.
"""
import sys
print
modules = ["docopt", "os", "subprocess"]
exit_flag = False
for module in modules:
try:
__import__(module)
except ImportError:
exit_flag = True
sys.stderr.write("Error: Python module " + module + " is not installed.\n")
if exit_flag:
sys.stderr.write("You can install these modules with a command: pip install <module>\n")
sys.stderr.write("(Administrator privileges may be required.)\n")
sys.exit(1)
from docopt import docopt
from os.path import basename
from os.path import splitext
from os.path import join
from os.path import exists
from os.path import isdir
from os.path import isfile
from os import makedirs
from os import listdir
from subprocess import call
from sys import stdout
def calc_enr(regions, segm_files, png_directory, svg_directory, txt_directory, chromhmm_directory):
regions_part = splitext(basename(regions))[0]
for segm_file in segm_files:
print
print 'Calc enrichment for', basename(regions), 'and', basename(segm_file), '...'
stdout.flush()
segm_part = splitext(basename(segm_file))[0]
prefix = regions_part + '_' + segm_part
command_line_list = ['java', '-mx1600M', '-jar', join(chromhmm_directory, 'ChromHMM.jar'), \
'OverlapEnrichment', segm_file, regions, prefix]
code = call(command_line_list)
if code != 0:
print 'Something went wrong!'
else:
print 'Done.'
call(['mv', prefix + '.png', png_directory])
call(['mv', prefix + '.svg', svg_directory])
call(['mv', prefix + '.txt', txt_directory])
if __name__ == '__main__':
arguments = docopt(__doc__, version='calc_enr 0.2')
if arguments["-r"] != None:
regions = arguments["-r"]
if not exists(regions):
print "Error: Can't find BED file with regions: no such file '" + \
regions + "'. Exit.\n"
sys.exit(1)
if not isfile(regions):
print "Error: BED file with regions must be a regular file. " + \
"Something else given. Exit.\n"
sys.exit(1)
else:
regions = arguments["-R"].rstrip('/')
if not exists(regions):
print "Error: Can't find directory with region BED files: no such directory '" + \
regions + "'. Exit.\n"
sys.exit(1)
if not isdir(regions):
print "Error: Directory with region BED files must be a directory:). " + \
"Something else given. Exit.\n"
sys.exit(1)
if arguments["-s"] != None:
segm_dir = arguments["-s"].rstrip('/')
if not exists(segm_dir):
print "Error: Can't find directory with segmentation: no such directory '" + \
segm_dir + "'. Exit.\n"
sys.exit(1)
if not isdir(segm_dir):
print "Error: Directory with segmentation must be a directory:). " + \
"Something else given. Exit.\n"
sys.exit(1)
segm_directory = None
else:
segm_dir = None
segm_directory = arguments["-S"].rstrip('/')
if not exists(segm_directory):
print "Error: Can't find directory with directories containing segmentations: " + \
"no such directory '" + segm_directory + "'. Exit.\n"
sys.exit(1)
if not isdir(segm_directory):
print "Error: Directory with directories containing segmentations must " + \
"be a directory:). Something else given. Exit.\n"
sys.exit(1)
chromhmm_directory = arguments["-c"].rstrip('/')
if not exists(chromhmm_directory):
print "Error: Can't find ChromHMM directory: no such directory '" + \
chromhmm_directory + "'. Exit.\n"
sys.exit(1)
if not isdir(chromhmm_directory):
print "Error: ChromHMM directory must be a directory:). Something else given. Exit.\n"
sys.exit(1)
output_directory = arguments["-o"].rstrip('/')
if not exists(output_directory):
makedirs(output_directory)
png_directory = join(output_directory, 'PNG')
svg_directory = join(output_directory, 'SVG')
txt_directory = join(output_directory, 'TXT')
if not exists(png_directory):
makedirs(png_directory)
if not exists(svg_directory):
makedirs(svg_directory)
if not exists(txt_directory):
makedirs(txt_directory)
if segm_dir != None: # there is only one segmentation
segm_dirs = [segm_dir]
else:
segm_dirnames = sorted(listdir(segm_directory))
segm_dirs = [join(segm_directory, d) for d in segm_dirnames]
segm_files = []
for dir in segm_dirs:
filenames_list = listdir(dir)
segm_filenames = [f for f in filenames_list if 'segments' in f]
segm_files.extend([join(dir, f) for f in segm_filenames])
calc_enr(regions, segm_files, png_directory, svg_directory, txt_directory, chromhmm_directory)
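# Example invocation (editor-added; file and directory names are illustrative):
#   python calc_enr.py -r tads.bed -s segmentation_dir -c ChromHMM -o enrichments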
|
gpl-2.0
| -5,044,784,646,270,650,000
| 38.822368
| 192
| 0.594581
| false
| 3.895109
| false
| false
| false
|
SopaXorzTaker/pypowder
|
thepowdertoy/thepowdertoy.py
|
1
|
18583
|
import hashlib
import requests
# Servers
DEFAULT_SERVER = "powdertoy.co.uk"
DEFAULT_STATIC_SERVER = "static.powdertoy.co.uk"
# Check for version updates
UPDATE_VERSION_STABLE = "Stable"
UPDATE_VERSION_BETA = "Beta"
UPDATE_VERSION_SNAPSHOT = "Snapshot"
# User elevations
USER_ELEVATION_NONE = ""
USER_ELEVATION_MOD = "Mod"
USER_ELEVATION_ADMIN = "Admin"
class LoginError(Exception):
pass
class ServerError(Exception):
pass
class Notification(object):
def __init__(self, link, text):
"""
Creates a Notification object.
:param link: the link of the notification
:param text: the text of the notification
"""
self.link, self.text = link, text
class Comment(object):
def __init__(self, username, user_id, text, timestamp, gravatar=None):
"""
Creates a new Comment object.
:param username: the username
:param user_id: the ID of the user
:param text: the text
:param timestamp: the timestamp of the comment
:param gravatar: the Gravatar ID of the user
"""
self.username, self.user_id, self.text, self.timestamp, self.gravatar = \
username, user_id, text, timestamp, gravatar
class User(object):
def __init__(self, username, user_id, avatar, age, location, biography, website, register_time, saves, forum):
self.username, self.user_id, self.avatar, self.age, self.location, self.biography, \
self.website, self.register_time, self.saves, self.forum = \
username, user_id, avatar, age, location, biography, website, register_time, saves, forum
class Save(object):
def __init__(self, parent, user, save_id, score, my_score, name,
description, date_created, date, published, favorite, views, version, tags):
"""
Creates a new Save object.
:param parent: ThePowderToy class that created the object
:param user: The User object
:param save_id: the ID of the save
:param score: the score of the save
:param my_score: my score of the save
:param name: the name of the save
:param description: the description of the save
:param date_created: the date of creation of the save
:param date: the last update of the save
:param published: whether the save is published
:param favorite: whether the save is in favorite
:param views: the count of views
:param version: the version which created the save
:param tags: the tags
"""
self._parent, self.user, self.save_id, self.score, self.my_score, self.name, self.description, \
self.date_created, self.date, self.published, self.favorite, self.views, self.version, \
self.tags = parent, user, save_id, score, my_score, name, description, date_created, date, \
published, favorite, views, version, tags
self._comments = None
self._save_data = None
def tag(self, tag, add=True):
"""
Tag a save.
:param tag: the tag to be added
:param add: if True (the default), the tag is to be added, else removed.
"""
self._parent.tag(self.save_id, tag, add)
if add:
self.tags.append(tag)
else:
self.tags.remove(tag)
def comment(self, comment):
"""
Add a comment to a save.
:param comment: the text of the comment
"""
self._parent.add_comment(self.save_id, comment)
self._comments = None
def vote(self, up=True):
"""
Vote for a save.
:param up: if True (the default), the save is voted up, otherwise down.
"""
self._parent.vote(self.save_id, up)
upvotes, downvotes = self.score
if up:
upvotes += 1
else:
downvotes += 1
self.score = (upvotes, downvotes)
def remove(self):
"""
Remove a save.
"""
self._parent.remove_save(self.save_id)
def publish(self, publish=True):
"""
Publishes or unpublishes a save
:param publish: whether to publish the save
"""
self._parent.publish_save(self.save_id, publish)
self.published = publish
@property
def comments(self):
if not self._comments:
self._comments = self._parent.get_comments(self.save_id)
return self._comments
@property
def save_data(self):
if not self._save_data:
self._save_data = self._parent.get_save_data(self.save_id)
return self._save_data
class ThePowderToy(object):
def vote(self, save_id, up=True):
"""
Vote for a save.
:param save_id: the ID of the save
:param up: if True (the default), the save is voted up, otherwise down.
"""
if not self._user_id:
raise ServerError("Not authorized")
vote_direction = "Up" if up else "Down"
req = requests.post("http://" + self._server + "/Vote.api", data={
"ID": str(save_id),
"Action": vote_direction
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
req.raise_for_status()
if not req.text == "OK":
raise ServerError(req.text)
if "Error" in req.text:
raise ServerError(req.text)
def tag(self, save_id, tag, add=True):
"""
Tag a save.
:param save_id: the ID of the save
:param tag: the tag to be added
:param add: if True (the default), the tag is to be added, else removed.
"""
if not self._user_id:
raise ServerError("Not authorized")
operation = "add" if add else "delete"
req = requests.get("http://" + self._server + "/Browse/EditTag.json", params={
"Op": operation,
"ID": str(save_id),
"Tag": tag,
"Key": self._session_key
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
req.raise_for_status()
if "Error" in req.text:
raise ServerError(req.text)
def get_comments(self, save_id, comment_start=0, comment_count=-1):
"""
Get a save's comments
:param save_id: The ID of the save
:param comment_start: The starting comment
:param comment_count: The final comment
:return: an array of Comment
"""
comment_req = requests.get("http://" + self._server + "/Browse/Comments.json", params={
"ID": str(save_id),
"Start": comment_start,
"Count": comment_count
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
} if self._user_id else None)
comment_req.raise_for_status()
comments = []
comment_json = comment_req.json()
for comment in comment_json:
comments.append(Comment(comment["Username"], comment["UserID"], comment["Text"], comment["Timestamp"],
comment["Gravatar"] if "Gravatar" in comment else None))
return comments
def search_saves(self, search_query="", start=0, count=-1, sort="", category=""):
"""
Search for saves.
:param search_query: the search query
:param start: the starting result
:param count: count of results
:param sort: sorting of results
:param category: the category name
:return: list of Save objects
"""
if sort == "date":
search_query += " sort:%s" % sort
req = requests.get("http://" + self._server + "/Browse.json", params={
"Search_Query": search_query,
"Category": category,
"Start": start,
"Count": count
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
} if self._user_id else None)
req.raise_for_status()
json = req.json()
if "Status" in json and json["Status"] == 1:
raise ServerError(json["Error"])
output = []
saves = json["Saves"]
for save in saves:
output.append(self.get_save(save["ID"]))
return output
def get_save(self, save_id, date=None):
"""
Get the save.
:param save_id: the ID of the save
:param date: the date of interest
:return: the save
"""
req = requests.get("http://" + self._server + "/Browse/View.json", params={
"ID": str(save_id),
"Date": str(date) if date else None
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
} if self._user_id else None)
req.raise_for_status()
if "Error" in req.text:
raise ServerError(req.text)
json = req.json()
return Save(self, self.get_user_by_name(json["Username"]), save_id, (json["ScoreUp"], json["ScoreDown"]),
json["ScoreMine"] if "ScoreMine" in json else 0, json["Name"], json["Description"],
json["DateCreated"], json["Date"], json["Published"], json["Favourite"], json["Views"],
json["Version"], json["Tags"] if "Tags" in json else [])
def get_user_by_id(self, user_id):
"""
Get a user's profile by ID.
:param user_id: the ID of the user
:return: the User object
"""
req = requests.get("http://" + self._server + "/User.json", params={
"ID": user_id
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
} if self._user_id else None)
req.raise_for_status()
if "Error" in req.text:
raise ServerError(req.text)
json = req.json()
user = json["User"]
saves = json["Saves"] if "Saves" in json else None
forum = json["Forum"] if "Forum" in json else None
return User(user["Username"], user["ID"], user["Avatar"], user["Age"], user["Location"], user["Biography"],
user["Website"], user["RegisterTime"] if "RegisterTime" in user else 0, saves, forum)
def get_user_by_name(self, username):
"""
Get a user's profile by username.
:param username: the name of the user
:return: the User object
"""
req = requests.get("http://" + self._server + "/User.json", params={
"Name": username
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
} if self._user_id else None)
req.raise_for_status()
if "Error" in req.text:
raise ServerError(req.text)
json = req.json()
user = json["User"]
saves = json["Saves"] if "Saves" in json else None
forum = json["Forum"] if "Forum" in json else None
return User(user["Username"], user["ID"], user["Avatar"], user["Age"], user["Location"], user["Biography"],
user["Website"], user["RegisterTime"] if "RegisterTime" in user else 0, saves, forum)
def upload_save(self, name, description, data, publish=True):
"""
Upload a save
:param name: the name of the save
:param description: the description of the save
:param data: the raw data of the save
:param publish: whether to publish the save or not
:return: the uploaded save, downloaded from the server as a Save object
"""
if not self._user_id:
raise ServerError("Not authorized")
if not data:
raise ValueError("Empty save")
req = requests.post("http://" + self._server + "/Save.api", data={
"Name": name,
"Description": description,
"Publish": "Public" if publish else "Private",
}, files=[
("Data", ("save.bin", data))
], headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
req.raise_for_status()
return self.get_save(int(req.text[3:].strip()))
def remove_save(self, save_id):
"""
Remove a save.
:param save_id: the ID of the save to be deleted
"""
if not self._user_id:
raise ServerError("Not authorized")
req = requests.get("http://" + self._server + "/Browse/Delete.json", params={
"ID": save_id,
"Mode": "Delete",
"Key": self._session_key
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
req.raise_for_status()
status = req.json()["Status"]
if not status:
raise ServerError(req.json()["Error"])
def publish_save(self, save_id, publish=True):
"""
Publishes or unpublishes a save
:param save_id: the ID of the save
:param publish: whether to publish the save
"""
if not self._user_id:
raise ServerError("Not authorized")
if publish:
# We have to do that twice to recheck the status.
req = None
for i in xrange(2):
req = requests.post("http://" + self._server + "/Browse/View.json", params={
"ID": save_id,
"Key": self._session_key
}, data={
"ActionPublish": " "
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
req.raise_for_status()
if not req.json()["Published"]:
raise ServerError("Can't publish the save")
else:
req = requests.get("http://" + self._server + "/Browse/Delete.json", params={
"ID": save_id,
"Mode": "Unpublish",
"Key": self._session_key
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
req.raise_for_status()
status = req.json()["Status"]
if not status:
raise ServerError(req.json()["Error"])
def add_comment(self, save_id, comment):
"""
Add a comment to a save.
:param save_id: the ID of the save
:param comment: the text of the comment
"""
if not self._user_id:
raise ServerError("Not authorized")
req = requests.post("http://" + self._server + "/Browse/Comments.json", params={
"ID": save_id,
}, data={
"Comment": comment
}, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
})
if "Error" in req.text:
            raise ServerError(req.text)
req.raise_for_status()
def get_save_data(self, save_id, timestamp=None):
"""
Get raw save data.
:param save_id: the ID of the save
:param timestamp: the timestamp
:return: raw save data
"""
if timestamp:
url = "http://" + self._static_server + "/" + str(save_id) + "_" + str(timestamp) + ".cps"
else:
url = "http://" + self._static_server + "/" + str(save_id) + ".cps"
req = requests.get(url, headers={
"X-Auth-User-Id": self._user_id,
"X-Auth-Session-Key": self._session_id
} if self._user_id else None, stream=True)
req.raise_for_status()
return req.raw.data
def _authenticate(self):
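        # The login hash the server expects is md5(username + "-" + md5(password)),
        # hex-encoded at both stages.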
user_hash = hashlib.md5(self._username + "-" + hashlib.md5(self._password).hexdigest()).hexdigest()
req = requests.post("http://" + self._server + "/Login.json", data={
"Username": self._username,
"Hash": user_hash
})
req.raise_for_status()
json = req.json()
if json["Status"] == 0:
if "Error" in json:
raise LoginError(json["Error"])
else:
raise LoginError
user_elevation = json["Elevation"]
if user_elevation == "Admin":
self._user_elevation = USER_ELEVATION_ADMIN
elif user_elevation == "Mod":
self._user_elevation = USER_ELEVATION_MOD
else:
self._user_elevation = USER_ELEVATION_NONE
self._user_id = json["UserID"]
self._notifications = [Notification(notification["Link"], notification["Text"])
for notification in json["Notifications"]]
self._session_id = json["SessionID"]
self._session_key = json["SessionKey"]
def _check_updates(self):
req = requests.get("http://" + self._server + "/Startup.json", auth=(self._user_id, self._session_id))
req.raise_for_status()
json = req.json()
self._latest_version = json["Updates"][self._update_version]
self._message_of_the_day = json["MessageOfTheDay"]
def get_latest_version(self):
"""
Get the latest version as a dictionary
:return: the latest version with description
"""
return self._latest_version
def get_message_of_the_day(self):
"""
Get the message of the day
:return: the message of the day
"""
return self._message_of_the_day
def __init__(self, credentials=(None, None), server=DEFAULT_SERVER, static_server=DEFAULT_STATIC_SERVER,
update_version=UPDATE_VERSION_STABLE):
"""
Creates a ThePowderToy object.
:param credentials: a tuple of username and password
:param server: the URL of the main server
:param static_server: the URL of the static server
"""
self._user_id = None
self._session_id = None
self._session_key = None
self._username = None
self._user_elevation = None
self._server, self._static_server, self._update_version = server, static_server, update_version
self._username, self._password = credentials
if self._username:
self._authenticate()
self._check_updates()
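def _powder_toy_example():
    # Illustrative sketch (editor-added). Anonymous access is enough for
    # browsing; pass (username, password) credentials to vote, comment,
    # tag or upload. The search query is a placeholder.
    client = ThePowderToy()
    for save in client.search_saves("city", count=3):
        print("%s by %s (+%d/-%d)" % (save.name, save.user.username,
                                      save.score[0], save.score[1]))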
|
gpl-3.0
| 3,383,186,490,061,603,000
| 30.49661
| 115
| 0.544745
| false
| 3.986057
| false
| false
| false
|
jhallock7/SparseBayes-Python
|
SB2_ParameterSettings.py
|
1
|
3373
|
# The following is a Python translation of a MATLAB file originally written principally by Mike Tipping
# as part of his SparseBayes software library. Initially published on GitHub on July 21st, 2015.
# SB2_PARAMETERSETTINGS User parameter initialisation for SPARSEBAYES
#
# SETTINGS = SB2_PARAMETERSETTINGS(parameter1, value1, parameter2, value2,...)
#
# OUTPUT ARGUMENTS:
#
# SETTINGS An initialisation structure to pass to SPARSEBAYES
#
# INPUT ARGUMENTS:
#
# Optional number of parameter-value pairs to specify some, all, or
# none of the following:
#
# BETA (Gaussian) noise precision (inverse variance)
# NOISESTD (Gaussian) noise standard deviation
# RELEVANT Indices of columns of basis matrix to use at start-up
# MU (WEIGHTS) Corresponding vector of weights to RELEVANT
# ALPHA Corresponding vector of hyperparameter values to RELEVANT
#
# EXAMPLE:
#
# SETTINGS = SB2_ParameterSettings('NoiseStd',0.1)
#
# NOTES:
#
# 1. If no input arguments are supplied, defaults (effectively an
# empty structure) will be returned.
#
# 2. If both BETA and NOISESTD are specified, BETA will take
# precedence.
#
# 3. RELEVANT may be specified without WEIGHTS or ALPHA (these will be
# sensibly initialised later).
#
# 4. If RELEVANT is specified, WEIGHTS may be specified also without ALPHA.
#
#
# Copyright 2009, Vector Anomaly Ltd
#
# This file is part of the SPARSEBAYES library for Matlab (V2.0).
#
# SPARSEBAYES is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# SPARSEBAYES is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with SPARSEBAYES in the accompanying file "licence.txt"; if not, write to
# the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston,
# MA 02110-1301 USA
#
# Contact the author: m a i l [at] m i k e t i p p i n g . c o m
#
def SB2_ParameterSettings(*args):
# Ensure arguments are supplied in pairs
if len(args) % 2 != 0:
raise Exception('Arguments to SB2_ParameterSettings should be (property, value) pairs')
# Any settings specified?
    numSettings = len(args) // 2
## Defaults - over-ridden later if requested
# Two options for setting noise level (purely for convenience)
# - if 'beta' set, 'noiseStdDev' will be over-ridden
SETTINGS = {
'BETA' : [],
'NOISESTD' : [],
'RELEVANT' : [],
'MU' : [],
'ALPHA' : []
}
## Requested overrides
# Parse string/variable pairs
for n in range(numSettings):
property_ = args[n*2]
value = args[n*2 + 1]
if property_ not in SETTINGS:
raise Exception('Unrecognised initialisation property: {0}'.format(property_))
else:
SETTINGS[property_] = value
return SETTINGS
|
gpl-2.0
| -5,058,282,655,556,856,000
| 31.432692
| 103
| 0.660243
| false
| 3.670294
| false
| false
| false
|
nickwolensky/mayaunittest
|
testcode_inspect_ui.py
|
1
|
4789
|
"""User interface to help test maya tool code. Will work from within Maya UI as
well as standalone and can be run from an external interpreter such as mayapy.
Example::
Todo:
"""
import os
import sys
from nw_tools.Qt import QtWidgets, QtGui, QtCore
from nw_tools.ui.tools import get_maya_window, SuperWindow
import runmayatests
class Tree(QtWidgets.QTreeView):
def __init__(self, parent):
super(Tree, self).__init__(parent)
self.parent = parent
self.create_actions()
def create_actions(self):
self.walk_up_action = QtWidgets.QAction('Walk up directory', self)
self.walk_up_action.triggered.connect(self.parent.walk_up)
def mouseDoubleClickEvent(self, event):
self.parent.walk_down()
def contextMenuEvent(self, event):
menu = QtWidgets.QMenu(self)
menu.addAction(self.walk_up_action)
menu.exec_(event.globalPos())
class TestCodeUI(SuperWindow):
# Class constants
TITLE = 'Maya Unittest Pro - Nick Wolensky, 2017'
WIDTH = 800
HEIGHT = 400
# Should only be applicable if application is run from within Maya
DOCKABLE = True
def __init__(self, parent=None):
self.current_dir = QtCore.QDir.rootPath()
super(TestCodeUI, self).__init__(parent)
def _init_ui(self):
SuperWindow._init_ui(self)
self._add_widgets()
self._add_signals()
def add_toolbar_items(self):
SuperWindow.add_toolbar_items(self)
self.open_dir_action = QtWidgets.QAction('Open...', self)
self.run_action = QtWidgets.QAction('Run...', self)
self.stop_action = QtWidgets.QAction('Stop', self)
for action in [self.open_dir_action,
self.run_action,
self.stop_action]:
self.toolbar.addAction(action)
def _add_widgets(self):
split = QtWidgets.QSplitter(self.centralWidget())
split.resize(self.WIDTH, self.HEIGHT)
split.setHandleWidth(3)
split.setContentsMargins(2, 2, 2, 2)
btn_widg = QtWidgets.QWidget()
vbox = QtWidgets.QVBoxLayout()
btn_row = QtWidgets.QHBoxLayout()
btn_row.setContentsMargins(0, 0, 0, 0)
btn_widg.setLayout(vbox)
vbox.setContentsMargins(0, 0, 2, 0)
# Create a tree outliner for package that I want to recursively go
# through and test | Left Panel
self.model = QtWidgets.QFileSystemModel()
self.model.setRootPath(self.current_dir)
self.tree = Tree(self)
self.tree.setModel(self.model)
self.tree.hideColumn(1)
self.tree.hideColumn(2)
self.tree.hideColumn(3)
self._update_system_tree(self.current_dir)
# Create output section that tests get printed out to | Right Panel
text_display = QtWidgets.QLineEdit('This is going to be the place '
'where I display output from the '
'unittests')
text_display.setReadOnly(True)
text_display.setAlignment(QtCore.Qt.AlignTop)
self.btn = QtWidgets.QPushButton('^')
self.btn.setFixedWidth(25)
btn_row.addWidget(self.btn)
btn_row.addStretch(0)
vbox.addLayout(btn_row)
vbox.addWidget(self.tree)
split.addWidget(btn_widg)
split.addWidget(text_display)
split.setStretchFactor(0, 0)
split.setStretchFactor(1, 2)
def _add_signals(self):
# Open command
# self.open_dir_action.triggered.connect(self.update_system_tree)
# Run command
def run_tests():
runmayatests.main(test_dir=[self.get_selected_dir()])
self.run_action.triggered.connect(run_tests)
# Stop command
# self.stop_action.triggered.connect(self.dir_up)
# Move up button
self.btn.clicked.connect(self.walk_up)
def get_selected_dir(self):
index = self.tree.currentIndex()
path = self.model.filePath(index)
print path
return path
def _update_system_tree(self, directory):
self.tree.setRootIndex(self.model.index(directory))
self.current_dir = directory
def walk_up(self):
par_dir = os.path.abspath(os.path.join(self.current_dir, os.pardir))
self._update_system_tree(par_dir)
def walk_down(self):
self._update_system_tree(self.get_selected_dir())
def open_file_dialog(self):
pass
def dragEnterEvent(self, event):
pass
if __name__ == '__main__':
# Create the Qt Application
app = QtWidgets.QApplication(sys.argv)
# Build the UI window. Must keep a reference to the window class or else it
# goes out of scope
ui = TestCodeUI()
sys.exit(app.exec_())
|
mit
| 5,874,005,164,232,687,000
| 29.698718
| 79
| 0.622886
| false
| 3.756078
| true
| false
| false
|
valohai/minique
|
minique/encoding.py
|
1
|
1624
|
import json
from typing import Union, Any
registry = {}
default_encoding_name = None
def register_encoding(name, *, default=False):
def decorator(cls):
global default_encoding_name
registry[name] = cls
if default:
default_encoding_name = name
return cls
return decorator
class BaseEncoding:
def encode(self, value: Any, failsafe: bool = False) -> Union[str, bytes]:
"""
Encode a value to a string or bytes.
:param failsafe: When set, hint that the encoder should try hard not to fail,
even if it requires loss of fidelity.
"""
raise NotImplementedError("Encoding not implemented")
def decode(self, value: Union[str, bytes]) -> Any:
raise NotImplementedError("Decoding not implemented")
@register_encoding("json", default=True)
class JSONEncoding(BaseEncoding):
"""
Default (JSON) encoding for kwargs and results.
"""
# These can be effortlessly overridden in subclasses
dump_kwargs = {
"ensure_ascii": False,
"separators": (",", ":"),
}
load_kwargs = {}
failsafe_default = str
def encode(self, value: Any, failsafe: bool = False) -> Union[str, bytes]:
kwargs = self.dump_kwargs.copy()
if failsafe:
kwargs["default"] = self.failsafe_default
return json.dumps(
value,
**kwargs,
)
def decode(self, value: Union[str, bytes]) -> Any:
if isinstance(value, bytes):
value = value.decode()
return json.loads(value, **self.load_kwargs)
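# Illustrative sketch (editor-added): registering an extra encoding. Anything
# decorated with @register_encoding becomes selectable by name; "pickle" is an
# example name, not part of the original module.
import pickle
@register_encoding("pickle")
class PickleEncoding(BaseEncoding):
    def encode(self, value: Any, failsafe: bool = False) -> bytes:
        return pickle.dumps(value)
    def decode(self, value: Union[str, bytes]) -> Any:
        if isinstance(value, str):
            value = value.encode()
        return pickle.loads(value)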
|
mit
| 2,236,752,375,533,370,600
| 26.525424
| 85
| 0.602217
| false
| 4.240209
| false
| false
| false
|
edx/i18n-tools
|
i18n/main.py
|
1
|
1437
|
#!/usr/bin/env python
"""
Main function for internationalization tools.
"""
import importlib
import sys
from path import Path
def get_valid_commands():
"""
Returns valid commands.
Returns:
commands (list): List of valid commands
"""
modules = [m.basename().split('.')[0] for m in Path(__file__).dirname().files('*.py')]
commands = []
for modname in modules:
if modname == 'main':
continue
mod = importlib.import_module('i18n.%s' % modname)
if hasattr(mod, 'main'):
commands.append(modname)
return commands
def error_message():
"""
Writes out error message specifying the valid commands.
Returns:
Failure code for system exit
"""
sys.stderr.write('valid commands:\n')
for cmd in get_valid_commands():
sys.stderr.write('\t%s\n' % cmd)
return -1
def main():
"""
Executes the given command. Returns error_message if command is not valid.
Returns:
Output of the given command or error message if command is not valid.
"""
try:
command = sys.argv[1]
except IndexError:
return error_message()
try:
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
except (ImportError, AttributeError):
return error_message()
return module.main()
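# Example (editor-added; available command names depend on which sibling
# modules expose a main() function):
#   $ python -m i18n.main extract
# dispatches to i18n/extract.py's main(), with any remaining argv passed
# through via main.args.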
if __name__ == '__main__':
sys.exit(main())
|
apache-2.0
| 5,347,723,365,741,315,000
| 21.453125
| 90
| 0.601253
| false
| 4.070822
| false
| false
| false
|
OpenEdition/bilbo
|
src/bilbo/reference/Word.py
|
1
|
4264
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Created on April 18, 2012
@author: Young-Min Kim, Jade Tavernier
"""
from bilbo.reference.Balise import Balise
from bilbo.reference.Feature import Feature
class Word(object):
"""
A class corresponding to a word in a reference. It contains word name, features, tags, etc.
Word object is first created in CleanCorpus1 and CleanCorpus2.
"""
def __init__(self, mot, tags=[], features=[]):
"""
nom : word name
tag : list of Balise objects
feature : list of Feature objects
item : indicator of sub-reference (0 : no, 1 : yes)
"""
'Generate Tag objects'
self.nom = mot
self.core = mot
self.tag = []
self.feature = []
self.ignoreWord = 0
'item is an indicator showing if the word is in a sub reference'
self.item = 0
if type(mot) is str:
print mot
for tag in tags:
'Eliminate the spaces at the beginning and ending'
            tag = tag.strip()
if tag != "" and self.getTag(tag) == -1:
self.tag.append(Balise(tag))
'Generate Feature objects'
for feature in features:
'Eliminate the spaces at the beginning and ending'
            feature = feature.strip()
if feature != "" and self.getFeature(feature) == -1:
self.feature.append(Feature(feature))
def affiche(self):
print "\nWord : ",self.nom.encode('utf8'), self.core.encode('utf8')
if len(self.tag) >= 1:
print "\tTAG :"
for key in self.tag:
key.affiche()
if len(self.feature) >= 1:
print "\tFEATURE :"
for key in self.feature:
key.affiche()
def addFeature(self,feature):
if isinstance(feature, list):
for carac in feature:
'Eliminate the spaces at the beginning and ending'
                carac = carac.strip()
if carac != "" and self.getFeature(carac) == -1:
self.feature.append(Feature(carac))
else:
'Eliminate the spaces at the beginning and ending'
            feature = feature.strip()
if self.getFeature(feature) == -1:
self.feature.append(Feature(feature))
def addTag(self,tag):
if isinstance(tag, list):
for bal in tag:
'Eliminate the spaces at the beginning and ending'
                bal = bal.strip()
if bal != "" and self.getTag(bal) == -1:
self.tag.append(Balise(bal))
else:
'Eliminate the spaces at the beginning and ending'
            tag = tag.strip()
if self.getTag(tag) == -1:
self.tag.append(Balise(tag))
def delFeature(self,feature):
ref = self.getFeature(feature)
if ref != -1:
self.feature.remove(ref)
return -1
def delTag(self,tag):
ref = self.getTag(tag)
if ref != -1:
self.tag.remove(ref)
return -1
def delAllFeature(self):
del(self.feature[:])
def delAllTag(self):
del(self.tag[:])
def getFeature(self,feature):
for carac in self.feature:
if carac.nameIs(feature) == 1:
return carac
return -1
def getTag(self,tag):
for bal in self.tag:
if bal.nameIs(tag) == 1:
return bal
return -1
def listNomFeature(self):
carac = []
for key in self.feature:
carac.append(key.nom)
return carac
def listNomTag(self):
bal = []
for key in self.tag:
bal.append(key.nom)
return bal
def getLastFeature(self):
"""
Return the last feature
"""
if len(self.feature) == 0:
return -1
return self.feature[len(self.feature)-1]
def getLastTag(self):
"""
Return the last tag
"""
if len(self.tag) == 0:
return -1
if self.tag[len(self.tag)-1].nom == 'hi' and len(self.tag) > 1:
return self.tag[len(self.tag)-2]
return self.tag[len(self.tag)-1]
def getFeatureIndice(self, index):
"""
Return the feature at the index
"""
if index < 0: return -1
return self.feature[index]
def getTagIndice(self, index):
"""
Return the tag at the index
"""
if index < 0: return -1
return self.tag[index]
def getAllFeature(self):
"""
Return all the features
"""
return self.feature
def getAllTag(self):
"""
Return all the tags
"""
return self.tag
def nbTag(self):
"""
Return the number of tags
"""
return len(self.tag)
def nbFeatures(self):
"""
Return the number of features
"""
return len(self.feature)
def __getattr__(self, nom):
print("Alert ! There is no attribute {0} here !".format(nom))
|
gpl-2.0
| 7,075,776,964,256,177,000
| 19.113208
| 92
| 0.645403
| false
| 2.898708
| false
| false
| false
|
fluxer/spm
|
nuitka/nuitka/containers/odict.py
|
1
|
6241
|
# :copyright: (c) 2008 by Armin Ronacher and PEP 273 authors.
# :license: modified BSD license.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Kay Hayen did some changes for Nuitka, and put everything he added under the same
# modified BSD license.
""" This module is only an abstraction of OrderedDict as present in 2.7 and 3.x.
It is not in 2.6, for this version we are using the odict.py as mentioned in the
PEP-0372.
This can be removed safely after the transition, note that the documentation was
removed, as it's not interesting really, being redundant to the Python 2.7
documentation. """
# pylint: disable=E0611,W0141
try:
from collections import OrderedDict
except ImportError:
from itertools import izip, imap
from copy import deepcopy
missing = object()
class OrderedDict(dict):
def __init__(self, *args, **kwargs):
dict.__init__(self)
self._keys = []
self.update(*args, **kwargs)
def __delitem__(self, key):
dict.__delitem__(self, key)
self._keys.remove(key)
def __setitem__(self, key, item):
if key not in self:
self._keys.append(key)
dict.__setitem__(self, key, item)
def __deepcopy__(self, memo = None):
if memo is None:
memo = {}
d = memo.get(id(self), missing)
if d is not missing:
return d
memo[id(self)] = d = self.__class__()
dict.__init__(d, deepcopy(self.items(), memo))
d._keys = self._keys[:]
return d
def __getstate__(self):
return {"items": dict(self), "keys": self._keys}
def __setstate__(self, d):
self._keys = d["keys"]
dict.update(d["items"])
def __reversed__(self):
return reversed(self._keys)
def __eq__(self, other):
if isinstance(other, OrderedDict):
if not dict.__eq__(self, other):
return False
return self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self.__eq__(other)
def __cmp__(self, other):
if isinstance(other, OrderedDict):
return cmp(self.items(), other.items())
elif isinstance(other, dict):
return dict.__cmp__(self, other)
return NotImplemented
@classmethod
def fromkeys(cls, iterable, default = None):
return cls((key, default) for key in iterable)
def clear(self):
del self._keys[:]
dict.clear(self)
def copy(self):
return self.__class__(self)
def items(self):
return zip(self._keys, self.values())
def iteritems(self):
return izip(self._keys, self.itervalues())
def keys(self):
return self._keys[:]
def iterkeys(self):
return iter(self._keys)
def pop(self, key, default = missing):
if default is missing:
return dict.pop(self, key)
elif key not in self:
return default
self._keys.remove(key)
return dict.pop(self, key, default)
        def popitem(self, key):
            # unlike dict.popitem, this pops the given key and returns
            # its (key, value) pair
            self._keys.remove(key)
            return (key, dict.pop(self, key))
        def setdefault(self, key, default = None):
            if key not in self:
                self._keys.append(key)
            return dict.setdefault(self, key, default)
def update(self, *args, **kwargs):
sources = []
if len(args) == 1:
if hasattr(args[0], "iteritems"):
sources.append(args[0].iteritems())
else:
sources.append(iter(args[0]))
elif args:
raise TypeError("expected at most one positional argument")
if kwargs:
sources.append(kwargs.iteritems())
for iterable in sources:
for key, val in iterable:
self[key] = val
def values(self):
return map(self.get, self._keys)
def itervalues(self):
return imap(self.get, self._keys)
def index(self, item):
return self._keys.index(item)
def byindex(self, item):
key = self._keys[item]
return (key, dict.__getitem__(self, key))
def reverse(self):
self._keys.reverse()
def sort(self, *args, **kwargs):
self._keys.sort(*args, **kwargs)
def __repr__(self):
return "OrderedDict(%r)" % self.items()
__copy__ = copy
__iter__ = iterkeys
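# A minimal usage sketch (not part of the original module): whichever
# implementation was selected above, insertion order is preserved.
if __name__ == "__main__":
    d = OrderedDict()
    d["b"] = 1
    d["a"] = 2
    assert list(d.keys()) == ["b", "a"]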
|
gpl-2.0
| -8,327,234,843,170,627,000
| 33.480663
| 83
| 0.579875
| false
| 4.4867
| false
| false
| false
|
Tiimber/terminal-notification
|
growl_notifier.py
|
1
|
1544
|
try:
import gntp.notifier
except ImportError:
pass
class GrowlNotifier():
growl = None
@staticmethod
def register():
if GrowlNotifier.growl is None:
GrowlNotifier.growl = gntp.notifier.GrowlNotifier(
applicationName='Terminal Notification',
notifications=['Message'],
defaultNotifications=['Message'],
)
growl_register = GrowlNotifier.growl.register()
if not growl_register:
GrowlNotifier.growl = None
return GrowlNotifier.growl is not None
@staticmethod
def notify_obj(notify_object):
if GrowlNotifier.growl is not None:
title = str(notify_object['title']) if 'title' in notify_object else None
subtitle = str(notify_object['subtitle']) if 'subtitle' in notify_object else None
if title is not None and subtitle is not None:
title = title + ' / ' + subtitle
elif subtitle is not None:
title = subtitle
message = str(notify_object['message']) if 'message' in notify_object else None
return GrowlNotifier.notify(title=title, message=message)
else:
return False
@staticmethod
def notify(title=None, message=None):
notify_success = GrowlNotifier.growl.notify(
noteType='Message',
title=title,
description=message,
sticky=False,
priority=1
)
return notify_success
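# Illustrative usage sketch (not part of the original module; assumes the
# gntp package is installed and a Growl/GNTP daemon is listening locally):
if __name__ == '__main__':
    if GrowlNotifier.register():
        GrowlNotifier.notify(title='Terminal Notification', message='hello')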
|
gpl-2.0
| 2,967,684,299,168,456,000
| 32.586957
| 94
| 0.588731
| false
| 4.568047
| false
| false
| false
|
lyubent/CassTor
|
cassandra/pylib/cqlshlib/formatting.py
|
1
|
8617
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import time
from collections import defaultdict
from . import wcwidth
from .displaying import colorme, FormattedValue, DEFAULT_VALUE_COLORS
from cql import cqltypes
unicode_controlchars_re = re.compile(r'[\x00-\x31\x7f-\xa0]')
controlchars_re = re.compile(r'[\x00-\x31\x7f-\xff]')
def _show_control_chars(match):
txt = repr(match.group(0))
if txt.startswith('u'):
txt = txt[2:-1]
else:
txt = txt[1:-1]
return txt
bits_to_turn_red_re = re.compile(r'\\([^uUx]|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{2}|U[0-9a-fA-F]{8})')
def _make_turn_bits_red_f(color1, color2):
def _turn_bits_red(match):
txt = match.group(0)
if txt == '\\\\':
return '\\'
return color1 + txt + color2
return _turn_bits_red
default_null_placeholder = 'null'
default_time_format = ''
default_float_precision = 3
default_colormap = DEFAULT_VALUE_COLORS
empty_colormap = defaultdict(lambda: '')
def format_by_type(cqltype, val, encoding, colormap=None, addcolor=False,
nullval=None, time_format=None, float_precision=None):
if nullval is None:
nullval = default_null_placeholder
if val is None:
return colorme(nullval, colormap, 'error')
if addcolor is False:
colormap = empty_colormap
elif colormap is None:
colormap = default_colormap
if time_format is None:
time_format = default_time_format
if float_precision is None:
float_precision = default_float_precision
return format_value(cqltype, val, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
def format_value_default(val, colormap, **_):
val = str(val)
escapedval = val.replace('\\', '\\\\')
bval = controlchars_re.sub(_show_control_chars, escapedval)
tbr = _make_turn_bits_red_f(colormap['hex'], colormap['text'])
coloredval = colormap['text'] + bits_to_turn_red_re.sub(tbr, bval) + colormap['reset']
return FormattedValue(bval, coloredval)
# Mapping cql type base names ("int", "map", etc) to formatter functions,
# making format_value a generic function
_formatters = {}
def format_value(cqltype, val, **kwargs):
formatter = _formatters.get(cqltype.typename, format_value_default)
return formatter(val, subtypes=cqltype.subtypes, **kwargs)
def formatter_for(typname):
def registrator(f):
_formatters[typname] = f
return f
return registrator
@formatter_for('blob')
def format_value_blob(val, colormap, **_):
bval = ''.join('%02x' % ord(c) for c in val)
return colorme(bval, colormap, 'hex')
def format_python_formatted_type(val, colormap, color):
bval = str(val)
return colorme(bval, colormap, color)
@formatter_for('decimal')
def format_value_decimal(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'decimal')
@formatter_for('uuid')
def format_value_uuid(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'uuid')
@formatter_for('inet')
def formatter_value_inet(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'inet')
@formatter_for('boolean')
def format_value_boolean(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'boolean')
def format_floating_point_type(val, colormap, float_precision, **_):
bval = '%.*g' % (float_precision, val)
return colorme(bval, colormap, 'float')
formatter_for('float')(format_floating_point_type)
formatter_for('double')(format_floating_point_type)
def format_integer_type(val, colormap, **_):
# base-10 only for now; support others?
bval = str(val)
return colorme(bval, colormap, 'int')
formatter_for('bigint')(format_integer_type)
formatter_for('int')(format_integer_type)
formatter_for('varint')(format_integer_type)
formatter_for('counter')(format_integer_type)
@formatter_for('timestamp')
def format_value_timestamp(val, colormap, time_format, **_):
bval = strftime(time_format, val)
return colorme(bval, colormap, 'timestamp')
@formatter_for('timeuuid')
def format_value_timeuuid(val, colormap, time_format, **_):
utime = cqltypes.unix_time_from_uuid1(val)
bval = strftime(time_format, utime)
return colorme(bval, colormap, 'timestamp')
def strftime(time_format, seconds):
local = time.localtime(seconds)
formatted = time.strftime(time_format, local)
if local.tm_isdst != 0:
offset = -time.altzone
else:
offset = -time.timezone
    if formatted[-4:] != '0000' or time_format[-2:] != '%z' or offset == 0:
return formatted
# deal with %z on platforms where it isn't supported. see CASSANDRA-4746.
if offset < 0:
sign = '-'
else:
sign = '+'
hours, minutes = divmod(abs(offset) / 60, 60)
return formatted[:-5] + sign + '{0:0=2}{1:0=2}'.format(hours, minutes)
@formatter_for('text')
def format_value_text(val, encoding, colormap, **_):
escapedval = val.replace(u'\\', u'\\\\')
escapedval = unicode_controlchars_re.sub(_show_control_chars, escapedval)
bval = escapedval.encode(encoding, 'backslashreplace')
displaywidth = wcwidth.wcswidth(bval.decode(encoding))
tbr = _make_turn_bits_red_f(colormap['hex'], colormap['text'])
coloredval = colormap['text'] + bits_to_turn_red_re.sub(tbr, bval) + colormap['reset']
    return FormattedValue(bval, coloredval, displaywidth)
# name alias
formatter_for('varchar')(format_value_text)
def format_simple_collection(subtype, val, lbracket, rbracket, encoding,
colormap, time_format, float_precision, nullval):
subs = [format_value(subtype, sval, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
for sval in val]
bval = lbracket + ', '.join(sval.strval for sval in subs) + rbracket
lb, sep, rb = [colormap['collection'] + s + colormap['reset']
for s in (lbracket, ', ', rbracket)]
coloredval = lb + sep.join(sval.coloredval for sval in subs) + rb
displaywidth = 2 * len(subs) + sum(sval.displaywidth for sval in subs)
return FormattedValue(bval, coloredval, displaywidth)
@formatter_for('list')
def format_value_list(val, encoding, colormap, time_format, float_precision, subtypes, nullval, **_):
return format_simple_collection(subtypes[0], val, '[', ']', encoding, colormap,
time_format, float_precision, nullval)
@formatter_for('set')
def format_value_set(val, encoding, colormap, time_format, float_precision, subtypes, nullval, **_):
return format_simple_collection(subtypes[0], val, '{', '}', encoding, colormap,
time_format, float_precision, nullval)
@formatter_for('map')
def format_value_map(val, encoding, colormap, time_format, float_precision, subtypes, nullval, **_):
def subformat(v, subtype):
return format_value(subtype, v, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
subkeytype, subvaltype = subtypes
subs = [(subformat(k, subkeytype), subformat(v, subvaltype)) for (k, v) in val.items()]
bval = '{' + ', '.join(k.strval + ': ' + v.strval for (k, v) in subs) + '}'
lb, comma, colon, rb = [colormap['collection'] + s + colormap['reset']
for s in ('{', ', ', ': ', '}')]
coloredval = lb \
+ comma.join(k.coloredval + colon + v.coloredval for (k, v) in subs) \
+ rb
displaywidth = 4 * len(subs) + sum(k.displaywidth + v.displaywidth for (k, v) in subs)
return FormattedValue(bval, coloredval, displaywidth)
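# Illustrative sketch (not part of the original module): the registry built
# by formatter_for makes format_value extensible, so a formatter for a
# hypothetical 'ascii' type plugs in the same way as the built-in ones.
@formatter_for('ascii')
def format_value_ascii(val, colormap, **_):
    return colorme(str(val), colormap, 'text')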
|
mit
| -2,934,228,930,725,762,600
| 39.455399
| 101
| 0.659278
| false
| 3.456478
| false
| false
| false
|
genialis/resolwe-bio
|
resolwe_bio/tools/demultiplex.py
|
1
|
11588
|
#!/usr/bin/env python3
# XXX: Refactor to a command line tool and remove pylint disable
"""NGS reads demultiplexer."""
import argparse
import gzip
import json
import os
import subprocess
import sys
from resolwe_runtime_utils import error, export_file, progress, run, save, send_message
from six import iteritems
parser = argparse.ArgumentParser(description="NGS reads demultiplexer.")
parser.add_argument("barcodes", help="barcodes file")
parser.add_argument(
"s", metavar="READS", nargs="?", help="file containing unpaired reads"
)
parser.add_argument("-1", metavar="READS-1", help="file containing upstream mates")
parser.add_argument("-2", metavar="READS-2", help="file containing downstream mates")
parser.add_argument("-m", "--mapping", help="barcode mapping file")
parser.add_argument(
"--progress-start", type=float, default=0.0, help="initial progress"
)
args = parser.parse_args()
if not (args.s or (args.__dict__["1"] and args.__dict__["2"])) or (
args.s and args.__dict__["1"] and args.__dict__["2"]
):
sys.stderr.write("Give either unpaired reads or both paired read mates.")
print()
exit(1)
if args.s:
reads1 = args.s
reads2 = ""
else:
reads1 = args.__dict__["1"]
reads2 = args.__dict__["2"]
if not os.path.isfile(reads2):
sys.stderr.write("Reads file {} not found.".format(reads2))
print()
exit(1)
if not os.path.isfile(reads1):
sys.stderr.write("Reads file {} not found.".format(reads1))
print()
exit(1)
if not os.path.isfile(args.barcodes):
sys.stderr.write("Barcodes file {} not found.".format(args.barcodes))
print()
exit(1)
if args.mapping and not os.path.isfile(args.mapping):
sys.stderr.write("Barcode mapping file {} not found.".format(args.mapping))
print()
exit(1)
pool_maps = {}
def isnum(number):
"""Check if number."""
try:
int(number)
return True
except ValueError:
return False
barcode_length = 0
if args.mapping:
with open(args.mapping, encoding="utf-8") as fd:
for line in fd:
line = line.rstrip()
if not line:
continue
t = line.split("\t")
barcode, filename = "", ""
if len(t) == 2:
barcode, filename = t[0:2]
if len(t) > 2 and isnum(t[0]):
barcode, filename = t[1:3]
barcode, filename = barcode.strip(), filename.strip()
if barcode and filename:
pool_maps[barcode] = filename
if barcode_length > 0 and barcode_length != len(barcode):
send_message(error("Barcodes should be of the same length."))
exit(1)
else:
barcode_length = len(barcode)
for bar, _map in iteritems(pool_maps):
print("{}: {}".format(bar, _map))
def read_multiplexed(
reads1_file, reads2_file, barcodes_file, pool_maps, progress_start
):
"""Parse multiplexed file."""
pool_name = reads1_file.split(".")[0]
    def nicename(a):
        return a.replace("#", "").replace("  ", " ").replace("/", " ").replace(" ", "_")
files, f1, f2, fbar = {}, None, None, None
try:
barcodes = set(pool_maps.keys())
print("BARCODES: {}".format(barcodes))
for barcode in barcodes:
name = nicename(pool_maps[barcode])
if reads2_file:
filename = "{}_{}_{}_mate1.fq.gz".format(pool_name, name, barcode)
files[barcode] = gzip.open(filename, "wb")
filename = "{}_{}_{}_mate2.fq.gz".format(pool_name, name, barcode)
files[barcode + "2"] = gzip.open(filename, "wb")
else:
filename = "{}_{}_{}.fq.gz".format(pool_name, name, barcode)
files[barcode] = gzip.open(filename, "wb")
if reads2_file:
files["notmatched"] = gzip.open(
"Not_Matched_{}_mate1.fq.gz".format(pool_name), "wb"
)
files["badquality"] = gzip.open(
"Bad_Quality_{}_mate1.fq.gz".format(pool_name), "wb"
)
files["notmatched2"] = gzip.open(
"Not_Matched_{}_mate2.fq.gz".format(pool_name), "wb"
)
files["badquality2"] = gzip.open(
"Bad_Quality_{}_mate2.fq.gz".format(pool_name), "wb"
)
else:
files["notmatched"] = gzip.open(
"Not_Matched_{}.fq.gz".format(pool_name), "wb"
)
files["badquality"] = gzip.open(
"Bad_Quality_{}.fq.gz".format(pool_name), "wb"
)
filenames = list(sorted(set(f.name for f in files.values())))
p = subprocess.Popen(
"gzip -dc {} | wc -l".format(barcodes_file),
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
numlines, err = p.communicate()
if err:
raise Exception(err)
numlines = int(numlines)
readid, matched, notmatched, badquality, skipped = 0, 0, 0, 0, 0
send_message(progress(progress_start))
_progress = progress_start
progress_step = (0.9 - _progress) / 20.0
        progress_span = numlines // 20
def save_results(matched, notmatched, badquality, skipped, total, _progress):
total = float(total)
send_message(
save(
"matched",
"{:,} reads ({:.2f} %)".format(matched, 100 * matched / total),
)
)
send_message(
save(
"notmatched",
"{:,} reads ({:.2f} %)".format(
notmatched, 100 * notmatched / total
),
)
)
send_message(
save(
"badquality",
"{:,} reads ({:.2f} %)".format(
badquality, 100 * badquality / total
),
)
)
send_message(
save(
"skipped",
"{:,} reads ({:.2f} %)".format(skipped, 100 * skipped / total),
)
)
send_message(progress(_progress))
f1 = gzip.GzipFile(reads1_file, "r")
fbar = gzip.GzipFile(barcodes_file, "r")
if reads2_file:
f2 = gzip.GzipFile(reads2_file, "r")
while True:
readid += 1
r1 = f1.readline()
if not r1:
break
r1 = r1.decode("utf-8").rstrip("\r").rstrip("\n").split("\t")
if len(r1) != 11:
print("SKIPPED: error in {} line in r1".format(readid))
continue
s1 = r1[-3].replace(".", "N")
p1 = r1[-1]
rbar = fbar.readline()
if not rbar:
break
rbar = rbar.decode("utf-8").rstrip("\r").rstrip("\n").split("\t")
if len(rbar) != 11:
print("SKIPPED: error in {} line in rbar".format(readid))
continue
sbar = rbar[-3].replace(".", "N")[:barcode_length]
pbar = rbar[-1]
if reads2_file:
r2 = f2.readline()
if not r2:
break
r2 = r2.decode("utf-8").rstrip("\r").rstrip("\n").split("\t")
if len(r2) != 11:
print("SKIPPED: error in {} line in r2".format(readid))
continue
s2 = r2[-3].replace(".", "N")
p2 = r2[-1]
else:
r2 = r1
p2 = p1
if r1[:7] == r2[:7] == rbar[:7] and p1 == p2 == pbar:
idline = "@" + ":".join(r1[:7]) + " " + sbar
if p1 == "1" and p2 == "1":
if sbar in barcodes:
files[sbar].write(
(
idline + "\n" + s1 + "\n" + "+" + "\n" + r1[-2] + "\n"
).encode("utf-8")
)
if reads2_file:
files[sbar + "2"].write(
(
idline
+ "\n"
+ s2
+ "\n"
+ "+"
+ "\n"
+ r2[-2]
+ "\n"
).encode("utf-8")
)
matched += 1
else:
files["notmatched"].write(
(
idline + "\n" + s1 + "\n" + "+" + "\n" + r1[-2] + "\n"
).encode("utf-8")
)
if reads2_file:
files["notmatched2"].write(
(
idline
+ "\n"
+ s2
+ "\n"
+ "+"
+ "\n"
+ r2[-2]
+ "\n"
).encode("utf-8")
)
notmatched += 1
else:
files["badquality"].write(
(idline + "\n" + s1 + "\n" + "+" + "\n" + r1[-2] + "\n").encode(
"utf-8"
)
)
if reads2_file:
files["badquality2"].write(
(
idline + "\n" + s2 + "\n" + "+" + "\n" + r2[-2] + "\n"
).encode("utf-8")
)
badquality += 1
else:
print(
"SKIPPED: {}, p1: {}, p2: {}, pbar: {}".format(readid, p1, p2, pbar)
)
print("{} ? {} ? {}".format(r1[:7], r2[:7], rbar[:7]))
skipped += 1
if readid % progress_span == 0:
_progress += progress_step
save_results(
matched, notmatched, badquality, skipped, readid, _progress
)
save_results(matched, notmatched, badquality, skipped, readid, 0.9)
finally:
if f1:
f1.close()
if f2:
f2.close()
if fbar:
fbar.close()
for f in files:
files[f].close()
return filenames
filenames = read_multiplexed(
reads1, reads2, args.barcodes, pool_maps, args.progress_start
)
for name in filenames:
if reads2:
if name.endswith("_mate2.fq.gz"):
continue
name2 = name.replace("_mate1", "_mate2")
send_message(export_file(name))
send_message(export_file(name2))
process = "upload-fastq-paired"
proc_input = {"src1": [name], "src2": [name2]}
else:
send_message(export_file(name))
process = "upload-fastq-single"
proc_input = {"src": [name]}
send_message(run(process, json.dumps(proc_input, separators=(",", ":"))))
|
apache-2.0
| -835,447,415,483,483,300
| 31.827195
| 88
| 0.427166
| false
| 4.060266
| false
| false
| false
|
aimas/TuniErp-8.0
|
addons/pad_project/__openerp__.py
|
1
|
1503
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Pad on tasks',
'version': '1.0',
'category': 'Project Management',
'description': """
This module adds a PAD in all project kanban views.
===================================================
""",
'author': 'OpenERP SA',
'website': 'https://www.tunierp.com/page/project-management',
'depends': ['project', 'pad'],
'data': ['project_task.xml'],
'demo': [],
'installable': True,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| -4,788,185,978,103,792,000
| 38.552632
| 78
| 0.578177
| false
| 4.245763
| false
| false
| false
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-sql/azure/mgmt/sql/models/elastic_pool_operation.py
|
1
|
4797
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .proxy_resource import ProxyResource
class ElasticPoolOperation(ProxyResource):
"""A elastic pool operation.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar elastic_pool_name: The name of the elastic pool the operation is
being performed on.
:vartype elastic_pool_name: str
:ivar operation: The name of operation.
:vartype operation: str
:ivar operation_friendly_name: The friendly name of operation.
:vartype operation_friendly_name: str
:ivar percent_complete: The percentage of the operation completed.
:vartype percent_complete: int
:ivar server_name: The name of the server.
:vartype server_name: str
:ivar start_time: The operation start time.
:vartype start_time: datetime
:ivar state: The operation state.
:vartype state: str
:ivar error_code: The operation error code.
:vartype error_code: int
:ivar error_description: The operation error description.
:vartype error_description: str
:ivar error_severity: The operation error severity.
:vartype error_severity: int
:ivar is_user_error: Whether or not the error is a user error.
:vartype is_user_error: bool
:ivar estimated_completion_time: The estimated completion time of the
operation.
:vartype estimated_completion_time: datetime
:ivar description: The operation description.
:vartype description: str
:ivar is_cancellable: Whether the operation can be cancelled.
:vartype is_cancellable: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'elastic_pool_name': {'readonly': True},
'operation': {'readonly': True},
'operation_friendly_name': {'readonly': True},
'percent_complete': {'readonly': True},
'server_name': {'readonly': True},
'start_time': {'readonly': True},
'state': {'readonly': True},
'error_code': {'readonly': True},
'error_description': {'readonly': True},
'error_severity': {'readonly': True},
'is_user_error': {'readonly': True},
'estimated_completion_time': {'readonly': True},
'description': {'readonly': True},
'is_cancellable': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'elastic_pool_name': {'key': 'properties.elasticPoolName', 'type': 'str'},
'operation': {'key': 'properties.operation', 'type': 'str'},
'operation_friendly_name': {'key': 'properties.operationFriendlyName', 'type': 'str'},
'percent_complete': {'key': 'properties.percentComplete', 'type': 'int'},
'server_name': {'key': 'properties.serverName', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'state': {'key': 'properties.state', 'type': 'str'},
'error_code': {'key': 'properties.errorCode', 'type': 'int'},
'error_description': {'key': 'properties.errorDescription', 'type': 'str'},
'error_severity': {'key': 'properties.errorSeverity', 'type': 'int'},
'is_user_error': {'key': 'properties.isUserError', 'type': 'bool'},
'estimated_completion_time': {'key': 'properties.estimatedCompletionTime', 'type': 'iso-8601'},
'description': {'key': 'properties.description', 'type': 'str'},
'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'},
}
def __init__(self):
super(ElasticPoolOperation, self).__init__()
self.elastic_pool_name = None
self.operation = None
self.operation_friendly_name = None
self.percent_complete = None
self.server_name = None
self.start_time = None
self.state = None
self.error_code = None
self.error_description = None
self.error_severity = None
self.is_user_error = None
self.estimated_completion_time = None
self.description = None
self.is_cancellable = None
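# Minimal sketch (not part of the generated code): every property is
# populated by the service, so a locally constructed instance starts empty.
if __name__ == '__main__':
    op = ElasticPoolOperation()
    print(op.elastic_pool_name, op.state)  # None None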
|
mit
| -3,615,658,341,865,835,000
| 41.078947
| 103
| 0.608505
| false
| 4.058376
| false
| false
| false
|
yesudeep/cmc
|
app/models.py
|
1
|
7739
|
#!/usr/bin/env python
# -*- coding: utf-8; mode: python; tab-width: 4; indent-tabs-mode: nil; -*-
# Models for the datastore.
# Copyright (c) 2009 happychickoo.
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import configuration
from google.appengine.ext import db
from google.appengine.api import memcache
from aetycoon import TransformProperty
from django.template.defaultfilters import slugify
from caching_counter import CachingCounter
from dbhelper import SerializableModel, serialize_entities, deserialize_entities
import appengine_admin
class OpenIDUser(SerializableModel):
nickname = db.StringProperty()
identifier = db.StringProperty(required=True)
email = db.EmailProperty()
class SuggestedTitle(SerializableModel):
title = db.StringProperty(required=True)
slug = TransformProperty(title, slugify)
def increment_vote_count(self, delta=1):
CachingCounter('SuggestedTitle(%s).vote_count.key=%s' % (self.slug, str(self.key()))).incr(delta=delta)
@property
def vote_count(self):
return CachingCounter('SuggestedTitle(%s).vote_count.key=%s' % (self.slug, str(self.key()))).count
@classmethod
def up_vote_or_insert(cls, title):
t = SuggestedTitle.all().filter('slug = ', slugify(title)).get()
if not t:
t = SuggestedTitle(title=title)
t.put()
t.increment_vote_count()
return t
def __unicode__(self):
return self.title
def __str__(self):
return self.__unicode__()
class Celebrity(SerializableModel):
name = db.StringProperty(required=True)
slug = TransformProperty(name, slugify)
def increment_vote_count(self, delta=1):
CachingCounter('Celebrity(%s).vote_count.key=%s' % (self.slug, str(self.key()))).incr(delta=delta)
@property
def vote_count(self):
return CachingCounter('Celebrity(%s).vote_count.key=%s' % (self.slug, str(self.key()))).count
@classmethod
def up_vote_or_insert(cls, name):
t = Celebrity.all().filter('slug = ', slugify(name)).get()
if not t:
t = Celebrity(name=name)
t.put()
t.increment_vote_count()
return t
@classmethod
def get_latest(cls, count=100):
cache_key = 'Celebrity.get_latest(count=%d)' % count
celebrities = deserialize_entities(memcache.get(cache_key))
if not celebrities:
celebrities = Celebrity.all().order('-when_modified').fetch(count)
memcache.set(cache_key, serialize_entities(celebrities), 10)
return celebrities
def __unicode__(self):
return self.name
def __str__(self):
return self.name
class Person(SerializableModel):
full_name = db.StringProperty(required=True)
email = db.EmailProperty(required=True)
mobile_number = db.StringProperty()
def __unicode__(self):
return self.full_name
def __str__(self):
return self.full_name
class SuggestedTitlePerson(Person):
suggested_title = db.ReferenceProperty(SuggestedTitle, collection_name='people')
def __unicode__(self):
return self.full_name
def __str__(self):
return self.full_name
class StoryAuthor(Person):
def __unicode__(self):
return self.full_name
def __str__(self):
return self.full_name
class NotifyReleasePerson(Person):
def __unicode__(self):
return self.full_name
def __str__(self):
return self.full_name
class Story(SerializableModel):
title = db.StringProperty(required=True)
content = db.TextProperty(default=db.Blob(""))
author = db.ReferenceProperty(StoryAuthor, collection_name="stories")
def __unicode__(self):
return self.title
def __str__(self):
return self.title
    def get_latest_document(self):
        """Returns the latest document submitted."""
        # Assumed implementation: order the back-referenced documents by
        # the 'when_created' timestamp that SerializableModel provides
        # elsewhere in this module.
        return self.documents.order('-when_created').get()
class StoryDocument(SerializableModel):
story = db.ReferenceProperty(Story, collection_name="documents")
path = db.StringProperty()
name = db.StringProperty()
def __unicode__(self):
return self.name
def __str__(self):
return self.name
@property
def document(self):
import static
return static.get(self.path)
class AdminCelebrity(appengine_admin.ModelAdmin):
model = Celebrity
listFields = ("name", "slug", "vote_count")
editFields = ("name",)
readonlyFields = ("slug", "when_created", 'vote_count', "when_modified")
listGql = "order by name asc"
class AdminStoryAuthor(appengine_admin.ModelAdmin):
model = StoryAuthor
listFields = ("full_name", 'email', 'mobile_number',)
editFields = ("full_name", "email", 'mobile_number',)
listGql = 'order by full_name asc'
class AdminNotifyReleasePerson(appengine_admin.ModelAdmin):
model = NotifyReleasePerson
listFields = ("full_name", "email", "mobile_number",)
editFields = ("full_name", "email", "mobile_number",)
listGql = 'order by full_name asc'
class AdminSuggestedTitlePerson(appengine_admin.ModelAdmin):
model = SuggestedTitlePerson
listFields = ("full_name", "email", "mobile_number", "suggested_title")
editFields = ("full_name", "email", "mobile_number", "suggested_title")
listGql = 'order by full_name asc'
class AdminSuggestedTitle(appengine_admin.ModelAdmin):
model = SuggestedTitle
listFields = ('title', 'people', 'vote_count')
editFields = ('title', )
readonlyFields = ('slug', 'people', 'vote_count', 'when_created', 'when_modified')
class AdminStory(appengine_admin.ModelAdmin):
model = Story
listFields = ('title', 'author')
editFields = ('title', 'content')
readonlyFields = ('author', 'when_created', 'when_modified')
listGql = 'order by when_created desc'
class AdminStoryDocument(appengine_admin.ModelAdmin):
model = StoryDocument
listFields = ('path', 'story', 'name', 'document')
editFields = ('path', 'story', 'name')
readonlyFields = ('path', 'name', 'when_created', 'when_modified')
listGql = 'order by when_created desc'
class AdminStaticContent(appengine_admin.ModelAdmin):
from static import StaticContent
model = StaticContent
listFields = ('body', 'content_type', 'status',)
editFields = ('body', 'content_type',)
readonlyFields = ('status', 'last_modified', 'headers', 'etag')
listGql = 'order by last_modified desc'
appengine_admin.register(
AdminStory,
AdminStoryDocument,
AdminCelebrity,
AdminStoryAuthor,
AdminNotifyReleasePerson,
AdminSuggestedTitlePerson,
AdminSuggestedTitle,
AdminStaticContent
)
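# Illustrative usage sketch (not part of the original module; requires a
# configured App Engine runtime with memcache available):
def _example_up_vote():
    title = SuggestedTitle.up_vote_or_insert('The Great Story')
    return title.vote_count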
|
mit
| -3,802,891,922,520,883,000
| 31.931915
| 111
| 0.671663
| false
| 3.888945
| false
| false
| false
|
msimacek/koschei
|
alembic/versions/14ef9d47d314_split_dependency_changes_table.py
|
1
|
1760
|
"""Split dependency changes table
Revision ID: 14ef9d47d314
Revises: 31d647dbc4c5
Create Date: 2015-09-07 16:23:42.789628
"""
# revision identifiers, used by Alembic.
revision = '14ef9d47d314'
down_revision = '31d647dbc4c5'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('unapplied_change',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('dep_name', sa.String(), nullable=False),
sa.Column('prev_epoch', sa.Integer(), nullable=True),
sa.Column('prev_version', sa.String(), nullable=True),
sa.Column('prev_release', sa.String(), nullable=True),
sa.Column('curr_epoch', sa.Integer(), nullable=True),
sa.Column('curr_version', sa.String(), nullable=True),
sa.Column('curr_release', sa.String(), nullable=True),
sa.Column('distance', sa.Integer(), nullable=True),
sa.Column('package_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['package_id'], ['package.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_unapplied_change_package_id'), 'unapplied_change', ['package_id'], unique=False)
op.execute("""
ALTER TABLE dependency_change RENAME TO applied_change;
DELETE FROM applied_change WHERE applied_in_id IS NULL;
ALTER TABLE applied_change RENAME COLUMN applied_in_id TO build_id;
ALTER TABLE applied_change ALTER COLUMN build_id SET NOT NULL;
ALTER TABLE applied_change DROP COLUMN package_id;
DROP INDEX ix_dependency_change_applied_in_id;
""")
op.create_index(op.f('ix_applied_change_build_id'), 'applied_change', ['build_id'], unique=False)
def downgrade():
raise NotImplementedError()
|
gpl-2.0
| 2,047,308,295,171,628,800
| 38.111111
| 109
| 0.667614
| false
| 3.562753
| false
| false
| false
|
alokjani/contrail-datapipeline
|
tools/dummy-http-receiver.py
|
1
|
1358
|
#!/usr/bin/env python
"""
Very simple HTTP server in Python.
Usage::
    ./dummy-http-receiver.py [<port>]
Send a GET request::
curl http://localhost
Send a HEAD request::
curl -I http://localhost
Send a POST request::
curl -d "foo=bar&bin=baz" http://localhost
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import SocketServer
class DummyReceiver(BaseHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
self._set_headers()
self.wfile.write("<html><body><h1>hi!</h1></body></html>")
def do_HEAD(self):
self._set_headers()
def do_POST(self):
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
print post_data # <-- Print post data
self._set_headers()
def run(server_class=HTTPServer, handler_class=DummyReceiver, port=80):
server_address = ('', port)
httpd = server_class(server_address, handler_class)
print 'Starting httpd...'
httpd.serve_forever()
if __name__ == "__main__":
from sys import argv
if len(argv) == 2:
run(port=int(argv[1]))
else:
run()
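# Illustrative client sketch (not part of the original script): POSTs form
# data to a locally running instance on an assumed port.
def example_post(port=8000):
    import urllib2
    return urllib2.urlopen('http://localhost:%d' % port, 'foo=bar').read()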
|
apache-2.0
| -7,341,904,458,939,571,000
| 24.148148
| 89
| 0.616348
| false
| 3.621333
| false
| false
| false
|
PermutaTriangle/PermStruct
|
examples/classical_3_4/321_2134.py
|
1
|
1261
|
from __future__ import print_function
from permuta import *
import permstruct
import permstruct.dag
from permstruct import *
from permstruct.dag import taylor_dag
import sys
is_classical = True
# -- Wilf-class 2 in http://wikipedia.org/wiki/Enumerations_of_specific_permutation_classes -- #
# STATUS ================================================ > SUCCESS!
patts = [Permutation([3,2,1]), Permutation([2,1,3,4])]
perm_bound = 8
verify_bound = 12
ignored = 0
# The dag
max_len_patt = None
upper_bound = None
remove = False # True (3, 3) 4 works
# Grids
max_rule_size = (6, 6)
max_non_empty = 6
max_rules = None
# ------------------------------------------------------------------------------
settings = StructSettings(
perm_bound=perm_bound,
verify_bound=verify_bound,
max_rule_size=max_rule_size,
max_non_empty=max_non_empty,
max_rules=max_rules,
verbosity=StructLogger.INFO)
# settings.set_input(StructInput.from_avoidance(settings, patts))
settings.set_input(AvoiderInput(settings, patts))
settings.set_dag(taylor_dag(settings,
max_len_patt=max_len_patt,
remove=remove,
upper_bound=upper_bound))
exhaustive(settings)
|
bsd-3-clause
| 245,150,646,829,064,480
| 25.270833
| 96
| 0.59318
| false
| 3.380697
| false
| false
| false
|
CZ-NIC/foris
|
foris/config_handlers/profile.py
|
1
|
2032
|
# coding=utf-8
# Foris - web administration interface
# Copyright (C) 2018 CZ.NIC, z.s.p.o. <http://www.nic.cz>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .base import BaseConfigHandler
from foris import fapi
from foris.form import Hidden
from foris.state import current_state
from foris.utils.translators import gettext_dummy as gettext, _
class ProfileHandler(BaseConfigHandler):
""" Profile settings handler
"""
userfriendly_title = gettext("Guide workflow")
def __init__(self, *args, **kwargs):
self.load_backend_data()
super(ProfileHandler, self).__init__(*args, **kwargs)
def load_backend_data(self):
self.backend_data = current_state.backend.perform("web", "get_guide")
def get_form(self):
data = {"workflow": self.backend_data["current_workflow"]}
if self.data:
data.update(self.data)
profile_form = fapi.ForisForm("profile", data)
main = profile_form.add_section(name="set_profile", title=_(self.userfriendly_title))
main.add_field(Hidden, name="workflow", value=self.backend_data["current_workflow"])
def profile_form_cb(data):
result = current_state.backend.perform(
"web", "update_guide", {"enabled": True, "workflow": data["workflow"]}
)
return "save_result", result
profile_form.add_callback(profile_form_cb)
return profile_form
|
gpl-3.0
| 8,376,547,051,335,723,000
| 33.440678
| 93
| 0.682579
| false
| 3.863118
| false
| false
| false
|
DarthMaulware/EquationGroupLeaks
|
Leak #5 - Lost In Translation/windows/Resources/Ops/PyScripts/lib/ops/cmd/drivers.py
|
1
|
3371
|
import ops.cmd
import ops
import ops.env
import ops.cmd.safetychecks
from ops.cmd import getBoolOption, setBoolOption, getValueOption, setListOption, setStringOption
OpsCommandException = ops.cmd.OpsCommandException
VALID_OPTIONS = ['minimal', 'load', 'unload', 'list', 'minimal', 'nosignature', 'noversion']
class DriversCommand(ops.cmd.DszCommand, ):
optgroups = {'operation': ['load', 'unload', 'list']}
reqgroups = ['operation']
rejects = {'load': ['minimal', 'nosignature', 'version'], 'unload': ['minimal', 'nosignature', 'version']}
reqopts = []
defopts = {}
def __init__(self, plugin='drivers', autominimal=False, **optdict):
self.autominimal = autominimal
ops.cmd.DszCommand.__init__(self, plugin, **optdict)
def validateInput(self):
for opt in self.optdict:
if (opt not in VALID_OPTIONS):
return False
if ((not self.driver_list) and (self.load is None) and (self.unload is None)):
return False
if (((self.load is not None) or (self.unload is not None)) and (self.minimal or self.nosignature or self.noversion)):
return False
return True
def __getAutoMinimal(self):
return self.__autoMinimal
def __setAutoMinimal(self, val):
self.__autoMinimal = val
autominimal = property(__getAutoMinimal, __setAutoMinimal)
minimal = property((lambda x: getBoolOption(x, 'minimal')), (lambda x, y: setBoolOption(x, y, 'minimal')))
nosignature = property((lambda x: getBoolOption(x, 'nosignature')), (lambda x, y: setBoolOption(x, y, 'nosignature')))
noversion = property((lambda x: getBoolOption(x, 'noversion')), (lambda x, y: setBoolOption(x, y, 'noversion')))
driver_list = property((lambda x: getBoolOption(x, 'list')), (lambda x, y: setBoolOption(x, y, 'list')))
load = property((lambda x: getValueOption(x, 'load')), (lambda x, y: setStringOption(x, y, 'load')))
unload = property((lambda x: getValueOption(x, 'unload')), (lambda x, y: setStringOption(x, y, 'unload')))
ops.cmd.command_classes['drivers'] = DriversCommand
ops.cmd.aliasoptions['drivers'] = VALID_OPTIONS
def mySafetyCheck(self):
good = True
msgparts = []
if ((ops.env.get('OPS_NODRIVER').upper() == 'TRUE') and ((self.load is not None) or (self.unload is not None))):
good = False
msgparts.append('OPS_NODRIVER is set to TRUE, you should probably not load or unload drivers')
if ((ops.env.get('OPS_DRIVERLIST_MINIMAL').upper() == 'TRUE') and (not self.minimal) and self.driver_list):
if self.autominimal:
self.minimal = True
else:
good = False
msgparts.append('OPS_DRIVERLIST_MINIMAL is set to TRUE, you should not run a drivers -list without -minimal')
if ((ops.env.get('OPS_NODRIVERLIST').upper() == 'TRUE') and self.driver_list):
good = False
msgparts.append('OPS_NODRIVERLIST is set to true, you probably should not run a drivers -list')
if (not self.validateInput()):
good = False
msgparts.append('Your command did not pass input validation')
msg = ''
if (len(msgparts) > 0):
msg = msgparts[0]
for msgpart in msgparts[1:]:
msg += ('\n\t' + msgpart)
return (good, msg)
ops.cmd.safetychecks.addSafetyHandler('drivers', 'ops.cmd.drivers.mySafetyCheck')
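# Illustrative construction sketch (not part of the original module, and
# assuming the surrounding ops framework): the safe listing form of the
# command, equivalent to 'drivers -list -minimal'.
def _example_list_command():
    cmd = DriversCommand(autominimal=True, **{'list': True, 'minimal': True})
    return cmd.validateInput()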
|
unlicense
| -1,865,745,882,127,163,600
| 47.171429
| 125
| 0.646099
| false
| 3.518789
| false
| false
| false
|
cosurgi/trunk
|
examples/mpi/testMPI_3D_bisection.py
|
1
|
2164
|
# Possible executions of this script
# ./yadempi script.py #interactive will spawn additional workers
# mpiexec -n 4 ./yadempi script.py #non interactive
NSTEPS=100 #turn it >0 to see time iterations, else only initilization TODO!HACK
import os
from yade import mpy as mp
numThreads = 6
#add spheres
young = 5e6
compFricDegree = 0.0
O.materials.append(FrictMat(young=young, poisson=0.5, frictionAngle = radians(compFricDegree), density= 2600, label='sphereMat'))
O.materials.append(FrictMat(young=young*100, poisson = 0.5, frictionAngle = compFricDegree, density =2600, label='wallMat'))
mn,mx=Vector3(0,0,0),Vector3(150,150,100)
pred = pack.inAlignedBox(mn,mx)
O.bodies.append(pack.regularHexa(pred,radius=3,gap=0, material='sphereMat'))
walls=aabbWalls([Vector3(-mx[0]*2,-1,-mx[2]*2),Vector3(mx[0]*3,mx[1],mx[2]*3)], oversizeFactor=1, material='wallMat',wire=False)
for w in walls: w.shape.wire=False
O.bodies.append(walls[:3]+walls[4:]) #don't insert top wall
collider.verletDist = 2
newton.gravity=(0.05,-0.5,0.05) #else nothing would move
tsIdx=O.engines.index(timeStepper) #remove the automatic timestepper. Very important: we don't want subdomains to use many different timesteps...
O.engines=O.engines[0:tsIdx]+O.engines[tsIdx+1:]
O.dt=0.01
######### RUN ##########
def collectTiming():
created = os.path.isfile("collect.dat")
f=open('collect.dat','a')
if not created: f.write("numThreads mpi omp Nspheres N M runtime \n")
from yade import timing
f.write(str(numThreads)+" "+str(os.getenv('OMPI_COMM_WORLD_SIZE'))+" "+os.getenv('OMP_NUM_THREADS')+" "+str(N*M*(numThreads-1))+" "+str(N)+" "+str(M)+" "+str(timing.runtime())+"\n")
f.close()
# customize mpy
mp.VERBOSE_OUTPUT=False
mp.YADE_TIMING=False
mp.DOMAIN_DECOMPOSITION= True
#mp.MERGE_W_INTERACTIONS=True
#mp.ERASE_REMOTE_MASTER=True
mp.REALLOCATE_FREQUENCY=2
mp.mpirun(NSTEPS,numThreads,True)
#def animate():
#for k in range(600):
# single-thread vtk output from merged scene
#if mp.rank == 0:
#from yade import export
#v=export.VTKExporter("mpi3d")
#for k in range(600):
#mp.mpirun(15,4,True)
#if mp.rank == 0:
#v.exportSpheres(what=dict(subdomain='b.subdomain'))
|
gpl-2.0
| 2,623,087,043,722,613,000
| 30.823529
| 182
| 0.719963
| false
| 2.610374
| false
| false
| false
|
cloudxaas/cloudauth
|
lib/libauthz.py
|
1
|
3763
|
#!/usr/bin/python
import os
import sys
import pwd
import uuid
import struct
import socket
import logging
import base64
import urlparse
import time
import datetime
import json
import grp, pwd
import libauthn
from M2Crypto import EVP, EC, util
logger = logging.getLogger("libauthz")
def assert_authz(qstr, authn_cert, authz_keypem):
#qstr: ttype=qst&tval=b64urlsafe&srvs=foo&srvs=bar
#qstr: ttype=jmt&tval=jmt-token&srvs=foo&srvs=bar
logger.info(qstr)
attrs = urlparse.parse_qs(qstr)
ttype = attrs["ttype"][0]
token = attrs["tval"][0]
if (ttype == "qst"):
token = libauthn.base64url_decode(token)
try:
services = attrs["srvs"]
except KeyError:
services = ["OMNI"]
if (libauthn.verify_authn(ttype, token, authn_cert) == False):
return qstr
if (ttype == "qst"):
return assert_authz_qst(token, services, authn_cert, authz_keypem)
elif (ttype == "qsb"):
token = libauthn.qsb2qst(token)
token = assert_authz_qst(token, services, authn_cert, authz_keypem)
btkns = ""
tkns = token.split("\r\n")
for token in tkns :
if (token == None or len(token.strip()) <= 0): break
btkns += libauthn.qst2qsb(token, "authz") + "\r\n"
return btkns
elif (ttype == "jwt"):
return assert_authz_jwt(token, services, authn_cert, authz_keypem)
else:
logger.error("unsupported authn token: %s", qstr)
return qstr
def assert_authz_jwt(token, services, authn_cert, authz_keypem):
hdr, bdy, sig = token.split(".", 2)
hdr = libauthn.base64url_decode(hdr).strip()
bdy = libauthn.base64url_decode(bdy).strip()
logger.info("hdr=%s", hdr)
logger.info("bdy=%s", bdy)
hdr_obj = json.loads(hdr)
bdy_obj = json.loads(bdy)
subject = bdy_obj["s"]
authz_tokens = ""
for srvs in services :
bd = bdy[:-1] if bdy.endswith('}') else bdy
bd += ', "sv":"' + srvs + '"'
roles = assert_roles(subject, srvs)
logger.info("roles for %s %s: %s", subject, srvs, roles)
if (len(roles) > 0):
bd += ', "rl" : ["' + roles[0] + '"'
for i in range(1, len(roles)):
bd += ', "' + roles[i] + '"'
bd += "]}"
bd = json.loads(bd)
bd = json.dumps(bd)
logger.info("body=%s", bd)
stkn = base64.urlsafe_b64encode(json.dumps(hdr)) + "." + base64.urlsafe_b64encode(bd).rstrip("=")
sig = libauthn.hash_n_sign(stkn, "sha1", authz_keypem)
stkn = stkn + "." + base64.urlsafe_b64encode(sig).rstrip("=")
authz_tokens += stkn + "\r\n"
logger.info(authz_tokens)
return authz_tokens
def assert_authz_qst(token, services, authn_cert, authz_keypem):
    token = token[0:token.find("&h=")] #strip off authn sig
tkn_attrs = urlparse.parse_qs(token)
subject = tkn_attrs["s"][0]
authz_tokens = ""
for srvs in services :
stkn = token + "&sv=" + srvs
roles = assert_roles(subject, srvs)
logger.info("roles for %s %s: %s", subject, srvs, roles)
for role in roles:
stkn += "&rl=" + role
sig = libauthn.hash_n_sign(stkn, "sha1", authz_keypem)
stkn = stkn + "&h=" + base64.urlsafe_b64encode(sig).rstrip("=")
authz_tokens += stkn + "\r\n"
logger.info(authz_tokens)
return authz_tokens
def assert_roles(subject, service = None):
# test function to get user's group info
app, host, user = subject.split("~", 2)
groups = [g.gr_name for g in grp.getgrall() if user in g.gr_mem]
gid = pwd.getpwnam(user).pw_gid
groups.append(grp.getgrgid(gid).gr_name)
return groups
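# Illustrative sketch (not part of the original module): subjects have the
# form 'app~host~user', so asserting roles for a hypothetical local account
# named 'alice' looks like this.
def _example_roles():
    return assert_roles('cloudauth~localhost~alice')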
|
apache-2.0
| 6,111,352,499,256,282,000
| 21.806061
| 105
| 0.577199
| false
| 3.056864
| false
| false
| false
|
thtrieu/essence
|
src/optimizer.py
|
1
|
2211
|
import numpy as np
from .utils import extract
class Optimizer(object):
def __init__(self, lr = 1e-3, *args, **kwargs):
minimize, kwargs = extract('minimize', True, **kwargs)
self._lr = lr * (2. * np.float64(minimize) - 1.)
self._construct(*args, **kwargs)
def apply(self, var_slot):
self._current = var_slot
var_slot.apply_grad(self._rule)
self._current = None
def finalize_step(self): pass
    def _construct(self, *args, **kwargs): pass
class StochasticDescentOptimizer(Optimizer):
def _construct(self, decay = 1.):
self._decay = decay
def _rule(self, v, g):
return v - self._lr * g
def finalize_step(self):
self._lr *= self._decay
class RMSPropOptimizer(Optimizer):
def _construct(self, p = .975):
self._p = p
self._moments = dict()
def _rule(self, v, g):
c = self._current
m = self._moments
if c not in m:
m[c] = 0
r = m[c]
r = self._p * r + (1. - self._p) * g * g
m[c] = r
dv = self._lr * np.divide(g, np.sqrt(1e-8 + r))
return v - dv
class AdamOptimizer(Optimizer):
def _construct(self, p1 = .9, p2 = .999):
self._p1, self._p2 = p1, p2
self._moments = dict()
def _rule(self, v, g):
c = self._current
m = self._moments
if c not in m:
m[c] = dict({'s': 0, 'r': 0, 't': 0})
s, r, t = m[c]['s'], m[c]['r'], m[c]['t']
s = s * self._p1 + (1. - self._p1) * g
r = r * self._p2 + (1. - self._p2) * g * g
m[c]['s'], m[c]['r'], m[c]['t'] = s, r, (t + 1)
s_ = np.divide(s, 1. - np.power(self._p1, t + 1))
r_ = np.divide(r, 1. - np.power(self._p2, t + 1))
dv = self._lr * np.divide(s_, np.sqrt(r_) + 1e-8)
return v - dv
"""
Optimizer factory
"""
_optimizer_factory = dict({
'sgd' : StochasticDescentOptimizer,
'adam': AdamOptimizer,
'rmsprop': RMSPropOptimizer
})
def optimizer_factory(name, *args, **kwargs):
assert name in _optimizer_factory, \
'Optimizer {} not found'.format(name)
return _optimizer_factory[name](*args, **kwargs)
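# Illustrative usage sketch (not part of the original module, and assuming
# the package's extract() helper pops keyword options as its name suggests):
# one plain SGD update computed directly through the optimizer's rule.
def _example_sgd_step():
    opt = optimizer_factory('sgd', lr=1e-3)
    return opt._rule(np.ones(3), 0.5 * np.ones(3))  # v - lr * g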
|
gpl-3.0
| -509,026,370,414,560,960
| 25.97561
| 62
| 0.509272
| false
| 3.049655
| false
| false
| false
|
kwoodhouse93/astro-bomber
|
source/bomber.py
|
1
|
5440
|
import pygame
from pygame.locals import *
import pymunk
from source import game
from source.constants import *
from source.utilities import *
from source.weapon import *
class Bomber:
def __init__(self):
self.width = width = BOMBER_WIDTH
self.height = height = BOMBER_HEIGHT
vertices = [
(-(width/2), -(height/2)),
( 0, (height/2)),
( (width/2), -(height/2))
]
radius = 5
mass = 5
moment = pymunk.moment_for_poly(mass, vertices, radius=radius)
self.body = pymunk.Body(mass, moment)
self.body.position = SCREEN_CENTER
self.shape = pymunk.Poly(self.body, vertices, radius=radius)
self.shape.collision_type = CT_BOMBER
# Object constants
self.turn_torque = BOMBER_TORQUE
self.engine_thrust = BOMBER_MAIN_ENGINE_THRUST
self.braking_force = BOMBER_BRAKE_FORCE
self.reverse_thrust = BOMBER_REVERSE_ENGINE_THRUST
self.ang_vel_limit = BOMBER_ANG_VEL_LIMIT
# State variables
self.strength = BOMBER_STRENGTH
self.turning_left = False
self.turning_right = False
self.thrusting = False
self.braking = False
# Register callback functors
event_manager = game.event_manager
event_manager.register_keydown(K_LEFT, self.cb_left_turn_on)
event_manager.register_keydown(K_RIGHT, self.cb_right_turn_on)
event_manager.register_keydown(K_UP, self.cb_thrust_forwards_on)
event_manager.register_keydown(K_DOWN, self.cb_thrust_backwards_on)
event_manager.register_keydown(K_LSHIFT, self.cb_fire_primary_weapon)
event_manager.register_keydown(K_RSHIFT, self.cb_fire_primary_weapon)
event_manager.register_keydown(K_LCTRL, self.cb_fire_secondary_weapon)
event_manager.register_keydown(K_RCTRL, self.cb_fire_secondary_weapon)
event_manager.register_keyup(K_LEFT, self.cb_left_turn_off)
event_manager.register_keyup(K_RIGHT, self.cb_right_turn_off)
event_manager.register_keyup(K_UP, self.cb_thrust_forwards_off)
event_manager.register_keyup(K_DOWN, self.cb_thrust_backwards_off)
# Add to space
game.space.add(self.body, self.shape)
# Add components
self.components = []
self.primary_weapon = PrimaryCannon(self)
self.components.append(self.primary_weapon)
self.secondary_weapon = SecondaryBombLauncher(self)
self.components.append(self.secondary_weapon)
def cb_left_turn_on(self, event):
self.turning_left = True
def cb_right_turn_on(self, event):
self.turning_right = True
def cb_thrust_forwards_on(self, event):
self.thrusting = True
def cb_thrust_backwards_on(self, event):
self.braking = True
def cb_left_turn_off(self, event):
self.turning_left = False
def cb_right_turn_off(self, event):
self.turning_right = False
def cb_thrust_forwards_off(self, event):
self.thrusting = False
def cb_thrust_backwards_off(self, event):
self.braking = False
def cb_fire_primary_weapon(self, event):
self.primary_weapon.activate()
def cb_fire_secondary_weapon(self, event):
self.secondary_weapon.activate()
def hit(self, damage):
self.strength -= damage
if self.strength < 0:
print("SHIP DESTROYED")
# self.strength = BOMBER_STRENGTH
game.object_manager.unregister_player(self)
def delete(self):
game.space.remove(self.body, self.shape)
for component in self.components:
game.object_manager.unregister(component)
def update(self):
Utils.wrap_body(self.body, radius=(self.width / 2))
# print(str(self.body.angular_velocity))
if self.turning_left and not self.turning_right and self.body.angular_velocity < self.ang_vel_limit:
self.body.torque = self.turn_torque
elif self.turning_right and not self.turning_left and self.body.angular_velocity > -self.ang_vel_limit:
self.body.torque = -self.turn_torque
elif self.body.angular_velocity > 0.1:
self.body.torque = -self.turn_torque
elif self.body.angular_velocity < -0.1:
self.body.torque = self.turn_torque
else:
self.body.angular_velocity = 0
self.body.torque = 0
# forward_vel = self.body.velocity.rotated(-self.body.angle).y
if self.thrusting:
force = (0, self.engine_thrust)
point = (0, 0)
self.body.apply_force_at_local_point(force, point)
elif self.braking:# and forward_vel > 1:
# force = (0, -self.braking_force)
force = (0, -self.reverse_thrust)
point = (0, 0)
self.body.apply_force_at_local_point(force, point)
# print(str(self.body.torque))
def draw(self):
screen = game.screen
# width, height = Utils.get_screen_size()
# print (str(self.body.position))
# position = int(self.body.position.x), \
# height - int(self.body.position.y)
# pygame.draw.circle(screen, (0, 0, 255), position, int(self.radius), 2)
# def draw_bomber(screen, bomber):
# p = int(bomber.body.position.x), 600 - int(bomber.body.position.y)
# pygame.draw.circle(screen, (0, 0, 255), p, int(bomber.radius), 2)
|
mit
| -653,415,825,549,484,000
| 36.777778
| 111
| 0.627206
| false
| 3.37469
| false
| false
| false
|
rebase-helper/rebase-helper
|
rebasehelper/tests/functional/test_rebase.py
|
1
|
10429
|
# -*- coding: utf-8 -*-
#
# This tool helps you rebase your package to the latest version
# Copyright (C) 2013-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hráček <phracek@redhat.com>
# Tomáš Hozza <thozza@redhat.com>
# Nikola Forró <nforro@redhat.com>
# František Nečas <fifinecas@seznam.cz>
import json
import os
import git # type: ignore
import pytest # type: ignore
import unidiff # type: ignore
from typing import List
from rebasehelper.cli import CLI
from rebasehelper.config import Config
from rebasehelper.application import Application
from rebasehelper.constants import RESULTS_DIR, CHANGES_PATCH
from rebasehelper.helpers.git_helper import GitHelper
@pytest.fixture
def initialized_git_repo(workdir):
repo = git.Repo.init(workdir)
# Configure user otherwise app.apply_changes() will fail
repo.git.config('user.name', GitHelper.get_user(), local=True)
repo.git.config('user.email', GitHelper.get_email(), local=True)
repo.git.add(all=True)
repo.index.commit('Initial commit', skip_hooks=True)
return repo
class TestRebase:
TEST_FILES: List[str] = [
'rebase/test.spec',
'rebase/applicable.patch',
'rebase/backported.patch',
'rebase/conflicting.patch',
'rebase/renamed-0.1.patch',
]
@pytest.mark.xfail(reason='''
the test fails from time to time due to RPM macros not being expanded,
see https://github.com/rebase-helper/rebase-helper/issues/811
''')
@pytest.mark.parametrize('buildtool', [
pytest.param('rpmbuild', marks=pytest.mark.skipif(
os.geteuid() != 0,
reason='requires superuser privileges')),
pytest.param('mock', marks=pytest.mark.long_running),
])
@pytest.mark.parametrize('favor_on_conflict', ['upstream', 'downstream', 'off'])
@pytest.mark.integration
@pytest.mark.usefixtures('initialized_git_repo')
def test_rebase(self, buildtool, favor_on_conflict):
new_version = '0.2'
cli = CLI([
'--non-interactive',
'--disable-inapplicable-patches',
'--buildtool', buildtool,
'--favor-on-conflict', favor_on_conflict,
'--outputtool', 'json',
'--pkgcomparetool', 'rpmdiff,pkgdiff,abipkgdiff,licensecheck,sonamecheck',
'--color=always',
'--apply-changes',
new_version,
])
config = Config()
config.merge(cli)
execution_dir, results_dir = Application.setup(config)
app = Application(config, os.getcwd(), execution_dir, results_dir)
app.run()
changes = os.path.join(RESULTS_DIR, CHANGES_PATCH)
patch = unidiff.PatchSet.from_filename(changes, encoding='UTF-8')
if favor_on_conflict == 'upstream':
backported_patch, conflicting_patch, renamed_patch, spec_file = patch
assert conflicting_patch.is_removed_file # conflicting.patch
elif favor_on_conflict == 'downstream':
backported_patch, conflicting_patch, renamed_patch, spec_file = patch
assert conflicting_patch.is_modified_file # conflicting.patch
else:
backported_patch, renamed_patch, spec_file = patch
# Non interactive mode - inapplicable patches are only commented out.
assert [h for h in spec_file if '+#Patch1: conflicting.patch\n' in h.target]
assert [h for h in spec_file if '+#%%patch1 -p1\n' in h.target]
assert renamed_patch.is_rename # renamed patch 0.1.patch to 0.2.patch
assert os.path.basename(renamed_patch.source_file) == 'renamed-0.1.patch'
assert os.path.basename(renamed_patch.target_file) == 'renamed-0.2.patch'
# Check that the renamed patch path is unchanged
assert not [h for h in spec_file if '-Patch3: renamed-%{version}.patch\n' in h.source]
assert backported_patch.is_removed_file # backported.patch
assert spec_file.is_modified_file # test.spec
if favor_on_conflict != 'downstream':
assert [h for h in spec_file if '-Patch1: conflicting.patch\n' in h.source]
assert [h for h in spec_file if '-%patch1 -p1\n' in h.source]
assert [h for h in spec_file if '-Patch2: backported.patch\n' in h.source]
assert [h for h in spec_file if '-%patch2 -p1\n' in h.source]
assert [h for h in spec_file if '+- New upstream release {}\n'.format(new_version) in h.target]
with open(os.path.join(RESULTS_DIR, 'report.json')) as f:
report = json.load(f)
assert 'success' in report['result']
# patches
assert 'applicable.patch' in report['patches']['untouched']
if favor_on_conflict == 'upstream':
# In case of conflict, upstream code is favored, therefore conflicting patch is unused.
assert 'conflicting.patch' in report['patches']['deleted']
elif favor_on_conflict == 'downstream':
assert 'conflicting.patch' in report['patches']['modified']
else:
# Non interactive mode - skipping conflicting patches
assert 'conflicting.patch' in report['patches']['inapplicable']
assert 'backported.patch' in report['patches']['deleted']
# licensecheck
assert report['checkers']['licensecheck']['license_changes']
assert len(report['checkers']['licensecheck']['disappeared_licenses']) == 1
assert len(report['checkers']['licensecheck']['new_licenses']) == 1
# rpmdiff
assert report['checkers']['rpmdiff']['files_changes']['added'] == 1
assert report['checkers']['rpmdiff']['files_changes']['changed'] == 3
assert report['checkers']['rpmdiff']['files_changes']['removed'] == 1
# abipkgdiff
assert report['checkers']['abipkgdiff']['abi_changes']
lib = report['checkers']['abipkgdiff']['packages']['test']['libtest1.so']
if 'Function symbols changes summary' in lib:
assert lib['Function symbols changes summary']['Added']['count'] == 1
elif 'Functions changes summary' in lib:
assert lib['Functions changes summary']['Added']['count'] == 1
if favor_on_conflict != 'downstream':
if 'Variable symbols changes summary' in lib:
assert lib['Variable symbols changes summary']['Removed']['count'] == 1
elif 'Variables changes summary' in lib:
assert lib['Variables changes summary']['Removed']['count'] == 1
# sonamecheck
change = report['checkers']['sonamecheck']['soname_changes']['test']['changed'][0]
assert change['from'] == 'libtest2.so.0.1'
assert change['to'] == 'libtest2.so.0.2'
repo = git.Repo(execution_dir)
assert '- New upstream release {}'.format(new_version) in repo.commit().summary
@pytest.mark.parametrize('buildtool', [
pytest.param('rpmbuild', marks=pytest.mark.skipif(
os.geteuid() != 0,
reason='requires superuser privileges')),
pytest.param('mock', marks=pytest.mark.long_running),
])
@pytest.mark.integration
@pytest.mark.usefixtures('initialized_git_repo')
def test_files_build_log_hook(self, buildtool):
new_version = '0.3'
cli = CLI([
'--non-interactive',
'--disable-inapplicable-patches',
'--force-build-log-hooks',
'--buildtool', buildtool,
'--outputtool', 'json',
'--pkgcomparetool', '',
'--color=always',
new_version,
])
config = Config()
config.merge(cli)
execution_dir, results_dir = Application.setup(config)
app = Application(config, os.getcwd(), execution_dir, results_dir)
app.run()
changes = os.path.join(RESULTS_DIR, CHANGES_PATCH)
patch = unidiff.PatchSet.from_filename(changes, encoding='UTF-8')
_, _, spec_file = patch
assert spec_file.is_modified_file
# removed files
assert [h for h in spec_file if '-%doc README.md CHANGELOG.md\n' in h.source]
assert [h for h in spec_file if '+%doc README.md\n' in h.target]
assert [h for h in spec_file if '-%doc %{_docdir}/%{name}/notes.txt\n' in h.source]
assert [h for h in spec_file if '-%{_datadir}/%{name}/1.dat\n' in h.source]
assert [h for h in spec_file if '-%{_datadir}/%{name}/extra/C.dat\n' in h.source]
assert [h for h in spec_file if '-%doc data/extra/README.extra\n' in h.source]
# added files
assert [h for h in spec_file if '+%{_datadir}/%{name}/2.dat\n' in h.target]
assert [h for h in spec_file if '+%{_datadir}/%{name}/extra/D.dat\n' in h.target]
with open(os.path.join(RESULTS_DIR, 'report.json')) as f:
report = json.load(f)
assert 'success' in report['result']
# files build log hook
added = report['build_log_hooks']['files']['added']
assert '%{_datadir}/%{name}/2.dat' in added['%files']
assert '%{_datadir}/%{name}/extra/D.dat' in added['%files extra']
removed = report['build_log_hooks']['files']['removed']
assert 'CHANGELOG.md' in removed['%files']
assert '%{_docdir}/%{name}/notes.txt' in removed['%files']
assert '%{_datadir}/%{name}/1.dat' in removed['%files']
assert '%{_datadir}/%{name}/extra/C.dat' in removed['%files extra']
assert 'data/extra/README.extra' in removed['%files extra']
|
gpl-2.0
| 1,191,313,108,908,472,000
| 46.807339
| 103
| 0.613702
| false
| 3.857143
| true
| false
| false
|
dssg/wikienergy
|
proto/pylearn2/create_appliance_detection_dataset.py
|
1
|
1872
|
import sys
import os.path
sys.path.append(os.path.abspath(os.path.join(os.pardir,os.pardir)))
import disaggregator as da
import disaggregator.PecanStreetDatasetAdapter as psda
import pylearn2.datasets as ds
import pickle
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='create appliance detection datasets for pylearn2.')
parser.add_argument('appliance',type=str,
help='appliance to make the datasets around')
parser.add_argument('data_dir',type=str,
help='directory in which to store data')
parser.add_argument('prefix',type=str,
help='prefix for dataset files')
args = parser.parse_args()
schema = 'shared'
tables = [u'validated_01_2014',
u'validated_02_2014',
u'validated_03_2014',
u'validated_04_2014',
u'validated_05_2014',]
db_url = "postgresql://USERNAME:PASSWORD@db.wiki-energy.org:5432/postgres"
psda.set_url(db_url)
window_length=24*4*7
window_stride=24*4
train,valid,test = psda.get_appliance_detection_arrays(
schema,tables,args.appliance,window_length,window_stride,10)
train_dataset = ds.DenseDesignMatrix(X=train[0],y=train[1])
valid_dataset = ds.DenseDesignMatrix(X=valid[0],y=valid[1])
test_dataset = ds.DenseDesignMatrix(X=test[0],y=test[1])
with open('{data_dir}/{prefix}_train.pkl'
.format(data_dir=args.data_dir,prefix=args.prefix), 'w') as f:
pickle.dump(train_dataset,f)
with open('{data_dir}/{prefix}_valid.pkl'
.format(data_dir=args.data_dir,prefix=args.prefix), 'w') as f:
pickle.dump(valid_dataset,f)
with open('{data_dir}/{prefix}_test.pkl'
.format(data_dir=args.data_dir,prefix=args.prefix), 'w') as f:
pickle.dump(test_dataset,f)
import pdb; pdb.set_trace()
|
mit
| -4,833,271,291,454,625,000
| 36.44
| 101
| 0.657585
| false
| 3.379061
| true
| false
| false
|
diana-hep/carl
|
tests/distributions/test_join.py
|
1
|
1471
|
# Carl is free software; you can redistribute it and/or modify it
# under the terms of the Revised BSD License; see LICENSE file for
# more details.
import numpy as np
from numpy.testing import assert_array_almost_equal
from carl.distributions import Join
from carl.distributions import Normal
from carl.distributions import Histogram
def test_join():
p = Join(components=[Normal(mu=0), Normal(mu=1), Normal(mu=2)])
assert p.ndim == 3
assert len(p.parameters_) == 6
X = p.rvs(10000, random_state=1)
assert X.shape == (10000, 3)
assert np.abs(np.mean(X[:, 0]) - 0.) < 0.05
assert np.abs(np.mean(X[:, 1]) - 1.) < 0.05
assert np.abs(np.mean(X[:, 2]) - 2.) < 0.05
assert_array_almost_equal(-np.log(p.pdf(X)), p.nll(X))
def test_join_non_theano():
h0 = Histogram(interpolation="linear", bins=30)
h1 = Histogram(interpolation="linear", bins=30)
h2 = Histogram(interpolation="linear", bins=30)
h0.fit(Normal(mu=0).rvs(10000, random_state=0))
h1.fit(Normal(mu=1).rvs(10000, random_state=1))
h2.fit(Normal(mu=2).rvs(10000, random_state=2))
p = Join(components=[h0, h1, h2])
assert p.ndim == 3
assert len(p.parameters_) == 0
X = p.rvs(10000, random_state=1)
assert X.shape == (10000, 3)
assert np.abs(np.mean(X[:, 0]) - 0.) < 0.05
assert np.abs(np.mean(X[:, 1]) - 1.) < 0.05
assert np.abs(np.mean(X[:, 2]) - 2.) < 0.05
assert_array_almost_equal(-np.log(p.pdf(X)), p.nll(X))
|
bsd-3-clause
| 550,063,724,693,675,600
| 31.688889
| 67
| 0.635622
| false
| 2.839768
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic_py3.py
|
1
|
3694
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_py3 import Resource
class DataLakeAnalyticsAccountBasic(Resource):
"""A Data Lake Analytics account object, containing all information associated
with the named Data Lake Analytics account.
Variables are only populated by the server, and will be ignored when
sending a request.
    :ivar id: The resource identifier.
:vartype id: str
:ivar name: The resource name.
:vartype name: str
:ivar type: The resource type.
:vartype type: str
:ivar location: The resource location.
:vartype location: str
:ivar tags: The resource tags.
:vartype tags: dict[str, str]
:ivar account_id: The unique identifier associated with this Data Lake
Analytics account.
:vartype account_id: str
:ivar provisioning_state: The provisioning status of the Data Lake
Analytics account. Possible values include: 'Failed', 'Creating',
'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting',
'Deleted', 'Undeleting', 'Canceled'
:vartype provisioning_state: str or
~azure.mgmt.datalake.analytics.account.models.DataLakeAnalyticsAccountStatus
:ivar state: The state of the Data Lake Analytics account. Possible values
include: 'Active', 'Suspended'
:vartype state: str or
~azure.mgmt.datalake.analytics.account.models.DataLakeAnalyticsAccountState
:ivar creation_time: The account creation time.
:vartype creation_time: datetime
:ivar last_modified_time: The account last modified time.
:vartype last_modified_time: datetime
:ivar endpoint: The full CName endpoint for this account.
:vartype endpoint: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'readonly': True},
'tags': {'readonly': True},
'account_id': {'readonly': True},
'provisioning_state': {'readonly': True},
'state': {'readonly': True},
'creation_time': {'readonly': True},
'last_modified_time': {'readonly': True},
'endpoint': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'account_id': {'key': 'properties.accountId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'},
'state': {'key': 'properties.state', 'type': 'DataLakeAnalyticsAccountState'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'},
'endpoint': {'key': 'properties.endpoint', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(DataLakeAnalyticsAccountBasic, self).__init__(**kwargs)
self.account_id = None
self.provisioning_state = None
self.state = None
self.creation_time = None
self.last_modified_time = None
self.endpoint = None
|
mit
| 5,611,381,449,626,307,000
| 40.977273
| 112
| 0.615864
| false
| 4.077263
| false
| false
| false
|
Mugginz/Thermofun
|
thermostat/subroutine.py
|
1
|
2625
|
# Helper functions for controller
import time, os, sys, threading, socket
THERMOMETER_URI = '/sys/bus/w1/devices/28-0000054b97a5/w1_slave'
LOGFILE = 'incidents.log'
# Change argument to 'w' to clear logs on startup.
with open(LOGFILE, 'a') as f:
pass
def eventLog(message):
try:
# Limit file size.
with open(LOGFILE, 'r+') as f:
line_count = sum(1 for line in f)
if line_count > 1023:
f.seek(0)
for i in range(line_count - 1023):
f.readline()
remainder = f.read()
f.seek(0)
f.write(remainder)
f.truncate()
entry = message + " @ " + time.strftime("%Y-%m-%d, %H:%M:%S") + "\n"
with open(LOGFILE, 'a+') as f:
f.write(entry)
except EnvironmentError:
return 1
return 0
# Use sysfs to read thermometer.
def getTemperature(dbg):
temperature = None
try:
with open(THERMOMETER_URI, 'r') as poll:
measure = poll.readline()
if measure.split()[11] == "YES":
measure = poll.readline()
temperature = (float(measure.split("t=")[1])) / 1000
if temperature > 80:
if dbg:
print("Thermometer error value " + str(temperature) + " reported.")
temperature = None
except EnvironmentError as e:
if dbg:
print(str(e))
except Exception as e:
if dbg:
print("Thermometer event, check logs.")
eventLog(str(e))
return temperature
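# Illustrative note (not part of the original file): a typical w1_slave
# readout from a DS18B20-family sensor looks like the two lines below; the
# parser above checks that token 11 of the first line is "YES" (CRC ok)
# before reading the millidegrees-Celsius value after "t=" on the second.
#
#   72 01 4b 46 7f ff 0e 10 57 : crc=57 YES
#   72 01 4b 46 7f ff 0e 10 57 t=23125   -> 23.125 degrees C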
# For loading thermal profile settings:
from flask_sqlalchemy import SQLAlchemy
# Share db with flask app.
# TODO:
# make absolute path
sys.path.append(os.path.dirname(os.getcwd()))
from control_panel import db, models
# Maybe not the best way to do this.
def getSchedules(dbg):
    timetable = []
    last_error = None
for i in range(3):
try:
profile_active = models.Profile.query.filter_by(active=True).first()
schedules = profile_active.schedules.all()
# except SQLAlchemy.SQLAlchemyError as e:
# time.sleep(3)
        except Exception as e:
            last_error = e
            time.sleep(3)
else:
for s in schedules:
timetable.append((s.time, s.temperature))
break
else:
        eventLog(str(last_error))
if dbg:
print("Database event, check logs.")
return timetable
# Listen for changes to settings.
# msg should be an empty dictionary
def getNotification(soc, msg, lck, dbg):
while 1:
conn, addr = soc.accept()
if dbg:
print("Connected to " + str(addr[0]) + ":" + str(addr[1]))
try:
data = conn.recv(256)
except Exception as e:
if dbg:
print("Network event, check logs.")
eventLog(str(e))
else:
clean = data.strip()
settings = clean.split(' ')
lck.acquire(True)
msg[settings[0]] = settings[1]
lck.release()
if dbg:
print("Successfully received data.\n")
conn.shutdown(socket.SHUT_RD)
conn.close()
return 0
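# Illustrative note (not in the original file): clients are expected to send
# one space-separated "key value" pair, e.g. b"target_temp 21.5" (hypothetical
# key), which getNotification stores into the shared msg dict under lck.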
|
gpl-2.0
| -4,010,749,887,447,146,000
| 24.240385
| 72
| 0.663238
| false
| 2.890969
| false
| false
| false
|
glabilloy/fabrydb
|
fabrydb/conf/settings.py
|
1
|
1697
|
import os
from global_settings import *
try:
from local_settings import *
from local_settings_secret import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (src.conf.local_settings). Trying to import Heroku config...')
try:
from local_settings_heroku import *
from local_settings_heroku_secret import *
warnings.warn('Local Heroku config loaded')
except ImportError:
        warnings.warn('Heroku local settings not found either (src.conf.local_settings_heroku)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
# This is used as a "seed" for various hashing algorithms. This must be set to
# a very long random string (40+ characters)
SECRET_KEY = 'read from secret settings'
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_PATH, '_site/static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
|
bsd-2-clause
| -1,389,080,983,223,519,000
| 33.632653
| 116
| 0.713612
| false
| 3.449187
| false
| false
| false
|
dibaunaumh/tikal-corp-website
|
cms/menu.py
|
1
|
8499
|
from menus.menu_pool import menu_pool
from menus.base import Menu, NavigationNode, Modifier
from cms.utils import get_language_from_request
from cms.utils.moderator import get_page_queryset, get_title_queryset
from django.conf import settings
from django.contrib.sites.models import Site
from cms.utils.i18n import get_fallback_languages
from cms.exceptions import NoHomeFound
from cms.apphook_pool import apphook_pool
from cms.models.titlemodels import Title
def page_to_node(page, home, cut):
parent_id = page.parent_id
if home and page.parent_id == home.pk and cut:
parent_id = None
# possible fix for a possible problem
#if parent_id and not page.parent.get_calculated_status():
# parent_id = None # ????
attr = {'soft_root':page.soft_root,
'auth_required':page.login_required,
'reverse_id':page.reverse_id,}
if page.limit_visibility_in_menu == None:
attr['visible_for_authenticated'] = True
attr['visible_for_anonymous'] = True
else:
attr['visible_for_authenticated'] = page.limit_visibility_in_menu == 1
attr['visible_for_anonymous'] = page.limit_visibility_in_menu == 2
if page.pk == home.pk:
attr['is_home'] = True
extenders = []
if page.navigation_extenders:
extenders.append(page.navigation_extenders)
try:
app_name = page.get_application_urls(fallback=False)
except Title.DoesNotExist:
app_name = None
if app_name:
app = apphook_pool.get_apphook(app_name)
for menu in app.menus:
extenders.append(menu.__name__)
    attr['redirect_url'] = page.get_redirect()  # save redirect URL, if any
if extenders:
attr['navigation_extenders'] = extenders
n = NavigationNode(
page.get_menu_title(),
page.get_absolute_url(),
page.pk,
parent_id,
attr=attr,
visible=page.in_navigation,
)
return n
class CMSMenu(Menu):
def get_nodes(self, request):
page_queryset = get_page_queryset(request)
site = Site.objects.get_current()
lang = get_language_from_request(request)
filters = {
'site':site,
}
if settings.CMS_HIDE_UNTRANSLATED:
filters['title_set__language'] = lang
pages = page_queryset.published().filter(**filters).order_by("tree_id", "lft")
ids = []
nodes = []
first = True
home_cut = False
home_children = []
home = None
for page in pages:
if not home:
home = page
page.home_pk_cache = home.pk
if first and page.pk != home.pk:
home_cut = True
if (page.parent_id == home.pk or page.parent_id in home_children) and home_cut:
page.home_cut_cache = True
home_children.append(page.pk)
if (page.pk == home.pk and home.in_navigation) or page.pk != home.pk:
first = False
ids.append(page.id)
titles = list(get_title_queryset(request).filter(page__in=ids, language=lang))
        for page in pages:  # add the title and slugs and some meta data
for title in titles:
if title.page_id == page.pk:
if not hasattr(page, "title_cache"):
page.title_cache = {}
page.title_cache[title.language] = title
nodes.append(page_to_node(page, home, home_cut))
ids.remove(page.pk)
if ids: # get fallback languages
fallbacks = get_fallback_languages(lang)
for l in fallbacks:
titles = list(get_title_queryset(request).filter(page__in=ids, language=l))
for title in titles:
                    for page in pages:  # add the title and slugs and some meta data
if title.page_id == page.pk:
if not hasattr(page, "title_cache"):
page.title_cache = {}
page.title_cache[title.language] = title
nodes.append(page_to_node(page, home, home_cut))
ids.remove(page.pk)
break
if not ids:
break
return nodes
menu_pool.register_menu(CMSMenu)
class NavExtender(Modifier):
def modify(self, request, nodes, namespace, id, post_cut, breadcrumb):
if post_cut:
return nodes
exts = []
# rearrange the parent relations
home = None
for node in nodes:
if node.attr.get("is_home", False):
home = node
extenders = node.attr.get("navigation_extenders", None)
if extenders:
for ext in extenders:
if not ext in exts:
exts.append(ext)
for n in nodes:
if n.namespace == ext and not n.parent_id:# if home has nav extenders but home is not visible
if node.attr.get("is_home", False) and not node.visible:
n.parent_id = None
n.parent_namespace = None
n.parent = None
else:
n.parent_id = node.id
n.parent_namespace = node.namespace
n.parent = node
node.children.append(n)
removed = []
# find all not assigned nodes
for menu in menu_pool.menus.items():
if hasattr(menu[1], 'cms_enabled') and menu[1].cms_enabled and not menu[0] in exts:
for node in nodes:
if node.namespace == menu[0]:
removed.append(node)
if breadcrumb:
# if breadcrumb and home not in navigation add node
if breadcrumb and home and not home.visible:
home.visible = True
if request.path == home.get_absolute_url():
home.selected = True
else:
home.selected = False
# remove all nodes that are nav_extenders and not assigned
for node in removed:
nodes.remove(node)
return nodes
menu_pool.register_modifier(NavExtender)
class SoftRootCutter(Modifier):
def modify(self, request, nodes, namespace, id, post_cut, breadcrumb):
if post_cut or not settings.CMS_SOFTROOT:
return nodes
selected = None
root_nodes = []
for node in nodes:
if node.selected:
selected = node
if not node.parent:
root_nodes.append(node)
if selected:
if selected.attr.get("soft_root", False):
nodes = selected.get_descendants()
selected.parent = None
nodes = [selected] + nodes
else:
nodes = self.find_ancestors(selected, nodes)
nodes = self.find_children(selected, nodes)
else:
for node in root_nodes:
self.find_children(node, nodes)
return nodes
def find_children(self, node, nodes):
for n in node.children:
if n.attr.get("soft_root", False):
self.remove_children(n, nodes)
return nodes
def remove_children(self, node, nodes):
for n in node.children:
nodes.remove(n)
self.remove_children(n, nodes)
node.children = []
def find_ancestors(self, node, nodes):
is_root = False
if node.parent:
if node.parent.attr.get("soft_root", False):
is_root = True
nodes = node.parent.get_descendants()
node.parent.parent = None
nodes = [node.parent] + nodes
else:
nodes = self.find_ancestors(node.parent, nodes)
else:
for n in nodes:
if n != node and not n.parent:
self.find_children(n, nodes)
for n in node.children:
if n != node:
self.find_children(n, nodes)
if is_root:
n.parent = None
return nodes
menu_pool.register_modifier(SoftRootCutter)
|
bsd-3-clause
| -5,967,053,210,868,146,000
| 37.986239
| 117
| 0.53218
| false
| 4.273002
| false
| false
| false
|
AdirShemesh/LibraryWiki
|
app/node_entities.py
|
1
|
3155
|
import json
from app.authorities import CODES
from requests import get
import xmltodict
class Entity:
def __init__(self, data):
self.data = data
self.properties = self._build_properties()
self.labels = self._build_labels()
def _build_properties(self):
        raise NotImplementedError
def _build_labels(self):
        raise NotImplementedError
class Authority(Entity):
def _build_properties(self):
# assign id + dumps the entire xml record in 'data' field
properties = {'data': json.dumps(self.data), 'id': self.data['001'][0]['#text']}
# assigns type
if '100' in self.data:
properties['type'] = 'Person'
elif '151' in self.data:
properties['type'] = 'Location'
elif '150' in self.data:
properties['type'] = 'Topic'
else:
properties['type'] = None
for tag, subfields in self.data.items():
for subfield in subfields:
CODES.get(tag) and properties.update(CODES[tag](subfield))
return properties
def _build_labels(self):
authority_type = self.properties['type']
if authority_type:
return 'Authority', authority_type
return 'Authority',
class Record(Entity):
def _build_properties(self):
return {'id': self.data['control']['recordid'], 'data': str(self.data),
'title': self.data['display']['title']}
def _build_labels(self):
return 'Record', self.data['display']['type']
class Photo(Record):
def __init__(self, data):
self.data = data
self._fl_url = "http://aleph.nli.org.il/X?op=find-doc&doc_num={}&base={}"
self._fl_url = self._build_fl_url()
super().__init__(data)
@property
def _fl_base(self):
return 'nnl03'
def _build_fl_url(self):
return self._fl_url.format(self.data['control']['sourcerecordid'], self._fl_base)
def _build_properties(self):
properties = super()._build_properties()
fl = self._get_fl()
if fl:
properties["fl"] = fl
return properties
def _build_labels(self):
return super()._build_labels() + ('Photo',)
def _get_fl(self):
fl = None
fields = xmltodict.parse(get(self._fl_url).content)['find-doc'].get('record')
if not fields:
return None
fields = fields['metadata']['oai_marc']['varfield']
for field in fields:
if not isinstance(field, dict) or not field.get('@id'):
continue
if field['@id'] == 'ROS':
fl = [sub['#text'] for sub in field['subfield'] if sub.get('@label') == 'd'] or None
break
return fl and fl[0]
class Portrait(Photo):
def _build_properties(self):
properties = super()._build_properties()
topic = self.data['facets'].get('topic')
if topic:
properties['topic'] = topic
return properties
@property
def _fl_base(self):
return 'nnl01'
def _build_labels(self):
return super()._build_labels() + ('Portrait',)
|
gpl-2.0
| 9,033,349,775,582,352,000
| 28.485981
| 100
| 0.562282
| false
| 3.899876
| false
| false
| false
|
cloudify-cosmo/cloudify-plugins-common
|
cloudify/utils.py
|
1
|
15485
|
########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
from contextlib import contextmanager
import logging
import os
import random
import shlex
import ssl
import string
import subprocess
import sys
import tempfile
import traceback
import StringIO
from distutils.version import LooseVersion
from cloudify import cluster, constants
from cloudify.state import workflow_ctx, ctx
from cloudify.exceptions import CommandExecutionException
CFY_EXEC_TEMPDIR_ENVVAR = 'CFY_EXEC_TEMP'
class ManagerVersion(object):
"""Cloudify manager version helper class."""
def __init__(self, raw_version):
"""Raw version, for example: 3.4.0-m1, 3.3, 3.2.1, 3.3-rc1."""
components = []
for x in raw_version.split('-')[0].split('.'):
try:
components += [int(x)]
except ValueError:
pass
if len(components) == 2:
components.append(0)
self.major = components[0]
self.minor = components[1]
self.service = components[2]
def greater_than(self, other):
"""Returns true if this version is greater than the provided one."""
if self.major > other.major:
return True
if self.major == other.major:
if self.minor > other.minor:
return True
if self.minor == other.minor and self.service > other.service:
return True
return False
def equals(self, other):
"""Returns true if this version equals the provided version."""
return self.major == other.major and self.minor == other.minor and \
self.service == other.service
def __str__(self):
return '{0}.{1}.{2}'.format(self.major, self.minor, self.service)
def __eq__(self, other):
return self.equals(other)
def __gt__(self, other):
return self.greater_than(other)
def __lt__(self, other):
return other > self
def __ge__(self, other):
return self > other or self == other
def __le__(self, other):
return self < other or self == other
def __ne__(self, other):
return self > other or self < other
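# Illustrative usage sketch (not part of the original module): two-component
# versions are padded with a zero service level, and pre-release suffixes are
# ignored, so:
#
#   >>> ManagerVersion('3.4.0-m1') == ManagerVersion('3.4')
#   True
#   >>> ManagerVersion('3.3-rc1') < ManagerVersion('3.4')
#   True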
def setup_logger(logger_name,
logger_level=logging.INFO,
handlers=None,
remove_existing_handlers=True,
logger_format=None,
propagate=True):
"""
:param logger_name: Name of the logger.
:param logger_level: Level for the logger (not for specific handler).
:param handlers: An optional list of handlers (formatter will be
overridden); If None, only a StreamHandler for
sys.stdout will be used.
:param remove_existing_handlers: Determines whether to remove existing
handlers before adding new ones
:param logger_format: the format this logger will have.
:param propagate: propagate the message the parent logger.
:return: A logger instance.
:rtype: logging.Logger
"""
if logger_format is None:
logger_format = '%(asctime)s [%(levelname)s] [%(name)s] %(message)s'
logger = logging.getLogger(logger_name)
if remove_existing_handlers:
for handler in logger.handlers:
logger.removeHandler(handler)
if not handlers:
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
handlers = [handler]
formatter = logging.Formatter(fmt=logger_format,
datefmt='%H:%M:%S')
for handler in handlers:
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logger_level)
if not propagate:
logger.propagate = False
return logger
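# Illustrative usage sketch (hypothetical logger name; not part of the
# original module):
#
#   logger = setup_logger('my_plugin', logger_level=logging.DEBUG)
#   logger.info('hello')  # -> "12:00:00 [INFO] [my_plugin] hello"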
def format_exception(e):
"""Human-readable representation of an exception, as a bytestring.
The canonical way to print an exception, str(e), also made to handle
unicode exception messages in python 2.
Additionally, if the exception message is incompatible with utf-8,
(which should only happen in extreme cases, such as NUL bytes),
fallback to repr().
"""
try:
return str(e)
except UnicodeEncodeError:
try:
return unicode(e).encode('utf-8')
except UnicodeEncodeError:
return repr(e)
def get_manager_file_server_url():
"""
Returns the manager file server base url.
"""
if cluster.is_cluster_configured():
active_node_ip = cluster.get_cluster_active()
port = get_manager_rest_service_port()
if active_node_ip:
return 'https://{0}:{1}/resources'.format(active_node_ip, port)
return os.environ[constants.MANAGER_FILE_SERVER_URL_KEY]
def get_manager_file_server_root():
"""
Returns the host the manager REST service is running on.
"""
return os.environ[constants.MANAGER_FILE_SERVER_ROOT_KEY]
def get_manager_rest_service_host():
"""
Returns the host the manager REST service is running on.
"""
return os.environ[constants.REST_HOST_KEY]
def get_broker_ssl_cert_path():
"""
Returns location of the broker certificate on the agent
"""
if cluster.is_cluster_configured():
active_node = cluster.get_cluster_active() or {}
broker_ssl_cert_path = active_node.get('internal_cert_path')
if broker_ssl_cert_path:
return broker_ssl_cert_path
return os.environ[constants.BROKER_SSL_CERT_PATH]
# maintained for backwards compatibility
get_manager_ip = get_manager_rest_service_host
def get_manager_rest_service_port():
"""
Returns the port the manager REST service is running on.
"""
return int(os.environ[constants.REST_PORT_KEY])
def get_local_rest_certificate():
"""
Returns the path to the local copy of the server's public certificate
"""
return os.environ[constants.LOCAL_REST_CERT_FILE_KEY]
def _get_current_context():
for context in [ctx, workflow_ctx]:
try:
return context._get_current_object()
except RuntimeError:
continue
raise RuntimeError('Context required, but no operation or workflow '
'context available.')
def get_rest_token():
"""
Returns the auth token to use when calling the REST service
"""
return _get_current_context().rest_token
def get_tenant():
"""
Returns a dict with the details of the current tenant
"""
return _get_current_context().tenant
def get_tenant_name():
"""
Returns the tenant name to use when calling the REST service
"""
return _get_current_context().tenant_name
def get_is_bypass_maintenance():
"""
Returns true if workflow should run in maintenance mode.
"""
return os.environ.get(constants.BYPASS_MAINTENANCE, '').lower() == 'true'
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
"""
Generate and return a random string using upper case letters and digits.
"""
return ''.join(random.choice(chars) for _ in range(size))
def get_exec_tempdir():
"""
Returns the directory to use for temporary files, when the intention
is to place an executable file there.
This is needed because some production systems disallow executions from
the default temporary directory.
"""
return os.environ.get(CFY_EXEC_TEMPDIR_ENVVAR) or tempfile.gettempdir()
def create_temp_folder():
"""
Create a temporary folder.
"""
path_join = os.path.join(get_exec_tempdir(), id_generator(5))
os.makedirs(path_join)
return path_join
def exception_to_error_cause(exception, tb):
error = StringIO.StringIO()
etype = type(exception)
traceback.print_exception(etype, exception, tb, file=error)
return {
'message': str(exception),
'traceback': error.getvalue(),
'type': etype.__name__
}
class LocalCommandRunner(object):
def __init__(self, logger=None, host='localhost'):
"""
:param logger: This logger will be used for
printing the output and the command.
"""
logger = logger or setup_logger('LocalCommandRunner')
self.logger = logger
self.host = host
def run(self, command,
exit_on_failure=True,
stdout_pipe=True,
stderr_pipe=True,
cwd=None,
execution_env=None):
"""
Runs local commands.
:param command: The command to execute.
:param exit_on_failure: False to ignore failures.
:param stdout_pipe: False to not pipe the standard output.
:param stderr_pipe: False to not pipe the standard error.
:param cwd: the working directory the command will run from.
:param execution_env: dictionary of environment variables that will
be present in the command scope.
:return: A wrapper object for all valuable info from the execution.
:rtype: cloudify.utils.CommandExecutionResponse
"""
if isinstance(command, list):
popen_args = command
else:
popen_args = _shlex_split(command)
self.logger.debug('[{0}] run: {1}'.format(self.host, popen_args))
stdout = subprocess.PIPE if stdout_pipe else None
stderr = subprocess.PIPE if stderr_pipe else None
command_env = os.environ.copy()
command_env.update(execution_env or {})
p = subprocess.Popen(args=popen_args, stdout=stdout,
stderr=stderr, cwd=cwd, env=command_env)
out, err = p.communicate()
if out:
out = out.rstrip()
if err:
err = err.rstrip()
if p.returncode != 0:
error = CommandExecutionException(
command=command,
error=err,
output=out,
code=p.returncode)
if exit_on_failure:
raise error
else:
self.logger.error(error)
return CommandExecutionResponse(
command=command,
std_out=out,
std_err=err,
return_code=p.returncode)
class CommandExecutionResponse(object):
"""
Wrapper object for info returned when running commands.
:param command: The command that was executed.
:param std_out: The output from the execution.
:param std_err: The error message from the execution.
:param return_code: The return code from the execution.
"""
def __init__(self, command, std_out, std_err, return_code):
self.command = command
self.std_out = std_out
self.std_err = std_err
self.return_code = return_code
setup_default_logger = setup_logger # deprecated; for backwards compatibility
def _shlex_split(command):
lex = shlex.shlex(command, posix=True)
lex.whitespace_split = True
lex.escape = ''
return list(lex)
class Internal(object):
@staticmethod
def get_install_method(properties):
install_agent = properties.get('install_agent')
if install_agent is False:
return 'none'
elif install_agent is True:
return 'remote'
else:
return properties.get('agent_config', {}).get('install_method')
@staticmethod
def get_broker_ssl_options(ssl_enabled, cert_path):
if ssl_enabled:
ssl_options = {
'ca_certs': cert_path,
'cert_reqs': ssl.CERT_REQUIRED,
}
else:
ssl_options = {}
return ssl_options
@staticmethod
def get_broker_credentials(cloudify_agent):
"""Get broker credentials or their defaults if not set."""
default_user = 'guest'
default_pass = 'guest'
default_vhost = '/'
try:
broker_user = cloudify_agent.broker_user or default_user
broker_pass = cloudify_agent.broker_pass or default_pass
broker_vhost = cloudify_agent.broker_vhost or default_vhost
except AttributeError:
# Handle non-agent from non-manager (e.g. for manual tests)
broker_user = default_user
broker_pass = default_pass
broker_vhost = default_vhost
return broker_user, broker_pass, broker_vhost
@staticmethod
def _get_package_version(plugins_dir, package_name):
# get all plugin dirs
subdirs = next(os.walk(plugins_dir))[1]
# filter by package name
package_dirs = [dir for dir in subdirs if dir.startswith(package_name)]
# cut package name prefix
versions = [dir[len(package_name) + 1:] for dir in package_dirs]
# sort versions from new to old
versions.sort(key=lambda version: LooseVersion(version), reverse=True)
# return the latest
return versions[0]
@staticmethod
def plugin_prefix(package_name=None, package_version=None,
deployment_id=None, plugin_name=None, tenant_name=None,
sys_prefix_fallback=True):
tenant_name = tenant_name or ''
plugins_dir = os.path.join(sys.prefix, 'plugins', tenant_name)
prefix = None
if package_name:
package_version = package_version or Internal._get_package_version(
plugins_dir, package_name)
wagon_dir = os.path.join(
plugins_dir, '{0}-{1}'.format(package_name, package_version))
if os.path.isdir(wagon_dir):
prefix = wagon_dir
if prefix is None and deployment_id and plugin_name:
source_dir = os.path.join(
plugins_dir, '{0}-{1}'.format(deployment_id, plugin_name))
if os.path.isdir(source_dir):
prefix = source_dir
if prefix is None and sys_prefix_fallback:
prefix = sys.prefix
return prefix
@staticmethod
@contextmanager
def _change_tenant(ctx, tenant):
"""
Temporarily change the tenant the context is pretending to be.
This is not supported for anything other than snapshot restores.
If you are thinking of using this for something, it would be
better not to.
"""
if 'original_name' in ctx._context['tenant']:
raise RuntimeError(
'Overriding tenant name cannot happen while tenant name is '
'already being overridden.'
)
try:
ctx._context['tenant']['original_name'] = ctx.tenant_name
ctx._context['tenant']['name'] = tenant
yield
finally:
ctx._context['tenant']['name'] = (
ctx._context['tenant']['original_name']
)
ctx._context['tenant'].pop('original_name')
internal = Internal()
|
apache-2.0
| 3,845,092,131,709,111,300
| 30.473577
| 79
| 0.613174
| false
| 4.276443
| false
| false
| false
|
bodylabs/blmath
|
blmath/geometry/transform/rigid_transform.py
|
1
|
2624
|
def find_rigid_transform(a, b, visualize=False):
"""
Args:
a: a 3xN array of vertex locations
b: a 3xN array of vertex locations
Returns: (R,T) such that R.dot(a)+T ~= b
Based on Arun et al, "Least-squares fitting of two 3-D point sets," 1987.
See also Eggert et al, "Estimating 3-D rigid body transformations: a
comparison of four major algorithms," 1997.
"""
import numpy as np
import scipy.linalg
from blmath.numerics.matlab import col
if a.shape[0] != 3:
if a.shape[1] == 3:
a = a.T
if b.shape[0] != 3:
if b.shape[1] == 3:
b = b.T
assert a.shape[0] == 3
assert b.shape[0] == 3
a_mean = np.mean(a, axis=1)
b_mean = np.mean(b, axis=1)
a_centered = a - col(a_mean)
b_centered = b - col(b_mean)
c = a_centered.dot(b_centered.T)
u, s, v = np.linalg.svd(c, full_matrices=False)
v = v.T
R = v.dot(u.T)
if scipy.linalg.det(R) < 0:
if np.any(s == 0): # This is only valid in the noiseless case; see the paper
v[:, 2] = -v[:, 2]
R = v.dot(u.T)
else:
raise ValueError("find_rigid_transform found a reflection that it cannot recover from. Try RANSAC or something...")
T = col(b_mean - R.dot(a_mean))
    if visualize is not False:
from lace.mesh import Mesh
from lace.meshviewer import MeshViewer
mv = MeshViewer() if visualize is True else visualize
a_T = R.dot(a) + T
mv.set_dynamic_meshes([
Mesh(v=a.T, f=[]).set_vertex_colors('red'),
Mesh(v=b.T, f=[]).set_vertex_colors('green'),
Mesh(v=a_T.T, f=[]).set_vertex_colors('orange'),
])
return R, T
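# Illustrative usage sketch (not part of the original module): recover a
# known rotation about the z-axis and a known translation from point pairs.
#
#   import numpy as np
#   theta = np.pi / 4
#   R_true = np.array([[np.cos(theta), -np.sin(theta), 0.],
#                      [np.sin(theta),  np.cos(theta), 0.],
#                      [0.,             0.,            1.]])
#   T_true = np.array([[1.], [2.], [3.]])
#   a = np.random.rand(3, 50)
#   R, T = find_rigid_transform(a, R_true.dot(a) + T_true)
#   # R ~= R_true and T ~= T_true, up to numerical precision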
def find_rigid_rotation(a, b, allow_scaling=False):
"""
Args:
a: a 3xN array of vertex locations
b: a 3xN array of vertex locations
Returns: R such that R.dot(a) ~= b
See link: http://en.wikipedia.org/wiki/Orthogonal_Procrustes_problem
"""
import numpy as np
import scipy.linalg
from blmath.numerics.matlab import col
assert a.shape[0] == 3
assert b.shape[0] == 3
if a.size == 3:
cx = np.cross(a.ravel(), b.ravel())
a = np.hstack((col(a), col(cx)))
b = np.hstack((col(b), col(cx)))
c = a.dot(b.T)
u, _, v = np.linalg.svd(c, full_matrices=False)
v = v.T
R = v.dot(u.T)
if scipy.linalg.det(R) < 0:
v[:, 2] = -v[:, 2]
R = v.dot(u.T)
if allow_scaling:
scalefactor = scipy.linalg.norm(b) / scipy.linalg.norm(a)
R = R * scalefactor
return R
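# Illustrative usage sketch (not part of the original module): rotate one
# unit vector onto another.
#
#   import numpy as np
#   R = find_rigid_rotation(np.array([[1.], [0.], [0.]]),
#                           np.array([[0.], [1.], [0.]]))
#   # R.dot([1, 0, 0]) ~= [0, 1, 0]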
|
bsd-2-clause
| 5,908,274,546,677,623,000
| 27.521739
| 127
| 0.552973
| false
| 3.009174
| false
| false
| false
|
Colin-b/pyconfigparser
|
setup.py
|
1
|
1185
|
import os
from setuptools import setup, find_packages
this_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_dir, 'README.md'), 'r') as f:
long_description = f.read()
setup(name='pyconfigparser',
version='0.1',
author='Bounouar Colin',
maintainer='Bounouar Colin',
url='https://github.com/Colin-b/pyconfigparser',
description='Helper to parse configuration files.',
long_description=long_description,
download_url='https://github.com/Colin-b/pyconfigparser',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Operating System :: Microsoft :: Windows :: Windows 7"
],
keywords=[
'configuration'
],
packages=find_packages(),
install_requires=[
],
platforms=[
'Windows'
]
)
|
mit
| 2,933,620,478,361,313,300
| 31.916667
| 65
| 0.587342
| false
| 4.157895
| false
| true
| false
|
UCSD-E4E/aerial_lidar
|
catkin_ws/src/laser_tfs/src/nodes/plot.py
|
1
|
1482
|
#!/usr/bin/env python
import roslib
roslib.load_manifest('laser_tfs')
import rospy
import math
import matplotlib.pyplot as plt
import numpy as np
import tf
from geometry_msgs.msg import PoseWithCovarianceStamped
def handle_fcu_pose(msg):
br = tf.TransformBroadcaster()
global count
p = msg.pose.pose.position # capture translational position
o = msg.pose.pose.orientation # capture quaternion
if handle_fcu_pose.first_run:
handle_fcu_pose.origin = p
count = 0
handle_fcu_pose.first_run = False
x_p = p.x - handle_fcu_pose.origin.x
y_p = p.y - handle_fcu_pose.origin.y
# print "x " + str(x_p)
# print "y " + str(y_p)
# print " "
plt.ion()
xdata = [0]*10
ydata = [0]*10
plt.ylim([-150,150])
plt.xlim([-150,150])
if count < 10:
count = count + 1
line, = plt.plot(xdata,ydata, 'ro')
if count == 10:
xdata.append(x_p)
ydata.append(y_p)
line.set_xdata(np.arange(len(xdata)))
line.set_xdata(xdata)
line.set_ydata(np.arange(len(ydata)))
line.set_ydata(ydata)
count = 0
plt.draw()
del ydata[0]
del xdata[0]
if __name__ == '__main__':
rospy.init_node('test_broadcaster')
rospy.Subscriber('/mavros/global_position/local',
PoseWithCovarianceStamped,
handle_fcu_pose)
handle_fcu_pose.first_run = True
handle_fcu_pose.origin = None
rospy.spin()
|
mit
| 5,908,898,019,131,150,000
| 24.118644
| 66
| 0.597841
| false
| 3.049383
| false
| false
| false
|
nityansuman/Python-3
|
data_structures/tuples.py
|
1
|
1373
|
# Tuple data structure
sample_tuple = ('Glenn', 'Sally', 'John')
print(sample_tuple)
# First Element of the tuple
print(sample_tuple[0])
y = (1, 9, 15) # New tuple
print(max(y)) # Max value of the tuple
# Tuples are immutable like strings; you cannot change the values of a tuple
# You cannot sort, reverse or append to a tuple in place
t = tuple()
print(dir(t)) # Check method applicable on tuples
(x, y) = (4, 'Nityan')
(a, b) = (99, 'Ram')
(c, d) = ('Nityan', '4')
print(x, y)
print(a, b)
print(c, d)
# Convert dictionary into tuples
mdict = {'root': 1, 'roll': 1, 'nityan': 2}
t = mdict.items()
print(t)
# Loop through the list of tuples with two iteration variables
for (i, j) in t:
print(i, j)
# Tuples are comparable in a weird way
# Comparison proceeds element by element: it moves on while elements are equal and stops as soon as one comparison decides the result
print((0, 1, 2) < (5, 4, 2))
# 0<5: So done.
# True
print((2, 4, 6) < (2, 8, 4))
# 2-2 is same. So continue ---> 4<8: So done
# True
print((1, 6) > (4, 3))
# since 1!>4: So done
# False
print(('John', 'Nityan') > ('Apple', 'Mango'))
# True, comparing ASCII values: 'J' > 'A', so done
# Sorting the dictionary items (a list of tuples) by key, in descending order
t2 = sorted(mdict.items(), reverse=True)
print(t2)
# In-place sort of the list of tuples, descending (still compares keys first, not values)
t2.sort(reverse=True)
print(t2)
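# Extra illustrative example (not in the original file): to sort by value
# rather than by key, flip each (key, value) pair before sorting
t3 = sorted([(v, k) for (k, v) in mdict.items()], reverse=True)
print(t3)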
|
gpl-3.0
| 3,198,536,309,866,640,000
| 21.672414
| 126
| 0.622724
| false
| 2.671206
| false
| false
| false
|
SKIRT/PTS
|
magic/misc/imfit.py
|
1
|
12412
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.magic.misc.imfit Working with IMfit
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Code for reading in and analyzing output of imfit
import glob
import math
import numpy as np
# Import the relevant PTS classes and modules
from . import imfit_funcs as imfuncs
# -----------------------------------------------------------------
# dictionary mapping imfit function short names (as found in the config/parameter file) to
# corresponding 1-D Python functions in imfit_funcs.py, along with some useful information:
# "function" = corresponding imfit_funcs.py function, if one exists
# "nSkip" = the number of 2D-related parameters to skip (e.g., PA, ellipticity),
# "ell" = index for ellipticity parameter, if it exists,
# "a" = index or indices for semi-major-axis parameters (r_e, h, sigma, etc.)
imfitFunctionMap = {"Exponential": {"function": imfuncs.Exponential, "nSkip": 2, "ell": 1, "a": [3]},
"Exponential_GenEllipse": {"function": imfuncs.Exponential, "nSkip": 3, "ell": 1, "a": [4]},
"Sersic": {"function": imfuncs.Sersic, "nSkip": 2, "ell": 1, "a": [4]},
"Sersic_GenEllipse": {"function": imfuncs.Sersic, "nSkip": 3, "ell": 1, "a": [5]},
"Gaussian": {"function": imfuncs.Gauss, "nSkip": 2, "ell": 1, "a": [3]},
"GaussianRing": {"function": imfuncs.GaussRing, "nSkip": 2, "ell": 1, "a": [3,4]},
"GaussianRing2Side": {"function": imfuncs.GaussRing2Side, "nSkip": 2, "ell": 1, "a": [3,4,5]},
"Moffat": {"function": imfuncs.Moffat, "nSkip": 2, "ell": 1, "a": [3]},
"BrokenExponential": {"function": imfuncs.BrokenExp, "nSkip": 2, "ell": 1, "a": [3,4,5]}}
# -----------------------------------------------------------------
class Imfit(object):
"""
This class ...
"""
def __init__(self, path):
"""
This function ...
:param path:
"""
# -----------------------------------------------------------------
def ChopComments(theLine):
return theLine.split("#")[0]
# -----------------------------------------------------------------
def GetFunctionImageNames(baseName, funcNameList):
"""Generate a list of FITS filenames as would be created by makeimage in "--output-functions"
mode.
"""
nImages = len(funcNameList)
imageNameList = [ "%s%d_%s.fits" % (baseName, i + 1, funcNameList[i]) for i in range(nImages) ]
return imageNameList
# -----------------------------------------------------------------
def ReadImfitConfigFile( fileName, minorAxis=False, pix=1.0, getNames=False, X0=0.0 ):
"""Function to read and parse an imfit-generated parameter file
(or input config file) and return a tuple consisting of:
(list of 1-D imfit_funcs functions, list of lists of parameters).
pix = scale in arcsec/pixel, if desired for plotting vs radii in arcsec.
We assume that all functions have a center at x = 0; this can be changed by setting
X0.
Returns tuple of (functionList, trimmedParameterList)
If getNames == True:
Returns tuple of (functionNameList, functionList, trimmedParameterList)
"""
dlines = [ line for line in open(fileName) if len(line.strip()) > 0 and line[0] != "#" ]
funcNameList = []
paramMetaList = []
currentParamList = []
nLines = len(dlines)
for line in dlines:
trimmedLine = ChopComments(line)
#print(trimmedLine)
if trimmedLine.find("X0") == 0:
continue
if trimmedLine.find("Y0") == 0:
continue
if trimmedLine.find("FUNCTION") == 0:
# if this isn't the first function, store the previous set of parameters
if len(currentParamList) > 0:
paramMetaList.append(currentParamList)
# make a new parameter list for the new function
currentParamList = [X0]
pp = trimmedLine.split()
fname = pp[1].strip()
funcNameList.append(fname)
continue
else:
pp = trimmedLine.split()
newValue = float(pp[1])
currentParamList.append(newValue)
# ensure that final set of parameters get stored:
paramMetaList.append(currentParamList)
# process function list to remove unneeded parameters (and convert size measures
# from major-axis to minor-axis, if requested)
funcList = [ imfitFunctionMap[fname]["function"] for fname in funcNameList ]
trimmedParamList = []
nFuncs = len(funcList)
for i in range(nFuncs):
fname = funcNameList[i]
nSkipParams = imfitFunctionMap[fname]["nSkip"]
fullParams = paramMetaList[i]
# calculate scaling factor for minor-axis values, if needed
if minorAxis is True:
print(fname)
ellIndex = imfitFunctionMap[fname]["ell"]
print(ellIndex)
ell = fullParams[ellIndex+1]
q = 1.0 - ell
else:
q = 1.0
print(i, fname)
smaIndices = imfitFunctionMap[fname]["a"]
# convert length values to arcsec and/or minor-axis, if needed,
for smaIndex in smaIndices:
# +1 to account for X0 value at beginning of parameter list
fullParams[smaIndex+1] = pix*q*fullParams[smaIndex+1]
# construct the final 1-D parameter set for this function: X0 value, followed
# by post-2D-shape parameters
trimmedParams = [fullParams[0]]
trimmedParams.extend(fullParams[nSkipParams+1:])
trimmedParamList.append(trimmedParams)
if getNames is True:
return (funcNameList, funcList, trimmedParamList)
else:
return (funcList, trimmedParamList)
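# Illustrative usage sketch (hypothetical file name; not part of the original
# module): read a fit result and report each component.
#
#   names, funcs, params = ReadImfitConfigFile("bestfit_params.dat",
#                                              pix=0.262, getNames=True)
#   for name, p in zip(names, params):
#       print(name, p)  # e.g. "Sersic" followed by [X0, n, I_e, r_e]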
# -----------------------------------------------------------------
# Code for reading output of bootstrap resampling and MCMC chains
def GetBootstrapOutput(filename):
"""Reads imfit's bootstrap-resampling output when saved using the
--save-bootstrap command-line option.
Parameters
----------
filename : str
name of file with bootstrap-resampling output
Returns
-------
(column_names, data_array) : tuple of (list, np.ndarray)
column_names = list of column names (strings)
data_array = numpy array of parameter values
with shape = (n_iterations, n_parameters)
"""
# get first 100 lines
# FIXME: file *could* be shorter than 100 lines; really complicated
# model could have > 100 lines of header...
with open(filename) as theFile:
firstLines = [next(theFile) for x in range(100)]
# find header line with column names and extract column names
for i in range(len(firstLines)):
if firstLines[i].find("# Bootstrap resampling output") >= 0:
columnNamesIndex = i + 1
break
columnNames = firstLines[columnNamesIndex][1:].split()
for i in range(len(columnNames)):
if columnNames[i] == "likelihood":
nParamColumns = i
break
# get the data
d = np.loadtxt(filename)
return (columnNames, d)
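# Illustrative usage sketch (hypothetical file name; not part of the original
# module):
#
#   cols, data = GetBootstrapOutput("bootstrap_output.txt")
#   # data[:, i] holds the bootstrap distribution of parameter cols[i]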
# -----------------------------------------------------------------
def GetSingleChain(filename, getAllColumns=False):
"""Reads a single MCMC chain output file and returns a tuple of column names
and a numpy array with the data.
Parameters
----------
filename : str
name of file with MCMC output chain
getAllColumns: bool, optional
if False [default], only model parameter-value columns are retrieved;
if True, all output columns (including MCMC diagnostics) are retrieved
Returns
-------
(column_names, data_array) : tuple of (list, np.ndarray)
column_names = list of column names (strings)
data_array = numpy array of parameter values
with shape = (n_iterations, n_parameters)
"""
# get first 100 lines
# FIXME: file *could* be shorter than 100 lines; really complicated
# model could have > 100 lines of header...
with open(filename) as theFile:
firstLines = [next(theFile) for x in range(100)]
# find header line with column names and extract column names
for i in range(len(firstLines)):
if firstLines[i].find("# Column Headers") >= 0:
columnNamesIndex = i + 1
break
columnNames = firstLines[columnNamesIndex][1:].split()
for i in range(len(columnNames)):
if columnNames[i] == "likelihood":
nParamColumns = i
break
# get data for all columns, or just the model parameters?
whichCols = None
if not getAllColumns:
whichCols = list(range(nParamColumns))
outputColumnNames = columnNames[:nParamColumns]
else:
whichCols = None
outputColumnNames = columnNames
# get the data
d = np.loadtxt(filename, usecols=whichCols)
return (outputColumnNames, d)
# -----------------------------------------------------------------
def MergeChains( fname_root, maxChains=None, getAllColumns=False, start=10000, last=None,
secondHalf=False ):
"""
Reads and concatenates all MCMC output chains with filenames = fname_root.*.txt,
using data from t=start onwards. By default, all generations from each chain
are extracted; this can be modified with the start, last, or secondHalf keywords.
Parameters
----------
fname_root : str
root name of output chain files (e.g., "mcmc_out")
maxChains : int or None, optional
maximum number of chain files to read [default = None = read all files]
getAllColumns : bool, optional
if False [default], only model parameter-value columns are retrieved;
if True, all output columns (including MCMC diagnostics) are retrieved
start : int, optional
extract samples from each chain beginning with time = start
ignored if "secondHalf" is True or if "last" is not None
last : int or None, optional
extract last N samples from each chain
ignored if "secondHalf" is True
secondHalf : bool, optional
if True, only the second half of each chain is extracted
if False [default],
Returns
-------
(column_names, data_array) : tuple of (list, np.ndarray)
column_names = list of column names (strings)
data_array = numpy array of parameter values
with shape = (n_samples, n_parameters)
"""
# construct list of filenames
if maxChains is None:
globPattern = "{0}.*.txt".format(fname_root)
filenames = glob.glob(globPattern)
else:
filenames = ["{0}.{1}.txt".format(fname_root, n) for n in range(maxChains)]
nFiles = len(filenames)
# get the first chain so we can tell how long the chains are
(colNames, dd) = GetSingleChain(filenames[0], getAllColumns=getAllColumns)
nGenerations = dd.shape[0]
# figure out what part of full chain to extract
if secondHalf is True:
startTime = int(math.floor(nGenerations / 2))
elif last is not None:
startTime = -last
else:
startTime = start
# get first chain and column names; figure out if we get all columns or just
# model parameters
if (startTime >= nGenerations):
txt = "WARNING: # generations in MCMC chain file {0} ({1:d}) is <= ".format(filenames[0],
nGenerations)
txt += "requested start time ({0:d})!\n".format(startTime)
print(txt)
return None
dd_final = dd[startTime:,:]
if getAllColumns is False:
nParamColumns = len(colNames)
whichCols = list(range(nParamColumns))
else:
whichCols = None
# get and append rest of chains if more than 1 chain-file was requested
if nFiles > 1:
for i in range(1, nFiles):
dd_next = np.loadtxt(filenames[i], usecols=whichCols)
dd_final = np.concatenate((dd_final, dd_next[startTime:,:]))
return (colNames, dd_final)
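# Illustrative usage sketch (hypothetical chain files mcmc_out.*.txt; not
# part of the original module):
#
#   colNames, samples = MergeChains("mcmc_out", secondHalf=True)
#   medians = np.median(samples, axis=0)  # per-parameter posterior medians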
# -----------------------------------------------------------------
|
agpl-3.0
| -2,269,840,293,426,458,600
| 35.395894
| 102
| 0.593183
| false
| 4.021711
| false
| false
| false
|
google/iree
|
third_party/format_diff/format_diff.py
|
1
|
5904
|
#!/usr/bin/env python3
#
#===- format_diff.py - Diff Reformatter ----*- python3 -*--===#
#
# This file is licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
#===------------------------------------------------------------------------===#
"""
This script reads input from a unified diff and reformats all the changed
lines. This is useful to reformat all the lines touched by a specific patch.
Example usage:
git diff -U0 HEAD^ | python3 format_diff.py yapf -i
git diff -U0 HEAD^ | python3 format_diff.py clang-format -i
svn diff --diff-cmd=diff -x-U0 | python3 format_diff.py -p0 clang-format -i
General usage:
<some diff> | python3 format_diff.py [--regex] [--lines-style] [-p] binary [args for binary]
It should be noted that the filename contained in the diff is used unmodified
to determine the source file to update. Users calling this script directly
should be careful to ensure that the path in the diff is correct relative to the
current working directory.
"""
import argparse
import difflib
import io
import re
import subprocess
import sys
BINARY_TO_DEFAULT_REGEX = {
"yapf": r".*\.py",
"clang-format":
r".*\.(cpp|cc|c\+\+|cxx|c|cl|h|hh|hpp|hxx|m|mm|inc|js|ts|proto|"
r"protodevel|java|cs)",
}
def parse_arguments():
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"binary",
help="Location of binary to use for formatting. This controls the "
"default values of --regex and --lines-style. If binary isn't 'yapf' "
"or 'clang-format' then --regex and --lines-style are required.")
parser.add_argument(
"--regex",
metavar="PATTERN",
default=None,
help="Regex pattern for selecting file paths to reformat from the piped "
"diff. This flag is required if 'binary' is not set to 'yapf' or "
"'clang-format'. Otherwise, this flag overrides the default pattern that "
"--binary sets.")
parser.add_argument(
"--lines-style",
default=None,
help="How to style the 'lines' argument for the given binary. Can be set "
"to 'yapf' or 'clang-format'. This flag is required if 'binary' is not "
"set to 'yapf' or 'clang-format'.")
parser.add_argument(
"-p",
metavar="NUM",
default=1,
help="Strip the smallest prefix containing P slashes. Set to 0 if "
"passing `--no-prefix` to `git diff` or using `svn`")
# Parse and error-check arguments
args, binary_args = parser.parse_known_args()
if args.binary not in BINARY_TO_DEFAULT_REGEX:
if not args.regex:
raise parser.error("If 'binary' is not 'yapf' or 'clang-format' then "
"--regex must be set.")
if not args.lines_style:
raise parser.error("If 'binary' is not 'yapf' or 'clang-format' then "
"--lines-style must be set.")
else:
# Set defaults based off of 'binary'.
if not args.regex:
args.regex = BINARY_TO_DEFAULT_REGEX[args.binary]
if not args.lines_style:
args.lines_style = args.binary
if args.lines_style not in ["yapf", "clang-format"]:
raise parser.error(f"Unexpected value for --line-style {args.lines_style}")
return args, binary_args
def main():
args, binary_args = parse_arguments()
# Extract changed lines for each file.
filename = None
lines_by_file = {}
for line in sys.stdin:
# Match all filenames.
match = re.search(fr"^\+\+\+\ (.*?/){{{args.p}}}(\S*)", line)
if match:
filename = match.group(2)
if filename is None:
continue
# Match all filenames specified by --regex.
if not re.match(f"^{args.regex}$", filename):
continue
# Match unified diff line numbers.
match = re.search(r"^@@.*\+(\d+)(,(\d+))?", line)
if match:
start_line = int(match.group(1))
line_count = 1
if match.group(3):
line_count = int(match.group(3))
if line_count == 0:
continue
end_line = start_line + line_count - 1
if args.lines_style == "yapf":
lines = ["--lines", f"{start_line}-{end_line}"]
elif args.lines_style == "clang-format":
lines = ["-lines", f"{start_line}:{end_line}"]
lines_by_file.setdefault(filename, []).extend(lines)
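# Worked example (illustrative, not from the original source): a hunk header
# "@@ -10,0 +11,3 @@" gives start_line=11, line_count=3, end_line=13, so with
# --lines-style=yapf this appends ["--lines", "11-13"], and with
# --lines-style=clang-format it appends ["-lines", "11:13"].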
# Pass the changed lines to 'binary' alongside any unparsed args (e.g. -i).
for filename, lines in lines_by_file.items():
command = [args.binary, filename]
command.extend(lines)
command.extend(binary_args)
print(f"Running `{' '.join(command)}`")
p = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=None,
stdin=subprocess.PIPE,
universal_newlines=True)
stdout, stderr = p.communicate()
if p.returncode != 0:
sys.exit(p.returncode)
# If the formatter printed the formatted code to stdout then print out
# a unified diff between the formatted and unformatted code.
# If flags like --verbose are passed to the binary then the diffs this
# produces won't be particularly helpful.
formatted_code = io.StringIO(stdout).readlines()
if len(formatted_code):
with open(filename) as f:
unformatted_code = f.readlines()
diff = difflib.unified_diff(unformatted_code,
formatted_code,
fromfile=filename,
tofile=filename,
fromfiledate="(before formatting)",
tofiledate="(after formatting)")
diff_string = "".join(diff)
if len(diff_string) > 0:
sys.stdout.write(diff_string)
if __name__ == "__main__":
main()
|
apache-2.0
| -8,796,428,589,158,364,000
| 34.781818
| 94
| 0.612297
| false
| 3.762906
| false
| false
| false
|
skosukhin/spack
|
var/spack/repos/builtin/packages/py-python-gitlab/package.py
|
1
|
1868
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPythonGitlab(PythonPackage):
"""Python wrapper for the GitLab API"""
homepage = "https://github.com/gpocentek/python-gitlab"
url = "https://pypi.io/packages/source/p/python-gitlab/python-gitlab-0.19.tar.gz"
version('0.19', '6564d7204c2b7e65c54b3fa89ec91df6')
version('0.18', 'c31dae1d0bab3966cb830f2308a96308')
version('0.17', '8a69c602e07dd4731856531d79bb58eb')
version('0.16', 'e0421d930718021e7d796d74d2ad7194')
depends_on('py-setuptools', type='build')
depends_on('py-six', type=('build', 'run'))
depends_on('py-requests@1.0:', type=('build', 'run'))
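# Illustrative install command (hypothetical spec, not part of the package recipe):
#   spack install py-python-gitlab@0.19 ^python@2.7.14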
|
lgpl-2.1
| 5,940,696,918,200,540,000
| 44.560976
| 90
| 0.67773
| false
| 3.585413
| false
| false
| false
|
troywatson/Python-Grammar-Checker
|
fartherVsFurther/CountSyllables.py
|
1
|
2787
|
'''
Count Syllables v1.0
A simple class to count syllables using a dictionary method
It will attempt to estimate syllable counts for words not found in the dictionary
'''
class CountSyllables(object):
def __init__(self):
# load the syllable dictionary at construction time
self.prepareData()
def generateDict(self):
# converts a pronunciation dictionary into a syllable count dictionary
fileName = open("dict.txt", 'r')
print 'opening file...'
data = fileName.read()
fileName.close()
print 'splitting up data by entries...'
words = data.split("\n")
outputFile = open("syllables.txt", 'w')
for entry in words:
entry = entry.split(" ")
word = entry[0]
pronunciation = entry[1]
sections = pronunciation.split(" ")
count = 0
for section in sections:
if self.isVowel(section):
count+=1
if count == 0: count = 1
outputFile.write(word.lower() + ',' + str(count) + '\n')
outputFile.close()
def isVowel(self, word):
# a simple function to find whether a word contains a vowel or not
word = word.lower()
if 'a' in word or 'e' in word or 'i' in word or 'o' in word or 'u' in word:
return True
else: return False
def prepareData(self):
fileName = open('SyllableCounter/syllables.txt', 'r')
self.dict = {}
data = fileName.read()
fileName.close()
lines = data.split('\n')
for line in lines:
entry = line.split(',')
if len(entry[0]) < 1: continue
if entry[0] in self.dict: continue
else: self.dict[entry[0]] = entry[1]
def count(self, word):
if word in self.dict: return int(self.dict[word])
syllCount = 0
for letter in word:
if self.isVowel(letter): syllCount += 1
if syllCount < 1: return 1
else: return syllCount
def main():
test = CountSyllables()
print test.count('elephant')
if __name__ == '__main__':main()
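# Illustrative usage sketch (word values are assumptions for demonstration):
#   cs = CountSyllables()
#   cs.count('elephant')   # dictionary lookup when the word is in syllables.txt
#   cs.count('zzz')        # no vowels -> vowel-count fallback returns 1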
|
mit
| -3,347,954,875,784,145,400
| 35.671053
| 116
| 0.425188
| false
| 4.985689
| false
| false
| false
|
lgarren/spack
|
var/spack/repos/builtin/packages/opencv/package.py
|
1
|
10399
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Opencv(CMakePackage):
"""OpenCV is released under a BSD license and hence it's free for both
academic and commercial use. It has C++, C, Python and Java interfaces and
supports Windows, Linux, Mac OS, iOS and Android. OpenCV was designed for
computational efficiency and with a strong focus on real-time applications.
Written in optimized C/C++, the library can take advantage of multi-core
processing. Enabled with OpenCL, it can take advantage of the hardware
acceleration of the underlying heterogeneous compute platform. Adopted all
around the world, OpenCV has more than 47 thousand people of user community
and estimated number of downloads exceeding 9 million. Usage ranges from
interactive art, to mines inspection, stitching maps on the web or through
advanced robotics.
"""
homepage = 'http://opencv.org/'
url = 'https://github.com/Itseez/opencv/archive/3.1.0.tar.gz'
version('master', git="https://github.com/opencv/opencv.git", branch="master")
version('3.3.0', '98a4e4c6f23ec725e808a891dc11eec4')
version('3.2.0', 'a43b65488124ba33dde195fea9041b70')
version('3.1.0', '70e1dd07f0aa06606f1bc0e3fa15abd3')
version('2.4.13.2', 'fe52791ce523681a67036def4c25261b')
version('2.4.13.1', 'f6d354500d5013e60dc0fc44b07a63d1')
version('2.4.13', '8feb45a71adad89b8017a777477c3eff')
version('2.4.12.3', '2496a4a4caf8fecfbfc294fbe6a814b0')
version('2.4.12.2', 'bc0c60c2ea1cf4078deef99569912fc7')
version('2.4.12.1', '7192f51434710904b5e3594872b897c3')
variant('shared', default=True,
description='Enables the build of shared libraries')
variant('eigen', default=True, description='Activates support for eigen')
variant('ipp', default=True, description='Activates support for IPP')
variant('jasper', default=True, description='Activates support for JasPer')
variant('cuda', default=False, description='Activates support for CUDA')
variant('gtk', default=False, description='Activates support for GTK')
variant('vtk', default=False, description='Activates support for VTK')
variant('qt', default=False, description='Activates support for QT')
variant('python', default=False,
description='Enables the build of Python extensions')
variant('java', default=False,
description='Activates support for Java')
variant('openmp', default=False, description='Activates support for OpenMP threads')
variant('core', default=True, description='Include opencv_core module into the OpenCV build')
variant('highgui', default=False, description='Include opencv_highgui module into the OpenCV build')
variant('imgproc', default=False, description='Include opencv_imgproc module into the OpenCV build')
variant('jpeg', default=False, description='Include JPEG support')
variant('png', default=False, description='Include PNG support')
variant('tiff', default=False, description='Include TIFF support')
variant('zlib', default=False, description='Build zlib from source')
variant('dnn', default=False, description='Build DNN support')
depends_on('eigen~mpfr', when='+eigen', type='build')
depends_on('zlib', when='+zlib')
depends_on('libpng', when='+png')
depends_on('jpeg', when='+jpeg')
depends_on('libtiff', when='+tiff')
depends_on('jasper', when='+jasper')
depends_on('cuda', when='+cuda')
depends_on('gtkplus', when='+gtk')
depends_on('vtk', when='+vtk')
depends_on('qt', when='+qt')
depends_on('java', when='+java')
depends_on('py-numpy', when='+python', type=('build', 'run'))
depends_on('protobuf@3.1.0', when='@3.3.0: +dnn')
extends('python', when='+python')
def cmake_args(self):
spec = self.spec
args = [
'-DBUILD_SHARED_LIBS:BOOL={0}'.format((
'ON' if '+shared' in spec else 'OFF')),
'-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF',
'-DWITH_IPP:BOOL={0}'.format((
'ON' if '+ipp' in spec else 'OFF')),
'-DWITH_CUDA:BOOL={0}'.format((
'ON' if '+cuda' in spec else 'OFF')),
'-DWITH_QT:BOOL={0}'.format((
'ON' if '+qt' in spec else 'OFF')),
'-DWITH_VTK:BOOL={0}'.format((
'ON' if '+vtk' in spec else 'OFF')),
'-DBUILD_opencv_java:BOOL={0}'.format((
'ON' if '+java' in spec else 'OFF')),
'-DBUILD_opencv_core:BOOL={0}'.format((
'ON' if '+core' in spec else 'OFF')),
'-DBUILD_opencv_highgui:BOOL={0}'.format((
'ON' if '+highgui' in spec else 'OFF')),
'-DBUILD_opencv_imgproc:BOOL={0}'.format((
'ON' if '+imgproc' in spec else 'OFF')),
'-DWITH_JPEG:BOOL={0}'.format((
'ON' if '+jpeg' in spec else 'OFF')),
'-DWITH_PNG:BOOL={0}'.format((
'ON' if '+png' in spec else 'OFF')),
'-DWITH_TIFF:BOOL={0}'.format((
'ON' if '+tiff' in spec else 'OFF')),
'-DWITH_ZLIB:BOOL={0}'.format((
'ON' if '+zlib' in spec else 'OFF')),
'-DWITH_OPENMP:BOOL={0}'.format((
'ON' if '+openmp' in spec else 'OFF')),
'-DBUILD_opencv_dnn:BOOL={0}'.format((
'ON' if '+dnn' in spec else 'OFF')),
]
# Media I/O
if '+zlib' in spec:
zlib = spec['zlib']
args.extend([
'-DZLIB_LIBRARY_{0}:FILEPATH={1}'.format((
'DEBUG' if '+debug' in spec else 'RELEASE'),
join_path(zlib.prefix.lib,
'libz.{0}'.format(dso_suffix))),
'-DZLIB_INCLUDE_DIR:PATH={0}'.format(zlib.prefix.include)
])
if '+png' in spec:
libpng = spec['libpng']
args.extend([
'-DPNG_LIBRARY_{0}:FILEPATH={1}'.format((
'DEBUG' if '+debug' in spec else 'RELEASE'),
join_path(libpng.prefix.lib,
'libpng.{0}'.format(dso_suffix))),
'-DPNG_INCLUDE_DIR:PATH={0}'.format(libpng.prefix.include)
])
if '+jpeg' in spec:
libjpeg = spec['jpeg']
args.extend([
'-DBUILD_JPEG:BOOL=OFF',
'-DJPEG_LIBRARY:FILEPATH={0}'.format(
join_path(libjpeg.prefix.lib,
'libjpeg.{0}'.format(dso_suffix))),
'-DJPEG_INCLUDE_DIR:PATH={0}'.format(libjpeg.prefix.include)
])
if '+tiff' in spec:
libtiff = spec['libtiff']
args.extend([
'-DTIFF_LIBRARY_{0}:FILEPATH={1}'.format((
'DEBUG' if '+debug' in spec else 'RELEASE'),
join_path(libtiff.prefix.lib,
'libtiff.{0}'.format(dso_suffix))),
'-DTIFF_INCLUDE_DIR:PATH={0}'.format(libtiff.prefix.include)
])
if '+jasper' in spec:
jasper = spec['jasper']
args.extend([
'-DJASPER_LIBRARY_{0}:FILEPATH={1}'.format((
'DEBUG' if '+debug' in spec else 'RELEASE'),
join_path(jasper.prefix.lib,
'libjasper.{0}'.format(dso_suffix))),
'-DJASPER_INCLUDE_DIR:PATH={0}'.format(jasper.prefix.include)
])
# GUI
if '+gtk' not in spec:
args.extend([
'-DWITH_GTK:BOOL=OFF',
'-DWITH_GTK_2_X:BOOL=OFF'
])
elif '^gtkplus@3:' in spec:
args.extend([
'-DWITH_GTK:BOOL=ON',
'-DWITH_GTK_2_X:BOOL=OFF'
])
elif '^gtkplus@2:3' in spec:
args.extend([
'-DWITH_GTK:BOOL=OFF',
'-DWITH_GTK_2_X:BOOL=ON'
])
# Python
if '+python' in spec:
python_exe = spec['python'].command.path
python_lib = spec['python'].libs[0]
python_include_dir = spec['python'].headers.directories[0]
if '^python@3:' in spec:
args.extend([
'-DBUILD_opencv_python3=ON',
'-DPYTHON3_EXECUTABLE={0}'.format(python_exe),
'-DPYTHON3_LIBRARY={0}'.format(python_lib),
'-DPYTHON3_INCLUDE_DIR={0}'.format(python_include_dir),
'-DBUILD_opencv_python2=OFF',
])
elif '^python@2:3' in spec:
args.extend([
'-DBUILD_opencv_python2=ON',
'-DPYTHON2_EXECUTABLE={0}'.format(python_exe),
'-DPYTHON2_LIBRARY={0}'.format(python_lib),
'-DPYTHON2_INCLUDE_DIR={0}'.format(python_include_dir),
'-DBUILD_opencv_python3=OFF',
])
else:
args.extend([
'-DBUILD_opencv_python2=OFF',
'-DBUILD_opencv_python3=OFF'
])
return args
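# Illustrative spec showing how variants map to the CMake flags above (hypothetical):
#   spack install opencv@3.3.0 +python+jpeg+png ^python@3.6.1
# would yield -DBUILD_opencv_python3=ON, -DWITH_JPEG:BOOL=ON and -DWITH_PNG:BOOL=ON.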
|
lgpl-2.1
| -4,794,252,325,094,675,000
| 44.017316
| 104
| 0.561689
| false
| 3.711278
| false
| false
| false
|
gam17/QAD
|
qad_circle_fun.py
|
1
|
63798
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QAD Quantum Aided Design plugin
funzioni per creare cerchi
-------------------
begin : 2018-04-08
copyright : iiiii
email : hhhhh
developers : bbbbb aaaaa ggggg
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from qgis.PyQt.QtCore import *
from qgis.PyQt.QtGui import *
from qgis.core import *
from qgis.gui import *
import qgis.utils
import math
from . import qad_utils
from .qad_geom_relations import *
#============================================================================
# circleFrom3Pts
#============================================================================
def circleFrom3Pts(firstPt, secondPt, thirdPt):
"""
creates a circle through:
a start point
a second (intermediate) point
an end point
"""
l = QadLine()
l.set(firstPt, secondPt)
InfinityLinePerpOnMiddle1 = QadPerpendicularity.getInfinityLinePerpOnMiddleLine(l)
l.set(secondPt, thirdPt)
InfinityLinePerpOnMiddle2 = QadPerpendicularity.getInfinityLinePerpOnMiddleLine(l)
if InfinityLinePerpOnMiddle1 is None or InfinityLinePerpOnMiddle2 is None:
return None
center = QadIntersections.twoInfinityLines(InfinityLinePerpOnMiddle1, InfinityLinePerpOnMiddle2)
if center is None: return None # parallel lines
radius = center.distance(firstPt)
return QadCircle().set(center, radius)
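# Minimal usage sketch (coordinates are illustrative; QgsPointXY comes from qgis.core):
#   c = circleFrom3Pts(QgsPointXY(0, 0), QgsPointXY(1, 1), QgsPointXY(2, 0))
#   if c is not None:
#       print(c.center, c.radius)   # center (1.0, 0.0), radius 1.0 for these points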
#===========================================================================
# circleFrom2IntPtsCircleTanPts
#===========================================================================
def circleFrom2IntPtsCircleTanPts(pt1, pt2, circle, pt):
"""
creates a circle through 2 intersection points and a tangent circle:
intersection point 1
intersection point 2
tangent circle (QadCircle object)
selection point on the circle
"""
# http://www.batmath.it/matematica/a_apollonio/ppc.htm
circleList = []
if pt1 == pt2: return None
dist1 = pt1.distance(circle.center) # distance of point 1 from the center
dist2 = pt2.distance(circle.center) # distance of point 2 from the center
# both points must be outside or inside circle
if (dist1 > circle.radius and dist2 < circle.radius) or \
(dist1 < circle.radius and dist2 > circle.radius):
return None
l = QadLine()
l.set(pt1, pt2)
if dist1 == dist2: # the perpendicular bisector of pt1 and pt2 passes through the center of circle
if dist1 == circle.radius: # both points lie on the circumference of circle
return None
axis = QadPerpendicularity.getInfinityLinePerpOnMiddleLine(l) # perpendicular bisector of pt1 and pt2
intPts = QadIntersections.infinityLineWithCircle(axis, circle) # intersections between the bisector and circle
for intPt in intPts:
circleTan = circleFrom3Pts(pt1, pt2, intPt)
if circleTan is not None:
circleList.append(circleTan)
elif dist1 > circle.radius and dist2 > circle.radius : # both points are outside circle
# take any circle through pt1 and pt2 that intersects circle
circleInt = circleFrom3Pts(pt1, pt2, circle.center)
if circleInt is None: return None
intPts = QadIntersections.twoCircles(circle, circleInt)
l1 = QadLine().set(pt1, pt2)
l2 = QadLine().set(intPts[0], intPts[1])
intPt = QadIntersections.twoInfinityLines(l1, l2)
tanPts = QadTangency.fromPointToCircle(intPt, circle)
for tanPt in tanPts:
circleTan = circleFrom3Pts(pt1, pt2, tanPt)
if circleTan is not None:
circleList.append(circleTan)
elif dist1 < circle.radius and dist2 < circle.radius : # both points are inside circle
# take any circle through pt1 and pt2 that intersects circle
ptMiddle = qad_utils.getMiddlePoint(pt1, pt2)
angle = qad_utils.getAngleBy2Pts(pt1, pt2) + math.pi / 2
pt3 = qad_utils.getPolarPointByPtAngle(ptMiddle, angle, 2 * circle.radius)
circleInt = circleFrom3Pts(pt1, pt2, pt3)
if circleInt is None:
return None
intPts = QadIntersections.twoCircles(circle, circleInt)
l1 = QadLine().set(pt1, pt2)
l2 = QadLine().set(intPts[0], intPts[1])
intPt = QadIntersections.twoInfinityLines(l1, l2)
tanPts = QadTangency.fromPointToCircle(intPt, circle)
for tanPt in tanPts:
circleTan = circleFrom3Pts(pt1, pt2, tanPt)
if circleTan is not None:
circleList.append(circleTan)
elif dist1 == circle.radius: # point1 lies on the circumference of circle
# a single circle, centered at the intersection of the perpendicular bisector of pt1-pt2
# with the line through the center of circle and pt1
axis = QadPerpendicularity.getInfinityLinePerpOnMiddleLine(l) # perpendicular bisector of pt1 and pt2
l1 = QadLine().set(circle.center, pt1)
intPt = QadIntersections.twoInfinityLines(axis, l1)
circleTan = QadCircle().set(intPt, qad_utils.getDistance(pt1, intPt))
circleList.append(circleTan)
elif dist2 == circle.radius: # point2 lies on the circumference of circle
# a single circle, centered at the intersection of the perpendicular bisector of pt1-pt2
# with the line through the center of circle and pt2
axis = QadPerpendicularity.getInfinityLinePerpOnMiddleLine(l) # perpendicular bisector of pt1 and pt2
l2 = QadLine().set(circle.center, pt2)
intPt = QadIntersections.twoInfinityLines(axis, l2)
circleTan = QadCircle().set(intPt, qad_utils.getDistance(pt2, intPt))
circleList.append(circleTan)
if len(circleList) == 0:
return None
result = QadCircle()
minDist = sys.float_info.max
for circleTan in circleList:
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle.center)
if qad_utils.getDistance(circleTan.center, circle.center) < circle.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
dist = qad_utils.getDistance(ptInt, pt)
if dist < minDist: # closest so far
minDist = dist
result.center = circleTan.center
result.radius = circleTan.radius
return result
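# Usage sketch (illustrative values): a circle through two interior points,
# tangent to a given circle near the pick point:
#   tanCircle = QadCircle().set(QgsPointXY(0, 0), 5)
#   c = circleFrom2IntPtsCircleTanPts(QgsPointXY(-1, 0), QgsPointXY(1, 0),
#                                     tanCircle, QgsPointXY(0, 5))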
#===========================================================================
# circleFrom2IntPtsLineTanPts
#===========================================================================
def circleFrom2IntPtsLineTanPts(pt1, pt2, line, pt, AllCircles = False):
"""
creates one or more circles (see AllCircles) through 2 intersection points and a tangent line:
intersection point 1
intersection point 2
tangent line (QadLine)
selection point on the line
if the AllCircles parameter is True, all circles are returned; otherwise only the one closest to pt1 and pt2
"""
circleList = []
pt1Line = line.getStartPt()
pt2Line = line.getEndPt()
A = (pt1.x() * pt1.x()) + (pt1.y() * pt1.y())
B = (pt2.x() * pt2.x()) + (pt2.y() * pt2.y())
E = - pt1.x() + pt2.x()
F = pt1.y() - pt2.y()
if F == 0:
if AllCircles == True:
return circleList
else:
return None
G = (-A + B) / F
H = E / F
if pt1Line.x() - pt2Line.x() == 0:
# the line is vertical
e = pt1Line.x()
I = H * H
if I == 0:
if AllCircles == True:
return circleList
else:
return None
J = (2 * G * H) - (4 * e) + (4 * pt2.x()) + (4 * H * pt2.y())
K = (G * G) - (4 * e * e) + (4 * B) + (4 * G * pt2.y())
else:
# equation of the line -> y = dx + e
d = (pt2Line.y() - pt1Line.y()) / (pt2Line.x() - pt1Line.x())
e = - d * pt1Line.x() + pt1Line.y()
C = 4 * (1 + d * d)
D = 2 * d * e
d2 = d * d
I = 1 + (H * H * d2) + 2 * H * d
if I == 0:
if AllCircles == True:
return circleList
else:
return None
J = (2 * d2 * G * H) + (2 * D) + (2 * D * H * d) + (2 * G * d) - (e * C * H) + (pt2.x() * C) + H * pt2.y() * C
K = (G * G * d2) + (2 * D * G * d) + (D * D) - (C * e * e) - (C * G * e) + (B * C) + (G * pt2.y() * C)
L = (J * J) - (4 * I * K)
if L < 0:
if AllCircles == True:
return circleList
else:
return None
a1 = (-J + math.sqrt(L)) / (2 * I)
b1 = (a1 * H) + G
c1 = - B - (a1 * pt2.x()) - (b1 * pt2.y())
center = QgsPointXY()
center.setX(- (a1 / 2))
center.setY(- (b1 / 2))
radius = math.sqrt((a1 * a1 / 4) + (b1 * b1 / 4) - c1)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
a2 = (-J - math.sqrt(L)) / (2 * I)
b2 = (a2 * H) + G
c2 = - B - (a2 * pt2.x()) - (b2 * pt2.y())
center.setX(- (a2 / 2))
center.setY(- (b2 / 2))
radius = math.sqrt((a2 * a2 / 4) + (b2 * b2 / 4) - c2)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
if AllCircles == True:
return circleList
if len(circleList) == 0:
return None
result = QadCircle()
minDist = sys.float_info.max
for circle in circleList:
ptInt = QadPerpendicularity.fromPointToInfinityLine(circle.center, line)
dist = ptInt.distance(pt)
if dist < minDist: # closest so far
minDist = dist
result.center = circle.center
result.radius = circle.radius
return result
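# Usage sketch (illustrative values): both intersection points above the x axis,
# tangent line y = 0, picked near the origin (expected circle: center (0, 1), radius 1):
#   tanLine = QadLine().set(QgsPointXY(-10, 0), QgsPointXY(10, 0))
#   c = circleFrom2IntPtsLineTanPts(QgsPointXY(-1, 1), QgsPointXY(1, 1),
#                                   tanLine, QgsPointXY(0, 0))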
#============================================================================
# circleFrom2IntPts1TanPt
#============================================================================
def circleFrom2IntPts1TanPt(pt1, pt2, geom, pt):
"""
creates a circle through 2 intersection points and one tangency object:
intersection point 1
intersection point 2
tangency geometry (line, arc or circle)
selection point on the geometry
"""
objType = geom.whatIs()
if objType != "LINE" and objType != "ARC" and objType != "CIRCLE":
return None
if objType == "ARC": # se è arco lo trasformo in cerchio
obj = QadCircle().set(geom.center, geom.radius)
objType = "CIRCLE"
else:
obj = geom
if objType == "LINE":
return circleFrom2IntPtsLineTanPts(pt1, pt2, obj, pt)
elif objType == "CIRCLE":
return circleFrom2IntPtsCircleTanPts(pt1, pt2, obj, pt)
return None
#============================================================================
# circleFrom1IntPt2TanPts
#============================================================================
def circleFrom1IntPt2TanPts(pt, geom1, pt1, geom2, pt2):
"""
creates a circle through 1 intersection point and 2 tangency objects:
intersection point
tangency geometry 1 (line, arc or circle)
selection point on geometry 1
tangency geometry 2 (line, arc or circle)
selection point on geometry 2
"""
obj1Type = geom1.whatIs()
obj2Type = geom2.whatIs()
if (obj1Type != "LINE" and obj1Type != "ARC" and obj1Type != "CIRCLE") or \
(obj2Type != "LINE" and obj2Type != "ARC" and obj2Type != "CIRCLE"):
return None
if obj1Type == "ARC": # se è arco lo trasformo in cerchio
obj1 = QadCircle().set(geom1.center, geom1.radius)
obj1Type = "CIRCLE"
else:
obj1 = geom1
if obj2Type == "ARC": # se è arco lo trasformo in cerchio
obj2 = QadCircle().set(geom2.center, geom2.radius)
obj2Type = "CIRCLE"
else:
obj2 = geom2
if obj1Type == "LINE":
if obj2Type == "LINE":
return circleFrom1IntPtLineLineTanPts(pt, obj1, pt1, obj2, pt2)
elif obj2Type == "CIRCLE":
return circleFrom1IntPtLineCircleTanPts(pt, obj1, pt1, obj2, pt2)
elif obj1Type == "CIRCLE":
if obj2Type == "LINE":
return circleFrom1IntPtLineCircleTanPts(pt, obj2, pt2, obj1, pt1)
elif obj2Type == "CIRCLE":
return circleFrom1IntPtCircleCircleTanPts(pt, obj1, pt1, obj2, pt2)
return None
#===========================================================================
# circleFrom1IntPtLineLineTanPts
#===========================================================================
def circleFrom1IntPtLineLineTanPts(pt, line1, pt1, line2, pt2, AllCircles = False):
"""
creates one or more circles (see AllCircles) through 1 intersection point and two tangent lines:
intersection point
tangent line 1 (QadLine)
selection point on line 1
tangent line 2 (QadLine)
selection point on line 2
if the AllCircles parameter is True, all circles are returned rather than only the one closest to pt1 and pt2
"""
# http://www.batmath.it/matematica/a_apollonio/prr.htm
circleList = []
# check whether the lines are parallel
ptInt = QadIntersections.twoInfinityLines(line1, line2)
if ptInt is None: # the lines are parallel
# If the lines are parallel the problem has solutions only if the point
# is not outside the strip bounded by the two lines, and it suffices to consider
# the reflection of the point across the bisector of the strip.
ptPerp = QadPerpendicularity.fromPointToInfinityLine(line2.getStartPt(), line1)
angle = qad_utils.getAngleBy2Pts(line2.getStartPt(), ptPerp)
dist = qad_utils.getDistance(line2.getStartPt(), ptPerp)
pt1ParLine = qad_utils.getPolarPointByPtAngle(line2.getStartPt(), angle, dist / 2)
angle = angle + math.pi / 2
pt2ParLine = qad_utils.getPolarPointByPtAngle(pt1ParLine, angle, dist)
l = QadLine().set(pt1ParLine, pt2ParLine)
ptPerp = QadPerpendicularity.fromPointToInfinityLine(pt, l)
dist = qad_utils.getDistance(pt, ptPerp)
# find the symmetric point
angle = qad_utils.getAngleBy2Pts(pt, ptPerp)
ptSymmetric = qad_utils.getPolarPointByPtAngle(pt, angle, dist * 2)
return circleFrom2IntPtsLineTanPts(pt, ptSymmetric, line1, pt1, AllCircles)
else: # the lines are not parallel
if ptInt == pt:
return None
# if the point lies on line1 or on line2
ptPerp1 = QadPerpendicularity.fromPointToInfinityLine(pt, line1)
ptPerp2 = QadPerpendicularity.fromPointToInfinityLine(pt, line2)
if ptPerp1 == pt or ptPerp2 == pt:
# If the lines are incident and the point belongs to one of them the construction
# is almost immediate: draw the bisectors of the two angles formed by the lines
# and the perpendicular through pt to the line the point lies on. This yields two circles.
if ptPerp1 == pt: # if the point is on line1
angle = qad_utils.getAngleBy2Pts(line2.getStartPt(), line2.getEndPt())
ptLine = qad_utils.getPolarPointByPtAngle(ptInt, angle, 10)
Bisector1 = qad_utils.getBisectorInfinityLine(pt, ptInt, ptLine)
ptLine = qad_utils.getPolarPointByPtAngle(ptInt, angle + math.pi, 10)
Bisector2 = qad_utils.getBisectorInfinityLine(pt, ptInt, ptLine)
angle = qad_utils.getAngleBy2Pts(line1.getStartPt(), line1.getEndPt())
ptPerp = qad_utils.getPolarPointByPtAngle(pt, angle + math.pi / 2, 10)
else: # if the point is on line2
angle = qad_utils.getAngleBy2Pts(line1.getStartPt(), line1.getEndPt())
ptLine = qad_utils.getPolarPointByPtAngle(ptInt, angle, 10)
Bisector1 = qad_utils.getBisectorInfinityLine(pt, ptInt, ptLine)
ptLine = qad_utils.getPolarPointByPtAngle(ptInt, angle + math.pi, 10)
Bisector2 = qad_utils.getBisectorInfinityLine(pt, ptInt, ptLine)
angle = qad_utils.getAngleBy2Pts(line2.getStartPt(), line2.getEndPt())
ptPerp = qad_utils.getPolarPointByPtAngle(pt, angle + math.pi / 2, 10)
l1 = QadLine().set(Bisector1[0], Bisector1[1])
l2 = QadLine().set(pt, ptPerp)
center = QadIntersections.twoInfinityLines(l1, l2)
radius = qad_utils.getDistance(pt, center)
circleTan = QadCircle()
circleTan.set(center, radius)
circleList.append(circleTan)
l1.set(Bisector2[0], Bisector2[1])
center = QadIntersections.twoInfinityLines(l1, l2)
radius = qad_utils.getDistance(pt, center)
circleTan = QadCircle()
circleTan.set(center, radius)
circleList.append(circleTan)
else:
# Bisector of the interior angle of the triangle whose vertex is the intersection point of the lines
Bisector = qad_utils.getBisectorInfinityLine(ptPerp1, ptInt, ptPerp2)
l = QadLine().set(Bisector[0], Bisector[1])
ptPerp = QadPerpendicularity.fromPointToInfinityLine(pt, l)
dist = qad_utils.getDistance(pt, ptPerp)
# find the symmetric point
angle = qad_utils.getAngleBy2Pts(pt, ptPerp)
ptSymmetric = qad_utils.getPolarPointByPtAngle(pt, angle, dist * 2)
return circleFrom2IntPtsLineTanPts(pt, ptSymmetric, line1, pt1, AllCircles)
if AllCircles == True:
return circleList
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line1)
AvgList.append(qad_utils.getDistance(ptInt, pt1))
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line2)
AvgList.append(qad_utils.getDistance(ptInt, pt2))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
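# Usage sketch (illustrative values): a point between two incident lines,
# with tangency picked near each line:
#   l1 = QadLine().set(QgsPointXY(0, 0), QgsPointXY(10, 0))
#   l2 = QadLine().set(QgsPointXY(0, 0), QgsPointXY(0, 10))
#   c = circleFrom1IntPtLineLineTanPts(QgsPointXY(2, 1), l1, QgsPointXY(2, 0),
#                                      l2, QgsPointXY(0, 1))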
#===============================================================================
# solveCircleTangentTo2LinesAndCircle
#===============================================================================
def solveCircleTangentTo2LinesAndCircle(line1, line2, circle, s1, s2):
'''
Finds the circles tangent to two lines and a circle for one combination of s1, s2
(each taking the value -1 or 1; the 4 combinations yield up to 8 circles overall)
and returns them as a list
'''
circleList = []
# http://www.batmath.it/matematica/a_apollonio/rrc.htm
# This construction uses a particular geometric transformation, sometimes called parallel dilation:
# imagine the radius r of the given circle c shrinking to zero (the circle reduces to its center),
# while the lines stay parallel, with their distances from the center of the now shrunken circle increased or
# decreased by r. The problem is thus reduced to the case of a point and two lines, and one of the
# techniques seen for that case can be applied.
# offset each line parallel to itself by the circle radius (direction set by s1/s2)
angle = qad_utils.getAngleBy2Pts(line1.getStartPt(), line1.getEndPt())
line1Par = QadLine().set(qad_utils.getPolarPointByPtAngle(line1.getStartPt(), angle + math.pi / 2, circle.radius * s1), \
qad_utils.getPolarPointByPtAngle(line1.getEndPt(), angle + math.pi / 2, circle.radius * s1))
angle = qad_utils.getAngleBy2Pts(line2.getStartPt(), line2.getEndPt())
line2Par = QadLine().set(qad_utils.getPolarPointByPtAngle(line2.getStartPt(), angle + math.pi / 2, circle.radius * s2), \
qad_utils.getPolarPointByPtAngle(line2.getEndPt(), angle + math.pi / 2, circle.radius * s2))
circleList = circleFrom1IntPtLineLineTanPts(circle.center, line1Par, None, line2Par, None, True)
for circleTan in circleList:
ptPerp = qad_utils.getPerpendicularPointOnInfinityLine(line1.getStartPt(), line1.getEndPt(), circleTan.center)
circleTan.radius = qad_utils.getDistance(ptPerp, circleTan.center)
return circleList
#============================================================================
# circleFromLineLineCircleTanPts
#============================================================================
def circleFromLineLineCircleTanPts(line1, pt1, line2, pt2, circle, pt3):
"""
creates a circle through two tangent lines and a tangent circle:
tangent line 1 (QadLine)
selection point on line 1
tangent line 2 (QadLine)
selection point on line 2
tangent circle (QadCircle object)
selection point on the circle
"""
circleList = []
circleList.extend(solveCircleTangentTo2LinesAndCircle(line1, line2, circle, -1, -1))
circleList.extend(solveCircleTangentTo2LinesAndCircle(line1, line2, circle, -1, 1))
circleList.extend(solveCircleTangentTo2LinesAndCircle(line1, line2, circle, 1, -1))
circleList.extend(solveCircleTangentTo2LinesAndCircle(line1, line2, circle, 1, 1))
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
ptInt = qad_utils.getPerpendicularPointOnInfinityLine(line1.getStartPt(), line1.getEndPt(), circleTan.center)
AvgList.append(ptInt.distance(pt1))
ptInt = qad_utils.getPerpendicularPointOnInfinityLine(line2.getStartPt(), line2.getEndPt(), circleTan.center)
AvgList.append(ptInt.distance(pt2))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle.center)
if circleTan.center.distance(circle.center) < circle.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(ptInt.distance(pt3))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
#============================================================================
# circleFrom3TanPts
#============================================================================
def circleFrom3TanPts(geom1, pt1, geom2, pt2, geom3, pt3):
"""
creates a circle through three tangency objects:
tangency geometry 1 (line, arc or circle)
selection point on geometry 1
tangency geometry 2 (line, arc or circle)
selection point on geometry 2
tangency geometry 3 (line, arc or circle)
selection point on geometry 3
"""
obj1Type = geom1.whatIs()
obj2Type = geom2.whatIs()
obj3Type = geom3.whatIs()
if (obj1Type != "LINE" and obj1Type != "ARC" and obj1Type != "CIRCLE") or \
(obj2Type != "LINE" and obj2Type != "ARC" and obj2Type != "CIRCLE") or \
(obj3Type != "LINE" and obj3Type != "ARC" and obj3Type != "CIRCLE"):
return None
if obj1Type == "ARC": # se è arco lo trasformo in cerchio
obj1 = QadCircle().set(geom1.center, geom1.radius)
obj1Type = "CIRCLE"
else:
obj1 = geom1
if obj2Type == "ARC": # se è arco lo trasformo in cerchio
obj2 = QadCircle().set(geom2.center, geom2.radius)
obj2Type = "CIRCLE"
else:
obj2 = geom2
if obj3Type == "ARC": # se è arco lo trasformo in cerchio
obj3 = QadCircle().set(geom3.center, geom3.radius)
obj3Type = "CIRCLE"
else:
obj3 = geom3
if obj1Type == "LINE":
if obj2Type == "LINE":
if obj3Type == "LINE":
return circleFromLineLineLineTanPts(obj1, pt1, obj2, pt2, obj3, pt3)
elif obj3Type == "CIRCLE":
return circleFromLineLineCircleTanPts(obj1, pt1, obj2, pt2, obj3, pt3)
elif obj2Type == "CIRCLE":
if obj3Type == "LINE":
return circleFromLineLineCircleTanPts(obj1, pt1, obj3, pt3, obj2, pt2)
elif obj3Type == "CIRCLE":
return circleFromLineCircleCircleTanPts(obj1, pt1, obj2, pt2, obj3, pt3)
elif obj1Type == "CIRCLE":
if obj2Type == "LINE":
if obj3Type == "LINE":
return circleFromLineLineCircleTanPts(obj2, pt2, obj3, pt3, obj1, pt1)
elif obj3Type == "CIRCLE":
return circleFromLineCircleCircleTanPts(obj2, pt2, obj1, pt1, obj3, pt3)
elif obj2Type == "CIRCLE":
if obj3Type == "LINE":
return circleFromLineCircleCircleTanPts(obj3, pt3, obj1, pt1, obj2, pt2)
elif obj3Type == "CIRCLE":
return circleFromCircleCircleCircleTanPts(obj1, pt1, obj2, pt2, obj3, pt3)
return None
#============================================================================
# circleFromLineLineLineTanPts
#============================================================================
def circleFromLineLineLineTanPts(line1, pt1, line2, pt2, line3, pt3):
"""
Creates a circle through three tangent lines:
tangent line 1 (QadLine)
selection point on line 1
tangent line 2 (QadLine)
selection point on line 2
tangent line 3 (QadLine)
selection point on line 3
"""
circleList = []
# Intersection points of the lines (line1, line2, line3)
ptInt1 = QadIntersections.twoInfinityLines(line1, line2)
ptInt2 = QadIntersections.twoInfinityLines(line2, line3)
ptInt3 = QadIntersections.twoInfinityLines(line3, line1)
# three parallel lines
if (ptInt1 is None) and (ptInt2 is None):
return circleList
if (ptInt1 is None): # line1 and line2 are parallel
circleList.extend(circleFrom2ParLinesLineTanPts(line1, line2, line3))
elif (ptInt2 is None): # line2 and line3 are parallel
circleList.extend(circleFrom2ParLinesLineTanPts(line2, line3, line1))
elif (ptInt3 is None): # line3 and line1 are parallel
circleList.extend(circleFrom2ParLinesLineTanPts(line3, line1, line2))
else:
# Bisectors of the interior angles of the triangle whose vertices are the intersection points of the lines
Bisector123 = qad_utils.getBisectorInfinityLine(ptInt1, ptInt2, ptInt3)
Bisector231 = qad_utils.getBisectorInfinityLine(ptInt2, ptInt3, ptInt1)
Bisector312 = qad_utils.getBisectorInfinityLine(ptInt3, ptInt1, ptInt2)
# Intersection point of the bisectors = center of the circle inscribed in the triangle
l1 = QadLine().set(Bisector123[0], Bisector123[1])
l2 = QadLine().set(Bisector231[0], Bisector231[1])
center = QadIntersections.twoInfinityLines(l1, l2)
# Perpendicular from the incircle center to line1
ptPer = QadPerpendicularity.fromPointToInfinityLine(center, line1)
radius = center.distance(ptPer)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
# Bisectors of the exterior angles of the triangle
angle = qad_utils.getAngleBy2Pts(Bisector123[0], Bisector123[1]) + math.pi / 2
Bisector123 = QadLine().set(ptInt2, qad_utils.getPolarPointByPtAngle(ptInt2, angle, 10))
angle = qad_utils.getAngleBy2Pts(Bisector231[0], Bisector231[1]) + math.pi / 2
Bisector231 = QadLine().set(ptInt3, qad_utils.getPolarPointByPtAngle(ptInt3, angle, 10))
angle = qad_utils.getAngleBy2Pts(Bisector312[0], Bisector312[1]) + math.pi / 2
Bisector312 = QadLine().set(ptInt1, qad_utils.getPolarPointByPtAngle(ptInt1, angle, 10))
# Intersection points of the bisectors = centers of the escribed (ex-inscribed) circles
center = QadIntersections.twoInfinityLines(Bisector123, Bisector231)
l = QadLine().set(ptInt2, ptInt3)
ptPer = QadPerpendicularity.fromPointToInfinityLine(center, l)
radius = center.distance(ptPer)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
center = QadIntersections.twoInfinityLines(Bisector231, Bisector312)
l.set(ptInt3, ptInt1)
ptPer = QadPerpendicularity.fromPointToInfinityLine(center, l)
radius = center.distance(ptPer)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
center = QadIntersections.twoInfinityLines(Bisector312, Bisector123)
l.set(ptInt1, ptInt2)
ptPer = QadPerpendicularity.fromPointToInfinityLine(center, l)
radius = center.distance(ptPer)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line1)
AvgList.append(ptInt.distance(pt1))
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line2)
AvgList.append(ptInt.distance(pt2))
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line3)
AvgList.append(ptInt.distance(pt3))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
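# Usage sketch (illustrative values): the three sides of a 3-4-5 right triangle;
# picking near the interior edge midpoints should select the inscribed circle
# (radius 1, center (1, 1)) rather than one of the excircles:
#   l1 = QadLine().set(QgsPointXY(0, 0), QgsPointXY(4, 0))
#   l2 = QadLine().set(QgsPointXY(0, 0), QgsPointXY(0, 3))
#   l3 = QadLine().set(QgsPointXY(4, 0), QgsPointXY(0, 3))
#   c = circleFromLineLineLineTanPts(l1, QgsPointXY(2, 0), l2, QgsPointXY(0, 1.5),
#                                    l3, QgsPointXY(2, 1.5))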
#===========================================================================
# circleFrom2ParLinesLineTanPts
#===========================================================================
def circleFrom2ParLinesLineTanPts(parLine1, parLine2, line3):
"""
Creates two circles through 2 parallel lines and a third non-parallel line:
tangent line 1 (QadLine), parallel to line 2
tangent line 2 (QadLine), parallel to line 1
tangent line 3 (QadLine)
"""
circleList = []
ptInt2 = QadIntersections.twoInfinityLines(parLine2, line3)
ptInt3 = QadIntersections.twoInfinityLines(line3, parLine1)
if parLine1.getStartPt() == ptInt3:
pt = parLine1.getEndPt()
else:
pt = parLine1.getStartPt()
Bisector123 = qad_utils.getBisectorInfinityLine(pt, ptInt2, ptInt3)
if parLine2.getStartPt() == ptInt2:
pt = parLine2.getEndPt()
else:
pt = parLine2.getStartPt()
Bisector312 = qad_utils.getBisectorInfinityLine(pt, ptInt3, ptInt2)
# Intersection point of the bisectors = center of the circle
center = qad_utils.getIntersectionPointOn2InfinityLines(Bisector123[0], Bisector123[1], \
Bisector312[0], Bisector312[1])
ptPer = QadPerpendicularity.fromPointToInfinityLine(center, parLine1)
radius = center.distance(ptPer)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
# Bisectors of the exterior angles
angle = qad_utils.getAngleBy2Pts(Bisector123[0], Bisector123[1]) + math.pi / 2
Bisector123 = [ptInt2, qad_utils.getPolarPointByPtAngle(ptInt2, angle, 10)]
angle = qad_utils.getAngleBy2Pts(Bisector312[0], Bisector312[1]) + math.pi / 2
Bisector312 = [ptInt3, qad_utils.getPolarPointByPtAngle(ptInt3, angle, 10)]
# Intersection point of the bisectors = center of the circle
center = qad_utils.getIntersectionPointOn2InfinityLines(Bisector123[0], Bisector123[1], \
Bisector312[0], Bisector312[1])
ptPer = QadPerpendicularity.fromPointToInfinityLine(center, parLine1)
radius = center.distance(ptPer)
circle = QadCircle()
circle.set(center, radius)
circleList.append(circle)
return circleList
#============================================================================
# circleFromLineCircleCircleTanPts
#============================================================================
def circleFromLineCircleCircleTanPts(line, pt, circle1, pt1, circle2, pt2):
"""
creates a circle through a tangent line and two tangent circles:
tangent line (QadLine)
selection point on the line
tangent circle 1 (QadCircle object)
selection point on circle 1
tangent circle 2 (QadCircle object)
selection point on circle 2
"""
circleList = []
circleList.extend(solveCircleTangentToLineAnd2Circles(line, circle1, circle2, -1, -1))
circleList.extend(solveCircleTangentToLineAnd2Circles(line, circle1, circle2, -1, 1))
circleList.extend(solveCircleTangentToLineAnd2Circles(line, circle1, circle2, 1, -1))
circleList.extend(solveCircleTangentToLineAnd2Circles(line, circle1, circle2, 1, 1))
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line)
AvgList.append(ptInt.distance(pt))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle1.center)
if circleTan.center.distance(circle1.center) < circle1.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(ptInt.distance(pt1))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle2.center)
if circleTan.center.distance(circle2.center) < circle2.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(ptInt.distance(pt2))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
#============================================================================
# circleFromCircleCircleCircleTanPts
#============================================================================
def circleFromCircleCircleCircleTanPts(circle1, pt1, circle2, pt2, circle3, pt3):
"""
Creates a circle through three tangent circles:
tangent circle 1 (QadCircle object)
selection point on circle 1
tangent circle 2 (QadCircle object)
selection point on circle 2
tangent circle 3 (QadCircle object)
selection point on circle 3
"""
circleList = []
circle = solveApollonius(circle1, circle2, circle3, -1, -1, -1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, -1, -1, 1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, -1, 1, -1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, -1, 1, 1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, 1, -1, -1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, 1, -1, 1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, 1, 1, -1)
if circle is not None:
circleList.append(circle)
circle = solveApollonius(circle1, circle2, circle3, 1, 1, 1)
if circle is not None:
circleList.append(circle)
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle1.center)
if circleTan.center.distance(circle1.center) < circle1.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(ptInt.distance(pt1))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle2.center)
if circleTan.center.distance(circle2.center) < circle2.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(ptInt.distance(pt2))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle3.center)
if circleTan.center.distance(circle3.center) < circle3.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(ptInt.distance(pt3))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
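# Usage sketch (illustrative values): the classic Apollonius problem for three
# mutually external circles; the pick points steer which of the up-to-8 solutions wins:
#   c1 = QadCircle().set(QgsPointXY(0, 0), 1)
#   c2 = QadCircle().set(QgsPointXY(4, 0), 1)
#   c3 = QadCircle().set(QgsPointXY(2, 4), 1)
#   c = circleFromCircleCircleCircleTanPts(c1, QgsPointXY(1, 0),
#                                          c2, QgsPointXY(3, 0),
#                                          c3, QgsPointXY(2, 3))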
#===========================================================================
# circleFrom1IntPtLineCircleTanPts
#===========================================================================
def circleFrom1IntPtLineCircleTanPts(pt, line1, pt1, circle2, pt2, AllCircles = False):
"""
creates one or more circles (see AllCircles) through 1 intersection point, 1 tangent line and 1 tangent circle:
intersection point
tangent line (QadLine)
selection point on the line
tangent circle (QadCircle)
selection point on the circle
if the AllCircles parameter is True, all circles are returned rather than only the one closest to pt1 and pt2
"""
# http://www.batmath.it/matematica/a_apollonio/prc.htm
circleList = []
# Given a circle circle2, a point pt and a line line1, under the assumption that pt
# lies neither on the line nor on the circle,
# we look for the circles passing through the point and tangent to the given line and circle.
# The problem is easily solved using an inversion centered at pt with arbitrary radius.
# Having found the inverses of the given line and the given circle, find their common tangents.
# The inverses of these common tangents are the circles sought.
if line1.getYOnInfinityLine(pt.x()) == pt.y() or \
qad_utils.getDistance(pt, circle2.center) == circle2.radius:
if AllCircles == True:
return circleList
else:
return None
c = QadCircle()
c.set(pt, 10)
circularInvLine = getCircularInversionOfLine(c, line1)
circularInvCircle = getCircularInversionOfCircle(c, circle2)
tangents = QadTangency.twoCircles(circularInvCircle, circularInvLine)
for tangent in tangents:
circleList.append(getCircularInversionOfLine(c, tangent))
if AllCircles == True:
return circleList
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
ptInt = QadPerpendicularity.fromPointToInfinityLine(circleTan.center, line1)
AvgList.append(qad_utils.getDistance(ptInt, pt1))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle2.center)
if qad_utils.getDistance(circleTan.center, circle2.center) < circle2.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(qad_utils.getDistance(ptInt, pt2))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
#===========================================================================
# circleFrom1IntPtCircleCircleTanPts
#===========================================================================
def circleFrom1IntPtCircleCircleTanPts(pt, circle1, pt1, circle2, pt2):
"""
Creates circles through 1 intersection point and 2 tangent circles:
intersection point
tangent circle 1 (QadCircle object)
selection point on circle 1
tangent circle 2 (QadCircle object)
selection point on circle 2
"""
# http://www.batmath.it/matematica/a_apollonio/prc.htm
circleList = []
# Given a point pt and two circles circle1 and circle2,
# determine the circles passing through pt and tangent to both.
# We propose a construction based on inversion, which seems the most elegant.
# One could also build a construction using the homothety centers of the two given circles,
# but in essence that is just a way of disguising the use of inversion.
# Consider an inversion circle centered at pt with arbitrary radius.
# Find the inverses of the two given circles and their common tangents.
# The inverse circles of these common tangents are the ones that solve the problem.
c = QadCircle()
c.set(pt, 10)
circularInvCircle1 = getCircularInversionOfCircle(c, circle1)
circularInvCircle2 = getCircularInversionOfCircle(c, circle2)
tangents = QadTangency.twoCircles(circularInvCircle1, circularInvCircle2)
for tangent in tangents:
circleList.append(getCircularInversionOfLine(c, tangent))
if len(circleList) == 0:
return None
result = QadCircle()
AvgList = []
Avg = sys.float_info.max
for circleTan in circleList:
del AvgList[:] # empty the list
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle1.center)
if qad_utils.getDistance(circleTan.center, circle1.center) < circle1.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(qad_utils.getDistance(ptInt, pt1))
angle = qad_utils.getAngleBy2Pts(circleTan.center, circle2.center)
if qad_utils.getDistance(circleTan.center, circle2.center) < circle2.radius: # inner circle
angle = angle + math.pi / 2
ptInt = qad_utils.getPolarPointByPtAngle(circleTan.center, angle, circleTan.radius)
AvgList.append(qad_utils.getDistance(ptInt, pt2))
currAvg = qad_utils.numericListAvg(AvgList)
if currAvg < Avg: # closest on average
Avg = currAvg
result.center = circleTan.center
result.radius = circleTan.radius
return result
#============================================================================
# circleFromDiamEndsPtTanPt
#============================================================================
def circleFromDiamEndsPtTanPt(startPt, geom, pt):
"""
Creates a circle through one diameter endpoint and
a tangency object at the other endpoint:
start point
tangency geometry (line, arc or circle)
selection point on the geometry
"""
objType = geom.whatIs()
if (objType != "LINE" and objType != "ARC" and objType != "CIRCLE"): return None
if objType == "ARC": # se è arco lo trasformo in cerchio
obj = QadCircle().set(geom.center, geom.radius)
objType = "CIRCLE"
else:
obj = geom
if objType == "LINE":
ptPer = QadPerpendicularity.fromPointToInfinityLine(startPt, obj)
return QadCircle().fromDiamEnds(startPt, ptPer)
elif objType == "CIRCLE":
l = QadLine().set(startPt, obj.center)
ptIntList = QadIntersections.infinityLineWithCircle(l, obj)
# choose the point closest to pt
ptTan = qad_utils.getNearestPoints(pt, ptIntList)[0]
return QadCircle().fromDiamEnds(startPt, ptTan)
#============================================================================
# circleFromDiamEnds2TanPts
#============================================================================
def circleFromDiamEnds2TanPts(geom1, pt1, geom2, pt2):
"""
Creates a circle through two tangency objects at the endpoints of the diameter:
tangency geometry 1 (line, arc or circle)
selection point on geometry 1
tangency geometry 2 (line, arc or circle)
selection point on geometry 2
"""
obj1Type = geom1.whatIs()
obj2Type = geom2.whatIs()
if (obj1Type != "LINE" and obj1Type != "ARC" and obj1Type != "CIRCLE") or \
(obj2Type != "LINE" and obj2Type != "ARC" and obj2Type != "CIRCLE"):
return None
if obj1Type == "ARC": # se è arco lo trasformo in cerchio
obj1 = QadCircle().set(geom1.center, geom1.radius)
obj1Type = "CIRCLE"
else:
obj1 = geom1
if obj2Type == "ARC": # se è arco lo trasformo in cerchio
obj2 = QadCircle().set(geom2.center, geom2.radius)
obj2Type = "CIRCLE"
else:
obj2 = geom2
if obj1Type == "LINE":
if obj2Type == "LINE":
return None # The diameter cannot be tangent to two lines
elif obj2Type == "CIRCLE":
return circleFromLineCircleTanPts(obj1, obj2, pt2)
elif obj1Type == "CIRCLE":
if obj2Type == "LINE":
return circleFromLineCircleTanPts(obj2, obj1, pt1)
elif obj2Type == "CIRCLE":
return circleFromCircleCircleTanPts(obj1, pt1, obj2, pt2)
return None
#============================================================================
# circleFromLineCircleTanPts
#============================================================================
def circleFromLineCircleTanPts(line, circle, ptCircle):
"""
Creo un cerchio attraverso una linea, un cerchio di tangenza:
linea di tangenza (QadLine)
cerchio di tangenza (oggetto QadCircle)
punto di selezione cerchio
"""
ptPer = QadPerpendicularity.fromPointToInfinityLine(circle.center, line)
tanPoints = []
tanPoints.append(qad_utils.getPolarPointBy2Pts(circle.center, ptPer, circle.radius))
tanPoints.append(qad_utils.getPolarPointBy2Pts(circle.center, ptPer, -circle.radius))
   # choose the tangency point nearest to ptCircle
ptTan = qad_utils.getNearestPoints(ptCircle, tanPoints)[0]
return QadCircle().fromDiamEnds(ptPer, ptTan)
#============================================================================
# circleFromCircleCircleTanPts
#============================================================================
def circleFromCircleCircleTanPts(circle1, pt1, circle2, pt2):
"""
Crea un cerchio attraverso due cerchi di tangenza:
cerchio1 di tangenza (oggetto QadCircle)
punto di selezione cerchio1
cerchio2 di tangenza (oggetto QadCircle)
punto di selezione cerchio2
"""
l = QadLine().set(circle1.center, circle2.center)
ptIntList = QadIntersections.infinityLineWithCircle(l, circle1)
   # choose the intersection point nearest to pt1
ptTan1 = qad_utils.getNearestPoints(pt1, ptIntList)[0]
ptIntList = QadIntersections.infinityLineWithCircle(l, circle2)
   # choose the intersection point nearest to pt2
ptTan2 = qad_utils.getNearestPoints(pt2, ptIntList)[0]
return QadCircle().fromDiamEnds(ptTan1, ptTan2)
#============================================================================
# circleFrom2TanPtsRadius
#============================================================================
def circleFrom2TanPtsRadius(geom1, pt1, geom2, pt2, radius):
"""
Crea un cerchio attraverso 2 oggetti di tangenza e un raggio:
geometria1 di tangenza (linea, arco o cerchio)
punto di selezione geometria1
oggetto2 di tangenza (linea, arco o cerchio)
punto di selezione geometria2
raggio
"""
obj1Type = geom1.whatIs()
obj2Type = geom2.whatIs()
if (obj1Type != "LINE" and obj1Type != "ARC" and obj1Type != "CIRCLE") or \
(obj2Type != "LINE" and obj2Type != "ARC" and obj2Type != "CIRCLE"):
      return None
if obj1Type == "ARC": # se è arco lo trasformo in cerchio
obj1 = QadCircle().set(geom1.center, geom1.radius)
obj1Type = "CIRCLE"
else:
obj1 = geom1
if obj2Type == "ARC": # se è arco lo trasformo in cerchio
obj2 = QadCircle().set(geom2.center, geom2.radius)
obj2Type = "CIRCLE"
else:
obj2 = geom2
if obj1Type == "LINE":
if obj2Type == "LINE":
return circleFromLineLineTanPtsRadius(obj1, pt1, obj2, pt2, radius)
elif obj2Type == "CIRCLE":
return circleFromLineCircleTanPtsRadius(obj1, pt1, obj2, pt2, radius)
elif obj1Type == "CIRCLE":
if obj2Type == "LINE":
return circleFromLineCircleTanPtsRadius(obj2, pt2, obj1, pt1, radius)
elif obj2Type == "CIRCLE":
return circleFromCircleCircleTanPtsRadius(obj1, pt1, obj2, pt2, radius)
return None
#============================================================================
# circleFromLineLineTanPtsRadius
#============================================================================
def circleFromLineLineTanPtsRadius(line1, pt1, line2, pt2, radius):
"""
Crea un cerchio attraverso due linee di tangenza e un raggio:
linea1 di tangenza (QadLine)
punto di selezione linea1
linea2 di tangenza (QadLine)
punto di selezione linea2
raggio
"""
   # compute the midpoint between the two selection points
ptMiddle = qad_utils.getMiddlePoint(pt1, pt2)
   # check whether the lines are parallel
ptInt = QadIntersections.twoInfinityLines(line1, line2)
   if ptInt is None: # the lines are parallel
ptPer = QadPerpendicularity.fromPointToInfinityLine(ptMiddle, line1)
if qad_utils.doubleNear(radius, qad_utils.getDistance(ptPer, ptMiddle)):
return QadCircle().set(ptMiddle, radius)
else:
return None
   # angle of line1
angle = qad_utils.getAngleBy2Pts(line1.getStartPt(), line1.getEndPt())
   # parallel line on one side of line1 at distance radius
angle = angle + math.pi / 2
pt1Par1Line1 = qad_utils.getPolarPointByPtAngle(line1.getStartPt(), angle, radius)
pt2Par1Line1 = qad_utils.getPolarPointByPtAngle(line1.getEndPt(), angle, radius)
   # parallel line on the other side of line1 at distance radius
angle = angle - math.pi
pt1Par2Line1 = qad_utils.getPolarPointByPtAngle(line1.getStartPt(), angle, radius)
pt2Par2Line1 = qad_utils.getPolarPointByPtAngle(line1.getEndPt(), angle, radius)
   # angle of line2
angle = qad_utils.getAngleBy2Pts(line2.getStartPt(), line2.getEndPt())
   # parallel line on one side of line2 at distance radius
angle = angle + math.pi / 2
pt1Par1Line2 = qad_utils.getPolarPointByPtAngle(line2.getStartPt(), angle, radius)
pt2Par1Line2 = qad_utils.getPolarPointByPtAngle(line2.getEndPt(), angle, radius)
   # parallel line on the other side of line2 at distance radius
angle = angle - math.pi
pt1Par2Line2 = qad_utils.getPolarPointByPtAngle(line2.getStartPt(), angle, radius)
pt2Par2Line2 = qad_utils.getPolarPointByPtAngle(line2.getEndPt(), angle, radius)
   # compute the intersections
ptIntList = []
ptInt = qad_utils.getIntersectionPointOn2InfinityLines(pt1Par1Line1, pt2Par1Line1, \
pt1Par1Line2, pt2Par1Line2)
ptIntList.append(ptInt)
ptInt = qad_utils.getIntersectionPointOn2InfinityLines(pt1Par1Line1, pt2Par1Line1, \
pt1Par2Line2, pt2Par2Line2)
ptIntList.append(ptInt)
ptInt = qad_utils.getIntersectionPointOn2InfinityLines(pt1Par2Line1, pt2Par2Line1, \
pt1Par1Line2, pt2Par1Line2)
ptIntList.append(ptInt)
ptInt = qad_utils.getIntersectionPointOn2InfinityLines(pt1Par2Line1, pt2Par2Line1, \
pt1Par2Line2, pt2Par2Line2)
ptIntList.append(ptInt)
   # choose the intersection point nearest to the midpoint
center = qad_utils.getNearestPoints(ptMiddle, ptIntList)[0]
return QadCircle().set(center, radius)
#============================================================================
# circleFromLineCircleTanPtsRadius
#============================================================================
def circleFromLineCircleTanPtsRadius(line, ptLine, circle, ptCircle, radius):
"""
Crea un cerchio attraverso una linea, un cerchio di tangenza e un raggio:
linea di tangenza (QadLine)
punto di selezione linea
cerchio di tangenza (oggetto QadCircle)
punto di selezione cerchio
raggio
"""
   # compute the midpoint between the two selection points
ptMiddle = qad_utils.getMiddlePoint(ptLine, ptCircle)
   # angle of the line
angle = qad_utils.getAngleBy2Pts(line.getStartPt(), line.getEndPt())
   # parallel line on one side of the line at distance radius
angle = angle + math.pi / 2
pt1Par1Line = qad_utils.getPolarPointByPtAngle(line.getStartPt(), angle, radius)
pt2Par1Line = qad_utils.getPolarPointByPtAngle(line.getEndPt(), angle, radius)
   # parallel line on the other side of the line at distance radius
angle = angle - math.pi
pt1Par2Line = qad_utils.getPolarPointByPtAngle(line.getStartPt(), angle, radius)
pt2Par2Line = qad_utils.getPolarPointByPtAngle(line.getEndPt(), angle, radius)
   # build a circle with the radius enlarged by radius
circleTan = QadCircle()
circleTan.set(circle.center, circle.radius + radius)
l = QadLine().set(pt1Par1Line, pt2Par1Line)
ptIntList = QadIntersections.infinityLineWithCircle(l, circleTan)
l.set(pt1Par2Line, pt2Par2Line)
ptIntList2 = QadIntersections.infinityLineWithCircle(l, circleTan)
ptIntList.extend(ptIntList2)
   if len(ptIntList) == 0: # no intersection
return None
   # choose the intersection point nearest to the midpoint
center = qad_utils.getNearestPoints(ptMiddle, ptIntList)[0]
return QadCircle().set(center, radius)
#============================================================================
# circleFromCircleCircleTanPtsRadius
#============================================================================
def circleFromCircleCircleTanPtsRadius(circle1, pt1, circle2, pt2, radius):
"""
Crea un cerchio attraverso due cerchi di tangenza e un raggio:
cerchio1 di tangenza (oggetto QadCircle)
punto di selezione cerchio1
cerchio2 di tangenza (oggetto QadCircle)
punto di selezione cerchio2
raggio
"""
   # compute the midpoint between the two selection points
ptMiddle = qad_utils.getMiddlePoint(pt1, pt2)
   # build two circles with radii enlarged by radius
circle1Tan = QadCircle()
circle1Tan.set(circle1.center, circle1.radius + radius)
circle2Tan = QadCircle()
circle2Tan.set(circle2.center, circle2.radius + radius)
ptIntList = QadIntersections.twoCircles(circle1Tan, circle2Tan)
   if len(ptIntList) == 0: # no intersection
return None
   # choose the intersection point nearest to the midpoint
center = qad_utils.getNearestPoints(ptMiddle, ptIntList)[0]
return QadCircle().set(center, radius)
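# A minimal usage sketch (hypothetical values): a radius-2 circle tangent to two
# unit circles is found by intersecting the two enlarged (radius 1+2) circles.
#    c1 = QadCircle().set(QgsPointXY(0.0, 0.0), 1.0)
#    c2 = QadCircle().set(QgsPointXY(4.0, 0.0), 1.0)
#    tan = circleFromCircleCircleTanPtsRadius(c1, QgsPointXY(1.0, 1.0), c2, QgsPointXY(3.0, 1.0), 2.0)
#    # -> center near (2.0, 2.24), radius 2.0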
#===============================================================================
# solveCircleTangentToLineAnd2Circles
#===============================================================================
def solveCircleTangentToLineAnd2Circles(line, circle1, circle2, s1, s2):
   '''
   Finds the circles tangent to a line and two circles (there would be 8 such
   circles overall, obtained from the 4 combinations of s1, s2, each taking
   the value -1 or 1) and returns the list of circles found
   '''
   # http://www.batmath.it/matematica/a_apollonio/rcc.htm
   # The simplest way to solve this problem is to use a particular geometric
   # transformation, which some call parallel dilation: imagine that the radius r
   # of the smallest of the circles involved shrinks to zero (the circle reduces
   # to its center), while the lines (resp. the other circles) stay parallel
   # (resp. concentric), with their distances from the center of the shrunken
   # circle (resp. their radii) increased or decreased by r.
   # Applying this transformation to our case, shrinking to zero the radius of
   # the smaller circle (or of either one if they have the same radius), we are
   # left with a point, a circle and a line: once the circles through the point
   # and tangent to both the line and the circle are found (in the way already
   # known), we can apply the inverse of the parallel dilation to obtain the
   # required circles.
if circle1.radius <= circle2.radius:
smallerCircle = circle1
greaterCircle = circle2
else:
smallerCircle = circle2
greaterCircle = circle1
linePar = []
angle = qad_utils.getAngleBy2Pts(line[0], line[1])
linePar.append(qad_utils.getPolarPointByPtAngle(line[0], angle + math.pi / 2, smallerCircle.radius * s1))
linePar.append(qad_utils.getPolarPointByPtAngle(line[1], angle + math.pi / 2, smallerCircle.radius * s1))
circlePar = QadCircle(greaterCircle)
circlePar.radius = circlePar.radius + smallerCircle.radius * s1
circleList = circleFrom1IntPtLineCircleTanPts(smallerCircle.center, linePar, None, circlePar, None, True)
for circleTan in circleList:
ptPerp = qad_utils.getPerpendicularPointOnInfinityLine(line[0], line[1], circleTan.center)
circleTan.radius = qad_utils.getDistance(ptPerp, circleTan.center)
return circleList
#===============================================================================
# solveApollonius
#===============================================================================
def solveApollonius(c1, c2, c3, s1, s2, s3):
'''
>>> solveApollonius((0, 0, 1), (4, 0, 1), (2, 4, 2), 1,1,1)
Circle(x=2.0, y=2.1, r=3.9)
>>> solveApollonius((0, 0, 1), (4, 0, 1), (2, 4, 2), -1,-1,-1)
Circle(x=2.0, y=0.8333333333333333, r=1.1666666666666667)
   Finds the circle tangent to three circles (there would be 8 circles overall,
   obtained from the 8 combinations of s1, s2, s3, each taking the value -1 or 1)
'''
x1 = c1.center.x()
y1 = c1.center.y()
r1 = c1.radius
x2 = c2.center.x()
y2 = c2.center.y()
r2 = c2.radius
x3 = c3.center.x()
y3 = c3.center.y()
r3 = c3.radius
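   # Each tangency condition couples (x, y, r) quadratically; subtracting the
   # equations pairwise cancels the x*x + y*y terms, leaving the two relations
   # below, which are linear in (x, y, r).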
v11 = 2*x2 - 2*x1
v12 = 2*y2 - 2*y1
v13 = x1*x1 - x2*x2 + y1*y1 - y2*y2 - r1*r1 + r2*r2
v14 = 2*s2*r2 - 2*s1*r1
v21 = 2*x3 - 2*x2
v22 = 2*y3 - 2*y2
v23 = x2*x2 - x3*x3 + y2*y2 - y3*y3 - r2*r2 + r3*r3
v24 = 2*s3*r3 - 2*s2*r2
if v11 == 0:
return None
w12 = v12/v11
w13 = v13/v11
w14 = v14/v11
if v21 == 0:
return None
w22 = v22/v21-w12
w23 = v23/v21-w13
w24 = v24/v21-w14
if w22 == 0:
return None
P = -w23/w22
Q = w24/w22
M = -w12*P-w13
N = w14 - w12*Q
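   # back-substitution gives x = M + N*r and y = P + Q*r; substituting into the
   # remaining tangency condition yields the quadratic a*r*r + b*r + c = 0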
a = N*N + Q*Q - 1
b = 2*M*N - 2*N*x1 + 2*P*Q - 2*Q*y1 + 2*s1*r1
c = x1*x1 + M*M - 2*M*x1 + P*P + y1*y1 - 2*P*y1 - r1*r1
   # Find a root of a quadratic equation. This requires the circle centers not to be e.g. collinear
if a == 0:
return None
D = (b * b) - (4 * a * c)
   # if D is numerically close to zero, clamp it
if qad_utils.doubleNear(D, 0.0):
D = 0
   elif D < 0: # cannot take the square root of a negative number
return None
rs = (-b-math.sqrt(D))/(2*a)
xs = M+N*rs
ys = P+Q*rs
center = QgsPointXY(xs, ys)
circle = QadCircle().set(center, rs)
return circle
#===============================================================================
# getCircularInversionOfPoint
#===============================================================================
def getCircularInversionOfPoint(circleRef, pt):
"""
   the function returns the circular inversion of a point
"""
dist = qad_utils.getDistance(circleRef.center, pt)
angle = qad_utils.getAngleBy2Pts(circleRef.center, pt)
circInvDist = circleRef.radius * circleRef.radius / dist
return qad_utils.getPolarPointByPtAngle(circleRef.center, angle, circInvDist)
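# Worked example (hypothetical values): a point at distance d from the center of
# the reference circle of radius R maps onto the same ray at distance R*R/d.
#    ref = QadCircle().set(QgsPointXY(0.0, 0.0), 2.0)
#    getCircularInversionOfPoint(ref, QgsPointXY(4.0, 0.0))  # -> (1, 0), since 2*2/4 = 1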
#===============================================================================
# getCircularInversionOfLine
#===============================================================================
def getCircularInversionOfLine(circleRef, line):
"""
   the function returns the circular inversion of a line (which is a circle)
"""
angleLine = qad_utils.getAngleBy2Pts(line.getStartPt(), line.getEndPt())
ptNearestLine = QadPerpendicularity.fromPointToInfinityLine(circleRef.center, line)
dist = qad_utils.getDistance(circleRef.center, ptNearestLine)
pt1 = getCircularInversionOfPoint(circleRef, ptNearestLine)
pt = qad_utils.getPolarPointByPtAngle(ptNearestLine, angleLine, dist)
pt2 = getCircularInversionOfPoint(circleRef, pt)
pt = qad_utils.getPolarPointByPtAngle(ptNearestLine, angleLine + math.pi, dist)
pt3 = getCircularInversionOfPoint(circleRef, pt)
return circleFrom3Pts(pt1, pt2, pt3)
#===============================================================================
# getCircularInversionOfCircle
#===============================================================================
def getCircularInversionOfCircle(circleRef, circle):
"""
   the function returns the circular inversion of a circle (which is again a circle)
"""
angleLine = qad_utils.getAngleBy2Pts(circle.center, circleRef.center)
ptNearestLine = qad_utils.getPolarPointByPtAngle(circle.center, angleLine, circle.radius)
dist = qad_utils.getDistance(circleRef.center, circle.center)
pt1 = getCircularInversionOfPoint(circleRef, ptNearestLine)
pt = qad_utils.getPolarPointByPtAngle(circle.center, angleLine + math.pi / 2, circle.radius)
pt2 = getCircularInversionOfPoint(circleRef, pt)
pt = qad_utils.getPolarPointByPtAngle(circle.center, angleLine - math.pi / 2, circle.radius)
pt3 = getCircularInversionOfPoint(circleRef, pt)
return circleFrom3Pts(pt1, pt2, pt3)
|
gpl-3.0
| 9,054,910,789,273,340,000
| 39.761468
| 118
| 0.607645
| false
| 3.153603
| false
| false
| false
|
hortonworks/hortonworks-sandbox
|
desktop/core/src/desktop/middleware_test.py
|
1
|
1593
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests for Desktop-specific middleware
from desktop.lib.django_test_util import make_logged_in_client
from nose.tools import assert_equal
def test_jframe_middleware():
c = make_logged_in_client()
path = "/about/?foo=bar&baz=3"
response = c.get(path)
assert_equal(path, response["X-Hue-JFrame-Path"])
path_nocache = "/about/?noCache=blabla&foo=bar&baz=3"
response = c.get(path_nocache)
assert_equal(path, response["X-Hue-JFrame-Path"])
path_nocache = "/about/?noCache=blabla&foo=bar&noCache=twiceover&baz=3"
response = c.get(path_nocache)
assert_equal(path, response["X-Hue-JFrame-Path"])
path = "/about/"
response = c.get(path)
assert_equal(path, response["X-Hue-JFrame-Path"])
response = c.get("/about/?")
assert_equal("/about/", response["X-Hue-JFrame-Path"])
|
apache-2.0
| 5,882,152,061,198,778,000
| 36.046512
| 74
| 0.733208
| false
| 3.367865
| false
| false
| false
|
corumcorp/redsentir
|
redsentir/lineatiempo/migrations/0001_initial.py
|
1
|
2480
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2018-04-04 18:06
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ComentarioP',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contenido', models.CharField(max_length=1000, null=True)),
('fecha', models.DateTimeField(auto_now=True)),
('me_gusta', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='MultiMedia',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('archivo', models.FileField(upload_to='static/images/publicaciones')),
('tipo', models.CharField(max_length=10)),
],
),
migrations.CreateModel(
name='Publicacion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contenido', models.CharField(max_length=1000, null=True)),
('fecha', models.DateTimeField(auto_now=True)),
('me_gusta', models.IntegerField(default=0)),
('comentarios', models.IntegerField(default=0)),
('usuario', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='multimedia',
name='publicacion',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lineatiempo.Publicacion'),
),
migrations.AddField(
model_name='comentariop',
name='publicacion',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lineatiempo.Publicacion'),
),
migrations.AddField(
model_name='comentariop',
name='usuario',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
gpl-3.0
| 3,658,711,810,807,085,600
| 39
| 121
| 0.587097
| false
| 4.112769
| false
| false
| false
|
bdunnette/djecks
|
migrations/0005_auto__add_field_case_source__chg_field_case_title__chg_field_deck_titl.py
|
1
|
3187
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Case.source'
db.add_column(u'djecks_case', 'source',
self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
keep_default=False)
# Changing field 'Case.title'
db.alter_column(u'djecks_case', 'title', self.gf('django.db.models.fields.TextField')(default=''))
# Changing field 'Deck.title'
db.alter_column(u'djecks_deck', 'title', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Deleting field 'Case.source'
db.delete_column(u'djecks_case', 'source')
# Changing field 'Case.title'
db.alter_column(u'djecks_case', 'title', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
# Changing field 'Deck.title'
db.alter_column(u'djecks_deck', 'title', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
u'djecks.card': {
'Meta': {'object_name': 'Card'},
'cases': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['djecks.Case']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_back': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_front': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'djecks.case': {
'Meta': {'object_name': 'Case'},
'age': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'decks': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['djecks.Deck']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sex': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'djecks.deck': {
'Meta': {'object_name': 'Deck'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['djecks']
|
agpl-3.0
| -5,144,000,211,033,142,000
| 50.419355
| 165
| 0.564481
| false
| 3.513782
| false
| false
| false
|
jreades/starspy
|
stars/visualization/kernelDensityTime.py
|
1
|
7148
|
"""
Yet Another Kernel Density Implementation in Python
This one supports updating the raster one event at a time, to allow for time series visualization.
"""
from sys import stdout
import pysal
import numpy
from math import exp,pi,ceil,floor,sqrt
#try:
# from osgeo import gdal, gdal_array
# from osgeo.gdalconst import GDT_Float64
#except ImportError:
# import gdal, gdal_array
# from gdalconst import GDT_Float64
def triangular(z):
return 1 - abs(z)
def uniform(z):
    return 0.5  # constant (boxcar) kernel on |z| <= 1
def quadratic(z):
return 0.75*(1 - z*z)
def quartic(z):
return (15*1.0/16)*(1-z*z)*(1-z*z)
def gaussian(z):
    return exp(-0.5*z*z)/sqrt(2*pi)
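# All kernels receive z = distance/bandwidth and are only evaluated for
# 0 <= z <= 1 (update() skips cells beyond the bandwidth), so the Gaussian is
# effectively truncated at the bandwidth.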
class KernelDensity:
"""
Kernel Density Estimation
ptGeoObj -- PySAL Point Geo Object -- pysal.open('points.shp','r')
cellSize -- int -- In Map Units.
bandwidth -- float -- In Map Units.
"""
def __init__(self, extent, cellSize, bandwidth, kernel='quadratic', extent_buffer=0):
left, lower, right, upper = extent
left,lower = left-extent_buffer,lower-extent_buffer
right,upper = right+extent_buffer,upper+extent_buffer
self.extent = pysal.cg.Rectangle(left,lower,right,upper)
self.cellSize = cellSize
self.bandwidth = bandwidth
        kernels = {'triangular': triangular, 'uniform': uniform,
                   'quadratic': quadratic, 'quartic': quartic,
                   'gaussian': gaussian}
        if kernel not in kernels:
            raise ValueError('Unsupported Kernel Type: %r' % kernel)
        self.kernel = kernels[kernel]  # look up by name instead of eval() on input
self._raster = numpy.zeros((self.rows,self.cols))
self.bw = bandwidth
self.cellSize = float(cellSize)
self.grid_lower = lower+(cellSize/2.0)
maxRow = self.rows-1
self.grid_upper = self.grid_lower + (maxRow*self.cellSize)
self.grid_left = left+(self.cellSize/2.0)
        self._n = 0
        # progress-reporting proxies read by erdasImage()/asciiTable();
        # callers may replace these with multiprocessing values
        self.mpValue = None
        self.mpArray = None
def update(self,X,Y,invert=False):
self._n += 1
cellSize = self.cellSize
radius = self.bandwidth / cellSize
float_i = (Y-self.grid_lower) / cellSize
#float_i = (self.grid_upper-Y) / cellSize
i = int(floor(float_i - radius))
i = i if i >= 0 else 0
I = int(floor(float_i + radius))
I = I if I < self.rows else self.rows-1
float_j = (X-self.grid_left) / cellSize
j = int(floor(float_j - radius))
j = j if j >= 0 else 0
J = int(floor(float_j + radius))
J = J if J < self.cols else self.cols-1
#print
#print "update rows[%d:%d], cols[%d:%d]"%(i,I,j,J)
for row in xrange(i,I+1):
for col in xrange(j,J+1):
x = self.grid_left+(col*cellSize)
y = self.grid_lower+(row*cellSize)
#y = self.grid_upper-(row*cellSize)
d = ((x-X)**2 + (y-Y)**2) ** (0.5)
if d <= self.bw:
z = d/self.bw
if invert:
self._raster[row,col] -= self.kernel(z)
else:
#print "update[%d,%d]"%(row,col)
self._raster[row,col] += self.kernel(z)
@property
def raster(self):
return self._raster / (self._n*self.bw)
@property
def cols(self):
return int(ceil(self.extent.width / float(self.cellSize)))
@property
def rows(self):
return int(ceil(self.extent.height / self.cellSize))
def erdasImage(self, outfilename):
mpValue = self.mpValue
mpArray = self.mpArray
driver = gdal.GetDriverByName('HFA')
out = driver.Create(outfilename, self.cols, self.rows, 1, GDT_Float64)
if mpValue and hasattr(mpValue, 'value'):
mpValue.value = 1
mpArray.value = "The output image file is created."
        try:
            out.SetGeoTransform([self.extent.left, self.extent.width/self.cols, 0, self.extent.lower, 0, self.extent.height/self.rows])
            gdal_array.BandWriteArray(out.GetRasterBand(1), self.raster)
            if mpValue and hasattr(mpValue, 'value'):
                mpValue.value = 100
                mpArray.value = "The output image file is successfully written."
            return True
        except:
            if mpValue and hasattr(mpValue, 'value'):
                mpValue.value = 100
                mpArray.value = "Image writing failed."
            return False
def asciiTable(self):
mpValue = self.mpValue
mpArray = self.mpArray
tot = float(self.rows)
s = "ncols %d\n"%self.cols
s+= "nrows %d\n"%self.rows
s+= "xllcorner %f\n"%self.extent.left
s+= "yllcorner %f\n"%self.extent.lower
s+= "cellsize %f\n"%self.cellSize
s+= "nodata_value -1\n"
c = 0
for i in xrange(self.rows-1,-1,-1):
for j in xrange(self.cols):
s+="%f "%self.raster[i,j]
s+="\n"
if mpValue and hasattr(mpValue,'value'):
c += 1
mpValue.value = int(round((c/tot)*100))
if mpArray and hasattr(mpArray,'value'):
mpArray.value = "Saving... %d of %d rows remaining"%(i,tot)
else:
stdout.write('\r%f%% Complete.'%(100*(c/tot)))
stdout.flush()
return s
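# Minimal usage sketch (hypothetical extent and event; see the __main__ driver
# below for a complete time-series example):
#   kd = KernelDensity((0.0, 0.0, 1000.0, 1000.0), cellSize=50, bandwidth=200)
#   kd.update(500.0, 500.0)   # add one event
#   density = kd.raster       # rows x cols numpy array, normalized by n and bw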
if __name__=='__main__':
def draw(kd):
img = numpy.zeros((kd.rows,kd.cols,3),numpy.uint8)
raster = kd.raster
scaled = (raster-raster.min())/(raster.max()-raster.min())
img[:,:,0] = (scaled*255).astype("B") #red
img[:,:,2] = ((1+(scaled*-1))*255).astype("B") #blue
return Image.fromarray(img)
import time
import datetime
from PIL import Image,ImageDraw
t0 = time.clock()
#shp = pysal.open('/Users/charlie/Documents/data/pittsburgh/pitthom.shp','r')
shp = pysal.open('/Users/charlie/Documents/Work/NIJ/Target1/Mesa Data/Mesa_ResBurgAllYears_withGrids/Mesa_ResBurgAllYears_withGrids.shp','r')
dbf = pysal.open('/Users/charlie/Documents/Work/NIJ/Target1/Mesa Data/Mesa_ResBurgAllYears_withGrids/Mesa_ResBurgAllYears_withGrids.dbf','r')
dates = dbf.by_col("REPORT_DAT")
data = dict([(date,set()) for date in dates])
for date,point in zip(dates,shp):
data[date].add(point)
dates.sort()
extent = [shp.header.get(x) for x in ['BBOX Xmin', 'BBOX Ymin', 'BBOX Xmax', 'BBOX Ymax']]
kd = KernelDensity(extent,400,3500)
#open('kd_ascii.txt','w').write(kd.asciiTable())
start = dates[0]
cur = start
step = datetime.timedelta(days=1)
window = datetime.timedelta(days=120)
window = None
end = dates[-1]
#for i,date in enumerate(dates):
i = 0
while cur <= end:
if cur in data:
evts = data[cur]
if window:
clear = cur-window
if clear in data:
for rx,ry in data[clear]:
kd.update(rx,ry,True)
for x,y in evts:
kd.update(x,y)
img = draw(kd)
d = ImageDraw.Draw(img)
if window:
d.text((0,0),clear.isoformat()+" through "+cur.isoformat())
else:
d.text((0,0),cur.isoformat())
del d
img.save("kd/kd_%d.png"%i)
i+=1
cur+=step
print time.clock()-t0
|
gpl-2.0
| 3,872,030,793,151,872,500
| 35.284264
| 145
| 0.559177
| false
| 3.357445
| false
| false
| false
|
hayj/WorkspaceManager
|
workspacemanager/test/setuptest.py
|
1
|
1453
|
# coding: utf-8
import unittest
import doctest
import os
from workspacemanager import setup
from workspacemanager import generateSetup
from workspacemanager.utils import *
from shutil import *
from workspacemanager.test.utils import *
# The level allow the unit test execution to choose only the top level test
min = 0
max = 1
assert min <= max
if min <= 0 <= max:
class DocTest(unittest.TestCase):
def testDoctests(self):
"""Run doctests"""
doctest.testmod(setup)
if min <= 1 <= max:
class Test1(unittest.TestCase):
def setUp(self):
pass
def test1(self):
# Create a fake project:
theProjectDirectory = createFakeDir()
# Check the fake project:
assert os.path.isdir(theProjectDirectory) is True
# Generate the setup and others:
generateSetup(theProjectDirectory=theProjectDirectory)
# Check things:
self.assertTrue("__DES" not in fileToStr(theProjectDirectory + "/setup.py"))
self.assertTrue("<year>" not in fileToStr(theProjectDirectory + "/LICENCE.txt"))
self.assertTrue("version" in fileToStr(theProjectDirectory + "/projecttest/__init__.py"))
if min <= 2 <= max:
pass
if min <= 3 <= max:
pass
if __name__ == '__main__':
unittest.main() # Or execute as Python unit-test in eclipse
|
mit
| -396,985,961,022,106,430
| 25.418182
| 101
| 0.613902
| false
| 4.350299
| true
| false
| false
|
michaelkuty/feincms
|
feincms/module/extensions/ct_tracker.py
|
1
|
6323
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
#
# ct_tracker.py
# FeinCMS
#
# Created by Martin J. Laubach on 02.10.09.
# Copyright (c) 2009 Martin J. Laubach. All rights reserved.
# Updated in 2011 by Matthias Kestenholz for the 1.3 release.
#
# ------------------------------------------------------------------------
"""
Track the content types for pages. Instead of gathering the content
types present in each page at run time, save the current state at
saving time, thus saving at least one DB query on page delivery.
"""
from __future__ import absolute_import, unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import class_prepared, post_save, pre_save
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
from feincms.contrib.fields import JSONField
from feincms.models import ContentProxy
INVENTORY_VERSION = 1
_translation_map_cache = {}
# ------------------------------------------------------------------------
class TrackerContentProxy(ContentProxy):
def _fetch_content_type_counts(self):
"""
If an object with an empty _ct_inventory is encountered, compute all
the content types currently used on that object and save the list in
the object itself. Further requests for that object can then access
that information and find out which content types are used without
resorting to multiple selects on different ct tables.
It is therefore important that even an "empty" object does not have an
empty _ct_inventory.
"""
if 'counts' not in self._cache:
if (self.item._ct_inventory
and self.item._ct_inventory.get('_version_', -1)
== INVENTORY_VERSION):
try:
self._cache['counts'] = self._from_inventory(
self.item._ct_inventory)
except KeyError:
# It's possible that the inventory does not fit together
                    # with the current models anymore, e.g. because a content
# type has been removed.
pass
if 'counts' not in self._cache:
super(TrackerContentProxy, self)._fetch_content_type_counts()
self.item._ct_inventory = self._to_inventory(
self._cache['counts'])
if hasattr(self.item, 'invalidate_cache'):
self.item.invalidate_cache()
self.item.__class__.objects.filter(id=self.item.id).update(
_ct_inventory=self.item._ct_inventory)
# Run post save handler by hand
if hasattr(self.item, 'get_descendants'):
self.item.get_descendants(include_self=False).update(
_ct_inventory=None)
return self._cache['counts']
def _translation_map(self):
cls = self.item.__class__
if cls not in _translation_map_cache:
# Prime translation map and cache it in the class. This needs to be
# done late as opposed to at class definition time as not all
# information is ready, especially when we are doing a "syncdb" the
# ContentType table does not yet exist
map = {}
for idx, fct in enumerate(self.item._feincms_content_types):
dct = ContentType.objects.get_for_model(fct)
# Rely on non-negative primary keys
map[-dct.id] = idx # From-inventory map
map[idx] = dct.id # To-inventory map
_translation_map_cache[cls] = map
return _translation_map_cache[cls]
def _from_inventory(self, inventory):
"""
Transforms the inventory from Django's content types to FeinCMS's
ContentProxy counts format.
"""
map = self._translation_map()
return dict((region, [
(pk, map[-ct]) for pk, ct in items
]) for region, items in inventory.items() if region != '_version_')
def _to_inventory(self, counts):
map = self._translation_map()
inventory = dict(
(
region,
[(pk, map[ct]) for pk, ct in items],
) for region, items in counts.items()
)
inventory['_version_'] = INVENTORY_VERSION
return inventory
# ------------------------------------------------------------------------
def class_prepared_handler(sender, **kwargs):
# It might happen under rare circumstances that not all model classes
# are fully loaded and initialized when the translation map is accessed.
# This leads to (lots of) crashes on the server. Better be safe and
# kill the translation map when any class_prepared signal is received.
_translation_map_cache.clear()
class_prepared.connect(class_prepared_handler)
# ------------------------------------------------------------------------
def tree_post_save_handler(sender, instance, **kwargs):
"""
Clobber the _ct_inventory attribute of this object and all sub-objects
on save.
"""
# TODO: Does not find everything it should when ContentProxy content
# inheritance has been customized.
instance.get_descendants(include_self=True).update(_ct_inventory=None)
# ------------------------------------------------------------------------
def single_pre_save_handler(sender, instance, **kwargs):
"""Clobber the _ct_inventory attribute of this object"""
instance._ct_inventory = None
# ------------------------------------------------------------------------
class Extension(extensions.Extension):
def handle_model(self):
self.model.add_to_class('_ct_inventory', JSONField(
_('content types'), editable=False, blank=True, null=True))
self.model.content_proxy_class = TrackerContentProxy
pre_save.connect(single_pre_save_handler, sender=self.model)
if hasattr(self.model, 'get_descendants'):
post_save.connect(tree_post_save_handler, sender=self.model)
# ------------------------------------------------------------------------
|
bsd-3-clause
| -4,565,303,736,663,644,700
| 38.030864
| 79
| 0.558754
| false
| 4.622076
| false
| false
| false
|
avalentino/gsdview
|
exectools/tests/gtkshell.py
|
1
|
9226
|
#!/usr/bin/env python
# GSDView - Geo-Spatial Data Viewer
# Copyright (C) 2008-2021 Antonio Valentino <antonio.valentino@tiscali.it>
#
# This module is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation either version 2 of the License, or
# (at your option) any later version.
#
# This module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this module; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Simple interactive shell implementation using exectools and GTK+."""
import time
import logging
from gi.repository import Gtk, Gdk
import exectools
from exectools.gtk import (GtkOutputPane, GtkOutputHandler, GtkToolController,
GtkDialogLoggingHandler, GtkLoggingHandler)
class GtkShell:
"""GTK+ interactive shell using tool controller."""
historyfile = 'history.txt'
def __init__(self, debug=False):
# Command box
cmdlabel = Gtk.Label(label='cmd >')
cmdlabel.set_padding(5, 0)
self.cmdbox = Gtk.ComboBoxText.new_with_entry()
self.cmdbox.set_active(0)
self.cmdbox.set_focus_on_click(False)
self.cmdbox.connect('changed', self.on_item_selected)
completion = Gtk.EntryCompletion()
completion.set_model(self.cmdbox.get_model())
completion.set_text_column(0)
self.entry = self.cmdbox.get_child()
self.entry.set_completion(completion)
self.entry.connect('activate', self.on_entry_activate)
self.entry.connect('key-press-event', self.on_key_pressed)
self.entry.connect('populate-popup', self.on_populate_popup)
# self.cmdbutton = Gtk.Button.new_with_mnemonic('_Execute')
self.cmdbutton = Gtk.Button(stock=Gtk.STOCK_EXECUTE)
self.cmdbutton.connect('clicked', self.on_cmdbutton_clicked)
# Note: set_always_show_image is new in Gtk 3.6
self.cmdbutton.set_always_show_image(True)
hbox = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, spacing=3)
hbox.pack_start(cmdlabel, expand=False, fill=False, padding=0)
hbox.pack_start(self.cmdbox, expand=True, fill=True, padding=0)
hbox.pack_start(self.cmdbutton, expand=False, fill=False, padding=0)
# Output pane
outputpane = GtkOutputPane(hide_button=False)
outputpane.set_editable(False)
scrolledwin = Gtk.ScrolledWindow()
scrolledwin.set_policy(Gtk.PolicyType.AUTOMATIC,
Gtk.PolicyType.AUTOMATIC)
scrolledwin.add(outputpane)
# Status bar
self.statusbar = Gtk.Statusbar()
id_ = self.statusbar.get_context_id('ready')
self.statusbar.push(id_, 'Ready.')
# Main window
vbox = Gtk.Box.new(Gtk.Orientation.VERTICAL, spacing=3)
vbox.set_border_width(3)
vbox.pack_start(hbox, expand=False, fill=True, padding=0)
vbox.pack_start(scrolledwin, expand=True, fill=True, padding=0)
vbox.pack_start(self.statusbar, expand=False, fill=True, padding=0)
accelgroup = Gtk.AccelGroup()
accelgroup.connect(ord('d'), Gdk.ModifierType.CONTROL_MASK,
Gtk.AccelFlags.VISIBLE, self.quit)
self.mainwin = Gtk.Window()
self.mainwin.set_title('GTK Shell')
theme = Gtk.IconTheme.get_default()
icon = theme.load_icon(Gtk.STOCK_EXECUTE, Gtk.IconSize.LARGE_TOOLBAR,
Gtk.IconLookupFlags(0))
self.mainwin.set_icon(icon)
self.mainwin.add(vbox)
self.mainwin.set_default_size(650, 500)
self.mainwin.add_accel_group(accelgroup)
self.mainwin.connect('destroy', self.quit)
self.mainwin.show_all()
# Setup the log system
if debug:
level = logging.DEBUG
logging.basicConfig(level=level)
else:
level = logging.INFO
self.logger = logging.getLogger()
formatter = logging.Formatter('%(levelname)s: %(message)s')
handler = GtkLoggingHandler(outputpane)
handler.setLevel(level)
handler.setFormatter(formatter)
self.logger.addHandler(handler)
formatter = logging.Formatter('%(message)s')
handler = GtkDialogLoggingHandler(parent=self.mainwin, dialog=None)
handler.setLevel(logging.WARNING)
handler.setFormatter(formatter)
self.logger.addHandler(handler)
self.logger.setLevel(level)
# Setup high level components and initialize the parent classes
handler = GtkOutputHandler(self.logger, self.statusbar)
self.tool = exectools.ToolDescriptor('', stdout_handler=handler)
self.controller = GtkToolController(logger=self.logger)
self.controller.connect('finished', self.on_finished)
# Final setup
self._state = 'ready' # or maybe __state
self.logger.debug('gtkshell session started at %s.' % time.asctime())
self.load_history()
def main(self):
Gtk.main()
def quit(self, *data):
try:
self.save_history()
finally:
self.logger.debug(
'gtkshell session stopped at %s.' % time.asctime())
Gtk.main_quit()
def load_history(self):
try:
for cmd in open(self.historyfile):
self.cmdbox.append_text(cmd.rstrip())
self.logger.debug('history file "%s" loaded.' % self.historyfile)
except OSError as e:
self.logger.debug('unable to read the history file "%s": %s.' %
(self.historyfile, e))
def save_history(self):
try:
liststore = self.cmdbox.get_model()
history = '\n'.join([item[0] for item in liststore])
f = open(self.historyfile, 'w')
f.write(history)
f.close()
self.logger.debug('history saved in %s' % self.historyfile)
except OSError as e:
self.logger.warning('unable to save the history file "%s": %s' %
(self.historyfile, e))
def _reset(self):
self.controller._reset()
self.cmdbutton.set_label(Gtk.STOCK_EXECUTE)
self.cmdbox.set_sensitive(True)
self.entry.grab_focus()
def reset(self):
self._reset()
self.state = 'ready'
@property
def state(self):
return self._state
@state.setter
def state(self, state):
if state == 'ready':
self._reset()
id_ = self.statusbar.get_context_id('running')
self.statusbar.pop(id_)
elif state == 'running':
self.cmdbox.set_sensitive(False)
id_ = self.statusbar.get_context_id('running')
self.statusbar.push(id_, 'Running ...')
self.cmdbutton.set_label(Gtk.STOCK_STOP)
else:
raise ValueError('invalid status: "%s".' % state)
self._state = state
def execute(self):
cmd = self.entry.get_text()
if cmd:
self.entry.set_text('')
self.cmdbox.append_text(cmd)
cmd = cmd.split()
try:
self.state = 'running'
self.controller.run_tool(self.tool, *cmd)
# raise RuntimeError('simulated runtime error')
except (KeyboardInterrupt, SystemExit):
raise
except Exception as e:
self.logger.error(e, exc_info=True)
self.state = 'ready'
def on_key_pressed(self, widget, event):
key = Gdk.keyval_name(event.keyval)
if key in ('Up', 'Down', 'Page_Up', 'Page_Down'):
self.cmdbox.popup()
return True
def on_cmdbutton_clicked(self, widget=None):
if self.state == 'ready':
self.execute()
elif self.state == 'running':
self.controller.stop_tool()
def on_entry_activate(self, widget=None):
if self.state == 'running':
return
self.execute()
def on_item_selected(self, widget):
self.entry.set_position(-1)
def on_populate_popup(self, widget, menu):
# separator
item = Gtk.SeparatorMenuItem()
item.show()
menu.append(item)
# Clear history
item = Gtk.ImageMenuItem(Gtk.STOCK_CLEAR)
item.set_name('clear_history')
item.connect('activate', self.on_clear_history, None)
item.connect('activate', self.on_clear_entry, None)
item.show()
menu.append(item)
def on_clear_history(self, widget=None):
liststore = self.cmdbox.get_model()
liststore.clear()
def on_clear_entry(self, widget=None):
self.entry.set_text('')
def on_finished(self, widget=None, returncode=0):
self.reset()
if __name__ == '__main__':
GtkShell(debug=True).main()
|
gpl-2.0
| -1,603,177,992,602,668,000
| 33.94697
| 78
| 0.609365
| false
| 3.832987
| false
| false
| false
|
helixyte/TheLMA
|
thelma/resources/experiment.py
|
1
|
12060
|
"""
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Experiment resources.
"""
from datetime import datetime
import logging
from pyramid.httpexceptions import HTTPBadRequest
from everest.querying.specifications import AscendingOrderSpecification
from everest.querying.specifications import DescendingOrderSpecification
from everest.querying.specifications import cntd
from everest.representers.dataelements import DataElementAttributeProxy
from everest.representers.interfaces import IDataElement
from everest.resources.base import Collection
from everest.resources.base import Member
from everest.resources.descriptors import attribute_alias
from everest.resources.descriptors import collection_attribute
from everest.resources.descriptors import member_attribute
from everest.resources.descriptors import terminal_attribute
from everest.resources.utils import get_root_collection
from everest.resources.utils import url_to_resource
from thelma.entities.racklayout import RackLayout
from thelma.entities.utils import get_current_user
from thelma.interfaces import IExperiment
from thelma.interfaces import IExperimentDesign
from thelma.interfaces import IExperimentDesignRack
from thelma.interfaces import IExperimentJob
from thelma.interfaces import IExperimentMetadata
from thelma.interfaces import IExperimentMetadataType
from thelma.interfaces import IExperimentRack
from thelma.interfaces import ILabIsoRequest
from thelma.interfaces import IMoleculeDesignPoolSet
from thelma.interfaces import IPlate
from thelma.interfaces import IRack
from thelma.interfaces import IRackLayout
from thelma.interfaces import IRackShape
from thelma.interfaces import ISubproject
from thelma.interfaces import ITag
from thelma.resources.base import RELATION_BASE_URL
from thelma.tools.experiment import get_writer
from thelma.tools.metadata.ticket \
import IsoRequestTicketDescriptionUpdater
from thelma.tools.metadata.ticket import IsoRequestTicketActivator
from thelma.tools.metadata.ticket import IsoRequestTicketCreator
from thelma.tools.metadata.ticket import IsoRequestTicketDescriptionRemover
from thelma.tools.semiconstants import get_experiment_metadata_type
from thelma.tools.stock.base import STOCKMANAGEMENT_USER
__docformat__ = 'reStructuredText en'
__all__ = ['ExperimentMetadataTypeMember',
'ExperimentCollection',
'ExperimentDesignCollection',
'ExperimentDesignMember',
'ExperimentDesignRackCollection',
'ExperimentDesignRackMember',
'ExperimentMember',
'ExperimentMetadataCollection',
'ExperimentMetadataMember',
'ExperimentRackCollection',
'ExperimentRackMember',
]
class ExperimentMetadataTypeMember(Member):
relation = '%s/experiment-metadata-type' % RELATION_BASE_URL
title = attribute_alias('display_name')
display_name = terminal_attribute(str, 'display_name')
class ExperimentDesignRackMember(Member):
relation = "%s/experiment-design-rack" % RELATION_BASE_URL
title = attribute_alias('label')
label = terminal_attribute(str, 'label')
rack_shape = member_attribute(IRackShape, 'rack_layout.shape')
rack_layout = member_attribute(IRackLayout, 'rack_layout')
tags = collection_attribute(ITag, 'tags')
class ExperimentDesignRackCollection(Collection):
title = 'Experiment Design Racks'
root_name = 'experiment-design-racks'
description = 'Manage experiment design racks'
# default_order = asc('label')
class ExperimentDesignMember(Member):
relation = "%s/experiment-design" % RELATION_BASE_URL
title = terminal_attribute(str, 'slug')
rack_shape = member_attribute(IRackShape, 'rack_shape')
experiment_design_racks = collection_attribute(IExperimentDesignRack,
'experiment_design_racks')
experiments = collection_attribute(IExperiment, 'experiments')
experiment_metadata = member_attribute(IExperimentMetadata,
'experiment_metadata')
class ExperimentDesignCollection(Collection):
title = 'Experiment Designs'
root_name = 'experiment-designs'
description = 'Manage experiment designs'
default_order = AscendingOrderSpecification('label')
class ExperimentMember(Member):
relation = '%s/experiment' % RELATION_BASE_URL
title = attribute_alias('label')
label = terminal_attribute(str, 'label')
source_rack = member_attribute(IRack, 'source_rack')
experiment_design = member_attribute(IExperimentDesign,
'experiment_design')
experiment_racks = collection_attribute(IExperimentRack,
'experiment_racks')
experiment_job = member_attribute(IExperimentJob, 'job')
experiment_metadata_type = \
member_attribute(IExperimentMetadataType,
'experiment_design.experiment_metadata.experiment_metadata_type')
def get_writer(self):
return get_writer(self.get_entity())
class ExperimentCollection(Collection):
title = 'Experiments'
root_name = 'experiments'
description = 'Manage experiments'
default_order = AscendingOrderSpecification('label')
class ExperimentMetadataMember(Member):
relation = '%s/experiment-metadata' % RELATION_BASE_URL
label = terminal_attribute(str, 'label')
title = attribute_alias('label')
ticket_number = terminal_attribute(int, 'ticket_number')
subproject = member_attribute(ISubproject, 'subproject')
number_replicates = terminal_attribute(int, 'number_replicates')
molecule_design_pool_set = member_attribute(IMoleculeDesignPoolSet,
'molecule_design_pool_set')
experiment_design = member_attribute(IExperimentDesign,
'experiment_design')
iso_request = member_attribute(ILabIsoRequest, 'lab_iso_request')
creation_date = terminal_attribute(datetime, 'creation_date')
experiment_metadata_type = member_attribute(IExperimentMetadataType,
'experiment_metadata_type')
def __getitem__(self, name):
if name == 'tags':
tags_dict = {}
design_racks = self.__get_design_racks()
for rack in design_racks:
for tp in rack.rack_layout.tagged_rack_position_sets:
for tag in tp.tags:
tags_dict[tag.get_entity().slug] = tag
tag_coll = get_root_collection(ITag)
tag_coll.filter = cntd(id=[tag.id for tag in tags_dict.values()])
result = tag_coll
elif name == 'experiment-design-racks':
result = self.__get_design_racks()
else:
result = Member.__getitem__(self, name)
return result
@classmethod
def create_from_entity(cls, entity):
if entity.ticket_number is None:
# Create a new ticket and attach the ticket number.
user = get_current_user()
ticket_creator = \
IsoRequestTicketCreator(requester=user,
experiment_metadata=entity)
entity.ticket_number = \
cls.__run_trac_tool(ticket_creator,
'Could not update the ticket: %s.')
return cls(entity)
def update(self, data):
if IDataElement.providedBy(data): # pylint: disable=E1101
# FIXME: This really should be a PATCH operation.
prx = DataElementAttributeProxy(data)
self_entity = self.get_entity()
changed_num_reps = prx.number_replicates != self.number_replicates
emt_id = prx.experiment_metadata_type.get('id')
changed_em_type = emt_id != self.experiment_metadata_type.id
if changed_em_type or changed_num_reps:
if changed_num_reps:
self_entity.number_replicates = prx.number_replicates
if changed_em_type:
self_entity.experiment_metadata_type = \
get_experiment_metadata_type(emt_id)
if not self_entity.experiment_design is None:
# invalidate data to force a fresh upload of the XLS file
self_entity.experiment_design.experiment_design_racks = []
self_entity.experiment_design.worklist_series = None
if not self_entity.lab_iso_request is None:
shape = self_entity.lab_iso_request.rack_layout.shape
new_layout = RackLayout(shape=shape)
self_entity.lab_iso_request.rack_layout = new_layout
self_entity.lab_iso_request.owner = ''
changed_sp = self_entity.subproject.id != prx.subproject.get('id')
if changed_sp:
new_sp = \
url_to_resource(prx.subproject.get('href')).get_entity()
self_entity.subproject = new_sp
self_entity.label = prx.label
# Perform appropriate Trac updates.
if not self_entity.lab_iso_request is None:
if self.iso_request.owner == STOCKMANAGEMENT_USER:
ticket_activator = IsoRequestTicketActivator(
experiment_metadata=self_entity)
self.__run_trac_tool(ticket_activator,
'Could not update the ticket: %s.')
else:
if changed_em_type or changed_num_reps:
trac_updater = IsoRequestTicketDescriptionRemover(
experiment_metadata=self_entity,
changed_num_replicates=changed_num_reps,
changed_em_type=changed_em_type)
else:
url = 'http://thelma/public//LOUICe.html#' \
+ self.path
iso_url = 'http://thelma/public//LOUICe.html#' \
+ self.iso_request.path
trac_updater = IsoRequestTicketDescriptionUpdater(
experiment_metadata=self_entity,
experiment_metadata_link=url,
iso_request_link=iso_url)
self.__run_trac_tool(trac_updater,
'Could not update the ticket: %s.')
else:
Member.update(self, data)
@classmethod
def __run_trac_tool(cls, tool, error_msg_text):
tool.run()
if not tool.transaction_completed():
exc_msg = str(tool.get_messages(logging_level=logging.ERROR))
raise HTTPBadRequest(error_msg_text % exc_msg).exception
return tool.return_value
def __get_design_racks(self):
if self.experiment_design is not None:
design_racks = self.experiment_design.experiment_design_racks
else: # order only type
design_racks = []
return design_racks
class ExperimentMetadataCollection(Collection):
title = 'Experiment Metadata'
root_name = 'experiment-metadatas'
description = 'Manage the experiment metadata'
default_order = DescendingOrderSpecification('creation_date')
class ExperimentRackMember(Member):
relation = '%s/experiment-rack' % RELATION_BASE_URL
experiment = member_attribute(IExperiment, 'experiment')
design_rack = member_attribute(IExperimentDesignRack, 'design_rack')
plate = member_attribute(IPlate, 'rack')
source_rack = member_attribute(IRack, 'source_rack')
class ExperimentRackCollection(Collection):
title = 'Cell Plates'
root_name = 'experiment-racks'
description = 'Manage cell plates'
|
mit
| -6,446,421,184,529,573,000
| 43.832714
| 80
| 0.646186
| false
| 4.273565
| false
| false
| false
|
ArcasProject/Arcas
|
src/arcas/PLOS/main.py
|
1
|
3663
|
from arcas.tools import Api
class Plos(Api):
def __init__(self):
self.standard = 'http://api.plos.org/search?q='
def create_url_search(self, parameters):
"""Creates the search url, combining the standard url and various
search parameters."""
url = self.standard
url += parameters[0]
for i in parameters[1:]:
if 'rows=' in i or 'start=' in i:
url += '&{}'.format(i)
else:
url += '+AND+{}'.format(i)
return url
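    # For example (illustrative values only), parameters
    # ['author:"Smith"', 'title:"malaria"', 'rows=10'] yield
    # http://api.plos.org/search?q=author:"Smith"+AND+title:"malaria"&rows=10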
def to_dataframe(self, raw_article):
"""A function which takes a dictionary with structure of the PLOS
results and transform it to a standardized format.
"""
raw_article['author'] = raw_article.get('author_display', None)
raw_article['abstract'] = raw_article.get('abstract', [None])
raw_article['date'] = int(raw_article.get('publication_date', '0').split('-')[0])
raw_article['journal'] = raw_article.get('journal', None)
raw_article['provenance'] = 'PLOS'
raw_article['score'] = raw_article.get('score', None)
if raw_article['score'] is not None:
raw_article['score'] = int(raw_article['score'])
        raw_article['doi'] = raw_article.get('id', None)
        if raw_article['doi'] is not None:
            raw_article['url'] = 'https://doi.org/' + raw_article['doi']
        else:
            raw_article['url'] = None
raw_article['title'] = raw_article.get('title_display', None)
raw_article['key'], raw_article['unique_key'] = self.create_keys(raw_article)
raw_article['category'] = 'Not available'
raw_article['open_access'] = 'Not available'
return self.dict_to_dataframe(raw_article)
@staticmethod
def xml_to_dict(record):
"""Xml response with information on article to dictionary"""
d = {}
for key, value in record:
if key is not None:
if value is not None:
d[key] = value
else:
d[key] = []
current_key = key
else:
if value is not None:
d[current_key].append(value)
return d
def parse(self, root):
"""Parsing the xml file"""
if root['response']['numFound'] == 0:
return False
return root['response']['docs']
@staticmethod
def parameters_fix(author=None, title=None, abstract=None, year=None,
records=None, start=None, category=None, journal=None,
keyword=None):
parameters = []
if author is not None:
parameters.append('author:"{}"'.format(author))
if title is not None:
parameters.append('title:"{}"'.format(title))
if abstract is not None:
parameters.append('abstract:"{}"'.format(abstract))
if year is not None:
parameters.append('publication_date:[{0}-01-01T00:00:00Z TO '
'{0}-12-30T23:59:59Z]'.format(year))
if journal is not None:
parameters.append('journal:"{}"'.format(journal))
if category is not None:
parameters.append('subject:"{}"'.format(category))
if keyword is not None:
parameters.append('everything:"{}"'.format(keyword))
if records is not None:
parameters.append('rows={}'.format(records))
if start is not None:
parameters.append('start={}'.format(start))
return parameters
@staticmethod
def get_root(response):
root = response.json()
return root
|
mit
| 3,411,965,802,518,887,400
| 36.762887
| 89
| 0.555283
| false
| 4.143665
| false
| false
| false
|
flavour/ifrc_qa
|
modules/s3db/ocr.py
|
1
|
7350
|
# -*- coding: utf-8 -*-
""" OCR Utility Functions
@copyright: 2009-2016 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("OCRDataModel",
"ocr_buttons",
)
import os
from gluon import *
from gluon.storage import Storage
from ..s3 import *
# =============================================================================
class OCRDataModel(S3Model):
"""
"""
names = ("ocr_meta",
"ocr_payload",
"ocr_form_status",
"ocr_field_crops",
"ocr_data_xml",
)
def model(self):
#T = current.T
#messages = current.messages
#UNKNOWN_OPT = messages.UNKNOWN_OPT
#NONE = messages["NONE"]
define_table = self.define_table
# Upload folders
folder = current.request.folder
metadata_folder = os.path.join(folder, "uploads", "ocr_meta")
payload_folder = os.path.join(folder, "uploads", "ocr_payload")
# =====================================================================
# OCR Meta Data
#
tablename = "ocr_meta"
define_table(tablename,
Field("form_uuid",
notnull=True,
length=128,
unique=True),
Field("resource_name",
notnull=True),
Field("s3ocrxml_file", "upload",
length = current.MAX_FILENAME_LENGTH,
uploadfolder = metadata_folder,
),
Field("layout_file", "upload",
length = current.MAX_FILENAME_LENGTH,
uploadfolder = metadata_folder,
),
Field("revision",
notnull=True,
length=128,
unique=True),
Field("pages", "integer"),
*s3_meta_fields())
#======================================================================
# OCR Payload
#
tablename = "ocr_payload"
define_table(tablename,
# a set of images = one complete form
Field("image_set_uuid",
notnull=True),
Field("image_file", "upload",
length = current.MAX_FILENAME_LENGTH,
notnull = True,
uploadfolder = payload_folder,
),
Field("page_number", "integer",
notnull=True),
*s3_meta_fields())
#======================================================================
# OCR Form Status
#
tablename = "ocr_form_status"
define_table(tablename,
Field("image_set_uuid",
notnull=True,
length=128,
unique=True),
Field("form_uuid",
notnull=True),
Field("review_status", "integer",
notnull=True,
default=0),
Field("job_uuid",
length=128,
unique=True),
Field("job_has_errors", "integer"),
*s3_meta_fields())
#======================================================================
# OCR Field Crops
#
tablename = "ocr_field_crops"
define_table(tablename,
Field("image_set_uuid",
notnull=True),
Field("resource_table",
notnull=True),
Field("field_name",
notnull=True),
Field("image_file", "upload",
length = current.MAX_FILENAME_LENGTH,
notnull = True,
uploadfolder = payload_folder,
),
Field("value"),
Field("sequence", "integer"),
*s3_meta_fields())
#======================================================================
# OCR XML Data
#
tablename = "ocr_data_xml"
define_table(tablename,
Field("image_set_uuid",
length=128,
unique=True,
notnull=True),
Field("data_file", "upload",
length = current.MAX_FILENAME_LENGTH,
notnull = True,
uploadfolder = payload_folder,
),
Field("form_uuid",
notnull=True,
default=""),
*s3_meta_fields())
# =============================================================================
def ocr_buttons(r):
""" Generate 'Print PDF' button in the view """
if not current.deployment_settings.has_module("ocr"):
return ""
if r.component:
urlargs = [r.id, r.component_name]
else:
urlargs = []
f = r.function
c = r.controller
a = r.application
T = current.T
UPLOAD = T("Upload Scanned OCR Form")
DOWNLOAD = T("Download OCR-able PDF Form")
_style = "height:10px;float:right;padding:3px;"
output = DIV(
A(IMG(_src="/%s/static/img/upload-ocr.png" % a, _alt=UPLOAD),
_id="upload-pdf-btn",
_href=URL(c=c, f=f, args=urlargs + ["import.pdf"]),
_title=UPLOAD,
_style=_style),
A(IMG(_src="/%s/static/img/download-ocr.png" % a, _alt=DOWNLOAD),
_id="download-pdf-btn",
_href=URL(c=c, f=f, args=urlargs + ["create.pdf"]),
_title=DOWNLOAD,
_style=_style),
)
return output
# END =========================================================================
|
mit
| 1,902,842,553,426,639,000
| 34.167464
| 79
| 0.428027
| false
| 5.13986
| false
| false
| false
|