code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
import numpy as np
import torch
from torch import nn as nn
from rlkit.policies.base import Policy
from rlkit.pythonplusplus import identity
from rlkit.torch.core import PyTorchModule, eval_np
from rlkit.torch.data_management.normalizer import TorchFixedNormalizer
from rlkit.torch.pytorch_util import activation_from_string
class CNN(PyTorchModule):
    """Convolutional network over flattened image inputs.

    ``forward`` takes a batch of flat vectors: the first
    ``input_width * input_height * input_channels`` entries are reshaped
    into an image of shape (C, H, W) and pushed through the conv stack;
    any remaining ``added_fc_input_size`` entries are concatenated onto
    the flattened conv features before the fully connected head.
    """
    # TODO: remove the FC parts of this code

    def __init__(
            self,
            input_width,
            input_height,
            input_channels,
            output_size,
            kernel_sizes,
            n_channels,
            strides,
            paddings,
            hidden_sizes=None,
            added_fc_input_size=0,
            conv_normalization_type='none',
            fc_normalization_type='none',
            init_w=1e-4,
            hidden_init=nn.init.xavier_uniform_,
            hidden_activation=nn.ReLU(),
            output_activation=identity,
            output_conv_channels=False,
            pool_type='none',
            pool_sizes=None,
            pool_strides=None,
            pool_paddings=None,
    ):
        if hidden_sizes is None:
            hidden_sizes = []
        # One entry per conv layer in each of these hyperparameter lists.
        assert len(kernel_sizes) == \
            len(n_channels) == \
            len(strides) == \
            len(paddings)
        assert conv_normalization_type in {'none', 'batch', 'layer'}
        assert fc_normalization_type in {'none', 'batch', 'layer'}
        assert pool_type in {'none', 'max2d'}
        if pool_type == 'max2d':
            assert len(pool_sizes) == len(pool_strides) == len(pool_paddings)
        super().__init__()

        self.hidden_sizes = hidden_sizes
        self.input_width = input_width
        self.input_height = input_height
        self.input_channels = input_channels
        self.output_size = output_size
        self.output_activation = output_activation
        self.hidden_activation = hidden_activation
        self.conv_normalization_type = conv_normalization_type
        self.fc_normalization_type = fc_normalization_type
        self.added_fc_input_size = added_fc_input_size
        # Number of leading entries of the flat input that encode the image.
        self.conv_input_length = (
            self.input_width * self.input_height * self.input_channels
        )
        self.output_conv_channels = output_conv_channels
        self.pool_type = pool_type

        self.conv_layers = nn.ModuleList()
        self.conv_norm_layers = nn.ModuleList()
        self.pool_layers = nn.ModuleList()
        self.fc_layers = nn.ModuleList()
        self.fc_norm_layers = nn.ModuleList()

        for i, (out_channels, kernel_size, stride, padding) in enumerate(
                zip(n_channels, kernel_sizes, strides, paddings)
        ):
            conv = nn.Conv2d(input_channels,
                             out_channels,
                             kernel_size,
                             stride=stride,
                             padding=padding)
            hidden_init(conv.weight)
            conv.bias.data.fill_(0)
            self.conv_layers.append(conv)
            input_channels = out_channels
            if pool_type == 'max2d':
                self.pool_layers.append(
                    nn.MaxPool2d(
                        kernel_size=pool_sizes[i],
                        stride=pool_strides[i],
                        padding=pool_paddings[i],
                    )
                )

        # Use torch rather than ptu because initially the model is on CPU.
        # BUGFIX: probe with (N, C, H, W), matching both nn.Conv2d's input
        # convention and the reshape in ``forward``.  The original used
        # (N, C, W, H), which yields transposed LayerNorm shapes for
        # non-square images (flat size is unaffected).
        test_mat = torch.zeros(
            1,
            self.input_channels,
            self.input_height,
            self.input_width,
        )
        # Find output dim of conv_layers by trial and add norm conv layers
        # sized to each intermediate activation.
        for i, conv_layer in enumerate(self.conv_layers):
            test_mat = conv_layer(test_mat)
            if self.conv_normalization_type == 'batch':
                self.conv_norm_layers.append(nn.BatchNorm2d(test_mat.shape[1]))
            if self.conv_normalization_type == 'layer':
                self.conv_norm_layers.append(nn.LayerNorm(test_mat.shape[1:]))
            if self.pool_type != 'none':
                test_mat = self.pool_layers[i](test_mat)
        self.conv_output_flat_size = int(np.prod(test_mat.shape))

        if self.output_conv_channels:
            # Caller wants raw conv feature maps; no FC head is built.
            self.last_fc = None
        else:
            fc_input_size = self.conv_output_flat_size
            # Used only for injecting input directly into fc layers.
            fc_input_size += added_fc_input_size
            for idx, hidden_size in enumerate(hidden_sizes):
                fc_layer = nn.Linear(fc_input_size, hidden_size)
                fc_input_size = hidden_size
                fc_layer.weight.data.uniform_(-init_w, init_w)
                fc_layer.bias.data.uniform_(-init_w, init_w)
                self.fc_layers.append(fc_layer)
                if self.fc_normalization_type == 'batch':
                    self.fc_norm_layers.append(nn.BatchNorm1d(hidden_size))
                if self.fc_normalization_type == 'layer':
                    self.fc_norm_layers.append(nn.LayerNorm(hidden_size))
            self.last_fc = nn.Linear(fc_input_size, output_size)
            self.last_fc.weight.data.uniform_(-init_w, init_w)
            self.last_fc.bias.data.uniform_(-init_w, init_w)

    def forward(self, input, return_last_activations=False):
        """Map flat input batches to outputs.

        Returns raw conv maps if ``output_conv_channels`` was set, the
        penultimate activations if ``return_last_activations`` is True,
        otherwise ``output_activation(last_fc(...))``.
        """
        conv_input = input.narrow(start=0,
                                  length=self.conv_input_length,
                                  dim=1).contiguous()
        # Reshape from batch of flattened images into (channels, h, w).
        h = conv_input.view(conv_input.shape[0],
                            self.input_channels,
                            self.input_height,
                            self.input_width)
        h = self.apply_forward_conv(h)
        if self.output_conv_channels:
            return h
        # Flatten channels for fc layers.
        h = h.view(h.size(0), -1)
        if self.added_fc_input_size != 0:
            # Extra flat input lives immediately after the image entries.
            extra_fc_input = input.narrow(
                start=self.conv_input_length,
                length=self.added_fc_input_size,
                dim=1,
            )
            h = torch.cat((h, extra_fc_input), dim=1)
        h = self.apply_forward_fc(h)
        if return_last_activations:
            return h
        return self.output_activation(self.last_fc(h))

    def apply_forward_conv(self, h):
        """Run conv -> (norm) -> (pool) -> activation for every layer."""
        for i, layer in enumerate(self.conv_layers):
            h = layer(h)
            if self.conv_normalization_type != 'none':
                h = self.conv_norm_layers[i](h)
            if self.pool_type != 'none':
                h = self.pool_layers[i](h)
            h = self.hidden_activation(h)
        return h

    def apply_forward_fc(self, h):
        """Run the hidden fc -> (norm) -> activation stack (no output layer)."""
        for i, layer in enumerate(self.fc_layers):
            h = layer(h)
            if self.fc_normalization_type != 'none':
                h = self.fc_norm_layers[i](h)
            h = self.hidden_activation(h)
        return h
class ConcatCNN(CNN):
    """CNN that first concatenates its positional inputs into one flat
    tensor (along ``dim``) and then runs the usual CNN forward pass."""

    def __init__(self, *args, dim=1, **kwargs):
        super().__init__(*args, **kwargs)
        self.dim = dim

    def forward(self, *inputs, **kwargs):
        merged = torch.cat(inputs, dim=self.dim)
        return super().forward(merged, **kwargs)
class MergedCNN(CNN):
    """CNN variant whose ``forward`` takes the image input and the extra
    fully-connected input as two separate arguments."""

    def __init__(self,
                 added_fc_input_size,
                 **kwargs
                 ):
        super().__init__(added_fc_input_size=added_fc_input_size,
                         **kwargs)

    def forward(self, conv_input, fc_input):
        # CNN.forward expects a single flat vector: image entries first,
        # then the extra fc entries.
        combined = torch.cat((conv_input, fc_input), dim=1)
        return super().forward(combined)
class CNNPolicy(CNN, Policy):
    """CNN that also implements the ``Policy`` interface, with optional
    observation normalization applied before the forward pass."""

    def __init__(
            self,
            *args,
            obs_normalizer: TorchFixedNormalizer = None,
            **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.obs_normalizer = obs_normalizer

    def forward(self, obs, **kwargs):
        # Normalize first when a normalizer was supplied.
        if self.obs_normalizer:
            obs = self.obs_normalizer.normalize(obs)
        return super().forward(obs, **kwargs)

    def get_action(self, obs_np):
        # Promote the single observation to a batch of one, then unpack.
        action_batch = self.get_actions(obs_np[None])
        return action_batch[0, :], {}

    def get_actions(self, obs):
        # eval_np handles the numpy <-> torch round trip.
        return eval_np(self, obs)
class BasicCNN(PyTorchModule):
    """Plain convolutional stack with no fully connected head.

    ``forward`` maps an image batch of shape (N, C, H, W) to the final
    conv activations; the resulting per-sample shape is exposed as
    ``self.output_shape``.
    """
    # TODO: clean up CNN using this basic CNN
    def __init__(
            self,
            input_width,
            input_height,
            input_channels,
            kernel_sizes,
            n_channels,
            strides,
            paddings,
            normalization_type='none',
            hidden_init=None,
            hidden_activation='relu',
            output_activation=identity,
            pool_type='none',
            pool_sizes=None,
            pool_strides=None,
            pool_paddings=None,
    ):
        # One entry per conv layer in each of these hyperparameter lists.
        assert len(kernel_sizes) == \
            len(n_channels) == \
            len(strides) == \
            len(paddings)
        assert normalization_type in {'none', 'batch', 'layer'}
        assert pool_type in {'none', 'max2d'}
        if pool_type == 'max2d':
            assert len(pool_sizes) == len(pool_strides) == len(pool_paddings)
        super().__init__()
        self.input_width = input_width
        self.input_height = input_height
        self.input_channels = input_channels
        self.output_activation = output_activation
        # Activation may be given by name (e.g. 'relu') or as a callable.
        if isinstance(hidden_activation, str):
            hidden_activation = activation_from_string(hidden_activation)
        self.hidden_activation = hidden_activation
        self.normalization_type = normalization_type
        self.conv_input_length = self.input_width * self.input_height * self.input_channels
        self.pool_type = pool_type
        self.conv_layers = nn.ModuleList()
        self.conv_norm_layers = nn.ModuleList()
        self.pool_layers = nn.ModuleList()
        for i, (out_channels, kernel_size, stride, padding) in enumerate(
                zip(n_channels, kernel_sizes, strides, paddings)
        ):
            conv = nn.Conv2d(input_channels,
                             out_channels,
                             kernel_size,
                             stride=stride,
                             padding=padding)
            if hidden_init:
                hidden_init(conv.weight)
            conv_layer = conv
            self.conv_layers.append(conv_layer)
            input_channels = out_channels
            if pool_type == 'max2d':
                # A pool size of 1 would be a no-op; append None instead so
                # pool_layers stays index-aligned with conv_layers.
                if pool_sizes[i] > 1:
                    self.pool_layers.append(
                        nn.MaxPool2d(
                            kernel_size=pool_sizes[i],
                            stride=pool_strides[i],
                            padding=pool_paddings[i],
                        )
                    )
                else:
                    self.pool_layers.append(None)
        # use torch rather than ptu because initially the model is on CPU
        test_mat = torch.zeros(
            1,
            self.input_channels,
            self.input_height,
            self.input_width,
        )
        # find output dim of conv_layers by trial and add norm conv layers
        for i, conv_layer in enumerate(self.conv_layers):
            test_mat = conv_layer(test_mat)
            if self.normalization_type == 'batch':
                self.conv_norm_layers.append(nn.BatchNorm2d(test_mat.shape[1]))
            if self.normalization_type == 'layer':
                self.conv_norm_layers.append(nn.LayerNorm(test_mat.shape[1:]))
            if self.pool_type != 'none':
                # Skip the no-op (None) pool placeholders.
                if self.pool_layers[i]:
                    test_mat = self.pool_layers[i](test_mat)
        self.output_shape = test_mat.shape[1:]  # ignore batch dim

    def forward(self, conv_input):
        return self.apply_forward_conv(conv_input)

    def apply_forward_conv(self, h):
        # conv -> (norm) -> (pool) -> activation, per layer.
        for i, layer in enumerate(self.conv_layers):
            h = layer(h)
            if self.normalization_type != 'none':
                h = self.conv_norm_layers[i](h)
            if self.pool_type != 'none':
                if self.pool_layers[i]:
                    h = self.pool_layers[i](h)
            h = self.hidden_activation(h)
        return h
| vitchyr/rlkit | rlkit/torch/networks/cnn.py | Python | mit | 12,489 |
"""Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from sys import path
# Pull in machine-specific secrets (e.g. SECRET_KEY, DB credentials) when a
# secret.py module is present.  Catch only ImportError: the original bare
# ``except`` also hid syntax errors and crashes inside secret.py itself.
try:
    from secret import *
except ImportError:
    pass
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
# Add SITE_ROOT to lookup application (wsgi)
path.append(SITE_ROOT)
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('olivier', 'olivier.larcheveque@gmail.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'assets'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
normpath(join(SITE_ROOT, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
normpath(join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
'cms.context_processors.media',
'sekizai.context_processors.sekizai',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
normpath(join(SITE_ROOT, 'templates')),
)
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
# Order matters: session/auth middleware must run before the django CMS
# middleware that relies on the current user and request language.
MIDDLEWARE_CLASSES = (
    # Default Django middleware.
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    # NOTE(review): django.middleware.doc.XViewMiddleware was deprecated in
    # Django 1.7 and removed in 1.9 -- confirm against the pinned version.
    'django.middleware.doc.XViewMiddleware',
    'django.middleware.common.CommonMiddleware',
    # django CMS middleware (page resolution, toolbar, language cookie).
    'cms.middleware.page.CurrentPageMiddleware',
    'cms.middleware.user.CurrentUserMiddleware',
    'cms.middleware.toolbar.ToolbarMiddleware',
    'cms.middleware.language.LanguageCookieMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin panel and documentation:
'django.contrib.admin',
# 'django.contrib.admindocs',
'django.contrib.markup',
)
# Third-party apps combined into INSTALLED_APPS below.
# BUGFIX: 'menus' was listed twice; duplicate app labels are rejected by
# modern Django and are at best redundant on older versions.
THIRD_PARTY_APPS = (
    # Database migration helpers:
    'south',
    # Django CMS and its dependencies:
    'cms',
    'cms.stacks',
    'menus',
    'mptt',
    'sekizai',
    'django_countries',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'resume',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'wsgi.application'
########## END WSGI CONFIGURATION
######### DJANGO CMS
CMS_PERMISSION = True
CMS_PUBLIC_FOR = "all"
LANGUAGES = [
('fr', 'French'),
('en', 'English'),
]
CMS_LANGUAGES = {
'default': {
'fallbacks': ['fr', 'en', ],
'redirect_on_fallback':True,
'public': True,
'hide_untranslated': False,
}
}
CMS_TEMPLATES = (
('layouts/classic.html', 'Classic'),
('layouts/classic_home.html', 'Classic Home'),
('layouts/classic_2columns.html', 'Classic 2 columns'),
)
######### END DJANGO CMS
| olarcheveque/usinacv | usinacv/usinacv/settings/base.py | Python | mit | 8,153 |
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
# Based on Adafruit_I2C.py created by Kevin Townsend.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import os
import subprocess
import Adafruit_GPIO.Platform as Platform
def reverseByteOrder(data):
    """DEPRECATED: See https://github.com/adafruit/Adafruit_Python_GPIO/issues/48"""
    # The historical byte-swapping implementation (courtesy Vishal Sapre)
    # was removed; calling this helper is now always an error.
    raise RuntimeError('reverseByteOrder is deprecated! See: https://github.com/adafruit/Adafruit_Python_GPIO/issues/48')
def get_default_bus():
    """Return the default bus number based on the device platform. For a
    Raspberry Pi either bus 0 or 1 (based on the Pi revision) will be returned.
    For a Beaglebone Black the first user accessible bus, 1, will be returned.
    """
    detected = Platform.platform_detect()
    if detected == Platform.RASPBERRY_PI:
        # Original (revision 1) boards expose I2C on bus 0, later boards on 1.
        return 0 if Platform.pi_revision() == 1 else 1
    if detected == Platform.BEAGLEBONE_BLACK:
        # First user-accessible bus on the BBB (pins P9_19 and P9_20).
        return 1
    raise RuntimeError('Could not determine default I2C bus for platform.')
def get_i2c_device(address, busnum=None, i2c_interface=None, **kwargs):
    """Return an I2C device for the specified address and on the specified bus.
    If busnum isn't specified, the default I2C bus for the platform will attempt
    to be detected.
    """
    bus = get_default_bus() if busnum is None else busnum
    return Device(address, bus, i2c_interface, **kwargs)
def require_repeated_start():
    """Enable repeated start conditions for I2C register reads. This is the
    normal behavior for I2C, however on some platforms like the Raspberry Pi
    there are bugs which disable repeated starts unless explicitly enabled with
    this function. See this thread for more details:
    http://www.raspberrypi.org/forums/viewtopic.php?f=44&t=15840
    """
    plat = Platform.platform_detect()
    if plat == Platform.RASPBERRY_PI and os.path.exists('/sys/module/i2c_bcm2708/parameters/combined'):
        # On the Raspberry Pi there is a bug where register reads don't send a
        # repeated start condition like the kernel smbus I2C driver functions
        # define. As a workaround this bit in the BCM2708 driver sysfs tree can
        # be changed to enable I2C repeated starts.
        # NOTE(review): chmod/echo on a sysfs node normally needs elevated
        # privileges -- confirm callers run with sufficient permissions.
        subprocess.check_call('chmod 666 /sys/module/i2c_bcm2708/parameters/combined', shell=True)
        subprocess.check_call('echo -n 1 > /sys/module/i2c_bcm2708/parameters/combined', shell=True)
    # Other platforms are a no-op because they (presumably) have the correct
    # behavior and send repeated starts.
class Device(object):
    """Wrapper for a single I2C device on an smbus-compatible bus.

    Provides 8-bit, 16-bit and block register reads/writes.  By default the
    adafruit-pureio pure python smbus implementation is used; any other
    smbus-compatible interface class may be supplied instead.
    """

    def __init__(self, address, busnum, i2c_interface=None):
        """Create an instance of the I2C device at the specified address on the
        specified I2C bus number."""
        self._address = address
        if i2c_interface is None:
            # Default to the pure python I2C interface.
            import Adafruit_PureIO.smbus
            self._bus = Adafruit_PureIO.smbus.SMBus(busnum)
        else:
            # Caller supplied an smbus-compatible factory/class.
            self._bus = i2c_interface(busnum)
        self._logger = logging.getLogger('Adafruit_I2C.Device.Bus.{0}.Address.{1:#0X}' \
                                .format(busnum, address))

    def writeRaw8(self, value):
        """Write an 8-bit value on the bus (without register)."""
        value &= 0xFF
        self._bus.write_byte(self._address, value)
        self._logger.debug("Wrote 0x%02X", value)

    def write8(self, register, value):
        """Write an 8-bit value to the specified register."""
        value &= 0xFF
        self._bus.write_byte_data(self._address, register, value)
        self._logger.debug("Wrote 0x%02X to register 0x%02X", value, register)

    def write16(self, register, value):
        """Write a 16-bit value to the specified register."""
        value &= 0xFFFF
        self._bus.write_word_data(self._address, register, value)
        self._logger.debug("Wrote 0x%04X to register pair 0x%02X, 0x%02X",
                           value, register, register+1)

    def writeList(self, register, data):
        """Write bytes to the specified register."""
        self._bus.write_i2c_block_data(self._address, register, data)
        self._logger.debug("Wrote to register 0x%02X: %s", register, data)

    def readList(self, register, length):
        """Read a length number of bytes from the specified register. Results
        will be returned as a bytearray."""
        raw = self._bus.read_i2c_block_data(self._address, register, length)
        self._logger.debug("Read the following from register 0x%02X: %s",
                           register, raw)
        return raw

    def readRaw8(self):
        """Read an 8-bit value on the bus (without register)."""
        val = self._bus.read_byte(self._address) & 0xFF
        self._logger.debug("Read 0x%02X", val)
        return val

    def readU8(self, register):
        """Read an unsigned byte from the specified register."""
        val = self._bus.read_byte_data(self._address, register) & 0xFF
        self._logger.debug("Read 0x%02X from register 0x%02X", val, register)
        return val

    def readS8(self, register):
        """Read a signed byte from the specified register."""
        val = self.readU8(register)
        # Values above 0x7F are negative in two's complement.
        return val - 256 if val > 127 else val

    def readU16(self, register, little_endian=True):
        """Read an unsigned 16-bit value from the specified register, with the
        specified endianness (default little endian, or least significant byte
        first)."""
        val = self._bus.read_word_data(self._address, register) & 0xFFFF
        self._logger.debug("Read 0x%04X from register pair 0x%02X, 0x%02X",
                           val, register, register+1)
        if not little_endian:
            # read_word_data assumes little endian, so swap the two bytes
            # for a big endian result.
            val = ((val << 8) & 0xFF00) | (val >> 8)
        return val

    def readS16(self, register, little_endian=True):
        """Read a signed 16-bit value from the specified register, with the
        specified endianness (default little endian, or least significant byte
        first)."""
        val = self.readU16(register, little_endian)
        return val - 65536 if val > 32767 else val

    def readU16LE(self, register):
        """Read an unsigned 16-bit value from the specified register, in little
        endian byte order."""
        return self.readU16(register, little_endian=True)

    def readU16BE(self, register):
        """Read an unsigned 16-bit value from the specified register, in big
        endian byte order."""
        return self.readU16(register, little_endian=False)

    def readS16LE(self, register):
        """Read a signed 16-bit value from the specified register, in little
        endian byte order."""
        return self.readS16(register, little_endian=True)

    def readS16BE(self, register):
        """Read a signed 16-bit value from the specified register, in big
        endian byte order."""
        return self.readS16(register, little_endian=False)
| adafruit/Adafruit_Python_GPIO | Adafruit_GPIO/I2C.py | Python | mit | 9,083 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.gis.geos import geometry
from PIL import Image
from PIL.ExifTags import TAGS
from ..util import point_from_exif
class Migration(DataMigration):
    """Backfill ``Photo.location`` from each image file's EXIF data."""

    def forwards(self, orm):
        # Derive a point from the EXIF of every stored image and persist
        # it on the model (point_from_exif reads the file on disk).
        for photo in orm['photomap.Photo'].objects.all():
            photo.location = point_from_exif(photo.image.path)
            photo.save()

    def backwards(self, orm):
        # Irreversible by design: writing coordinates back into the image
        # files was never implemented.
        raise NotImplementedError('Too lazy to write a method to write the'
                                  ' coordinates to the EXIF of the files')

    models = {
        u'photomap.photo': {
            'Meta': {'object_name': 'Photo'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'})
        }
    }

    complete_apps = ['photomap']
    symmetrical = True
| dschep/django-photomap | photomap/migrations/0004_copy_exif_data_to_model.py | Python | mit | 1,128 |
# -*- coding: utf-8 -*-
"""The application's model objects"""
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
# from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
#===============================================================================
# test by cl
#===============================================================================
from datetime import date, datetime as dt

import sqlalchemy
from sqlalchemy.orm import attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedColumnError
from sqlalchemy.orm.session import SessionExtension
DB_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
class LogSessionExtension(SessionExtension):
def before_flush(self, session, flush_context, instances):
print "_^" * 30
print "Come into my log session extension"
print "_*" * 30
log = []
for obj in session.dirty:
obj_mapper = object_mapper(obj)
obj_state = attributes.instance_state(obj)
for om in obj_mapper.iterate_to_root():
for obj_col in om.local_table.c:
try:
prop = obj_mapper.get_property_by_column(obj_col)
except UnmappedColumnError:
continue
try:
need2log = obj_col.info["auto_log"]
except:
continue
else:
if not need2log : continue
if prop.key not in obj_state.dict:
getattr(obj, prop.key)
history = attributes.get_history(obj, prop.key)
if not history.has_changes():continue
a, u, d = history
if d:
attr_old_value = d[0]
elif u:
attr_old_value = u[0]
else:
attr_old_value = ""
attr_new_value = a[0] or ""
if not self._isUpdateReally(obj_col, attr_old_value, attr_new_value) : continue
_old, _new = self._2string(obj_col, attr_old_value, attr_new_value)
log.append((obj_col.info.get("field_name", prop.key), _old, _new))
if log :
print log
def _isUpdateReally(self, col, old_value, new_value):
if not old_value and not new_value : return False
if not (old_value and new_value) : return True
if isinstance(col.type, sqlalchemy.types.Integer): return old_value == int(new_value)
if isinstance(col.type, sqlalchemy.types.Float): return old_value == float(new_value)
if isinstance(col.type, (sqlalchemy.types.Unicode, sqlalchemy.types.String)): return unicode(old_value) == unicode(new_value)
if isinstance(col.type, (sqlalchemy.types.Date, sqlalchemy.types.DateTime)) : return old_value == dt.strptime(new_value, DB_DATE_FORMAT)
# if isinstance(prop.type, sqlalchemy.types.Boolean) : return old_value == bool(new_value)
return False
def _2string(self, col, old_value, new_value):
if isinstance(col.type, sqlalchemy.types.Integer): return (old_value or '', new_value or '')
if isinstance(col.type, sqlalchemy.types.Float): return (old_value or '', new_value or '')
if isinstance(col.type, (sqlalchemy.types.Unicode, sqlalchemy.types.String)): return (old_value or "", new_value or "")
if isinstance(col.type, (sqlalchemy.types.Date, sqlalchemy.types.DateTime)) :
_o = "" if not old_value else old_value.strftime(DB_DATE_FORMAT)
_n = new_value or ""
return (_o, _n)
return (old_value, new_value)
# maker = sessionmaker(autoflush = True, autocommit = False,
# extension = [ LogSessionExtension(), ZopeTransactionExtension(), ])
# Session factory: autoflush on, explicit commits, transactions delegated to
# Zope's transaction manager (the audit-log extension above is disabled).
maker = sessionmaker(autoflush = True, autocommit = False,
                     extension = ZopeTransactionExtension())
# Thread-local session registry: DBSession() returns the session bound to
# the current thread / web request.
DBSession = scoped_session(maker)
# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()
# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)
# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata
# If you have multiple databases with overlapping table names, you'll need a
# metadata for each database. Feel free to rename 'metadata2'.
# metadata2 = MetaData()
#####
# Generally you will not want to define your table's mappers, and data objects
# here in __init__ but will want to create modules them in the model directory
# and import them at the bottom of this file.
#
######
def init_model(engine):
    """Call me before using any of the tables or classes in the model.

    Binds the thread-local DBSession to *engine* and imports every model
    module so their tables register themselves on the shared metadata.
    """
    # NOTE(review): rowcount reporting is declared unreliable here --
    # presumably for the specific DB driver in use; confirm before removing.
    engine.dialect.supports_sane_rowcount = False
    DBSession.configure(bind = engine)
    # If you are using reflection to introspect your database and create
    # table objects for you, your tables must be defined and mapped inside
    # the init_model function, so that the engine is available if you
    # use the model outside tg2, you need to make sure this is called before
    # you use the model.
    #
    # See the following example:
    # global t_reflected
    # t_reflected = Table("Reflected", metadata,
    # autoload=True, autoload_with=engine)
    # mapper(Reflected, t_reflected)

    # Import your model modules here (import side effects register tables).
    from tribal.model.auth import User, Group, Permission
    from tribal.model.sportsware import *
    from tribal.model.orsay import *
    from tribal.model.orchestra import *
    from tribal.model.sample import *
    # from tribal.model.pei import *
    from tribal.model.sysutil import *
    from tribal.model.dba import *
    from tribal.model.bby import *
    from tribal.model.tag import *
    # from tribal.model.cabelas import *
    from tribal.model.lemmi import *
    from tribal.model.tmw import *
    from tribal.model.mglobalpack import *
    from tribal.model.prepress import *
| LamCiuLoeng/internal | tribal/model/__init__.py | Python | mit | 6,536 |
from datetime import date, time
try:
from decimal import Decimal
haveDecimal = True
except ImportError:
haveDecimal = False
from twisted.internet import defer
from twisted.trial import unittest
import formal
from formal import validation
class TestValidators(unittest.TestCase):
    """Checks for validator registration on formal types."""

    def testHasValidator(self):
        # A validator passed at construction time must be discoverable.
        field = formal.String(validators=[validation.LengthValidator(max=10)])
        self.assertEquals(field.hasValidator(validation.LengthValidator), True)

    def testRequired(self):
        # required=True implies a RequiredValidator and sets the flag.
        field = formal.String(required=True)
        self.assertEquals(field.hasValidator(validation.RequiredValidator), True)
        self.assertEquals(field.required, True)
class TestCreation(unittest.TestCase):
    """The 'immutable' flag: constructor keyword overrides the class default."""
    def test_immutablility(self):
        self.assertEquals(formal.String().immutable, False)
        self.assertEquals(formal.String(immutable=False).immutable, False)
        self.assertEquals(formal.String(immutable=True).immutable, True)
    def test_immutablilityOverride(self):
        # A subclass may change the default, but the keyword still wins.
        class String(formal.String):
            immutable = True
        self.assertEquals(String().immutable, True)
        self.assertEquals(String(immutable=False).immutable, False)
        self.assertEquals(String(immutable=True).immutable, True)
class TestValidate(unittest.TestCase):
    """Drives Field.validate() through tables of success/failure cases.

    Each table row is (positional args, keyword args, input value,
    expected result or expected exception type).

    Fix: every test method now *returns* the deferred produced by the
    runner helpers.  Previously testFloat*, testTime* and testSequence*
    dropped it, so trial never waited for -- or reported -- their failures.
    """
    @defer.deferredGenerator
    def runSuccessTests(self, type, tests):
        # Validate each input and compare against the expected result.
        for test in tests:
            d = type(*test[0], **test[1]).validate(test[2])
            d = defer.waitForDeferred(d)
            yield d
            self.assertEquals(d.getResult(), test[3])
    @defer.deferredGenerator
    def runFailureTests(self, type, tests):
        # Validate each input and assert the expected exception is raised.
        for test in tests:
            d = type(*test[0], **test[1]).validate(test[2])
            d = defer.waitForDeferred(d)
            yield d
            self.assertRaises(test[3], d.getResult)
    def testStringSuccess(self):
        return self.runSuccessTests(formal.String, [
            ([], {}, None, None),
            ([], {}, '', None),
            ([], {}, ' ', ' '),
            ([], {}, 'foo', 'foo'),
            ([], {}, u'foo', u'foo'),
            ([], {'strip': True}, ' ', None),
            ([], {'strip': True}, ' foo ', 'foo'),
            ([], {'missing': 'bar'}, 'foo', 'foo'),
            ([], {'missing': 'bar'}, '', 'bar'),
            ([], {'strip': True, 'missing': ''}, ' ', ''),
            ])
    def testStringFailure(self):
        return self.runFailureTests(formal.String, [
            ([], {'required': True}, '', formal.FieldValidationError),
            ([], {'required': True}, None, formal.FieldValidationError),
            ])
    def testIntegerSuccess(self):
        return self.runSuccessTests(formal.Integer, [
            ([], {}, None, None),
            ([], {}, 0, 0),
            ([], {}, 1, 1),
            ([], {}, -1, -1),
            ([], {'missing': 1}, None, 1),
            ([], {'missing': 1}, 2, 2),
            ])
    def testIntegerFailure(self):
        return self.runFailureTests(formal.Integer, [
            ([], {'required': True}, None, formal.FieldValidationError),
            ])
    def testFloatSuccess(self):
        # Fixed: return the deferred so trial waits for it.
        return self.runSuccessTests(formal.Float, [
            ([], {}, None, None),
            ([], {}, 0, 0.0),
            ([], {}, 0.0, 0.0),
            ([], {}, .1, .1),
            ([], {}, 1, 1.0),
            ([], {}, -1, -1.0),
            ([], {}, -1.86, -1.86),
            ([], {'missing': 1.0}, None, 1.0),
            ([], {'missing': 1.0}, 2.0, 2.0),
            ])
    def testFloatFailure(self):
        # Fixed: return the deferred so trial waits for it.
        return self.runFailureTests(formal.Float, [
            ([], {'required': True}, None, formal.FieldValidationError),
            ])
    if haveDecimal:
        def testDecimalSuccess(self):
            return self.runSuccessTests(formal.Decimal, [
                ([], {}, None, None),
                ([], {}, Decimal('0'), Decimal('0')),
                ([], {}, Decimal('0.0'), Decimal('0.0')),
                ([], {}, Decimal('.1'), Decimal('.1')),
                ([], {}, Decimal('1'), Decimal('1')),
                ([], {}, Decimal('-1'), Decimal('-1')),
                ([], {}, Decimal('-1.86'), Decimal('-1.86')),
                ([], {'missing': Decimal('1.0')}, None, Decimal('1.0')),
                ([], {'missing': Decimal('1.0')}, Decimal('2.0'), Decimal('2.0')),
                ])
        def testDecimalFailure(self):
            return self.runFailureTests(formal.Decimal, [
                ([], {'required': True}, None, formal.FieldValidationError),
                ])
    def testBooleanSuccess(self):
        return self.runSuccessTests(formal.Boolean, [
            ([], {}, None, None),
            ([], {}, True, True),
            ([], {}, False, False),
            ([], {'missing' :True}, None, True),
            ([], {'missing': True}, False, False)
            ])
    def testDateSuccess(self):
        return self.runSuccessTests(formal.Date, [
            ([], {}, None, None),
            ([], {}, date(2005, 1, 1), date(2005, 1, 1)),
            ([], {'missing': date(2005, 1, 2)}, None, date(2005, 1, 2)),
            ([], {'missing': date(2005, 1, 2)}, date(2005, 1, 1), date(2005, 1, 1)),
            ])
    def testDateFailure(self):
        return self.runFailureTests(formal.Date, [
            ([], {'required': True}, None, formal.FieldValidationError),
            ])
    def testTimeSuccess(self):
        # Fixed: return the deferred so trial waits for it.
        return self.runSuccessTests(formal.Time, [
            ([], {}, None, None),
            ([], {}, time(12, 30, 30), time(12, 30, 30)),
            ([], {'missing': time(12, 30, 30)}, None, time(12, 30, 30)),
            ([], {'missing': time(12, 30, 30)}, time(12, 30, 31), time(12, 30, 31)),
            ])
    def testTimeFailure(self):
        # Fixed: return the deferred so trial waits for it.
        return self.runFailureTests(formal.Time, [
            ([], {'required': True}, None, formal.FieldValidationError),
            ])
    def testSequenceSuccess(self):
        # Fixed: return the deferred so trial waits for it.
        return self.runSuccessTests(formal.Sequence, [
            ([formal.String()], {}, None, None),
            ([formal.String()], {}, ['foo'], ['foo']),
            ([formal.String()], {'missing': ['foo']}, None, ['foo']),
            ([formal.String()], {'missing': ['foo']}, ['bar'], ['bar']),
            ])
    def testSequenceFailure(self):
        # Fixed: return the deferred so trial waits for it.
        return self.runFailureTests(formal.Sequence, [
            ([formal.String()], {'required': True}, None, formal.FieldValidationError),
            ([formal.String()], {'required': True}, [], formal.FieldValidationError),
            ])
    def test_file(self):
        pass
    test_file.skip = "write tests"
| emgee/formal | formal/test/test_types.py | Python | mit | 6,660 |
import logging
import time
import threading
try:
import ConfigParser as config
except:
import configparser as config
from pydispatch import dispatcher
import requests
import ci_screen.service.ci_server_loader as ci_loader
logger = logging.getLogger(__name__)
class CIServerPoller(object):
    """Background poller that fetches cc.xml from every configured CI server.

    After each polling round the collected responses and per-server errors
    are broadcast on the "CI_UPDATE" pydispatch signal.
    """
    def __init__(self):
        self._stop = threading.Event()
        self._update = threading.Event()
        self._poll_rate = self.get_poll_rate()
        self.polling_thread = None
        self.ci_servers = ci_loader.get_ci_servers()

    def __del__(self):
        self.stop_polling()

    def start_polling_async(self):
        """Start (or restart) the polling loop on a daemon thread."""
        self._stop.clear()
        self._update.clear()
        self.polling_thread = threading.Thread(target=self.poll_for_changes)
        self.polling_thread.daemon = True
        self.polling_thread.start()

    def stop_polling(self):
        """Signal the polling loop to exit.

        NOTE(review): the thread handle is dropped without join(); callers
        needing a guaranteed shutdown should join it first -- confirm.
        """
        self._stop.set()
        self.polling_thread = None

    def poll_for_changes(self):
        """Poll every server, dispatch results, wait, repeat until stopped."""
        while not self._stop.is_set():
            errors = {}
            responses = {}
            for ci_server in self.ci_servers:
                name = ci_server['name']
                url = ci_server['url']
                username = ci_server.get('username')
                token = ci_server.get('token')
                auth = None
                if username is not None and token is not None:
                    auth = requests.auth.HTTPBasicAuth(username, token)
                try:
                    response = requests.get('{}/cc.xml'.format(url), auth=auth)
                    if response.status_code == 200:
                        responses[name] = response
                    else:
                        raise Exception('ci server {} returned {}: {}'.format(url, response, response.text))
                except Exception as ex:
                    # Best effort: one failing server must not abort the round.
                    logger.warning(ex)
                    errors[name] = ex
            dispatcher.send(signal="CI_UPDATE", sender=self, responses=responses, errors=errors)
            # Wait on the stop event instead of time.sleep() so stop_polling()
            # takes effect immediately instead of after up to poll_rate seconds.
            self._stop.wait(self._poll_rate)

    def get_poll_rate(self):
        """Read poll_rate_seconds from the [general] section of ci_screen.cfg."""
        # SafeConfigParser/readfp kept for the Python 2 fallback import above.
        config_parser = config.SafeConfigParser(allow_no_value=False)
        with open('ci_screen.cfg') as config_file:
            config_parser.readfp(config_file)
        return int(config_parser.get('general', 'poll_rate_seconds'))
| garyjohnson/ci_screen_2 | ci_screen/service/ci_server_poller.py | Python | mit | 2,307 |
from aqt.utils import askUser, showInfo
_field_names = ["Gid", "Gender", "Source", "Target", "Target Language", "Pronunciation"]
_model_name = "Duolingo Sync"
def create_model(mw):
    """Create the Duolingo Sync note type: all fields plus two card templates."""
    models = mw.col.models
    note_type = models.new(_(_model_name))
    for field_name in _field_names:
        models.addField(note_type, models.newField(_(field_name)))
    # (template name, question format, answer format) for both directions.
    template_specs = (
        ("Card 1",
         "{{Source}}<br>\n<br>\nTo {{Target Language}}:\n\n<hr id=answer>",
         "{{FrontSide}}\n\n<br><br>{{Target}}"),
        ("Card 2",
         "{{Target}}<br>\n<br>\nFrom {{Target Language}}:\n\n<hr id=answer>",
         "{{FrontSide}}\n\n<br><br>{{Source}}"),
    )
    for template_name, qfmt, afmt in template_specs:
        template = models.newTemplate(template_name)
        template['qfmt'] = qfmt
        template['afmt'] = afmt
        models.addTemplate(note_type, template)
    models.add(note_type)
    mw.col.models.save(note_type)
    return note_type
def get_duolingo_model(mw):
    """Return the Duolingo Sync note type, creating or upgrading it as needed.

    Creates the model if missing, then adds any fields introduced by newer
    plugin versions (which forces a full sync, hence the warning dialog).
    """
    m = mw.col.models.byName(_model_name)
    if not m:
        showInfo("Duolingo Sync note type not found. Creating.")
        m = create_model(mw)
    # Add new fields if they don't exist yet
    fields_to_add = [field_name for field_name in _field_names if field_name not in mw.col.models.fieldNames(m)]
    if fields_to_add:
        showInfo("""
        <p>The Duolingo Sync plugin has recently been upgraded to include the following attributes: {}</p>
        <p>This change will require a full-sync of your card database to your Anki-Web account.</p>
        """.format(", ".join(fields_to_add)))
        # Fixed: a stray 'pass' used to sit at the top of this loop body.
        for field_name in fields_to_add:
            fm = mw.col.models.newField(_(field_name))
            mw.col.models.addField(m, fm)
        mw.col.models.save(m)
    return m
| JASchilz/AnkiSyncDuolingo | duolingo_sync/duolingo_model.py | Python | mit | 1,630 |
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from allauth.socialaccount import requests
from allauth.socialaccount.models import SocialLogin, SocialAccount
from allauth.utils import get_user_model
from provider import GoogleProvider
User = get_user_model()
class GoogleOAuth2Adapter(OAuth2Adapter):
    """allauth OAuth2 adapter for Google: endpoint URLs plus profile parsing."""
    provider_id = GoogleProvider.id
    access_token_url = 'https://accounts.google.com/o/oauth2/token'
    authorize_url = 'https://accounts.google.com/o/oauth2/auth'
    profile_url = 'https://www.googleapis.com/oauth2/v1/userinfo'
    def complete_login(self, request, app, token):
        """Fetch the Google userinfo profile and build an unsaved SocialLogin."""
        resp = requests.get(self.profile_url,
                            { 'access_token': token.token,
                              'alt': 'json' })
        extra_data = resp.json
        # extra_data is something of the form:
        #
        # {u'family_name': u'Penners', u'name': u'Raymond Penners',
        #  u'picture': u'https://lh5.googleusercontent.com/-GOFYGBVOdBQ/AAAAAAAAAAI/AAAAAAAAAGM/WzRfPkv4xbo/photo.jpg',
        #  u'locale': u'nl', u'gender': u'male',
        #  u'email': u'raymond.penners@gmail.com',
        #  u'link': u'https://plus.google.com/108204268033311374519',
        #  u'given_name': u'Raymond', u'id': u'108204268033311374519',
        #  u'verified_email': True}
        #
        # TODO: We could use verified_email to bypass allauth email verification
        uid = str(extra_data['id'])
        # User and SocialAccount are populated but NOT saved here; allauth
        # persists them later when the login/signup flow completes.
        user = User(email=extra_data.get('email', ''),
                    last_name=extra_data.get('family_name', ''),
                    first_name=extra_data.get('given_name', ''))
        account = SocialAccount(extra_data=extra_data,
                                uid=uid,
                                provider=self.provider_id,
                                user=user)
        return SocialLogin(account)
# Module-level view callables wired to the adapter above.
oauth2_login = OAuth2LoginView.adapter_view(GoogleOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(GoogleOAuth2Adapter)
| rawjam/django-allauth | allauth/socialaccount/providers/google/views.py | Python | mit | 2,159 |
#! /usr/bin/env python
""" time to run LSTM on this bad boy! """
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import cPickle as pkl
from keras.optimizers import SGD, RMSprop, Adagrad
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU
from text_processing.ISTapps import load_ISTapps
#from ISTapps import load_ISTapps
from sklearn import cross_validation
# different structures to test out
"""
# trial 1: kept memory faulting at a certain point
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
optimizers = ['adam', sgd, 'adagrad', 'adadelta', 'rmsprop']
LSTM_ins = [128, 256, 512]
LSTM_outs = [128, 256]
activations = ['sigmoid', 'relu', 'softmax', 'tanh']
loss_functions = ['binary_crossentropy', 'mean_squared_error']
# trial 2: cross validation settings
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
optimizers = ['adam']
LSTM_ins = [256, 512]
LSTM_outs = [128, 256]
activations = ['sigmoid', 'relu']
loss_functions = ['binary_crossentropy']
#trial 3: try different optimizers with other settings constant
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
optimizers = [sgd, 'adagrad', 'adadelta', 'rmsprop', 'adam']
LSTM_ins = [256]
LSTM_outs = [128]
activations = ['sigmoid']
loss_functions = ['binary_crossentropy']
"""
# trial 4: try basically all combos except adadelta
# NOTE(review): 'adadelta' IS present in the optimizer list below, despite the
# comment above -- confirm which is intended.
sgd1dec = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
sgd1 = SGD(lr=0.1, momentum=0., decay=0., nesterov=False)
sgd01 = SGD(lr=0.01, momentum=0., decay=0., nesterov=False)
sgd001 = SGD(lr=0.001, momentum=0., decay=0., nesterov=False)
optimizers = ['sgd', sgd1, sgd01, sgd001, sgd1dec, 'adam', 'rmsprop', 'adadelta']
LSTM_in_out = [(128, 128), (128, 256), (256,128)]
activations = ['sigmoid', 'tanh', 'relu', 'softmax']
loss_functions = ['mean_squared_error', 'binary_crossentropy']
max_features=100000
maxlen = 30 # cut texts after this number of words
batch_size = 16
k = 5 # cross-validation
#dataset_size = 15000
#results = {}
# Running best average accuracy over the hyper-parameter grid, and the
# settings tuple that achieved it.
max_avg = 0
opt_settings = []
for optimizer in optimizers :
    for loss_func in loss_functions :
        for activation in activations :
            for (LSTM_in, LSTM_out) in LSTM_in_out :
                settings = (optimizer, loss_func, activation, LSTM_in, LSTM_out)
                print("Loading data...")
                (X,y) = load_ISTapps(maxlen, seed=111)
                # is there data signal ??! --> shrink dataset
                #X = X[:dataset_size]
                #y = y[:dataset_size]
                print("Settings: ", settings)
                # cross validation
                kfold_indices = cross_validation.KFold(len(X), n_folds=k)
                cv_round = 0
                cumulative_acc = [0]*k
                for train_indices, test_indices in kfold_indices :
                    X_train = X[train_indices]
                    y_train = y[train_indices]
                    X_test = X[test_indices]
                    y_test = y[test_indices]
                    print("Cross Validation split ", cv_round)
                    print(len(X_train), 'train sequences')
                    print(len(X_test), 'test sequences')
                    print('X_train shape:', X_train.shape)
                    print('X_test shape:', X_test.shape)
                    print('Build model...')
                    # A fresh model per fold: embedding -> LSTM -> dropout -> dense.
                    model = Sequential()
                    model.add(Embedding(max_features, LSTM_in))
                    model.add(LSTM(LSTM_in, LSTM_out)) # try using a GRU instead, for fun
                    model.add(Dropout(0.5))
                    model.add(Dense(LSTM_out, 1))
                    model.add(Activation(activation))
                    # try using different optimizers and different optimizer configs
                    model.compile(loss=loss_func, optimizer=optimizer, class_mode="binary")
                    print("Train...")
                    model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=5,
                        validation_split=0.1, show_accuracy=True, verbose=2)
                    score = model.evaluate(X_test, y_test, batch_size=batch_size)
                    print('Test score:', score)
                    # Threshold the sigmoid outputs at 0.55 rather than 0.5.
                    classes = [int(val > 0.55) for val in model.predict(X_test, batch_size=batch_size)]
                    #classes = model.predict_classes(X_test, batch_size=batch_size)
                    acc = np_utils.accuracy(classes, y_test)
                    print('Test accuracy:', acc)
                    cumulative_acc[cv_round] = acc
                    cv_round += 1
                    # try to conserve some memory cause getting weird memory errors
                    del X_train
                    del y_train
                    del X_test
                    del y_test
                    del model
                cross_val_acc = sum(cumulative_acc) / k
                # keep track of current maximum average and settings
                if (max_avg < cross_val_acc) :
                    max_avg = cross_val_acc
                    opt_settings = (settings)
                with open('/home/enagaraj/cumulative_results.txt', 'a') as f :
                    print ('\nsettings: ', settings, 'accuracies: ', cumulative_acc, 'avg acc: ', cross_val_acc, file=f)
                #results[settings] = (cumulative_acc, cross_val_acc)
                print ('Average accuracy: ', cross_val_acc)
                # again try to satisfy the memory gods
                del X
                del y
# calculate best value
#vals = results.values()
#max_avg = 0
#for lst,avg in vals :
#	if avg > maxv :
#		max_avg = avg
with open('/home/enagaraj/cumulative_results_len30.txt', 'a') as f :
    print ('\nmax average: ', max_avg, 'optimal settings: ', opt_settings, file=f)
#print (results) | eelanagaraj/IST_project | LSTM_ISTapps.py | Python | mit | 5,130 |
#coding=utf-8
from django import forms
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
# Register your models here.
class RegisterForm(forms.Form):
    """Sign-up form: nickname, e-mail and a doubly-entered password."""
    username=forms.CharField(label=_(u"昵称"),max_length=20,widget=forms.TextInput(attrs={'size': 20,'class':"form-control"}))
    email=forms.EmailField(label=_(u"邮件"),max_length=20,widget=forms.EmailInput(attrs={'size': 20,'class':"form-control"}))
    password=forms.CharField(label=_(u"密码"),max_length=20,widget=forms.PasswordInput(attrs={'size': 20,'class':"form-control"}))
    re_password=forms.CharField(label=_(u"重复密码"),max_length=20,widget=forms.PasswordInput(attrs={'size': 20,'class':"form-control"}))
    def clean_username(self):
        '''Reject nicknames that are already taken (case-insensitive).'''
        users = User.objects.filter(username__iexact=self.cleaned_data["username"])
        if not users:
            return self.cleaned_data["username"]
        raise forms.ValidationError(_(u"该昵称已经被使用请使用其他的昵称"))
    def clean_email(self):
        '''Reject e-mail addresses that are already registered (case-insensitive).'''
        emails = User.objects.filter(email__iexact=self.cleaned_data["email"])
        if not emails:
            return self.cleaned_data["email"]
        raise forms.ValidationError(_(u"该邮箱已经被使用请使用其他的"))
    def clean(self):
        """Cross-field checks: password must differ from the username, and
        both password entries must match."""
        cleaned_data = super(RegisterForm, self).clean()
        if cleaned_data.get("password") == cleaned_data.get("username"):
            raise forms.ValidationError(_(u"用户名和密码不能一样"))
        if cleaned_data.get("password") != cleaned_data.get("re_password"):
            raise forms.ValidationError(_(u"两次输入密码不一致"))
        return cleaned_data
class LoginForm(forms.Form):
    """Login form: nickname and password only."""
    username=forms.CharField(label=_(u"昵称"),max_length=20,widget=forms.TextInput(attrs={'size': 20,'class':"form-control"}))
    password=forms.CharField(label=_(u"密码"),max_length=20,widget=forms.PasswordInput(attrs={'size': 20,'class':"form-control"}))
| flysmoke/ijizhang | ijizhang_prj/accounts/forms.py | Python | mit | 2,097 |
from django.conf.urls import include, url
from django.views.decorators.cache import cache_page as cp
from django.views.generic import TemplateView
from rest_framework.routers import DefaultRouter
from .views import ReviewViewSet, ReviewView
# REST API routes for ReviewViewSet live under /api/.
router = DefaultRouter()
router.register(r'reviews', ReviewViewSet)
# Page-cache durations: index/sample for 5 minutes, the manual for an hour;
# the edit page is deliberately served uncached.
urlpatterns = [
    url(r'^$', cp(60 * 5)(ReviewView.as_view(template_name='reviews/index_list.html')), name='reviews-index'),
    url(r'^api/', include(router.urls), name='reviews-api'),
    url(r'^manual$', cp(60 * 60)(ReviewView.as_view(template_name='reviews/manual.html')), name='reviews-manual'),
    url(r'^sample$', cp(60 * 5)(ReviewView.as_view(template_name='reviews/sample_list.html')), name='reviews-sample'),
    url(r'^edit$', TemplateView.as_view(template_name='reviews/edit.html'), name='reviews-edit'),
]
| mikoim/japanization | reviews/urls.py | Python | mit | 834 |
"""
WSGI config for text_analysis project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default the settings module; deployments may override it via the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "text_analysis.settings")
application = get_wsgi_application()
| bungoume/mecab-web-api | text_analysis/text_analysis/wsgi.py | Python | mit | 403 |
import threading
import time
import logging
class _Module(threading.Thread):
def __init__(self, parent, name):
threading.Thread.__init__(self)
self.logger = logging.getLogger()
self.parent = parent
self.daemon = False
self.event = threading.Event()
self.running = True
self._className = self.__class__.__name__
self.name = name
def run(self):
self.logger.debug("Starting thread {}".format(self.name))
while self.running:
time.sleep(0.0001)
self.logger.debug("Thread {} got exit signal".format(self.name))
def handleEvent(self):
self.logger.debug("{} handling event".format(self.name))
pass
def stop(self):
self.running = False
self.logger.debug("{} stopping".format(self.name))
| cscashby/pi-showcontrol | src/modules/_Module.py | Python | mit | 772 |
"""
Pixel perfect collision algorithm
From http://www.pygame.org/wiki/FastPixelPerfect?parent=CookBook
"""
import pygame
def pixel_perfect_check_collision(obj1, obj2):
    """Return True when the opaque pixels of the two sprites overlap."""
    mask_a = pygame.mask.from_surface(obj1.image)
    mask_b = pygame.mask.from_surface(obj2.image)
    # Offset of obj1's mask relative to obj2's top-left corner.
    offset = (obj1.rect.left - obj2.rect.left, obj1.rect.top - obj2.rect.top)
    return mask_a.overlap(mask_b, offset) is not None
import matplotlib.pyplot as plt
import numpy as np
import pdb
if __name__ == "__main__":
    fig, ax = plt.subplots(figsize=(10,5))
    # For each client count, summarise the five runs by the number of rows
    # in each loss CSV (presumably the recorded run length -- confirm).
    for clients in (10, 50, 100, 200):
        median_data = np.zeros(5)
        for k in (1, 2, 3, 4, 5):
            data = np.loadtxt("loss_" + str(clients) + "_" + str(k) + ".csv", delimiter=',')
            median_data[k-1] = data.shape[0]
        print str(clients) + " median is " + str(np.median(median_data))
        print str(clients) + " stddev is " + str(np.std(median_data))
    # Plot run #2 for each client count.
    data1 = np.loadtxt("loss_10_2.csv", delimiter=',')
    data2 = np.loadtxt("loss_50_2.csv", delimiter=',')
    data3 = np.loadtxt("loss_100_2.csv", delimiter=',')
    data4 = np.loadtxt("loss_200_2.csv", delimiter=',')
    plt.plot(data1, color="black", label="10 clients", lw=5)
    plt.plot(data2, color="red", label="50 clients", lw=5)
    plt.plot(data3, color="orange", label="100 clients", lw=5)
    plt.plot(data4, color="green", label="200 clients", lw=5)
    plt.legend(loc='best', ncol=1, fontsize=18)
    plt.xlabel("Time (s)", fontsize=22)
    plt.ylabel("Training Error", fontsize=22)
    axes = plt.gca()
    axes.set_ylim([0, 0.5])
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    plt.setp(ax.get_xticklabels(), fontsize=18)
    plt.setp(ax.get_yticklabels(), fontsize=18)
    plt.tight_layout()
    plt.show()
# coding=utf8
import random
import logging
import json
import time
from httplib2 import Http
from logging import handlers
# Rotating client log: 1 MiB per file, 5 backups kept.
LOG_FILE = '../logs/WSCN_client.log'
handler = logging.handlers.RotatingFileHandler(LOG_FILE, maxBytes=1024 * 1024, backupCount=5) # instantiate the handler
handler.setFormatter(logging.Formatter('%(asctime)s - %(filename)s:%(lineno)s - %(name)s - %(message)s'))
logger = logging.getLogger('client')
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
# Shared HTTP client for all requests.
h = Http()
def send():
    """POST one randomly generated order to the local exchange and log its id.

    Only limit orders ("buy"/"sell") return a parseable order_id; market
    orders are fired and forgotten.
    """
    exchange_type = random_type()
    # NOTE(review): the JSON payload is built by string concatenation;
    # json.dumps would be safer if any of these inputs stop being trusted.
    r, c = h.request("http://127.0.0.1:4000/trade.do", "POST",
                     "{\"symbol\": \"WSCN\", \"type\": \"" + exchange_type + "\", \"amount\": " + random_amount() +
                     ", \"price\": " + random_price() + "}", {"Content-Type": "text/json"})
    if exchange_type == "buy" or exchange_type == "sell":
        obj = json.loads(c)
        logger.info("%s, %s", obj['order_id'], exchange_type)
def random_type():
    """Pick one of the four supported order types at random."""
    order_types = ("buy", "sell", "buy_market", "sell_market")
    return str(random.choice(order_types))
def random_amount():
    """Return a random order amount in [1, 99] as a string."""
    # randint(1, 99) is equivalent to randrange(1, 100, 1).
    return str(random.randint(1, 99))
def random_price():
    """Return a random price in [90.00, 110.00], rounded to 2 decimals, as a string."""
    price = round(random.uniform(90.00, 110.00), 2)
    return str(price)
if __name__ == '__main__':
    # Fire 1000 random orders at roughly 4.3 orders/second.
    for i in range(0, 1000):
        send()
        time.sleep(0.230)
| katuyo/symbol-exchange | integration/script.py | Python | mit | 1,297 |
import json
import math
######################################################
# add parameters
######################################################
def addParameters(scene, h=0.005, maxIter=5, maxIterVel=5, velocityUpdateMethod=0, contactTolerance=0.05, triangleModelSimulationMethod=2, triangleModelBendingMethod=2,
                  contactStiffnessRigidBody=1.0, contactStiffnessParticleRigidBody=100.0,
                  cloth_stiffness=1.0, cloth_bendingStiffness=0.005, cloth_xxStiffness=1.0, cloth_yyStiffness=1.0, cloth_xyStiffness=1.0,
                  cloth_xyPoissonRatio=0.3, cloth_yxPoissonRatio=0.3, cloth_normalizeStretch=0, cloth_normalizeShear=0, gravity=None, numberOfStepsPerRenderUpdate=4):
    """Store simulation parameters in scene['Simulation'].

    Fixed: *gravity* previously defaulted to a shared mutable list, so a
    caller mutating scene['Simulation']['gravity'] could corrupt the
    default for every later call.  It now defaults to None and a fresh
    [0, -9.81, 0] list is created per call.
    """
    if gravity is None:
        gravity = [0, -9.81, 0]
    scene['Simulation'] = {
        'timeStepSize': h,
        'gravity': gravity,
        'maxIter': maxIter,
        'maxIterVel': maxIterVel,
        'numberOfStepsPerRenderUpdate': numberOfStepsPerRenderUpdate,
        'velocityUpdateMethod': velocityUpdateMethod,
        'contactTolerance': contactTolerance,
        'triangleModelSimulationMethod': triangleModelSimulationMethod,
        'triangleModelBendingMethod': triangleModelBendingMethod,
        'contactStiffnessRigidBody': contactStiffnessRigidBody,
        'contactStiffnessParticleRigidBody': contactStiffnessParticleRigidBody,
        'cloth_stiffness': cloth_stiffness,
        'cloth_bendingStiffness': cloth_bendingStiffness,
        'cloth_xxStiffness': cloth_xxStiffness,
        'cloth_yyStiffness': cloth_yyStiffness,
        'cloth_xyStiffness': cloth_xyStiffness,
        'cloth_xyPoissonRatio': cloth_xyPoissonRatio,
        'cloth_yxPoissonRatio': cloth_yxPoissonRatio,
        'cloth_normalizeStretch': cloth_normalizeStretch,
        'cloth_normalizeShear': cloth_normalizeShear
    }
    return
######################################################
# add rigid bodies
######################################################
def addRigidBody(scene, geoFile, coType, coFile='', coScale=None, translation=None, axis=None, angle=0.0, scale=None,
                 v=None, omega=None, dynamic=1, density=500, rest=0.6, friction=0.3,
                 testMesh=1):
    """Append a rigid body description to scene['RigidBodies'] and return its id.

    Fixed: the vector parameters previously used shared mutable list
    defaults; they now default to None and fresh lists are created per call.
    """
    global current_id
    if coScale is None:
        coScale = [1, 1, 1]
    if translation is None:
        translation = [0, 0, 0]
    if axis is None:
        axis = [1, 0, 0]
    if scale is None:
        scale = [1, 1, 1]
    if v is None:
        v = [0, 0, 0]
    if omega is None:
        omega = [0, 0, 0]
    rb = {'id': current_id,
          'geometryFile': geoFile,
          'isDynamic': dynamic,
          'density': density,
          'translation': translation,
          'rotationAxis': axis,
          'rotationAngle': angle,
          'scale': scale,
          'velocity': v,
          'angularVelocity': omega,
          'restitution': rest,
          'friction': friction,
          'collisionObjectType': coType,
          'collisionObjectScale': coScale,
          'collisionObjectFileName': coFile,
          'testMesh': testMesh
          }
    scene['RigidBodies'].append(rb)
    current_id += 1
    return current_id - 1
######################################################
# add triangle models
######################################################
def addTriangleModel(scene, geoFile, translation=None, axis=None, angle=0.0, scale=None,
                     rest=0.6, friction=0.3, staticParticles=None):
    """Append a triangle-mesh (cloth) model to scene['TriangleModels']; return its id.

    Fixed: shared mutable list defaults replaced with per-call fresh lists.
    """
    global current_id
    if translation is None:
        translation = [0, 0, 0]
    if axis is None:
        axis = [1, 0, 0]
    if scale is None:
        scale = [1, 1, 1]
    if staticParticles is None:
        staticParticles = []
    tri = {'id': current_id,
           'geometryFile': geoFile,
           'translation': translation,
           'rotationAxis': axis,
           'rotationAngle': angle,
           'scale': scale,
           'staticParticles': staticParticles,
           'restitution': rest,
           'friction': friction
           }
    scene['TriangleModels'].append(tri)
    current_id += 1
    return current_id - 1
######################################################
# add tet models
######################################################
def addTetModel(scene, nodeFile, eleFile, visFile='', translation=None, axis=None, angle=0.0, scale=None,
                rest=0.6, friction=0.3, staticParticles=None):
    """Append a tetrahedral model to scene['TetModels']; return its id.

    'visFile' is only recorded when non-empty.  Fixed: shared mutable list
    defaults replaced with per-call fresh lists.
    """
    global current_id
    if translation is None:
        translation = [0, 0, 0]
    if axis is None:
        axis = [1, 0, 0]
    if scale is None:
        scale = [1, 1, 1]
    if staticParticles is None:
        staticParticles = []
    tet = {'id': current_id,
           'nodeFile': nodeFile,
           'eleFile': eleFile,
           'translation': translation,
           'rotationAxis': axis,
           'rotationAngle': angle,
           'scale': scale,
           'staticParticles': staticParticles,
           'restitution': rest,
           'friction': friction
           }
    if visFile != '':
        tet['visFile'] = visFile
    scene['TetModels'].append(tet)
    current_id += 1
    return current_id - 1
######################################################
# add ball joint
######################################################
def addBallJoint(scene, rbId1, rbId2, position):
    """Connect two rigid bodies with a ball joint anchored at *position*."""
    scene['BallJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position': position,
    })
######################################################
# add ball-on-line joint
######################################################
def addBallOnLineJoint(scene, rbId1, rbId2, position, axis):
    """Connect two rigid bodies with a ball-on-line joint along *axis*."""
    scene['BallOnLineJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position': position,
        'axis': axis,
    })
######################################################
# add hinge joint
######################################################
def addHingeJoint(scene, rbId1, rbId2, position, axis):
    """Connect two rigid bodies with a hinge at *position* about *axis*."""
    scene['HingeJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position': position,
        'axis': axis,
    })
######################################################
# add universal joint
######################################################
def addUniversalJoint(scene, rbId1, rbId2, position, axis1, axis2):
    """Connect two rigid bodies with a universal joint (two rotation axes)."""
    scene['UniversalJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position': position,
        'axis1': axis1,
        'axis2': axis2,
    })
######################################################
# add slider joint
######################################################
def addSliderJoint(scene, rbId1, rbId2, axis):
    """Connect two rigid bodies with a slider joint along *axis*."""
    scene['SliderJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'axis': axis,
    })
######################################################
# add damper joint
######################################################
def addDamperJoint(scene, rbId1, rbId2, axis, stiffness):
    """Connect two rigid bodies with a damper joint of the given stiffness."""
    scene['DamperJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'axis': axis,
        'stiffness': stiffness,
    })
######################################################
# add RigidBodyParticleBallJoint
######################################################
def addRigidBodyParticleBallJoint(scene, rbId, particleId):
    """Attach a particle to a rigid body via a ball joint."""
    scene['RigidBodyParticleBallJoints'].append({
        'rbID': rbId,
        'particleID': particleId,
    })
######################################################
# add TargetAngleMotorHingeJoint
######################################################
def addTargetAngleMotorHingeJoint(scene, rbId1, rbId2, position, axis, target, targetSequence=None, repeatSequence=0):
    """Add a motorised hinge driven either by a fixed target angle or,
    when *targetSequence* is given, by a (time, angle) sequence."""
    joint = {
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position': position,
        'axis': axis,
    }
    if targetSequence is None:
        joint['target'] = target
    else:
        joint['targetSequence'] = targetSequence
        joint['repeatSequence'] = repeatSequence
    scene['TargetAngleMotorHingeJoints'].append(joint)
######################################################
# add TargetVelocityMotorHingeJoint
######################################################
def addTargetVelocityMotorHingeJoint(scene, rbId1, rbId2, position, axis, target):
    """Add a motorised hinge driven by a target angular velocity."""
    scene['TargetVelocityMotorHingeJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position': position,
        'axis': axis,
        'target': target,
    })
######################################################
# add TargetPositionMotorSliderJoint
######################################################
def addTargetPositionMotorSliderJoint(scene, rbId1, rbId2, axis, target):
    """Add a motorised slider driven by a target position."""
    scene['TargetPositionMotorSliderJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'axis': axis,
        'target': target,
    })
######################################################
# add TargetVelocityMotorSliderJoint
######################################################
def addTargetVelocityMotorSliderJoint(scene, rbId1, rbId2, axis, target):
    """Add a motorised slider driven by a target velocity."""
    scene['TargetVelocityMotorSliderJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'axis': axis,
        'target': target,
    })
######################################################
# add spring
######################################################
def addRigidBodySpring(scene, rbId1, rbId2, position1, position2, stiffness):
    """Connect two rigid bodies with a spring between two anchor points."""
    scene['RigidBodySprings'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position1': position1,
        'position2': position2,
        'stiffness': stiffness,
    })
######################################################
# add distance joint
######################################################
def addDistanceJoint(scene, rbId1, rbId2, position1, position2):
    """Connect two rigid bodies with a fixed-distance joint between two anchors."""
    scene['DistanceJoints'].append({
        'bodyID1': rbId1,
        'bodyID2': rbId2,
        'position1': position1,
        'position2': position2,
    })
######################################################
# generate scene
######################################################
def generateScene(name, camPosition=None, camLookat=None):
    """Create an empty scene dict with camera settings and all object lists.

    Fixed: camPosition/camLookat previously used shared mutable list
    defaults; they now default to None and fresh lists are created per call.
    """
    if camPosition is None:
        camPosition = [0, 10, 30]
    if camLookat is None:
        camLookat = [0, 0, 0]
    scene = {'Name': name,
             'cameraPosition': camPosition,
             'cameraLookat': camLookat}
    # Every category the add* helpers append to starts out empty.
    for key in ('RigidBodies', 'BallJoints', 'BallOnLineJoints', 'HingeJoints',
                'UniversalJoints', 'SliderJoints', 'DamperJoints',
                'RigidBodyParticleBallJoints', 'TargetAngleMotorHingeJoints',
                'TargetVelocityMotorHingeJoints', 'TargetPositionMotorSliderJoints',
                'TargetVelocityMotorSliderJoints', 'RigidBodySprings',
                'DistanceJoints', 'TriangleModels', 'TetModels'):
        scene[key] = []
    return scene
######################################################
# write scene to file
######################################################
def writeScene(scene, fileName):
    """Serialize *scene* as pretty-printed, key-sorted JSON to *fileName*.

    Fixed: the file handle is now managed by a 'with' block, so it is
    closed even if serialization or the write raises.
    """
    json_str = json.dumps(scene, sort_keys=True, indent=4, separators=(',', ': '))
    with open(fileName, 'w') as f:
        f.write(json_str)
######################################################
# compute rotation matrix
######################################################
def rotation_matrix(axis, angle):
    """Return the 3x3 rotation matrix for a rotation of *angle* radians
    about *axis* (which need not be normalized).

    Fixed: a (near-)zero axis now raises ValueError instead of printing a
    message and silently returning None, which only failed later with a
    confusing TypeError at the call site.
    """
    x, y, z = axis[0], axis[1], axis[2]
    d = math.sqrt(x * x + y * y + z * z)
    if d < 1.0e-6:
        raise ValueError("Vector of rotation matrix is zero!")
    # Normalize the axis.
    x, y, z = x / d, y / d, z / d
    x2, y2, z2 = x * x, y * y, z * z
    s = math.sin(angle)
    c = math.cos(angle)
    c1 = 1.0 - c
    xyc = x * y * c1
    xzc = x * z * c1
    yzc = y * z * c1
    xs = x * s
    ys = y * s
    zs = z * s
    # Rodrigues' rotation formula in matrix form.
    return [[c + x2 * c1, xyc - zs, xzc + ys],
            [xyc + zs, c + y2 * c1, yzc - xs],
            [xzc - ys, yzc + xs, c + z2 * c1]]
######################################################
# compute matrix vector product
######################################################
def matrix_vec_product(A, v):
    """Return the product of 3x3 matrix *A* and 3-vector *v* as a list."""
    return [sum(A[row][col] * v[col] for col in range(3)) for row in range(3)]
######################################################
# compute cross product
######################################################
def cross_product(a, b):
    """Return the cross product a x b of two 3-vectors."""
    return [a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0]]
######################################################
# scale vector
######################################################
def scale_vector(v, s):
    """Return the 3-vector v scaled component-wise by the scalar s."""
    return [s * v[0], s * v[1], s * v[2]]
######################################################
# add vector
######################################################
def add_vector(v1, v2):
    """Return the component-wise sum of two 3-vectors."""
    return [v1[0] + v2[0], v1[1] + v2[1], v1[2] + v2[2]]
current_id=1 | janbender/PositionBasedDynamics | data/scenes/SceneGenerator.py | Python | mit | 12,344 |
class Action:
    """A labelled, optionally user-selectable menu action.

    Base class: concrete actions override :meth:`do` with real behaviour.
    """

    # Class-level fall-backs; __init__ shadows them per instance.
    label = ""
    selectable = 0

    def __init__(self, label="", s=0):
        self.label = label
        self.selectable = s

    def getLabel(self):
        """Return the display label for this action."""
        return self.label

    def do(self):
        """Execute the action; this base implementation does nothing."""
        pass

    def canSelect(self):
        """Return the selectable flag (truthy when user-selectable)."""
        return self.selectable
| tbdale/crystalfontz-lcd-ui | python/Action.py | Python | mit | 289 |
# Generated by Django 3.2 on 2021-08-25 20:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the villes app.

    Creates the Ville model mapped onto the legacy ``villes_france_free``
    table and adds a single-column index for each lookup field.
    """
    # First migration of this app, hence no dependencies.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Ville',
            # Every column keeps its legacy 'ville_*' name via db_column.
            fields=[
                ('id', models.IntegerField(db_column='ville_id', primary_key=True, serialize=False)),
                ('departement', models.CharField(blank=True, db_column='ville_departement', max_length=3, null=True)),
                ('slug', models.CharField(blank=True, db_column='ville_slug', max_length=255, null=True)),
                ('nom', models.CharField(blank=True, db_column='ville_nom', max_length=45, null=True)),
                ('nom_simple', models.CharField(blank=True, db_column='ville_nom_simple', max_length=45, null=True)),
                ('nom_reel', models.CharField(blank=True, db_column='ville_nom_reel', max_length=45, null=True)),
                ('nom_soundex', models.CharField(blank=True, db_column='ville_nom_soundex', max_length=20, null=True)),
                ('nom_metaphone', models.CharField(blank=True, db_column='ville_nom_metaphone', max_length=22, null=True)),
                ('code_postal', models.CharField(blank=True, db_column='ville_code_postal', max_length=255, null=True)),
                ('commune', models.CharField(blank=True, db_column='ville_commune', max_length=3, null=True)),
                ('code_commune', models.CharField(db_column='ville_code_commune', max_length=5)),
                ('arrondissement', models.IntegerField(blank=True, db_column='ville_arrondissement', null=True)),
                ('canton', models.CharField(blank=True, db_column='ville_canton', max_length=4, null=True)),
                ('amdi', models.IntegerField(blank=True, db_column='ville_amdi', null=True)),
                ('population_2010', models.IntegerField(blank=True, db_column='ville_population_2010', null=True)),
                ('population_1999', models.IntegerField(blank=True, db_column='ville_population_1999', null=True)),
                ('population_2012', models.IntegerField(blank=True, db_column='ville_population_2012', null=True)),
                ('densite_2010', models.IntegerField(blank=True, db_column='ville_densite_2010', null=True)),
                ('surface', models.FloatField(blank=True, db_column='ville_surface', null=True)),
                ('longitude_deg', models.FloatField(blank=True, db_column='ville_longitude_deg', null=True)),
                ('latitude_deg', models.FloatField(blank=True, db_column='ville_latitude_deg', null=True)),
                ('longitude_grd', models.CharField(blank=True, db_column='ville_longitude_grd', max_length=9, null=True)),
                ('latitude_grd', models.CharField(blank=True, db_column='ville_latitude_grd', max_length=8, null=True)),
                ('longitude_dms', models.CharField(blank=True, db_column='ville_longitude_dms', max_length=9, null=True)),
                ('latitude_dms', models.CharField(blank=True, db_column='ville_latitude_dms', max_length=8, null=True)),
                ('zmin', models.IntegerField(blank=True, db_column='ville_zmin', null=True)),
                ('zmax', models.IntegerField(blank=True, db_column='ville_zmax', null=True)),
            ],
            options={
                'db_table': 'villes_france_free',
            },
        ),
        # One index per frequently-queried column.
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['departement'], name='departement_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['slug'], name='slug_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['nom'], name='nom_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['nom_simple'], name='nom_simple_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['nom_reel'], name='nom_reel_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['nom_soundex'], name='nom_soundex_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['nom_metaphone'], name='nom_metaphone_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['code_postal'], name='code_postal_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['commune'], name='commune_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['code_commune'], name='code_commune_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['arrondissement'], name='arrondissement_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['canton'], name='canton_idx'),
        ),
        migrations.AddIndex(
            model_name='ville',
            index=models.Index(fields=['amdi'], name='amdi_idx'),
        ),
    ]
| fab13n/caracole | villes/migrations/0001_initial.py | Python | mit | 5,318 |
__author__ = 'mkrcah'
| mkrcah/propsort | propsort/__init__.py | Python | mit | 22 |
import json
try:
from urllib.parse import urljoin
from urllib.parse import urlencode
except ImportError:
from urlparse import urljoin
from urllib import urlencode
import facepy
from django.conf import settings
from django.utils import timezone
from django.conf import settings
from django.core import signing
from django.core.urlresolvers import reverse
from django.utils import encoding
from . facepy_wrapper import utils
# Attempts made for Graph API calls before giving up (see get_from_graph_api;
# only Facebook error code 1 is retried).
GRAPH_MAX_TRIES = 3
# Request timeout in seconds; overridable via the
# FACEBOOK_AUTH_BACKEND_FACEBOOK_TIMEOUT setting, defaults to 20s.
FACEBOOK_TIMEOUT = getattr(settings, 'FACEBOOK_AUTH_BACKEND_FACEBOOK_TIMEOUT',
                           timezone.timedelta(seconds=20).total_seconds())
# Graph API version; overridable via the FACEBOOK_API_VERSION setting.
FACEBOOK_API_VERSION = getattr(settings, 'FACEBOOK_API_VERSION', '2.1')
class InvalidNextUrl(Exception):
    """Raised when a signed 'next' payload fails signature validation."""
class Next(object):
    """Signs and URL-encodes a 'next' payload so it can round-trip through
    the Facebook redirect flow untampered."""

    salt = 'facebook_auth.urls.Next'

    def encode(self, data):
        """Return *data* as a signed ``next=...`` query string."""
        return urlencode({'next': self.dumps(data)})

    def decode(self, data):
        """Verify and deserialize a payload produced by :meth:`dumps`.

        Raises InvalidNextUrl when the signature does not check out.
        """
        try:
            return self.loads(data)
        except signing.BadSignature:
            raise InvalidNextUrl()

    def dumps(self, obj):
        """Serialize *obj* to signed, URL-safe base64-encoded JSON."""
        payload = json.dumps(
            obj, separators=(',', ':'), sort_keys=True).encode('utf-8')
        encoded = signing.b64_encode(payload)
        return signing.Signer(salt=self.salt).sign(encoded)

    def loads(self, s):
        """Check the signature of *s* and deserialize its JSON payload."""
        verified = encoding.force_bytes(
            signing.Signer(salt=self.salt).unsign(s))
        payload = signing.b64_decode(verified)
        return json.loads(payload.decode('utf-8'))
def redirect_uri(next, close):
    """Build the absolute canvas handler URL with a signed payload.

    The 'next'/'close' values travel in the query string, signed by
    :class:`Next` so the handler can trust them on the way back.
    """
    handler_path = reverse('facebook-auth-handler')
    signed_query = Next().encode({'next': next, 'close': close})
    return urljoin(settings.FACEBOOK_CANVAS_URL,
                   handler_path + "?" + signed_query)
def get_from_graph_api(graphAPI, query):
    """Run *query* against the Graph API, retrying transient failures.

    Facebook error code 1 ("unknown error") is retried up to
    GRAPH_MAX_TRIES times; any other FacepyError propagates immediately.
    """
    for attempt in range(GRAPH_MAX_TRIES):
        try:
            return graphAPI.get(query)
        except facepy.FacepyError as error:
            out_of_tries = attempt == GRAPH_MAX_TRIES - 1
            if out_of_tries or getattr(error, 'code', None) != 1:
                raise
def get_application_graph(version=None):
    """Return a Graph client authorized with an application access token.

    Falls back to the configured FACEBOOK_API_VERSION when *version* is
    not supplied.
    """
    api_version = version or FACEBOOK_API_VERSION
    app_token = facepy.utils.get_application_access_token(
        settings.FACEBOOK_APP_ID,
        settings.FACEBOOK_APP_SECRET,
        api_version=api_version)
    return get_graph(app_token)
def get_graph(*args, **kwargs):
    """Return a Graph client pinned to the configured version and timeout."""
    return utils.get_graph(*args,
                           version=FACEBOOK_API_VERSION,
                           timeout=FACEBOOK_TIMEOUT,
                           **kwargs)
def get_long_lived_access_token(access_token):
    """Exchange a short-lived user token for a long-lived one, using the
    app credentials from Django settings."""
    return utils.get_long_lived_access_token(
        client_id=settings.FACEBOOK_APP_ID,
        client_secret=settings.FACEBOOK_APP_SECRET,
        access_token=access_token,
    )
def get_access_token(code=None, redirect_uri=None):
    """Swap an OAuth *code* for an access token via the configured app.

    *redirect_uri* must match the one used when the code was issued.
    """
    return utils.get_access_token(
        client_id=settings.FACEBOOK_APP_ID,
        client_secret=settings.FACEBOOK_APP_SECRET,
        code=code,
        redirect_uri=redirect_uri,
        timeout=FACEBOOK_TIMEOUT,
    )
| pozytywnie/django-facebook-auth | facebook_auth/utils.py | Python | mit | 2,967 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2018-01-24 19:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Regenerate the template choices for Page.template_key.

    Auto-generated AlterField; the field itself is unchanged apart from
    its choices/default metadata.
    """
    dependencies = [
        ('page', '0009_auto_20180124_0105'),
    ]
    operations = [
        migrations.AlterField(
            model_name='page',
            name='template_key',
            field=models.CharField(choices=[(b'content/pages/page.html', 'Page'), (b'content/pages/index_page.html', 'Index Page')], default=b'content/pages/page.html', max_length=255, verbose_name='template'),
        ),
    ]
| mcmaxwell/idea_digital_agency | idea/feincms/module/page/migrations/0010_auto_20180124_1945.py | Python | mit | 612 |
# This file is part of the Python 2.7 module subprocess.py, included here
# for compatibility with Python 2.6.
#
# It is still under the original, very open PSF license, see the original
# copyright message included below.
#
# subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# This module should remain compatible with Python 2.2, see PEP 291.
#
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
from subprocess import Popen, CalledProcessError, PIPE
def check_output(*popenargs, **kwargs):
    r"""Run command with arguments and return its output as a byte string.
    If the exit code was non-zero it raises a CalledProcessError. The
    CalledProcessError object will have the return code in the returncode
    attribute and output in the output attribute.
    The arguments are the same as for the Popen constructor. Example:
    >>> check_output(["ls", "-l", "/dev/null"])
    'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
    The stdout argument is not allowed as it is used internally.
    To capture standard error in the result, use stderr=STDOUT.
    >>> check_output(["/bin/sh", "-c",
    ...               "ls -l non_existent_file ; exit 0"],
    ...              stderr=STDOUT)
    'ls: non_existent_file: No such file or directory\n'
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = Popen(stdout=PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        # Bug fix: the docstring promises the captured output on the
        # exception, but it was never attached. Mirror CPython 2.7's
        # behaviour by passing it through.
        raise CalledProcessError(retcode, cmd, output=output)
    return output
| auag92/n2dm | Asap-3.8.4/OpenKIMexport/subprocess_py27.py | Python | mit | 1,883 |
"""
File Adapter Module
"""
import time
import uuid
from flask import request, jsonify
from ..base import BaseAdapter
class FileAdapter(BaseAdapter):
    """ File Adapter Class """
    def create_routes(self, app, mongo):
        """ Routes Creator

        Registers upload/download endpoints on *app*, storing blobs in
        the mongo 'datas' GridFS base.
        """
        @app.route('/api/upload', methods=['POST'])
        def save_upload():
            """ File Upload POST Route """
            # Stored name = UUID3(basename + current time) + original
            # extension, so repeated uploads of the same file get
            # distinct names.
            # NOTE(review): split('.')[1] assumes exactly one dot in the
            # client filename -- "a.tar.gz" would lose ".gz" and an
            # extension-less name raises IndexError; confirm callers
            # always send "<name>.<ext>".
            filename = uuid.uuid3(uuid.NAMESPACE_DNS, request.files['file'].filename.split('.')[0] + str(time.time())).hex + "." + request.files['file'].filename.split('.')[1]
            mongo.save_file(filename, request.files['file'], base = "datas")
            return jsonify(filename)
        @app.route('/api/upload/<path:filename>')
        def get_upload(filename):
            """ File Upload GET Route """
            response = mongo.send_file(filename, base = "datas")
            # Force re-fetch on every request: uploads must never be
            # served stale from an intermediary or browser cache.
            response.cache_control.public = False
            response.cache_control.max_age = 0
            response.cache_control.no_cache = True
            return response
    def run(self):
        """ Main Entry for Adapter

        Wires the routes using the app and mongo handle provided by
        BaseAdapter (self.app / self.scope).
        """
        self.create_routes(self.app, self.scope["mongo"])
        print('Hello from FileAdapter')
| noahziheng/freeiot | libfreeiot/adapters/file/main.py | Python | mit | 1,195 |
import urllib2
import wikipedia
import json
from bs4 import BeautifulSoup
if __name__ == '__main__':
    # Scrape name/summary/image for every charity linked from the
    # Wikipedia list page and dump the results to charityData.json.
    wikipedia.set_rate_limiting(True)
    charity_list_url = 'List_of_charitable_foundations'
    wikipage_list = wikipedia.page(charity_list_url)
    charity_results = []
    for charity_name in wikipage_list.links:
        try:
            wikipage_charity = wikipedia.page(charity_name)
            name = charity_name
            # print as a function call: valid on Python 2 and 3 (the old
            # print statements were syntax errors under Python 3).
            print('Processing : %s' % name)
            text = wikipage_charity.summary
            # Prefer the first image whose URL mentions the charity and
            # stop there; otherwise fall back to the page's last image.
            # (Bug fix: the old loop's "image_url = image_url" was a
            # no-op, so a matching image was always overwritten by
            # whatever image came last.)
            image_url = None
            for candidate_url in wikipage_charity.images:
                image_url = candidate_url
                if charity_name.lower() in candidate_url.lower():
                    break
            if name is not None and text is not None and image_url is not None:
                charity_results.append({"name": name,
                                        "photoUrl": image_url,
                                        "website": "en.wikipedia.org/wiki/%s" % name,
                                        "mission": text})
            print('Status : %s' % ('Added' if text is not None and image_url is not None else 'None'))
        except Exception:
            # Keep scraping the remaining charities on any page error.
            # (Was a bare except, which also swallowed KeyboardInterrupt.)
            print("Raise error/exception")
    with open("charityData.json", "w") as outfile:
        json.dump(charity_results, outfile)
| MrMallIronmaker/donatio | pullCharityData.py | Python | mit | 1,208 |
#!/usr/bin/env python
# coding=utf-8
# code by kbdancer@92ez.com
from threading import Thread
from telnetlib import Telnet
import requests
import sqlite3
import queue
import time
import sys
import os
def ip2num(ip):
    """Pack a dotted-quad IPv4 string into its 32-bit integer value."""
    a, b, c, d = (int(octet) for octet in ip.split('.'))
    return (a << 24) | (b << 16) | (c << 8) | d
def num2ip(num):
    """Unpack a 32-bit integer into its dotted-quad IPv4 string."""
    octets = tuple((num >> shift) & 0xff for shift in (24, 16, 8, 0))
    return '%s.%s.%s.%s' % octets
def ip_range(start, end):
    """List every address from *start* to *end* inclusive as strings,
    skipping x.x.x.0 network addresses (low octet zero)."""
    addresses = []
    for value in range(ip2num(start), ip2num(end) + 1):
        if value & 0xff:  # drop .0 (network) addresses
            addresses.append(num2ip(value))
    return addresses
class Database:
    """Thin wrapper around the scanner's sqlite3 results database.

    Failed writes are printed and rolled back instead of raising,
    matching the scanner's fire-and-forget style.
    """

    # Database file lives next to the script.
    db = sys.path[0] + "/TPLINK_KEY.db"
    charset = 'utf8'

    def __init__(self):
        self.connection = sqlite3.connect(self.db)
        # Hand back raw str values instead of decoding.
        self.connection.text_factory = str
        self.cursor = self.connection.cursor()

    def insert(self, query, params):
        """Execute an INSERT; commit on success, roll back on error."""
        self._execute_write(query, params)

    def update(self, query, params):
        """Execute an UPDATE; commit on success, roll back on error."""
        self._execute_write(query, params)

    def _execute_write(self, query, params):
        # Shared body for insert/update (they were identical).
        try:
            self.cursor.execute(query, params)
            self.connection.commit()
        except Exception as exc:
            print(exc)
            self.connection.rollback()

    def query(self, query, params):
        """Run a SELECT on a fresh cursor and return all rows."""
        cursor = self.connection.cursor()
        cursor.execute(query, params)
        return cursor.fetchall()

    def __del__(self):
        self.connection.close()
def b_thread(ip_address_list):
    """Scan every host in *ip_address_list* with a pool of worker threads.

    Pool size comes from argv[1]; all workers drain one shared queue and
    the call blocks until every worker has finished.
    """
    work_queue = queue.Queue()
    for host in ip_address_list:
        work_queue.put(host)
    workers = [tThread(work_queue) for _ in range(int(sys.argv[1]))]
    for worker in workers:
        try:
            worker.daemon = True  # don't block interpreter shutdown
            worker.start()
        except Exception as exc:
            print(exc)
    for worker in workers:
        worker.join()
class tThread(Thread):
    """Worker thread: pops hosts off the shared queue and probes each."""

    def __init__(self, queue_obj):
        super().__init__()
        self.queue = queue_obj

    def run(self):
        # Drain the queue; a failing host never kills the worker.
        while not self.queue.empty():
            host = self.queue.get()
            try:
                get_info(host)
            except Exception as exc:
                print(exc)
                continue
def get_position_by_ip(host):
    """Geolocate *host* via the ip-api.com JSON service.

    Returns [country, region, city, isp]; on any failure the error is
    printed and None is returned, which callers must tolerate.
    """
    try:
        ip_url = "http://ip-api.com/json/{ip}?lang=zh-CN".format(ip=host)
        header = {"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0"}
        payload = requests.get(url=ip_url, headers=header, timeout=10).json()
        return [payload.get("country"),
                payload.get('regionName'),
                payload.get('city'),
                payload.get('isp')]
    except Exception as exc:
        print(exc)
def get_info(host):
    """Telnet into a TP-LINK/Mercury router at *host* with default
    credentials, scrape its WiFi SSID/key and LAN MAC, and record any
    new result in the scanlog table.

    All failures (unreachable host, bad parse) are swallowed: the
    scanner just moves on to the next address.
    """
    # Factory-default credentials these routers ship with.
    username = "admin"
    password = "admin"
    telnet_timeout = 15
    cmd_timeout = 5
    # NOTE(review): on Python 3 telnetlib's read_until/write expect
    # bytes, but str is passed here -- confirm this script is meant to
    # run under an interpreter where that works.
    try:
        t = Telnet(host, timeout=telnet_timeout)
        t.read_until("username:", cmd_timeout)
        t.write(username + "\n")
        t.read_until("password:", cmd_timeout)
        t.write(password + "\n")
        # Dump the wireless config, then the LAN info.
        t.write("wlctl show\n")
        t.read_until("SSID", cmd_timeout)
        wifi_str = t.read_very_eager()
        t.write("lan show info\n")
        t.read_until("MACAddress", cmd_timeout)
        lan_str = t.read_very_eager()
        t.close()
        if len(wifi_str) > 0:
            # clear extra space
            wifi_str = "".join(wifi_str.split())
            wifi_str = wifi_str
            # get SID KEY MAC
            # SSID sits between the leading char and the 'QSS' marker;
            # the key (if any) between 'Key=' and 'cmd'.
            wifi_ssid = wifi_str[1:wifi_str.find('QSS')]
            wifi_key = wifi_str[wifi_str.find('Key=') + 4:wifi_str.find('cmd')] if wifi_str.find('Key=') != -1 else '无密码'
            router_mac = lan_str[1:lan_str.find('__')].replace('\r\n', '')
            current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            try:
                my_sqlite_db = Database()
                # Insert only results not already recorded for this
                # ssid/key/mac triple.
                query_info = """select * from scanlog where ssid=? and key=? and mac=?"""
                query_result = my_sqlite_db.query(query_info, [wifi_ssid, wifi_key, router_mac])
                if len(query_result) < 1:
                    # Geolocation lookup; may return None on failure, in
                    # which case the indexing below raises and is caught.
                    position_data = get_position_by_ip(host)
                    country = position_data[0]
                    province = position_data[1]
                    city = position_data[2]
                    isp = position_data[3]
                    insert_info = """INSERT INTO scanlog (`host`,`mac`,`ssid`,`wifikey`,`country`,`province`,`city`,`isp`) VALUES (?,?,?,?,?,?,?,?)"""
                    my_sqlite_db.insert(insert_info, [host, router_mac, wifi_ssid, wifi_key, country, province, city, isp])
                    print('[√] [%s] Info %s %s %s => Inserted!' % (current_time, host, wifi_ssid, wifi_key))
                else:
                    print('[x] [%s] Found %s %s %s in DB, do nothing!' % (current_time, host, wifi_ssid, wifi_key))
            except Exception as e:
                print(e)
    except Exception as e:
        # Unreachable/incompatible host: silently skip it.
        pass
if __name__ == '__main__':
    print('==========================================')
    print(' Scan TPLINK(MERCURY) wifi key by telnet')
    print(' Author 92ez.com')
    print('==========================================')
    # Usage: scan.py <thread_count> <begin_ip>-<end_ip>
    begin_ip = sys.argv[2].split('-')[0]
    end_ip = sys.argv[2].split('-')[1]
    ip_list = ip_range(begin_ip, end_ip)
    current_pid = os.getpid()
    print('\n[*] Total %d IP...' % len(ip_list))
    print('\n================ Running =================')
    try:
        b_thread(ip_list)
    except KeyboardInterrupt:
        # Ctrl-C: hard-kill the whole process so daemon threads die too.
        print('\n[*] Kill all thread.')
        os.kill(current_pid, 9)
| kbdancer/TPLINKKEY | scan.py | Python | mit | 5,658 |
"""Example process file."""
from mapchete.errors import MapcheteNodataTile
from shapely.geometry import shape
def execute(mp):
    """User defined process: emit each input feature with its country
    name, id and polygon area as properties."""
    with mp.open("file1") as vector_file:
        # Empty tile: tell mapchete there is nothing to write.
        if vector_file.is_empty():
            raise MapcheteNodataTile
    # NOTE: read() runs after the context manager exits, mirroring the
    # original control flow.
    out_features = []
    for feature in vector_file.read():
        props = feature["properties"]
        out_features.append(
            dict(
                geometry=feature["geometry"],
                properties=dict(
                    name=props.get("NAME_0", None),
                    id=props.get("ID_0", None),
                    area=shape(feature["geometry"]).area,
                ),
            )
        )
    return out_features
| ungarj/mapchete | test/testdata/geojson_test.py | Python | mit | 717 |
from geoscript.style import util
from geoscript.style.expression import Expression
from geoscript.style.font import Font
from geoscript.style.fill import Fill
from geoscript.style.halo import Halo
from geoscript.style.color import Color
from geoscript.style.property import Property
from geoscript.style.symbolizer import Symbolizer
from org.geotools.styling import TextSymbolizer
class Label(Symbolizer):
  """
  Symbolizer for labelling a geometry.
  The ``property`` argument specifies the field or attribute with which to generate labels from.
  The ``font`` and ``color`` arguments specify the label font and color
  respectively.
  >>> Label('foo')
  Label(property=foo)
  """

  def __init__(self, property, font=None, color=None):
    Symbolizer.__init__(self)
    # Wrap raw arguments in their style classes; font/color stay None
    # when not supplied so _apply can skip them.
    self.property = Property(property)
    self.color = Color(color) if color else None
    self._font = Font(font) if font else None
    self._halo = None
    self._placement = None

  def font(self, font):
    """
    Sets the font for this label. The ``font`` argument is a string describing the
    font attributes. See :class:`Font <geoscript.style.font.Font>` for supported
    syntax.
    >>> label = Label('foo').font('italic bold 12px "Times New Roman"')
    """
    self._font = Font(font)
    return self

  def halo(self, fill=None, radius=None):
    """
    Generates a halo for this label.
    The ``fill`` and ``radius`` arguments specify the :class:`Fill` and radius to
    use for the halo.
    >>> from geoscript.style import Fill
    >>> label = Label('foo').halo(Fill('#ffffff'), 2)
    """
    self._halo = Halo(fill, radius)
    return self

  def point(self, anchor=(0.5,0.5), displace=(0,0), rotate=0):
    """
    Sets the label placement relative to a point.
    The ``anchor`` argument is a tuple that specifies how the label should be
    anchored along an xy axis relative to the geometry being labeled. Allowable
    values range from (0,0) to (1,1) ordered from the bottom left corner to the top
    right corner of the label.
    The ``displacement`` argument is a tuple that specifies how the label should be
    displaced along an xy axis.
    The ``rotate`` argument specifies in degrees the angle at which to rotate the
    label.
    >>> label = Label('foo').point((0.5,0), (0,5))
    """
    # Build a GeoTools PointPlacement from the anchor/displacement/rotation
    # expressions.
    f = self.factory
    ap = f.createAnchorPoint(Expression(anchor[0]).expr,Expression(anchor[1]).expr)
    dp = f.createDisplacement(Expression(displace[0]).expr,
      Expression(displace[1]).expr)
    self._placement = f.createPointPlacement(ap, dp, Expression(rotate).expr)
    return self

  def linear(self, offset=0, gap=None, igap=None, align=False, follow=False,
             group=False, displace=None, repeat=None):
    """
    Sets the label placement relative to a line.
    The ``offset`` argument specifies the perpindicular distance from the line at
    which to position the label.
    The ``align`` argument specifies whether to align the label along the line. The
    ``follow`` argument specifies whether to curve the label in order to force it
    to follow the line.
    >>> label = Label('foo').linear(align=True, follow=True)
    """
    f = self.factory
    lp = f.createLinePlacement(Expression(offset).expr)
    lp.setAligned(align)
    # NOTE(review): repeat is not set on the placement (line kept below
    # for reference); it is passed through as a vendor option instead.
    #lp.setRepeated(repeat)
    if gap:
      lp.setGap(Expression(gap).expr)
    if igap:
      lp.setInitialGap(Expression(igap).expr)
    self._placement = lp
    # follow/group/displace/repeat map onto GeoTools vendor options.
    self.options = {'followLine': follow, 'group': group}
    if displace:
      self.options['maxDisplacement'] = displace
    if repeat:
      self.options['repeat'] = repeat
    return self

  def _prepare(self, rule):
    # Ensure the rule carries TextSymbolizers and configure each of them.
    syms = util.symbolizers(rule, TextSymbolizer)
    for sym in syms:
      self._apply(sym)

  def _apply(self, sym):
    # Copy label, font, halo, fill and placement onto the GeoTools
    # TextSymbolizer, skipping whatever was never configured.
    Symbolizer._apply(self, sym)
    sym.setLabel(self.property.expr)
    if self._font:
      self._font._apply(sym)
    if self._halo:
      self._halo._apply(sym)
    if self.color:
      sym.setFill(Fill(self.color)._fill())
    if self._placement:
      sym.setLabelPlacement(self._placement)

  def __repr__(self):
    return self._repr('property')
| jericks/geoscript-py | geoscript/style/label.py | Python | mit | 4,163 |
from __future__ import division
import numpy as np,opticstools,matplotlib.pyplot as plt,math,scipy.ndimage,copy
from skimage.measure import block_reduce
import astropy.io.fits as pyfits
import os,sys,contratio as crat
# Command line: crat_from_object.py raw_dir cube_dir plot_dir num_cals name...
# The object name may contain spaces, so it occupies argv[5:].
nameList = sys.argv[5:len(sys.argv)]
if len(sys.argv)<4:
    print('Useage: crat_from_object.py raw_directory cube_directory plot_directory num_cals object_name (with spaces)')
    sys.exit()
#Combine name into single string
name = ''
if len(nameList)>1:
    for ii in range(0,len(nameList)):
        name+=nameList[ii]
        if ii<len(nameList)-1:
            name+=' '
#Remove Spaces From Object Name
objNoSpaces = name.split(' ')
objName = ''.join(objNoSpaces)
rawDir = sys.argv[1]
plotDir = sys.argv[3]
infoFile = open(rawDir+'/blockinfo.txt','r')
elements = []
ii = 0
lineNums = []
all_elements = []
cubeDir = sys.argv[2]
"""Code to select target cubes based on object name and calibration cubes close to the target cubes"""
#Find line and frame numbers where the name appears
# Skip the header line; keep only frames whose cube file actually exists.
for line in infoFile:
    ii+=1
    if ii==1:
        continue
    entry = line.split(' ')
    if name in line and os.path.isfile(cubeDir+'/cube'+str(int(entry[0]))+'.fits'):
        elements.append(int(entry[0]))
        lineNums.append(ii)
    all_elements.append(int(entry[0]))
cal_els = []
tgt_cubes = []
cal_cubes = []
#Create target cube list
# (Rebuilt again below together with the image lists.)
for ii in range(0,len(elements)):
    tgt_cubes.append(cubeDir+'/cube'+str(elements[ii])+'.fits')
ii = 0
numCals = int(sys.argv[4])
#Find calibrators from objects nearby in the list. Go one step in both directions and add frame to
#list of calibrators.
# ii walks backwards and jj forwards from each target's line until
# numCals calibrator frames (with existing cubes) are collected.
for kk in range(0,len(elements)):
    ii = lineNums[kk]
    jj = lineNums[kk]
    while ii>=0 or jj<len(all_elements):
        if ii>=0 and os.path.isfile(cubeDir+'/cube'+str(all_elements[ii-2])+'.fits') and ii not in lineNums:
            cal_els.append(all_elements[ii-2])
            ii-=1
        elif not os.path.isfile(cubeDir+'/cube'+str(all_elements[ii-2])+'.fits') or ii in lineNums:
            ii-=1
        if len(cal_els)==numCals:
            break
        if jj<len(all_elements) and os.path.isfile(cubeDir+'/cube'+str(all_elements[jj-2])+'.fits') and jj not in lineNums:
            cal_els.append(all_elements[jj-2])
            jj+=1
        elif jj>=len(all_elements) or not os.path.isfile(cubeDir+'/cube'+str(all_elements[jj-2])+'.fits') or jj in lineNums:
            jj+=1
        if len(cal_els)==numCals:
            break
    if len(cal_els)==numCals:
        break
tgt_cubes = []
cal_cubes = []
tgt_ims = []
cal_ims = []
pas = []
#Create target cube list
# Flatten every target cube into individual frames plus their parallactic
# angles (from FITS extension 1, column 'pa').
for ii in range(0,len(elements)):
    tgt_cubes.append(cubeDir+'/cube'+str(elements[ii])+'.fits')
    cube = pyfits.getdata(tgt_cubes[ii])
    pa = pyfits.getdata(tgt_cubes[ii],1)['pa']
    for jj in range(0,len(cube)):
        tgt_ims.append(cube[jj])
        pas.append(pa[jj])
#Create calibrator list
cal_objects = []
for ii in range(0,len(cal_els)):
    cal_cubes.append(cubeDir+'/cube'+str(cal_els[ii])+'.fits')
    cube = pyfits.getdata(cal_cubes[ii])
    cal = pyfits.getheader(cal_cubes[ii])['OBJECT']
    for jj in range(0,len(cube)):
        cal_ims.append(cube[jj])
        cal_objects.append(cal)
tgt_ims = np.array(tgt_ims)
cal_ims = np.array(cal_ims)
pas = np.array(pas)
"""Code to create artificial companion"""
sep = 20 #Companion Separation in pixels
contrast = 0.1 #Contrast Ratio
newIms = np.zeros(tgt_ims.shape)
# Inject a scaled, shifted copy of each frame as a fake companion; the
# shift is rotated by each frame's parallactic angle so the companion
# stays fixed on sky.
for ii in range(0,tgt_ims.shape[0]):
    im = tgt_ims[ii]
    angle = pas[ii]
    #Put companion directly east of centre
    xShift = -int(sep*np.cos((math.pi/180)*angle))
    yShift = int(sep*np.sin((math.pi/180)*angle))
    newIms[ii] = im + contrast*np.roll(np.roll(im,xShift,axis=1),yShift,axis=0)
# Write the companion-injected cube (frames + pa table), then run the
# contrast-ratio pipeline against the calibrator cubes.
outfile = plotDir+'/cube_with_companion.fits'
oldHeader = pyfits.getheader(tgt_cubes[0])
header = pyfits.Header(oldHeader)
hdu = pyfits.PrimaryHDU(newIms,header)
col1 = pyfits.Column(name='pa', format='E', array=pas)
#col2 = pyfits.Column(name='paDiff', format='E', array=paDiff)
hdu2 = pyfits.BinTableHDU.from_columns(pyfits.ColDefs([col1]))
hdulist = pyfits.HDUList([hdu,hdu2])
hdulist.writeto(outfile,clobber=True)
good_ims = crat.choose_psfs([outfile],cal_cubes,plotDir)
crat_file = crat.best_psf_subtract(good_ims,plotDir)
| mikeireland/pynrm | fake_companion.py | Python | mit | 4,240 |
class Blood(object):
    """
    Most characters will have ordinary blood but some could have acidic blood or with other properties.
    """
    # Bug fix: the description above used to sit *after* the attributes,
    # making it a discarded string statement instead of the class
    # docstring; it is now actually exposed as Blood.__doc__.
    # uid/name identify this blood type to the bodies package.
    uid = "blood"
    name = "Blood"
| ChrisLR/Python-Roguelike-Template | bodies/blood/base.py | Python | mit | 178 |
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from awscli.customizations.emr import helptext
from awscli.customizations.emr.createdefaultroles import EC2_ROLE_NAME
# Argument schema for the EMR CLI's instance-groups option: a list of
# MASTER/CORE/TASK group definitions with optional EBS and Auto Scaling
# sub-structures. The "description" strings surface in generated CLI help.
INSTANCE_GROUPS_SCHEMA = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "Name": {
                "type": "string",
                "description":
                    "Friendly name given to the instance group."
            },
            "InstanceGroupType": {
                "type": "string",
                "description":
                    "The type of the instance group in the cluster.",
                "enum": ["MASTER", "CORE", "TASK"],
                "required": True
            },
            "BidPrice": {
                "type": "string",
                "description":
                    "Bid price for each Amazon EC2 instance in the "
                    "instance group when launching nodes as Spot Instances, "
                    "expressed in USD."
            },
            "InstanceType": {
                "type": "string",
                "description":
                    "The Amazon EC2 instance type for all instances "
                    "in the instance group.",
                "required": True
            },
            "InstanceCount": {
                "type": "integer",
                "description": "Target number of Amazon EC2 instances "
                "for the instance group",
                "required": True
            },
            "EbsConfiguration": {
                "type": "object",
                "description": "EBS configuration that will be associated with the instance group.",
                "properties": {
                    "EbsOptimized": {
                        "type": "boolean",
                        "description": "Boolean flag used to tag EBS-optimized instances.",
                    },
                    "EbsBlockDeviceConfigs": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "VolumeSpecification" : {
                                    "type": "object",
                                    "description": "The EBS volume specification that will be created and attached to every instance in this instance group.",
                                    "properties": {
                                        "VolumeType": {
                                            "type": "string",
                                            "description": "The EBS volume type that is attached to all the instances in the instance group. Valid types are: gp2, io1, and standard.",
                                            "required": True
                                        },
                                        "SizeInGB": {
                                            "type": "integer",
                                            "description": "The EBS volume size, in GB, that is attached to all the instances in the instance group.",
                                            "required": True
                                        },
                                        "Iops": {
                                            "type": "integer",
                                            "description": "The IOPS of the EBS volume that is attached to all the instances in the instance group.",
                                        }
                                    }
                                },
                                "VolumesPerInstance": {
                                    "type": "integer",
                                    "description": "The number of EBS volumes that will be created and attached to each instance in the instance group.",
                                }
                            }
                        }
                    }
                }
            },
            "AutoScalingPolicy": {
                "type": "object",
                "description": "Auto Scaling policy that will be associated with the instance group.",
                "properties": {
                    "Constraints": {
                        "type": "object",
                        "description": "The Constraints that will be associated to an Auto Scaling policy.",
                        "properties": {
                            "MinCapacity": {
                                "type": "integer",
                                "description": "The minimum value for the instances to scale in"
                                               " to in response to scaling activities."
                            },
                            "MaxCapacity": {
                                "type": "integer",
                                "description": "The maximum value for the instances to scale out to in response"
                                               " to scaling activities"
                            }
                        }
                    },
                    "Rules": {
                        "type": "array",
                        "description": "The Rules associated to an Auto Scaling policy.",
                        "items": {
                            "type": "object",
                            "properties": {
                                "Name": {
                                    "type": "string",
                                    "description": "Name of the Auto Scaling rule."
                                },
                                "Description": {
                                    "type": "string",
                                    "description": "Description of the Auto Scaling rule."
                                },
                                "Action": {
                                    "type": "object",
                                    "description": "The Action associated to an Auto Scaling rule.",
                                    "properties": {
                                        "Market": {  # Required for Instance Fleets
                                            "type": "string",
                                            "description": "Market type of the Amazon EC2 instances used to create a "
                                                           "cluster node by Auto Scaling action.",
                                            "enum": ["ON_DEMAND", "SPOT"]
                                        },
                                        "SimpleScalingPolicyConfiguration": {
                                            "type": "object",
                                            "description": "The Simple scaling configuration that will be associated"
                                                           "to Auto Scaling action.",
                                            "properties": {
                                                "AdjustmentType": {
                                                    "type": "string",
                                                    "description": "Specifies how the ScalingAdjustment parameter is "
                                                                   "interpreted.",
                                                    "enum": ["CHANGE_IN_CAPACITY", "PERCENT_CHANGE_IN_CAPACITY",
                                                             "EXACT_CAPACITY"]
                                                },
                                                "ScalingAdjustment": {
                                                    "type": "integer",
                                                    "description": "The amount by which to scale, based on the "
                                                                   "specified adjustment type."
                                                },
                                                "CoolDown": {
                                                    "type": "integer",
                                                    "description": "The amount of time, in seconds, after a scaling "
                                                                   "activity completes and before the next scaling "
                                                                   "activity can start."
                                                }
                                            }
                                        }
                                    }
                                },
                                "Trigger": {
                                    "type": "object",
                                    "description": "The Trigger associated to an Auto Scaling rule.",
                                    "properties": {
                                        "CloudWatchAlarmDefinition": {
                                            "type": "object",
                                            "description": "The Alarm to be registered with CloudWatch, to trigger"
                                                           " scaling activities.",
                                            "properties": {
                                                "ComparisonOperator": {
                                                    "type": "string",
                                                    "description": "The arithmetic operation to use when comparing the"
                                                                   " specified Statistic and Threshold."
                                                },
                                                "EvaluationPeriods": {
                                                    "type": "integer",
                                                    "description": "The number of periods over which data is compared"
                                                                   " to the specified threshold."
                                                },
                                                "MetricName": {
                                                    "type": "string",
                                                    "description": "The name for the alarm's associated metric."
                                                },
                                                "Namespace": {
                                                    "type": "string",
                                                    "description": "The namespace for the alarm's associated metric."
                                                },
                                                "Period": {
                                                    "type": "integer",
                                                    "description": "The period in seconds over which the specified "
                                                                   "statistic is applied."
                                                },
                                                "Statistic": {
                                                    "type": "string",
                                                    "description": "The statistic to apply to the alarm's associated "
                                                                   "metric."
                                                },
                                                "Threshold": {
                                                    "type": "double",
                                                    "description": "The value against which the specified statistic is "
                                                                   "compared."
                                                },
                                                "Unit": {
                                                    "type": "string",
                                                    "description": "The statistic's unit of measure."
                                                },
                                                "Dimensions": {
                                                    "type": "array",
                                                    "description": "The dimensions for the alarm's associated metric.",
                                                    "items": {
                                                        "type": "object",
                                                        "properties": {
                                                            "Key": {
                                                                "type": "string",
                                                                "description": "Dimension Key."
                                                            },
                                                            "Value": {
                                                                "type": "string",
                                                                "description": "Dimension Value."
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
# Argument schema for the EMR CLI's ec2-attributes option: key pair,
# networking (VPC subnet / AZ), instance profile and security groups.
EC2_ATTRIBUTES_SCHEMA = {
    "type": "object",
    "properties": {
        "KeyName": {
            "type": "string",
            "description":
                "The name of the Amazon EC2 key pair that can "
                "be used to ssh to the master node as the user 'hadoop'."
        },
        "SubnetId": {
            "type": "string",
            "description":
                "To launch the cluster in Amazon "
                "Virtual Private Cloud (Amazon VPC), set this parameter to "
                "the identifier of the Amazon VPC subnet where you want "
                "the cluster to launch. If you do not specify this value, "
                "the cluster is launched in the normal Amazon Web Services "
                "cloud, outside of an Amazon VPC. "
        },
        "AvailabilityZone": {
            "type": "string",
            "description": "The Availability Zone the cluster will run in."
        },
        "InstanceProfile": {
            "type": "string",
            "description":
                "An IAM role for the cluster. The EC2 instances of the cluster"
                " assume this role. The default role is " +
                EC2_ROLE_NAME + ". In order to use the default"
                " role, you must have already created it using the "
                "<code>create-default-roles</code> command. "
        },
        "EmrManagedMasterSecurityGroup": {
            "type": "string",
            "description": helptext.EMR_MANAGED_MASTER_SECURITY_GROUP
        },
        "EmrManagedSlaveSecurityGroup": {
            "type": "string",
            "description": helptext.EMR_MANAGED_SLAVE_SECURITY_GROUP
        },
        "ServiceAccessSecurityGroup": {
            "type": "string",
            "description": helptext.SERVICE_ACCESS_SECURITY_GROUP
        },
        "AdditionalMasterSecurityGroups": {
            "type": "array",
            "description": helptext.ADDITIONAL_MASTER_SECURITY_GROUPS,
            "items": {
                "type": "string"
            }
        },
        "AdditionalSlaveSecurityGroups": {
            "type": "array",
            "description": helptext.ADDITIONAL_SLAVE_SECURITY_GROUPS,
            "items": {
                "type": "string"
            }
        }
    }
}
# Schema for --applications: each entry names an application to install on
# the cluster, optionally with extra arguments.
APPLICATIONS_SCHEMA = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "Name": {
                "type": "string",
                "description": "Application name.",
                "enum": ["MapR", "HUE", "HIVE", "PIG", "HBASE",
                         "IMPALA", "GANGLIA", "HADOOP", "SPARK"],
                "required": True
            },
            "Args": {
                "type": "array",
                "description": "A list of arguments to pass to the application.",
                "items": {"type": "string"}
            }
        }
    }
}
# Schema for --bootstrap-actions: scripts run on each node before Hadoop
# starts and before any steps execute.
BOOTSTRAP_ACTIONS_SCHEMA = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "Name": {
                "type": "string",
                "default": "Bootstrap Action"
            },
            "Path": {
                "type": "string",
                "description": "Location of the script to run during a "
                               "bootstrap action. Can be either a location "
                               "in Amazon S3 or on a local file system.",
                "required": True
            },
            "Args": {
                "type": "array",
                "description": "A list of command line arguments to pass "
                               "to the bootstrap action script",
                "items": {"type": "string"}
            }
        }
    }
}
# Schema for --steps: the work items submitted to the cluster, executed in
# the order given.
STEPS_SCHEMA = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "Type": {
                "type": "string",
                "description": "The type of a step to be added to the cluster.",
                "default": "custom_jar",
                "enum": ["CUSTOM_JAR", "STREAMING", "HIVE", "PIG", "IMPALA"],
            },
            "Name": {
                "type": "string",
                "description": "The name of the step. ",
            },
            "ActionOnFailure": {
                "type": "string",
                "description": "The action to take if the cluster step fails.",
                "enum": ["TERMINATE_CLUSTER", "CANCEL_AND_WAIT", "CONTINUE"],
                "default": "CONTINUE"
            },
            "Jar": {
                "type": "string",
                "description": "A path to a JAR file run during the step.",
            },
            "Args": {
                "type": "array",
                "description": "A list of command line arguments to pass to the step.",
                "items": {"type": "string"}
            },
            "MainClass": {
                "type": "string",
                "description": "The name of the main class in the specified "
                               "Java file. If not specified, the JAR file "
                               "should specify a Main-Class in its manifest "
                               "file."
            },
            "Properties": {
                "type": "string",
                "description": "A list of Java properties that are set when "
                               "the step runs. You can use these properties "
                               "to pass key value pairs to your main "
                               "function."
            }
        }
    }
}
# Schema for --restore-from-hbase-backup: identifies an existing HBase
# backup (directory plus optional version) to restore the cluster from.
HBASE_RESTORE_FROM_BACKUP_SCHEMA = {
    "type": "object",
    "properties": {
        "Dir": {"type": "string",
                "description": helptext.HBASE_BACKUP_DIR},
        "BackupVersion": {"type": "string",
                          "description": helptext.HBASE_BACKUP_VERSION}
    }
}
# Schema for --emrfs: EMRFS consistent-view, retry, and encryption options.
EMR_FS_SCHEMA = {
    "type": "object",
    "properties": {
        "Consistent": {
            "type": "boolean",
            "description": "Enable EMRFS consistent view."
        },
        "SSE": {
            "type": "boolean",
            "description": "Enable Amazon S3 server-side encryption on "
                           "files written to S3 by EMRFS."
        },
        "RetryCount": {
            "type": "integer",
            "description": "The maximum number of times to retry upon S3 inconsistency."
        },
        "RetryPeriod": {
            "type": "integer",
            "description": "The amount of time (in seconds) until the "
                           "first retry. Subsequent retries use an "
                           "exponential back-off."
        },
        "Args": {
            "type": "array",
            "description": "A list of arguments to pass for additional "
                           "EMRFS configuration.",
            "items": {"type": "string"}
        },
        "Encryption": {
            "type": "string",
            "description": "EMRFS encryption type.",
            "enum": ["SERVERSIDE", "CLIENTSIDE"]
        },
        "ProviderType": {
            "type": "string",
            "description": "EMRFS client-side encryption provider type.",
            "enum": ["KMS", "CUSTOM"]
        },
        "KMSKeyId": {
            "type": "string",
            "description": "AWS KMS's customer master key identifier",
        },
        "CustomProviderLocation": {
            "type": "string",
            "description": "Custom encryption provider JAR location."
        },
        "CustomProviderClass": {
            "type": "string",
            "description": "Custom encryption provider full class name."
        }
    }
}
# Schema for --tags: free-form strings attached to the cluster.
TAGS_SCHEMA = {
    "type": "array",
    "items": {"type": "string"}
}
| mnahm5/django-estore | Lib/site-packages/awscli/customizations/emr/argumentschema.py | Python | mit | 22,109 |
import smtplib
from email.mime.text import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.Header import Header
from email import Charset
class MailConfig(object):
    """Plain-value holder for the SMTP settings used by MailDispatcher."""

    def __init__(self, config):
        # Copy each SMTP-related setting off the source config object so
        # the dispatcher does not keep a reference to the full config.
        for name in ("smtp_host", "smtp_port", "smtp_local_hostname",
                     "smtp_timeout", "smtp_use_ttls", "smtp_user",
                     "smtp_password"):
            setattr(self, name, getattr(config, name))
class MailDispatcher(object):
    """Sends plain-text or multipart (text + HTML) mail over SMTP.

    Connection parameters come from the MailConfig-style object passed
    to the constructor. Written for Python 2 (email.MIMEMultipart import
    paths and str/unicode handling).
    """

    def __init__(self, mail_config):
        self.config = mail_config

    def send_mail(self, from_addr, receiver_addr, subject, body, body_html = None):
        """Send one message.

        If body_html is given, the message is sent as multipart/alternative
        with the plain-text body as the fallback part.
        """
        server = smtplib.SMTP(
            self.config.smtp_host,
            self.config.smtp_port,
            self.config.smtp_local_hostname,
            self.config.smtp_timeout
        )
        if body_html:
            msg = self._get_multipart_message(from_addr, receiver_addr, subject, body, body_html)
        else:
            msg = self._get_plaintext_message(from_addr, receiver_addr, subject, body)
        if(self.config.smtp_use_ttls):
            # RFC 3207 STARTTLS dance: EHLO, upgrade to TLS, EHLO again on
            # the now-encrypted channel.
            server.ehlo()
            server.starttls()
            server.ehlo()
        server.login(self.config.smtp_user,self.config.smtp_password)
        # NOTE(review): debug level 1 dumps the entire SMTP conversation to
        # stderr; presumably left over from debugging — confirm before use.
        server.set_debuglevel(1)
        server.sendmail(from_addr, receiver_addr, msg.as_string())
        server.quit()

    def _append_header(self, msg, from_addr, receiver_addr, subject, is_multipart = False):
        """Set Subject/From/To headers on *msg* and return it."""
        #header_charset = 'ISO-8859-1'
        #msg['Subject'] = Header(unicode(subject), header_charset)
        # Subject is RFC 2047-encoded so non-ASCII characters survive.
        msg['Subject'] = Header(subject.encode('utf-8'), 'UTF-8').encode()
        msg['From'] = from_addr
        msg['To'] = receiver_addr
        if is_multipart:
            msg.preamble = 'This is a multi-part message in MIME format.'
        return msg

    def _get_plaintext_message(self, from_addr, receiver_addr, subject, body):
        """Build a text/plain message with headers applied."""
        msg = self._get_body_as_mimetext(body,'plain')
        msg = self._append_header(msg, from_addr, receiver_addr, subject)
        return msg

    def _get_body_as_mimetext(self, body, mime_type):
        """Wrap *body* in a UTF-8, quoted-printable MIMEText part of the
        given subtype ('plain' or 'html')."""
        # Register utf-8 with quoted-printable for both headers and bodies.
        Charset.add_charset('utf-8', Charset.QP, Charset.QP, 'utf-8')
        mime_text = MIMEText(body.encode('utf-8'), mime_type, 'UTF-8')
        return mime_text

    def _get_multipart_message(self, from_addr, receiver_addr, subject, body, body_html):
        # Encapsulate the plain and HTML versions of the message body in an
        # 'alternative' part, so message agents can decide which they want to display.
        msg = MIMEMultipart('alternative')
        msg = self._append_header(msg, from_addr, receiver_addr, subject, True)
        msg.attach(self._get_body_as_mimetext(body,'plain'))
        msg.attach(self._get_body_as_mimetext(body_html,'html'))
        return msg
class TemplateMailDispatcher(MailDispatcher):
    """MailDispatcher that renders message bodies from string templates.

    Placeholders in the template are replaced by plain string substitution
    using the dictionary passed to send_mail().
    """

    def __init__(self, mail_config, template, html_template = None):
        # Name the class explicitly instead of super(self.__class__, ...):
        # the latter recurses forever as soon as this class is subclassed,
        # because self.__class__ is then the subclass, not this class.
        super(TemplateMailDispatcher, self).__init__(mail_config)
        self.template = template
        self.html_template = html_template

    def send_mail(self, from_addr, receiver_addr, subject, template_fill_args_dictionary):
        """Render the plain (and optional HTML) template and send the mail."""
        msg = self._get_filled_template(self.template, template_fill_args_dictionary)
        if self.html_template:
            msg_html = self._get_filled_template(self.html_template, template_fill_args_dictionary)
        else:
            msg_html = None
        super(TemplateMailDispatcher, self).send_mail(from_addr, receiver_addr, subject, msg, msg_html)

    def _get_filled_template(self, template, template_fill_args_dictionary):
        """Return *template* with every dictionary key replaced by its value."""
        prepared_template = template
        for key, value in template_fill_args_dictionary.items():
            prepared_template = prepared_template.replace(key, value)
        return prepared_template
__author__ = 'mpetyx'
from unittest import TestCase
import unittest
import logging
import sys
from pyapi import API
class TestpyAPI(TestCase):
    """Smoke tests that each serialisation target of the pyapi API works."""

    def setUp(self):
        # print "Setting up the coverage unit test for pyapi"
        # Logger used by tearDown. Previously tearDown referenced self.log
        # without it ever being assigned, so every test errored during
        # teardown with AttributeError.
        self.log = logging.getLogger(self.__class__.__name__)
        self.document = API()
        self.api = API().parse(location="url", language="raml")

    def test_swagger_serialise(self):
        self.assertEqual(self.api.serialise(language="swagger"), {}, "Swagger could not be serialised properly")

    def test_raml_serialise(self):
        self.assertEqual(self.api.serialise(language="raml"), {}, "RAML could not be serialised properly")

    def test_hydra_serialise(self):
        self.assertEqual(self.api.serialise(language="hydra"), {}, "Hydra could not be serialised properly")

    def test_blueprint_serialise(self):
        self.assertEqual(self.api.serialise(language="blueprint"), {},
                         "API blueprint could not be serialised properly")

    def test_query(self):
        # TODO: placeholder sample assertion carried over from the original;
        # it always fails as written.
        self.assertEqual(1, 2, "There are not equal at all!")

    # ending the test
    def tearDown(self):
        """Cleaning up after the test"""
        self.log.debug("finalising the test")
if __name__ == '__main__':
    # Route log output to stderr and enable debug logging for the sample
    # logger before delegating to unittest's CLI entry point.
    logging.basicConfig(stream=sys.stderr)
    logging.getLogger("SomeTest.testSomething").setLevel(logging.DEBUG)
    unittest.main()
| mpetyx/pyapi | tests/test_overview_example.py | Python | mit | 1,364 |
# Run all test suites
import unittest, os
from database_api_test_archive import *
from database_api_test_course import *
from database_api_test_exam import *
from database_api_test_teacher import *
from database_api_test_user import *
from rest_api_test_user import *
# Load every database-layer test case into a runnable suite.
_loader = unittest.TestLoader()
db_suites = [
    _loader.loadTestsFromTestCase(case)
    for case in (ArchiveTestCase, CourseTestCase, ExamTestCase,
                 TeacherTestCase, UserTestCase)
]
# REST-layer test cases.
rest_suites = [_loader.loadTestsFromTestCase(RestUserTestCase)]

# Execute each suite one by one with verbose reporting, database first.
for suite in db_suites:
    unittest.TextTestRunner(verbosity=2).run(suite)
for suite in rest_suites:
    unittest.TextTestRunner(verbosity=2).run(suite)
| petterip/exam-archive | test/__main__.py | Python | mit | 958 |
import Fast5File
def run(parser, args):
    """Entry point for the poretools ``metadata`` subcommand (Python 2).

    With ``args.read`` set, prints one tab-separated row of read metadata
    per read, preceded by a header row; otherwise prints the asic id and
    temperatures for each FAST5 file in ``args.files``.
    """
    if args.read:
        for i, fast5 in enumerate(Fast5File.Fast5FileSet(args.files)):
            for metadata_dict in fast5.read_metadata:
                if i == 0:
                    # Header columns come from the first file's metadata keys.
                    # NOTE(review): 'header' is only bound while handling the
                    # first file; if that file yields no metadata rows the
                    # print below raises NameError — confirm intended.
                    header = metadata_dict.keys()
                    print "\t".join(["filename"] + header)
                print "\t".join([fast5.filename] + [str( metadata_dict[k] ) for k in header])
    else:
        print "asic_id\tasic_temp\theatsink_temp"
        for fast5 in Fast5File.Fast5FileSet(args.files):
            asic_temp = fast5.get_asic_temp()
            asic_id = fast5.get_asic_id()
            heatsink_temp = fast5.get_heatsink_temp()
            print "%s\t%s\t%s" % (asic_id, asic_temp, heatsink_temp)
            fast5.close()
| arq5x/poretools | poretools/metadata.py | Python | mit | 641 |
"""Convert "arbitrary" sound files to AIFF (Apple and SGI's audio format).
Input may be compressed.
Uncompressed file type may be AIFF, WAV, VOC, 8SVX, NeXT/Sun, and others.
An exception is raised if the file is not of a recognized type.
Returned filename is either the input filename or a temporary filename;
in the latter case the caller must ensure that it is removed.
Other temporary files used are removed by the function.
"""
import os
import tempfile
import pipes
import sndhdr
__all__ = ["error", "toaiff"]
# Map each recognized uncompressed sound type (as reported by sndhdr) to a
# sox pipeline template that converts it to AIFF on stdout.
table = {}

t = pipes.Template()
t.append('sox -t au - -t aiff -r 8000 -', '--')
table['au'] = t

# XXX The following is actually sub-optimal.
# XXX The HCOM sampling rate can be 22k, 22k/2, 22k/3 or 22k/4.
# XXX We must force the output sampling rate else the SGI won't play
# XXX files sampled at 5.5k or 7.333k; however this means that files
# XXX sampled at 11k are unnecessarily expanded.
# XXX Similar comments apply to some other file types.
t = pipes.Template()
t.append('sox -t hcom - -t aiff -r 22050 -', '--')
table['hcom'] = t

t = pipes.Template()
t.append('sox -t voc - -t aiff -r 11025 -', '--')
table['voc'] = t

t = pipes.Template()
t.append('sox -t wav - -t aiff -', '--')
table['wav'] = t

t = pipes.Template()
t.append('sox -t 8svx - -t aiff -r 16000 -', '--')
table['8svx'] = t

t = pipes.Template()
t.append('sox -t sndt - -t aiff -r 16000 -', '--')
table['sndt'] = t

t = pipes.Template()
t.append('sox -t sndr - -t aiff -r 16000 -', '--')
table['sndr'] = t

# Pipeline used to transparently uncompress .Z input files.
uncompress = pipes.Template()
uncompress.append('uncompress', '--')
class error(Exception):
    """Raised for unsupported input types or failed conversion steps."""
    pass
def toaiff(filename):
    """Convert *filename* to AIFF, returning the path of the result.

    The returned path is either the input file itself (already AIFF) or a
    temporary file that the caller must remove. All other intermediate
    temporary files are cleaned up here, even if conversion fails.
    """
    temps = []
    ret = None
    try:
        ret = _toaiff(filename, temps)
    finally:
        # Remove every intermediate temp file except the one being returned.
        for temp in temps[:]:
            if temp != ret:
                try:
                    os.unlink(temp)
                except os.error:
                    pass
                temps.remove(temp)
    return ret
def _toaiff(filename, temps):
    """Do the real work for toaiff() (Python 2 syntax).

    Every temporary file created is appended to *temps* so that the caller
    can clean up. Raises `error` for unsupported or unconvertible input.
    """
    # Transparently uncompress .Z files first.
    if filename[-2:] == '.Z':
        fname = tempfile.mktemp()
        temps.append(fname)
        sts = uncompress.copy(filename, fname)
        if sts:
            raise error, filename + ': uncompress failed'
    else:
        fname = filename
    try:
        ftype = sndhdr.whathdr(fname)
        if ftype:
            ftype = ftype[0] # All we're interested in
    except IOError, msg:
        # Normalize the several shapes a Python 2 IOError message can take
        # ((errno, strerror) tuples or arbitrary objects) into a string.
        if type(msg) == type(()) and len(msg) == 2 and \
                type(msg[0]) == type(0) and type(msg[1]) == type(''):
            msg = msg[1]
        if type(msg) != type(''):
            msg = `msg`
        raise error, filename + ': ' + msg
    if ftype == 'aiff':
        return fname
    if ftype is None or not table.has_key(ftype):
        raise error, \
              filename + ': unsupported audio file type ' + `ftype`
    # Run the sox pipeline registered for this input type into a new temp.
    temp = tempfile.mktemp()
    temps.append(temp)
    sts = table[ftype].copy(fname, temp)
    if sts:
        raise error, filename + ': conversion to aiff failed'
    return temp
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.2/Lib/toaiff.py | Python | mit | 2,989 |
import _plotly_utils.basevalidators
class XValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``x`` attribute of ``choropleth.colorbar``."""

    def __init__(self, plotly_name="x", parent_name="choropleth.colorbar", **kwargs):
        # Defaults below may be overridden by the caller via **kwargs.
        edit_type = kwargs.pop("edit_type", "colorbars")
        upper_bound = kwargs.pop("max", 3)
        lower_bound = kwargs.pop("min", -2)
        super(XValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            max=upper_bound,
            min=lower_bound,
            **kwargs
        )
| plotly/plotly.py | packages/python/plotly/plotly/validators/choropleth/colorbar/_x.py | Python | mit | 472 |
"""
Implementation of the paper,
Durand and Dorsey SIGGGRAPH 2002,
"Fast Bilateral Fitering for the display of high-dynamic range images"
"""
import numpy as np
import hydra.io
import hydra.filters
def bilateral_separation(img, sigma_s=0.02, sigma_r=0.4):
    """Split a luminance image into base and detail layers.

    The base layer is a bilateral filtering of the log-luminance (edge
    preserving); the detail layer is the per-pixel ratio img / base.
    """
    rows, cols = img.shape
    # Spatial sigma is expressed as a fraction of the larger image dimension.
    spatial_sigma = max(rows, cols) * sigma_s
    # Filter in log space; the small epsilon guards log10 against zeros.
    log_img = np.log10(img + 1.0e-6)
    smoothed = np.asarray(hydra.filters.bilateral(log_img, spatial_sigma, sigma_r))
    base = np.power(10.0, smoothed) - 1.0e-6
    base[base <= 0.0] = 0.0
    base = base.reshape((rows, cols))
    detail = hydra.core.remove_specials(img / base)
    return base, detail
def durand(img, target_contrast=5.0):
    """Tone-map an HDR image with Durand & Dorsey's bilateral operator.

    Compresses the base (large-scale) luminance layer in log space so the
    overall contrast equals target_contrast, leaves detail untouched, and
    reapplies the original color ratios. Returns values clipped to [0, 1].
    """
    luminance = hydra.core.lum(img)
    # Per-channel color ratios, to be reapplied after luminance compression.
    ratios = np.zeros(img.shape)
    for ch in range(3):
        ratios[:, :, ch] = hydra.core.remove_specials(img[:, :, ch] / luminance)

    base_layer, detail_layer = bilateral_separation(luminance)
    log_base = np.log10(base_layer)
    log_detail = np.log10(detail_layer)
    max_log_base = np.max(log_base)

    # Scale the base layer's log range down to log(target_contrast).
    compression_factor = np.log(target_contrast) / (max_log_base - np.min(log_base))
    log_absolute = compression_factor * max_log_base
    compressed_lum = np.power(
        10.0, log_base * compression_factor + log_detail - log_absolute
    )

    result = np.zeros(img.shape)
    for ch in range(3):
        result[:, :, ch] = ratios[:, :, ch] * compressed_lum
    return np.clip(result, 0.0, 1.0)
| tatsy/hydra | hydra/tonemap/durand.py | Python | mit | 1,367 |
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
import unittest
import Network
import time
class TestConnection(Network.Connection):
    """Connection subclass that records the last packet it received."""
    def handlePacket(self, packet):
        # Stash the packet so the test can assert on it after the exchange.
        self.packet = packet
class TestServer(Network.Server):
    """Server that wraps each accepted socket in a TestConnection."""
    def createConnection(self, sock):
        return TestConnection(sock)
class NetworkTest(unittest.TestCase):
    """Exercises a full client/server handshake over a local socket."""

    def testHandshake(self):
        # Start a server, connect a client, and push one packet through the
        # event loop.
        s = TestServer()
        c = TestConnection()
        c.connect("localhost")
        c.sendPacket("moikka")
        Network.communicate(100)
        # Python 2 only: dict.values() returns an indexable list here.
        client = s.clients.values()[0]
        assert client.packet == "moikka"
        assert client.id == 1

    def tearDown(self):
        # Release sockets even if the assertions above failed.
        Network.shutdown()
if __name__ == "__main__":
    # Run the handshake test when executed directly.
    unittest.main()
| fretsonfire/fof-python | src/NetworkTest.py | Python | mit | 2,106 |
"""
This program handles the communication over I2C between a Raspberry Pi and a
BMP180 Temperature/Pressure sensor.
Made by: MrTijn/Tijndagamer
Copyright 2015-2017
Released under the MIT license.
"""
import smbus
import math
from time import sleep
class bmp180:
    """Driver for the Bosch BMP180 temperature/pressure sensor on I2C bus 1.

    All conversion arithmetic follows the algorithm in the BMP180 datasheet
    ("Calculating pressure and temperature"), using the per-device
    calibration constants read from the sensor's EEPROM at construction.
    """

    # I2C slave address of the sensor; set in __init__.
    address = None
    # I2C bus 1 is the user-accessible bus on modern Raspberry Pi boards.
    bus = smbus.SMBus(1)
    # Oversampling setting (0..3): higher is slower but less noisy.
    mode = 1  # TODO: Add a way to change the mode

    # BMP180 registers
    CONTROL_REG = 0xF4   # write a measurement command here
    DATA_REG = 0xF6      # measurement result: MSB, then LSB, then XLSB

    # Calibration data registers (each value spans two bytes, MSB first)
    CAL_AC1_REG = 0xAA
    CAL_AC2_REG = 0xAC
    CAL_AC3_REG = 0xAE
    CAL_AC4_REG = 0xB0
    CAL_AC5_REG = 0xB2
    CAL_AC6_REG = 0xB4
    CAL_B1_REG = 0xB6
    CAL_B2_REG = 0xB8
    CAL_MB_REG = 0xBA
    CAL_MC_REG = 0xBC
    CAL_MD_REG = 0xBE

    # Calibration data variables, filled in by read_calibration_data()
    calAC1 = 0
    calAC2 = 0
    calAC3 = 0
    calAC4 = 0
    calAC5 = 0
    calAC6 = 0
    calB1 = 0
    calB2 = 0
    calMB = 0
    calMC = 0
    calMD = 0

    def __init__(self, address):
        """Remember the I2C address and read the calibration EEPROM."""
        self.address = address
        self.read_calibration_data()

    # I2C methods
    def read_signed_16_bit(self, register):
        """Read a signed big-endian 16-bit value.

        register -- the register to read from.
        Returns the read value.
        """
        msb = self.bus.read_byte_data(self.address, register)
        lsb = self.bus.read_byte_data(self.address, register + 1)
        # Reinterpret the high byte as two's complement.
        if msb > 127:
            msb -= 256
        return (msb << 8) + lsb

    def read_unsigned_16_bit(self, register):
        """Read an unsigned big-endian 16-bit value.

        Reads the given register and the following one, combining them as
        an unsigned 16-bit value.

        register -- the register to read from.
        Returns the read value.
        """
        msb = self.bus.read_byte_data(self.address, register)
        lsb = self.bus.read_byte_data(self.address, register + 1)
        return (msb << 8) + lsb

    # BMP180 interaction methods
    def read_calibration_data(self):
        """Read and store the factory calibration constants."""
        self.calAC1 = self.read_signed_16_bit(self.CAL_AC1_REG)
        self.calAC2 = self.read_signed_16_bit(self.CAL_AC2_REG)
        self.calAC3 = self.read_signed_16_bit(self.CAL_AC3_REG)
        self.calAC4 = self.read_unsigned_16_bit(self.CAL_AC4_REG)
        self.calAC5 = self.read_unsigned_16_bit(self.CAL_AC5_REG)
        self.calAC6 = self.read_unsigned_16_bit(self.CAL_AC6_REG)
        self.calB1 = self.read_signed_16_bit(self.CAL_B1_REG)
        self.calB2 = self.read_signed_16_bit(self.CAL_B2_REG)
        self.calMB = self.read_signed_16_bit(self.CAL_MB_REG)
        self.calMC = self.read_signed_16_bit(self.CAL_MC_REG)
        self.calMD = self.read_signed_16_bit(self.CAL_MD_REG)

    def get_raw_temp(self):
        """Trigger a temperature measurement and return the raw reading."""
        # Write 0x2E to CONTROL_REG to start the measurement.
        self.bus.write_byte_data(self.address, self.CONTROL_REG, 0x2E)
        # The datasheet specifies a 4.5 ms maximum conversion time.
        sleep(0.0045)
        return self.read_unsigned_16_bit(self.DATA_REG)

    def get_raw_pressure(self):
        """Trigger a pressure measurement and return the raw reading."""
        # 0x34 + (oss << 6) starts a pressure conversion at the current
        # oversampling setting.
        self.bus.write_byte_data(self.address, self.CONTROL_REG,
                                 0x34 + (self.mode << 6))
        # 8 ms covers mode 1; higher modes need longer.
        # TODO: use the correct wait time for the current mode
        sleep(0.008)
        MSB = self.bus.read_byte_data(self.address, self.DATA_REG)
        LSB = self.bus.read_byte_data(self.address, self.DATA_REG + 1)
        XLSB = self.bus.read_byte_data(self.address, self.DATA_REG + 2)
        return ((MSB << 16) + (LSB << 8) + XLSB) >> (8 - self.mode)

    def _calculate_b5(self, UT):
        """Return the intermediate value B5 for raw temperature *UT*.

        This is the shared first step of both the temperature and the
        pressure algorithms in the datasheet; it was previously duplicated
        in get_temp() and get_pressure().
        """
        X1 = ((UT - self.calAC6) * self.calAC5) / math.pow(2, 15)
        X2 = (self.calMC * math.pow(2, 11)) / (X1 + self.calMD)
        return X1 + X2

    def get_temp(self):
        """Return the compensated temperature in degrees Celsius.

        The calculation follows the BMP180 datasheet.
        """
        UT = self.get_raw_temp()
        B5 = self._calculate_b5(UT)
        return ((B5 + 8) / math.pow(2, 4)) / 10

    def get_pressure(self):
        """Return the compensated pressure in Pascal.

        The calculation follows the BMP180 datasheet, page 15; it needs a
        fresh temperature reading to derive the intermediate value B5.
        """
        UP = self.get_raw_pressure()
        UT = self.get_raw_temp()
        B5 = self._calculate_b5(UT)

        B6 = B5 - 4000
        X1 = (self.calB2 * (B6 * B6 / math.pow(2, 12))) / math.pow(2, 11)
        X2 = self.calAC2 * B6 / math.pow(2, 11)
        X3 = X1 + X2
        B3 = (((self.calAC1 * 4 + int(X3)) << self.mode) + 2) / 4
        X1 = self.calAC3 * B6 / math.pow(2, 13)
        X2 = (self.calB1 * (B6 * B6 / math.pow(2, 12))) / math.pow(2, 16)
        X3 = ((X1 + X2) + 2) / math.pow(2, 2)
        B4 = self.calAC4 * (X3 + 32768) / math.pow(2, 15)
        B7 = (UP - B3) * (50000 >> self.mode)
        if B7 < 0x80000000:
            pressure = (B7 * 2) / B4
        else:
            pressure = (B7 / B4) * 2
        X1 = (pressure / math.pow(2, 8)) * (pressure / math.pow(2, 8))
        X1 = (X1 * 3038) / math.pow(2, 16)
        X2 = (-7357 * pressure) / math.pow(2, 16)
        pressure = pressure + (X1 + X2 + 3791) / math.pow(2, 4)
        return pressure

    def get_altitude(self, sea_level_pressure = 101325):
        """Return the altitude in meters derived from the current pressure.

        sea_level_pressure -- the pressure at the sea level closest to you,
                              in Pascal.

        Uses the international barometric formula (BMP180 datasheet):
            altitude = 44330 * (1 - (p / p0) ** (1 / 5.255))
        Results are only meaningful outdoors with a sensible reference
        pressure.
        """
        pressure = float(self.get_pressure())
        # 1 / 5.255 ~= 0.19029495. The previous exponent, 0.00019029495,
        # was off by a factor of 1000 and produced near-zero altitudes,
        # which is why the old docstring warned the method "probably does
        # not work correctly".
        return 44330.0 * (1.0 - math.pow(pressure / sea_level_pressure,
                                         0.19029495))
if __name__ == "__main__":
    # Quick smoke test: 0x77 is the BMP180's fixed I2C slave address.
    bmp = bmp180(0x77)
    print(bmp.get_temp())
    print(bmp.get_pressure())
    print(bmp.get_altitude())
| Tijndagamer/BMP180-Python | bmp180/bmp180.py | Python | mit | 6,914 |
"""Features modules"""
| timevortexproject/timevortex | features/__init__.py | Python | mit | 23 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.utils import Length
from sc2reader.events.base import Event
from sc2reader.log_utils import loggable
from itertools import chain
@loggable
class GameEvent(Event):
    """
    Base class for every game event. The attributes assigned here are
    universally available on all game event types.
    """

    def __init__(self, frame, pid):
        #: The id of the entity generating the event; 16 marks a global,
        #: non-player event. Since Heart of the Swarm this is the user id
        #: (uid) rather than the player id; the name is kept for backwards
        #: compatibility. Prefer :attr:`player` over using this directly.
        self.pid = pid

        #: The :class:`~sc2reader.objects.Player` behind this event, filled
        #: in later; None for global events (:attr:`is_local` is False).
        self.player = None

        #: Game frame the event was recorded at (16 frames per game second).
        self.frame = frame

        #: Game second the event was recorded at.
        self.second = frame >> 4

        #: True for player events, False for global (pid 16) events.
        self.is_local = pid != 16

        #: Shortcut for the event class name.
        self.name = self.__class__.__name__

    def _str_prefix(self):
        """Return the 'time<TAB>player ' prefix shared by all __str__'s."""
        if getattr(self, "pid", 16) == 16:
            who = "Global"
        elif not self.player:
            who = "no name"
        elif not self.player.name:
            who = "Player {0} - ({1})".format(
                self.player.pid, self.player.play_race
            )
        else:
            who = self.player.name
        return "{0}\t{1:<15} ".format(Length(seconds=int(self.frame / 16)), who)

    def __str__(self):
        return self._str_prefix() + self.name
class GameStartEvent(GameEvent):
    """
    Recorded when the game starts and the frames start to roll. This is a global non-player
    event.
    """

    def __init__(self, frame, pid, data):
        super(GameStartEvent, self).__init__(frame, pid)
        #: Raw event payload; meaning undocumented in the protocol.
        self.data = data
class PlayerLeaveEvent(GameEvent):
    """
    Recorded when a player leaves the game.
    """

    def __init__(self, frame, pid, data):
        super(PlayerLeaveEvent, self).__init__(frame, pid)
        #: Raw event payload; meaning undocumented in the protocol.
        self.data = data
class UserOptionsEvent(GameEvent):
    """
    Per-player options block, recorded once for each player at the very
    beginning of the game, before the :class:`GameStartEvent`.
    """

    #: Option keys copied verbatim from the replay payload onto the event,
    #: each becoming a same-named attribute.
    _COPIED_KEYS = (
        "game_fully_downloaded",
        "development_cheats_enabled",
        "multiplayer_cheats_enabled",
        "sync_checksumming_enabled",
        "is_map_to_map_transition",
        "use_ai_beacons",
        "debug_pause_enabled",
        "base_build_num",
    )

    def __init__(self, frame, pid, data):
        super(UserOptionsEvent, self).__init__(frame, pid)
        for key in self._COPIED_KEYS:
            setattr(self, key, data[key])

        #: Whether workers are sent to auto-mine on game start; absent from
        #: older replay payloads, in which case it is None.
        self.starting_rally = data.get("starting_rally")
def create_command_event(frame, pid, data):
    """Factory: build the CommandEvent subclass matching the payload's
    ability type; returns None for unrecognized types."""
    dispatch = {
        "None": BasicCommandEvent,
        "TargetUnit": TargetUnitCommandEvent,
        "TargetPoint": TargetPointCommandEvent,
        "Data": DataCommandEvent,
    }
    event_class = dispatch.get(data["data"][0])
    if event_class is not None:
        return event_class(frame, pid, data)
@loggable
class CommandEvent(GameEvent):
    """
    Base class for commands a player issues to a unit or group of units.
    Commands come in three flavors, each with its own payload; see
    :class:`TargetPointCommandEvent`, :class:`TargetUnitCommandEvent`, and
    :class:`DataCommandEvent`. The attributes set here are shared by all
    of them.
    """

    #: (name, bit mask) pairs for the command flag field. Names sharing a
    #: mask are aliases for the same bit.
    _FLAG_MASKS = (
        ("alternate", 0x1),
        ("queued", 0x2),
        ("preempt", 0x4),
        ("smart_click", 0x8),
        ("smart_rally", 0x10),
        ("subgroup", 0x20),
        ("set_autocast", 0x40),
        ("set_autocast_on", 0x80),
        ("user", 0x100),
        ("data_a", 0x200),
        ("data_passenger", 0x200),  # alt-name
        ("data_b", 0x400),
        ("data_abil_queue_order_id", 0x400),  # alt-name
        ("ai", 0x800),
        ("ai_ignore_on_finish", 0x1000),
        ("is_order", 0x2000),
        ("script", 0x4000),
        ("homogenous_interruption", 0x8000),
        ("minimap", 0x10000),
        ("repeat", 0x20000),
        ("dispatch_to_other_unit", 0x40000),
        ("target_self", 0x80000),
    )

    def __init__(self, frame, pid, data):
        super(CommandEvent, self).__init__(frame, pid)

        #: Raw bit field of command modifier flags.
        self.flags = data["flags"]

        #: Mapping of flag name -> bool, one entry per named bit above.
        self.flag = dict(
            (name, self.flags & mask != 0) for name, mask in self._FLAG_MASKS
        )

        ability = data["ability"]

        #: True when the payload carried ability information.
        self.has_ability = ability is not None

        #: Link to the ability group (0 when no ability was recorded).
        self.ability_link = ability["ability_link"] if self.has_ability else 0

        #: Index of the command within the ability group.
        self.command_index = (
            ability["ability_command_index"] if self.has_ability else 0
        )

        #: Additional ability payload.
        self.ability_data = (
            ability["ability_command_data"] if self.has_ability else 0
        )

        #: Unique identifier composed of the link and command index.
        self.ability_id = self.ability_link << 5 | self.command_index

        #: Resolved ability object; filled in later by the processing
        #: pipeline, None until then.
        self.ability = None

        #: Convenience name of the ability (empty until resolved).
        self.ability_name = ""

        #: One of "None" (no target), "TargetPoint", "TargetUnit", "Data".
        self.ability_type = data["data"][0]

        #: Raw payload associated with the ability type.
        self.ability_type_data = data["data"][1]

        #: Tag of the "other" unit involved, if any.
        self.other_unit_id = data["other_unit_tag"]

        #: Resolved reference to the other unit (filled in later).
        self.other_unit = None

    def __str__(self):
        pieces = [self._str_prefix()]
        if self.has_ability:
            pieces.append("Ability ({0:X})".format(self.ability_id))
            if self.ability:
                pieces.append(" - {0}".format(self.ability.name))
        else:
            pieces.append("Right Click")
        if self.ability_type == "TargetUnit":
            # NOTE(review): self.target is not assigned in this class;
            # presumably attached later in the pipeline — confirm.
            pieces.append("; Target: {0} [{1:0>8X}]".format(
                self.target.name, self.target_unit_id
            ))
        if self.ability_type in ("TargetPoint", "TargetUnit"):
            pieces.append("; Location: {0}".format(str(self.location)))
        return "".join(pieces)
class BasicCommandEvent(CommandEvent):
    """
    Extends :class:`CommandEvent`

    This event is recorded for commands that carry no extra target payload.
    Note that like all CommandEvents, the event is recorded regardless
    of whether or not the command was successful.
    """

    def __init__(self, frame, pid, data):
        super(BasicCommandEvent, self).__init__(frame, pid, data)
class TargetPointCommandEvent(CommandEvent):
    """
    Extends :class:`CommandEvent`

    Recorded for commands that target a map location rather than a unit
    (e.g. Attack Move, Psistorm, Fungal Growth, EMP). Like all
    CommandEvents, it is recorded regardless of command success.
    """

    def __init__(self, frame, pid, data):
        super(TargetPointCommandEvent, self).__init__(frame, pid, data)
        point = self.ability_type_data["point"]

        #: The x coordinate of the target (fixed-point / 4096).
        self.x = point.get("x", 0) / 4096.0

        #: The y coordinate of the target (fixed-point / 4096).
        self.y = point.get("y", 0) / 4096.0

        #: The z coordinate of the target (raw, not scaled).
        self.z = point.get("z", 0)

        #: (x, y, z) tuple of the target location.
        self.location = (self.x, self.y, self.z)
class TargetUnitCommandEvent(CommandEvent):
    """
    Extends :class:`CommandEvent`

    Recorded for commands that target a unit (e.g. Chronoboost, Transfuse,
    Snipe); the target unit's location at command time is also recorded.
    Like all CommandEvents, it is recorded regardless of command success.
    """

    def __init__(self, frame, pid, data):
        super(TargetUnitCommandEvent, self).__init__(frame, pid, data)
        payload = self.ability_type_data

        #: Flags set on the target unit.
        self.target_flags = payload.get("flags", None)

        #: Timer value from the payload; semantics undocumented.
        self.target_timer = payload.get("timer", None)

        #: Unique id of the target unit; 0 when the unit is shrouded by
        #: fog of war.
        self.target_unit_id = payload.get("unit_tag", None)

        #: Reference to the targeted unit, resolved later. For a fogged
        #: target (id 0) this is a generic, reused fog-of-war unit of
        #: :attr:`target_unit_type` and must not be confused with a real
        #: unit.
        self.target_unit = None

        #: Current integer type id of the target unit.
        self.target_unit_type = payload.get("unit_link", None)

        #: Player id of the controlling player (present from build 19595
        #: on); 0 when the target is under fog of war.
        self.control_player_id = payload.get("control_player_id", None)

        #: Player id of the player paying upkeep.
        self.upkeep_player_id = payload.get("upkeep_player_id", None)

        point = payload["point"]

        #: The x coordinate of the target (fixed-point / 4096).
        self.x = point.get("x", 0) / 4096.0

        #: The y coordinate of the target (fixed-point / 4096).
        self.y = point.get("y", 0) / 4096.0

        #: The z coordinate of the target (raw, not scaled).
        self.z = point.get("z", 0)

        #: (x, y, z) tuple of the target location.
        self.location = (self.x, self.y, self.z)
class UpdateTargetPointCommandEvent(TargetPointCommandEvent):
    """
    Extends :class:`TargetPointCommandEvent`
    This event is generated when the user changes the point of a unit. Appears to happen
    when a unit is moving and it is given a new command. It's possible there are other
    instances of this occurring.
    """
    #: sc2reader's canonical name for this event type.
    name = "UpdateTargetPointCommandEvent"
class UpdateTargetUnitCommandEvent(TargetUnitCommandEvent):
    """
    Extends :class:`TargetUnitCommandEvent`
    This event is generated when a TargetUnitCommandEvent is updated, likely due to
    changing the target unit. It is unclear if this needs to be a separate event
    from TargetUnitCommandEvent, but for flexibility, it will be treated
    differently.
    One example of this event occurring is casting inject on a hatchery while
    holding shift, and then shift clicking on a second hatchery.
    """
    #: sc2reader's canonical name for this event type.
    name = "UpdateTargetUnitCommandEvent"
class DataCommandEvent(CommandEvent):
    """
    Extends :class:`CommandEvent`
    DataCommandEvents are recorded whenever a player issues a command that
    has no target. Commands like Burrow, SiegeMode, Train XYZ, and Stop fall
    under this category.
    Like all CommandEvents, the event is recorded regardless of whether or
    not the command was successful.
    """
    def __init__(self, frame, pid, data):
        super(DataCommandEvent, self).__init__(frame, pid, data)
        #: Other target data. Available for Data type events.
        self.target_data = self.ability_type_data.get("data")
@loggable
class CommandManagerStateEvent(GameEvent):
    """
    These events indicate that the last :class:`CommandEvent` called has been
    called again. For example, if you add three SCVs to an empty queue on a
    Command Center, the first add will generate a :class:`BasicCommandEvent`
    and the two subsequent adds will each generate a
    :class:`CommandManagerStateEvent`.
    """
    def __init__(self, frame, pid, data):
        super(CommandManagerStateEvent, self).__init__(frame, pid)
        #: Always 1?
        self.state = data["state"]
        #: An index identifying how many events of this type have been called
        self.sequence = data["sequence"]
@loggable
class SelectionEvent(GameEvent):
    """
    Selection events are generated when ever the active selection of the
    player is updated. Unlike other game events, these events can also be
    generated by non-player actions like unit deaths or transformations.
    Starting in Starcraft 2.0.0, selection events targeting control group
    buffers are also generated when control group selections are modified
    by non-player actions. When a player action updates a control group
    a :class:`ControlGroupEvent` is generated.
    """
    def __init__(self, frame, pid, data):
        super(SelectionEvent, self).__init__(frame, pid)
        #: The control group being modified. 10 for active selection
        self.control_group = data["control_group_index"]
        #: Deprecated, use control_group
        self.bank = self.control_group
        #: ???
        self.subgroup_index = data["subgroup_index"]
        #: The type of mask to apply. One of None, Mask, OneIndices, ZeroIndices
        self.mask_type = data["remove_mask"][0]
        #: The data for the mask
        self.mask_data = data["remove_mask"][1]
        #: The unit type data for the new units
        self.new_unit_types = [
            (
                d["unit_link"],
                d["subgroup_priority"],
                d["intra_subgroup_priority"],
                d["count"],
            )
            for d in data["add_subgroups"]
        ]
        #: The unit id data for the new units
        self.new_unit_ids = data["add_unit_tags"]
        # Stretch each (type, priority, priority, count) entry out `count`
        # times so the per-unit attributes can be zipped with the per-unit
        # ids below. A single pass replaces the three separate chain()
        # traversals this used to perform.
        unit_types = []
        unit_subgroup_priorities = []
        unit_intra_subgroup_priorities = []
        for utype, subgroup_priority, intra_subgroup_priority, count in self.new_unit_types:
            unit_types.extend([utype] * count)
            unit_subgroup_priorities.extend([subgroup_priority] * count)
            unit_intra_subgroup_priorities.extend(
                [intra_subgroup_priority] * count
            )
        #: The combined type and id information for new units
        self.new_unit_info = list(
            zip(
                self.new_unit_ids,
                unit_types,
                unit_subgroup_priorities,
                unit_intra_subgroup_priorities,
            )
        )
        #: A list of references to units added by this selection
        self.new_units = None
        #: Deprecated, see new_units
        self.objects = None
    def __str__(self):
        # Prefer resolved unit references when they have been filled in.
        if self.new_units:
            return GameEvent.__str__(self) + str([str(u) for u in self.new_units])
        else:
            return GameEvent.__str__(self) + str([str(u) for u in self.new_unit_info])
def create_control_group_event(frame, pid, data):
    """Factory returning the ControlGroupEvent subclass matching
    data["control_group_update"]."""
    known_updates = {
        0: SetControlGroupEvent,
        1: AddToControlGroupEvent,
        2: GetControlGroupEvent,
        # TODO: update_type 3 has been observed but its meaning is unknown;
        # update_types 4 and 5 appear in 3.0 replays. All fall back to the
        # generic ControlGroupEvent below.
    }
    event_class = known_updates.get(data["control_group_update"], ControlGroupEvent)
    return event_class(frame, pid, data)
@loggable
class ControlGroupEvent(GameEvent):
    """
    ControlGroup events are recorded when ever a player action modifies or accesses a control
    group. There are three kinds of events, generated by each of the possible
    player actions:
    * :class:`SetControlGroupEvent` - Recorded when a user sets a control group (ctrl+#).
    * :class:`GetControlGroupEvent` - Recorded when a user retrieves a control group (#).
    * :class:`AddToControlGroupEvent` - Recorded when a user adds to a control group (shift+ctrl+#)
    All three events have the same set of data (shown below) but are interpreted differently.
    See the class entry for details.
    """
    def __init__(self, frame, pid, data):
        super(ControlGroupEvent, self).__init__(frame, pid)
        #: Index to the control group being modified
        self.control_group = data["control_group_index"]
        #: Deprecated, use control_group
        self.bank = self.control_group
        #: Deprecated, use control_group
        self.hotkey = self.control_group
        #: The type of update being performed: 0 (set), 1 (add), 2 (get)
        self.update_type = data["control_group_update"]
        #: The type of mask to apply. One of None, Mask, OneIndices, ZeroIndices
        self.mask_type = data["remove_mask"][0]
        #: The data for the mask
        self.mask_data = data["remove_mask"][1]
class SetControlGroupEvent(ControlGroupEvent):
    """
    Extends :class:`ControlGroupEvent`
    This event does a straightforward replace of the current control group
    contents with the player's current selection. This event doesn't have
    masks set.
    """
class AddToControlGroupEvent(SetControlGroupEvent):
    """
    Extends :class:`ControlGroupEvent`
    This event adds the current selection to the control group.
    """
    # NOTE(review): this subclasses SetControlGroupEvent (not
    # ControlGroupEvent directly as the docstring suggests) -- confirm
    # whether that is intentional before relying on isinstance checks.
class GetControlGroupEvent(ControlGroupEvent):
    """
    Extends :class:`ControlGroupEvent`
    This event replaces the current selection with the contents of the control group.
    The mask data is used to limit that selection to units that are currently selectable.
    You might have 1 medivac and 8 marines on the control group but if the 8 marines are
    inside the medivac they cannot be part of your selection.
    """
@loggable
class CameraEvent(GameEvent):
    """
    Camera events are generated whenever the player camera moves, zooms, or
    rotates. It does not matter why the camera changed; this event simply
    records the current state of the camera after changing.
    """
    def __init__(self, frame, pid, data):
        super(CameraEvent, self).__init__(frame, pid)
        target = data["target"]
        #: The x coordinate of the center of the camera
        self.x = target["x"] / 256.0 if target is not None else 0.0
        #: The y coordinate of the center of the camera
        self.y = target["y"] / 256.0 if target is not None else 0.0
        #: The location of the center of the camera
        self.location = (self.x, self.y)
        #: The distance to the camera target ??
        self.distance = data["distance"]
        #: The current pitch of the camera
        self.pitch = data["pitch"]
        #: The current yaw of the camera
        self.yaw = data["yaw"]
    def __str__(self):
        return self._str_prefix() + "{0} at ({1}, {2})".format(
            self.name, self.x, self.y
        )
@loggable
class ResourceTradeEvent(GameEvent):
    """
    Generated when a player trades resources with another player, but not
    when fulfilling resource requests.
    """
    def __init__(self, frame, pid, data):
        super(ResourceTradeEvent, self).__init__(frame, pid)
        #: The id of the player sending the resources
        self.sender_id = pid
        #: A reference to the player sending the resources
        self.sender = None
        #: The id of the player receiving the resources
        self.recipient_id = data["recipient_id"]
        #: A reference to the player receiving the resources
        self.recipient = None
        #: An array of resources sent
        self.resources = data["resources"]
        # The resource array is positional and may be shorter than four
        # entries; missing positions are reported as None.
        def resource_at(index):
            return self.resources[index] if len(self.resources) > index else None
        #: Amount minerals sent
        self.minerals = resource_at(0)
        #: Amount vespene sent
        self.vespene = resource_at(1)
        #: Amount terrazine sent
        self.terrazine = resource_at(2)
        #: Amount custom resource sent
        self.custom_resource = resource_at(3)
    def __str__(self):
        return self._str_prefix() + " transfer {0} minerals, {1} gas, {2} terrazine, and {3} custom to {4}".format(
            self.minerals,
            self.vespene,
            self.terrazine,
            self.custom_resource,
            self.recipient,
        )
class ResourceRequestEvent(GameEvent):
    """
    Generated when a player creates a resource request.
    """
    def __init__(self, frame, pid, data):
        super(ResourceRequestEvent, self).__init__(frame, pid)
        #: An array of resources sent
        self.resources = data["resources"]
        #: Amount minerals sent
        self.minerals = self.resources[0] if len(self.resources) >= 1 else None
        #: Amount vespene sent
        self.vespene = self.resources[1] if len(self.resources) >= 2 else None
        #: Amount terrazine sent. This attribute was previously misspelled
        #: ``terrazon``, which made __str__ raise AttributeError because it
        #: reads ``terrazine``.
        self.terrazine = self.resources[2] if len(self.resources) >= 3 else None
        #: Deprecated misspelling kept as an alias for backward compatibility.
        self.terrazon = self.terrazine
        #: Amount custom resource sent
        self.custom_resource = self.resources[3] if len(self.resources) >= 4 else None
    def __str__(self):
        return (
            self._str_prefix()
            + " requests {0} minerals, {1} gas, {2} terrazine, and {3} custom".format(
                self.minerals, self.vespene, self.terrazine, self.custom_resource
            )
        )
class ResourceRequestFulfillEvent(GameEvent):
    """
    Generated when a player accepts a resource request.
    """
    def __init__(self, frame, pid, data):
        super(ResourceRequestFulfillEvent, self).__init__(frame, pid)
        #: The id of the request being fulfilled
        self.request_id = data["request_id"]
class ResourceRequestCancelEvent(GameEvent):
    """
    Generated when a player cancels their resource request.
    """
    def __init__(self, frame, pid, data):
        super(ResourceRequestCancelEvent, self).__init__(frame, pid)
        #: The id of the request being cancelled
        self.request_id = data["request_id"]
class HijackReplayGameEvent(GameEvent):
    """
    Generated when players take over from a replay.
    """
    def __init__(self, frame, pid, data):
        super(HijackReplayGameEvent, self).__init__(frame, pid)
        #: The method used. Not sure what 0/1 represent
        self.method = data["method"]
        #: Information on the users hijacking the game
        self.user_infos = data["user_infos"]
| ggtracker/sc2reader | sc2reader/events/game.py | Python | mit | 25,673 |
#!/usr/bin/env python
"""Application controller for CD-HIT v3.1.1"""
import shutil
from os import remove
from cogent.app.parameters import ValuedParameter
from cogent.app.util import CommandLineApplication, ResultPath,\
get_tmp_filename
from cogent.core.moltype import RNA, DNA, PROTEIN
from cogent.core.alignment import SequenceCollection
from cogent.parse.fasta import MinimalFastaParser
__author__ = "Daniel McDonald"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__credits__ = ["Daniel McDonald"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Daniel McDonald"
__email__ = "mcdonadt@colorado.edu"
__status__ = "Development"
class CD_HIT(CommandLineApplication):
    """cd-hit Application Controller
    Use this version of CD-HIT if your MolType is PROTEIN
    """
    _command = 'cd-hit'
    _input_handler = '_input_as_multiline_string'
    _parameters = {
        # input input filename in fasta format, required
        '-i':ValuedParameter('-',Name='i',Delimiter=' ',IsPath=True),
        # output filename, required
        '-o':ValuedParameter('-',Name='o',Delimiter=' ',IsPath=True),
        # sequence identity threshold, default 0.9
        # this is the default cd-hit's "global sequence identity" calc'd as :
        # number of identical amino acids in alignment
        # divided by the full length of the shorter sequence
        '-c':ValuedParameter('-',Name='c',Delimiter=' '),
        # NOTE: this dict previously declared '-g' twice. The first entry
        # (documented as "use global sequence identity", i.e. cd-hit's -G
        # flag) was silently overwritten by the accurate/slow-mode '-g'
        # entry below, so it never took effect and has been removed. If the
        # -G flag is needed it should be added under its own '-G' key.
        # band_width of alignment, default 20
        '-b':ValuedParameter('-',Name='b',Delimiter=' '),
        # max available memory (Mbyte), default 400
        '-M':ValuedParameter('-',Name='M',Delimiter=' '),
        # word_length, default 8, see user's guide for choosing it
        '-n':ValuedParameter('-',Name='n',Delimiter=' '),
        # length of throw_away_sequences, default 10
        '-l':ValuedParameter('-',Name='l',Delimiter=' '),
        # tolerance for redundance, default 2
        '-t':ValuedParameter('-',Name='t',Delimiter=' '),
        # length of description in .clstr file, default 20
        # if set to 0, it takes the fasta defline and stops at first space
        '-d':ValuedParameter('-',Name='d',Delimiter=' '),
        # length difference cutoff, default 0.0
        # if set to 0.9, the shorter sequences need to be
        # at least 90% length of the representative of the cluster
        '-s':ValuedParameter('-',Name='s',Delimiter=' '),
        # length difference cutoff in amino acid, default 999999
        # if set to 60, the length difference between the shorter sequences
        # and the representative of the cluster can not be bigger than 60
        '-S':ValuedParameter('-',Name='S',Delimiter=' '),
        # alignment coverage for the longer sequence, default 0.0
        # if set to 0.9, the alignment must covers 90% of the sequence
        '-aL':ValuedParameter('-',Name='aL',Delimiter=' '),
        # alignment coverage control for the longer sequence, default 99999999
        # if set to 60, and the length of the sequence is 400,
        # then the alignment must be >= 340 (400-60) residues
        '-AL':ValuedParameter('-',Name='AL',Delimiter=' '),
        # alignment coverage for the shorter sequence, default 0.0
        # if set to 0.9, the alignment must covers 90% of the sequence
        '-aS':ValuedParameter('-',Name='aS',Delimiter=' '),
        # alignment coverage control for the shorter sequence, default 99999999
        # if set to 60, and the length of the sequence is 400,
        # then the alignment must be >= 340 (400-60) residues
        '-AS':ValuedParameter('-',Name='AS',Delimiter=' '),
        # 1 or 0, default 0, by default, sequences are stored in RAM
        # if set to 1, sequence are stored on hard drive
        # it is recommended to use -B 1 for huge databases
        '-B':ValuedParameter('-',Name='B',Delimiter=' '),
        # 1 or 0, default 0
        # if set to 1, print alignment overlap in .clstr file
        '-p':ValuedParameter('-',Name='p',Delimiter=' '),
        # 1 or 0, default 0
        # by cd-hit's default algorithm, a sequence is clustered to the first
        # cluster that meet the threshold (fast cluster). If set to 1, the program
        # will cluster it into the most similar cluster that meet the threshold
        # (accurate but slow mode)
        # but either 1 or 0 won't change the representatives of final clusters
        '-g':ValuedParameter('-',Name='g',Delimiter=' '),
        # print this help
        '-h':ValuedParameter('-',Name='h',Delimiter=' ')
    }
    _synonyms = {'Similarity':'-c'}
    def getHelp(self):
        """Method that points to documentation"""
        help_str =\
        """
        CD-HIT is hosted as an open source project at:
        http://www.bioinformatics.org/cd-hit/
        The following papers should be cited if this resource is used:
        Clustering of highly homologous sequences to reduce thesize of large
        protein database", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
        Bioinformatics, (2001) 17:282-283
        Tolerating some redundancy significantly speeds up clustering of large
        protein databases", Weizhong Li, Lukasz Jaroszewski & Adam Godzik
        Bioinformatics, (2002) 18:77-82
        """
        return help_str
    def _input_as_multiline_string(self, data):
        """Writes data to tempfile and sets -i parameter
        data -- list of lines
        """
        if data:
            self.Parameters['-i']\
                .on(super(CD_HIT,self)._input_as_multiline_string(data))
        return ''
    def _input_as_lines(self, data):
        """Writes data to tempfile and sets -i parameter
        data -- list of lines, ready to be written to file
        """
        if data:
            self.Parameters['-i']\
                .on(super(CD_HIT,self)._input_as_lines(data))
        return ''
    def _input_as_seqs(self, data):
        """Creates a list of seqs to pass to _input_as_lines
        data -- list like object of sequences
        """
        lines = []
        for i,s in enumerate(data):
            # will number the sequences 1,2,3, etc...
            lines.append(''.join(['>',str(i+1)]))
            lines.append(s)
        return self._input_as_lines(lines)
    def _input_as_string(self, data):
        """Makes data the value of a specific parameter"""
        if data:
            self.Parameters['-i'].on(str(data))
        return ''
    def _get_seqs_outfile(self):
        """Returns the absolute path to the seqs outfile"""
        if self.Parameters['-o'].isOn():
            return self.Parameters['-o'].Value
        else:
            # Python 3 compatible raise (was the Python 2 only
            # ``raise ValueError, msg`` form, also valid on Python 2)
            raise ValueError("No output file specified")
    def _get_clstr_outfile(self):
        """Returns the absolute path to the clstr outfile"""
        if self.Parameters['-o'].isOn():
            return ''.join([self.Parameters['-o'].Value, '.clstr'])
        else:
            raise ValueError("No output file specified")
    def _get_result_paths(self, data):
        """Return dict of {key: ResultPath}"""
        result = {}
        result['FASTA'] = ResultPath(Path=self._get_seqs_outfile())
        result['CLSTR'] = ResultPath(Path=self._get_clstr_outfile())
        return result
class CD_HIT_EST(CD_HIT):
    """cd-hit-est Application Controller
    Use this version of CD-HIT if your MolType is DNA or RNA.
    """
    _command = 'cd-hit-est'
    _input_handler = '_input_as_multiline_string'
    # Copy the parent's parameter map before extending it. Calling update()
    # directly on CD_HIT._parameters (as this previously did) mutated the
    # protein controller's class attribute, adding '-r' to CD_HIT as well.
    _parameters = CD_HIT._parameters.copy()
    _parameters.update({\
        # 1 or 0, default 0, by default only +/+ strand alignment
        # if set to 1, do both +/+ & +/- alignments
        '-r':ValuedParameter('-',Name='r',Delimiter=' ')
    })
def cdhit_clusters_from_seqs(seqs, moltype, params=None):
    """Returns the CD-HIT clusters given seqs
    seqs : dict like collection of sequences
    moltype : cogent.core.moltype object
    params : cd-hit parameters
    NOTE: This method will call CD_HIT if moltype is PROTEIN,
    CD_HIT_EST if moltype is RNA/DNA, and raise if any other
    moltype is passed.
    """
    # keys are not remapped. Tested against seq_ids of 100char length
    seqs = SequenceCollection(seqs, MolType=moltype)
    # Create mapping between abbreviated IDs and full IDs
    int_map, int_keys = seqs.getIntMap()
    # Create SequenceCollection from int_map.
    int_map = SequenceCollection(int_map, MolType=moltype)
    # setup params and make sure the output argument is set
    if params is None:
        params = {}
    if '-o' not in params:
        params['-o'] = get_tmp_filename()
    # call the correct version of cd-hit based on moltype
    working_dir = get_tmp_filename()
    if moltype is PROTEIN:
        app = CD_HIT(WorkingDir=working_dir, params=params)
    elif moltype is RNA or moltype is DNA:
        # cd-hit-est handles both nucleotide alphabets
        app = CD_HIT_EST(WorkingDir=working_dir, params=params)
    else:
        # Python 3 compatible raise (was the Python 2 only ``raise E, msg``)
        raise ValueError("Moltype must be either PROTEIN, RNA, or DNA")
    # grab result
    res = app(int_map.toFasta())
    clusters = parse_cdhit_clstr_file(res['CLSTR'].readlines())
    # Map the abbreviated ids in each cluster back to the original ids
    remapped_clusters = []
    for c in clusters:
        curr = [int_keys[i] for i in c]
        remapped_clusters.append(curr)
    # perform cleanup
    res.cleanUp()
    shutil.rmtree(working_dir)
    remove(params['-o'] + '.bak.clstr')
    return remapped_clusters
def cdhit_from_seqs(seqs, moltype, params=None):
    """Returns the CD-HIT results given seqs
    seqs : dict like collection of sequences
    moltype : cogent.core.moltype object
    params : cd-hit parameters
    NOTE: This method will call CD_HIT if moltype is PROTEIN,
    CD_HIT_EST if moltype is RNA/DNA, and raise if any other
    moltype is passed.
    """
    # keys are not remapped. Tested against seq_ids of 100char length
    seqs = SequenceCollection(seqs, MolType=moltype)
    # setup params and make sure the output argument is set
    if params is None:
        params = {}
    if '-o' not in params:
        params['-o'] = get_tmp_filename()
    # call the correct version of cd-hit based on moltype
    working_dir = get_tmp_filename()
    if moltype is PROTEIN:
        app = CD_HIT(WorkingDir=working_dir, params=params)
    elif moltype is RNA or moltype is DNA:
        # cd-hit-est handles both nucleotide alphabets
        app = CD_HIT_EST(WorkingDir=working_dir, params=params)
    else:
        # Python 3 compatible raise (was the Python 2 only ``raise E, msg``)
        raise ValueError("Moltype must be either PROTEIN, RNA, or DNA")
    # grab result
    res = app(seqs.toFasta())
    new_seqs = dict(MinimalFastaParser(res['FASTA'].readlines()))
    # perform cleanup
    res.cleanUp()
    shutil.rmtree(working_dir)
    remove(params['-o'] + '.bak.clstr')
    return SequenceCollection(new_seqs, MolType=moltype)
def clean_cluster_seq_id(id):
    """Returns a cd-hit sequence id with its '>' prefix and trailing '...'
    suffix removed.
    The cluster file has sequence ids in the form of:
    >some_id...
    """
    # Drop the leading '>' then the three trailing dots.
    return id[1:][:-3]
def parse_cdhit_clstr_file(lines):
    """Returns a list of list of sequence ids representing clusters"""
    clusters = []
    current = []
    for line in lines:
        if not line.startswith('>Cluster'):
            # Member lines look like: "0\t123aa, >seq_id... *"
            current.append(clean_cluster_seq_id(line.split()[2]))
        elif current:
            # A new cluster header closes out the previous cluster.
            clusters.append(current)
            current = []
    if current:
        clusters.append(current)
    return clusters
| sauloal/cnidaria | scripts/venv/lib/python2.7/site-packages/cogent/app/cd_hit.py | Python | mit | 11,924 |
import code
import signal
import sys
import greenlet
import logging
class SigintHappened(object):
    """Sentinel delivered to the code greenlet to signal a deferred SIGINT."""
    pass
class SystemExitFromCodeThread(SystemExit):
    """Re-raised in the main greenlet when user code raised SystemExit."""
    pass
class CodeRunner(object):
    """Runs user code in an interpreter, taking care of stdout/in/err
    User code executes in a dedicated greenlet; control bounces between the
    main greenlet and the code greenlet via switch(). The code greenlet hands
    back one of the string requests 'wait', 'refresh', 'done', 'unfinished'
    or 'SystemExit' (see run_code), which lets blocking operations in user
    code be suspended and resumed cooperatively.
    """
    def __init__(self, interp=None, stuff_a_refresh_request=lambda:None):
        # interp: code.InteractiveInterpreter (or compatible) used to run source.
        # stuff_a_refresh_request: callback that asks the UI to redraw when
        # user code issues a 'refresh' request.
        self.interp = interp or code.InteractiveInterpreter()
        self.source = None
        self.main_greenlet = greenlet.getcurrent()
        self.code_greenlet = None
        self.stuff_a_refresh_request = stuff_a_refresh_request
        # True while the code greenlet is parked waiting for a value.
        self.code_is_waiting = False
        # Set when SIGINT arrives outside the code greenlet; delivered on the
        # next switch into it.
        self.sigint_happened = False
        self.orig_sigint_handler = None
    @property
    def running(self):
        # Truthy once code has been loaded and its greenlet created.
        return self.source and self.code_greenlet
    def load_code(self, source):
        """Prep code to be run"""
        self.source = source
        self.code_greenlet = None
    def _unload_code(self):
        """Called when done running code"""
        self.source = None
        self.code_greenlet = None
        self.code_is_waiting = False
    def run_code(self, for_code=None):
        """Returns Truthy values if code finishes, False otherwise
        if for_code is provided, send that value to the code greenlet
        if source code is complete, returns "done"
        if source code is incomplete, returns "unfinished"
        """
        if self.code_greenlet is None:
            # First call for this source: spin up the code greenlet and
            # install our SIGINT handler for the duration of the run.
            assert self.source is not None
            self.code_greenlet = greenlet.greenlet(self._blocking_run_code)
            self.orig_sigint_handler = signal.getsignal(signal.SIGINT)
            signal.signal(signal.SIGINT, self.sigint_handler)
            request = self.code_greenlet.switch()
        else:
            # Resuming parked code: deliver either the pending SIGINT or the
            # value it was waiting for.
            assert self.code_is_waiting
            self.code_is_waiting = False
            signal.signal(signal.SIGINT, self.sigint_handler)
            if self.sigint_happened:
                self.sigint_happened = False
                request = self.code_greenlet.switch(SigintHappened)
            else:
                request = self.code_greenlet.switch(for_code)
        # Interpret the request string handed back by the code greenlet.
        if request in ['wait', 'refresh']:
            self.code_is_waiting = True
            if request == 'refresh':
                self.stuff_a_refresh_request()
            return False
        elif request in ['done', 'unfinished']:
            # Run finished (completely or with incomplete source); restore
            # the original SIGINT handler.
            self._unload_code()
            signal.signal(signal.SIGINT, self.orig_sigint_handler)
            self.orig_sigint_handler = None
            return request
        elif request in ['SystemExit']: #use the object?
            self._unload_code()
            raise SystemExitFromCodeThread()
        else:
            raise ValueError("Not a valid value from code greenlet: %r" % request)
    def sigint_handler(self, *args):
        # In the code greenlet SIGINT is a plain KeyboardInterrupt; in the
        # main greenlet it is deferred until the code greenlet resumes.
        if greenlet.getcurrent() is self.code_greenlet:
            logging.debug('sigint while running user code!')
            raise KeyboardInterrupt()
        else:
            logging.debug('sigint while fufilling code request sigint handler running!')
            self.sigint_happened = True
    def _blocking_run_code(self):
        # Entry point of the code greenlet.
        try:
            unfinished = self.interp.runsource(self.source)
        except SystemExit:
            return 'SystemExit'
        return 'unfinished' if unfinished else 'done'
    def wait_and_get_value(self):
        """Return the argument passed in to .run_code(for_code)
        Nothing means calls to run_code must be...
        """
        # Called from within user code; parks the code greenlet with 'wait'.
        value = self.main_greenlet.switch('wait')
        if value is SigintHappened:
            raise KeyboardInterrupt()
        return value
    def refresh_and_get_value(self):
        """Returns the argument passed in to .run_code(for_code) """
        # Like wait_and_get_value, but also asks the UI to redraw first.
        value = self.main_greenlet.switch('refresh')
        if value is SigintHappened:
            raise KeyboardInterrupt()
        return value
class FakeOutput(object):
    """File-like stand-in for stdout: write() forwards its arguments to a
    callback, then parks the calling greenlet via the coderunner until the
    next run_code() call supplies a value."""
    def __init__(self, coderunner, please):
        self.coderunner = coderunner
        self.please = please
    def write(self, *args, **kwargs):
        callback = self.please
        callback(*args, **kwargs)
        return self.coderunner.refresh_and_get_value()
def test_simple():
    """Smoke test: run '1 + 1' to completion with a FakeOutput stdout."""
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr
    c = CodeRunner(stuff_a_refresh_request=lambda: orig_stdout.flush() or orig_stderr.flush())
    stdout = FakeOutput(c, orig_stdout.write)
    # NOTE(review): sys.stdout is replaced and never restored -- acceptable
    # for a manual smoke test, but confirm before reusing this pattern.
    sys.stdout = stdout
    c.load_code('1 + 1')
    c.run_code()
    c.run_code()
    c.run_code()
def test_exception():
    """Smoke test: a stdout whose write() raises KeyboardInterrupt."""
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr
    c = CodeRunner(stuff_a_refresh_request=lambda: orig_stdout.flush() or orig_stderr.flush())
    def ctrlc():
        raise KeyboardInterrupt()
    # Every write to stdout simulates a Ctrl-C arriving mid-output.
    stdout = FakeOutput(c, lambda x: ctrlc())
    sys.stdout = stdout
    c.load_code('1 + 1')
    c.run_code()
if __name__ == '__main__':
    # Manual smoke-test entry point (test_exception is not run by default).
    test_simple()
| thomasballinger/old-bpython-with-hy-support | bpython/curtsiesfrontend/coderunner.py | Python | mit | 4,839 |
"""
Automatically generate marking helpers functions.
"""
import sys
from .objects import Mark
class SimpleHelpers(object):
    """
    A class that is designed to act as a module and implement magic helper
    generation.
    """
    def __init__(self):
        self.__helpers = {}
    def make_helper(self, color_tag):
        """
        Make a simple helper.
        :param color_tag: The color tag to make a helper for.
        :returns: The helper function.
        """
        try:
            # Serve a previously generated helper when one exists.
            return self.__helpers[color_tag]
        except KeyError:
            pass
        def helper(obj):
            return Mark(obj=obj, color_tag=color_tag)
        helper.__name__ = color_tag
        helper.__doc__ = """
            Mark an object for coloration.
            The color tag is set to {color_tag!r}.
            :param obj: The object to mark for coloration.
            :returns: A :class:`Mark<chromalog.mark.objects.Mark>` instance.
            >>> from chromalog.mark.helpers.simple import {color_tag}
            >>> {color_tag}(42).color_tag
            ['{color_tag}']
            """.format(color_tag=color_tag)
        self.__helpers[color_tag] = helper
        return helper
    def __getattr__(self, name):
        """
        Get a magic helper.
        :param name: The name of the helper to get.
        >>> SimpleHelpers().alpha(42).color_tag
        ['alpha']
        >>> getattr(SimpleHelpers(), '_incorrect', None)
        """
        # Underscore-prefixed names are never treated as color tags, so
        # copy/pickle/introspection machinery behaves normally.
        if name.startswith('_'):
            raise AttributeError(name)
        return self.make_helper(color_tag=name)
class ConditionalHelpers(object):
    """
    A class that is designed to act as a module and implement magic helper
    generation.
    """
    def __init__(self):
        self.__helpers = {}
    def make_helper(self, color_tag_true, color_tag_false):
        """
        Make a conditional helper.
        :param color_tag_true: The color tag if the condition is met.
        :param color_tag_false: The color tag if the condition is not met.
        :returns: The helper function.
        """
        # Cache fix: helpers were previously stored under the key
        # ((true, false),) -- a 1-tuple created by a stray trailing comma in
        # the subscript -- while lookups used (true, false). The keys never
        # matched, so every call rebuilt the helper. Both sides now use the
        # plain pair, making repeated lookups return the same function.
        key = (color_tag_true, color_tag_false)
        helper = self.__helpers.get(key)
        if not helper:
            def helper(obj, condition=None):
                if condition is None:
                    condition = obj
                return Mark(
                    obj=obj,
                    color_tag=color_tag_true if condition else color_tag_false,
                )
            helper.__name__ = '_or_'.join((color_tag_true, color_tag_false))
            helper.__doc__ = """
            Convenience helper method that marks an object with the
            {color_tag_true!r} color tag if `condition` is truthy, and with the
            {color_tag_false!r} color tag otherwise.
            :param obj: The object to mark for coloration.
            :param condition: The condition to verify. If `condition` is
                :const:`None`, the `obj` is evaluated instead.
            :returns: A :class:`Mark<chromalog.mark.objects.Mark>` instance.
            >>> from chromalog.mark.helpers.conditional import {name}
            >>> {name}(42, True).color_tag
            ['{color_tag_true}']
            >>> {name}(42, False).color_tag
            ['{color_tag_false}']
            >>> {name}(42).color_tag
            ['{color_tag_true}']
            >>> {name}(0).color_tag
            ['{color_tag_false}']
            """.format(
                name=helper.__name__,
                color_tag_true=color_tag_true,
                color_tag_false=color_tag_false,
            )
            self.__helpers[key] = helper
        return helper
    def __getattr__(self, name):
        """
        Get a magic helper.
        :param name: The name of the helper to get. Must be of the form
            'a_or_b' where `a` and `b` are color tags.
        >>> ConditionalHelpers().alpha_or_beta(42, True).color_tag
        ['alpha']
        >>> ConditionalHelpers().alpha_or_beta(42, False).color_tag
        ['beta']
        >>> ConditionalHelpers().alpha_or_beta(42).color_tag
        ['alpha']
        >>> ConditionalHelpers().alpha_or_beta(0).color_tag
        ['beta']
        >>> getattr(ConditionalHelpers(), 'alpha_beta', None)
        >>> getattr(ConditionalHelpers(), '_incorrect', None)
        """
        if name.startswith('_'):
            raise AttributeError(name)
        try:
            color_tag_true, color_tag_false = name.split('_or_')
        except ValueError:
            # Names without exactly one '_or_' separator are not helpers.
            raise AttributeError(name)
        return self.make_helper(
            color_tag_true=color_tag_true,
            color_tag_false=color_tag_false,
        )
# Instantiate the pseudo-modules and register them in sys.modules so that
# imports such as ``from chromalog.mark.helpers.simple import red`` resolve
# to these dynamic helper factories.
simple = SimpleHelpers()
simple.__doc__ = """
Pseudo-module that generates simple helpers.
See :class:`SimpleHelpers<chromalog.mark.helpers.SimpleHelpers>`.
"""
conditional = ConditionalHelpers()
conditional.__doc__ = """
Pseudo-module that generates conditional helpers.
See :class:`ConditionalHelpers<chromalog.mark.helpers.ConditionalHelpers>`.
"""
sys.modules['.'.join([__name__, 'simple'])] = simple
sys.modules['.'.join([__name__, 'conditional'])] = conditional
| freelan-developers/chromalog | chromalog/mark/helpers.py | Python | mit | 5,224 |
# -*- coding: utf-8 -*-
import os
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
# Initialize Base class
Base = declarative_base()
metadata = Base.metadata
# Session factory; `session` is a thread-local (scoped) session registry.
session_maker = sessionmaker()
session = scoped_session(session_maker)
# NOTE(review): the engine is created at import time and requires the
# DATABASE_URI environment variable to be set (KeyError is raised otherwise).
engine = create_engine(os.environ['DATABASE_URI'])
session.configure(bind=engine)
def database_setup(engine):
    '''Set up the database.
    Creates all tables registered on the shared metadata for this engine.
    '''
    metadata.create_all(engine)
def database_teardown(engine):
    '''BURN IT ALL DOWN (╯°□°)╯︵ ┻━┻
    Drops every table registered on the shared metadata for this engine.
    '''
    metadata.drop_all(engine)
def _action_and_commit(obj, action):
    '''Adds/deletes the instance obj to/from the session based on the action.
    :param obj: mapped instance to stage.
    :param action: callable such as ``session.add`` or ``session.delete``.
    '''
    action(obj)
    session.commit()
class GhostBase(Base):
    '''The GhostBase class extends the declarative Base class.
    Provides get-or-create / upsert / delete helpers bound to the module's
    scoped session.
    '''
    __abstract__ = True
    def __str__(self, attrs):
        # NOTE(review): non-standard __str__ signature -- it requires an
        # ``attrs`` argument, so plain ``str(instance)`` raises TypeError.
        # It appears intended to be called explicitly by subclasses with the
        # attribute names to render; confirm before changing.
        return '''<{0}({1})>'''.format(self.__class__.__name__, ', '.join([
            '='.join([attr, str(getattr(self, attr, ''))]) for attr in attrs]))
    @classmethod
    def _get_instance(cls, **kwargs):
        '''Returns the first instance of cls with attributes matching **kwargs.
        '''
        return session.query(cls).filter_by(**kwargs).first()
    @classmethod
    def get_or_create(cls, **kwargs):
        '''
        If a record matching the instance already exists in the database, then
        return it, otherwise create a new record.
        '''
        q = cls._get_instance(**kwargs)
        if q:
            return q
        q = cls(**kwargs)
        _action_and_commit(q, session.add)
        return q
    # TODO (jsa): _traverse_report only needs to return the ID for an update
    @classmethod
    def update(cls, **kwargs):
        '''
        If a record matching the instance id already exists in the database,
        update it. If a record matching the instance id does not already exist,
        create a new record.
        '''
        # Look up strictly by primary key; the remaining kwargs are applied
        # as attribute updates on the found record.
        q = cls._get_instance(**{'id': kwargs['id']})
        if q:
            for k, v in kwargs.items():
                setattr(q, k, v)
            _action_and_commit(q, session.add)
        else:
            cls.get_or_create(**kwargs)
    @classmethod
    def delete(cls, **kwargs):
        '''
        If a record matching the instance id exists in the database, delete it.
        '''
        # Matches on all provided kwargs, not only the id.
        q = cls._get_instance(**kwargs)
        if q:
            _action_and_commit(q, session.delete)
class ResponseClassLegacyAccessor(object):
    """Populates one column of a response model from a legacy response payload.

    Bundles the response model class, the name of the column to fill, and
    an accessor callable that extracts the column value from the raw
    legacy response object.
    """
    def __init__(self, response_class, column, accessor):
        self.response_class = response_class  # model class to populate
        self.column = column                  # attribute name to fill
        self.accessor = accessor              # callable: raw response -> value
    def _get_instance(self, **kwargs):
        '''Return the first existing instance of the response record.
        '''
        return session.query(self.response_class).filter_by(**kwargs).first()
    def get_or_create_from_legacy_response(self, response, **kwargs):
        '''
        Fetch or create the response record, then fill its column from the
        legacy *response* if the column is not already populated.
        '''
        # get_or_create is a classmethod: call it on the class directly.
        # The original built a throwaway transient instance
        # (self.response_class(**kwargs)) just to reach the classmethod.
        response_cls = self.response_class.get_or_create(**kwargs)
        if not getattr(response_cls, self.column):
            setattr(response_cls, self.column, self.accessor(response))
            _action_and_commit(response_cls, session.add)
    def update(self, response, **kwargs):
        '''
        If a record matching the instance already exists in the database, update
        it, else create a new record.
        '''
        response_cls = self._get_instance(**kwargs)
        if response_cls:
            setattr(response_cls, self.column, self.accessor(response))
            _action_and_commit(response_cls, session.add)
        else:
            self.get_or_create_from_legacy_response(response, **kwargs)
    def delete(self, response, **kwargs):
        '''
        If a record matching the instance id exists in the database, delete it.
        *response* is unused; kept for signature symmetry with update().
        '''
        response_cls = self._get_instance(**kwargs)
        if response_cls:
            _action_and_commit(response_cls, session.delete)
class LocationResponseClassLegacyAccessor(ResponseClassLegacyAccessor):
    """Accessor for location responses, which populate a venue column in
    addition to the regular response column."""
    def __init__(
            self, response_class, column,
            accessor, venue_column, venue_accessor):
        super(LocationResponseClassLegacyAccessor, self).__init__(
            response_class, column, accessor)
        self.venue_column = venue_column      # second attribute to fill
        self.venue_accessor = venue_accessor  # callable: raw response -> venue
    def get_or_create_from_legacy_response(self, response, **kwargs):
        '''
        Fetch or create the response record, then fill both the column and
        the venue column from the legacy *response* where not already set.
        '''
        # Call the classmethod on the class itself instead of constructing
        # a throwaway transient instance first (original behavior).
        response_cls = self.response_class.get_or_create(**kwargs)
        if not getattr(response_cls, self.column):
            setattr(response_cls, self.column, self.accessor(response))
            _action_and_commit(response_cls, session.add)
        if not getattr(response_cls, self.venue_column):
            setattr(
                response_cls, self.venue_column, self.venue_accessor(response))
            _action_and_commit(response_cls, session.add)
    def update(self, response, **kwargs):
        '''
        If a record matching the instance already exists in the database, update
        both the column and venue column attributes, else create a new record.
        '''
        response_cls = super(
            LocationResponseClassLegacyAccessor, self)._get_instance(**kwargs)
        if response_cls:
            setattr(response_cls, self.column, self.accessor(response))
            setattr(
                response_cls, self.venue_column, self.venue_accessor(response))
            _action_and_commit(response_cls, session.add)
        else:
            self.get_or_create_from_legacy_response(response, **kwargs)
| thejunglejane/datums | datums/models/base.py | Python | mit | 5,846 |
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Contact(models.Model):
    # Contact option shown on the site; labels are Swedish ("namn" = name,
    # "e-post" = e-mail).
    name = models.CharField(max_length=255, verbose_name=_("namn"))
    email = models.EmailField(verbose_name=_("e-post"))
    def __str__(self):
        return self.name
    class Meta:
        # NOTE(review): singular and plural verbose names are identical —
        # confirm this is intentional.
        verbose_name = _("Kontaktval")
        verbose_name_plural = _("Kontaktval")
| andersonjonathan/Navitas | navitas/contact/models.py | Python | mit | 391 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-16 21:51
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.10.1: creates the EmailValidationToken
    # table (one-shot token with expiry, linked to a user).  Avoid editing
    # by hand once this migration has been applied anywhere.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('user', '0002_profile_validated'),
    ]
    operations = [
        migrations.CreateModel(
            name='EmailValidationToken',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('token', models.CharField(max_length=100, unique=True)),
                ('expire', models.DateTimeField()),
                ('consumed', models.BooleanField(default=False)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| ava-project/ava-website | website/apps/user/migrations/0003_emailvalidationtoken.py | Python | mit | 973 |
#!/usr/local/bin/python3
# Minimal CGI handler serving a static article page about Python.
import cgi  # NOTE(review): imported but never used in this script
# The blank line that CGI requires between headers and body is produced by
# print's trailing newline plus the leading newline of the triple-quoted
# string below.
print("Content-type: text/html")
print('''
<!DOCTYPE html>
<html>
<head>
<title>Python</title>
</head>
<body>
<h1>Python</h1>
<p>Python</p>
<p>This is the article for Python</p>
</body>
</html>
''')
| Secretmapper/updevcamp-session-2-dist | form/cgi-bin/lectures/simple/python.py | Python | mit | 272 |
import sys
import math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
# Read one line of space-separated integers from stdin.
numbers = [int(x) for x in input().split()]
# Count non-negative values (u) and negative values (d).
# NOTE(review): d is computed but never consulted below.
u = 0
d = 0
for x in numbers:
    if x >= 0:
        u += 1
    else:
        d += 1
# If exactly one value is non-negative it is the outlier; otherwise the
# outlier is assumed to be among the negatives.
if u == 1:
    for x in numbers:
        if x >= 0:
            print(x)
else:
    for x in numbers:
        if x < 0:
            print(x)
# Write an action using print
# To debug: print("Debug messages...", file=sys.stderr)
| NendoTaka/CodeForReference | Codingame/Python/Clash/intNotLikeOthers.py | Python | mit | 494 |
"""
Implement atoi() in Python (given a string, return a number).
Assume all the strings are always valid.
"""
import unittest
def atoi(string):
    """Convert a string of decimal digits to an int.

    Walks the string left to right, weighting each digit by its positional
    power of ten.
    """
    total = 0
    place = 10 ** (len(string) - 1)
    for ch in string:
        total += place * int(ch)
        # Integer division: the original "v /= 10" is true division under
        # Python 3, which silently turns the accumulator into a float.
        place //= 10
    return total
def atoi2(string):
    """Convert a string of decimal digits to an int (enumerate variant)."""
    total = 0
    highest = len(string) - 1
    for position, digit in enumerate(string):
        total += int(digit) * (10 ** (highest - position))
    return total
def atoi3(string):
    """Convert a string of decimal digits to an int.

    Generator-expression variant; avoids materializing the intermediate
    list inside sum() that the original built.
    """
    last = len(string) - 1
    return sum(int(ch) * (10 ** (last - idx)) for idx, ch in enumerate(string))
class AtoITest(unittest.TestCase):
    """Exercise the three atoi variants against the same known inputs."""
    CASES = (
        ("12345", 12345),
        ("1234", 1234),
        ("123", 123),
        ("12", 12),
        ("1", 1),
        ("0", 0),
    )
    def test_atoi(self):
        for text, expected in self.CASES:
            self.assertEqual(expected, atoi(text))
    def test_atoi2(self):
        for text, expected in self.CASES:
            self.assertEqual(expected, atoi2(text))
    def test_atoi3(self):
        for text, expected in self.CASES:
            self.assertEqual(expected, atoi3(text))
| kratorius/ads | python/interviewquestions/atoi.py | Python | mit | 1,456 |
__author__ = 'g10k'
"""Потом убрать в миграции"""
# (Translation of the note above: "move this into migrations later".)
# Commented-out reference list of department choice tuples:
# ('Регистраторы','Регистраторы'),
# ('Менеджеры','Менеджеры'),
# ('Проф департамент','Проф департамент'),
# ('Аналитика','Аналитика'),
# ('Контроль качества','Контроль качества'),
import kb.models
# Mapping of department name -> the apps its members may access and the UI
# colour assigned to the department.  Names are Russian ('Регистраторы' =
# registrars, 'Менеджеры' = managers, 'Проф департамент' = professional
# department, 'Аналитики' = analysts, 'Контроль качества' = quality control).
departaments = {
    'Регистраторы': {
        'apps': ['lmk', 'prof'],
        'color': 'blue',
    },
    'Менеджеры': {
        'apps': ['crm', 'out'],
        'color': 'green'
    },
    'Проф департамент': {
        'apps': ['prof', 'crm', 'out'],
        'color': 'purple',
    },
    'Аналитики': {
        'apps': ['analytic', 'crm', 'lmk', 'prof'],
        'color': 'red'
    },
    'Контроль качества': {
        'apps': ['qq'],
        'color': 'pink'
    }
}
def fill_departaments():
    """Sync the Departament and App tables with the `departaments` mapping.

    Ensures an App row exists for every app named in the mapping, then
    creates/updates each Departament with its app set and colour.
    """
    for department_name, info in departaments.items():
        apps = info.get('apps', [])
        color = info.get('color')
        # Bug fix: the original iterated a `set_apps` set that was never
        # populated, so App rows were never created.  Create them from the
        # department's own app list instead.
        for app in apps:
            kb.models.App.objects.get_or_create(name=app)
        departament, created = kb.models.Departament.objects.get_or_create(name=department_name)
        app_objects = kb.models.App.objects.filter(name__in=apps)
        departament.apps.set(app_objects, clear=True)
        departament.color = color
        departament.save()
| telminov/knowledge-base | kb/data.py | Python | mit | 1,498 |
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import time
import Adafruit_GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
# Constants
# NOTE(review): the first group below mirrors SSD1306-style command codes;
# several names (e.g. SSD1351_NORMALDISPLAY, SSD1351_DISPLAYALLON,
# SSD1351_DISPLAYOFF/ON, SSD1351_SEGREMAP) are re-assigned with the same
# values in the "SSD1351 Commands" group further down — the later
# assignment wins at import time.
SSD1351_I2C_ADDRESS = 0x3C # 011110+SA0+RW - 0x3C or 0x3D
SSD1351_SETCONTRAST = 0x81
SSD1351_DISPLAYALLON_RESUME = 0xA4
SSD1351_DISPLAYALLON = 0xA5
SSD1351_NORMALDISPLAY = 0xA6
SSD1351_INVERTDISPLAY = 0xA7
SSD1351_DISPLAYOFF = 0xAE
SSD1351_DISPLAYON = 0xAF
SSD1351_SETDISPLAYOFFSET = 0xD3
SSD1351_SETCOMPINS = 0xDA
SSD1351_SETVCOMDETECT = 0xDB
SSD1351_SETDISPLAYCLOCKDIV = 0xD5
SSD1351_SETPRECHARGE = 0xD9
SSD1351_SETMULTIPLEX = 0xA8
SSD1351_SETLOWCOLUMN = 0x00
SSD1351_SETHIGHCOLUMN = 0x10
SSD1351_SETSTARTLINE = 0x40
SSD1351_MEMORYMODE = 0x20
SSD1351_COLUMNADDR = 0x21
SSD1351_PAGEADDR = 0x22
SSD1351_COMSCANINC = 0xC0
SSD1351_COMSCANDEC = 0xC8
SSD1351_SEGREMAP = 0xA0
SSD1351_CHARGEPUMP = 0x8D
SSD1351_EXTERNALVCC = 0x1
SSD1351_SWITCHCAPVCC = 0x2
# Scrolling constants
SSD1351_ACTIVATE_SCROLL = 0x2F
SSD1351_DEACTIVATE_SCROLL = 0x2E
SSD1351_SET_VERTICAL_SCROLL_AREA = 0xA3
SSD1351_RIGHT_HORIZONTAL_SCROLL = 0x26
SSD1351_LEFT_HORIZONTAL_SCROLL = 0x27
SSD1351_VERTICAL_AND_RIGHT_HORIZONTAL_SCROLL = 0x29
SSD1351_VERTICAL_AND_LEFT_HORIZONTAL_SCROLL = 0x2A
#? SSD1351_DELAYS_HWFILL (3)
#? SSD1351_DELAYS_HWLINE (1)
# SSD1351 Commands
SSD1351_SETCOLUMN = 0x15
SSD1351_SETROW = 0x75
SSD1351_WRITERAM = 0x5C
SSD1351_READRAM = 0x5D
SSD1351_SETREMAP = 0xA0
SSD1351_STARTLINE = 0xA1
SSD1351_DISPLAYOFFSET = 0xA2
SSD1351_DISPLAYALLOFF = 0xA4
SSD1351_DISPLAYALLON = 0xA5
SSD1351_NORMALDISPLAY = 0xA6
SSD1351_INVERTDISPLAY = 0xA7
SSD1351_FUNCTIONSELECT = 0xAB
SSD1351_DISPLAYOFF = 0xAE
SSD1351_DISPLAYON = 0xAF
SSD1351_PRECHARGE = 0xB1
SSD1351_DISPLAYENHANCE = 0xB2
SSD1351_CLOCKDIV = 0xB3
SSD1351_SETVSL = 0xB4
SSD1351_SETGPIO = 0xB5
SSD1351_PRECHARGE2 = 0xB6
SSD1351_SETGRAY = 0xB8
SSD1351_USELUT = 0xB9
SSD1351_PRECHARGELEVEL = 0xBB
SSD1351_VCOMH = 0xBE
SSD1351_CONTRASTABC = 0xC1
SSD1351_CONTRASTMASTER = 0xC7
SSD1351_MUXRATIO = 0xCA
SSD1351_COMMANDLOCK = 0xFD
SSD1351_HORIZSCROLL = 0x96
SSD1351_STOPSCROLL = 0x9E
SSD1351_STARTSCROLL = 0x9F
class SSD1351Base(object):
    """Base class for SSD1351-based OLED displays. Implementors should subclass
    and provide an implementation for the _initialize function.
    """
    def __init__(self, width, height, rst, dc=None, sclk=None, din=None, cs=None,
                 gpio=None, spi=None, i2c_bus=None, i2c_address=SSD1351_I2C_ADDRESS,
                 i2c=None):
        # Transport selection priority: hardware SPI, software (bit-bang)
        # SPI, custom I2C provider, then the platform default I2C bus.
        self._log = logging.getLogger('Adafruit_SSD1351.SSD1351Base')
        self._spi = None
        self._i2c = None
        self.width = width
        self.height = height
        # NOTE(review): true division makes _pages a float under Python 3;
        # the attribute is not referenced elsewhere in this class.
        self._pages = height/8
        # One buffer entry per pixel; 16-bit colours are split into two
        # bytes at transmission time (see rawfill/roughimage).
        self._buffer = [0]*(width*height)
        # Default to platform GPIO if not provided.
        self._gpio = gpio
        if self._gpio is None:
            self._gpio = GPIO.get_platform_gpio()
        # Setup reset pin.
        self._rst = rst
        self._gpio.setup(self._rst, GPIO.OUT)
        # Handle hardware SPI
        if spi is not None:
            self._log.debug('Using hardware SPI')
            self._spi = spi
            self._spi.set_clock_hz(8000000)
        # Handle software SPI
        elif sclk is not None and din is not None and cs is not None:
            self._log.debug('Using software SPI')
            self._spi = SPI.BitBang(self._gpio, sclk, din, None, cs)
        # Handle hardware I2C
        elif i2c is not None:
            self._log.debug('Using hardware I2C with custom I2C provider.')
            self._i2c = i2c.get_i2c_device(i2c_address)
        else:
            self._log.debug('Using hardware I2C with platform I2C provider.')
            import Adafruit_GPIO.I2C as I2C
            if i2c_bus is None:
                self._i2c = I2C.get_i2c_device(i2c_address)
            else:
                self._i2c = I2C.get_i2c_device(i2c_address, busnum=i2c_bus)
        # Initialize DC pin if using SPI.
        if self._spi is not None:
            if dc is None:
                raise ValueError('DC pin must be provided when using SPI.')
            self._dc = dc
            self._gpio.setup(self._dc, GPIO.OUT)
    def _initialize(self):
        # Subclasses send their panel-specific init command sequence here.
        raise NotImplementedError
    def command(self, c):
        """Send command byte to display."""
        if self._spi is not None:
            # SPI write: D/C# low selects the command register.
            self._gpio.set_low(self._dc)
            self._spi.write([c])
        else:
            # I2C write.
            control = 0x00 # Co = 0, DC = 0
            self._i2c.write8(control, c)
    def data(self, c):
        """Send byte of data to display."""
        if self._spi is not None:
            # SPI write: D/C# high selects the data register.
            self._gpio.set_high(self._dc)
            self._spi.write([c])
        else:
            # I2C write; 0x40 control byte marks a data payload.
            control = 0x40 # Co = 0, DC = 0
            self._i2c.write8(control, c)
    def begin(self, vccstate=SSD1351_SWITCHCAPVCC):
        """Initialize display."""
        # Save vcc state.
        self._vccstate = vccstate
        # Reset and initialize display.
        self.reset()
        self._initialize()
        # Turn on the display.
        self.command(SSD1351_DISPLAYON)
    def reset(self):
        """Reset the display by pulsing the hardware reset line."""
        # Set reset high for a millisecond.
        self._gpio.set_high(self._rst)
        time.sleep(0.001)
        # Set reset low for 10 milliseconds.
        self._gpio.set_low(self._rst)
        time.sleep(0.010)
        # Set reset high again.
        self._gpio.set_high(self._rst)
    def display(self):
        """Write display buffer to physical display."""
        self.command(SSD1351_SETCOLUMN)
        self.data(0) # Column start address. (0 = reset)
        self.data(self.width-1) # Column end address.
        self.command(SSD1351_SETROW)
        self.data(0) # Page start address. (0 = reset)
        self.data(self.height-1) # Page end address.
        # Write buffer data.
        if self._spi is not None:
            # Set DC high for data.
            self._gpio.set_high(self._dc)
            # Write buffer.
            self.command(SSD1351_WRITERAM)
            self._spi.write(self._buffer)
        else:
            # NOTE(review): the I2C branch never issues SSD1351_WRITERAM
            # before streaming the buffer — confirm against real hardware.
            for i in range(0, len(self._buffer), 16):
                control = 0x40 # Co = 0, DC = 0
                self._i2c.writeList(control, self._buffer[i:i+16])
    def image(self, image):
        """Set buffer to value of Python Imaging Library image. The image should
        be in 1 bit mode and a size equal to the display size.

        NOTE(review): this packing loop is the monochrome SSD1306 page
        layout carried over from the SSD1306 driver; with ``page`` ranging
        over ``self.height`` the y index ``page*8+7-bit`` exceeds the image
        height for page >= height/8, so this likely raises IndexError on
        this colour display — prefer roughimage().  Confirm before use.
        """
        # if image.mode != '1':
        # raise ValueError('Image must be in mode 1.')
        imwidth, imheight = image.size
        if imwidth != self.width or imheight != self.height:
            raise ValueError('Image must be same dimensions as display ({0}x{1}).' \
                .format(self.width, self.height))
        # Grab all the pixels from the image, faster than getpixel.
        pix = image.load()
        # Iterate through the memory pages
        index = 0
        for page in range(self.height):
            # Iterate through all x axis columns.
            for x in range(self.width):
                # Set the bits for the column of pixels at the current position.
                bits = 0
                # Don't use range here as it's a bit slow
                for bit in [0, 1, 2, 3, 4, 5, 6, 7]:
                    bits = bits << 1
                    bits |= 0 if pix[(x, page*8+7-bit)] == 0 else 1
                # Update buffer byte and increment to next byte.
                self._buffer[index] = bits
                index += 1
    def clear(self):
        """Clear contents of image buffer."""
        self._buffer = [0]*(self.width*self.height)
    def set_contrast(self, contrast):
        """Sets the contrast of the display. Contrast should be a value between
        0 and 255."""
        if contrast < 0 or contrast > 255:
            raise ValueError('Contrast must be a value from 0 to 255 (inclusive).')
        self.command(SSD1351_CONTRASTMASTER)
        self.command(contrast)
    def dim(self, dim):
        """Adjusts contrast to dim the display if dim is True, otherwise sets the
        contrast to normal brightness if dim is False.

        NOTE(review): the computed contrast value is never sent to the
        display (no set_contrast call), so dim() is currently a no-op.
        """
        # Assume dim display.
        contrast = 0
        # Adjust contrast based on VCC if not dimming.
        if not dim:
            if self._vccstate == SSD1351_EXTERNALVCC:
                contrast = 0x9F
            else:
                contrast = 0xCF
    def invert(self):
        # NOTE(review): despite the name this always sends NORMALDISPLAY;
        # inverting would require SSD1351_INVERTDISPLAY.
        self.command(SSD1351_NORMALDISPLAY)
    def rawfill(self, x, y, w, h, fillcolor):
        # Fill a clipped w x h rectangle at (x, y) directly in display RAM
        # with a 16-bit colour (bypasses the local buffer).
        if (x >= self.width) or (y >= self.height):
            return
        if y+h > self.height:
            h = self.height-y-1
        if x+w > self.width:
            w = self.width-x-1
        self.command(SSD1351_SETCOLUMN)
        self.data(x)
        self.data(x+w-1)
        self.command(SSD1351_SETROW)
        self.data(y)
        self.data(y+h-1)
        #fill!
        self.command(SSD1351_WRITERAM)
        for num in range (0, w*h):
            # High byte first, then low byte of the 16-bit colour.
            self.data(fillcolor >> 8)
            self.data(fillcolor)
    def color565(self, r, g, b):
        # Pack 8-bit R, G, B into a 16-bit 5-6-5 colour value.
        c = r >> 3
        c <<= 6
        c |= g >> 2
        c <<= 5
        c |= b >> 3
        return c
    def roughimage(self, image):
        # Convert a PIL image to RGB and stream it pixel-by-pixel to the
        # display as 16-bit 565 colour (slow but correct for this panel).
        self.command(SSD1351_SETCOLUMN)
        self.data(0)
        self.data(self.width - 1)
        self.command(SSD1351_SETROW)
        self.data(0)
        self.data(self.height-1)
        #fill
        im_width, im_height = image.size
        print(im_width, im_height)
        rgb_image = image.convert('RGB')
        pix = rgb_image.load()
        self.command(SSD1351_WRITERAM)
        for row in range (0, im_height):
            for column in range (0, im_width):
                r,g,b = pix[column, row]
                color = self.color565(r,g,b)
                self.data( color >> 8)
                self.data( color )
class SSD1351_128_96(SSD1351Base):
    # Driver for the 128x96 variant of the SSD1351 OLED panel.
    def __init__(self, rst, dc=None, sclk=None, din=None, cs=None, gpio=None,
                 spi=None, i2c_bus=None, i2c_address=SSD1351_I2C_ADDRESS,
                 i2c=None):
        # Call base class constructor.
        super(SSD1351_128_96, self).__init__(128, 96, rst, dc, sclk, din, cs,
                                             gpio, spi, i2c_bus, i2c_address, i2c)
    def _initialize(self):
        # 128x96 pixel specific initialization.
        # My version
        # Order-dependent init sequence; values come from the SSD1351
        # datasheet/command set.
        # NOTE(review): some parameters below (the 0xF1 clock value and the
        # PRECHARGE/VCOMH arguments) are sent with command() where other
        # parameters use data() — confirm against the datasheet before
        # changing anything here.
        self.command(SSD1351_COMMANDLOCK) # set command lock
        self.data(0x12)
        self.command(SSD1351_COMMANDLOCK) # set command lock
        self.data(0xB1)
        self.command(SSD1351_DISPLAYOFF) # 0xAE
        self.command(SSD1351_CLOCKDIV) # 0xB3
        self.command(0xF1) # 7:4 = Oscillator Frequency, 3:0 = CLK Div Ratio (A[3:0]+1 = 1..16)
        self.command(SSD1351_MUXRATIO)
        self.data(127)
        self.command(SSD1351_SETREMAP)
        self.data(0x74)
        self.command(SSD1351_SETCOLUMN)
        self.data(0x00)
        self.data(0x7F)
        self.command(SSD1351_SETROW)
        self.data(0x00)
        self.data(0x7F)
        self.command(SSD1351_STARTLINE) # 0xA1
        self.data(96)
        self.command(SSD1351_DISPLAYOFFSET) # 0xA2
        self.data(0x0)
        self.command(SSD1351_SETGPIO)
        self.data(0x00)
        self.command(SSD1351_FUNCTIONSELECT)
        self.data(0x01) #internal (diode drop)
        self.command(SSD1351_PRECHARGE) # 0xB1
        self.command(0x32)
        self.command(SSD1351_VCOMH) # 0xBE
        self.command(0x05)
        self.command(SSD1351_NORMALDISPLAY) # 0xA6
        self.command(SSD1351_CONTRASTABC)
        self.data(0xC8)
        self.data(0x80)
        self.data(0xC8)
        self.command(SSD1351_CONTRASTMASTER)
        self.data(0x0F)
        self.command(SSD1351_SETVSL)
        self.data(0xA0)
        self.data(0xB5)
        self.data(0x55)
        self.command(SSD1351_PRECHARGE2)
        self.data(0x01)
| twchad/Adafruit_Python_SSD1351 | Adafruit_SSD1351/SSD1351.py | Python | mit | 11,355 |
class Solution(object):
    def reverseBits(self, n):
        """
        Reverse the bits of a 32-bit unsigned integer.

        :type n: int
        :rtype: int
        """
        result = 0
        for _ in range(32):
            # Shift the answer left and append n's current lowest bit.
            result = (result << 1) | (n & 1)
            # Right shift instead of the original "n /= 2", which is float
            # division under Python 3 and corrupts the result.
            n >>= 1
        return result
"""Configuration for pytest."""
import json
def pytest_generate_tests(metafunc):
    """Configure pytest to call each of the tests once for each test case.

    Any test declaring a ``test_case`` fixture is parametrized with every
    entry of the ``tests`` array in ``tests/test_data.json``.
    """
    if "test_case" in metafunc.fixturenames:
        # Use a context manager so the file handle is closed promptly;
        # the original left it open until garbage collection.
        with open("tests/test_data.json") as data_file:
            tests = json.load(data_file)["tests"]
        metafunc.parametrize("test_case", tests)
| rowanphipps/Cerberus | tests/conftest.py | Python | mit | 320 |
#! /usr/bin/env python
#
# IRC2LCD
# Tim Ballas
#
"""IRC bot to display mentions on an LCD through a Parallax Propeller.
Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor> <COMport> <optional bot nickname>
"""
#
# Modified from:
# Example program using irc.bot.
# Joel Rosdahl <joel@rosdahl.net>
#
import irc.bot
import irc.strings
from irc.client import ip_numstr_to_quad, ip_quad_to_numstr
import re
import serial
import time
class IRC2LCDbot(irc.bot.SingleServerIRCBot):
    # Python 2 IRC bot: watches public channel messages for MonitorNick and
    # forwards matching lines to an LCD via a Parallax Propeller on the
    # serial port.  Depends on module globals assigned in main(): BotNick,
    # MonitorNick and ser.
    def __init__(self, channel, nickname, server, port=6667):
        irc.bot.SingleServerIRCBot.__init__(self, [(server, port)], nickname, nickname)
        self.channel = channel
    def on_nicknameinuse(self, c, e):
        # Primary nick taken: fall back to the configured alternate nick.
        c.nick(BotNick)
    def on_welcome(self, c, e):
        c.join(self.channel)
    def on_pubmsg(self, c, e):
        pubmsgTemp = e.arguments[0] # e.arguments[0] is the public message we are processing, loaded into "pubmsgTemp"
        pattern = re.compile(r'(.*{0}([|_][a-z0-9]+)?(\s|$).*|.*{1}([|_][a-z0-9]+)?:.*)'.format(MonitorNick,MonitorNick)) # Compile Regular Expression to check if the public message has our MonitorNick in it
        result = re.search(pattern, pubmsgTemp) # Execute Regular Expression
        if result: # Check to see if we matched our MonitorNick in the public message
            try: # Handle error when result has 'None' in it
                print result.group(1) # Print matched message to the console
                MatchedMessage = str(result.group(1)) # Load matched message into "MatchedMessage" variable. Enclosing it in "str()" is to return a nice printable string.
                ser.write("\r\t" + MatchedMessage) # Write "MatchedMessage" to LCD through Parallax Propeller over Serial connection. "\r\t" is command for Propeller to Clear LCD.
            # NOTE(review): bare except silently swallows serial errors too,
            # not just the 'None' case described above.
            except: # Needed to complete 'try:' statement
                pass # Do nothing and move on
def main():
    """Parse command-line arguments, open the serial port and run the bot.

    Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor>
           <COMport> <optional bot nickname>
    """
    import sys
    if len(sys.argv) < 5:
        print("Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor> <COMport> <optional bot nickname>")
        sys.exit(1)
    s = sys.argv[1].split(":", 1)
    server = s[0]
    if len(s) == 2:
        try:
            port = int(s[1])
        except ValueError:
            print("Error: Erroneous port.")
            sys.exit(1)
    else:
        port = 6667
    channel = sys.argv[2]
    nickname = sys.argv[3]
    COMport = sys.argv[4]
    global BotNick # Declare global variable for "BotNick"
    if len(sys.argv) == 6: # If there is a argument defined for "BotNick"
        BotNick = sys.argv[5] # Set "BotNick" to Argument 5(sys.argv[5])
    else: # Else
        BotNick = nickname + "_" # Use nickname to monitor and an underscore
    global MonitorNick # Declare global variable for "MonitorNick"
    MonitorNick = nickname # Set "MonitorNick" to nickname(sys.argv[3])
    global ser # Declare global variable for "ser"
    ser = serial.Serial(str(COMport),baudrate=9600) # Set "ser" to Serial object
    bot = IRC2LCDbot(channel, nickname, server, port) # Set "bot" to IRC2LCDbot object
    bot.start() # Start bot
    # NOTE(review): bot.start() blocks indefinitely, so this close only
    # runs if the bot loop exits.
    ser.close() # Closing Serial port will prevent problems
if __name__ == "__main__":
main()
| tballas/IRC2LCD | Python/IRC2LCD.py | Python | mit | 2,965 |
#!/usr/bin/env python3
"""
Rename and organize Horos QC exported data in <BIDS Root>/incoming and place in <BIDS Root>/sourcedata
AUTHOR
----
Mike Tyszka, Ph.D.
MIT License
Copyright (c) 2019 Mike Tyszka
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import os
import sys
from glob import glob
import argparse
from pathlib import Path
import pydicom
from shutil import rmtree
def main():
    """Move incoming Horos QC exports into sourcedata/QC/<acquisition date>."""
    parser = argparse.ArgumentParser(description='Fix subject and session directory naming in Horos output')
    parser.add_argument('-d', '--dataset', default='.',
                        help='BIDS dataset directory containing sourcedata subdirectory')
    # Parse command line arguments
    args = parser.parse_args()
    dataset_dir = os.path.realpath(args.dataset)
    incoming_dir = os.path.join(dataset_dir, 'incoming')
    sourcedata_dir = os.path.join(dataset_dir, 'sourcedata')
    qc_dir = os.path.join(sourcedata_dir, 'QC')
    # Create single QC subject
    print("Checking that QC subject exists in sourcedata")
    if os.path.isdir(qc_dir):
        print(' It does - continuning')
    else:
        print(' QC subject does not exist - creating QC subject in sourcedata')
        os.makedirs(qc_dir, exist_ok=True)
    # Loop over all Qc study directories in sourcedata
    # Expect subject/session directory names in the form "Qc_<session ID>_*/<session dir>/"
    # Move session subdirectories from Qc_*/<session dir> to Qc/<ScanDate>
    print("Scanning for incoming QC studies")
    for inc_qc_dir in glob(os.path.join(incoming_dir, 'Qc*')):
        print("")
        print(" Processing {}".format(inc_qc_dir))
        # There should be only one session subdirectory
        # NOTE(review): if several session dirs exist only the first is
        # moved; the rest are removed with the parent directory below.
        dlist = list(glob(os.path.join(inc_qc_dir, '*')))
        if len(dlist) > 0:
            ses_dir = dlist[0]
            # Get first DICOM file in ses_dir at any level
            # NOTE(review): raises IndexError if the session contains no
            # .dcm files — confirm whether that is acceptable.
            first_dcm = str(list(Path(ses_dir).rglob("*.dcm"))[0])
            # Get acquisition date from DICOM header
            acq_date = acquisition_date(first_dcm)
            # Destination session directory name in QC subject folder
            dest_dir = os.path.join(qc_dir, acq_date)
            # Move and rename session subdirectory
            print(' Moving %s to %s' % (ses_dir, dest_dir))
            os.rename(ses_dir, dest_dir)
        # Delete incoming Qc_* directory
        print(' Deleting %s' % inc_qc_dir)
        rmtree(inc_qc_dir)
def acquisition_date(dcm_fname):
    """
    Extract acquisition date from DICOM header
    :param dcm_fname: DICOM filename
    :return acq_date: str, acquisition date (YYYYMMDD)
    """
    # Default return date
    acq_date = '19010101'
    # NOTE(review): a missing file is only reported; the read below is
    # still attempted (force=True) and will raise on failure.
    if not os.path.isfile(dcm_fname):
        print('* File not found - %s' % dcm_fname)
    try:
        ds = pydicom.read_file(dcm_fname, force=True)
    except IOError:
        print("* Problem opening %s" % dcm_fname)
        raise
    except AttributeError:
        print("* Problem opening %s" % dcm_fname)
        raise
    # NOTE(review): an empty pydicom Dataset is falsy, so this guards an
    # empty read; AcquisitionDate access may still raise AttributeError if
    # the tag is absent from the header — confirm desired behavior.
    if ds:
        acq_date = ds.AcquisitionDate
    else:
        print('* DICOM header problem - returning %s' % acq_date)
    return acq_date
# Standard entry-point guard.  The original tested `'main' in __name__`,
# a substring check that would also fire if this module were imported
# under any name containing "main".
if __name__ == '__main__':
    main()
# -*- coding: utf-8; -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2014 Flavien Charlon
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Provides the infrastructure for calculating the asset ID and asset quantity of Bitcoin outputs,
according to the Open Assets Protocol.
"""
import asyncio
import bitcoin.core
import bitcoin.core.script
import enum
import hashlib
import io
class ColoringEngine(object):
"""The backtracking engine used to find the asset ID and asset quantity of any output."""
def __init__(self, transaction_provider, cache, event_loop):
"""
Constructs an instance of the ColorEngine class.
:param bytes -> Future[CTransaction] transaction_provider: A function returning a transaction given its hash.
:param OutputCache cache: The cache object to use.
:param BaseEventLoop | None event_loop: The event loop used to schedule asynchronous tasks.
"""
self._transaction_provider = transaction_provider
self._cache = cache
self._loop = event_loop
@asyncio.coroutine
def get_output(self, transaction_hash, output_index):
"""
Gets an output and information about its asset ID and asset quantity.
:param bytes transaction_hash: The hash of the transaction containing the output.
:param int output_index: The index of the output.
:return: An object containing the output as well as its asset ID and asset quantity.
:rtype: Future[TransactionOutput]
"""
cached_output = yield from self._cache.get(transaction_hash, output_index)
if cached_output is not None:
return cached_output
transaction = yield from self._transaction_provider(transaction_hash)
if transaction is None:
raise ValueError('Transaction {0} could not be retrieved'.format(bitcoin.core.b2lx(transaction_hash)))
colored_outputs = yield from self.color_transaction(transaction)
for index, output in enumerate(colored_outputs):
yield from self._cache.put(transaction_hash, index, output)
return colored_outputs[output_index]
@asyncio.coroutine
def color_transaction(self, transaction):
"""
Computes the asset ID and asset quantity of every output in the transaction.
:param CTransaction transaction: The transaction to color.
:return: A list containing all the colored outputs of the transaction.
:rtype: Future[list[TransactionOutput]]
"""
# If the transaction is a coinbase transaction, the marker output is always invalid
if not transaction.is_coinbase():
for i, output in enumerate(transaction.vout):
# Parse the OP_RETURN script
marker_output_payload = MarkerOutput.parse_script(output.scriptPubKey)
if marker_output_payload is not None:
# Deserialize the payload as a marker output
marker_output = MarkerOutput.deserialize_payload(marker_output_payload)
if marker_output is not None:
# Fetch the colored outputs for previous transactions
inputs = []
for input in transaction.vin:
inputs.append((yield from asyncio.async(
self.get_output(input.prevout.hash, input.prevout.n), loop=self._loop)))
asset_ids = self._compute_asset_ids(
inputs,
i,
transaction.vout,
marker_output.asset_quantities)
if asset_ids is not None:
return asset_ids
# If no valid marker output was found in the transaction, all outputs are considered uncolored
return [
TransactionOutput(output.nValue, output.scriptPubKey, None, 0, OutputType.uncolored)
for output in transaction.vout]
@classmethod
def _compute_asset_ids(cls, inputs, marker_output_index, outputs, asset_quantities):
    """
    Computes the asset IDs of every output in a transaction.
    :param list[TransactionOutput] inputs: The outputs referenced by the inputs of the transaction.
    :param int marker_output_index: The position of the marker output in the transaction.
    :param list[CTxOut] outputs: The outputs of the transaction.
    :param list[int] asset_quantities: The list of asset quantities of the outputs.
    :return: A list of outputs with asset ID and asset quantity information, or None
        if the marker output is invalid.
    :rtype: list[TransactionOutput]
    """
    # If there are more items in the asset quantities list than outputs in the transaction (excluding the
    # marker output), the marker output is deemed invalid
    if len(asset_quantities) > len(outputs) - 1:
        return None
    # If there is no input in the transaction, the marker output is always invalid
    if len(inputs) == 0:
        return None
    result = []
    # Add the issuance outputs: every output *before* the marker output issues the
    # asset whose ID is derived from the script of the first input.
    issuance_asset_id = cls.hash_script(bytes(inputs[0].script))
    for i in range(0, marker_output_index):
        value, script = outputs[i].nValue, outputs[i].scriptPubKey
        if i < len(asset_quantities) and asset_quantities[i] > 0:
            output = TransactionOutput(value, script, issuance_asset_id, asset_quantities[i], OutputType.issuance)
        else:
            # Zero (or missing) quantity: the output is an uncolored issuance slot
            output = TransactionOutput(value, script, None, 0, OutputType.issuance)
        result.append(output)
    # Add the marker output (never carries an asset itself)
    issuance_output = outputs[marker_output_index]
    result.append(TransactionOutput(
        issuance_output.nValue, issuance_output.scriptPubKey, None, 0, OutputType.marker_output))
    # Add the transfer outputs: asset units flow from the inputs, in order,
    # into the outputs after the marker output.
    input_iterator = iter(inputs)
    input_units_left = 0
    for i in range(marker_output_index + 1, len(outputs)):
        # The quantity list skips the marker output, hence the i - 1 offset
        if i <= len(asset_quantities):
            output_asset_quantity = asset_quantities[i - 1]
        else:
            output_asset_quantity = 0
        output_units_left = output_asset_quantity
        asset_id = None
        while output_units_left > 0:
            # Move to the next input if the current one is depleted
            if input_units_left == 0:
                current_input = next(input_iterator, None)
                if current_input is None:
                    # There are less asset units available in the input than in the outputs:
                    # the marker output is considered invalid
                    return None
                else:
                    input_units_left = current_input.asset_quantity
            # If the current input is colored, assign its asset ID to the current output
            if current_input.asset_id is not None:
                progress = min(input_units_left, output_units_left)
                output_units_left -= progress
                input_units_left -= progress
                if asset_id is None:
                    # This is the first input to map to this output
                    asset_id = current_input.asset_id
                elif asset_id != current_input.asset_id:
                    # Another different asset ID has already been assigned to that output:
                    # the marker output is considered invalid
                    return None
        result.append(TransactionOutput(
            outputs[i].nValue, outputs[i].scriptPubKey, asset_id, output_asset_quantity, OutputType.transfer))
    return result
@staticmethod
def hash_script(data):
    """
    Hashes a script into an asset ID using SHA256 followed by RIPEMD160.
    :param bytes data: The data to hash.
    :return: The 20-byte RIPEMD160(SHA256(data)) digest.
    :rtype: bytes
    """
    sha256 = hashlib.sha256()
    # NOTE(review): 'ripemd160' is supplied by OpenSSL; on OpenSSL 3.x it lives
    # in the legacy provider and hashlib.new() may raise if that is disabled.
    ripemd = hashlib.new('ripemd160')
    sha256.update(data)
    ripemd.update(sha256.digest())
    return ripemd.digest()
class OutputType(enum.Enum):
    """Classification of a transaction output under the Open Assets protocol."""
    uncolored = 0
    marker_output = 1
    issuance = 2
    transfer = 3
class TransactionOutput(object):
    """Immutable view of a transaction output enriched with Open Assets data."""

    def __init__(
            self,
            value=-1,
            script=bitcoin.core.script.CScript(),
            asset_id=None,
            asset_quantity=0,
            output_type=OutputType.uncolored):
        """
        Initializes a new instance of the TransactionOutput class.

        :param int value: The satoshi value of the output.
        :param CScript script: The script controlling redemption of the output.
        :param bytes | None asset_id: The asset ID of the output.
        :param int asset_quantity: The asset quantity of the output.
        :param OutputType output_type: The type of the output.
        """
        # Quantities outside the protocol range are programming errors.
        assert 0 <= asset_quantity <= MarkerOutput.MAX_ASSET_QUANTITY
        self._value = value
        self._script = script
        self._asset_id = asset_id
        self._asset_quantity = asset_quantity
        self._output_type = output_type

    @property
    def value(self):
        """
        Gets the number of satoshis in the output.
        :return: The value of the output in satoshis.
        :rtype: int
        """
        return self._value

    @property
    def script(self):
        """
        Gets the script of the output.
        :return: The output script.
        :rtype: CScript
        """
        return self._script

    @property
    def asset_id(self):
        """
        Gets the asset ID of the output.
        :return: The asset ID, or None when the output is uncolored.
        :rtype: bytes | None
        """
        return self._asset_id

    @property
    def asset_quantity(self):
        """
        Gets the asset quantity of the output.
        :return: The asset quantity (zero for an uncolored output).
        :rtype: int
        """
        return self._asset_quantity

    @property
    def output_type(self):
        """
        Gets the type of the output.
        :return: The type of the output.
        :rtype: OutputType
        """
        return self._output_type

    def __repr__(self):
        # Produces exactly the same text as the historical %-formatting version.
        return 'TransactionOutput(value={0!r}, script={1!r}, asset_id={2!r}, asset_quantity={3!r}, output_type={4!r})'.format(
            self.value, self.script, self.asset_id, self.asset_quantity, self.output_type)
class OutputCache(object):
    """Represents the interface for an object capable of storing the result of output coloring.

    This base implementation is a no-op cache; subclasses override get/put to
    persist colored outputs. Methods stay generator-based coroutines because
    callers drive them with ``yield from``.
    """
    @asyncio.coroutine
    def get(self, transaction_hash, output_index):
        """
        Returns a cached output.
        :param bytes transaction_hash: The hash of the transaction the output belongs to.
        :param int output_index: The index of the output in the transaction.
        :return: The output for the transaction hash and output index provided if it is found in the cache, or None
            otherwise.
        :rtype: TransactionOutput
        """
        # No-op cache: nothing is ever found.
        return None

    @asyncio.coroutine
    def put(self, transaction_hash, output_index, output):
        """
        Saves an output in cache.
        :param bytes transaction_hash: The hash of the transaction the output belongs to.
        :param int output_index: The index of the output in the transaction.
        :param TransactionOutput output: The output to save.
        """
        # No-op cache: the value is discarded.
        pass
class MarkerOutput(object):
    """Represents an Open Assets marker output."""

    # Largest representable asset quantity (63 unsigned bits); enforced at
    # deserialization time.
    MAX_ASSET_QUANTITY = 2 ** 63 - 1
    # Magic prefix of a marker payload: 'OA' followed by protocol version 1.0.
    OPEN_ASSETS_TAG = b'OA\x01\x00'

    def __init__(self, asset_quantities, metadata):
        """
        Initializes a new instance of the MarkerOutput class.
        :param list[int] asset_quantities: The list of asset quantities.
        :param bytes metadata: The metadata in the marker output.
        """
        self._asset_quantities = asset_quantities
        self._metadata = metadata

    @property
    def asset_quantities(self):
        """
        Gets the asset quantity list.
        :return: The asset quantity list of the output.
        :rtype: list[int]
        """
        return self._asset_quantities

    @property
    def metadata(self):
        """
        Gets the metadata contained in the marker output.
        :return: The metadata contained in the marker output.
        :rtype: bytes
        """
        return self._metadata

    @classmethod
    def deserialize_payload(cls, payload):
        """
        Deserializes the marker output payload.
        :param bytes payload: A buffer containing the marker output payload.
        :return: The marker output object, or None if the payload is malformed.
        :rtype: MarkerOutput
        """
        with io.BytesIO(payload) as stream:
            # The OAP marker and protocol version
            oa_version = stream.read(4)
            if oa_version != cls.OPEN_ASSETS_TAG:
                return None
            try:
                # Deserialize the expected number of items in the asset quantity list
                output_count = bitcoin.core.VarIntSerializer.stream_deserialize(stream)
                # LEB128-encoded unsigned integers representing the asset quantity of every output in order
                asset_quantities = []
                for i in range(0, output_count):
                    asset_quantity = cls.leb128_decode(stream)
                    # If the LEB128-encoded asset quantity of any output exceeds 9 bytes,
                    # the marker output is deemed invalid
                    if asset_quantity > cls.MAX_ASSET_QUANTITY:
                        return None
                    asset_quantities.append(asset_quantity)
                # The var-integer encoded length of the metadata field.
                metadata_length = bitcoin.core.VarIntSerializer.stream_deserialize(stream)
                # The actual metadata
                metadata = stream.read(metadata_length)
                # If the metadata string wasn't long enough, the marker output is malformed
                if len(metadata) != metadata_length:
                    return None
                # If there are bytes left to read, the marker output is malformed
                last_byte = stream.read(1)
                if len(last_byte) > 0:
                    return None
            except bitcoin.core.SerializationTruncationError:
                # The buffer ended in the middle of a field
                return None
            return MarkerOutput(asset_quantities, metadata)

    def serialize_payload(self):
        """
        Serializes the marker output data into a payload buffer.
        :return: The serialized payload.
        :rtype: bytes
        """
        with io.BytesIO() as stream:
            stream.write(self.OPEN_ASSETS_TAG)
            # Quantity count first, then each quantity LEB128-encoded
            bitcoin.core.VarIntSerializer.stream_serialize(len(self.asset_quantities), stream)
            for asset_quantity in self.asset_quantities:
                stream.write(self.leb128_encode(asset_quantity))
            # Metadata is length-prefixed with a Bitcoin var-integer
            bitcoin.core.VarIntSerializer.stream_serialize(len(self.metadata), stream)
            stream.write(self.metadata)
            return stream.getvalue()

    @staticmethod
    def parse_script(output_script):
        """
        Parses an output and returns the payload if the output matches the right pattern for a marker output,
        or None otherwise.
        :param CScript output_script: The output script to be parsed.
        :return: The marker output payload if the output fits the pattern, None otherwise.
        :rtype: bytes
        """
        script_iterator = output_script.raw_iter()
        try:
            first_opcode, _, _ = next(script_iterator, (None, None, None))
            _, data, _ = next(script_iterator, (None, None, None))
            remainder = next(script_iterator, None)
        except bitcoin.core.script.CScriptTruncatedPushDataError:
            return None
        except bitcoin.core.script.CScriptInvalidError:
            return None
        # A valid marker script is exactly OP_RETURN followed by one PUSHDATA
        if first_opcode == bitcoin.core.script.OP_RETURN and data is not None and remainder is None:
            return data
        else:
            return None

    @staticmethod
    def build_script(data):
        """
        Creates an output script containing an OP_RETURN and a PUSHDATA.
        :param bytes data: The content of the PUSHDATA.
        :return: The final script.
        :rtype: CScript
        """
        return bitcoin.core.script.CScript(
            bytes([bitcoin.core.script.OP_RETURN]) + bitcoin.core.script.CScriptOp.encode_op_pushdata(data))

    @staticmethod
    def leb128_decode(data):
        """
        Decodes a LEB128-encoded unsigned integer.
        :param BufferedIOBase data: The buffer containing the LEB128-encoded integer to decode.
        :return: The decoded integer.
        :rtype: int
        """
        result = 0
        shift = 0
        while True:
            character = data.read(1)
            if len(character) == 0:
                raise bitcoin.core.SerializationTruncationError('Invalid LEB128 integer')
            b = ord(character)
            # Each byte contributes its low 7 bits, least-significant group first
            result |= (b & 0x7f) << shift
            # The high bit is the continuation flag
            if b & 0x80 == 0:
                break
            shift += 7
        return result

    @staticmethod
    def leb128_encode(value):
        """
        Encodes an integer using LEB128.
        :param int value: The value to encode.
        :return: The LEB128-encoded integer.
        :rtype: bytes
        """
        if value == 0:
            return b'\x00'
        result = []
        while value != 0:
            byte = value & 0x7f
            value >>= 7
            # Set the continuation bit on every byte except the last
            if value != 0:
                byte |= 0x80
            result.append(byte)
        return bytes(result)

    def __repr__(self):
        return 'MarkerOutput(asset_quantities=%r, metadata=%r)' % (self.asset_quantities, self.metadata)
| OpenAssets/openassets | openassets/protocol.py | Python | mit | 19,047 |
#!/usr/bin/env python3
import os
import linecache
import tempfile
lorem = '''Lorem ipsum dolor sit amet, consectetuer
adipiscing elit. Vivamus eget elit. In posuere mi non
risus. Mauris id quam posuere lectus sollicitudin
varius. Praesent at mi. Nunc eu velit. Sed augue massa,
fermentum id, nonummy a, nonummy sit amet, ligula. Curabitur
eros pede, egestas at, ultricies ac, apellentesque eu,
tellus.
Sed sed odio sed mi luctus mollis. Integer et nulla ac augue
convallis accumsan. Ut felis. Donec lectus sapien, elementum
nec, condimentum ac, interdum non, tellus. Aenean viverra,
mauris vehicula semper porttitor, ipsum odio consectetuer
lorem, ac imperdiet eros odio a sapien. Nulla mauris tellus,
aliquam non, egestas a, nonummy et, erat. Vivamus sagittis
porttitor eros.'''
def makefile():
    """Create a temporary file pre-filled with the module-level lorem text.

    Returns the path to the new file. The caller is responsible for removing
    it (see cleanup()).

    :rtype: str
    """
    fd, temp_file_name = tempfile.mkstemp()
    # The raw descriptor is not needed: the file is reopened by name below.
    os.close(fd)
    # `with` replaces the original try/finally and avoids rebinding the
    # integer descriptor name to a file object.
    with open(temp_file_name, 'wt') as handle:
        handle.write(lorem)
    return temp_file_name
def cleanup(filename):
    """Delete the temporary file created by makefile()."""
    # os.remove is the documented synonym of os.unlink.
    os.remove(filename)
# Demonstrate linecache behaviors against a freshly created temp file.
# NOTE(review): the temp file is never removed here even though cleanup()
# is available above.
filename = makefile()
print('SOURCE:')
print('%r' % (lorem.split('\n')[4]))
print()
# linecache line numbers are 1-based, so line 5 corresponds to index 4 above.
print('CACHE:')
print('%r' % linecache.getline(filename, 5))
print()
print('BLANK: %r' % linecache.getline(filename, 8))
print()
# Requesting a line past the end of the file returns '' without raising.
not_there = linecache.getline(filename, 500)
print('NOT THERE: %r includes %d characters' % (not_there, len(not_there)))
# linecache can also locate importable module sources by bare filename.
module_line = linecache.getline('linecache.py', 3)
print('MODULE:')
print(repr(module_line))
file_src = linecache.__file__
# Point at the .py source when __file__ refers to the compiled .pyc.
if file_src.endswith('.pyc'):
    file_src = file_src[:-1]
print('\nFILE:')
with open(file_src, 'r') as f:
    file_line = f.readlines()[2]
print(file_line)
| eroicaleo/ThePythonStandardLibraryByExample | ch06TheFileSystem/linecache_test.py | Python | mit | 1,655 |
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    # Deferred import, injected into globals() so openapi_types() can refer to
    # the class by name. NOTE(review): presumably avoids circular imports
    # between generated plaid models at module-load time.
    from plaid.model.transfer_user_address_in_request import TransferUserAddressInRequest
    globals()['TransferUserAddressInRequest'] = TransferUserAddressInRequest
class TransferUserInRequest(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'legal_name': (str,),  # noqa: E501
            'phone_number': (str,),  # noqa: E501
            'email_address': (str,),  # noqa: E501
            'address': (TransferUserAddressInRequest,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    # Maps pythonic attribute names to their JSON keys (identical here).
    attribute_map = {
        'legal_name': 'legal_name',  # noqa: E501
        'phone_number': 'phone_number',  # noqa: E501
        'email_address': 'email_address',  # noqa: E501
        'address': 'address',  # noqa: E501
    }

    _composed_schemas = {}

    # Internal bookkeeping attributes that must never be treated as model data.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, legal_name, *args, **kwargs):  # noqa: E501
        """TransferUserInRequest - a model defined in OpenAPI

        Args:
            legal_name (str): The user's legal name.

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            phone_number (str): The user's phone number.. [optional]  # noqa: E501
            email_address (str): The user's email address.. [optional]  # noqa: E501
            address (TransferUserAddressInRequest): [optional]  # noqa: E501
        """

        # Pop framework-level options out of kwargs before treating the rest
        # as model attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Only `legal_name` may be passed positionally; anything else is an error.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.legal_name = legal_name
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
| plaid/plaid-python | plaid/model/transfer_user_in_request.py | Python | mit | 7,582 |
from django.db import models
class TimeStampedModel(models.Model):
    """Abstract base model adding creation and last-modification timestamps."""
    # auto_now_add: set once when the row is first inserted.
    created = models.DateTimeField(auto_now_add=True)
    # auto_now: refreshed on every save().
    modified = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True
class Caller(TimeStampedModel):
    """A caller and their progress through the question sequence.

    NOTE(review): question_num/start_num/end_num appear to track positions in
    the question sequence, and start_fresh whether to restart it -- confirm
    against the view code that updates them.
    """
    name = models.CharField(max_length=30)
    phone_number = models.CharField(max_length=15)
    question_num = models.IntegerField(default=0)
    start_num = models.IntegerField(default=0)
    end_num = models.IntegerField(default=0)
    start_fresh = models.BooleanField(default=True)
    # Text played/read at the start and end of a call session.
    intro_text = models.CharField(max_length=1600)
    outro_text = models.CharField(max_length=1600)

    def __str__(self):
        return self.name
class Question(TimeStampedModel):
    """A question with its sequence number and text."""
    num = models.IntegerField()
    question_text = models.CharField(max_length=1600)

    def __str__(self):
        # Truncate long text for readable admin/listing display.
        return self.question_text[:100] + '...'
class Choice(TimeStampedModel):
    """A selectable answer option belonging to a Question."""
    # on_delete is required from Django 2.0 onward; CASCADE matches the
    # implicit default used by earlier Django versions, so behavior is
    # unchanged: deleting a Question deletes its choices.
    question = models.ForeignKey(Question, related_name='choices',
                                 on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=1600)
    num = models.IntegerField()

    def __str__(self):
        # Truncate long text for readable admin/listing display.
        return self.choice_text[:100] + '...'
| benjamin0/how-do-you-like-them-apples | howdoyoulike/calls/models.py | Python | mit | 1,136 |
{
"register": {
"email": "email address already exists",
"password": "password's length must be longer than six characters",
"username": "{ username } already exists"
},
"login": {
"email": "email address does not exist",
"password": "password is not correct"
}
}
| free-free/pyblog | pyblog/locale/english/message.py | Python | mit | 313 |
# Reduction pipeline for the WASP-94 night of 2014-08-01.
# NOTE(review): `i` (instrument) and `t` (target) come from the star-import
# below -- confirm their definitions in wasp94_base.
from wasp94_base import *

# create a night to analyze
from mosasaurus.Night import Night
n = Night('ut140801', instrument=i)
n.createNightlyLog(remake=False)

# create an observation
from mosasaurus.Observation import Observation
o = Observation(t, i, n)
o.setupFilePrefixes(science=['WASP-94'], reference=['WASP-94'], flat=['flat'])

# create a reducer to analyze this observation
from mosasaurus.Reducer import Reducer
r = Reducer(o, visualize=False)
r.reduce()

# Build the spectral cube from the reduced observation and pick the
# target/comparison apertures.
from mosasaurus.Cube import Cube
c = Cube(o, width=16)
c.populate(shift=False, max=None)
c.setStars(target='aperture_709_1066', comparisons='aperture_751_1066')
c.save()

from mosasaurus.WavelengthRecalibrator import WavelengthRecalibrator
wr = WavelengthRecalibrator(c)

# Optional diagnostics, intentionally disabled.
#c.imageCube(keys=['raw_counts'], stars=[c.target])
#c.imageCube()
#c.populate(shift=True, max=None)
#c.imageCube(keys=['raw_counts'], stars=[c.target])
#c.imageCube()
#c.exportShiftStretch()
#c.shiftCube()
#c.imageCube(keys=['raw_counts'], stars=[c.target], remake=True)
'''
c.movieCube(stride=1, remake=False)
c.imageCube(remake=True)
c.movieCube(stride=1, remake=False)
'''
#c.nudgeWavelengths()
| zkbt/mosasaurus | scripts/wasp94/wasp94_ut140801.py | Python | mit | 1,142 |
"""Kerasの各種モデル。"""
# pylint: skip-file
# flake8: noqa
from . import darknet53, efficientnet, xception
| ak110/pytoolkit | pytoolkit/applications/__init__.py | Python | mit | 117 |
# encoding: utf-8
import os
import subprocess
import sys
from workflow import Workflow3 as Workflow, MATCH_SUBSTRING
from workflow.background import run_in_background
import brew_actions
import helpers
GITHUB_SLUG = 'fniephaus/alfred-homebrew'
def execute(wf, cmd_list):
    """Run a brew command and return its stdout, or stderr if anything was
    written to it."""
    brew_arch = helpers.get_brew_arch(wf)
    env = helpers.initialise_path(brew_arch)
    process = subprocess.Popen(cmd_list,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               env=env)
    out, err = process.communicate()
    # A non-empty stderr takes precedence over stdout.
    if err:
        return err
    return out
def get_all_formulae():
    """Return every formula name known to brew, one per list entry."""
    # Relies on the module-level `wf` created in the __main__ block; used as a
    # zero-argument callback for wf.cached_data.
    return execute(wf, ['brew', 'formulae']).splitlines()
def get_installed_formulae():
    """Return installed formulae with versions (cached_data callback)."""
    return execute(wf, ['brew', 'list', '--versions']).splitlines()
def get_pinned_formulae():
    """Return pinned formulae with versions (cached_data callback)."""
    return execute(wf, ['brew', 'list', '--pinned', '--versions']).splitlines()
def get_outdated_formulae():
    """Return outdated formulae (cached_data callback)."""
    return execute(wf, ['brew', 'outdated', '--formula']).splitlines()
def get_info():
    """Return brew's summary info text as a single string."""
    return execute(wf, ['brew', 'info'])
def get_commands(wf, query):
    """Return brew's command names, filtered by the optional query term."""
    output = execute(wf, ['brew', 'commands']).splitlines()
    # Lines containing spaces are section headers, not command names.
    commands = [line for line in output if ' ' not in line]
    tokens = query.split()
    if len(tokens) < 2:
        return commands
    return wf.filter(tokens[1], commands, match_on=MATCH_SUBSTRING)
def get_all_services():
    """Parse `brew services list` into dicts with 'name' and 'status' keys."""
    lines = execute(wf, ['brew', 'services', 'list']).splitlines()
    # The first line is the column-header row.
    lines.pop(0)
    services = []
    for line in lines:
        fields = line.split()
        services.append({'name': fields[0], 'status': fields[1]})
    return services
def filter_all_formulae(wf, query):
    """All known formulae, narrowed by the optional second query token."""
    formulae = wf.cached_data('brew_all_formulae',
                              get_all_formulae,
                              max_age=3600)
    tokens = query.split()
    if len(tokens) < 2:
        return formulae
    return wf.filter(tokens[1], formulae, match_on=MATCH_SUBSTRING)
def filter_installed_formulae(wf, query):
    """Installed formulae, narrowed by the optional second query token."""
    formulae = wf.cached_data('brew_installed_formulae',
                              get_installed_formulae,
                              max_age=3600)
    tokens = query.split()
    if len(tokens) < 2:
        return formulae
    return wf.filter(tokens[1], formulae, match_on=MATCH_SUBSTRING)
def filter_pinned_formulae(wf, query):
    """Pinned formulae, narrowed by the optional second query token."""
    formulae = wf.cached_data('brew_pinned_formulae',
                              get_pinned_formulae,
                              max_age=3600)
    tokens = query.split()
    if len(tokens) < 2:
        return formulae
    return wf.filter(tokens[1], formulae, match_on=MATCH_SUBSTRING)
def filter_outdated_formulae(wf, query):
    """Outdated formulae, narrowed by the optional second query token."""
    formulae = wf.cached_data('brew_outdated_formulae',
                              get_outdated_formulae,
                              max_age=3600)
    tokens = query.split()
    if len(tokens) < 2:
        return formulae
    return wf.filter(tokens[1], formulae, match_on=MATCH_SUBSTRING)
def filter_all_services(wf, query):
    """All brew services, narrowed by the optional second query token."""
    services = wf.cached_data('brew_all_services',
                              get_all_services,
                              session=True)
    tokens = query.split()
    if len(tokens) < 2:
        return services
    return wf.filter(tokens[1], services, key=lambda x: x['name'], match_on=MATCH_SUBSTRING)
def add_service_actions(wf, service_name):
    """Add run/stop/start/restart result items for a single brew service."""
    # (title, subtitle, brew-services verb) -- one Alfred item per action.
    actions = [
        ('Run Service',
         'Run the service formula without registering to launch at login (or boot).',
         'run'),
        ('Stop Service',
         'Stop the service formula immediately and unregister it from launching at login (or boot).',
         'stop'),
        ('Start Service',
         'Start the service formula immediately and register it to launch at login (or boot).',
         'start'),
        ('Restart Service',
         'Stop (if necessary) and start the service formula immediately and register it to launch '
         'at login (or boot).',
         'restart'),
    ]
    for title, subtitle, verb in actions:
        wf.add_item(title,
                    subtitle,
                    autocomplete='services %s %s' % (service_name, verb),
                    arg='brew services %s %s' % (verb, service_name),
                    valid=True,
                    icon=helpers.get_icon(wf, 'chevron-right'))
def main(wf):
    """Entry point: dispatch the user's query to the matching brew action
    and emit Alfred result items."""
    if wf.update_available:
        wf.add_item('An update is available!',
                    autocomplete='workflow:update',
                    valid=False,
                    icon=helpers.get_icon(wf, 'cloud-download'))
    # Check for brew installation
    find_brew = helpers.brew_installed()
    if not (find_brew['INTEL'] or find_brew['ARM']):
        helpers.brew_installation_instructions(wf)
    else:
        # extract query
        query = wf.args[0] if len(wf.args) else None
        # With no query, surface a reminder when outdated formulae exist.
        if (not query and
                len(wf.cached_data('brew_outdated_formulae',
                                   get_outdated_formulae,
                                   max_age=3600)) > 0):
            wf.add_item('Some of your formulae are outdated!',
                        autocomplete='outdated ',
                        valid=False,
                        icon=helpers.get_icon(wf, 'cloud-download'))
        # Each branch below maps a keyword prefix to a set of result items.
        if query and query.startswith('install'):
            for formula in filter_all_formulae(wf, query):
                wf.add_item(formula, 'Install formula.',
                            arg='brew install %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('services'):
            query_filter = query.split()
            # "services <name> " (trailing space) selects a concrete service.
            if len(query_filter) == 2 and query.endswith(' '):
                service_name = query_filter[1]
                add_service_actions(wf, service_name)
            else:
                services = filter_all_services(wf, query)
                for service in services:
                    wf.add_item(service['name'], 'Select for action. Status: %s' % service['status'],
                                autocomplete='services %s ' % service['name'],
                                arg='',
                                valid=False,
                                icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('search'):
            for formula in filter_all_formulae(wf, query):
                wf.add_item(formula, 'Open formula on GitHub.',
                            arg='brew info --github %s' % formula,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('uninstall'):
            for formula in filter_installed_formulae(wf, query):
                # Cached entries are "name version"; brew wants only the name.
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Uninstall formula.',
                            arg='brew uninstall %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('list'):
            for formula in filter_installed_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Open formula on GitHub.',
                            arg='brew info --github %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('pin'):
            for formula in filter_installed_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Pin formula.',
                            arg='brew pin %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
            # delete cached file
            wf.cache_data('brew_pinned_formulae', None)
        elif query and query.startswith('unpin'):
            for formula in filter_pinned_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Unpin formula.',
                            arg='brew unpin %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
            # delete cached file
            wf.cache_data('brew_pinned_formulae', None)
        elif query and query.startswith('cat'):
            for formula in filter_all_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Display the source to this formula.',
                            arg='brew cat %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('outdated'):
            for formula in filter_outdated_formulae(wf, query):
                name = formula.rsplit()[0]
                wf.add_item(formula, 'Upgrade formula.',
                            arg='brew upgrade %s' % name,
                            valid=True,
                            icon=helpers.get_icon(wf, 'package'))
        elif query and query.startswith('info'):
            wf.add_item(get_info(),
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        elif query and query.startswith('commands'):
            for command in get_commands(wf, query):
                wf.add_item(command, 'Run this command.',
                            arg='brew %s' % command,
                            valid=True,
                            icon=helpers.get_icon(wf, 'chevron-right'))
        elif query and query.startswith('config'):
            helpers.edit_settings(wf)
            wf.add_item('`settings.json` has been opened.',
                        autocomplete='',
                        icon=helpers.get_icon(wf, 'info'))
        else:
            # No keyword matched: list the top-level actions themselves.
            actions = brew_actions.ACTIONS
            if len(wf.cached_data('brew_pinned_formulae',
                                  get_pinned_formulae,
                                  max_age=3600)) > 0:
                actions.append({
                    'name': 'Unpin',
                    'description': 'Unpin formula.',
                    'autocomplete': 'unpin ',
                    'arg': '',
                    'valid': False,
                })
            # filter actions by query
            if query:
                actions = wf.filter(query, actions,
                                    key=helpers.search_key_for_action,
                                    match_on=MATCH_SUBSTRING)
            if len(actions) > 0:
                for action in actions:
                    wf.add_item(action['name'], action['description'],
                                uid=action['name'],
                                autocomplete=action['autocomplete'],
                                arg=action['arg'],
                                valid=action['valid'],
                                icon=helpers.get_icon(wf, 'chevron-right'))
            else:
                wf.add_item('No action found for "%s"' % query,
                            autocomplete='',
                            icon=helpers.get_icon(wf, 'info'))
        if len(wf._items) == 0:
            # Nothing matched: offer the keyword back as an autocomplete stub.
            query_name = query[query.find(' ') + 1:]
            wf.add_item('No formula found for "%s"' % query_name,
                        autocomplete='%s ' % query[:query.find(' ')],
                        icon=helpers.get_icon(wf, 'info'))
        # NOTE(review): feedback is sent only on this branch -- presumably
        # brew_installation_instructions() sends its own; confirm in helpers.
        wf.send_feedback()
    # refresh cache
    cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')]
    run_in_background('brew_refresh', cmd)
if __name__ == '__main__':
    # wf.run wraps main() with Alfred's error handling; update_settings
    # enables the workflow's self-update check against the GitHub repo.
    wf = Workflow(update_settings={'github_slug': GITHUB_SLUG})
    sys.exit(wf.run(main))
| fniephaus/alfred-homebrew | src/brew.py | Python | mit | 12,427 |
from os.path import join as pjoin  # noqa: F401  (kept: may be used by importers)

# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
_version_major = 0
_version_minor = 2
_version_micro = ''  # use '' for first of series, number for 1 and above
_version_extra = 'dev'
# _version_extra = ''  # Uncomment this for full releases

# Construct full version string from these.
_ver = [_version_major, _version_minor]
if _version_micro:
    _ver.append(_version_micro)
if _version_extra:
    _ver.append(_version_extra)
__version__ = '.'.join(map(str, _ver))

CLASSIFIERS = ["Development Status :: 3 - Alpha",
               "Environment :: Console",
               "Intended Audience :: Science/Research",
               "License :: OSI Approved :: MIT License",
               "Operating System :: OS Independent",
               "Programming Language :: Python",
               "Topic :: Scientific/Engineering"]

# Description should be a one-liner:
description = "skedm: Empirical Dynamic Modeling with a simple api"

# Long description will go up on the pypi page.
# (Fixed: the package is "skedm", not "sknla"; "Empirical" was misspelled.)
long_description = """
skedm
========
Scikit Empirical Dynamic Modeling (skedm) can be used as a way to forecast time series,
spatio-temporal 2D arrays, and even discrete spatial arrangements. More
importantly, skedm can provide insight into the underlying dynamics of a system.
"""

NAME = "skedm"
MAINTAINER = "Nick Cortale"
MAINTAINER_EMAIL = "nickcortale@gmail.com"
DESCRIPTION = description
LONG_DESCRIPTION = long_description
URL = "https://github.com/NickC1/skedm"
# NOTE(review): tarball tag (0.1) lags the version above -- confirm a 0.2 tag
# exists before releasing.
DOWNLOAD_URL = "https://github.com/NickC1/skedm/tarball/0.1"
LICENSE = "MIT"
AUTHOR = "Nick Cortale"
AUTHOR_EMAIL = "nickcortale@gmail.com"
PLATFORMS = "OS Independent"
MAJOR = _version_major
MINOR = _version_minor
MICRO = _version_micro
VERSION = __version__
PACKAGES = ['skedm']
PACKAGE_DATA = ""
# "scikit-learn" is the correct PyPI distribution name; "scikitlearn" does not
# exist and would make installation fail.
REQUIRES = ["numpy", "scikit-learn"]
| NickC1/skedm | build/lib/skedm/version.py | Python | mit | 1,850 |
import logging
from ..models import Activity
from .date import activity_stream_date_to_datetime, datetime_to_string
log = logging.getLogger(__name__)
def activity_from_dict(data):
    """Build an Activity model from a raw YouTube API search-result dict."""
    log.debug("Converting YouTube dict to Activity Model")
    activity_dict = activity_dict_from_dict(data)
    return Activity.from_activity_dict(activity_dict)
def activity_dict_from_dict(blob):
    """Translate one YouTube Data API search result into an Activity
    Streams 2.0 style dict.

    `blob` is expected to carry "snippet", "id" and "etag" keys as
    returned by the search endpoint -- raises AttributeError when
    "snippet" or "id" is missing (unchanged from the original behavior).
    """
    log.debug("Converting YouTube dict to activity dict: %s", blob)
    # Hoist repeated lookups: the original re-fetched "snippet" and the
    # video id / watch URL several times.
    snippet = blob.get("snippet")
    video_id = blob.get("id").get("videoId")
    video_url = "https://www.youtube.com/watch?v={}".format(video_id)

    date = activity_stream_date_to_datetime(snippet.get("publishedAt"))

    # The three thumbnail entries only differ in resolution; build them in
    # a loop instead of copy-pasting the dict three times.
    thumbnails = snippet.get("thumbnails")
    images = [{
        "@type": "Link",
        "href": thumbnails.get(resolution).get("url"),
        "mediaType": "image/jpeg",
        "youtube:resolution": resolution,
    } for resolution in ("default", "medium", "high")]

    stream_object = {
        "@context": "http://www.w3.org/ns/activitystreams",
        "@type": "Activity",
        "published": datetime_to_string(date),
        "provider": {
            "@type": "Service",
            "displayName": "YouTube"
        },
        # NOTE(review): the actor @id is built from channelTitle rather
        # than a channel id -- looks suspect but preserved as-is.
        "actor": {
            "@type": "Person",
            "@id": "https://www.youtube.com/user/{}".format(snippet.get("channelTitle")),
            "displayName": snippet.get("channelTitle"),
        },
        "object": {
            "@id": video_url,
            "@type": "Video",
            "displayName": snippet.get("title"),
            "url": [{
                "href": video_url,
                "@type": "Link"
            }],
            "content": snippet.get("description"),
            "youtube:etag": blob.get("etag"),
            "youtube:kind": blob.get("kind"),
            "youtube:id:kind": blob.get("id").get("kind"),
            "youtube:channelId": snippet.get("channelId"),
            "youtube:liveBroadcastContent": snippet.get("liveBroadcastContent"),
            "image": images,
        }
    }
    return stream_object
"""
"""
"""
{
"@context": "http://www.w3.org/ns/activitystreams",
"@type": "Activity", ------ Abstract wrapper
"published": "2015-02-10T15:04:55Z",
"provider": {
"@type": "Service",
"displayName": "Twitter|FaceBook|Instagram|YouTube"
},
"actor": {
"@type": "Person",
"@id": "https://www.twitter.com/{{user.screen_name}}
"displayName": "Martin Smith",
"url": "http://example.org/martin",
"image": {
"@type": "Link",
"href": "http://example.org/martin/image.jpg",
"mediaType": "image/jpeg"
}
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Note",
"url": "http://example.org/blog/2011/02/entry",
"content": "This is a short note"
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Video",
"displayName": "A Simple Video",
"url": "http://example.org/video.mkv",
"duration": "PT2H"
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Image",
"displayName": "A Simple Image",
"content": "any messages?"
"url": [
{
"@type": "Link",
"href": "http://example.org/image.jpeg",
"mediaType": "image/jpeg"
},
{
"@type": "Link",
"href": "http://example.org/image.png",
"mediaType": "image/png"
}
]
},
}
"""
| blitzagency/django-chatterbox | chatterbox/utils/youtube.py | Python | mit | 4,331 |
import web
from gothonweb import map  # NOTE: shadows the builtin map() inside this module
# URL routing: each path maps to the handler class named on its right.
urls = (
  '/game', 'GameEngine',
  '/', 'Index',
)
app = web.application(urls, globals())
#little hack so that debug mode works with sessions
# web.py's debug reloader re-imports this module; stashing the Session in
# web.config lets reloads reuse the single existing instance.
if web.config.get('_session') is None:
    store = web.session.DiskStore('sessions')
    session = web.session.Session(app, store,
                                  initializer={'room':None})
    web.config._session = session
else:
    session = web.config._session
# All templates render inside templates/layout.html.
render = web.template.render('templates/', base="layout")
class Index(object):
    """Landing page: seeds the player's session, then hands off to /game."""

    def GET(self):
        """Reset the session to the starting room and redirect to /game."""
        session.room = map.START
        web.seeother("/game")
class GameEngine(object):
    """Renders the current room and advances the game on form submissions."""

    def GET(self):
        """Show the session's current room; render nothing when unset."""
        room = session.room
        if not room:
            return  # no room yet, e.g. session never initialized via "/"
        return render.show_room(room=room)

    def POST(self):
        """Apply the submitted action to the current room, then redirect."""
        form = web.input(action=None)
        room = session.room
        if room:
            session.room = room.go(form.action)
        web.seeother("/game")
if __name__ == "__main__":
app.run() | githubfun/lphw | gothonweb/bin/gothon_app.py | Python | mit | 1,131 |
#FIXME: UNDO, click time at end to undo
from PyQt4 import QtCore, QtGui
import sys, os
import volume_editor_layout, settings_layout, cPickle
import numpy as np
from utils import Utils
class SettingsEditWidget(QtGui.QDialog, settings_layout.Ui_Dialog):
    """Dialog editing the application's click-timing, learning,
    error-correction and speed/channel settings.

    The spin boxes display UI-friendly values; clickPdfToSettingsParams /
    settingsToClickPdfParams convert between the displayed values and the
    internal click-pdf representation (fr scaled by 100, fp_rate by 60).
    """
    #################################################### Init
    def __init__(self, i_parent=None):
        QtGui.QDialog.__init__(self, i_parent)
        self.setupUi(self)
        # Re-broadcast low-level widget changes as higher-level signals.
        QtCore.QObject.connect( self.box_enable_learning, QtCore.SIGNAL("toggled(bool)"), self.setEnableLearning)
        QtCore.QObject.connect( self.box_seconds_delay, QtCore.SIGNAL("valueChanged(double)"),self.editClickParamsEvent)
        QtCore.QObject.connect( self.box_click_dev, QtCore.SIGNAL("valueChanged(double)"),self.editClickParamsEvent)
    ############################################### Main
    def editClickParamsEvent(self, i_value):
        # Forward any delay/std spin-box change to the dialog's owner.
        self.emit(QtCore.SIGNAL("edit_click_params"))
    def closeEvent(self, event):
        # Close the dialog and notify the owner the settings window is gone.
        QtGui.QDialog.close(self)
        self.emit(QtCore.SIGNAL("close_settings"))
    def clickPdfToSettingsParams(self, i_params):
        """Convert click pdf parameters to the ones stored in settings editor."""
        (delay, std, fr, fp_rate) = i_params
        fr *= 100.0  # scaled x100 for display -- presumably a percentage
        fp_rate *= 60.0  # scaled x60 for display -- presumably per minute
        return (delay, std, fr, fp_rate)
    def settingsToClickPdfParams(self, i_params):
        """Convert settings editor parameters to the ones stored by click pdf."""
        (delay, std, fr, fp_rate) = i_params
        fr /= 100.0  # inverse of clickPdfToSettingsParams scaling
        fp_rate /= 60.0
        return (delay, std, fr, fp_rate)
    ################################################ Get
    def getSettings(self):
        """Collect every widget value into a settings dict (internal units)."""
        settings = {}
        #Click-time delay
        delay = self.box_seconds_delay.value()
        std = self.box_click_dev.value()
        settings['is_train'] = self.box_enable_learning.isChecked()
        settings['learning_rate'] = self.box_learning_rate.value()
        settings['learn_delay'] = self.box_learn_delay.isChecked()
        settings['learn_std'] = self.box_learn_std.isChecked()
        #Switch noise
        fp_rate = self.box_fp_rate.value()
        fr = self.box_fr.value()
        settings['learn_fp'] = self.box_learn_fp.isChecked()
        settings['learn_fr'] = self.box_learn_fr.isChecked()
        #Do the conversion
        click_params = (delay, std, fr, fp_rate)
        (settings['delay'], settings['std'], settings['fr'], settings['fp_rate']) = self.settingsToClickPdfParams(click_params)
        #Error correction
        settings['undo'] = self.box_undo.value()
        settings['prog_status'] = self.box_prog_status.value()
        settings['restart_word'] = self.box_restart_word.value()
        settings['shut_down'] = self.box_shut_down.value()
        settings['word_select_thresh'] = self.box_word_select.value()
        #Speed & channels
        settings['file_length'] = self.box_file_length.value()
        settings['channel_index'] = int(self.box_channels.currentIndex())
        settings['end_delay'] = self.box_end_delay.value()
        return settings
    def getCurrentChannel(self):
        """Return the channel count for the currently selected combo entry."""
        return self.getChannel(self.box_channels.currentIndex())
    def getChannel(self, i_index):
        """Return the integer channel count shown at combo index i_index."""
        return int(self.box_channels.itemText(i_index))
    #################################################### Set
    def setSettings(self, i_settings):
        """Populate every widget from a settings dict (internal units)."""
        #Get the parameters
        click_params = (i_settings['delay'], i_settings['std'], i_settings['fr'], i_settings['fp_rate'])
        (delay, std, fr, fp_rate) = self.clickPdfToSettingsParams(click_params)
        self.setClickParams((delay, std, fr, fp_rate))
        #More click-time params
        self.box_enable_learning.setChecked(i_settings['is_train'])
        self.box_learning_rate.setValue(i_settings['learning_rate'])
        self.box_learn_delay.setChecked( i_settings['learn_delay'])
        self.box_learn_std.setChecked(i_settings['learn_std'])
        #More switch noise params
        self.box_learn_fp.setChecked(i_settings['learn_fp'])
        self.box_learn_fr.setChecked(i_settings['learn_fr'])
        #Error correction
        self.box_undo.setValue(i_settings['undo'])
        self.box_prog_status.setValue(i_settings['prog_status'])
        self.box_restart_word.setValue(i_settings['restart_word'])
        self.box_shut_down.setValue(i_settings['shut_down'])
        self.box_word_select.setValue(i_settings['word_select_thresh'])
        #Speed & channels
        self.box_file_length.setValue(i_settings['file_length'])
        self.box_channels.setCurrentIndex(i_settings['channel_index'])
        self.box_end_delay.setValue(i_settings['end_delay'])
    def setClickParams(self, i_params):
        """Set the click-timing spin boxes from (delay, std, fr, fp_rate)."""
        (delay, std, fr, fp_rate) = i_params
        self.box_seconds_delay.setValue(delay)
        self.box_click_dev.setValue(std)
        self.box_fp_rate.setValue(fp_rate)
        self.box_fr.setValue(fr)
    def setEnableLearning(self, i_checked):
        # "Enable learning" drives all four per-parameter learn boxes.
        self.box_learn_delay.setChecked(i_checked)
        self.box_learn_std.setChecked(i_checked)
        self.box_learn_fp.setChecked(i_checked)
        self.box_learn_fr.setChecked(i_checked)
class VolumeEditWidget(QtGui.QDialog, volume_editor_layout.Ui_Dialog):
    """Dialog with five per-channel volume sliders and mute check boxes.

    Whenever a slider is released (or a channel is (un)muted) the dialog
    emits the "volume(float,int)" signal carrying slider_value / 1000.0
    and the channel index.
    """
    ##################################### Init
    def __init__(self, i_parent=None):
        QtGui.QDialog.__init__(self, i_parent)
        self.setupUi(self)
        # Last user-chosen slider positions, so un-muting can restore them.
        self.volumes = []
        for n in range(0, 5):
            slider = getattr(self, "volume_settings_" + str(n))
            self.volumes.append(slider.value())
            # Old-style signals can't carry the channel index, hence the
            # numbered setVolumeN / muteN trampolines defined below.
            func_vol = getattr(self, "setVolume" + str(n))
            func_mute = getattr(self, "mute" + str(n))
            box = getattr(self, "box_mute_" + str(n))
            QtCore.QObject.connect( slider, QtCore.SIGNAL("sliderReleased()"), func_vol)
            QtCore.QObject.connect( box, QtCore.SIGNAL("toggled(bool)"), func_mute)
        QtCore.QObject.connect( self.box_mute_all, QtCore.SIGNAL("toggled(bool)"), self.muteAll)
    ########################################### Signal/slots
    # Per-channel trampolines wired up in __init__.
    def mute0(self, i_checked):
        self.mute(0, i_checked)
    def mute1(self, i_checked):
        self.mute(1, i_checked)
    def mute2(self, i_checked):
        self.mute(2, i_checked)
    def mute3(self, i_checked):
        self.mute(3, i_checked)
    def mute4(self, i_checked):
        self.mute(4, i_checked)
    def setVolume0(self):
        self.setVolume(0)
    def setVolume1(self):
        self.setVolume(1)
    def setVolume2(self):
        self.setVolume(2)
    def setVolume3(self):
        self.setVolume(3)
    def setVolume4(self):
        self.setVolume(4)
    ########################################## Get
    def getVolume(self, i_channel):
        """Return channel i_channel's current volume as a float in [0, 1]."""
        slider_object = getattr(self, "volume_settings_" + str(i_channel))
        val = float(slider_object.value()) / 1000.0
        return val
    ########################################## Set
    def setVolume(self, i_channel, i_save_volume=True):
        """Emit the channel's current slider value; optionally remember it.

        i_save_volume is False when called from mute(), so forcing the
        slider to 0 does not overwrite the stored position.
        """
        slider_object = getattr(self, "volume_settings_" + str(i_channel))
        slider_val = slider_object.value()
        val = float(slider_val) / 1000.0
        if i_save_volume:
            self.volumes[i_channel] = slider_val
        self.emit(QtCore.SIGNAL("volume(float,int)"), float(val), int(i_channel))
    def setChannelConfig(self, i_channel_config):
        """Show one labelled slider per configured channel; hide the rest."""
        nchannels = i_channel_config.getChannels()
        channel_names = i_channel_config.getChannelNames()
        for n in range(0, nchannels):
            label_object = getattr(self, "volume_label_" + str(n))
            label_object.setText(QtCore.QString(channel_names[n][0]))
            label_object.show()
            slider_object = getattr(self, "volume_settings_" + str(n))
            slider_object.show()
        for n in range(nchannels, 5):
            object_name = "volume_label_" + str(n)
            label_object = getattr(self, object_name)
            label_object.hide()
            slider_object = getattr(self, "volume_settings_" + str(n))
            slider_object.hide()
    def mute(self, i_channel, i_checked):
        """Silence the channel (or restore its saved volume) and emit it."""
        slider_object = getattr(self, "volume_settings_" + str(i_channel))
        if i_checked:
            slider_object.setValue(0)
        else:
            slider_object.setValue(self.volumes[i_channel])
        self.setVolume(i_channel, i_save_volume=False)
    def muteAll(self, i_checked):
        """Propagate the "mute all" box to every per-channel mute box."""
        for channel in range(0, len(self.volumes)):
            box_mute = getattr(self, "box_mute_" + str(channel))
            box_mute.setChecked(i_checked)
    # BUG FIX: the original re-defined mute0 (identically) after muteAll,
    # silently shadowing the first definition; the duplicate was removed.
class VolumeEditGui(QtGui.QMainWindow):
    """Tiny manual-test harness: a main window hosting a VolumeEditWidget."""
    def __init__(self):
        from channel_config import ChannelConfig
        # BUG FIX: the base class is QMainWindow, but the original called
        # QtGui.QWidget.__init__(self); initialize the actual base class.
        QtGui.QMainWindow.__init__(self)
        channel_config = ChannelConfig(i_nchannels=5, i_sound_overlap=0.5 , i_file_length=0.4, i_root_dir="./")
        self.volume_editor = VolumeEditWidget(self)
        self.volume_editor.setChannelConfig(channel_config)
        self.volume_editor.show()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
gui = VolumeEditGui()
gui.show()
sys.exit( app.exec_()) | singleswitch/ticker | settings_editor.py | Python | mit | 9,463 |
# Generated from 'Appearance.h'
def FOUR_CHAR_CODE(x): return x
kAppearanceEventClass = FOUR_CHAR_CODE('appr')
kAEAppearanceChanged = FOUR_CHAR_CODE('thme')
kAESystemFontChanged = FOUR_CHAR_CODE('sysf')
kAESmallSystemFontChanged = FOUR_CHAR_CODE('ssfn')
kAEViewsFontChanged = FOUR_CHAR_CODE('vfnt')
kThemeDataFileType = FOUR_CHAR_CODE('thme')
kThemePlatinumFileType = FOUR_CHAR_CODE('pltn')
kThemeCustomThemesFileType = FOUR_CHAR_CODE('scen')
kThemeSoundTrackFileType = FOUR_CHAR_CODE('tsnd')
kThemeBrushDialogBackgroundActive = 1
kThemeBrushDialogBackgroundInactive = 2
kThemeBrushAlertBackgroundActive = 3
kThemeBrushAlertBackgroundInactive = 4
kThemeBrushModelessDialogBackgroundActive = 5
kThemeBrushModelessDialogBackgroundInactive = 6
kThemeBrushUtilityWindowBackgroundActive = 7
kThemeBrushUtilityWindowBackgroundInactive = 8
kThemeBrushListViewSortColumnBackground = 9
kThemeBrushListViewBackground = 10
kThemeBrushIconLabelBackground = 11
kThemeBrushListViewSeparator = 12
kThemeBrushChasingArrows = 13
kThemeBrushDragHilite = 14
kThemeBrushDocumentWindowBackground = 15
kThemeBrushFinderWindowBackground = 16
kThemeBrushScrollBarDelimiterActive = 17
kThemeBrushScrollBarDelimiterInactive = 18
kThemeBrushFocusHighlight = 19
kThemeBrushPopupArrowActive = 20
kThemeBrushPopupArrowPressed = 21
kThemeBrushPopupArrowInactive = 22
kThemeBrushAppleGuideCoachmark = 23
kThemeBrushIconLabelBackgroundSelected = 24
kThemeBrushStaticAreaFill = 25
kThemeBrushActiveAreaFill = 26
kThemeBrushButtonFrameActive = 27
kThemeBrushButtonFrameInactive = 28
kThemeBrushButtonFaceActive = 29
kThemeBrushButtonFaceInactive = 30
kThemeBrushButtonFacePressed = 31
kThemeBrushButtonActiveDarkShadow = 32
kThemeBrushButtonActiveDarkHighlight = 33
kThemeBrushButtonActiveLightShadow = 34
kThemeBrushButtonActiveLightHighlight = 35
kThemeBrushButtonInactiveDarkShadow = 36
kThemeBrushButtonInactiveDarkHighlight = 37
kThemeBrushButtonInactiveLightShadow = 38
kThemeBrushButtonInactiveLightHighlight = 39
kThemeBrushButtonPressedDarkShadow = 40
kThemeBrushButtonPressedDarkHighlight = 41
kThemeBrushButtonPressedLightShadow = 42
kThemeBrushButtonPressedLightHighlight = 43
kThemeBrushBevelActiveLight = 44
kThemeBrushBevelActiveDark = 45
kThemeBrushBevelInactiveLight = 46
kThemeBrushBevelInactiveDark = 47
kThemeBrushNotificationWindowBackground = 48
kThemeBrushMovableModalBackground = 49
kThemeBrushSheetBackground = 50
kThemeBrushDrawerBackground = 51
kThemeBrushBlack = -1
kThemeBrushWhite = -2
kThemeTextColorDialogActive = 1
kThemeTextColorDialogInactive = 2
kThemeTextColorAlertActive = 3
kThemeTextColorAlertInactive = 4
kThemeTextColorModelessDialogActive = 5
kThemeTextColorModelessDialogInactive = 6
kThemeTextColorWindowHeaderActive = 7
kThemeTextColorWindowHeaderInactive = 8
kThemeTextColorPlacardActive = 9
kThemeTextColorPlacardInactive = 10
kThemeTextColorPlacardPressed = 11
kThemeTextColorPushButtonActive = 12
kThemeTextColorPushButtonInactive = 13
kThemeTextColorPushButtonPressed = 14
kThemeTextColorBevelButtonActive = 15
kThemeTextColorBevelButtonInactive = 16
kThemeTextColorBevelButtonPressed = 17
kThemeTextColorPopupButtonActive = 18
kThemeTextColorPopupButtonInactive = 19
kThemeTextColorPopupButtonPressed = 20
kThemeTextColorIconLabel = 21
kThemeTextColorListView = 22
kThemeTextColorDocumentWindowTitleActive = 23
kThemeTextColorDocumentWindowTitleInactive = 24
kThemeTextColorMovableModalWindowTitleActive = 25
kThemeTextColorMovableModalWindowTitleInactive = 26
kThemeTextColorUtilityWindowTitleActive = 27
kThemeTextColorUtilityWindowTitleInactive = 28
kThemeTextColorPopupWindowTitleActive = 29
kThemeTextColorPopupWindowTitleInactive = 30
kThemeTextColorRootMenuActive = 31
kThemeTextColorRootMenuSelected = 32
kThemeTextColorRootMenuDisabled = 33
kThemeTextColorMenuItemActive = 34
kThemeTextColorMenuItemSelected = 35
kThemeTextColorMenuItemDisabled = 36
kThemeTextColorPopupLabelActive = 37
kThemeTextColorPopupLabelInactive = 38
kThemeTextColorTabFrontActive = 39
kThemeTextColorTabNonFrontActive = 40
kThemeTextColorTabNonFrontPressed = 41
kThemeTextColorTabFrontInactive = 42
kThemeTextColorTabNonFrontInactive = 43
kThemeTextColorIconLabelSelected = 44
kThemeTextColorBevelButtonStickyActive = 45
kThemeTextColorBevelButtonStickyInactive = 46
kThemeTextColorNotification = 47
kThemeTextColorBlack = -1
kThemeTextColorWhite = -2
kThemeStateInactive = 0
kThemeStateActive = 1
kThemeStatePressed = 2
kThemeStateRollover = 6
kThemeStateUnavailable = 7
kThemeStateUnavailableInactive = 8
kThemeStateDisabled = 0
kThemeStatePressedUp = 2
kThemeStatePressedDown = 3
kThemeArrowCursor = 0
kThemeCopyArrowCursor = 1
kThemeAliasArrowCursor = 2
kThemeContextualMenuArrowCursor = 3
kThemeIBeamCursor = 4
kThemeCrossCursor = 5
kThemePlusCursor = 6
kThemeWatchCursor = 7
kThemeClosedHandCursor = 8
kThemeOpenHandCursor = 9
kThemePointingHandCursor = 10
kThemeCountingUpHandCursor = 11
kThemeCountingDownHandCursor = 12
kThemeCountingUpAndDownHandCursor = 13
kThemeSpinningCursor = 14
kThemeResizeLeftCursor = 15
kThemeResizeRightCursor = 16
kThemeResizeLeftRightCursor = 17
kThemeMenuBarNormal = 0
kThemeMenuBarSelected = 1
kThemeMenuSquareMenuBar = (1 << 0)
kThemeMenuActive = 0
kThemeMenuSelected = 1
kThemeMenuDisabled = 3
kThemeMenuTypePullDown = 0
kThemeMenuTypePopUp = 1
kThemeMenuTypeHierarchical = 2
kThemeMenuTypeInactive = 0x0100
kThemeMenuItemPlain = 0
kThemeMenuItemHierarchical = 1
kThemeMenuItemScrollUpArrow = 2
kThemeMenuItemScrollDownArrow = 3
kThemeMenuItemAtTop = 0x0100
kThemeMenuItemAtBottom = 0x0200
kThemeMenuItemHierBackground = 0x0400
kThemeMenuItemPopUpBackground = 0x0800
kThemeMenuItemHasIcon = 0x8000
kThemeBackgroundTabPane = 1
kThemeBackgroundPlacard = 2
kThemeBackgroundWindowHeader = 3
kThemeBackgroundListViewWindowHeader = 4
kThemeBackgroundSecondaryGroupBox = 5
kThemeNameTag = FOUR_CHAR_CODE('name')
kThemeVariantNameTag = FOUR_CHAR_CODE('varn')
kThemeHighlightColorTag = FOUR_CHAR_CODE('hcol')
kThemeScrollBarArrowStyleTag = FOUR_CHAR_CODE('sbar')
kThemeScrollBarThumbStyleTag = FOUR_CHAR_CODE('sbth')
kThemeSoundsEnabledTag = FOUR_CHAR_CODE('snds')
kThemeDblClickCollapseTag = FOUR_CHAR_CODE('coll')
kThemeAppearanceFileNameTag = FOUR_CHAR_CODE('thme')
kThemeSystemFontTag = FOUR_CHAR_CODE('lgsf')
kThemeSmallSystemFontTag = FOUR_CHAR_CODE('smsf')
kThemeViewsFontTag = FOUR_CHAR_CODE('vfnt')
kThemeViewsFontSizeTag = FOUR_CHAR_CODE('vfsz')
kThemeDesktopPatternNameTag = FOUR_CHAR_CODE('patn')
kThemeDesktopPatternTag = FOUR_CHAR_CODE('patt')
kThemeDesktopPictureNameTag = FOUR_CHAR_CODE('dpnm')
kThemeDesktopPictureAliasTag = FOUR_CHAR_CODE('dpal')
kThemeDesktopPictureAlignmentTag = FOUR_CHAR_CODE('dpan')
kThemeHighlightColorNameTag = FOUR_CHAR_CODE('hcnm')
kThemeExamplePictureIDTag = FOUR_CHAR_CODE('epic')
kThemeSoundTrackNameTag = FOUR_CHAR_CODE('sndt')
kThemeSoundMaskTag = FOUR_CHAR_CODE('smsk')
kThemeUserDefinedTag = FOUR_CHAR_CODE('user')
kThemeSmoothFontEnabledTag = FOUR_CHAR_CODE('smoo')
kThemeSmoothFontMinSizeTag = FOUR_CHAR_CODE('smos')
kThemeCheckBoxClassicX = 0
kThemeCheckBoxCheckMark = 1
kThemeScrollBarArrowsSingle = 0
kThemeScrollBarArrowsLowerRight = 1
kThemeScrollBarThumbNormal = 0
kThemeScrollBarThumbProportional = 1
kThemeSystemFont = 0
kThemeSmallSystemFont = 1
kThemeSmallEmphasizedSystemFont = 2
kThemeViewsFont = 3
kThemeEmphasizedSystemFont = 4
kThemeApplicationFont = 5
kThemeLabelFont = 6
kThemeMenuTitleFont = 100
kThemeMenuItemFont = 101
kThemeMenuItemMarkFont = 102
kThemeMenuItemCmdKeyFont = 103
kThemeWindowTitleFont = 104
kThemePushButtonFont = 105
kThemeUtilityWindowTitleFont = 106
kThemeAlertHeaderFont = 107
kThemeCurrentPortFont = 200
kThemeTabNonFront = 0
kThemeTabNonFrontPressed = 1
kThemeTabNonFrontInactive = 2
kThemeTabFront = 3
kThemeTabFrontInactive = 4
kThemeTabNonFrontUnavailable = 5
kThemeTabFrontUnavailable = 6
kThemeTabNorth = 0
kThemeTabSouth = 1
kThemeTabEast = 2
kThemeTabWest = 3
kThemeSmallTabHeight = 16
kThemeLargeTabHeight = 21
kThemeTabPaneOverlap = 3
kThemeSmallTabHeightMax = 19
kThemeLargeTabHeightMax = 24
kThemeMediumScrollBar = 0
kThemeSmallScrollBar = 1
kThemeMediumSlider = 2
kThemeMediumProgressBar = 3
kThemeMediumIndeterminateBar = 4
kThemeRelevanceBar = 5
kThemeSmallSlider = 6
kThemeLargeProgressBar = 7
kThemeLargeIndeterminateBar = 8
kThemeTrackActive = 0
kThemeTrackDisabled = 1
kThemeTrackNothingToScroll = 2
kThemeTrackInactive = 3
kThemeLeftOutsideArrowPressed = 0x01
kThemeLeftInsideArrowPressed = 0x02
kThemeLeftTrackPressed = 0x04
kThemeThumbPressed = 0x08
kThemeRightTrackPressed = 0x10
kThemeRightInsideArrowPressed = 0x20
kThemeRightOutsideArrowPressed = 0x40
kThemeTopOutsideArrowPressed = kThemeLeftOutsideArrowPressed
kThemeTopInsideArrowPressed = kThemeLeftInsideArrowPressed
kThemeTopTrackPressed = kThemeLeftTrackPressed
kThemeBottomTrackPressed = kThemeRightTrackPressed
kThemeBottomInsideArrowPressed = kThemeRightInsideArrowPressed
kThemeBottomOutsideArrowPressed = kThemeRightOutsideArrowPressed
kThemeThumbPlain = 0
kThemeThumbUpward = 1
kThemeThumbDownward = 2
kThemeTrackHorizontal = (1 << 0)
kThemeTrackRightToLeft = (1 << 1)
kThemeTrackShowThumb = (1 << 2)
kThemeTrackThumbRgnIsNotGhost = (1 << 3)
kThemeTrackNoScrollBarArrows = (1 << 4)
kThemeWindowHasGrow = (1 << 0)
kThemeWindowHasHorizontalZoom = (1 << 3)
kThemeWindowHasVerticalZoom = (1 << 4)
kThemeWindowHasFullZoom = kThemeWindowHasHorizontalZoom + kThemeWindowHasVerticalZoom
kThemeWindowHasCloseBox = (1 << 5)
kThemeWindowHasCollapseBox = (1 << 6)
kThemeWindowHasTitleText = (1 << 7)
kThemeWindowIsCollapsed = (1 << 8)
kThemeWindowHasDirty = (1 << 9)
kThemeDocumentWindow = 0
kThemeDialogWindow = 1
kThemeMovableDialogWindow = 2
kThemeAlertWindow = 3
kThemeMovableAlertWindow = 4
kThemePlainDialogWindow = 5
kThemeShadowDialogWindow = 6
kThemePopupWindow = 7
kThemeUtilityWindow = 8
kThemeUtilitySideWindow = 9
kThemeSheetWindow = 10
kThemeWidgetCloseBox = 0
kThemeWidgetZoomBox = 1
kThemeWidgetCollapseBox = 2
kThemeWidgetDirtyCloseBox = 6
kThemeArrowLeft = 0
kThemeArrowDown = 1
kThemeArrowRight = 2
kThemeArrowUp = 3
kThemeArrow3pt = 0
kThemeArrow5pt = 1
kThemeArrow7pt = 2
kThemeArrow9pt = 3
kThemeGrowLeft = (1 << 0)
kThemeGrowRight = (1 << 1)
kThemeGrowUp = (1 << 2)
kThemeGrowDown = (1 << 3)
kThemePushButton = 0
kThemeCheckBox = 1
kThemeRadioButton = 2
kThemeBevelButton = 3
kThemeArrowButton = 4
kThemePopupButton = 5
kThemeDisclosureButton = 6
kThemeIncDecButton = 7
kThemeSmallBevelButton = 8
kThemeMediumBevelButton = 3
kThemeLargeBevelButton = 9
kThemeListHeaderButton = 10
kThemeRoundButton = 11
kThemeLargeRoundButton = 12
kThemeSmallCheckBox = 13
kThemeSmallRadioButton = 14
kThemeRoundedBevelButton = 15
kThemeNormalCheckBox = kThemeCheckBox
kThemeNormalRadioButton = kThemeRadioButton
kThemeButtonOff = 0
kThemeButtonOn = 1
kThemeButtonMixed = 2
kThemeDisclosureRight = 0
kThemeDisclosureDown = 1
kThemeDisclosureLeft = 2
kThemeAdornmentNone = 0
kThemeAdornmentDefault = (1 << 0)
kThemeAdornmentFocus = (1 << 2)
kThemeAdornmentRightToLeft = (1 << 4)
kThemeAdornmentDrawIndicatorOnly = (1 << 5)
kThemeAdornmentHeaderButtonLeftNeighborSelected = (1 << 6)
kThemeAdornmentHeaderButtonRightNeighborSelected = (1 << 7)
kThemeAdornmentHeaderButtonSortUp = (1 << 8)
kThemeAdornmentHeaderMenuButton = (1 << 9)
kThemeAdornmentHeaderButtonNoShadow = (1 << 10)
kThemeAdornmentHeaderButtonShadowOnly = (1 << 11)
kThemeAdornmentNoShadow = kThemeAdornmentHeaderButtonNoShadow
kThemeAdornmentShadowOnly = kThemeAdornmentHeaderButtonShadowOnly
kThemeAdornmentArrowLeftArrow = (1 << 6)
kThemeAdornmentArrowDownArrow = (1 << 7)
kThemeAdornmentArrowDoubleArrow = (1 << 8)
kThemeAdornmentArrowUpArrow = (1 << 9)
kThemeNoSounds = 0
kThemeWindowSoundsMask = (1 << 0)
kThemeMenuSoundsMask = (1 << 1)
kThemeControlSoundsMask = (1 << 2)
kThemeFinderSoundsMask = (1 << 3)
kThemeDragSoundNone = 0
kThemeDragSoundMoveWindow = FOUR_CHAR_CODE('wmov')
kThemeDragSoundGrowWindow = FOUR_CHAR_CODE('wgro')
kThemeDragSoundMoveUtilWindow = FOUR_CHAR_CODE('umov')
kThemeDragSoundGrowUtilWindow = FOUR_CHAR_CODE('ugro')
kThemeDragSoundMoveDialog = FOUR_CHAR_CODE('dmov')
kThemeDragSoundMoveAlert = FOUR_CHAR_CODE('amov')
kThemeDragSoundMoveIcon = FOUR_CHAR_CODE('imov')
kThemeDragSoundSliderThumb = FOUR_CHAR_CODE('slth')
kThemeDragSoundSliderGhost = FOUR_CHAR_CODE('slgh')
kThemeDragSoundScrollBarThumb = FOUR_CHAR_CODE('sbth')
kThemeDragSoundScrollBarGhost = FOUR_CHAR_CODE('sbgh')
kThemeDragSoundScrollBarArrowDecreasing = FOUR_CHAR_CODE('sbad')
kThemeDragSoundScrollBarArrowIncreasing = FOUR_CHAR_CODE('sbai')
kThemeDragSoundDragging = FOUR_CHAR_CODE('drag')
kThemeSoundNone = 0
kThemeSoundMenuOpen = FOUR_CHAR_CODE('mnuo')
kThemeSoundMenuClose = FOUR_CHAR_CODE('mnuc')
kThemeSoundMenuItemHilite = FOUR_CHAR_CODE('mnui')
kThemeSoundMenuItemRelease = FOUR_CHAR_CODE('mnus')
kThemeSoundWindowClosePress = FOUR_CHAR_CODE('wclp')
kThemeSoundWindowCloseEnter = FOUR_CHAR_CODE('wcle')
kThemeSoundWindowCloseExit = FOUR_CHAR_CODE('wclx')
kThemeSoundWindowCloseRelease = FOUR_CHAR_CODE('wclr')
kThemeSoundWindowZoomPress = FOUR_CHAR_CODE('wzmp')
kThemeSoundWindowZoomEnter = FOUR_CHAR_CODE('wzme')
kThemeSoundWindowZoomExit = FOUR_CHAR_CODE('wzmx')
kThemeSoundWindowZoomRelease = FOUR_CHAR_CODE('wzmr')
kThemeSoundWindowCollapsePress = FOUR_CHAR_CODE('wcop')
kThemeSoundWindowCollapseEnter = FOUR_CHAR_CODE('wcoe')
kThemeSoundWindowCollapseExit = FOUR_CHAR_CODE('wcox')
kThemeSoundWindowCollapseRelease = FOUR_CHAR_CODE('wcor')
kThemeSoundWindowDragBoundary = FOUR_CHAR_CODE('wdbd')
kThemeSoundUtilWinClosePress = FOUR_CHAR_CODE('uclp')
kThemeSoundUtilWinCloseEnter = FOUR_CHAR_CODE('ucle')
kThemeSoundUtilWinCloseExit = FOUR_CHAR_CODE('uclx')
kThemeSoundUtilWinCloseRelease = FOUR_CHAR_CODE('uclr')
kThemeSoundUtilWinZoomPress = FOUR_CHAR_CODE('uzmp')
kThemeSoundUtilWinZoomEnter = FOUR_CHAR_CODE('uzme')
kThemeSoundUtilWinZoomExit = FOUR_CHAR_CODE('uzmx')
kThemeSoundUtilWinZoomRelease = FOUR_CHAR_CODE('uzmr')
kThemeSoundUtilWinCollapsePress = FOUR_CHAR_CODE('ucop')
kThemeSoundUtilWinCollapseEnter = FOUR_CHAR_CODE('ucoe')
kThemeSoundUtilWinCollapseExit = FOUR_CHAR_CODE('ucox')
kThemeSoundUtilWinCollapseRelease = FOUR_CHAR_CODE('ucor')
kThemeSoundUtilWinDragBoundary = FOUR_CHAR_CODE('udbd')
kThemeSoundWindowOpen = FOUR_CHAR_CODE('wopn')
kThemeSoundWindowClose = FOUR_CHAR_CODE('wcls')
kThemeSoundWindowZoomIn = FOUR_CHAR_CODE('wzmi')
kThemeSoundWindowZoomOut = FOUR_CHAR_CODE('wzmo')
kThemeSoundWindowCollapseUp = FOUR_CHAR_CODE('wcol')
kThemeSoundWindowCollapseDown = FOUR_CHAR_CODE('wexp')
kThemeSoundWindowActivate = FOUR_CHAR_CODE('wact')
kThemeSoundUtilWindowOpen = FOUR_CHAR_CODE('uopn')
kThemeSoundUtilWindowClose = FOUR_CHAR_CODE('ucls')
kThemeSoundUtilWindowZoomIn = FOUR_CHAR_CODE('uzmi')
kThemeSoundUtilWindowZoomOut = FOUR_CHAR_CODE('uzmo')
kThemeSoundUtilWindowCollapseUp = FOUR_CHAR_CODE('ucol')
kThemeSoundUtilWindowCollapseDown = FOUR_CHAR_CODE('uexp')
kThemeSoundUtilWindowActivate = FOUR_CHAR_CODE('uact')
kThemeSoundDialogOpen = FOUR_CHAR_CODE('dopn')
kThemeSoundDialogClose = FOUR_CHAR_CODE('dlgc')
kThemeSoundAlertOpen = FOUR_CHAR_CODE('aopn')
kThemeSoundAlertClose = FOUR_CHAR_CODE('altc')
kThemeSoundPopupWindowOpen = FOUR_CHAR_CODE('pwop')
kThemeSoundPopupWindowClose = FOUR_CHAR_CODE('pwcl')
kThemeSoundButtonPress = FOUR_CHAR_CODE('btnp')
kThemeSoundButtonEnter = FOUR_CHAR_CODE('btne')
kThemeSoundButtonExit = FOUR_CHAR_CODE('btnx')
kThemeSoundButtonRelease = FOUR_CHAR_CODE('btnr')
kThemeSoundDefaultButtonPress = FOUR_CHAR_CODE('dbtp')
kThemeSoundDefaultButtonEnter = FOUR_CHAR_CODE('dbte')
kThemeSoundDefaultButtonExit = FOUR_CHAR_CODE('dbtx')
kThemeSoundDefaultButtonRelease = FOUR_CHAR_CODE('dbtr')
kThemeSoundCancelButtonPress = FOUR_CHAR_CODE('cbtp')
kThemeSoundCancelButtonEnter = FOUR_CHAR_CODE('cbte')
kThemeSoundCancelButtonExit = FOUR_CHAR_CODE('cbtx')
kThemeSoundCancelButtonRelease = FOUR_CHAR_CODE('cbtr')
kThemeSoundCheckboxPress = FOUR_CHAR_CODE('chkp')
kThemeSoundCheckboxEnter = FOUR_CHAR_CODE('chke')
kThemeSoundCheckboxExit = FOUR_CHAR_CODE('chkx')
kThemeSoundCheckboxRelease = FOUR_CHAR_CODE('chkr')
kThemeSoundRadioPress = FOUR_CHAR_CODE('radp')
kThemeSoundRadioEnter = FOUR_CHAR_CODE('rade')
kThemeSoundRadioExit = FOUR_CHAR_CODE('radx')
kThemeSoundRadioRelease = FOUR_CHAR_CODE('radr')
kThemeSoundScrollArrowPress = FOUR_CHAR_CODE('sbap')
kThemeSoundScrollArrowEnter = FOUR_CHAR_CODE('sbae')
kThemeSoundScrollArrowExit = FOUR_CHAR_CODE('sbax')
kThemeSoundScrollArrowRelease = FOUR_CHAR_CODE('sbar')
kThemeSoundScrollEndOfTrack = FOUR_CHAR_CODE('sbte')
kThemeSoundScrollTrackPress = FOUR_CHAR_CODE('sbtp')
kThemeSoundSliderEndOfTrack = FOUR_CHAR_CODE('slte')
kThemeSoundSliderTrackPress = FOUR_CHAR_CODE('sltp')
kThemeSoundBalloonOpen = FOUR_CHAR_CODE('blno')
kThemeSoundBalloonClose = FOUR_CHAR_CODE('blnc')
kThemeSoundBevelPress = FOUR_CHAR_CODE('bevp')
kThemeSoundBevelEnter = FOUR_CHAR_CODE('beve')
kThemeSoundBevelExit = FOUR_CHAR_CODE('bevx')
kThemeSoundBevelRelease = FOUR_CHAR_CODE('bevr')
kThemeSoundLittleArrowUpPress = FOUR_CHAR_CODE('laup')
kThemeSoundLittleArrowDnPress = FOUR_CHAR_CODE('ladp')
kThemeSoundLittleArrowEnter = FOUR_CHAR_CODE('lare')
kThemeSoundLittleArrowExit = FOUR_CHAR_CODE('larx')
kThemeSoundLittleArrowUpRelease = FOUR_CHAR_CODE('laur')
kThemeSoundLittleArrowDnRelease = FOUR_CHAR_CODE('ladr')
kThemeSoundPopupPress = FOUR_CHAR_CODE('popp')
kThemeSoundPopupEnter = FOUR_CHAR_CODE('pope')
kThemeSoundPopupExit = FOUR_CHAR_CODE('popx')
kThemeSoundPopupRelease = FOUR_CHAR_CODE('popr')
kThemeSoundDisclosurePress = FOUR_CHAR_CODE('dscp')
kThemeSoundDisclosureEnter = FOUR_CHAR_CODE('dsce')
kThemeSoundDisclosureExit = FOUR_CHAR_CODE('dscx')
kThemeSoundDisclosureRelease = FOUR_CHAR_CODE('dscr')
kThemeSoundTabPressed = FOUR_CHAR_CODE('tabp')
kThemeSoundTabEnter = FOUR_CHAR_CODE('tabe')
kThemeSoundTabExit = FOUR_CHAR_CODE('tabx')
kThemeSoundTabRelease = FOUR_CHAR_CODE('tabr')
kThemeSoundDragTargetHilite = FOUR_CHAR_CODE('dthi')
kThemeSoundDragTargetUnhilite = FOUR_CHAR_CODE('dtuh')
kThemeSoundDragTargetDrop = FOUR_CHAR_CODE('dtdr')
kThemeSoundEmptyTrash = FOUR_CHAR_CODE('ftrs')
kThemeSoundSelectItem = FOUR_CHAR_CODE('fsel')
kThemeSoundNewItem = FOUR_CHAR_CODE('fnew')
kThemeSoundReceiveDrop = FOUR_CHAR_CODE('fdrp')
kThemeSoundCopyDone = FOUR_CHAR_CODE('fcpd')
kThemeSoundResolveAlias = FOUR_CHAR_CODE('fral')
kThemeSoundLaunchApp = FOUR_CHAR_CODE('flap')
kThemeSoundDiskInsert = FOUR_CHAR_CODE('dski')
kThemeSoundDiskEject = FOUR_CHAR_CODE('dske')
kThemeSoundFinderDragOnIcon = FOUR_CHAR_CODE('fdon')
kThemeSoundFinderDragOffIcon = FOUR_CHAR_CODE('fdof')
kThemePopupTabNormalPosition = 0
kThemePopupTabCenterOnWindow = 1
kThemePopupTabCenterOnOffset = 2
kThemeMetricScrollBarWidth = 0
kThemeMetricSmallScrollBarWidth = 1
kThemeMetricCheckBoxHeight = 2
kThemeMetricRadioButtonHeight = 3
kThemeMetricEditTextWhitespace = 4
kThemeMetricEditTextFrameOutset = 5
kThemeMetricListBoxFrameOutset = 6
kThemeMetricFocusRectOutset = 7
kThemeMetricImageWellThickness = 8
kThemeMetricScrollBarOverlap = 9
kThemeMetricLargeTabHeight = 10
kThemeMetricLargeTabCapsWidth = 11
kThemeMetricTabFrameOverlap = 12
kThemeMetricTabIndentOrStyle = 13
kThemeMetricTabOverlap = 14
kThemeMetricSmallTabHeight = 15
kThemeMetricSmallTabCapsWidth = 16
kThemeMetricDisclosureButtonHeight = 17
kThemeMetricRoundButtonSize = 18
kThemeMetricPushButtonHeight = 19
kThemeMetricListHeaderHeight = 20
kThemeMetricSmallCheckBoxHeight = 21
kThemeMetricDisclosureButtonWidth = 22
kThemeMetricSmallDisclosureButtonHeight = 23
kThemeMetricSmallDisclosureButtonWidth = 24
kThemeMetricDisclosureTriangleHeight = 25
kThemeMetricDisclosureTriangleWidth = 26
kThemeMetricLittleArrowsHeight = 27
kThemeMetricLittleArrowsWidth = 28
kThemeMetricPaneSplitterHeight = 29
kThemeMetricPopupButtonHeight = 30
kThemeMetricSmallPopupButtonHeight = 31
kThemeMetricLargeProgressBarThickness = 32
kThemeMetricPullDownHeight = 33
kThemeMetricSmallPullDownHeight = 34
kThemeMetricSmallPushButtonHeight = 35
kThemeMetricSmallRadioButtonHeight = 36
kThemeMetricRelevanceIndicatorHeight = 37
kThemeMetricResizeControlHeight = 38
kThemeMetricSmallResizeControlHeight = 39
kThemeMetricLargeRoundButtonSize = 40
kThemeMetricHSliderHeight = 41
kThemeMetricHSliderTickHeight = 42
kThemeMetricSmallHSliderHeight = 43
kThemeMetricSmallHSliderTickHeight = 44
kThemeMetricVSliderWidth = 45
kThemeMetricVSliderTickWidth = 46
kThemeMetricSmallVSliderWidth = 47
kThemeMetricSmallVSliderTickWidth = 48
kThemeMetricTitleBarControlsHeight = 49
kThemeMetricCheckBoxWidth = 50
kThemeMetricSmallCheckBoxWidth = 51
kThemeMetricRadioButtonWidth = 52
kThemeMetricSmallRadioButtonWidth = 53
kThemeMetricSmallHSliderMinThumbWidth = 54
kThemeMetricSmallVSliderMinThumbHeight = 55
kThemeMetricSmallHSliderTickOffset = 56
kThemeMetricSmallVSliderTickOffset = 57
kThemeMetricNormalProgressBarThickness = 58
kThemeMetricProgressBarShadowOutset = 59
kThemeMetricSmallProgressBarShadowOutset = 60
kThemeMetricPrimaryGroupBoxContentInset = 61
kThemeMetricSecondaryGroupBoxContentInset = 62
# appearanceBadBrushIndexErr = themeInvalidBrushErr
# appearanceProcessRegisteredErr = themeProcessRegisteredErr
# appearanceProcessNotRegisteredErr = themeProcessNotRegisteredErr
# appearanceBadTextColorIndexErr = themeBadTextColorErr
# appearanceThemeHasNoAccents = themeHasNoAccentsErr
# appearanceBadCursorIndexErr = themeBadCursorIndexErr
kThemeActiveDialogBackgroundBrush = kThemeBrushDialogBackgroundActive
kThemeInactiveDialogBackgroundBrush = kThemeBrushDialogBackgroundInactive
kThemeActiveAlertBackgroundBrush = kThemeBrushAlertBackgroundActive
kThemeInactiveAlertBackgroundBrush = kThemeBrushAlertBackgroundInactive
kThemeActiveModelessDialogBackgroundBrush = kThemeBrushModelessDialogBackgroundActive
kThemeInactiveModelessDialogBackgroundBrush = kThemeBrushModelessDialogBackgroundInactive
kThemeActiveUtilityWindowBackgroundBrush = kThemeBrushUtilityWindowBackgroundActive
kThemeInactiveUtilityWindowBackgroundBrush = kThemeBrushUtilityWindowBackgroundInactive
kThemeListViewSortColumnBackgroundBrush = kThemeBrushListViewSortColumnBackground
kThemeListViewBackgroundBrush = kThemeBrushListViewBackground
kThemeIconLabelBackgroundBrush = kThemeBrushIconLabelBackground
kThemeListViewSeparatorBrush = kThemeBrushListViewSeparator
kThemeChasingArrowsBrush = kThemeBrushChasingArrows
kThemeDragHiliteBrush = kThemeBrushDragHilite
kThemeDocumentWindowBackgroundBrush = kThemeBrushDocumentWindowBackground
kThemeFinderWindowBackgroundBrush = kThemeBrushFinderWindowBackground
kThemeActiveScrollBarDelimiterBrush = kThemeBrushScrollBarDelimiterActive
kThemeInactiveScrollBarDelimiterBrush = kThemeBrushScrollBarDelimiterInactive
kThemeFocusHighlightBrush = kThemeBrushFocusHighlight
kThemeActivePopupArrowBrush = kThemeBrushPopupArrowActive
kThemePressedPopupArrowBrush = kThemeBrushPopupArrowPressed
kThemeInactivePopupArrowBrush = kThemeBrushPopupArrowInactive
kThemeAppleGuideCoachmarkBrush = kThemeBrushAppleGuideCoachmark
kThemeActiveDialogTextColor = kThemeTextColorDialogActive
kThemeInactiveDialogTextColor = kThemeTextColorDialogInactive
kThemeActiveAlertTextColor = kThemeTextColorAlertActive
kThemeInactiveAlertTextColor = kThemeTextColorAlertInactive
kThemeActiveModelessDialogTextColor = kThemeTextColorModelessDialogActive
kThemeInactiveModelessDialogTextColor = kThemeTextColorModelessDialogInactive
kThemeActiveWindowHeaderTextColor = kThemeTextColorWindowHeaderActive
kThemeInactiveWindowHeaderTextColor = kThemeTextColorWindowHeaderInactive
kThemeActivePlacardTextColor = kThemeTextColorPlacardActive
kThemeInactivePlacardTextColor = kThemeTextColorPlacardInactive
kThemePressedPlacardTextColor = kThemeTextColorPlacardPressed
kThemeActivePushButtonTextColor = kThemeTextColorPushButtonActive
kThemeInactivePushButtonTextColor = kThemeTextColorPushButtonInactive
kThemePressedPushButtonTextColor = kThemeTextColorPushButtonPressed
kThemeActiveBevelButtonTextColor = kThemeTextColorBevelButtonActive
kThemeInactiveBevelButtonTextColor = kThemeTextColorBevelButtonInactive
kThemePressedBevelButtonTextColor = kThemeTextColorBevelButtonPressed
kThemeActivePopupButtonTextColor = kThemeTextColorPopupButtonActive
kThemeInactivePopupButtonTextColor = kThemeTextColorPopupButtonInactive
kThemePressedPopupButtonTextColor = kThemeTextColorPopupButtonPressed
kThemeIconLabelTextColor = kThemeTextColorIconLabel
kThemeListViewTextColor = kThemeTextColorListView
kThemeActiveDocumentWindowTitleTextColor = kThemeTextColorDocumentWindowTitleActive
kThemeInactiveDocumentWindowTitleTextColor = kThemeTextColorDocumentWindowTitleInactive
kThemeActiveMovableModalWindowTitleTextColor = kThemeTextColorMovableModalWindowTitleActive
kThemeInactiveMovableModalWindowTitleTextColor = kThemeTextColorMovableModalWindowTitleInactive
kThemeActiveUtilityWindowTitleTextColor = kThemeTextColorUtilityWindowTitleActive
kThemeInactiveUtilityWindowTitleTextColor = kThemeTextColorUtilityWindowTitleInactive
kThemeActivePopupWindowTitleColor = kThemeTextColorPopupWindowTitleActive
kThemeInactivePopupWindowTitleColor = kThemeTextColorPopupWindowTitleInactive
kThemeActiveRootMenuTextColor = kThemeTextColorRootMenuActive
kThemeSelectedRootMenuTextColor = kThemeTextColorRootMenuSelected
kThemeDisabledRootMenuTextColor = kThemeTextColorRootMenuDisabled
kThemeActiveMenuItemTextColor = kThemeTextColorMenuItemActive
kThemeSelectedMenuItemTextColor = kThemeTextColorMenuItemSelected
kThemeDisabledMenuItemTextColor = kThemeTextColorMenuItemDisabled
kThemeActivePopupLabelTextColor = kThemeTextColorPopupLabelActive
kThemeInactivePopupLabelTextColor = kThemeTextColorPopupLabelInactive
kAEThemeSwitch = kAEAppearanceChanged
kThemeNoAdornment = kThemeAdornmentNone
kThemeDefaultAdornment = kThemeAdornmentDefault
kThemeFocusAdornment = kThemeAdornmentFocus
kThemeRightToLeftAdornment = kThemeAdornmentRightToLeft
kThemeDrawIndicatorOnly = kThemeAdornmentDrawIndicatorOnly
kThemeBrushPassiveAreaFill = kThemeBrushStaticAreaFill
kThemeMetricCheckBoxGlyphHeight = kThemeMetricCheckBoxHeight
kThemeMetricRadioButtonGlyphHeight = kThemeMetricRadioButtonHeight
kThemeMetricDisclosureButtonSize = kThemeMetricDisclosureButtonHeight
kThemeMetricBestListHeaderHeight = kThemeMetricListHeaderHeight
kThemeMetricSmallProgressBarThickness = kThemeMetricNormalProgressBarThickness
kThemeMetricProgressBarThickness = kThemeMetricLargeProgressBarThickness
kThemeScrollBar = kThemeMediumScrollBar
kThemeSlider = kThemeMediumSlider
kThemeProgressBar = kThemeMediumProgressBar
kThemeIndeterminateBar = kThemeMediumIndeterminateBar
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.3/Lib/plat-mac/Carbon/Appearance.py | Python | mit | 26,525 |
# -*- coding: utf-8 -*-
def get_instance_children(obj, depth=0, sig=0):
"""
Récupèration récursive des relations enfants d'un objet
@depth: integer limitant le niveau de recherche des enfants, 0=illimité
"""
children = []
# Pour toute les relations enfants de l'objet
for child in obj._meta.get_all_related_objects():
# Nom de l'attribut d'accès
cname = child.get_accessor_name()
verbose_name = child.model._meta.verbose_name
# Récupère tout les objets des relations
for elem in getattr(obj, cname).all():
followed = []
# Recherche récursive des enfants
if depth == 0 or sig < depth:
followed = get_instance_children(elem, depth=depth, sig=sig+1)
children.append( (verbose_name, unicode(elem), followed) )
return children
| sveetch/sveedocuments | sveedocuments/utils/objects.py | Python | mit | 871 |
#!/usr/bin/env python
"""Plot information needed file"""
########################################################################
# File: plot_raw_read_alignment.py
# executable: plot_raw_read_alignment.py
#
# Author: Andrew Bailey
# History: Created 12/01/17
########################################################################
from __future__ import print_function
import sys
import os
from timeit import default_timer as timer
import pysam
import matplotlib.pyplot as plt
import matplotlib.patches as mplpatches
import numpy as np
import scipy.stats as stats
import seaborn as sns
from py3helpers.utils import list_dir
from PyPore.parsers import SpeedyStatSplit
from nanonet.eventdetection.filters import minknow_event_detect
from nanotensor.fast5 import Fast5
from nanotensor.event_detection import resegment_reads, create_anchor_kmers, index_to_time_rna_basecall
def raw_scatter_plot(signal_data, label_data, outpath, interval):
    """Scatter-plot a window of raw nanopore signal with base-call labels.

    :param signal_data: sequence of raw current samples, one point per index
    :param label_data: object with parallel arrays ``start``, ``length`` and
        ``base`` giving, per called base, its start index, span and letter
    :param outpath: output path for the figure -- currently unused, the plot
        is shown interactively and the ``savefig`` call is commented out
    :param interval: (start_index, end_index) window of samples to display
    """
    # Figure width scales with the displayed window so labels stay readable.
    size = (interval[1] - interval[0]) / 100
    plt.figure(figsize=(size, 4))
    panel1 = plt.axes([0.01, 0.1, .95, .9])
    # longest = max(data[0]) + data[1])
    # panel1.set_xlim(0, 1000)
    mean = np.mean(signal_data)
    stdv = np.std(signal_data)
    # Clip the y axis to +/- 3 standard deviations around the signal mean.
    panel1.set_ylim(mean - (3 * stdv), mean + (3 * stdv))
    panel1.set_xlim(interval[0], interval[1])
    # panel1.set_xscale("log")
    plt.scatter(x=range(len(signal_data)), y=signal_data, s=1, c="k")
    plt.title('Nanopore Read')
    # Annotate each labelled base starting inside the window and mark its
    # boundaries with vertical lines.
    for i in range(len(label_data.start)):
        if interval[0] < label_data.start[i] < interval[1]:
            panel1.text(label_data.start[i] + (label_data.length[i] / 2), 2, "{}".format(label_data.base[i]),
                        fontsize=10, va="bottom", ha="center")
            panel1.axvline(label_data.start[i])
            panel1.axvline(label_data.start[i] + label_data.length[i])

    plt.show()
    # plt.savefig(outpath)
def raw_scatter_plot_with_events(signal_data, label_data, outpath, interval, events):
    """Scatter-plot raw signal with base-call labels plus event boundaries.

    Same as :func:`raw_scatter_plot`, but additionally draws a dashed red
    vertical line at every detected event peak that falls in the window.

    :param signal_data: sequence of raw current samples, one point per index
    :param label_data: object with parallel arrays ``start``, ``length`` and
        ``base`` describing the called bases
    :param outpath: output path for the figure -- currently unused, the plot
        is shown interactively and the ``savefig`` call is commented out
    :param interval: (start_index, end_index) window of samples to display
    :param events: iterable of event-peak sample indices
    """
    # Figure width scales with the displayed window so labels stay readable.
    size = (interval[1] - interval[0]) / 75
    plt.figure(figsize=(size, 4))
    panel1 = plt.axes([0.01, 0.1, .95, .9])
    # longest = max(data[0]) + data[1])
    # panel1.set_xlim(0, 1000)
    mean = np.mean(signal_data)
    stdv = np.std(signal_data)
    # Clip the y axis to +/- 3 standard deviations around the signal mean.
    panel1.set_ylim(mean - (3 * stdv), mean + (3 * stdv))
    panel1.set_xlim(interval[0], interval[1])
    # panel1.set_xscale("log")
    plt.scatter(x=range(len(signal_data)), y=signal_data, s=1, c="k")
    plt.title('Nanopore Read')
    # Annotate labelled bases inside the window, as in raw_scatter_plot.
    for i in range(len(label_data.start)):
        if interval[0] < label_data.start[i] < interval[1]:
            panel1.text(label_data.start[i] + (label_data.length[i] / 2), 2, "{}".format(label_data.base[i]),
                        fontsize=10, va="bottom", ha="center")
            panel1.axvline(label_data.start[i])
            panel1.axvline(label_data.start[i] + label_data.length[i])
    # Mark each event peak inside the window with a dashed red line.
    for event_peak in events:
        if interval[0] < event_peak < interval[1]:
            panel1.axvline(event_peak, linestyle='--', color='r')

    plt.show()
    # plt.savefig(outpath)
def plot_raw_reads(current, old_events, resegment=None, dna=False, sampling_freq=4000, start_time=0, window_size=None):
    """Plot a raw signal trace overlaid with segment means and kmer labels.

    Uses ideas from Ryan Lorig-Roach's script.

    :param current: raw current samples to plot
    :param old_events: event records with "start", "length", "mean" and
        "model_state" fields (original segmentation)
    :param resegment: optional re-segmented events with "raw_start",
        "raw_length", "mean" and "model_state" fields; drawn in red on top
    :param dna: if True, event coordinates are converted from time to raw
        sample indices using sampling_freq and start_time
    :param sampling_freq: sampling frequency (used only when dna=True)
    :param start_time: acquisition start time (used only when dna=True)
    :param window_size: width of a randomly placed display window, in the
        same units as the event coordinates
    """
    # NOTE(review): if window_size is None, `start + window_size` below
    # raises TypeError -- confirm all callers pass an integer window_size.
    fig1 = plt.figure(figsize=(24, 3))
    panel = fig1.add_subplot(111)
    prevMean = 0
    handles = list()
    # Draw the full raw trace first so segment lines sit on top of it.
    handle, = panel.plot(current, color="black", lw=0.2)
    handles.append(handle)
    start = 0
    if window_size:
        # Pick a random window start inside the span covered by the events.
        start = old_events[0]["start"]
        end = old_events[-1]["start"]
        if dna:
            # Convert event times to raw sample indices.
            start = (start - (start_time / sampling_freq)) * sampling_freq
            end = (end - (start_time / sampling_freq)) * sampling_freq
        start = np.random.randint(start, end - window_size)
        # print(start, end - window_size)
    # print(len(old_events), len(resegment))
    # Draw the original segmentation in blue.
    for j, segment in enumerate(old_events):
        x0 = segment["start"]
        x1 = x0 + segment["length"]
        if dna:
            x0 = (x0 - (start_time / sampling_freq)) * sampling_freq
            x1 = (x1 - (start_time / sampling_freq)) * sampling_freq
        if start < x0 < (start + window_size):
            kmer = segment["model_state"]
            mean = segment['mean']
            color = [.082, 0.282, 0.776]
            handle1, = panel.plot([x0, x1], [mean, mean], color=color, lw=0.8)
            panel.plot([x0, x0], [prevMean, mean], color=color, lw=0.5)  # <-- uncomment for pretty square wave
            # panel.text(x0, mean - 2, bytes.decode(kmer), fontsize=5)
            prevMean = mean
            handles.append(handle1)
    panel.set_title("Signal")
    panel.set_xlabel("Time (ms)")
    panel.set_ylabel("Current (pA)")
    # Draw the re-segmented events (if any) in red, with kmer labels.
    if resegment is not None:
        color = [1, 0.282, 0.176]
        prevMean = 0
        for indx, segment in enumerate(resegment):
            kmer = segment["model_state"]
            x0 = segment["raw_start"]
            x1 = x0 + segment["raw_length"]
            mean = segment['mean']
            if start < x0 < start + window_size:
                handle2, = panel.plot([x0, x1], [mean, mean], color=color, lw=0.8)
                panel.plot([x0, x0], [prevMean, mean], color=color, lw=0.5)  # <-- uncomment for pretty square wave
                panel.text(x0, mean + 2, bytes.decode(kmer), fontsize=5)
                prevMean = mean
                handles.append(handle2)
    box = panel.get_position()
    panel.set_position([box.x0, box.y0, box.width * 0.95, box.height])
    # NOTE(review): handles grows by one per plotted segment, so
    # len(handles) == 3 only holds when exactly two segments were drawn --
    # confirm the intended legend selection logic.
    if len(handles) == 3:
        plt.legend(handles, ["Raw", "OriginalSegment", "New Segment"], loc='upper left', bbox_to_anchor=(1, 1))
    else:
        plt.legend(handles, ["Raw", "OriginalSegment"], loc='upper left', bbox_to_anchor=(1, 1))

    plt.show()
def plot_segmented_comparison(fast5_handle, window_size=None):
    """Plot read with segmented lines and kmers.

    :param fast5_handle: Fast5 instance where there is already a resegmented
        analysis table
    :param window_size: size of window to display instead of whole file
    """
    events = fast5_handle.get_basecall_data()
    signal = fast5_handle.get_read(raw=True, scale=True)
    resegment_events = fast5_handle.get_resegment_basecall()
    if fast5_handle.is_read_rna():
        # RNA path: plot both the original and re-segmented events.
        plot_raw_reads(signal, events, resegment=resegment_events, window_size=window_size)
    else:
        # DNA path: event coordinates are time-based, so pass timing info so
        # plot_raw_reads can convert them to raw sample indices.
        start_time = fast5_handle.raw_attributes["start_time"]
        sampling_freq = fast5_handle.sample_rate
        # NOTE(review): resegment_events is fetched above but discarded here
        # (resegment=None) for DNA reads -- confirm this is intentional.
        plot_raw_reads(signal, events, resegment=None, dna=True, sampling_freq=sampling_freq,
                       start_time=start_time, window_size=window_size)
def main():
    """Demo driver: open a local minION read, plot its segmentation, and
    report the elapsed wall-clock time on stderr.

    Paths are hard-coded to the author's local test data.
    """
    start = timer()

    # Parameter sets for event detection; only used by the commented-out
    # resegment_reads call below.
    minknow_params = dict(window_lengths=(5, 10), thresholds=(2.0, 1.1), peak_height=1.2)
    speedy_params = dict(min_width=5, max_width=30, min_gain_per_sample=0.008, window_width=800)
    dna_reads = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/canonical/"
    files = list_dir(dna_reads, ext='fast5')
    # Alternate RNA data set (currently unused).
    rna_reads = "/Users/andrewbailey/CLionProjects/nanopore-RNN/test_files/minion-reads/rna_reads"
    # files = list_dir(rna_reads, ext='fast5')
    print(files[0])
    f5fh = Fast5(files[0])
    # f5fh = resegment_reads(files[0], minknow_params, speedy=False, overwrite=True)
    plot_segmented_comparison(f5fh, window_size=3000)

    stop = timer()
    print("Running Time = {} seconds".format(stop - start), file=sys.stderr)
if __name__ == "__main__":
    main()
    # Explicitly terminate the interpreter after the demo run.
    raise SystemExit
| UCSC-nanopore-cgl/nanopore-RNN | nanotensor/visualization/plot_raw_read_alignment.py | Python | mit | 7,635 |
#!/usr/bin/env python
"""
gets basic info about AVI file using OpenCV
input: filename or cv2.Capture
"""
from pathlib import Path
from struct import pack
from typing import Dict, Any
import cv2
def getaviprop(fn: Path) -> Dict[str, Any]:
    """Return basic properties of a video file or open capture.

    Accepts either a filename (str/Path) or an already-open
    ``cv2.VideoCapture``; a capture opened here is released before return.

    Returns a dict with keys: nframe, xy_pixel, fps, codec.
    """
    given_path = isinstance(fn, (str, Path))
    if given_path:
        # A filename was supplied: open the capture ourselves.
        fn = Path(fn).expanduser()
        if not fn.is_file():
            raise FileNotFoundError(fn)
        v = cv2.VideoCapture(str(fn))
        if v is None:
            raise OSError(f'could not read {fn}')
    else:
        # Assume the caller handed us a cv2.VideoCapture object.
        v = fn

    if not v.isOpened():
        raise OSError(f'cannot read {fn} probable codec issue')

    width = int(v.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = int(v.get(cv2.CAP_PROP_FRAME_HEIGHT))
    vidparam = {
        'nframe': int(v.get(cv2.CAP_PROP_FRAME_COUNT)),
        'xy_pixel': (width, height),
        'fps': v.get(cv2.CAP_PROP_FPS),
        'codec': fourccint2ascii(int(v.get(cv2.CAP_PROP_FOURCC))),
    }

    # Only release captures we opened in this function.
    if isinstance(fn, Path):
        v.release()

    return vidparam
def fourccint2ascii(fourcc_int: int) -> str:
    """
    Decode a fourcc code given as a 32-bit little-endian integer into its
    four-character ASCII form (e.g. 875967048 -> 'H264').
    """
    assert isinstance(fourcc_int, int)
    little_endian_bytes = pack('<I', fourcc_int)
    return little_endian_bytes.decode('ascii')
if __name__ == '__main__':
    # Command-line entry point: print the properties of one AVI file.
    from argparse import ArgumentParser
    p = ArgumentParser(description='get parameters of AVI file')
    p.add_argument('avifn', help='avi filename')
    p = p.parse_args()

    vidparam = getaviprop(p.avifn)
    print(vidparam)
| scienceopen/CVutils | morecvutils/getaviprop.py | Python | mit | 1,520 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-01-09 15:27
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give users a default ordering by id and a many-to-many link to
    helusers AD groups."""

    dependencies = [
        ('helusers', '0001_add_ad_groups'),
        ('helevents', '0003_auto_20170915_1529'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='user',
            options={'ordering': ('id',)},
        ),
        migrations.AddField(
            model_name='user',
            name='ad_groups',
            field=models.ManyToManyField(blank=True, to='helusers.ADGroup'),
        ),
    ]
| City-of-Helsinki/linkedevents | helevents/migrations/0004_auto_20180109_1727.py | Python | mit | 639 |
from __future__ import (absolute_import, division, print_function, unicode_literals)
from builtins import *
import boto.exception
import io
import json
import wizzat.kvtable
from boto.s3.key import Key, compute_md5
__all__ = [
'S3Table',
]
class S3Table(wizzat.kvtable.KVTable):
    """
    This is a micro-ORM for working with S3.

    Relevant options (on top of KVTable options):
    - bucket:             The S3 bucket name to store this table in
    - json_encoder:       func, the json encoder (typically, staticmethod(json.dumps))
    - json_decoder:       func, the json decoder (typically, staticmethod(json.loads))
    - reduced_redundancy: bool, Whether or not to store the key with S3 reduced redundancy
    - encrypt_key:        bool, Use S3 encryption
    - policy:             CannedACLStrings, The S3 policy to apply to new objects in S3
    """
    memoize            = False
    table_name         = ''
    key_fields         = []
    fields             = []
    bucket             = None
    policy             = None
    encrypt_key        = False
    reduced_redundancy = False
    json_encoder       = staticmethod(json.dumps)
    json_decoder       = staticmethod(json.loads)

    @classmethod
    def _remote_bucket(cls):
        # Resolve the configured bucket name to a live boto Bucket object.
        return cls.conn.get_bucket(cls.bucket)

    @classmethod
    def delete_key(cls, kv_key):
        # Remove the object stored under kv_key from the bucket.
        cls._remote_bucket().delete_key(kv_key)

    @classmethod
    def _remote_key(cls, kv_key):
        # Build a boto Key handle for kv_key (no network round trip yet).
        return Key(cls._remote_bucket(), kv_key)

    @classmethod
    def _find_by_key(cls, kv_key):
        # Fetch and JSON-decode the object at kv_key.
        # Returns (True, data) on success, (None, None) when S3 reports an
        # error (e.g. the key does not exist).
        try:
            content_str = cls._remote_key(kv_key).get_contents_as_string()
            if not isinstance(content_str, str):
                # boto may return bytes on Python 3; decode before parsing.
                content_str = content_str.decode()
            return True, cls.json_decoder(content_str)
        except boto.exception.S3ResponseError:
            return None, None

    def _insert(self, force=False):
        # Serialize this row to JSON and upload it under self._key, passing
        # along the class-level storage options (policy, encryption, RR).
        content_str = self.json_encoder(self._data)
        if not isinstance(content_str, str):
            content_str = content_str.decode()

        # Pre-compute the MD5 so boto can verify the upload integrity.
        md5, b64, file_size = compute_md5(io.StringIO(content_str))
        self._remote_key(self._key).set_contents_from_string(content_str,
            md5                = (md5, b64, file_size),
            policy             = self.policy,
            encrypt_key        = self.encrypt_key,
            reduced_redundancy = self.reduced_redundancy,
        )
        return True

    # S3 PUT overwrites unconditionally, so an update is just an insert.
    _update = _insert

    def _delete(self, force=False):
        self.delete_key(self._key)
        return False
| wizzat/wizzat.py | wizzat/s3table.py | Python | mit | 2,568 |
#!/usr/bin/env python
#coding=utf-8
# Filename: writer.py
'''
日志记录
@author: 1th
@data: 2017.2.28
'''
from time import sleep
import datetime
from simpleat.conf import settings, globalvar
from simpleat.core import exceptions
from .logger import write_log
_CMD_OUT = settings.CMD_OUT  # whether to also echo output on the command line
_LOG_OUT = settings.LOG_OUT  # whether log output is enabled while running
_LOG_DIR = settings.LOG_DIR  # directory where log files are stored
def log(logmsg, level, logstr=_LOG_DIR):
    '''
    Write a log record, automatically stamped with the current time.

    Args:
        logmsg: message to record (converted with unicode())
        level: log level label embedded in the record
        logstr: directory holding the log files
    '''
    fulllogmsg = ''.join(['[', datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), \
        '] - ', level, ' - ', unicode(logmsg)])
    try:
        write_log(fulllogmsg, level, logstr)
    except exceptions.WriteLogException as wle:
        # Logging must never crash the caller: on failure, optionally echo
        # the error to the console and carry on.
        if _CMD_OUT:
            print unicode(wle)
def logger():
    '''
    Poll the global queue g_hold_lognote and write any pending log records.

    Runs forever; intended for a background thread. Sleeps 0.5 s per cycle.
    '''
    while True:
        if not globalvar.g_hold_lognote.empty():
            content, level = globalvar.g_hold_lognote.get()
            if _LOG_OUT:
                log(content, level)
        sleep(0.5)
| 15th/simpleat | simpleat/core/log/writer.py | Python | mit | 1,220 |
#!/usr/bin/env python
# Note: this module is not a demo per se, but is used by many of
# the demo modules for various purposes.
import wx
#---------------------------------------------------------------------------
class ColoredPanel(wx.Window):
    """A simple bordered window filled with a single background colour.

    Used by the wxPython demo modules as a visual placeholder.
    """
    def __init__(self, parent, color):
        wx.Window.__init__(self, parent, -1, style = wx.SIMPLE_BORDER)
        self.SetBackgroundColour(color)
        # On GTK a custom background style is required for the colour to
        # paint reliably.
        if wx.Platform == '__WXGTK__':
            self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
#---------------------------------------------------------------------------
| dnxbjyj/python-basic | gui/wxpython/wxPython-demo-4.0.1/demo/ColorPanel.py | Python | mit | 577 |
from django.apps import AppConfig
class ImagerProfileAppConfig(AppConfig):
    """Django application configuration for the imager_profile app."""
    name = "imager_profile"
    verbose_name = "Imager User Profile"

    def ready(self):
        """Import signal handlers once the app registry is fully loaded."""
        from imager_profile import handlers
| crashtack/django-imager | imager_profile/apps.py | Python | mit | 260 |
VERSION = (0, 0, 1, 'dev')

# Build the human-readable version string from the VERSION tuple: an integer
# third component joins with a dot, a string one (e.g. 'alpha') with an
# underscore; a missing/None third component yields just "major.minor".
if len(VERSION) > 2 and VERSION[2] is not None:
    separator = "." if isinstance(VERSION[2], int) else "_"
    str_version = "%s.%s%s%s" % (VERSION[0], VERSION[1], separator, VERSION[2])
else:
    str_version = "%s.%s" % VERSION[:2]

__version__ = str_version
| yceruto/django-formapi | formapi/__init__.py | Python | mit | 348 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-24 18:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional scan upload to entries and move bank-statement scans
    to the 'releves' upload directory."""

    dependencies = [
        ('accounting', '0007_bankstatement'),
    ]

    operations = [
        migrations.AddField(
            model_name='entry',
            name='scan',
            field=models.FileField(blank=True, upload_to='justificatif'),
        ),
        migrations.AlterField(
            model_name='bankstatement',
            name='scan',
            field=models.FileField(upload_to='releves'),
        ),
    ]
| eedf/jeito | accounting/migrations/0008_auto_20170224_1912.py | Python | mit | 631 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
Very simple script to replace a template with another one.
It also converts the old MediaWiki boilerplate format to the new format.
Syntax: python template.py [-remove] [xml[:filename]] oldTemplate [newTemplate]
Specify the template on the command line. The program will pick up the template
page, and look for all pages using it. It will then automatically loop over
them, and replace the template.
Command line options:
-remove Remove every occurrence of the template from every article
-subst Resolves the template by putting its text directly into the
article. This is done by changing {{...}} or {{msg:...}} into
{{subst:...}}
-assubst Replaces the first argument as old template with the second
argument as new template but substitutes it like -subst does.
Using both options -remove and -subst in the same command line has
the same effect.
-xml retrieve information from a local dump
(https://download.wikimedia.org). If this argument isn't given,
info will be loaded from the maintenance page of the live wiki.
argument can also be given as "-xml:filename.xml".
-user: Only process pages edited by a given user
-skipuser: Only process pages not edited by a given user
-timestamp: (With -user or -skipuser). Only check for a user where his edit is
not older than the given timestamp. Timestamp must be written in
MediaWiki timestamp format which is "%Y%m%d%H%M%S"
If this parameter is missed, all edits are checked but this is
restricted to the last 100 edits.
-summary: Lets you pick a custom edit summary. Use quotes if edit summary
contains spaces.
-always Don't bother asking to confirm any of the changes, Just Do It.
-addcat: Appends the given category to every page that is edited. This is
useful when a category is being broken out from a template
parameter or when templates are being upmerged but more information
must be preserved.
other: First argument is the old template name, second one is the new
name.
If you want to address a template which has spaces, put quotation
marks around it, or use underscores.
Examples:
If you have a template called [[Template:Cities in Washington]] and want to
change it to [[Template:Cities in Washington state]], start
python pwb.py template "Cities in Washington" "Cities in Washington state"
Move the page [[Template:Cities in Washington]] manually afterwards.
If you have a template called [[Template:test]] and want to substitute it only
on pages in the User: and User talk: namespaces, do:
python pwb.py template test -subst -namespace:2 -namespace:3
Note that -namespace: is a global Pywikibot parameter
This next example substitutes the template lived with a supplied edit summary.
It only performs substitutions in main article namespace and doesn't prompt to
start replacing. Note that -putthrottle: is a global Pywikibot parameter.
python pwb.py template -putthrottle:30 -namespace:0 lived -subst -always \
-summary:"BOT: Substituting {{lived}}, see [[WP:SUBST]]."
This next example removes the templates {{cfr}}, {{cfru}}, and {{cfr-speedy}}
from five category pages as given:
python pwb.py template cfr cfru cfr-speedy -remove -always \
-page:"Category:Mountain monuments and memorials" \
-page:"Category:Indian family names" \
-page:"Category:Tennis tournaments in Belgium" \
-page:"Category:Tennis tournaments in Germany" \
-page:"Category:Episcopal cathedrals in the United States" \
-summary:"Removing Cfd templates from category pages that survived."
This next example substitutes templates test1, test2, and space test on all
pages:
python pwb.py template test1 test2 "space test" -subst -always
"""
#
# (C) Daniel Herding, 2004
# (C) Rob W.W. Hooft, 2003-2005
# (C) xqt, 2009-2015
# (C) Pywikibot team, 2004-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
#
import re
from warnings import warn
import pywikibot
from pywikibot import i18n, pagegenerators, xmlreader, Bot
from pywikibot.exceptions import ArgumentDeprecationWarning
from scripts.replace import ReplaceRobot as ReplaceBot
class XmlDumpTemplatePageGenerator(object):

    """
    Generator which yields Pages that transclude a template.

    These pages will be retrieved from a local XML dump file
    (cur table), and may not still transclude the template.
    """

    def __init__(self, templates, xmlfilename):
        """
        Constructor.

        Arguments:
            * templates - A list of Page objects representing the searched
              templates
            * xmlfilename - The dump's path, either absolute or relative
        """
        self.templates = templates
        self.xmlfilename = xmlfilename

    def __iter__(self):
        """Yield page objects until the entire XML dump has been read."""
        mysite = pywikibot.Site()
        dump = xmlreader.XmlDump(self.xmlfilename)
        # regular expression to find the original template.
        # {{vfd}} does the same thing as {{Vfd}}, so both will be found.
        # The old syntax, {{msg:vfd}}, will also be found.
        templatePatterns = []
        for template in self.templates:
            templatePattern = template.title(withNamespace=False)
            if mysite.namespaces[10].case == 'first-letter':
                # Match either capitalisation of the first letter, mirroring
                # MediaWiki's first-letter case-insensitivity.
                templatePattern = '[%s%s]%s' % (templatePattern[0].upper(),
                                                templatePattern[0].lower(),
                                                templatePattern[1:])
            # Spaces and underscores are interchangeable in titles.
            templatePattern = re.sub(' ', '[_ ]', templatePattern)
            templatePatterns.append(templatePattern)
        templateRegex = re.compile(
            r'\{\{ *([mM][sS][gG]:)?(?:%s) *(?P<parameters>\|[^}]+|) *}}'
            % '|'.join(templatePatterns))
        for entry in dump.parse():
            if templateRegex.search(entry.text):
                page = pywikibot.Page(mysite, entry.title)
                yield page
class TemplateRobot(ReplaceBot):

    """This bot will replace, remove or subst all occurrences of a template."""

    def __init__(self, generator, templates, **kwargs):
        """
        Constructor.

        @param generator: the pages to work on
        @type generator: iterable
        @param templates: a dictionary which maps old template names to
            their replacements. If remove or subst is True, it maps the
            names of the templates that should be removed/resolved to None.
        @type templates: dict
        """
        self.availableOptions.update({
            'subst': False,
            'remove': False,
            'summary': None,
            'addedCat': None,
        })

        # NOTE(review): Bot.__init__ is called here and ReplaceBot's
        # __init__ again via super() at the end -- confirm this double
        # initialisation is the intended pattern.
        Bot.__init__(self, generator=generator, **kwargs)

        self.templates = templates

        # get edit summary message if it's empty
        if not self.getOption('summary'):
            comma = self.site.mediawiki_message('comma-separator')
            params = {'list': comma.join(self.templates.keys()),
                      'num': len(self.templates)}

            site = self.site

            if self.getOption('remove'):
                self.options['summary'] = i18n.twntranslate(
                    site, 'template-removing', params)
            elif self.getOption('subst'):
                self.options['summary'] = i18n.twntranslate(
                    site, 'template-substituting', params)
            else:
                self.options['summary'] = i18n.twntranslate(
                    site, 'template-changing', params)

        # regular expression to find the original template.
        # {{vfd}} does the same thing as {{Vfd}}, so both will be found.
        # The old syntax, {{msg:vfd}}, will also be found.
        # The group 'parameters' will either match the parameters, or an
        # empty string if there are none.
        replacements = []
        exceptions = {}
        namespace = self.site.namespaces[10]
        for old, new in self.templates.items():
            if namespace.case == 'first-letter':
                # Match either capitalisation of the first letter.
                pattern = '[' + \
                          re.escape(old[0].upper()) + \
                          re.escape(old[0].lower()) + \
                          ']' + re.escape(old[1:])
            else:
                pattern = re.escape(old)
            # Spaces and underscores are interchangeable in titles.
            pattern = re.sub(r'_|\\ ', r'[_ ]', pattern)
            templateRegex = re.compile(r'\{\{ *(' + ':|'.join(namespace) +
                                       r':|[mM][sS][gG]:)?' + pattern +
                                       r'(?P<parameters>\s*\|.+?|) *}}',
                                       re.DOTALL)

            if self.getOption('subst') and self.getOption('remove'):
                # -assubst: substitute the *new* template name.
                replacements.append((templateRegex,
                                     r'{{subst:%s\g<parameters>}}' % new))
                exceptions['inside-tags'] = ['ref', 'gallery']
            elif self.getOption('subst'):
                # Plain -subst: substitute the template in place.
                replacements.append((templateRegex,
                                     r'{{subst:%s\g<parameters>}}' % old))
                exceptions['inside-tags'] = ['ref', 'gallery']
            elif self.getOption('remove'):
                replacements.append((templateRegex, ''))
            else:
                template = pywikibot.Page(self.site, new, ns=10)
                if not template.exists():
                    pywikibot.warning(u'Template "%s" does not exist.' % new)
                    if not pywikibot.input_yn('Do you want to proceed anyway?',
                                              default=False, automatic_quit=False):
                        continue
                replacements.append((templateRegex,
                                     r'{{%s\g<parameters>}}' % new))

        super(TemplateRobot, self).__init__(
            generator, replacements, exceptions,
            always=self.getOption('always'),
            addedCat=self.getOption('addedCat'),
            summary=self.getOption('summary'))
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    templateNames = []
    templates = {}
    options = {}
    # If xmlfilename is None, references will be loaded from the live wiki.
    xmlfilename = None
    user = None
    skip = False
    timestamp = None

    # read command line parameters
    local_args = pywikibot.handle_args(args)

    # Avoid conflicts with pagegenerators.py parameters.
    if any(arg.startswith('-category:') for arg in local_args):
        warn('-category (to append a category to each edited page) has been'
             ' renamed to -addcat; make sure you are using the correct param.',
             ArgumentDeprecationWarning)

    site = pywikibot.Site()
    genFactory = pagegenerators.GeneratorFactory()
    # Any argument not recognised here or by the generator factory is
    # treated as a template name.
    for arg in local_args:
        if arg == '-remove':
            options['remove'] = True
        elif arg == '-subst':
            options['subst'] = True
        elif arg == '-assubst':
            options['subst'] = options['remove'] = True
        elif arg == '-always':
            options['always'] = True
        elif arg.startswith('-xml'):
            if len(arg) == 4:
                xmlfilename = pywikibot.input(
                    u'Please enter the XML dump\'s filename: ')
            else:
                xmlfilename = arg[5:]
        elif arg.startswith('-addcat:'):
            options['addedCat'] = arg[len('-addcat:'):]
        elif arg.startswith('-summary:'):
            options['summary'] = arg[len('-summary:'):]
        elif arg.startswith('-user:'):
            user = arg[len('-user:'):]
        elif arg.startswith('-skipuser:'):
            user = arg[len('-skipuser:'):]
            skip = True
        elif arg.startswith('-timestamp:'):
            timestamp = arg[len('-timestamp:'):]
        else:
            if not genFactory.handleArg(arg):
                templateName = pywikibot.Page(site, arg, ns=10)
                templateNames.append(templateName.title(withNamespace=False))

    if not templateNames:
        pywikibot.bot.suggest_help(missing_parameters=['templates'])
        return False

    # With exactly one of -subst/-remove there is no replacement target, so
    # each template maps to None; otherwise names are (old, new) pairs.
    if options.get('subst', False) ^ options.get('remove', False):
        for templateName in templateNames:
            templates[templateName] = None
    else:
        try:
            for i in range(0, len(templateNames), 2):
                templates[templateNames[i]] = templateNames[i + 1]
        except IndexError:
            pywikibot.output('Unless using solely -subst or -remove, '
                             'you must give an even number of template names.')
            return

    oldTemplates = []
    for templateName in templates.keys():
        oldTemplate = pywikibot.Page(site, templateName, ns=10)
        oldTemplates.append(oldTemplate)

    # Page source: a local XML dump, an explicit generator from the factory,
    # or (by default) the templates' live transclusion lists.
    if xmlfilename:
        gen = XmlDumpTemplatePageGenerator(oldTemplates, xmlfilename)
    else:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        gens = [
            pagegenerators.ReferringPageGenerator(t, onlyTemplateInclusion=True)
            for t in oldTemplates
        ]
        gen = pagegenerators.CombinedPageGenerator(gens)
        gen = pagegenerators.DuplicateFilterPageGenerator(gen)
    if user:
        gen = pagegenerators.UserEditFilterGenerator(gen, user, timestamp, skip,
                                                     max_revision_depth=100,
                                                     show_filtered=True)

    if not genFactory.gens:
        # make sure that proper namespace filtering etc. is handled
        gen = genFactory.getCombinedGenerator(gen)

    preloadingGen = pagegenerators.PreloadingGenerator(gen)

    bot = TemplateRobot(preloadingGen, templates, **options)
    bot.run()
if __name__ == "__main__":
    try:
        main()
    except Exception:
        # Route the traceback through pywikibot's logging so it reaches the
        # bot's logfiles instead of just crashing to the console.
        pywikibot.error("Fatal error:", exc_info=True)
| icyflame/batman | scripts/template.py | Python | mit | 14,372 |
'''
Virtual topology
'''
class VTopo(object):
    """Virtual topology container.

    Attributes:
        isStart  -- whether the topology has been started
        switches -- list of virtual switches
        links    -- list of virtual links
    """

    def __init__(self):
        super(VTopo, self).__init__()
        self.isStart = False
        self.switches = []
        self.links = []

    def addSwitch(self, vswitch):
        """Add a new virtual switch.

        Physical/virtual mapping is intended to happen automatically.
        Not implemented yet.
        """
        pass

    def addLink(self, vlink):
        """Add a new virtual link.

        Physical/virtual mapping is intended to happen automatically.
        Not implemented yet.
        """
        pass

    def getVPSwitchMapping(self, vswitch):
        """Return the virtual-to-physical mapping. Not implemented yet."""
        pass

    def getPVSwitchMapping(self, pswitch):
        """Return the physical-to-virtual mapping. Not implemented yet."""
        pass

    def start(self):
        """Start the virtual topology. Not implemented yet."""
        pass
| TakeshiTseng/HyperRyu | hyper_ryu/vtopo/vtopo.py | Python | mit | 884 |
'''
Download Cricket Data
'''
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
import csv
import sys
import time
import os
import unicodedata
from urlparse import urlparse
from BeautifulSoup import BeautifulSoup, SoupStrainer
BASE_URL = 'http://www.espncricinfo.com'
# Scrape first-class match pages from ESPN Cricinfo search results and
# save each match's raw HTML under ./espncricinfo-fc/<match-id>.
# NOTE(review): Python 2 only (urllib2, print statements, BeautifulSoup 3).
if not os.path.exists('./espncricinfo-fc'):
    os.mkdir('./espncricinfo-fc')
# 6019 was presumably the number of result pages when this was written —
# TODO confirm, it is hard-coded.
for i in range(0, 6019):
    #odi: soupy = BeautifulSoup(urllib2.urlopen('http://search.espncricinfo.com/ci/content/match/search.html?search=odi;all=1;page=' + str(i)).read())
    #test: soupy = BeautifulSoup(urllib2.urlopen('http://search.espncricinfo.com/ci/content/match/search.html?search=test;all=1;page=' + str(i)).read())
    #t20i: soupy = BeautifulSoup(urllib2.urlopen('http://search.espncricinfo.com/ci/content/match/search.html?search=t20i;all=1;page=' + str(i)).read())
    #t20: soupy = BeautifulSoup(urllib2.urlopen('http://search.espncricinfo.com/ci/content/match/search.html?search=t20;all=1;page=' + str(i)).read())
    #list a: soupy = BeautifulSoup(urllib2.urlopen('http://search.espncricinfo.com/ci/content/match/search.html?search=list%20a;all=1;page=' + str(i)).read())
    #fc:
    soupy = BeautifulSoup(urllib2.urlopen('http://search.espncricinfo.com/ci/content/match/search.html?search=first%20class;all=1;page=' + str(i)).read())
    # Be polite to the server: one request per second.
    time.sleep(1)
    for new_host in soupy.findAll('a', {'class' : 'srchPlyrNmTxt'}):
        try:
            new_host = new_host['href']
        except:
            # anchor without an href attribute: skip it
            continue
        odiurl = BASE_URL + urlparse(new_host).geturl()
        # Strip non-ASCII characters so the path is usable as a filename.
        new_host = unicodedata.normalize('NFKD', new_host).encode('ascii','ignore')
        print new_host
        #print(type(str.split(new_host)[3]))
        print str.split(new_host, "/")[4]
        html = urllib2.urlopen(odiurl).read()
        if html:
            # Filename is the 5th path segment (the match identifier).
            with open('espncricinfo-fc/{0!s}'.format(str.split(new_host, "/")[4]), "wb") as f:
                f.write(html)
| soodoku/get-cricket-data | scripts/scraper.py | Python | mit | 1,947 |
# coding: utf8
from django.test import RequestFactory
from django.test import TestCase
from django.core.urlresolvers import resolve
from django.views.generic import ListView
import pytest
from selenium import webdriver
import time
from cms.test_utils.testcases import CMSTestCase
from django.test.utils import override_settings
from .. import views
from categories.models import Category
from mixer.backend.django import mixer
from django.contrib.auth.models import AnonymousUser
from bidders.models import Bidder
pytestmark = pytest.mark.django_db
class SmokeTest(TestCase):
    """Browser-level smoke test.

    Requires chromedriver on PATH and a dev server at localhost:8000.
    """
    def setUp(self):
        self.browser = webdriver.Chrome()
    def tearDown(self):
        self.browser.quit()
    def test_bad_maths(self):
        self.assertEqual(1 + 1, 2)
        self.browser.get('http://localhost:8000')
        # The page title is expected to contain the Korean word for "homepage".
        assert u'홈페이지' in self.browser.title
class TestViews(TestCase):
    """View tests for the categories tree.

    Mixes in-process request tests with Selenium tests that need a live
    dev server at localhost:8000 and chromedriver on PATH.
    """
    def setUp(self):
        self.browser = webdriver.Chrome()
        # Seed one category; the name is Korean for "procurement company".
        self.category = mixer.blend(Category, name=u'조달업체')
    def tearDown(self):
        self.browser.quit()
    def test_anonymous(self):
        # Anonymous users should still get a 200 from the category list view.
        req = RequestFactory().get('/categories/')
        categorytree_dict = {
            'queryset': Category.objects.filter(level=0)
        }
        user = AnonymousUser()
        req.user = user
        res = ListView.as_view(**categorytree_dict)(req)
        assert res.status_code == 200
        print(res.context_data)
    @override_settings(ROOT_URLCONF='categories.tests.urls')
    def test_url_resolves_to_categories_page(self):
        found = resolve('/categories/')
        assert found.view_name == 'categories_tree_list'
    def test_home_page_returns_correct_html(self):
        self.browser.get('http://localhost:8000/categories')
        assert u'조달업체' in self.browser.page_source
    def test_entry_listing_within_a_category(self):
        self.browser.get('http://localhost:8000/categories')
        self.browser.find_element_by_link_text(u'조달업체').click()
        # crude wait for the click-triggered page load — TODO: use WebDriverWait
        time.sleep(1)
        assert u'견본 업체' in self.browser.page_source
import hashlib
def hash_list():
    """Return the guaranteed hashlib algorithm names as a string.

    The names are sorted before stringifying: ``algorithms_guaranteed`` is
    a set, so stringifying it directly produced a different ordering from
    run to run.
    """
    return str(sorted(hashlib.algorithms_guaranteed))
def hash_text(algorithm_array, text, pass_count):
    """Hash *text* repeatedly (*pass_count* rounds) with each requested algorithm.

    Returns ``[True, {algorithm: hexdigest}]`` on success, or
    ``[False, {"error": message}]`` when validation fails.
    """
    # Type checking — exact ``type`` comparison, so e.g. bools are rejected
    # for pass_count just like any other non-int.
    if type(pass_count) is not int:
        return [False, {"error": "Pass count should be of 'integer' type."}]
    elif type(text) is not str:
        return [False, {"error": "Text should be of 'string' type."}]
    elif type(algorithm_array) is not list:
        return [False, {"error": "Algorithm list should be of 'list' type."}]
    # Bounds checking: keep only algorithms hashlib actually guarantees.
    requested = set(algorithm_array) & set(hashlib.algorithms_guaranteed)
    if pass_count > 1000000 or pass_count <= 0:
        return [False, {"error": "Pass count should be larger than 0 and smaller than 1000000."}]
    elif not requested:
        return [False, {"error": "None of these hash algorithms are available."}]
    # Valid input: feed each round's hexdigest back in as the next input.
    digests = {}
    for name in requested:
        current = text
        for _ in range(pass_count):
            current = getattr(hashlib, name)(current.encode()).hexdigest()
        digests[name] = current
    return [True, digests]
| tykkz/hasherapp | algorithm.py | Python | mit | 1,194 |
"""Form mixins for the ``multilingual_tags`` app."""
from django import forms
from django.forms.utils import ErrorList
from django.contrib.contenttypes.models import ContentType
from django.utils.text import slugify
from django.utils.translation import get_language, ugettext_lazy as _
from .. import models
class TaggingFormMixin(object):
    """Mixin for ModelForms to add multilingual tags to a model."""
    # Configuration for the synthesized tag field; subclasses may override.
    # 'max_tags' of 0 means unlimited.
    tag_field = {
        'name': 'tags',
        'label': _('Tags'),
        'help_text': _('Add tags separated by comma.'),
        'required': True,
        'max_tags': 0,
    }
    def __init__(self, *args, **kwargs):
        super(TaggingFormMixin, self).__init__(*args, **kwargs)
        self._taggeditems = []
        self._instance_ctype = None
        # Inject a plain CharField that takes comma-separated tag input.
        self.fields[self._get_tag_field_name()] = forms.CharField(
            label=self._get_tag_field_label(),
            help_text=self._get_tag_field_help_text(),
            initial=self._get_tag_field_initial(),
            required=self._get_tag_field_required(),
        )
        self.fields[self._get_tag_field_name()].widget.attrs.update({
            'data-class': 'multilingual-tags-field',
            'data-max-tags': self._get_tag_field_max_tags()})
        # Register the closure under the clean_<name> naming convention so
        # Django's form cleaning machinery calls it for this field.
        setattr(self, 'clean_{0}'.format(self._get_tag_field_name()),
                self._get_tag_field_clean())
    def add_error(self, fieldname, message):
        # Append to the field's error list, creating the list on first use.
        # NOTE(review): this shadows django.forms.Form.add_error with a
        # different signature — confirm that is intentional.
        if fieldname in self._errors:
            self._errors[fieldname].append(message)
        else:
            self._errors[fieldname] = ErrorList()
            self._errors[fieldname].append(message)
    def _get_tag_field_clean(self):
        """Build the clean_<name> callable for the injected tag field."""
        def clean_field():
            # Parse comma-separated input into Tag/TaggedItem objects.
            # TaggedItems for a not-yet-saved instance are created without
            # an object_id and get linked later in save().
            self._tags_added = []
            self._taggeditems = []
            language = get_language()
            max_tags = self._get_tag_field_max_tags()
            data = self.data.get(self._get_tag_field_name())
            if not data:
                return []
            tag_data = [t.strip() for t in data.split(',')]
            self._instance_ctype = ContentType.objects.get_for_model(
                self.instance)
            for tag_string in tag_data:
                # Tag.slug/name length limit — over-long tags are reported
                # and skipped rather than truncated.
                if len(tag_string) > 64:
                    self.add_error(
                        self._get_tag_field_name(),
                        _('Tags cannot be longer than 64 characters:'
                          ' "{0}"'.format(tag_string))
                    )
                    continue
                try:
                    tag = models.Tag.objects.get(
                        slug=slugify(tag_string))
                except models.Tag.DoesNotExist:
                    # TODO tags should not be stored directly
                    tag = models.Tag.objects.create(
                        slug=slugify(tag_string),
                        name=tag_string,
                        language_code=language)
                # prevent duplicate tags
                if tag not in self._tags_added:
                    self._tags_added.append(tag)
                    if self.instance.id:
                        taggeditem, created = (
                            models.TaggedItem.objects.get_or_create(
                                tag=tag,
                                content_type=self._instance_ctype,
                                object_id=self.instance.id,
                            )
                        )
                    else:
                        taggeditem = models.TaggedItem(
                            tag=tag,
                            content_type=self._instance_ctype)
                    self._taggeditems.append(taggeditem)
            # Only the count of *distinct* tags is checked against max_tags.
            if max_tags and len(self._tags_added) > max_tags:
                self.add_error(
                    self._get_tag_field_name(),
                    _('You cannot add more than {0} tags.'.format(
                        self._get_tag_field_max_tags()
                    ))
                )
            return self._taggeditems
        return clean_field
    def _get_tag_field_help_text(self):
        return self.tag_field.get('help_text', '')
    def _get_tag_field_initial(self):
        # Initial value is the instance's current tags, comma-joined.
        tag_model_field = getattr(self.instance, self._get_tag_field_name())
        return ','.join([ti.tag.name for ti in tag_model_field.all()])
    def _get_tag_field_label(self):
        return self.tag_field.get('label', 'Tags')
    def _get_tag_field_max_tags(self):
        return int(self.tag_field.get('max_tags', 0))
    def _get_tag_field_name(self):
        return self.tag_field.get('name', 'tags')
    def _get_tag_field_required(self):
        return self.tag_field.get('required', True)
    def save(self, commit=True):
        """Save the form, then link and persist the cleaned TaggedItems and
        delete any TaggedItems that were removed from the input."""
        instance = super(TaggingFormMixin, self).save(commit)
        for item in self._taggeditems:
            # If the model exposes an owner via get_user(), stamp it on the
            # tagged item.
            if hasattr(instance, 'get_user'):
                item.user = instance.get_user()
            item.object_id = instance.id
            item.save()
        # Remove stale tagged items no longer present in the submitted tags.
        models.TaggedItem.objects.filter(
            content_type=self._instance_ctype,
            object_id=instance.id).exclude(
            pk__in=[ti.pk for ti in self._taggeditems]).delete()
        return instance
| bitmazk/django-multilingual-tags | multilingual_tags/forms/mixins.py | Python | mit | 5,178 |
class Obfuscator:
    """Reversible string obfuscation using a chained XOR with interleaved chaff.

    ``obfuscate`` maps an n-char string to a 2n-1 char string;
    ``unobfuscate`` inverts it.
    """

    def __init__(self, data):
        self._string = data

    def obfuscate(self):
        """Return the obfuscated form of the stored string."""
        text = self._string
        # First output char: XOR of the first two input chars.
        prev = chr(ord(text[0]) ^ ord(text[1]))
        result = prev
        for pos in range(1, len(text)):
            code = ord(text[pos])
            mixed = chr(code ^ ord(prev))
            # Emit the mixed char followed by a chaff char; the chaff is
            # mixed^code, i.e. the previous mixed char.
            result += mixed + chr(ord(mixed) ^ code)
            prev = mixed
        return result

    def unobfuscate(self):
        """Return the original string (stored string must come from obfuscate())."""
        data = self._string
        # Recover original chars 2..n-1: each sits at an odd position and is
        # the XOR of that char with the one two positions earlier.
        pieces = []
        pos = len(data) - 2
        while pos > 1:
            pieces.append(chr(ord(data[pos]) ^ ord(data[pos - 2])))
            pos -= 2
        pieces.reverse()
        plain = ''.join(pieces)
        # Second original character from positions 1 and 2.
        plain = chr(ord(data[1]) ^ ord(data[2])) + plain
        # First original character from position 0 and the recovered second.
        plain = chr(ord(data[0]) ^ ord(plain[0])) + plain
        return plain
def main():
    """Demo: obfuscate a sample string, print it, then recover and print it.

    ``print`` calls are parenthesized: with a single argument this behaves
    identically on Python 2 (prints the parenthesized expression) and makes
    the script valid Python 3 as well.
    """
    testString = "Python obfuscator"
    obfuscator = Obfuscator(testString)
    testStringObf = obfuscator.obfuscate()
    print(testStringObf)
    obfuscator = Obfuscator(testStringObf)
    testString = obfuscator.unobfuscate()
    print(testString)
if __name__=="__main__":
    main()
| ActiveState/code | recipes/Python/189745_Symmetric_datobfuscatiusing/recipe-189745.py | Python | mit | 1,786 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Merge migration: reconciles the divergent 0017 and 0015 branches of
    # the 'clients' app; it intentionally performs no schema operations.
    dependencies = [
        ('clients', '0017_auto_20151025_1240'),
        ('clients', '0015_auto_20151025_1209'),
    ]
    operations = [
    ]
| deafhhs/adapt | clients/migrations/0018_merge.py | Python | mit | 297 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-03 16:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: repoints FKs (Server/Faction/Game/SpecType) at the
    # 'core' app, drops the local copies of those models, and adds the new
    # PlayerFeedback model with owner/to_player links.
    dependencies = [
        ('player', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='PlayerFeedback',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('message', models.TextField(blank=True)),
            ],
        ),
        migrations.AlterField(
            model_name='player',
            name='server',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Server'),
        ),
        migrations.AlterField(
            model_name='race',
            name='faction',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Faction'),
        ),
        migrations.AlterField(
            model_name='race',
            name='game',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Game'),
        ),
        migrations.AlterField(
            model_name='specs',
            name='spec_type',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.SpecType'),
        ),
        migrations.DeleteModel(
            name='Faction',
        ),
        migrations.DeleteModel(
            name='Game',
        ),
        migrations.DeleteModel(
            name='Server',
        ),
        migrations.DeleteModel(
            name='SpecType',
        ),
        # FK fields are added after the model exists because they reference
        # the Player model within the same app.
        migrations.AddField(
            model_name='playerfeedback',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='owner', to='player.Player'),
        ),
        migrations.AddField(
            model_name='playerfeedback',
            name='to_player',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='to_player', to='player.Player'),
        ),
    ]
| valdergallo/raidmanager | player/migrations/0002_auto_20160403_1645.py | Python | mit | 2,163 |
#!/usr/bin/python
import magic
import sys
# Print the MIME type of each file given on the command line using libmagic.
m = magic.open(magic.MIME_TYPE)
m.load()
for f in sys.argv[1:]:
    try:
        print(f, m.file(f))
    # Catch Exception rather than a bare ``except:`` so SystemExit /
    # KeyboardInterrupt still propagate; keep going on unreadable files.
    except Exception:
        print("Except with %s" % f)
| pzia/keepmydatas | misc/testmagic.py | Python | mit | 344 |
__all__ = ['tsort']
from functools import reduce
def tsort(data, smallest_first=False, fewest_edges_first=False, flatten=False):
    """Topologically sort a dependency mapping ``{vertex: set_of_dependencies}``.

    Returns a list of batches (vertices with no remaining dependencies at each
    step); each batch is a sorted list when ``smallest_first`` is True,
    otherwise a set.  With ``flatten=True`` the batches are concatenated into
    a single flat list.

    Raises ValueError if the graph contains a cycle.
    Fixes over the previous version:
    - an empty ``data`` no longer raises TypeError (reduce gets a ``set()``
      initializer),
    - the caller's sets are never mutated (each value set is copied before
      self-references are discarded),
    - non-flatten batches are real sets instead of one-shot generators.
    """
    # FIXME: support fewest_edges_first
    # Copy each value set so discarding self-references cannot mutate the
    # caller's data.
    tmp = {k: set(v) for k, v in data.items()}
    for k, v in tmp.items():
        v.discard(k)  # self-references are meaningless for ordering
    # Vertices that appear only as dependencies get an explicit empty entry.
    all_vertices = reduce(set.union, tmp.values(), set())
    for k in all_vertices - set(tmp.keys()):
        tmp[k] = set()
    # Repeatedly peel off the vertices with no outstanding dependencies.
    sorted_vertices = []
    while True:
        empty_vertices = {k for k, v in tmp.items() if not v}
        if not empty_vertices:
            break
        if smallest_first:
            batch = sorted(empty_vertices)
        else:
            batch = set(empty_vertices)
        if flatten:
            sorted_vertices.extend(batch)
        else:
            sorted_vertices.append(batch)
        # Drop the resolved vertices and remove them from everyone's
        # dependency sets.
        for k in list(tmp):
            if k in empty_vertices:
                del tmp[k]
            else:
                tmp[k] -= empty_vertices
    if tmp:
        raise ValueError('Cyclic dependencies found')
    return sorted_vertices
if __name__ == '__main__':
    from pprint import pprint
    # Demo run on a small dependency graph; prints the batches in order.
    data = {
        2: {11},
        9: {11, 8},
        10: {11, 3},
        11: {7, 5},
        8: {7, 3},
    }
    out = tsort(data, smallest_first=True)
    pprint(out)
# -*- coding: utf-8 -*-
import random as rand
class PrimeTester(object):
    """Probabilistic primality tests (Solovay-Strassen and Miller-Rabin).

    Both tests return ``(attempts_used, is_probably_prime)``.
    """
    def solovay_strassen(self, primo, acuracidade=5):
        """Solovay-Strassen test on ``primo`` with ``acuracidade`` random rounds.

        Raises ValueError for inputs < 2 or even inputs (2 and 3 are
        special-cased as prime first).
        """
        nro_tentativas = 0
        if primo == 2 or primo == 3:
            return (nro_tentativas, True)
        if primo < 2:
            raise ValueError('Entrada < 2')
        if primo % 2 == 0:
            raise ValueError('Entrada % 2 == 0')
        for _ in range(acuracidade):
            nro_tentativas += 1
            a = rand.randint(2, primo - 1)
            res = self.adrien_legendre(a, primo)
            potencia = self.potencia(a, (primo - 1) // 2, primo)
            # Euler's criterion: for prime p, a^((p-1)/2) == (a|p) mod p.
            if res == 0 or potencia != res % primo:
                return (nro_tentativas, False)
        return (nro_tentativas, True)
    def adrien_legendre(self, a, primo):
        """Recursively compute the Legendre/Jacobi-style symbol (a|primo).

        Uses the factor-out-2 rule (sign flip when (p^2-1)/8 is odd) and
        quadratic reciprocity (flip when both arguments are 3 mod 4).
        """
        if a == 0 or a == 1:
            return a
        if a % 2 == 0:
            res = self.adrien_legendre(a // 2, primo)
            # (2|p) correction: flip sign when bit 3 of p^2-1 is set.
            if ((primo ** 2) - 1) & 8 != 0:
                res = -res
        else:
            res = self.adrien_legendre(primo % a, a)
            # Reciprocity: flip when (a-1)(p-1)/4 is odd.
            if (a - 1) * (primo - 1) & 4 != 0:
                res = -res
        return res
    def fatora(self, n):
        """Write n as 2^exp2 * odd; return (exp2, odd_part)."""
        exp2 = 0
        while n % 2 == 0:
            n = n // 2
            exp2 += 1
        return exp2, n
    def testacandidato(self, primo_candidato, primo, expoente, resto):
        """Miller-Rabin witness check; True means 'primo is composite'."""
        primo_candidato = self.potencia(primo_candidato, resto, primo)
        if primo_candidato == 1 or primo_candidato == primo - 1:
            return False
        for _ in range(expoente):
            primo_candidato = self.potencia(primo_candidato, 2, primo)
            if primo_candidato == primo - 1:
                return False
        return True
    def miller_rabin(self, primo, acuracidade=5):
        """Miller-Rabin test on ``primo`` with ``acuracidade`` random rounds.

        Unlike solovay_strassen, invalid inputs return (n, False) instead
        of raising.
        """
        nro_tentativas = 0
        if primo == 2 or primo == 3:
            return (nro_tentativas, True)
        if primo < 2:
            return (nro_tentativas, False)
        if primo % 2 == 0:
            return (nro_tentativas, False)
        expoente, resto = self.fatora(primo - 1)
        for _ in range(acuracidade):
            nro_tentativas += 1
            possivelmente_primo = rand.randint(2, primo - 2)
            if self.testacandidato(possivelmente_primo, primo, expoente, resto):
                return (nro_tentativas, False)
        return (nro_tentativas, True)
    def potencia(self, base, exp, modulo):
        """Modular exponentiation by repeated squaring: base^exp mod modulo."""
        res = 1
        base = base % modulo
        while exp > 0:
            if exp % 2 == 1:
                res = (res * base) % modulo
            exp = exp >> 1
            base = (base * base) % modulo
        return res
class Twister(object):
    """Mersenne-Twister-like PRNG.

    NOTE(review): this intentionally deviates from reference MT19937 (see
    the 'Hacked' comment in gerar()) and also mixes in fresh
    random.getrandbits() state on construction and extraction, so output is
    neither standard nor reproducible from the seed alone — confirm intent.
    """
    def __init__(self, index=0):
        self.N = 624                 # state size of MT19937
        self.M = 397                 # standard MT19937 offset (unused directly)
        self.mersenne_twister = [x for x in range(624)]
        self.index = 0
        self.index_tamanho = 64
        self.__magic__ = rand.getrandbits(64)
    def alimentar(self, seed):
        """Seed the state array (standard MT19937 initialization)."""
        self.index = 0
        self.mersenne_twister[0] = seed
        for i in range(1, self.N):
            self.mersenne_twister[i] = (
                1812433253 * (self.mersenne_twister[i - 1] ^ (self.mersenne_twister[i - 1] >> 30)) + i)
    def extrair(self, bits):
        """Extract the next tempered value; refreshes __magic__ with *bits* bits."""
        if bits in [64, 128, 256, 512, 1024, 2048, 4096]:
            self.__magic__ = rand.getrandbits(bits)
        else:
            # NOTE(review): raises a string, not an exception class —
            # invalid under Python 3; confirm / replace with ValueError.
            raise 'Precisa ser algum desses valores: {64, 128, 256, 512, 1024, 2048, 4096}'
        if self.index == 0:
            self.gerar()
        y = self.mersenne_twister[self.index]
        y = self.mascara(y)
        self.index = (self.index + 1) % len(self.mersenne_twister)
        return y
    def mascara(self, y):
        """Standard MT19937 tempering transform."""
        y ^= (y >> 11)
        y ^= (y << 7) & 0x9d2c5680
        y ^= (y << 15) & 0xefc60000
        y ^= (y >> 18)
        return y
    def gerar(self):
        """Regenerate the state array (the 'twist' step, modified)."""
        for i in range(self.N):
            # NOTE(review): uses the boolean 'and' where reference MT19937
            # uses bitwise '&' masks — confirm this is part of the "hack".
            y = (self.mersenne_twister[i] and 0x80000000) + (self.mersenne_twister[(i + 1) % self.N] and 0x7fffffff)
            self.mersenne_twister[i] = self.mersenne_twister[(i + 397) % self.N] ^ (y >> 1)
            if y % 2 != 0:
                # Original : MT[i] := MT[i] xor (2567483615) // 0x9908b0df
                # Hacked : MT[i] := MT[i] mod (2567483615) // 0x9908b0df
                # https://en.wikipedia.org/wiki/Mersenne_Twister
                self.mersenne_twister[i] %= self.__magic__
| tonussi/inseguro | laboratorio/rsa/pseudo.py | Python | mit | 4,350 |
"""
A collection of Xentica models and experiments.
Indended to illustrate how to use the framework.
"""
| a5kin/hecate | examples/__init__.py | Python | mit | 107 |
# Created by PyCharm Pro Edition
# User: Kaushik Talukdar
# Date: 01-04-17
# Time: 03:10 AM
# Using continue
# continue will send the loop back to its root
# Print the odd numbers 1..9: `continue` skips the print for even values.
nos = 0
while nos < 10:
    nos += 1
    if nos%2 == 0:
        continue  # even number: jump straight back to the loop condition
    print(nos)
# AVOID INFINITE LOOP
# if the loop has no condition that can end, the loop will run infinitely
# here if we forgot to add "nos += 1, the loop will become infinite loop | KT26/PythonCourse | 6. User Input and While Loops/8.py | Python | mit | 465 |
import os
import gevent
from rhizo.main import c
# This controller's name is the last segment of its server-side path.
path_on_server = c.path_on_server()
name = path_on_server.rsplit('/', 1)[1]
print(name, os.getcwd(), path_on_server)
# loop forever sending messages
# (stress-test client: one message per second, never returns)
message_index = 0
while True:
    message = 'msg-%s-%d' % (name, message_index)
    print('%s: send: %s' % (name, message))
    c.send_message(message, {})
    gevent.sleep(1)
    message_index += 1
| rhizolab/rhizo | tests_with_server/stress-testing/each_controller.py | Python | mit | 401 |
"""
The Fibonacci numbers, which we are all familiar with, start like this:
0,1,1,2,3,5,8,13,21,34,...
Where each new number in the sequence is the sum of the previous two.
It turns out that by summing different Fibonacci numbers with each other, you can create every single positive integer.
In fact, a much stronger statement holds:
Every single positive integer can be represented in one and only one way as a sum of non-consecutive Fibonacci numbers.
This is called the number's "Zeckendorf representation" [http://en.wikipedia.org/wiki/Zeckendorf%27s_theorem].
For instance, the Zeckendorf representation of the number 100 is 89 + 8 + 3, and the Zeckendorf representation of 1234
is 987 + 233 + 13 + 1. Note that all these numbers are Fibonacci numbers, and that they are non-consecutive (i.e. no
two numbers in a Zeckendorf representation can be next to each other in the Fibonacci sequence).
There are other ways of summing Fibonacci numbers to get these numbers. For instance, 100 is also equal to 89 + 5 + 3 +
2 + 1, but 1, 2, 3, 5 are all consecutive Fibonacci numbers. If no consecutive Fibonacci numbers are allowed, the
representation is unique.
Finding the Zeckendorf representation is actually not very hard. Lets use the number 100 as an example of how it's done:
First, you find the largest fibonacci number less than or equal to 100. In this case that is 89. This number will always
be of the representation, so we remember that number and proceed recursively, and figure out the representation of
100 - 89 = 11.
The largest Fibonacci number less than or equal to 11 is 8. We remember that number and proceed recursively with
11 - 8 = 3.
3 is a Fibonacci number itself, so now we're done. The answer is 89 + 8 + 3.
Write a program that finds the Zeckendorf representation of different numbers.
What is the Zeckendorf representation of 3**15 ?
Thanks to SwimmingPastaDevil for suggesting this problem in /r/dailyprogrammer_ideas! Do you have a problem you
think would be good for us? Why not head over there and post it?
"""
def zeckendorf(target, fib_list):
    """Greedy Zeckendorf decomposition of *target*.

    Walks *fib_list* from largest to smallest, taking every Fibonacci
    number that still fits; returns the chosen numbers, largest first.
    """
    picked = []
    remaining = target
    for fib in reversed(fib_list):
        if fib <= remaining:
            picked.append(fib)
            remaining -= fib
    return picked
def get_fibonacci_list(target):
    """Return the Fibonacci numbers (starting 1, 1) strictly below *target*,
    except that the leading 1 is always included."""
    a, b = 1, 1
    result = [a]
    while b < target:
        result.append(b)
        a, b = b, a + b
    return result
def main():
    """Print the Zeckendorf representation of 3**15 and its (verifying) sum."""
    number = 3 ** 15
    fibs = get_fibonacci_list(number)
    representation = zeckendorf(number, fibs)
    print(representation)
    print(' 3**15 = {} \nsum of zeckendorf = {}'.format(3**15, sum(representation)))


if __name__ == "__main__":
    main()
| DayGitH/Python-Challenges | DailyProgrammer/DP20120709A.py | Python | mit | 2,709 |
#!/usr/bin/python3
from .compat import BaseHTTPRequestHandler, HTTPServer
import urllib
import json
import sys
import time
import warnings
from slackly import SlackClient
warnings.warn("This part of slackly (oauth_utils) is highly experimental and will likely see api breaking changes")
class CodeServer(BaseHTTPRequestHandler):
    """HTTP handler that captures Slack's OAuth redirect and exchanges the
    ``code`` query parameter for an access token.

    NOTE(review): ``client_id``, ``client_secret`` and ``redirect_uri`` are
    module-level globals assigned in the __main__ block — this handler
    raises NameError if the module is used any other way. ``state_validate``
    is set on the class by main().
    """
    def do_GET(self):
        self.send_response(200)
        # NOTE(review): urllib.parse.splitquery is an undocumented/deprecated
        # helper — consider urlsplit; confirm before changing.
        host, query = urllib.parse.splitquery(self.path)
        query_values = urllib.parse.parse_qs(query)
        # parse_qs yields lists; collapse the single-valued params we use.
        if 'code' in query_values:
            query_values['code'] = query_values['code'][0]
        if 'state' in query_values:
            query_values['state'] = query_values['state'][0]
            # Reject responses whose CSRF 'state' does not match ours.
            if query_values['state'] != self.state_validate:
                print("Not a valid request")
                return
        print(json.dumps(query_values, indent=4))
        client = SlackClient()
        response = client.api.oauth.access(
            client_id=client_id,
            client_secret=client_secret,
            code=query_values['code'],
            redirect_uri=redirect_uri,
        )
        print(json.dumps(response.data, indent=4))
        return
def main(host, port, state, client_id, client_secret, redirect_uri):
    """Run the one-shot OAuth code-capture HTTP server until interrupted."""
    # The handler validates the OAuth 'state' parameter against this value.
    CodeServer.state_validate = state
    httpd = HTTPServer((host, port), CodeServer)
    print(time.asctime(), "Server Starts - %s:%s" % (host, port))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print(time.asctime(), "Server Stops - %s:%s" % (host, port))
if __name__ == '__main__':
    # CLI: host port state client_id client_secret redirect_uri
    host = sys.argv[1]
    port = int(sys.argv[2])
    state = sys.argv[3]
    client_id = sys.argv[4]
    client_secret = sys.argv[5]
    redirect_uri = sys.argv[6]
    main(host, port, state, client_id, client_secret, redirect_uri)
| huntcsg/slackly | src/slackly/oauth_utils.py | Python | mit | 1,841 |
#!/usr/bin/env python
# Author: csiu
# Created: 2015-02-02
import argparse
from ConfigParser import SafeConfigParser
import sys
import os
from utils import get_value_from_keycolonvalue_list, ensure_dir, random_string
import features
import mirna_proximity
import correlation
import gff_unify_features
import promirna
import plots
usage = """- Runs promi2
EXAMPLE:
python2.7 promi2.py -i ../test/test.gff -o ../Testout-promi2
- When the features.gff file is already available, use the '-f' option
EXAMPLE:
python2.7 promi2.py -i ../test/test-features.gff -f -o ../Testout-promi2predict
- enable plotting with "-p"
"""
def _read_params(f_param):
params_dict = {}
with open(f_param) as f:
for l in f:
k,v = l.strip().split(':')
params_dict[k] = float(v)
mu1 = params_dict['mu_promoter']
mu2 = params_dict['mu_background']
lambda1 = params_dict['lambda_promoter']
lambda2 = params_dict['lambda_background']
betas = [i for i in params_dict.keys() if i.startswith('beta')]
betas.sort()
betas = [params_dict[b] for b in betas]
return (mu1, mu2, lambda1, lambda2, betas)
def _make_prediction(prior_prom, p_prom, p_back):
if str(prior_prom).endswith('*'):
note = '*'
else:
note = ''
if p_prom >= p_back:
prediction = 'prom'+note
else:
prediction = 'back'+note
return prediction
def promi2(f_param, listoffeatures, infile, outfile):
    """Score each line of a features.gff with the PromiRNA model.

    Reads tab-separated gff lines (tag count in column 6, ';'-separated
    'key:value' features in column 8), appends the priors, probabilities
    and the prom/back prediction as extra columns, and writes to *outfile*.
    """
    mu1, mu2, lambda1, lambda2, betas = _read_params(f_param)
    # One beta per feature plus the intercept.
    if len(betas) != len(listoffeatures)+1:
        sys.exit("ERROR: number of betas does not match number of features")
    with open(outfile, 'w') as out:
        with open(infile) as f:
            for line in f:
                line = line.strip()
                l = line.split('\t')
                x = float(l[5])
                _features = l[7].split(';')
                fvalues = []
                for lof in listoffeatures:
                    try:
                        fvalues.append(float(get_value_from_keycolonvalue_list(lof, _features)))
                    except ValueError:
                        # missing/non-numeric feature value: treat as 0
                        fvalues.append(0)
                p_prom, p_back, prior_prom, prior_back = promirna.promirna(x, mu1, mu2, lambda1, lambda2,
                                                                           betas, fvalues)
                prediction = _make_prediction(prior_prom, p_prom, p_back)
                #line = '\t'.join([line,
                #                  ';'.join(['prior_prom:'+str(prior_prom), 'prior_back:'+str(prior_back),
                #                            'prob_prom:'+str(p_prom), 'prob_back:'+str(p_back)]),
                #                  prediction]) + '\n'
                line = line + '\t%s\t%s\t%s\t%s\t%s\n' % (prior_prom, prior_back, p_prom, p_back, prediction)
                out.write(line)
    return
def _cleanup_extra_positions(infile, outfile):
    """Filter lines whose proximity and correlation blocks disagree on the
    miRNA coordinates; write the surviving lines to *outfile*.

    Column 9 is '@'-separated: [base info, mirna_prox info, corr info].
    Lines with only one of the two blocks are kept unchanged.
    """
    ## cleanup of extra positions
    ## compare miRNA positions in PROX & CORR
    with open(outfile, 'w') as out:
        with open(infile) as f:
            for line in f:
                l = line.split('\t')
                descript = l[8].split('@')
                # the trailing field still carries the newline, hence '\n'
                if (descript[1] != '') and (descript[2] != '\n'):
                    info_mprox = descript[1].split(';')
                    prox_start = get_value_from_keycolonvalue_list('mirna_start', info_mprox)
                    prox_stop = get_value_from_keycolonvalue_list('mirna_stop', info_mprox)
                    info_corr = descript[2].split(';')
                    corr_start = get_value_from_keycolonvalue_list('mirna_start', info_corr)
                    corr_stop = get_value_from_keycolonvalue_list('mirna_stop', info_corr)
                    # NOTE(review): the second comparison checks prox_stop
                    # against itself — corr_stop is never used; looks like a
                    # bug, confirm intended behavior before fixing.
                    if (prox_start == corr_start) and \
                       (prox_stop == prox_stop):
                        out.write(line)
                else:
                    out.write(line)
    return outfile
def main(f_config, gff_cage, is_gff, outdir, make_plots):
    """End-to-end promi2 pipeline.

    Unless *is_gff* says the input is already a features.gff, extract
    sequence features, miRNA proximity and (optionally) correlation and
    merge them; then run the PromiRNA model and optionally plot.
    """
    cparser = SafeConfigParser()
    cparser.read(f_config)
    in_bname = os.path.basename(gff_cage)
    if outdir == None:
        outdir = 'promi2_outdir_'+in_bname+'_'+random_string(6)
    ensure_dir(outdir, False)
    f_param = cparser.get('promi2','params')
    listoffeatures = cparser.get('promi2','features')
    listoffeatures = listoffeatures.split(',')
    # correlation is only computed when the config lists 'corr' as a feature
    if 'corr' in listoffeatures:
        is_consider_corr = True
        corrmethod = cparser.get('correlation','corrmethod')
    else:
        is_consider_corr = False
    ## PART1: Feature extraction
    if not is_gff:
        ## feature extraction: cpg, cons, tata (features.py)
        outdir_seqfeatures = os.path.join(outdir, 'seqfeatures')
        ensure_dir(outdir_seqfeatures, False)
        gff_1kbfeatures = os.path.join(outdir_seqfeatures, 'features_1kbseq.gff')
        f_fasta = cparser.get('genome','fasta')
        f_chromsizes = cparser.get('genome','chromsizes')
        d_phastcons = cparser.get('cons','phastcons')
        TRAP = cparser.get('tata','trap')
        f_psemmatrix = cparser.get('tata','psem')
        features.main(gff_cage, outdir_seqfeatures,
                      f_fasta, f_chromsizes, d_phastcons, TRAP, f_psemmatrix,
                      gff_1kbfeatures)
        ## feature extraction: mirna_proximity (mirna_proximity.py)
        outdir_mprox = os.path.join(outdir, 'mprox')
        ensure_dir(outdir_mprox, False)
        gff_mirnaprox = os.path.join(outdir_mprox, 'features_mirnaprox.gff')
        gff_mirna = cparser.get('mirbase','gff2')
        mirna_proximity.main(gff_cage, gff_mirna, gff_mirnaprox)
        ## merge extracted features (gff_unify_features.py)
        gff_features = os.path.join(outdir, 'Features.1kb.mprox.'+in_bname)
        gff_unify_features.main(gff_1kbfeatures, gff_mirnaprox, 'mirna_prox', '0', gff_features)
        if is_consider_corr:
            ## merge extracted features (gff_unify_features.py) after compute correlation
            gff_features_corr = os.path.join(outdir,
                                             'Features.1kb.mprox.%s.%s' % (corrmethod, in_bname))
            outdir_corr = os.path.join(outdir, 'corr')
            m_mirna = cparser.get('correlation', 'srnaseqmatrix')
            m_tss = cparser.get('correlation', 'cageseqmatrix')
            gff_corr = correlation.main(gff_mirna, m_mirna, m_tss, corrmethod, outdir_corr)
            gff_unify_features.main(gff_features, gff_corr, 'corr', '0', gff_features_corr)
            gff_allfeatures = gff_features_corr
        else:
            gff_allfeatures = gff_features
    else:
        gff_allfeatures = gff_cage
        # sanity check: a features.gff carries 'key:value' pairs in column 8
        with open(gff_allfeatures) as f:
            l = f.readline().split('\t')
            if not (':' in l[7]):
                sys.exit('ERROR: this is not a features.gff formatted file')
    ## PART2: extract parameters & run promirna
    f_prediction = os.path.join(outdir, 'Predictions.'+in_bname+'.txt')
    print 'COMPUTING: "%s"...' % f_prediction
    promi2(f_param, listoffeatures, gff_allfeatures, f_prediction)
    ## PART3: plots
    if make_plots:
        plotdir = os.path.join(outdir, 'plots')
        ensure_dir(plotdir, False)
        plots.main(f_prediction, plotdir, f_config)
if __name__ == '__main__':
    # CLI entry point: parse arguments and hand off to main().
    parser = argparse.ArgumentParser(description=usage,
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-i', '--infile', dest='infile',
                        required=True,
                        help='''path to input gff input file.
Tab-separated columns should be like:
 1. chrom
 2. source
 3. feature
 4. start (+500)
 5. stop (-500)
 6. normalized tag count
 7. strand
 8. .
 9. info
''')
    parser.add_argument('-f', dest='is_gff',
                        action='store_true',
                        help='flag to specify that infile is already features.gff file')
    parser.add_argument('-p', dest='make_plots',
                        action='store_true',
                        help='''Flag to enable plotting
This requires extra packages to be pre-installed:
 - Python: pandas, matplotlib, rpy2
 - R: ggplot2
''')
    parser.add_argument('-c', '--config', dest='f_config',
                        default='config.ini',
                        help='path to config file; default="config.ini"')
    parser.add_argument('-o', '--outdir', dest='outdir',
                        help='specify output directory')
    ##get at the arguments
    args = parser.parse_args()
    ## do something..
    main(args.f_config, args.infile, args.is_gff, args.outdir, args.make_plots)
| csiu/promi2 | code/promi2.py | Python | mit | 8,709 |
import unittest
import os.path
import numpy as np
import pandas as pd
from pandas.util.testing import assert_frame_equal
import test_helper
import copy
from operator import lt, le, eq, ne, ge, gt
from pandas.core.index import Index
# Symbols used to denote pandas Index set operations in rule expressions.
__index_symbol__ = {
    Index.union: ',',
    Index.intersection: '&',
    Index.difference: '~',
    Index.sym_diff: '^'
}
from collections import defaultdict, OrderedDict
from quantipy.core.stack import Stack
from quantipy.core.chain import Chain
from quantipy.core.link import Link
from quantipy.core.view_generators.view_mapper import ViewMapper
from quantipy.core.view_generators.view_maps import QuantipyViews
from quantipy.core.view import View
from quantipy.core.helpers import functions
from quantipy.core.helpers.functions import load_json
from quantipy.core.tools.dp.prep import (
frange,
frequency,
crosstab
)
from quantipy.core.tools.view.query import get_dataframe
from quantipy.core.dataset import DataSet
# Test-suite switches: extended tests are off by default.
EXTENDED_TESTS = False
COUNTER = 0
class TestRules(unittest.TestCase):
    def setUp(self):
        """Load the 'Example Data (A)' csv/json fixtures and common lookups."""
        self.path = './tests/'
        project_name = 'Example Data (A)'
        # Load Example Data (A) data and meta into self
        name_data = '%s.csv' % (project_name)
        path_data = '%s%s' % (self.path, name_data)
        # NOTE(review): DataFrame.from_csv was removed in pandas 1.0;
        # pd.read_csv(path, index_col=0, parse_dates=True) is the documented
        # replacement — confirm before upgrading pandas.
        self.example_data_A_data = pd.DataFrame.from_csv(path_data)
        name_meta = '%s.json' % (project_name)
        path_meta = '%s%s' % (self.path, name_meta)
        self.example_data_A_meta = load_json(path_meta)
        # Variables by type for Example Data A
        self.dk = 'Example Data (A)'
        self.fk = 'no_filter'
        self.single = ['gender', 'locality', 'ethnicity', 'religion', 'q1']
        self.delimited_set = ['q2', 'q3', 'q8', 'q9']
        self.q5 = ['q5_1', 'q5_2', 'q5_3']
def test_slicex(self):
meta = self.example_data_A_meta
data = self.example_data_A_data
col_x = 'religion'
col_y = 'ethnicity'
################## values
meta['columns'][col_x]['rules'] = {
'x': {'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
meta['columns'][col_y]['rules'] = {
'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
rules_values_x = {
'unwtd': index_items(col_x, all=True,
values=[1, 3, 5, 7, 9, 11, 13, 15]),
'iswtd': index_items(col_x, all=True,
values=[1, 3, 5, 7, 9, 11, 13, 15])}
rules_values_y = {
'unwtd': index_items(col_y, all=True,
values=[2, 4, 6, 8, 10, 12, 14, 16]),
'iswtd': index_items(col_y, all=True,
values=[2, 4, 6, 8, 10, 12, 14, 16])}
confirm_crosstabs(
self,
meta, data,
[None, 'weight_a'],
col_x, col_y,
rules_values_x,
rules_values_y)
def _get_dataset(self):
meta = self.example_data_A_meta
data = self.example_data_A_data
dataset = DataSet('rules_test')
dataset.set_verbose_infomsg(False)
dataset.from_components(data, meta)
return dataset
def _get_stack_with_links(self, dataset, x=None, y=None, w=None):
stack = Stack()
stack.add_data(dataset.name, dataset._data, dataset._meta)
if not x: x = '@'
if not y: y = '@'
stack.add_link(x=x, y=y, weights=w)
return stack
def test_sortx_summaries_mean(self):
dataset = self._get_dataset()
x = 'q5'
y = '@'
dataset.sorting(x, on='mean')
stack = self._get_stack_with_links(dataset, x)
stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean'])
vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%',
'x|d.mean|x:|||mean']
chains = stack.get_chain(data_keys=dataset.name,
filters='no_filter',
x=[x], y=[y], rules=True,
views=vks,
orient_on='x')
chain = chains[0]
for vk in vks:
v = chain['rules_test']['no_filter'][x][y][vk]
l = stack['rules_test']['no_filter'][x][y][vk]
check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
actual_order = v.dataframe.index.get_level_values(1).tolist()
expected_order = ['q5_4', 'q5_6', 'q5_1', 'q5_3', 'q5_5', 'q5_2']
self.assertEqual(actual_order, expected_order)
def test_sortx_summaries_value(self):
dataset = self._get_dataset()
x = 'q5'
y = '@'
dataset.sorting(x, on=3, ascending=True)
stack = self._get_stack_with_links(dataset, x)
stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean'])
vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%',
'x|d.mean|x:|||mean']
chains = stack.get_chain(data_keys=dataset.name,
filters='no_filter',
x=[x], y=[y], rules=True,
views=vks,
orient_on='x')
chain = chains[0]
for vk in vks:
v = chain['rules_test']['no_filter'][x][y][vk]
l = stack['rules_test']['no_filter'][x][y][vk]
check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
actual_order = v.dataframe.index.get_level_values(1).tolist()
expected_order = ['q5_4', 'q5_5', 'q5_6', 'q5_1', 'q5_3', 'q5_2']
self.assertEqual(actual_order, expected_order)
def test_sortx_summaries_items(self):
dataset = self._get_dataset()
x = '@'
y = 'q5'
dataset.sorting(y, on='q5_2', ascending=False)
stack = self._get_stack_with_links(dataset, y=y)
stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean'])
vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%',
'x|d.mean|x:|||mean']
chains = stack.get_chain(data_keys=dataset.name,
filters='no_filter',
x=[x], y=[y], rules=True,
views=vks,
orient_on='x')
chain = chains[0]
for vk in vks:
v = chain['rules_test']['no_filter'][x][y][vk]
l = stack['rules_test']['no_filter'][x][y][vk]
if not 'd.mean' in vk and not 'cbase' in vk:
check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
actual_order = v.dataframe.index.get_level_values(1).tolist()
expected_order = [3, 5, 98, 2, 1, 97, 4]
self.assertEqual(actual_order, expected_order)
    def test_sortx_expand_net_within(self):
        """Sort expanded net codes inside each net group only
        (within=True, between=False), with code 98 fixed at the end.
        """
        dataset = self._get_dataset()
        x = 'q2'
        y = ['@', 'gender']
        dataset.sorting(x, on='@', within=True, between=False, fix=98)
        stack = self._get_stack_with_links(dataset, x=x, y=y)
        # Two expanding nets: the member codes are shown after each net row.
        net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}},
               {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}]
        net_view = ViewMapper().make_template('frequency')
        view_name = 'expandnet'
        options = {'logic': net,
                   'expand': 'after',
                   'complete': True,
                   'axis': 'x',
                   'iterators': {'rel_to': [None, 'y']}}
        net_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=net_view)
        vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet',
               'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=y, rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for yk in y:
            for vk in vks:
                v = chain['rules_test']['no_filter'][x][yk][vk]
                l = stack['rules_test']['no_filter'][x][yk][vk]
                # Chain view must hold the same cells as the stack view,
                # only re-indexed by the sorting rule.
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                # Group order kept; members sorted within each group; 98 fixed last.
                expected_order = ['test A', 3, 2, 1, 4, 'test B', 97, 5, 6, 98]
                self.assertEqual(actual_order, expected_order)
    def test_sortx_expand_net_between(self):
        """Sort the net groups against each other only
        (within=False, between=True, ascending), with code 98 fixed at the end.
        """
        dataset = self._get_dataset()
        x = 'q2'
        y = ['@', 'gender']
        dataset.sorting(x, on='@', within=False, between=True, ascending=True,
                        fix=98)
        stack = self._get_stack_with_links(dataset, x=x, y=y)
        net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}},
               {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}]
        net_view = ViewMapper().make_template('frequency')
        view_name = 'expandnet'
        options = {'logic': net,
                   'expand': 'after',
                   'complete': True,
                   'axis': 'x',
                   'iterators': {'rel_to': [None, 'y']}}
        net_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=net_view)
        vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet',
               'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=y, rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for yk in y:
            for vk in vks:
                v = chain['rules_test']['no_filter'][x][yk][vk]
                l = stack['rules_test']['no_filter'][x][yk][vk]
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                # Groups reordered; members keep their original order; 98 fixed last.
                expected_order = [4, 'test B', 5, 6, 97, 'test A', 1, 2, 3, 98]
                self.assertEqual(actual_order, expected_order)
    def test_sortx_expand_net_within_between(self):
        """Sort both across groups and inside them (descending), with code 98
        fixed at the end; also checks a column-test view follows the order.
        """
        dataset = self._get_dataset()
        x = 'q2'
        y = ['@', 'gender']
        dataset.sorting(x, on='@', within=True, between=True, ascending=False,
                        fix=98)
        stack = self._get_stack_with_links(dataset, x=x, y=y)
        net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}},
               {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}]
        net_view = ViewMapper().make_template('frequency')
        view_name = 'expandnet'
        options = {'logic': net,
                   'expand': 'after',
                   'complete': True,
                   'axis': 'x',
                   'iterators': {'rel_to': [None, 'y']}}
        net_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=net_view)
        # Column proportion tests at the 0.2 level on the same net structure.
        test_view = ViewMapper().make_template('coltests')
        view_name = 'test'
        options = {'level': 0.2}
        test_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=test_view)
        vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet',
               'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet',
               'x|t.props.Dim.20|x[{1,2,3}+],x[{5,6,97}+]*:|||test']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=y, rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for yk in y:
            for vk in vks:
                v = chain['rules_test']['no_filter'][x][yk][vk]
                l = stack['rules_test']['no_filter'][x][yk][vk]
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                expected_order = ['test A', 3, 2, 1, 'test B', 97, 5, 6, 4, 98]
                self.assertEqual(actual_order, expected_order)
    def test_sortx(self):
        """Exercise the 'sortx' rule on a crosstab: default sort, explicit
        sort_on='@', fixed trailing values, and sorting by another weight.

        Expected index orders are literal fixtures; the 'unwtd'/'iswtd' keys
        hold the unweighted and weighted ('weight_a') expectations.
        """
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        ################## sort_on - default
        meta['columns'][col_x]['rules'] = {'x': {'sortx': {}}}
        meta['columns'][col_y]['rules'] = {'y': {'sortx': {}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 15, 12, 3, 11, 14, 6, 8, 10, 9, 5, 4, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 6, 4, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        ################## sort_on - '@'
        # sort_on='@' is the explicit spelling of the default, so the
        # expected orders are identical to the section above.
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'sort_on': '@'}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'sort_on': '@'}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 15, 12, 3, 11, 14, 6, 8, 10, 9, 5, 4, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 6, 4, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        ################## fixed
        # 'fixed' values are excluded from sorting and appended at the end
        # in the order given.
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'fixed': [5, 1, 3]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'fixed': [6, 2, 4]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        ################## with_weight
        # Sorting by 'weight_b' makes both the unweighted and the
        # 'weight_a' runs share the same (weight_b-based) order.
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'with_weight': 'weight_b'}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'with_weight': 'weight_b'}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 12, 6, 10, 14, 11, 7, 13, 9, 8]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 12, 6, 10, 14, 11, 7, 13, 9, 8])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 11, 3, 12, 15, 8, 9, 10, 5, 14, 6, 4, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 11, 3, 12, 15, 8, 9, 10, 5, 14, 6, 4, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
def test_dropx(self):
meta = self.example_data_A_meta
data = self.example_data_A_data
col_x = 'religion'
col_y = 'ethnicity'
################## values
meta['columns'][col_x]['rules'] = {
'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
meta['columns'][col_y]['rules'] = {
'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
rules_values_x = {
'unwtd': index_items(col_x, all=True,
values=[2, 4, 6, 8, 10, 12, 14, 16]),
'iswtd': index_items(col_x, all=True,
values=[2, 4, 6, 8, 10, 12, 14, 16])}
rules_values_y = {
'unwtd': index_items(col_y, all=True,
values=[1, 3, 5, 7, 9, 11, 13, 15]),
'iswtd': index_items(col_y, all=True,
values=[1, 3, 5, 7, 9, 11, 13, 15])}
confirm_crosstabs(
self,
meta, data,
[None, 'weight_a'],
col_x, col_y,
rules_values_x,
rules_values_y)
    def test_rules_frequency(self):
        """Exercise slicex / sortx / dropx rules — alone and combined — on a
        single-variable frequency for 'religion'.

        Each section sets the rules on the column meta, states the expected
        index order for both axes ('unwtd' = unweighted, 'iswtd' =
        'weight_a'-weighted) and delegates verification to
        ``confirm_frequencies``.
        """
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col = 'religion'
        ################## slicex
        meta['columns'][col]['rules'] = {
            'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}},
            'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
            'iswtd': index_items(col, all=True,
                values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col, all=True,
                values=[2, 4, 6, 8, 10, 12, 14, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## sortx
        meta['columns'][col]['rules'] = {
            'x': {'sortx': {'fixed': [5, 1, 3]}},
            'y': {'sortx': {'fixed': [6, 2, 4]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
            'iswtd': index_items(col, all=True,
                values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[1, 3, 15, 5, 16, 10, 12, 14, 11, 7, 13, 8, 9, 6, 2, 4]),
            'iswtd': index_items(col, all=True,
                values=[1, 3, 15, 5, 16, 12, 10, 14, 11, 7, 13, 9, 8, 6, 2, 4])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## dropx
        meta['columns'][col]['rules'] = {
            'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}},
            'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col, all=True,
                values=[2, 4, 6, 8, 10, 12, 14, 16])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[1, 3, 5, 7, 9, 11, 13, 15]),
            'iswtd': index_items(col, all=True,
                values=[1, 3, 5, 7, 9, 11, 13, 15])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## slicex + sortx
        # Slice first, then sort the remainder with the given values fixed last.
        meta['columns'][col]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [1, 2]}},
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [15, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[4, 5, 6, 10, 12, 11, 7, 13, 8, 9, 1, 2]),
            'iswtd': index_items(col, all=True,
                values=[4, 5, 6, 12, 10, 11, 7, 13, 9, 8, 1, 2])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[10, 12, 14, 11, 7, 13, 8, 9, 15, 16]),
            'iswtd': index_items(col, all=True,
                values=[12, 10, 14, 11, 7, 13, 9, 8, 15, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## slicex + dropx
        meta['columns'][col]['rules'] = {
            'x': {
                'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                'dropx': {'values': [3, 7, 11, 15]}},
            'y': {
                'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                'dropx': {'values': [2, 6, 10, 14]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[1, 5, 9, 13]),
            'iswtd': index_items(col, all=True,
                values=[1, 5, 9, 13])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[4, 8, 12, 16]),
            'iswtd': index_items(col, all=True,
                values=[4, 8, 12, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## sortx + dropx
        meta['columns'][col]['rules'] = {
            'x': {
                'sortx': {'fixed': [1, 2]},
                'dropx': {'values': [5, 11, 13]}},
            'y': {
                'sortx': {'fixed': [15, 16]},
                'dropx': {'values': [7, 13, 14]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[3, 15, 4, 16, 6, 10, 12, 14, 7, 8, 9, 1, 2]),
            'iswtd': index_items(col, all=True,
                values=[3, 15, 4, 16, 6, 12, 10, 14, 7, 9, 8, 1, 2])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[2, 1, 3, 4, 5, 6, 10, 12, 11, 8, 9, 15, 16]),
            'iswtd': index_items(col, all=True,
                values=[2, 1, 3, 4, 5, 6, 12, 10, 11, 9, 8, 15, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## slicex + sortx + dropx
        meta['columns'][col]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [11, 13]},
                'dropx': {'values': [7]}},
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [15, 16]},
                'dropx': {'values': [7, 13]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[4, 5, 6, 10, 12, 8, 9, 11, 13]),
            'iswtd': index_items(col, all=True,
                values=[4, 5, 6, 12, 10, 9, 8, 11, 13])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[10, 12, 14, 11, 8, 9, 15, 16]),
            'iswtd': index_items(col, all=True,
                values=[12, 10, 14, 11, 9, 8, 15, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
    def test_rules_crosstab(self):
        """Exercise rule combinations on a religion-by-ethnicity crosstab.

        The full slicex + sortx + dropx combination always runs; the
        single-rule and pairwise sections only run when ``EXTENDED_TESTS``
        is enabled. Verification is delegated to ``confirm_crosstabs``.
        """
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        ################## slicex + sortx + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [4, 7, 3]},
                'dropx': {'values': [6, 11]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [7, 11, 13]},
                'dropx': {'values': [11, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[5, 10, 12, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=True,
                values=[5, 12, 10, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[15, 12, 14, 8, 10, 9, 7, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[12, 15, 8, 9, 10, 14, 7, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        if EXTENDED_TESTS:
            ################## slicex
            meta['columns'][col_x]['rules'] = {
                'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
                'iswtd': index_items(col_x, all=True,
                    values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_y, all=True,
                    values=[2, 4, 6, 8, 10, 12, 14, 16])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## sortx
            meta['columns'][col_x]['rules'] = {
                'x': {'sortx': {'fixed': [5, 1, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'sortx': {'fixed': [6, 2, 4]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
                'iswtd': index_items(col_x, all=True,
                    values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
                'iswtd': index_items(col_y, all=True,
                    values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## dropx
            meta['columns'][col_x]['rules'] = {
                'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_x, all=True,
                    values=[2, 4, 6, 8, 10, 12, 14, 16])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[1, 3, 5, 7, 9, 11, 13, 15]),
                'iswtd': index_items(col_y, all=True,
                    values=[1, 3, 5, 7, 9, 11, 13, 15])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## slicex + sortx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': frange('4-13')},
                    'sortx': {'fixed': [4, 7, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': frange('7-16')},
                    'sortx': {'fixed': [7, 11, 13]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=True,
                    values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]),
                'iswtd': index_items(col_y, all=True,
                    values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## slicex + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                    'dropx': {'values': [3, 7, 11, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                    'dropx': {'values': [2, 6, 10, 14]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[1, 5, 9, 13]),
                'iswtd': index_items(col_x, all=True,
                    values=[1, 5, 9, 13])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[4, 8, 12, 16]),
                'iswtd': index_items(col_y, all=True,
                    values=[4, 8, 12, 16])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## sortx + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'sortx': {'fixed': [4, 7, 3]},
                    'dropx': {'values': [5, 10]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'sortx': {'fixed': [7, 11, 13]},
                    'dropx': {'values': [4, 12]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=True,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]),
                'iswtd': index_items(col_y, all=True,
                    values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
    def test_rules_get_dataframe(self):
        """Verify rules are applied when views are pulled via get_dataframe.

        Builds a fresh stack per rule combination (note ``all=False`` on
        ``index_items`` — the 'All' margin row is not expected here) and
        delegates verification to ``confirm_get_dataframe``. Only the full
        slicex + sortx + dropx combination runs unless ``EXTENDED_TESTS``.
        """
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        xks = [col_x]
        yks = ['@', col_y]
        test_views = [
            'cbase', 'rbase',
            # 'ebase',
            'counts', 'c%', 'r%',
            'mean']
        weights = [None, 'weight_a']
        ################## slicex + sortx + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [4, 7, 3]},
                'dropx': {'values': [6, 11]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [7, 11, 13]},
                'dropx': {'values': [11, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[5, 10, 12, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=False,
                values=[5, 12, 10, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[15, 12, 14, 8, 10, 9, 7, 13]),
            'iswtd': index_items(col_y, all=False,
                values=[12, 15, 8, 9, 10, 14, 7, 13])}
        stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                          extras=True)
        confirm_get_dataframe(
            self, stack, col_x, col_y,
            rules_values_x, rules_values_y)
        if EXTENDED_TESTS:
            ################## slicex
            meta['columns'][col_x]['rules'] = {
                'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
                'iswtd': index_items(col_x, all=False,
                    values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_y, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## sortx
            meta['columns'][col_x]['rules'] = {
                'x': {'sortx': {'fixed': [5, 1, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'sortx': {'fixed': [6, 2, 4]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## dropx
            meta['columns'][col_x]['rules'] = {
                'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 3, 5, 7, 9, 11, 13, 15]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 3, 5, 7, 9, 11, 13, 15])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## slicex + sortx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': frange('4-13')},
                    'sortx': {'fixed': [4, 7, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': frange('7-16')},
                    'sortx': {'fixed': [7, 11, 13]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]),
                'iswtd': index_items(col_y, all=False,
                    values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## slicex + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                    'dropx': {'values': [3, 7, 11, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                    'dropx': {'values': [2, 6, 10, 14]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[1, 5, 9, 13]),
                'iswtd': index_items(col_x, all=False,
                    values=[1, 5, 9, 13])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[4, 8, 12, 16]),
                'iswtd': index_items(col_y, all=False,
                    values=[4, 8, 12, 16])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## sortx + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'sortx': {'fixed': [4, 7, 3]},
                    'dropx': {'values': [5, 10]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'sortx': {'fixed': [7, 11, 13]},
                    'dropx': {'values': [4, 12]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
    def test_rules_get_chain(self):
        """Verify rules are applied when views are pulled via get_chain,
        with an extra non-ruled y-variable ('q5_1') in the chain.

        Delegates verification to ``confirm_xy_chains``; only the full
        slicex + sortx + dropx combination runs unless ``EXTENDED_TESTS``.
        """
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        others = ['q5_1']
        xks = [col_x]
        yks = ['@', col_y] + others
        test_views = [
            'cbase', 'rbase',
            # 'ebase',
            'counts', 'c%', 'r%',
            'mean']
        weights = [None, 'weight_a']
        ################## slicex + sortx + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [4, 7, 3]},
                'dropx': {'values': [6, 11]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [7, 11, 13]},
                'dropx': {'values': [11, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[5, 10, 12, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=False,
                values=[5, 12, 10, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[15, 12, 14, 8, 10, 9, 7, 13]),
            'iswtd': index_items(col_y, all=False,
                values=[12, 15, 8, 9, 10, 14, 7, 13])}
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
        if EXTENDED_TESTS:
            ################## slicex
            meta['columns'][col_x]['rules'] = {
                'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
                'iswtd': index_items(col_x, all=False,
                    values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_y, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16])}
            confirm_xy_chains(
                self, meta, data,
                col_x, col_y, others,
                test_views, weights,
                rules_values_x, rules_values_y)
            ################## sortx
            meta['columns'][col_x]['rules'] = {
                'x': {'sortx': {'fixed': [5, 1, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'sortx': {'fixed': [6, 2, 4]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
            confirm_xy_chains(
                self, meta, data,
                col_x, col_y, others,
                test_views, weights,
                rules_values_x, rules_values_y)
            ################## dropx
            meta['columns'][col_x]['rules'] = {
                'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 4, 6, 8, 10, 12, 14, 16])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 3, 5, 7, 9, 11, 13, 15]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 3, 5, 7, 9, 11, 13, 15])}
            confirm_xy_chains(
                self, meta, data,
                col_x, col_y, others,
                test_views, weights,
                rules_values_x, rules_values_y)
            ################## slicex + sortx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': frange('4-13')},
                    'sortx': {'fixed': [4, 7, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': frange('7-16')},
                    'sortx': {'fixed': [7, 11, 13]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]),
                'iswtd': index_items(col_y, all=False,
                    values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])}
            # NOTE(review): this get_stack call looks like copy-paste residue
            # from test_rules_get_dataframe — the resulting 'stack' is never
            # used, since confirm_xy_chains takes meta/data directly. Verify
            # and consider removing.
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_xy_chains(
                self, meta, data,
                col_x, col_y, others,
                test_views, weights,
                rules_values_x, rules_values_y)
            ################## slicex + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                    'dropx': {'values': [3, 7, 11, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                    'dropx': {'values': [2, 6, 10, 14]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[1, 5, 9, 13]),
                'iswtd': index_items(col_x, all=False,
                    values=[1, 5, 9, 13])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[4, 8, 12, 16]),
                'iswtd': index_items(col_y, all=False,
                    values=[4, 8, 12, 16])}
            confirm_xy_chains(
                self, meta, data,
                col_x, col_y, others,
                test_views, weights,
                rules_values_x, rules_values_y)
            ################## sortx + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'sortx': {'fixed': [4, 7, 3]},
                    'dropx': {'values': [5, 10]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'sortx': {'fixed': [7, 11, 13]},
                    'dropx': {'values': [4, 12]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])}
            confirm_xy_chains(
                self, meta, data,
                col_x, col_y, others,
                test_views, weights,
                rules_values_x, rules_values_y)
def test_rules_coltests(self):
meta = self.example_data_A_meta
data = self.example_data_A_data
col_x = 'q5_1'
col_y = 'locality'
xks = [col_x]
yks = ['@', col_y]
test_views = [
'cbase', 'counts', 'mean']
weights = [None]
dk = 'test'
fk = 'no_filter'
xk = col_x
yk = col_y
stack = get_stack(
self, meta, data, xks, yks, test_views, weights,
extras=True, coltests=True)
################## slicex
######### counts
meta['columns'][col_y]['rules'] = {
'y': {'slicex': {'values': [5, 2, 3]}}}
vk = 'x|t.props.askia.01|:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
['[2]', np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### net
meta['columns'][col_y]['rules'] = {
'y': {'slicex': {'values': [3, 1, 5]}}}
vk = 'x|t.props.askia.10|x[{1,2,3}]:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, '[5]', np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### block net
meta['columns'][col_y]['rules'] = {
'y': {'slicex': {'values': [4, 1, 3]}}}
vk = 'x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, np.NaN, np.NaN],
[np.NaN, '[3, 4]', np.NaN],
[np.NaN, '[4]', np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### mean
meta['columns'][col_y]['rules'] = {
'y': {'slicex': {'values': [5, 2, 4]}}}
vk = 'x|t.means.askia.10|x:|||askia tests'
rules_values_df = pd.DataFrame([
['[2, 4]', np.NaN, '[2]']])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
################## sortx
######### counts
meta['columns'][col_y]['rules'] = {
'y': {'sortx': {'fixed': [1, 2]}}}
vk = 'x|t.props.askia.01|:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN, '[5]', np.NaN],
[np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
['[1]', np.NaN, np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
[np.NaN, '[1, 2]', np.NaN, np.NaN, np.NaN],
[np.NaN, '[1]', np.NaN, np.NaN, np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### net
meta['columns'][col_y]['rules'] = {
'y': {'sortx': {'fixed': [1, 2]}}}
vk = 'x|t.props.askia.10|x[{1,2,3}]:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, np.NaN, np.NaN, '[4, 5]', '[4]']])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### block net
meta['columns'][col_y]['rules'] = {
'y': {'sortx': {'fixed': [1, 2]}}}
vk = 'x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests'
rules_values_df = pd.DataFrame([
['[5]', np.NaN, np.NaN, '[2, 5]', np.NaN],
[np.NaN, np.NaN, np.NaN, '[3, 4, 5]', '[4, 5]'],
[np.NaN, np.NaN, np.NaN, '[4]', np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### mean
meta['columns'][col_y]['rules'] = {
'y': {'sortx': {'fixed': [1, 2]}}}
vk = 'x|t.means.askia.10|x:|||askia tests'
rules_values_df = pd.DataFrame([
['[1]', '[1, 2, 3, 4]', '[1, 2, 3]', np.NaN, '[1]']])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
################## dropx
######### counts
meta['columns'][col_y]['rules'] = {
'y': {'dropx': {'values': [1, 4]}}}
vk = 'x|t.props.askia.01|:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN],
[np.NaN, np.NaN, '[2]'],
[np.NaN, np.NaN, np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### net
meta['columns'][col_y]['rules'] = {
'y': {'dropx': {'values': [1, 3]}}}
vk = 'x|t.props.askia.10|x[{1,2,3}]:|||askia tests'
rules_values_df = pd.DataFrame([
['[4]', np.NaN, np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### block net
meta['columns'][col_y]['rules'] = {
'y': {'dropx': {'values': [2, 4]}}}
vk = 'x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests'
rules_values_df = pd.DataFrame([
['[5]', '[5]', np.NaN],
['[3, 5]', np.NaN, np.NaN],
[np.NaN, np.NaN, np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
######### mean
meta['columns'][col_y]['rules'] = {
'y': {'dropx': {'values': [1, 3]}}}
vk = 'x|t.means.askia.10|x:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, '[2]', '[2, 4]']])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
def test_rules_coltests_flag_bases(self):
meta = self.example_data_A_meta
data = self.example_data_A_data
col_x = 'q5_1'
col_y = 'locality'
xks = [col_x]
yks = ['@', col_y]
test_views = [
'cbase', 'counts', 'mean']
weights = [None]
dk = 'test'
fk = 'no_filter'
xk = col_x
yk = col_y
minimum = 1000
small = 2000
stack = get_stack(
self, meta, data, xks, yks, test_views, weights,
extras=True, coltests=True, flag_bases=[minimum, small])
################## slicex
######### counts
meta['columns'][col_y]['rules'] = {
'y': {'slicex': {'values': [5, 2, 3]}}}
vk = 'x|t.props.Dim.05|:|||askia tests'
rules_values_df = pd.DataFrame([
['**', np.NaN, '[2]*'],
['**', np.NaN, '*'],
['**', np.NaN, '*'],
['**', np.NaN, '*'],
['**', np.NaN, '*'],
['**', np.NaN, '*'],
['**', np.NaN, '*']])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
cbase = 'x|f|x:|||cbase'
keys_cbase = [dk, fk, xk, yk, cbase]
df_cbase = get_dataframe(stack, keys=keys_cbase, rules=True)
is_minimum = [c<=minimum for c in df_cbase.values[0]]
is_small = [c>minimum and c<=small for c in df_cbase.values[0]]
actual = is_minimum
expected = [True, False, False]
self.assertSequenceEqual(actual, expected)
actual = is_small
expected = [False, False, True]
self.assertSequenceEqual(actual, expected)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
################## sortx
######### counts
meta['columns'][col_y]['rules'] = {
'y': {'sortx': {'fixed': [1, 2]}}}
vk = 'x|t.props.Dim.05|:|||askia tests'
rules_values_df = pd.DataFrame([
['[1, 2]*', '**', '**', np.NaN, np.NaN],
['*', '**', '**', '[2, 3]', np.NaN],
['*', '**', '**', np.NaN, np.NaN],
['[1]*', '**', '**', np.NaN, '[1]'],
['*', '**', '**', np.NaN, np.NaN],
['*', '**', '**', np.NaN, np.NaN],
['*', '**', '**', np.NaN, np.NaN]])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
cbase = 'x|f|x:|||cbase'
keys_cbase = [dk, fk, xk, yk, cbase]
df_cbase = get_dataframe(stack, keys=keys_cbase, rules=True)
is_minimum = [c<=minimum for c in df_cbase.values[0]]
is_small = [c>minimum and c<=small for c in df_cbase.values[0]]
actual = is_minimum
expected = [False, True, True, False, False]
self.assertSequenceEqual(actual, expected)
actual = is_small
expected = [True, False, False, False, False]
self.assertSequenceEqual(actual, expected)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
################## dropx
######### counts
meta['columns'][col_y]['rules'] = {
'y': {'dropx': {'values': [1, 4]}}}
vk = 'x|t.props.Dim.05|:|||askia tests'
rules_values_df = pd.DataFrame([
[np.NaN, '[2]*', '**'],
[np.NaN, '*', '**'],
[np.NaN, '*', '**'],
[np.NaN, '*', '**'],
[np.NaN, '*', '**'],
[np.NaN, '*', '**'],
[np.NaN, '*', '**']])
keys = [dk, fk, xk, yk, vk]
df = get_dataframe(stack, keys=keys, rules=True)
cbase = 'x|f|x:|||cbase'
keys_cbase = [dk, fk, xk, yk, cbase]
df_cbase = get_dataframe(stack, keys=keys_cbase, rules=True)
is_minimum = [c<=minimum for c in df_cbase.values[0]]
is_small = [c>minimum and c<=small for c in df_cbase.values[0]]
actual = is_minimum
expected = [False, False, True]
self.assertSequenceEqual(actual, expected)
actual = is_small
expected = [False, True, False]
self.assertSequenceEqual(actual, expected)
actual = df.fillna(0).values.tolist()
expected = rules_values_df.fillna(0).values.tolist()
self.assertSequenceEqual(actual, expected)
# ##################### Helper functions #####################
def index_items(col, values, all=False):
    """
    Build the list of (column, value) tuples matching an index, with an
    optional leading (column, 'All') entry.
    """
    prefix = [(col, 'All')] if all else []
    return prefix + [(col, value) for value in values]
def confirm_frequencies(self, meta, data,
                        weights,
                        col,
                        rules_values_x,
                        rules_values_y):
    """
    Confirms all variations of the rules argument applied via frequency:
    True, False, ['x'], ['y'] and ['x', 'y'], for every weight.
    """
    natural_x = frequency(meta, data, x=col).index.values.tolist()
    natural_y = natural_x
    frequ_x = [(col, '@')]
    for weight in weights:
        wtd_key = 'unwtd' if weight is None else 'iswtd'
        rules_x = rules_values_x[wtd_key]
        rules_y = rules_values_y[wtd_key]
        # (rules argument, expected x index, expected y columns)
        variations = [
            (True, rules_x, rules_y),
            (False, natural_x, natural_y),
            (['x'], rules_x, natural_y),
            (['y'], natural_x, rules_y),
            (['x', 'y'], rules_x, rules_y)]
        for rules_arg, expected_x, expected_y in variations:
            fx = frequency(meta, data, x=col, weight=weight, rules=rules_arg)
            fy = frequency(meta, data, y=col, weight=weight, rules=rules_arg)
            confirm_index_columns(self, fx, expected_x, frequ_x)
            confirm_index_columns(self, fy, frequ_x, expected_y)
def confirm_crosstabs(self, meta, data,
                      weights,
                      col_x, col_y,
                      rules_values_x,
                      rules_values_y):
    """
    Confirms all variations of the rules argument applied via crosstab:
    True, False, ['x'], ['y'] and ['x', 'y'], for every weight and for
    both xtotal settings.
    """
    natural_x = frequency(meta, data, x=col_x).index.values.tolist()
    natural_y = frequency(meta, data, y=col_y).columns.values.tolist()
    for weight in weights:
        wtd_key = 'unwtd' if weight is None else 'iswtd'
        rules_x = rules_values_x[wtd_key]
        rules_y = rules_values_y[wtd_key]
        for xtotal in (False, True):
            # (rules argument, expected x index, expected y columns)
            variations = [
                (True, rules_x, rules_y),
                (False, natural_x, natural_y),
                (['x'], rules_x, natural_y),
                (['y'], natural_x, rules_y),
                (['x', 'y'], rules_x, rules_y)]
            for rules_arg, expected_x, expected_y in variations:
                df = crosstab(meta, data, col_x, col_y, weight=weight,
                              rules=rules_arg, xtotal=xtotal)
                confirm_index_columns(self, df, expected_x, expected_y)
def confirm_get_dataframe(self, stack, col_x, col_y,
                          rules_values_x, rules_values_y):
    """
    Confirms all variations of the rules argument applied via
    get_dataframe, for every view in the stack.
    """
    dk = 'test'
    fk = 'no_filter'
    meta = stack[dk].meta
    data = stack[dk].data
    vks = stack.describe()['view'].values.tolist()
    for xk in [col_x]:
        for yk in ['@', col_y]:
            if xk == '@' and yk == '@':
                continue
            for vk in vks:
                keys = [dk, fk, xk, yk, vk]
                rules_x, natural_x, rules_y, natural_y = get_xy_values(
                    meta, data,
                    col_x, col_y,
                    xk, yk, vk,
                    rules_values_x, rules_values_y)
                # (rules argument, expected x index, expected y columns)
                variations = [
                    (True, rules_x, rules_y),
                    (False, natural_x, natural_y),
                    (['x'], rules_x, natural_y),
                    (['y'], natural_x, rules_y),
                    (['x', 'y'], rules_x, rules_y)]
                for rules_arg, expected_x, expected_y in variations:
                    df = get_dataframe(stack, keys=keys, rules=rules_arg)
                    confirm_index_columns(self, df, expected_x, expected_y)
def confirm_xy_chains(self, meta, data, col_x, col_y, others, views, weights,
                      rules_values_x, rules_values_y):
    """
    Builds one x-oriented and one y-oriented stack and confirms that
    rules are honoured on the chains pulled from each.
    """
    x_stack = get_stack(
        self, meta, data,
        [col_x],
        ['@', col_y] + others,
        views,
        weights,
        extras=True)
    confirm_get_xchain(
        self, x_stack, col_x, col_y, others,
        rules_values_x, rules_values_y)
    y_stack = get_stack(
        self, meta, data,
        [col_x] + others,
        [col_y],
        views,
        weights,
        extras=True)
    confirm_get_ychain(
        self, y_stack, col_x, col_y, others,
        rules_values_x, rules_values_y)
def confirm_get_xchain(self, stack, col_x, col_y, others,
                       rules_values_x, rules_values_y):
    """
    Confirms rules on chains built from a single x against '@', the
    main y and the extra y columns.
    """
    dk = 'test'
    keys = [dk, 'no_filter', col_x, col_y, 'vk']
    meta = stack[dk].meta
    data = stack[dk].data
    confirm_get_chain(
        self,
        meta, data,
        stack, keys,
        col_x, col_y,
        [col_x], ['@', col_y] + others,
        rules_values_x, rules_values_y,
        others)
def confirm_get_ychain(self, stack, col_x, col_y, others,
                       rules_values_x, rules_values_y):
    """
    Confirms rules on chains built from the main x plus the extra x
    columns against a single y.
    """
    dk = 'test'
    keys = [dk, 'no_filter', col_x, col_y, 'vk']
    meta = stack[dk].meta
    data = stack[dk].data
    confirm_get_chain(
        self,
        meta, data,
        stack, keys,
        col_x, col_y,
        [col_x] + others, [col_y],
        rules_values_x, rules_values_y,
        others)
def confirm_get_chain(self,
                      meta, data,
                      stack, keys,
                      col_x, col_y,
                      xks, yks,
                      rules_values_x, rules_values_y,
                      others=()):
    """
    Confirms all variations of the rules argument applied when chains
    are retrieved from the stack, for both rules_weight=None and
    rules_weight='weight_a'.

    ``keys`` is mutated in place ([dk, fk, xk, yk, vk]) to address each
    link/view combination. ``others`` defaults to an immutable tuple
    (the original mutable-list default was an anti-pattern).
    """
    vks = stack.describe()['view'].values.tolist()
    # Pre-build one chain per rules variation and per rules_weight, so
    # every (xk, yk, vk) combination below is checked against all of
    # them. This replaces two copy-pasted unweighted/weighted branches.
    rules_variations = [
        ('true', True),
        ('false', False),
        ('x', ['x']),
        ('y', ['y']),
        ('xy', ['x', 'y'])]
    chains = {}
    for weight in (None, 'weight_a'):
        chains[weight] = {
            name: stack.get_chain(
                x=xks, y=yks, views=vks, rules=rules_arg,
                rules_weight=weight)
            for name, rules_arg in rules_variations}
    for xk in xks:
        keys[2] = xk
        for yk in yks:
            if xk == '@' and yk == '@':
                continue
            keys[3] = yk
            for vk in vks:
                keys[4] = vk
                for weight in (None, 'weight_a'):
                    rules_x, natural_x, rules_y, natural_y = get_xy_values(
                        meta, data,
                        col_x, col_y,
                        xk, yk, vk,
                        rules_values_x, rules_values_y,
                        others,
                        rules_weight=weight)
                    # (variation name, expected x index, expected y cols)
                    expectations = [
                        ('true', rules_x, rules_y),
                        ('false', natural_x, natural_y),
                        ('x', rules_x, natural_y),
                        ('y', natural_x, rules_y),
                        ('xy', rules_x, rules_y)]
                    for name, expected_x, expected_y in expectations:
                        # Rules were already applied when the chain was
                        # built, so retrieve with rules=False here.
                        df = get_dataframe(
                            chains[weight][name], keys=keys, rules=False)
                        confirm_index_columns(self, df, expected_x, expected_y)
def get_xy_values(meta, data,
                  col_x, col_y,
                  xk, yk, vk,
                  rules_values_x, rules_values_y,
                  others=(), rules_weight='auto'):
    """
    Derive the expected (rules_x, natural_x, rules_y, natural_y) index
    and column tuples for the view key ``vk`` on the link (xk, yk).

    ``others`` defaults to an immutable tuple (the original mutable-list
    default was an anti-pattern). ``rules_weight='auto'`` means "use the
    view's own weight" ('' in the view key meaning unweighted).
    """
    # View keys look like 'x|method|relation|relative|weight|shortname'.
    parts = vk.split('|')
    v_method = parts[1]
    relation = parts[2]
    weight = parts[4]
    shortname = parts[5]
    # Condensed views collapse an axis into a single row/column (or a
    # small block of them, for block nets).
    condensed_x = relation.split(":")[0].startswith('x') or v_method.startswith('d.')
    condensed_y = relation.split(":")[1].startswith('y')
    if rules_weight == 'auto':
        rules_weight = None if weight == '' else weight
    if rules_weight is None:
        rules_x = rules_values_x['unwtd']
        rules_y = rules_values_y['unwtd']
    else:
        rules_x = rules_values_x['iswtd']
        rules_y = rules_values_y['iswtd']
    # --- expected x index ---
    if xk in others:
        # Extra x columns: no rules are defined, natural order applies.
        fx = frequency(meta, data, x=xk)
        natural_x = fx.index.values.tolist()
        natural_x.remove((xk, 'All'))
        rules_x = natural_x
        if condensed_x:
            if shortname == 'Block net':
                rules_x = natural_x = [
                    (xk, 'bn1'),
                    (xk, 'bn2'),
                    (xk, 'bn3')]
            elif shortname in ['cbase', 'ebase']:
                rules_x = natural_x = [(xk, 'All')]
            else:
                rules_x = natural_x = [(xk, shortname)]
    elif xk == '@':
        if condensed_x:
            if shortname == 'Block net':
                rules_x = natural_x = [
                    (col_x, 'bn1'),
                    (col_x, 'bn2'),
                    (col_x, 'bn3')]
            elif shortname in ['cbase', 'ebase']:
                rules_x = natural_x = [(col_y, 'All')]
            else:
                rules_x = natural_x = [(col_y, shortname)]
        else:
            rules_x = natural_x = [(col_y, '@')]
    elif condensed_x:
        if shortname == 'Block net':
            rules_x = natural_x = [
                (col_x, 'bn1'),
                (col_x, 'bn2'),
                (col_x, 'bn3')]
        elif shortname in ['cbase', 'ebase']:
            rules_x = natural_x = [(xk, 'All')]
        else:
            rules_x = natural_x = [(xk, shortname)]
    else:
        fx = frequency(meta, data, x=col_x)
        natural_x = fx.index.values.tolist()
        natural_x.remove((col_x, 'All'))
    # --- expected y columns ---
    if yk in others:
        # Extra y columns: no rules are defined, natural order applies.
        fy = frequency(meta, data, y=yk)
        natural_y = fy.columns.values.tolist()
        natural_y.remove((yk, 'All'))
        rules_y = natural_y
        if condensed_y:
            if shortname == 'Block net':
                rules_y = natural_y = [
                    (yk, 'bn1'),
                    (yk, 'bn2'),
                    (yk, 'bn3')]
            elif shortname in ['rbase']:
                rules_y = natural_y = [(yk, 'All')]
            else:
                rules_y = natural_y = [(yk, shortname)]
    elif yk == '@':
        if condensed_y:
            if shortname == 'Block net':
                rules_y = natural_y = [
                    (col_y, 'bn1'),
                    (col_y, 'bn2'),
                    (col_y, 'bn3')]
            elif shortname in ['rbase']:
                rules_y = natural_y = [(col_x, 'All')]
            else:
                rules_y = natural_y = [(col_x, shortname)]
        else:
            rules_y = natural_y = [(col_x, '@')]
    elif condensed_y:
        if shortname == 'Block net':
            rules_y = natural_y = [
                (col_y, 'bn1'),
                (col_y, 'bn2'),
                (col_y, 'bn3')]
        elif shortname in ['rbase']:
            rules_y = natural_y = [(col_y, 'All')]
        else:
            rules_y = natural_y = [(col_y, shortname)]
    else:
        fy = frequency(meta, data, y=col_y)
        natural_y = fy.columns.values.tolist()
        natural_y.remove((col_y, 'All'))
    return rules_x, natural_x, rules_y, natural_y
def str_index_values(index):
    """
    Make sure level 1 of the multiindex are all strings.

    Returns a list of (level0, str(level1)) tuples. The original
    implementation indexed into a ``zip`` object (``zip(*values)[0]``),
    which only worked on Python 2; this version is Python 3 compatible
    and also handles an empty index.
    """
    values = index.values.tolist()
    return [(level0, str(level1)) for level0, level1 in values]
def confirm_index_columns(self, df, expected_x, expected_y):
    """
    Assert that df's row index and column tuples match expectations.

    When the frame carries more than one top-level column code, the
    leading column (the added xtotal column) is excluded from the
    comparison.
    """
    actual_x = df.index.values.tolist()
    start = 1 if len(df.columns.levels[0]) > 1 else 0
    actual_y = df.columns.values.tolist()[start:]
    self.assertEqual(actual_x, expected_x)
    self.assertEqual(actual_y, expected_y)
def get_stack(self, meta, data, xks, yks, views, weights,
              extras=False, coltests=False, flag_bases=None):
    """
    Build and populate a test Stack.

    Adds the requested base views for every x/y/weight combination and,
    optionally, extra views (basic net, block net, stddev) and
    column-significance test views. With flag_bases=None the tests mimic
    'askia'; with a [minimum, small] pair they mimic 'Dim' with base
    flagging enabled.
    """
    stack = Stack('test')
    stack.add_data('test', data, meta)
    stack.add_link(x=xks, y=yks, views=views, weights=weights)
    if extras or coltests:
        # Add a basic net
        net_views = ViewMapper(
            template={
                'method': QuantipyViews().frequency,
                'kwargs': {'iterators': {'rel_to': [None, 'y']}}})
        net_views.add_method(
            name='Net 1-3',
            kwargs={'logic': [1, 2, 3], 'axis': 'x',
                    'text': {'en-GB': '1-3'}})
        stack.add_link(x=xks, y=yks, views=net_views, weights=weights)
        # Add block net
        net_views.add_method(
            name='Block net',
            kwargs={
                'logic': [
                    {'bn1': [1, 2]},
                    {'bn2': [2, 3]},
                    {'bn3': [1, 3]}], 'axis': 'x'})
        stack.add_link(x=xks, y=yks, views=net_views.subset(['Block net']), weights=weights)
        # Add NPS
        ## TO DO
        # Add standard deviation
        stddev_views = ViewMapper(
            template = {
                'method': QuantipyViews().descriptives,
                'kwargs': {'stats': 'stddev'}})
        stddev_views.add_method(name='stddev')
        stack.add_link(x=xks, y=yks, views=stddev_views, weights=weights)
    if coltests:
        if flag_bases is None:
            # Plain askia-mimic sig-tests across both metrics and all levels.
            test_views = ViewMapper(
                template={
                    'method': QuantipyViews().coltests,
                    'kwargs': {
                        'mimic': 'askia',
                        'iterators': {
                            'metric': ['props', 'means'],
                            'level': ['low', 'mid', 'high']}}})
        else:
            # Dim-mimic sig-tests with minimum/small base flagging.
            test_views = ViewMapper(
                template={
                    'method': QuantipyViews().coltests,
                    'kwargs': {
                        'mimic': 'Dim',
                        'flag_bases': flag_bases,
                        'iterators': {
                            'metric': ['props', 'means'],
                            'level': ['low', 'mid', 'high']}}})
        test_views.add_method('askia tests')
        # NOTE(review): weights are not passed here, so test views are
        # presumably only added for the unweighted link — confirm intended.
        stack.add_link(x=xks, y=yks, views=test_views)
    return stack
| Quantipy/quantipy | tests/test_rules.py | Python | mit | 81,385 |