code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/env python
"""Django command-line utility (standard `manage.py` entry point)."""
import os
import sys

if __name__ == "__main__":
    # Point Django at this project's settings unless the caller already set it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_backend_test.settings")
    # Imported lazily so the settings module is configured first.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| semorale/backend-test | django_backend_test/manage.py | Python | mit | 262 |
##################################################################
# Code for testing the variational Multi-Stage Generative Model. #
##################################################################
# basic python
import numpy as np
import numpy.random as npr
import cPickle
# theano business
import theano
import theano.tensor as T
# phil's sweetness
import utils
from NetLayers import relu_actfun, softplus_actfun, tanh_actfun
from InfNet import InfNet
from HydraNet import HydraNet
from GPSImputer import GPSImputer, load_gpsimputer_from_file
from load_data import load_udm, load_tfd, load_svhn_gray, load_binarized_mnist
from HelperFuncs import construct_masked_data, shift_and_scale_into_01, \
row_shuffle, to_fX
RESULT_PATH = "IMP_MNIST_GPSI/"
###############################
###############################
## TEST GPS IMPUTER ON MNIST ##
###############################
###############################
def test_mnist(step_type='add',
               imp_steps=6,
               occ_dim=15,
               drop_prob=0.0):
    """
    Train a GPSImputer on binarized MNIST and log training diagnostics.

    Parameters:
        step_type: imputation update rule passed to GPSImputer ('add' or 'jump').
        imp_steps: number of iterative imputation steps.
        occ_dim: side length of the square occlusion mask (0 disables occlusion).
        drop_prob: probability of independently dropping each pixel.

    Side effects: writes a "<tag>_RESULTS.txt" log, periodic "<tag>_PARAMS.pkl"
    checkpoints, and sample-visualization PNGs under RESULT_PATH.
    """
    #########################################
    # Format the result tag more thoroughly #
    #########################################
    dp_int = int(100.0 * drop_prob)
    result_tag = "{}RELU_GPSI_OD{}_DP{}_IS{}_{}_NA".format(RESULT_PATH, occ_dim, dp_int, imp_steps, step_type)

    ##########################
    # Get some training data #
    ##########################
    rng = np.random.RandomState(1234)
    Xtr, Xva, Xte = load_binarized_mnist(data_path='./data/')
    # train on train+validation, and validate on the test set
    Xtr = np.vstack((Xtr, Xva))
    Xva = Xte
    #del Xte
    tr_samples = Xtr.shape[0]
    va_samples = Xva.shape[0]

    ##########################
    # Get some training data #
    ##########################
    # rng = np.random.RandomState(1234)
    # dataset = 'data/mnist.pkl.gz'
    # datasets = load_udm(dataset, as_shared=False, zero_mean=False)
    # Xtr = datasets[0][0]
    # Xva = datasets[1][0]
    # Xte = datasets[2][0]
    # # Merge validation set and training set, and test on test set.
    # #Xtr = np.concatenate((Xtr, Xva), axis=0)
    # #Xva = Xte
    # Xtr = to_fX(shift_and_scale_into_01(Xtr))
    # Xva = to_fX(shift_and_scale_into_01(Xva))
    # tr_samples = Xtr.shape[0]
    # va_samples = Xva.shape[0]

    batch_size = 200
    batch_reps = 1
    # mean pixel value over the training set, used to fill masked-out pixels
    all_pix_mean = np.mean(np.mean(Xtr, axis=1))
    data_mean = to_fX( all_pix_mean * np.ones((Xtr.shape[1],)) )

    ############################################################
    # Setup some parameters for the Iterative Refinement Model #
    ############################################################
    x_dim = Xtr.shape[1]
    s_dim = x_dim
    #s_dim = 300
    z_dim = 100
    init_scale = 0.6

    # symbolic inputs: masked input, imputation target, and observation mask
    x_in_sym = T.matrix('x_in_sym')
    x_out_sym = T.matrix('x_out_sym')
    x_mask_sym = T.matrix('x_mask_sym')

    #################
    # p_zi_given_xi #
    #################
    params = {}
    # input is the masked image concatenated with the current imputation state
    shared_config = [(x_dim + x_dim), 500, 500]
    top_config = [shared_config[-1], z_dim]
    params['shared_config'] = shared_config
    params['mu_config'] = top_config
    params['sigma_config'] = top_config
    params['activation'] = relu_actfun
    params['init_scale'] = init_scale
    params['vis_drop'] = 0.0
    params['hid_drop'] = 0.0
    params['bias_noise'] = 0.0
    params['input_noise'] = 0.0
    params['build_theano_funcs'] = False
    p_zi_given_xi = InfNet(rng=rng, Xd=x_in_sym, \
            params=params, shared_param_dicts=None)
    p_zi_given_xi.init_biases(0.0)

    ###################
    # p_sip1_given_zi #
    ###################
    params = {}
    shared_config = [z_dim, 500, 500]
    output_config = [s_dim, s_dim, s_dim]
    params['shared_config'] = shared_config
    params['output_config'] = output_config
    params['activation'] = relu_actfun
    params['init_scale'] = init_scale
    params['vis_drop'] = 0.0
    params['hid_drop'] = 0.0
    params['bias_noise'] = 0.0
    params['input_noise'] = 0.0
    params['build_theano_funcs'] = False
    p_sip1_given_zi = HydraNet(rng=rng, Xd=x_in_sym, \
            params=params, shared_param_dicts=None)
    p_sip1_given_zi.init_biases(0.0)

    ################
    # p_x_given_si #
    ################
    params = {}
    shared_config = [s_dim]
    output_config = [x_dim, x_dim]
    params['shared_config'] = shared_config
    params['output_config'] = output_config
    params['activation'] = relu_actfun
    params['init_scale'] = init_scale
    params['vis_drop'] = 0.0
    params['hid_drop'] = 0.0
    params['bias_noise'] = 0.0
    params['input_noise'] = 0.0
    params['build_theano_funcs'] = False
    p_x_given_si = HydraNet(rng=rng, Xd=x_in_sym, \
            params=params, shared_param_dicts=None)
    p_x_given_si.init_biases(0.0)

    #################
    # q_zi_given_xi #
    #################
    params = {}
    shared_config = [(x_dim + x_dim), 500, 500]
    top_config = [shared_config[-1], z_dim]
    params['shared_config'] = shared_config
    params['mu_config'] = top_config
    params['sigma_config'] = top_config
    params['activation'] = relu_actfun
    params['init_scale'] = init_scale
    params['vis_drop'] = 0.0
    params['hid_drop'] = 0.0
    params['bias_noise'] = 0.0
    params['input_noise'] = 0.0
    params['build_theano_funcs'] = False
    q_zi_given_xi = InfNet(rng=rng, Xd=x_in_sym, \
            params=params, shared_param_dicts=None)
    q_zi_given_xi.init_biases(0.0)

    ###########################################################
    # Define parameters for the GPSImputer, and initialize it #
    ###########################################################
    print("Building the GPSImputer...")
    gpsi_params = {}
    gpsi_params['x_dim'] = x_dim
    gpsi_params['z_dim'] = z_dim
    gpsi_params['s_dim'] = s_dim
    # switch between direct construction and construction via p_x_given_si
    gpsi_params['use_p_x_given_si'] = False
    gpsi_params['imp_steps'] = imp_steps
    gpsi_params['step_type'] = step_type
    gpsi_params['x_type'] = 'bernoulli'
    gpsi_params['obs_transform'] = 'sigmoid'
    GPSI = GPSImputer(rng=rng,
            x_in=x_in_sym, x_out=x_out_sym, x_mask=x_mask_sym, \
            p_zi_given_xi=p_zi_given_xi, \
            p_sip1_given_zi=p_sip1_given_zi, \
            p_x_given_si=p_x_given_si, \
            q_zi_given_xi=q_zi_given_xi, \
            params=gpsi_params, \
            shared_param_dicts=None)

    ################################################################
    # Apply some updates, to check that they aren't totally broken #
    ################################################################
    log_name = "{}_RESULTS.txt".format(result_tag)
    # NOTE(review): binary mode with str writes is Python 2 style (cPickle is
    # imported above); under Python 3 this open mode would need to be 'w'.
    out_file = open(log_name, 'wb')
    costs = [0. for i in range(10)]
    learn_rate = 0.0002
    momentum = 0.5
    # start past the end of the training set, so the first iteration
    # immediately triggers an "epoch finished" reshuffle below
    batch_idx = np.arange(batch_size) + tr_samples
    for i in range(250000):
        # warm-up scale for learning rate / momentum over the first 5k batches
        scale = min(1.0, ((i+1) / 5000.0))
        lam_scale = 1.0 - min(1.0, ((i+1) / 100000.0)) # decays from 1.0->0.0
        if (((i + 1) % 15000) == 0):
            learn_rate = learn_rate * 0.93
        if (i > 10000):
            momentum = 0.90
        else:
            momentum = 0.75
        # get the indices of training samples for this batch update
        batch_idx += batch_size
        if (np.max(batch_idx) >= tr_samples):
            # we finished an "epoch", so we rejumble the training set
            Xtr = row_shuffle(Xtr)
            batch_idx = np.arange(batch_size)
        # set sgd and objective function hyperparams for this update
        GPSI.set_sgd_params(lr=scale*learn_rate, \
                            mom_1=scale*momentum, mom_2=0.98)
        GPSI.set_train_switch(1.0)
        GPSI.set_lam_nll(lam_nll=1.0)
        GPSI.set_lam_kld(lam_kld_p=0.05, lam_kld_q=0.95, lam_kld_g=(0.1 * lam_scale))
        GPSI.set_lam_l2w(1e-5)
        # perform a minibatch update and record the cost for this batch
        xb = to_fX( Xtr.take(batch_idx, axis=0) )
        xi, xo, xm = construct_masked_data(xb, drop_prob=drop_prob, \
                                           occ_dim=occ_dim, data_mean=data_mean)
        result = GPSI.train_joint(xi, xo, xm, batch_reps)
        # do diagnostics and general training tracking
        costs = [(costs[j] + result[j]) for j in range(len(result)-1)]
        if ((i % 250) == 0):
            # report costs averaged over the last 250 batches, then reset
            costs = [(v / 250.0) for v in costs]
            str1 = "-- batch {0:d} --".format(i)
            str2 = "    joint_cost: {0:.4f}".format(costs[0])
            str3 = "    nll_bound : {0:.4f}".format(costs[1])
            str4 = "    nll_cost  : {0:.4f}".format(costs[2])
            str5 = "    kld_cost  : {0:.4f}".format(costs[3])
            str6 = "    reg_cost  : {0:.4f}".format(costs[4])
            joint_str = "\n".join([str1, str2, str3, str4, str5, str6])
            print(joint_str)
            out_file.write(joint_str+"\n")
            out_file.flush()
            costs = [0.0 for v in costs]
        if ((i % 1000) == 0):
            Xva = row_shuffle(Xva)
            # record an estimate of performance on the test set
            xi, xo, xm = construct_masked_data(Xva[0:5000], drop_prob=drop_prob, \
                                               occ_dim=occ_dim, data_mean=data_mean)
            nll, kld = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10)
            vfe = np.mean(nll) + np.mean(kld)
            str1 = "    va_nll_bound : {}".format(vfe)
            str2 = "    va_nll_term  : {}".format(np.mean(nll))
            str3 = "    va_kld_q2p   : {}".format(np.mean(kld))
            joint_str = "\n".join([str1, str2, str3])
            print(joint_str)
            out_file.write(joint_str+"\n")
            out_file.flush()
        if ((i % 2000) == 0):
            GPSI.save_to_file("{}_PARAMS.pkl".format(result_tag))
            # Get some validation samples for evaluating model performance
            xb = to_fX( Xva[0:100] )
            xi, xo, xm = construct_masked_data(xb, drop_prob=drop_prob, \
                                               occ_dim=occ_dim, data_mean=data_mean)
            # two imputation samples per validation image
            xi = np.repeat(xi, 2, axis=0)
            xo = np.repeat(xo, 2, axis=0)
            xm = np.repeat(xm, 2, axis=0)
            # draw some sample imputations from the model
            samp_count = xi.shape[0]
            _, model_samps = GPSI.sample_imputer(xi, xo, xm, use_guide_policy=False)
            seq_len = len(model_samps)
            # interleave each sample's full imputation trajectory row-by-row
            seq_samps = np.zeros((seq_len*samp_count, model_samps[0].shape[1]))
            idx = 0
            for s1 in range(samp_count):
                for s2 in range(seq_len):
                    seq_samps[idx] = model_samps[s2][s1]
                    idx += 1
            file_name = "{0:s}_samples_ng_b{1:d}.png".format(result_tag, i)
            utils.visualize_samples(seq_samps, file_name, num_rows=20)
            # get visualizations of policy parameters
            # file_name = "{0:s}_gen_step_weights_b{1:d}.png".format(result_tag, i)
            # W = GPSI.gen_step_weights.get_value(borrow=False)
            # utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
            # file_name = "{0:s}_gen_write_gate_weights_b{1:d}.png".format(result_tag, i)
            # W = GPSI.gen_write_gate_weights.get_value(borrow=False)
            # utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
            # file_name = "{0:s}_gen_erase_gate_weights_b{1:d}.png".format(result_tag, i)
            # W = GPSI.gen_erase_gate_weights.get_value(borrow=False)
            # utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
            # file_name = "{0:s}_gen_inf_weights_b{1:d}.png".format(result_tag, i)
            # W = GPSI.gen_inf_weights.get_value(borrow=False).T
            # utils.visualize_samples(W[:,:x_dim], file_name, num_rows=20)
#################################
#################################
## CHECK MNIST IMPUTER RESULTS ##
#################################
#################################
def test_mnist_results(step_type='add',
                       imp_steps=6,
                       occ_dim=15,
                       drop_prob=0.0):
    """
    Load a previously trained GPSImputer and evaluate free-energy bounds on
    the MNIST test set, with and without the guide (inference) policy.

    Parameters mirror test_mnist, and must match the run whose parameters
    are being loaded. Results are appended to "<tag>_FINAL_RESULTS_NEW.txt".
    """
    #########################################
    # Format the result tag more thoroughly #
    #########################################
    dp_int = int(100.0 * drop_prob)
    # NOTE(review): this tag lacks the "RELU_" prefix that test_mnist uses when
    # saving "<tag>_PARAMS.pkl" -- confirm which name the checkpoint was saved
    # under before running evaluation.
    result_tag = "{}GPSI_OD{}_DP{}_IS{}_{}_NA".format(RESULT_PATH, occ_dim, dp_int, imp_steps, step_type)

    ##########################
    # Get some training data #
    ##########################
    rng = np.random.RandomState(1234)
    Xtr, Xva, Xte = load_binarized_mnist(data_path='./data/')
    # evaluate on the test set (train+validation were used for training)
    Xtr = np.vstack((Xtr, Xva))
    Xva = Xte
    #del Xte
    tr_samples = Xtr.shape[0]
    va_samples = Xva.shape[0]

    ##########################
    # Get some training data #
    ##########################
    # rng = np.random.RandomState(1234)
    # dataset = 'data/mnist.pkl.gz'
    # datasets = load_udm(dataset, as_shared=False, zero_mean=False)
    # Xtr = datasets[0][0]
    # Xva = datasets[1][0]
    # Xte = datasets[2][0]
    # # Merge validation set and training set, and test on test set.
    # #Xtr = np.concatenate((Xtr, Xva), axis=0)
    # #Xva = Xte
    # Xtr = to_fX(shift_and_scale_into_01(Xtr))
    # Xva = to_fX(shift_and_scale_into_01(Xva))
    # tr_samples = Xtr.shape[0]
    # va_samples = Xva.shape[0]

    batch_size = 250
    batch_reps = 1
    # mean pixel value over the training set, used to fill masked-out pixels
    all_pix_mean = np.mean(np.mean(Xtr, axis=1))
    data_mean = to_fX( all_pix_mean * np.ones((Xtr.shape[1],)) )

    # Load parameters from a previously trained model
    print("Testing model load from file...")
    GPSI = load_gpsimputer_from_file(f_name="{}_PARAMS.pkl".format(result_tag), \
                                     rng=rng)

    ################################################################
    # Apply some updates, to check that they aren't totally broken #
    ################################################################
    log_name = "{}_FINAL_RESULTS_NEW.txt".format(result_tag)
    # NOTE(review): binary mode with str writes is Python 2 style; under
    # Python 3 this open mode would need to be 'w'.
    out_file = open(log_name, 'wb')
    Xva = row_shuffle(Xva)
    # record an estimate of performance on the test set, in two halves
    # (to limit memory use), with the guide policy enabled
    str0 = "GUIDED SAMPLE BOUND:"
    print(str0)
    xi, xo, xm = construct_masked_data(Xva[:5000], drop_prob=drop_prob, \
                                       occ_dim=occ_dim, data_mean=data_mean)
    nll_0, kld_0 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
                                         use_guide_policy=True)
    xi, xo, xm = construct_masked_data(Xva[5000:], drop_prob=drop_prob, \
                                       occ_dim=occ_dim, data_mean=data_mean)
    nll_1, kld_1 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
                                         use_guide_policy=True)
    nll = np.concatenate((nll_0, nll_1))
    kld = np.concatenate((kld_0, kld_1))
    vfe = np.mean(nll) + np.mean(kld)
    str1 = "    va_nll_bound : {}".format(vfe)
    str2 = "    va_nll_term  : {}".format(np.mean(nll))
    str3 = "    va_kld_q2p   : {}".format(np.mean(kld))
    joint_str = "\n".join([str0, str1, str2, str3])
    print(joint_str)
    out_file.write(joint_str+"\n")
    out_file.flush()
    # record an estimate of performance on the test set, with the guide
    # policy disabled (samples drawn from the generative policy only)
    str0 = "UNGUIDED SAMPLE BOUND:"
    print(str0)
    xi, xo, xm = construct_masked_data(Xva[:5000], drop_prob=drop_prob, \
                                       occ_dim=occ_dim, data_mean=data_mean)
    nll_0, kld_0 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
                                         use_guide_policy=False)
    xi, xo, xm = construct_masked_data(Xva[5000:], drop_prob=drop_prob, \
                                       occ_dim=occ_dim, data_mean=data_mean)
    nll_1, kld_1 = GPSI.compute_fe_terms(xi, xo, xm, sample_count=10, \
                                         use_guide_policy=False)
    nll = np.concatenate((nll_0, nll_1))
    kld = np.concatenate((kld_0, kld_1))
    # NOTE(review): unlike the guided case, the bound here is reported as the
    # mean NLL alone (no KLD term) -- presumably intentional for unguided
    # sampling, but worth confirming against GPSImputer.compute_fe_terms.
    str1 = "    va_nll_bound : {}".format(np.mean(nll))
    str2 = "    va_nll_term  : {}".format(np.mean(nll))
    str3 = "    va_kld_q2p   : {}".format(np.mean(kld))
    joint_str = "\n".join([str0, str1, str2, str3])
    print(joint_str)
    out_file.write(joint_str+"\n")
    out_file.flush()
if __name__=="__main__":
    #########
    # MNIST #
    #########
    # TRAINING
    #test_mnist(step_type='add', occ_dim=14, drop_prob=0.0)
    #test_mnist(step_type='add', occ_dim=16, drop_prob=0.0)
    #test_mnist(step_type='add', occ_dim=0, drop_prob=0.6)
    #test_mnist(step_type='add', occ_dim=0, drop_prob=0.8)
    #test_mnist(step_type='jump', occ_dim=14, drop_prob=0.0)
    #test_mnist(step_type='jump', occ_dim=16, drop_prob=0.0)
    #test_mnist(step_type='jump', occ_dim=0, drop_prob=0.6)
    #test_mnist(step_type='jump', occ_dim=0, drop_prob=0.8)
    #test_mnist(step_type='add', imp_steps=1, occ_dim=0, drop_prob=0.9)
    #test_mnist(step_type='add', imp_steps=2, occ_dim=0, drop_prob=0.9)
    # currently active configuration: 5 imputation steps, 90% pixel dropout
    test_mnist(step_type='add', imp_steps=5, occ_dim=0, drop_prob=0.9)
    #test_mnist(step_type='add', imp_steps=10, occ_dim=0, drop_prob=0.9)
    #test_mnist(step_type='add', imp_steps=15, occ_dim=0, drop_prob=0.9)

    # RESULTS
    # test_mnist_results(step_type='add', occ_dim=14, drop_prob=0.0)
    # test_mnist_results(step_type='add', occ_dim=16, drop_prob=0.0)
    # test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.6)
    # test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.7)
    # test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.8)
    # test_mnist_results(step_type='add', occ_dim=0, drop_prob=0.9)
    # test_mnist_results(step_type='jump', occ_dim=14, drop_prob=0.0)
    # test_mnist_results(step_type='jump', occ_dim=16, drop_prob=0.0)
    # test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.6)
    # test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.7)
    # test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.8)
    # test_mnist_results(step_type='jump', occ_dim=0, drop_prob=0.9)
    #test_mnist_results(step_type='add', imp_steps=1, occ_dim=0, drop_prob=0.9)
    #test_mnist_results(step_type='add', imp_steps=2, occ_dim=0, drop_prob=0.9)
    test_mnist_results(step_type='add', imp_steps=5, occ_dim=0, drop_prob=0.9)
    #test_mnist_results(step_type='add', imp_steps=10, occ_dim=0, drop_prob=0.9)
    #test_mnist_results(step_type='add', imp_steps=15, occ_dim=0, drop_prob=0.9)
| capybaralet/Sequential-Generation | TestImpGPSI_MNIST.py | Python | mit | 18,331 |
# =============================================================================
# Author:       Teerapat Jenrungrot - https://github.com/mjenrungrot/
# FileName:     12626.py
# Description:  UVa Online Judge - 12626
# =============================================================================
from collections import Counter

# For each test case, count how many times the word "MARGARITA" can be
# assembled from the letters of the input line. Each assembly needs
# 1xM, 3xA, 2xR, 1xG, 1xI, 1xT, so the answer is the minimum over the
# per-letter budgets (Counter returns 0 for absent letters).
T = int(input())
for i in range(T):
    line = input()
    counter = Counter(line)
    ans = min(
        counter["M"],
        counter["A"] // 3,
        counter["R"] // 2,
        counter["G"],
        counter["I"],
        counter["T"],
    )
    print(ans)
| mjenrungrot/competitive_programming | UVa Online Judge/v126/12626.py | Python | mit | 604 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from .stretch import *
from .interval import *
from .transform import *
from .ui import *
from .mpl_style import *
| piotroxp/scibibscan | scib/lib/python3.5/site-packages/astropy/visualization/__init__.py | Python | mit | 180 |
# -*- coding: utf-8 -*-
#
# EnergiScore Web Documentation documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 9 11:19:34 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# NOTE: os and sys are only needed for the optional sys.path tweak below
# (currently commented out).
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'EnergiStream API Client'
copyright = u'2015, MelRok LLC'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.
# NOTE: imported here (mid-file) so the theme's install path can be queried
# for html_theme_path just below.
import sphinx_rtd_theme
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'EnergiScoreWebDocumentationdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'EnergiScoreWebDocumentation.tex', u'EnergiScore Web Documentation Documentation',
u'Harrison Fross', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'energiscorewebdocumentation', u'EnergiScore Web Documentation Documentation',
[u'Harrison Fross'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'EnergiScoreWebDocumentation', u'EnergiScore Web Documentation Documentation',
u'Harrison Fross', 'EnergiScoreWebDocumentation', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| Melrok/energistream-py | docs/conf.py | Python | mit | 8,708 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class WebApplicationFirewallPoliciesOperations:
"""WebApplicationFirewallPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.WebApplicationFirewallPolicyListResult"]:
"""Lists all of the protection policies within a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either WebApplicationFirewallPolicyListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_11_01.models.WebApplicationFirewallPolicyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('WebApplicationFirewallPolicyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies'} # type: ignore
def list_all(
    self,
    **kwargs: Any
) -> AsyncIterable["_models.WebApplicationFirewallPolicyListResult"]:
    """Gets all the WAF policies in a subscription.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either WebApplicationFirewallPolicyListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_11_01.models.WebApplicationFirewallPolicyListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.WebApplicationFirewallPolicyListResult"]
    # Map auth/not-found/conflict status codes to azure-core exceptions;
    # callers may extend or override this via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-11-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers (shared by first and subsequent pages)
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # First page: build the URL from the operation's metadata template.
            url = self.list_all.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # Subsequent pages: the service returns a complete nextLink URL,
            # so no extra query parameters are appended.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation token, page items).
        deserialized = self._deserialize('WebApplicationFirewallPolicyListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page; only 200 is a success for this operation.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies'}  # type: ignore
async def get(
    self,
    resource_group_name: str,
    policy_name: str,
    **kwargs: Any
) -> "_models.WebApplicationFirewallPolicy":
    """Retrieve protection policy with specified name within a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param policy_name: The name of the policy.
    :type policy_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: WebApplicationFirewallPolicy, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_11_01.models.WebApplicationFirewallPolicy
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.WebApplicationFirewallPolicy"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    api_version = "2020-11-01"
    accept = "application/json"

    # Resolve the templated operation URL.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self.get.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'}  # type: ignore
async def create_or_update(
    self,
    resource_group_name: str,
    policy_name: str,
    parameters: "_models.WebApplicationFirewallPolicy",
    **kwargs: Any
) -> "_models.WebApplicationFirewallPolicy":
    """Creates or update policy with specified rule set name within a resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param policy_name: The name of the policy.
    :type policy_name: str
    :param parameters: Policy to be created.
    :type parameters: ~azure.mgmt.network.v2020_11_01.models.WebApplicationFirewallPolicy
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: WebApplicationFirewallPolicy, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2020_11_01.models.WebApplicationFirewallPolicy
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.WebApplicationFirewallPolicy"]
    # Map auth/not-found/conflict status codes to azure-core exceptions;
    # overridable via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-11-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.create_or_update.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the policy model as the PUT request body.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'WebApplicationFirewallPolicy')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both accepted status codes (200 OK, 201 Created) carry the same
    # WebApplicationFirewallPolicy payload shape.
    if response.status_code == 200:
        deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response)
    if response.status_code == 201:
        deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'}  # type: ignore
async def _delete_initial(
    self,
    resource_group_name: str,
    policy_name: str,
    **kwargs: Any
) -> None:
    """Send the initial DELETE request of the long-running delete operation."""
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    api_version = "2020-11-01"
    accept = "application/json"

    # Resolve the templated operation URL.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._delete_initial.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string and headers.
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200/202/204 are the accepted responses for this delete operation.
    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'}  # type: ignore
async def begin_delete(
    self,
    resource_group_name: str,
    policy_name: str,
    **kwargs: Any
) -> AsyncLROPoller[None]:
    """Deletes Policy.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param policy_name: The name of the policy.
    :type policy_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved poller state: issue the initial DELETE now. The
        # cls=lambda hands the raw pipeline response back for the poller.
        raw_result = await self._delete_initial(
            resource_group_name=resource_group_name,
            policy_name=policy_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop request-scoped kwargs so they are not forwarded to the polling method.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Delete returns no body; only invoke the optional deserializer hook.
        if cls:
            return cls(pipeline_response, None, {})

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # lro_options 'final-state-via: location' configures the ARM poller's
    # terminal-state resolution for this operation.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_11_01/aio/operations/_web_application_firewall_policies_operations.py | Python | mit | 20,812 |
"""
Project: flask-rest
Author: Saj Arora
Description: All of the rest methods...
"""
class SageMethod:
    """Symbolic names for the HTTP verbs handled by the REST layer."""

    GET = 'get'
    POST = 'post'
    PUT = 'put'
    DELETE = 'delete'

    # All supported methods (order preserved from the original definition).
    ALL = [GET, POST, DELETE, PUT]
ALL = [GET, POST, DELETE, PUT] | aroraenterprise/projecteos | backend/api/v1/fundamentals/sage_methods.py | Python | mit | 212 |
# pylint: disable=unused-import, unused-variable, missing-docstring
def _readline():
    """Enable tab completion and a persistent history file in the
    interactive interpreter, when the readline module is available."""
    try:
        import readline
    except ImportError:
        print("Module readline not available.")
    else:
        import rlcompleter
        readline.parse_and_bind("tab: complete")
        import os
        # History file lives at $HOME/python/.history -- assumes HOME is set
        # and the directory exists for the atexit write below.
        histfile = os.path.join(os.environ["HOME"], 'python', '.history')
        try:
            readline.read_history_file(histfile)
        except IOError:
            # No history yet (or unreadable): start with an empty history.
            pass
        import atexit
        # Persist the session's history when the interpreter exits.
        atexit.register(readline.write_history_file, histfile)
        del os, histfile
# Run the readline setup once, then drop the helper from the namespace.
_readline()
del _readline

import sys
# Colorized (bold yellow) interactive prompts. \001/\002 bracket the
# non-printing escape sequences so readline measures the prompt width
# correctly.
sys.ps1 = "\001\033[01;33m\002>>>\001\033[00m\002 "
sys.ps2 = "\001\033[01;33m\002...\001\033[00m\002 "
| rwstauner/run_control | python/startup.py | Python | mit | 665 |
# -*- coding: utf-8 -*-
"""
The initialization file for the Pywikibot framework.
"""
#
# (C) Pywikipedia bot team, 2008
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
import datetime
import difflib
import logging
import math
import re
import sys
import threading
from Queue import Queue
# Use pywikibot. prefix for all in-package imports; this is to prevent
# confusion with similarly-named modules in version 1 framework, for users
# who want to continue using both
from pywikibot import config2 as config
from pywikibot.bot import *
from pywikibot.exceptions import *
from pywikibot.textlib import *
from pywikibot.i18n import translate
class Timestamp(datetime.datetime):
    """Class for handling Mediawiki timestamps.

    This inherits from datetime.datetime, so it can use all of the methods
    and operations of a datetime object.  To ensure that the results of any
    operation are also a Timestamp object, be sure to use only Timestamp
    objects (and datetime.timedeltas) in any operation.

    Use Timestamp.fromISOformat() and Timestamp.fromtimestampformat() to
    create Timestamp objects from Mediawiki string formats.

    Use Site.getcurrenttime() for the current time; this is more reliable
    than using Timestamp.utcnow().
    """
    mediawikiTSFormat = "%Y%m%d%H%M%S"
    ISO8601Format = "%Y-%m-%dT%H:%M:%SZ"

    @classmethod
    def fromISOformat(cls, ts):
        """Convert an ISO 8601 timestamp to a Timestamp object."""
        return cls.strptime(ts, cls.ISO8601Format)

    @classmethod
    def fromtimestampformat(cls, ts):
        """Convert the internal MediaWiki timestamp format to a Timestamp object."""
        return cls.strptime(ts, cls.mediawikiTSFormat)

    def toISOformat(self):
        """Converts the Timestamp object to an ISO 8601 timestamp"""
        return self.strftime(self.ISO8601Format)

    def totimestampformat(self):
        """Converts the Timestamp object to the internal MediaWiki timestamp format."""
        return self.strftime(self.mediawikiTSFormat)

    def __str__(self):
        """Return a string format recognized by the API"""
        return self.toISOformat()

    @classmethod
    def _wrap(cls, newdt):
        """Coerce a plain datetime result back into a Timestamp.

        Non-datetime values (e.g. NotImplemented, or a timedelta from
        subtracting two datetimes) are passed through unchanged.
        """
        if isinstance(newdt, datetime.datetime):
            return cls(newdt.year, newdt.month, newdt.day, newdt.hour,
                       newdt.minute, newdt.second, newdt.microsecond,
                       newdt.tzinfo)
        return newdt

    def __add__(self, other):
        """Add a timedelta, returning a Timestamp instead of a datetime."""
        return self._wrap(datetime.datetime.__add__(self, other))

    def __sub__(self, other):
        """Subtract, returning a Timestamp when the result is a datetime."""
        return self._wrap(datetime.datetime.__sub__(self, other))
class Coordinate(object):
    """
    Class for handling and storing Coordinates.

    For now its just being used for DataSite, but
    in the future we can use it for the GeoData extension.
    """
    def __init__(self, lat, lon, alt=None, precision=None, globe='earth',
                 typ="", name="", dim=None, site=None):
        """
        @param lat: Latitude
        @type lat: float
        @param lon: Longitude
        @type lon: float
        @param alt: Altitude? TODO FIXME
        @param precision: precision
        @type precision: float
        @param globe: Which globe the point is on
        @type globe: str
        @param typ: The type of coordinate point
        @type typ: str
        @param name: The name
        @type name: str
        @param dim: Dimension (in meters)
        @type dim: int
        @param site: data repository to use; defaults to the default
            Site's data repository
        """
        self.lat = lat
        self.lon = lon
        self.alt = alt
        self._precision = precision
        # Globe names are stored lower-case; the membership check below
        # relies on this.
        self.globe = globe.lower()
        self.type = typ
        self.name = name
        self._dim = dim
        if not site:
            self.site = Site().data_repository()
        else:
            self.site = site
        # Copied from [[mw:Extension:GeoData]]
        if not self.globe in ['earth', 'mercury', 'venus', 'moon',
                              'mars', 'phobos', 'deimos', 'ganymede',
                              'callisto', 'io', 'europa', 'mimas',
                              'enceladus', 'tethys', 'dione',
                              'rhea', 'titan', 'hyperion', 'iapetus',
                              'phoebe', 'miranda', 'ariel', 'umbriel',
                              'titania', 'oberon', 'triton', 'pluto']:
            raise ValueError(u"%s is not a supported globe." % self.globe)

    def __repr__(self):
        # Only mention the globe when it differs from the default 'earth'.
        string = 'Coordinate(%s, %s' % (self.lat, self.lon)
        if self.globe != 'earth':
            string += ', globe="%s"' % self.globe
        string += ')'
        return string

    def toWikibase(self):
        """
        Function which converts the data to a JSON object
        for the Wikibase API.

        FIXME Should this be in the DataSite object?
        """
        # Only globes known to the data repository can be represented.
        if not self.globe in self.site.globes():
            raise NotImplementedError(u"%s is not supported in Wikibase yet." % self.globe)
        return {'latitude': self.lat,
                'longitude': self.lon,
                'altitude': self.alt,
                'globe': self.site.globes()[self.globe],
                'precision': self.precision,
                }

    @staticmethod
    def fromWikibase(data, site):
        """Constructor to create an object from Wikibase's JSON output"""
        # Invert site.globes() so payload globe values map back to the
        # local globe names used by this class.
        globes = {}
        for k in site.globes():
            globes[site.globes()[k]] = k
        globekey = data['globe']
        if globekey:
            # FIXME: Should probably use get() with some error handling when it's an unknown globe
            globe = globes[data['globe']]
        else:
            # Default to earth or should we use None here?
            globe = 'earth'
        return Coordinate(data['latitude'], data['longitude'],
                          data['altitude'], data['precision'],
                          globe, site=site)

    @property
    def precision(self):
        """
        The biggest error (in degrees) will be given by the longitudinal error - the same error in meters becomes larger
        (in degrees) further up north. We can thus ignore the latitudinal error.

        The longitudinal can be derived as follows:

        In small angle approximation (and thus in radians):

        Δλ ≈ Δpos / r_φ, where r_φ is the radius of earth at the given latitude. Δλ is the error in longitude.

        r_φ = r cos φ, where r is the radius of earth, φ the latitude

        Therefore: precision = math.degrees( self._dim / ( radius * math.cos( math.radians( self.lat ) ) ) )
        """
        if not self._precision:
            # NOTE(review): assumes self._dim was supplied when no explicit
            # precision was given -- a None dim raises TypeError here.
            radius = 6378137  # TODO: Support other globes
            self._precision = math.degrees(self._dim / (radius * math.cos(math.radians(self.lat))))
        return self._precision

    def precisionToDim(self):
        """Convert precision from Wikibase to GeoData's dim"""
        raise NotImplementedError
def deprecated(instead=None):
    """Decorator to output a method deprecation warning.

    @param instead: if provided, will be used to specify the replacement
    @type instead: string
    """
    def decorator(method):
        def wrapper(*args, **kwargs):
            # method.__name__ works on both Python 2 and 3; the previous
            # method.func_name attribute is Python-2-only.
            funcname = method.__name__
            classname = args[0].__class__.__name__
            if instead:
                warning(u"%s.%s is DEPRECATED, use %s instead."
                        % (classname, funcname, instead))
            else:
                warning(u"%s.%s is DEPRECATED." % (classname, funcname))
            return method(*args, **kwargs)
        wrapper.__name__ = method.__name__
        # Also preserve the docstring (previously dropped), matching what
        # deprecate_arg() below does.
        wrapper.__doc__ = method.__doc__
        return wrapper
    return decorator
def deprecate_arg(old_arg, new_arg):
    """Decorator to declare old_arg deprecated and replace it with new_arg.

    If new_arg is falsy the old argument is simply dropped with a debug
    message; otherwise its value is copied to new_arg (unless the caller
    already supplied new_arg, which wins and triggers a warning).
    """
    _logger = ""

    def decorator(method):
        def wrapper(*__args, **__kw):
            meth_name = method.__name__
            if old_arg in __kw:
                if new_arg:
                    if new_arg in __kw:
                        # Caller passed both: keep new_arg, warn about the clash.
                        pywikibot.warning(
                            u"%(new_arg)s argument of %(meth_name)s replaces %(old_arg)s; cannot use both."
                            % locals())
                    else:
                        pywikibot.warning(
                            u"%(old_arg)s argument of %(meth_name)s is deprecated; use %(new_arg)s instead."
                            % locals())
                        __kw[new_arg] = __kw[old_arg]
                else:
                    # No replacement exists: the argument is silently ignored
                    # (debug-level only).
                    pywikibot.debug(
                        u"%(old_arg)s argument of %(meth_name)s is deprecated."
                        % locals(),
                        _logger)
                # The deprecated keyword is never forwarded to the method.
                del __kw[old_arg]
            return method(*__args, **__kw)
        wrapper.__doc__ = method.__doc__
        wrapper.__name__ = method.__name__
        return wrapper
    return decorator
# Cache of Site instances, keyed by "family:code:username" (see Site()).
_sites = {}


def Site(code=None, fam=None, user=None, sysop=None, interface=None):
    """Return the specified Site object.

    Returns a cached object if possible, otherwise instantiates a new one.

    @param code: language code
    @type code: string
    @param fam: family name or object
    @type fam: string or Family
    @param user: bot user name to use on this site
    @type user: unicode
    @param sysop: sysop user name to use on this site
    @type sysop: unicode
    @param interface: name of the Site class (looked up inside
        pywikibot.site); defaults to config.site_interface
    @type interface: string
    """
    _logger = "wiki"
    # Fall back to configured defaults for any argument not supplied.
    if code is None:
        code = config.mylang
    if fam is None:
        fam = config.family
    if user is None:
        try:
            user = config.usernames[fam][code]
        except KeyError:
            user = None
    if sysop is None:
        try:
            sysop = config.sysopnames[fam][code]
        except KeyError:
            sysop = None
    if interface is None:
        interface = config.site_interface
    try:
        # Resolve the requested Site implementation by name from
        # the pywikibot.site module.
        tmp = __import__('pywikibot.site', fromlist=[interface])
        __Site = getattr(tmp, interface)
    except ImportError:
        raise ValueError("Invalid interface name '%(interface)s'" % locals())
    key = '%s:%s:%s' % (fam, code, user)
    # Re-instantiate when missing or cached under a different interface class.
    if not key in _sites or not isinstance(_sites[key], __Site):
        _sites[key] = __Site(code=code, fam=fam, user=user, sysop=sysop)
        pywikibot.debug(u"Instantiating Site object '%(site)s'"
                        % {'site': _sites[key]}, _logger)
    return _sites[key]

getSite = Site  # alias for backwards-compatibility
from page import Page, ImagePage, Category, Link, User, ItemPage, PropertyPage, Claim
from page import html2unicode, url2unicode
# Matches an internal wiki link "[[title|label]]"; the named 'title' group
# excludes characters invalid in titles (] | [ < > { }), the label is optional.
link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]')
def setAction(s):
    """Set a summary to use for changed page submissions.

    Stores s as config.default_edit_summary.
    """
    config.default_edit_summary = s
def showDiff(oldtext, newtext):
    """
    Output a string showing the differences between oldtext and newtext.

    The differences are highlighted (only on compatible systems) to show which
    changes were made.
    """
    # This is probably not portable to non-terminal interfaces....
    # For information on difflib, see http://pydoc.org/2.3/difflib.html
    color = {
        '+': 'lightgreen',
        '-': 'lightred',
    }
    diff = u''
    # colors is maintained in parallel with diff: one entry per character,
    # naming the color for that character (None = default).
    colors = []
    # This will store the last line beginning with + or -.
    lastline = None
    # For testing purposes only: show original, uncolored diff
    # for line in difflib.ndiff(oldtext.splitlines(), newtext.splitlines()):
    #     print line
    for line in difflib.ndiff(oldtext.splitlines(), newtext.splitlines()):
        if line.startswith('?'):
            # A '?' guide line marks which characters of the preceding
            # +/- line changed; colorize exactly those positions.
            # initialize color vector with None, which means default color
            lastcolors = [None for c in lastline]
            # colorize the + or - sign
            lastcolors[0] = color[lastline[0]]
            # colorize changed parts in red or green
            for i in range(min(len(line), len(lastline))):
                if line[i] != ' ':
                    lastcolors[i] = color[lastline[0]]
            diff += lastline + '\n'
            # append one None (default color) for the newline character
            colors += lastcolors + [None]
        elif lastline:
            # Previous +/- line had no '?' guide: emit it with only the
            # leading sign colorized.
            diff += lastline + '\n'
            # colorize the + or - sign only
            lastcolors = [None for c in lastline]
            lastcolors[0] = color[lastline[0]]
            colors += lastcolors + [None]
        lastline = None
        if line[0] in ('+', '-'):
            lastline = line
    # there might be one + or - line left that wasn't followed by a ? line.
    if lastline:
        diff += lastline + '\n'
        # colorize the + or - sign only
        lastcolors = [None for c in lastline]
        lastcolors[0] = color[lastline[0]]
        colors += lastcolors + [None]

    # Render the diff, emitting \03{color} markers whenever the color changes.
    result = u''
    lastcolor = None
    for i in range(len(diff)):
        if colors[i] != lastcolor:
            if lastcolor is None:
                result += '\03{%s}' % colors[i]
            else:
                result += '\03{default}'
            lastcolor = colors[i]
        result += diff[i]
    output(result)
# Throttle and thread handling
# Guard so the queue-shutdown sequence in stopme() runs only once.
stopped = False


def stopme():
    """Drop this process from the throttle log, after pending threads finish.

    Can be called manually if desired, but if not, will be called automatically
    at Python exit.
    """
    global stopped
    _logger = "wiki"
    if not stopped:
        pywikibot.debug(u"stopme() called", _logger)

        def remaining():
            # Returns (pages still queued, estimated time as a timedelta).
            import datetime
            remainingPages = page_put_queue.qsize() - 1
            # -1 because we added a None element to stop the queue
            remainingSeconds = datetime.timedelta(
                seconds=(remainingPages * config.put_throttle))
            return (remainingPages, remainingSeconds)

        # The (None, [], {}) sentinel tells async_manager to shut down.
        page_put_queue.put((None, [], {}))
        stopped = True
        if page_put_queue.qsize() > 1:
            output(u'Waiting for %i pages to be put. Estimated time remaining: %s'
                   % remaining())
        # Wait for the put-thread to drain; Ctrl-C offers an early exit.
        while(_putthread.isAlive()):
            try:
                _putthread.join(1)
            except KeyboardInterrupt:
                answer = inputChoice(u"""\
There are %i pages remaining in the queue. Estimated time remaining: %s
Really exit?"""
                                     % remaining(),
                                     ['yes', 'no'], ['y', 'N'], 'N')
                if answer == 'y':
                    return
    # only need one drop() call because all throttles use the same global pid
    try:
        _sites.values()[0].throttle.drop()
        pywikibot.log(u"Dropped throttle(s).")
    except IndexError:
        # No Site objects were ever instantiated; nothing to drop.
        pass
import atexit
# Ensure pending page saves are flushed and the throttle entry is dropped
# when the interpreter exits.
atexit.register(stopme)
# Create a separate thread for asynchronous page saves (and other requests)
def async_manager():
    """Daemon; take requests from the queue and execute them in background."""
    while True:
        (request, args, kwargs) = page_put_queue.get()
        # A None request is the shutdown sentinel pushed by stopme().
        if request is None:
            break
        request(*args, **kwargs)
def async_request(request, *args, **kwargs):
    """Put a request on the queue, and start the daemon if necessary."""
    if not _putthread.isAlive():
        try:
            # Serialize thread startup on the queue's own mutex so only one
            # caller attempts to start the daemon.
            page_put_queue.mutex.acquire()
            try:
                _putthread.start()
            except (AssertionError, RuntimeError):
                # Thread was already started by a concurrent caller.
                pass
        finally:
            page_put_queue.mutex.release()
    page_put_queue.put((request, args, kwargs))
# queue to hold pending requests
page_put_queue = Queue(config.max_queue_size)
# set up the background thread (started lazily by async_request)
_putthread = threading.Thread(target=async_manager)
# identification for debugging purposes
_putthread.setName('Put-Thread')
# daemon: don't block interpreter exit; stopme() handles an orderly drain.
_putthread.setDaemon(True)
| legoktm/pywikipedia-rewrite | pywikibot/__init__.py | Python | mit | 15,844 |
import os
from distutils.sysconfig import get_python_inc
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
except ImportError:
print("Please install cython and try again.")
raise SystemExit
# Pure-Python (sub)packages shipped in the distribution; the compiled Cython
# extension modules are declared separately in setup_package().
PACKAGES = [
    'pdsa',
    'pdsa.cardinality',
    'pdsa.frequency',
    'pdsa.helpers',
    'pdsa.helpers.hashing',
    'pdsa.helpers.storage',
    'pdsa.membership',
    'pdsa.rank',
]
def _cpp_extension(name, pyx_source, extra_sources=(), src_dir=None):
    """Return a C++ Cython Extension for one module.

    :param name: dotted module name, e.g. "pdsa.rank.qdigest".
    :param pyx_source: path to the module's .pyx file.
    :param extra_sources: file names of hand-written C++ sources in src_dir.
    :param src_dir: directory with extra C++ sources/headers; added to the
        include path when given.
    """
    sources = [pyx_source]
    include_dirs = [get_python_inc(plat_specific=True)]
    if src_dir is not None:
        sources.extend(os.path.join(src_dir, s) for s in extra_sources)
        include_dirs.append(src_dir)
    return Extension(
        name,
        language='c++',
        sources=sources,
        include_dirs=include_dirs,
    )


def setup_package():
    """Load package metadata and run setup() with all Cython extensions.

    The extension list was previously written out long-hand; each entry is
    now produced by _cpp_extension(), which keeps the include/source
    boilerplate in one place while building identical Extension specs.
    """
    root = os.path.abspath(os.path.dirname(__file__))

    # Package metadata is kept in pdsa/__about__.py and executed into a dict.
    with open(os.path.join(root, 'pdsa', '__about__.py')) as f:
        about = {}
        exec(f.read(), about)

    with open(os.path.join(root, 'README.rst')) as f:
        readme = f.read()

    # Directories holding hand-written C++ sources for the helper modules.
    hashing_src = os.path.join('pdsa/helpers/hashing', 'src')
    storage_src = os.path.join('pdsa/helpers/storage', 'src')

    extensions = [
        _cpp_extension("pdsa.membership.bloom_filter",
                       'pdsa/membership/bloom_filter.pyx'),
        _cpp_extension("pdsa.membership.counting_bloom_filter",
                       'pdsa/membership/counting_bloom_filter.pyx'),
        _cpp_extension("pdsa.cardinality.linear_counter",
                       'pdsa/cardinality/linear_counter.pyx'),
        _cpp_extension("pdsa.cardinality.probabilistic_counter",
                       'pdsa/cardinality/probabilistic_counter.pyx'),
        _cpp_extension("pdsa.cardinality.hyperloglog",
                       'pdsa/cardinality/hyperloglog.pyx'),
        _cpp_extension("pdsa.helpers.hashing.mmh",
                       'pdsa/helpers/hashing/mmh.pyx',
                       extra_sources=('MurmurHash3.cpp',),
                       src_dir=hashing_src),
        _cpp_extension("pdsa.helpers.storage.bitvector",
                       'pdsa/helpers/storage/bitvector.pyx',
                       extra_sources=('BitField.cpp',),
                       src_dir=storage_src),
        _cpp_extension("pdsa.helpers.storage.bitvector_counter",
                       'pdsa/helpers/storage/bitvector_counter.pyx',
                       extra_sources=('BitCounter.cpp',),
                       src_dir=storage_src),
        _cpp_extension("pdsa.frequency.count_min_sketch",
                       'pdsa/frequency/count_min_sketch.pyx'),
        _cpp_extension("pdsa.frequency.count_sketch",
                       'pdsa/frequency/count_sketch.pyx'),
        _cpp_extension("pdsa.rank.random_sampling",
                       'pdsa/rank/random_sampling.pyx'),
        _cpp_extension("pdsa.rank.qdigest",
                       'pdsa/rank/qdigest.pyx'),
    ]

    setup(
        name="pdsa",
        packages=PACKAGES,
        package_data={'': ['*.pyx', '*.pxd', '*.cpp', '*.h']},
        description=about['__summary__'],
        long_description=readme,
        keywords=about['__keywords__'],
        author=about['__author__'],
        author_email=about['__email__'],
        version=about['__version__'],
        url=about['__uri__'],
        license=about['__license__'],
        ext_modules=cythonize(
            extensions,
            compiler_directives={"language_level": "3str"}
        ),
        classifiers=[
            'Environment :: Console',
            'Intended Audience :: Developers',
            'Intended Audience :: Science/Research',
            'License :: OSI Approved :: MIT License',
            'Operating System :: POSIX :: Linux',
            'Programming Language :: Cython',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.6',
            'Programming Language :: Python :: 3.7',
            'Topic :: Scientific/Engineering'
        ],
        python_requires='>=3.5',
        install_requires=["cython>=0.28"]
    )
# Script entry point: build/install the package when run directly.
if __name__ == '__main__':
    setup_package()
| gakhov/pdsa | setup.py | Python | mit | 5,837 |
# coding: utf-8
example_traceback = """*** HARAKIRI ON WORKER 16 (pid: 2259, try: 1) ***
*** uWSGI Python tracebacker output ***
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 504 function = __bootstrap line = self.__bootstrap_inner()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 532 function = __bootstrap_inner line = self.run()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 484 function = run line = self.__target(*self.__args, **self.__kwargs)
thread_id = Thread-2 filename = /home/project/envs/project_prod/lib/python2.6/site-packages/raven/transport/threaded.py lineno = 79 function = _target line = record = self._queue.get()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/Queue.py lineno = 168 function = get line = self.not_empty.wait()
thread_id = Thread-2 filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 239 function = wait line = waiter.acquire()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 504 function = __bootstrap line = self.__bootstrap_inner()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 532 function = __bootstrap_inner line = self.run()
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 484 function = run line = self.__target(*self.__args, **self.__kwargs)
thread_id = NR-Harvest-Thread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/core/agent.py lineno = 511 function = _harvest_loop line = self._harvest_shutdown.wait(delay)
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 395 function = wait line = self.__cond.wait(timeout)
thread_id = NR-Harvest-Thread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/threading.py lineno = 258 function = wait line = _sleep(delay)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/web_transaction.py lineno = 828 function = __call__ line = result = application(environ, _start_response)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/function_trace.py lineno = 93 function = literal_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/web_transaction.py lineno = 717 function = __call__ line = return self._nr_next_object(environ, start_response)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/core/handlers/wsgi.py lineno = 241 function = __call__ line = response = self.get_response(request)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/core/handlers/base.py lineno = 111 function = get_response line = response = callback(request, *callback_args, **callback_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/framework_django.py lineno = 475 function = wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/django/views/decorators/csrf.py lineno = 77 function = wrapped_view line = return view_func(*args, **kwargs)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/views.py lineno = 74 function = complete line = return backend.complete(request, response)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/vkontakte.py lineno = 59 function = complete line = redirect = super(VkontakteBackend, self).complete(request, response)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/__init__.py lineno = 175 function = complete line = self.fill_extra_fields(request, extra)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/backends/__init__.py lineno = 114 function = fill_extra_fields line = form = str_to_class(settings.EXTRA_FORM)(data)
thread_id = MainThread filename = /home/project/src/project_prod/contrib/netauth/forms.py lineno = 43 function = __init__ line = files = {'avatar': ContentFile(requests.get(url).content, name=str(uuid.uuid4()))}
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/api.py lineno = 55 function = get line = return request('get', url, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/external_trace.py lineno = 123 function = dynamic_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/api.py lineno = 44 function = request line = return session.request(method=method, url=url, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/external_trace.py lineno = 123 function = dynamic_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/sessions.py lineno = 335 function = request line = resp = self.send(prep, **send_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/sessions.py lineno = 438 function = send line = r = adapter.send(request, **kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/adapters.py lineno = 292 function = send line = timeout=timeout
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/packages/urllib3/connectionpool.py lineno = 428 function = urlopen line = body=body, headers=headers)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/requests/packages/urllib3/connectionpool.py lineno = 280 function = _make_request line = conn.request(method, url, **httplib_request_kw)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 914 function = request line = self._send_request(method, url, body, headers)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 951 function = _send_request line = self.endheaders()
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/external_httplib.py lineno = 49 function = httplib_endheaders_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 908 function = endheaders line = self._send_output()
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 780 function = _send_output line = self.send(msg)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 739 function = send line = self.connect()
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/api/object_wrapper.py lineno = 237 function = __call__ line = self._nr_instance, args, kwargs, **self._nr_kwargs)
thread_id = MainThread filename = /home/project/envs/project_prod/lib/python2.6/site-packages/newrelic-1.13.1.31/newrelic/hooks/external_httplib.py lineno = 25 function = httplib_connect_wrapper line = return wrapped(*args, **kwargs)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/httplib.py lineno = 720 function = connect line = self.timeout)
thread_id = MainThread filename = /home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/socket.py lineno = 554 function = create_connection line = sock.connect(sa)
*** backtrace of 2259 ***
/home/project/envs/project_prod/bin/uwsgi(uwsgi_backtrace+0x25) [0x456085]
/home/project/envs/project_prod/bin/uwsgi(uwsgi_segfault+0x21) [0x456161]
/lib/libc.so.6(+0x32230) [0x7f2c43376230]
/lib/libc.so.6(+0x108052) [0x7f2c4344c052]
/home/project/envs/project_prod/bin/uwsgi(uwsgi_python_tracebacker_thread+0x430) [0x471950]
/lib/libpthread.so.0(+0x68ca) [0x7f2c44b4a8ca]
/lib/libc.so.6(clone+0x6d) [0x7f2c43413b6d]"""
from raven_harakiri import convert_traceback
def test_convert():
    """The deepest frame of the uwsgi harakiri dump must be parsed last."""
    expected_last_frame = {
        'abs_path': '/home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/socket.py',
        'context_line': 'sock.connect(sa)',
        'filename': '/home/project/.pythonz/pythons/CPython-2.6.8/lib/python2.6/socket.py',
        'function': 'create_connection',
        'lineno': 554,
        'module': None,
        'post_context': [],
        'pre_context': [],
        'vars': {},
    }
    frames = convert_traceback(example_traceback)
    assert frames[-1] == expected_last_frame
| futurecolors/raven-harakiri | test_harakiri.py | Python | mit | 10,635 |
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
__metaclass__ = type
import pandas as pd
from pyoptflow import utils
from pyoptflow.core import extract_motion_proesmans
from pyoptflow.extrapolation import semilagrangian
def read_rainrate(filename):
    # Stub: reading a rainrate field from *filename* is not implemented yet;
    # always returns None.
    return
def filter_rr(rr):
    """Return a filtered version of the rainrate field *rr*.

    Currently a no-op placeholder: the input is returned unchanged.
    """
    return rr
def rr2ubyte(rr, R_min=0.05, R_max=10.0, filter_stddev=3.0):
    """Scale rainrate field *rr* into unsigned-byte range for optical flow.

    Thin wrapper around pyoptflow's ``utils.rainfall_to_ubyte``; the
    keyword defaults here mirror the project's usual clipping/smoothing.
    """
    return utils.rainfall_to_ubyte(
        rr, R_min=R_min, R_max=R_max, filter_stddev=filter_stddev)
def motion(rr0ubyte, rr1ubyte, lam=25.0, num_iter=250, num_levels=6):
    """Estimate the motion field between two ubyte rainrate frames.

    Runs pyoptflow's Proesmans optical-flow extractor and returns only the
    first element of its result (the forward motion field).
    """
    flow = extract_motion_proesmans(
        rr0ubyte, rr1ubyte, lam=lam, num_iter=num_iter, num_levels=num_levels)
    return flow[0]
def extrapolate(rr, v, t, n_steps=15, n_iter=3, inverse=True):
    """Advect field *rr* along motion field *v* to time offset *t*.

    Thin wrapper over pyoptflow's semi-Lagrangian extrapolation.
    """
    return semilagrangian(
        rr, v, t, n_steps=n_steps, n_iter=n_iter, inverse=inverse)
def forecast(cropped_rainrates, steps=13):
    """Extrapolate a rainrate field forward in time.

    cropped_rainrates: two-row pandas.Series of input rainrate fields,
        indexed by observation time
    steps: number of time steps to extrapolate

    Returns a pandas.Series named 'forecast' whose DatetimeIndex continues
    the input's time spacing for *steps* further steps.

    Raises ValueError if the input does not contain exactly two rows.
    """
    if cropped_rainrates.size != 2:
        raise ValueError('cropped_rainrates must be a two-row pandas.Series')
    tmax = cropped_rainrates.index.max()
    tmin = cropped_rainrates.index.min()
    dt = tmax - tmin
    # BUGFIX: pd.DatetimeIndex(start=..., periods=..., freq=...) was
    # deprecated in pandas 0.24 and removed in later releases;
    # pd.date_range is the supported spelling with identical semantics.
    index = pd.date_range(start=tmax + dt, periods=steps, freq=dt)
    rr_ubyte = cropped_rainrates.apply(rr2ubyte)
    # Motion field is estimated from the two most recent frames.
    v = motion(rr_ubyte.iloc[0], rr_ubyte.iloc[1])
    # Each forecast step advects the latest observed field t+1 steps forward.
    fcast_list = [extrapolate(cropped_rainrates.loc[tmax], v, t + 1)
                  for t in range(steps)]
    return pd.Series(index=index, data=fcast_list, name='forecast')
| sataako/fmio-server | fmio/forecast.py | Python | mit | 1,752 |
import random
import zope.schema
import zope.interface
from zope.i18nmessageid import MessageFactory
from zope.component import getUtility, getMultiAdapter
from zope.browserpage.viewpagetemplatefile import ViewPageTemplateFile as Zope3PageTemplateFile
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile as FiveViewPageTemplateFile
from Products.CMFCore.utils import getToolByName
from Products.CMFCore.interfaces import ISiteRoot
from Products.CMFPlone.utils import _createObjectByType
from Products.CMFPlone.interfaces.controlpanel import IMailSchema
from Products.statusmessages.interfaces import IStatusMessage
import z3c.form
import plone.z3cform.templates
from plone.registry.interfaces import IRegistry
from smtplib import SMTPException, SMTPRecipientsRefused
from vfu.events import MessageFactory as _
from vfu.events.utils import trusted
from vfu.events.registration import IBasicForm
class MyForm(z3c.form.form.Form):
    """Event registration form.

    On successful submission, creates a ``vfu.events.registration`` object
    inside the current context and e-mails the rendered registration
    (``portal.registration_email`` template) to the registrant's address.
    """
    template = Zope3PageTemplateFile("templates/form.pt")
    fields = z3c.form.field.Fields(IBasicForm)
    ignoreContext = True
    enable_unload_protection = False
    output = None
    # Render the closed-choice fields as radio buttons instead of selects.
    fields['gender'].widgetFactory = z3c.form.browser.radio.RadioFieldWidget
    fields['pricing'].widgetFactory = z3c.form.browser.radio.RadioFieldWidget
    def _redirect(self, target=''):
        # Redirect the response to *target*, defaulting to the portal root.
        if not target:
            portal_state = getMultiAdapter((self.context, self.request), name=u'plone_portal_state')
            target = portal_state.portal_url()
        self.request.response.redirect(target)
    @z3c.form.button.buttonAndHandler(_(u"Save"), name='submit')
    def submit(self, action):
        # Validate; on errors the form is re-rendered with a status message.
        data, errors = self.extractData()
        if errors:
            self.status = _(u"Please correct errors")
            return
        folder = self.context
        # Random numeric id for the new registration object.
        # NOTE(review): collisions are not handled; shadows builtin `id`.
        id = str(random.randint(0, 99999999))
        new_obj = _createObjectByType("vfu.events.registration", folder, id, lastname = data['lastname'],
            firstname = data['firstname'], gender = data['gender'], job = data['job'], organization = data['organization'],
            email = data['email'], phone = data['phone'], street = data['street'], number = data['number'],
            zipcode = data['zipcode'], city = data['city'], country = data['country'], pricing = data['pricing'],
            comments = data['comments'])
        # Render the registration e-mail body from the site-level template.
        portal = getToolByName(self, 'portal_url').getPortalObject()
        encoding = portal.getProperty('email_charset', 'utf-8')
        trusted_template = trusted(portal.registration_email)
        mail_text = trusted_template(
            self, charset=encoding, reg_data = new_obj, event = self.context)
        subject = self.context.translate(_(u"New registration"))
        # Mail is sent to the registrant's own address.
        m_to = data['email']
        if isinstance(mail_text, unicode):
            mail_text = mail_text.encode(encoding)
        host = getToolByName(self, 'MailHost')
        registry = getUtility(IRegistry)
        mail_settings = registry.forInterface(IMailSchema, prefix='plone')
        m_from = mail_settings.email_from_address
        try:
            host.send(mail_text, m_to, m_from, subject=subject,
                charset=encoding, immediate=True, msg_type="text/html")
        except SMTPRecipientsRefused:
            raise SMTPRecipientsRefused(
                _(u'Recipient address rejected by server.'))
        except SMTPException as e:
            # NOTE(review): `raise(e)` re-raises with a fresh traceback;
            # a bare `raise` would preserve the original one.
            raise(e)
        IStatusMessage(self.request).add(_(u"Submit complete"), type='info')
        return self._redirect(target=self.context.absolute_url())
form_frame = plone.z3cform.layout.wrap_form(MyForm, index=FiveViewPageTemplateFile("templates/layout.pt")) | a25kk/vfu | src/vfu.events/vfu/events/myform.py | Python | mit | 3,814 |
from __future__ import absolute_import
import logging
import ckan.logic as logic
import ckan.model as model
import ckan.lib.dictization.model_dictize as model_dictize
import ckan.lib.dictization as dictization
from sqlalchemy import func, text, or_, and_
from datetime import datetime, timedelta
from .utils import package_generator
import flask
from ckan.plugins import toolkit
get_action = logic.get_action
check_access = logic.check_access
NotAuthorized = logic.NotAuthorized
log = logging.getLogger(__name__)
admin_dashboard = flask.Blueprint('admin_dashboard', __name__, url_prefix='/ckan-admin')
def get_blueprint():
    # Plugin hook: the Flask blueprints this extension registers with CKAN.
    return [admin_dashboard]
@admin_dashboard.route('/admin_dashboard')
def read():
    """Render the admin dashboard page.

    Access is gated by the 'admin_dashboard' auth function; unauthorized
    users get a 403.
    """
    context = {'user': toolkit.g.user, 'auth_user_obj': toolkit.g.userobj}
    try:
        toolkit.check_access('admin_dashboard', context, {})
        # Resources whose content failed validation
        invalid_resources = fetch_invalid_resources()
        # Aggregate public/private/new-package counts
        statistics = fetch_package_statistics()
        # Find packageless organizations and produce a changelog
        (packageless_organizations, packageless_organizations_changelog) = \
            fetch_packageless_organizations_and_changelog(context)
        # Generate activity stream snippet
        # FIXME: Disabled because fetch_recent_package_activity_list_html is not ported to CKAN 2.9
        # package_activity_html = fetch_recent_package_activity_list_html(context, user_not='harvest')
        # harvest_activity_html = fetch_recent_package_activity_list_html(context, user='harvest')
        # privatized_activity_html = fetch_recent_package_activity_list_html(context, only_privatized=True)
        # interesting_activity_html = fetch_recent_package_activity_list_html(context, only_resourceful=True)
        def prepare_heartbeat(hb):
            # Parse the heartbeat's ISO timestamp string into a datetime;
            # falsy records are passed through unchanged.
            if hb:
                return {'success': hb.get('success'),
                        'timestamp': datetime.strptime(hb.get('timestamp'), '%Y-%m-%dT%H:%M:%S.%f')}
            else:
                return hb
        xroad_heartbeat_latest = toolkit.get_action('xroad_heartbeat')(context, {})
        # 'success' here means heartbeat checks have been performed; the check
        # result itself is within the 'heartbeat' property
        if xroad_heartbeat_latest.get('success'):
            one_day_ago = datetime.now() - timedelta(days=1)
            xroad_heartbeat_history = toolkit.get_action('xroad_heartbeat_history')(context, {'since': one_day_ago})
            xroad_heartbeat = {
                'latest': prepare_heartbeat(xroad_heartbeat_latest.get('heartbeat')),
                'history': [prepare_heartbeat(item) for item in xroad_heartbeat_history.get('items', [])]
            }
        else:
            xroad_heartbeat = False
        # Render template
        vars = {'invalid_resources': invalid_resources,
                # 'package_activity_html': package_activity_html,
                # 'harvest_activity_html': harvest_activity_html,
                # 'privatized_activity_html': privatized_activity_html,
                # 'interesting_activity_html': interesting_activity_html,
                'packageless_organizations': packageless_organizations,
                'packageless_organizations_changelog': packageless_organizations_changelog,
                'stats': statistics,
                'xroad_heartbeat': xroad_heartbeat,
                }
        template = 'admin/dashboard.html'
        return toolkit.render(template, extra_vars=vars)
    except toolkit.NotAuthorized:
        toolkit.abort(403)
def fetch_invalid_resources():
    """Return (resource, package) pairs for resources flagged as invalid.

    Scans all packages (ignoring authorization) and collects every resource
    whose 'valid_content' value is explicitly 'no'.
    """
    context = {'ignore_auth': True}
    return [(resource, package)
            for package in package_generator(context, '*:*', 1000)
            for resource in package.get('resources', [])
            if resource.get('valid_content', 'yes') == 'no']
def fetch_package_statistics():
    """Collect dataset counts for the admin dashboard.

    Returns a dict with the number of public/private active datasets and
    the number of datasets created within the last week, month and year.
    """
    # Count active datasets grouped by their "private" flag.
    visibility_rows = (
        model.Session.query(model.Package.private, func.count(model.Package.id))
        .filter(model.Package.state == 'active')
        .filter(model.Package.type == 'dataset')
        .group_by(model.Package.private))

    counts = {False: 0, True: 0}
    for is_private, row_count in visibility_rows:
        counts[bool(is_private)] = row_count

    def created_since(threshold):
        # Count datasets whose first-created timestamp is at or after *threshold*.
        created = (
            model.Session.query(
                model.Package.id.label('id'),
                model.Package.metadata_created.label('ts'))
            .filter(model.Package.type == 'dataset')
            .subquery())
        return (model.Session.query(func.count(created.c.id))
                .filter(created.c.ts >= threshold)
                .one())[0]

    return {'public': counts[False],
            'private': counts[True],
            'new_last_week': created_since(datetime.utcnow() - timedelta(weeks=1)),
            'new_last_month': created_since(datetime.utcnow() - timedelta(days=30)),
            'new_last_year': created_since(datetime.utcnow() - timedelta(days=365)),
            }
def fetch_recent_package_activity_list_html(
        context, user=None, user_not=None, only_privatized=False,
        only_resourceful=False, limit=30):
    """Render recent package-revision activity as an HTML snippet.

    DISABLED: the first statement unconditionally raises because the
    implementation relies on pre-2.9 CKAN APIs (package revisions and
    activity_streams).  Everything below the raise is dead code, kept as
    the basis for a future port.
    """
    # FIXME: disable function pending porting to CKAN 2.9
    raise Exception('fetch_recent_package_activity_list_html is not yet ported for CKAN 2.9')
    # FIXME: activity_streams was removed in CKAN 2.9, hack to "fix" references until porting
    activity_streams = None
    # Fetch recent revisions, store as list ordered by time
    recent_revisions_query = (
        model.Session.query(model.PackageRevision, model.User.id)
        .join(model.Revision, model.PackageRevision.revision_id == model.Revision.id)
        .join(model.User, model.Revision.author == model.User.name)
        .distinct())
    if only_resourceful:
        recent_revisions_query = (
            recent_revisions_query
            .join(model.Resource, model.Resource.package_id == model.PackageRevision.id)
            .filter(model.Resource.state == "active"))
    if user is not None:
        recent_revisions_query = recent_revisions_query.filter(
            model.Revision.author == user)
    if user_not is not None:
        recent_revisions_query = recent_revisions_query.filter(
            model.Revision.author != user_not)
    if only_privatized:
        recent_revisions_query = recent_revisions_query.filter(
            model.PackageRevision.private)
    recent_revisions_query = (
        recent_revisions_query
        .order_by(model.PackageRevision.metadata_modified.desc())
        .limit(limit))
    recent_revisions = [r for r in recent_revisions_query]
    # Fetch related packages, store by id
    packages = {r.id: None for r, uid in recent_revisions}
    packages_query = (
        model.Session.query(model.Package)
        .filter(model.Package.id.in_(list(packages.keys()))))
    for package in packages_query:
        packages[package.id] = package
    # Fetch related packages' first revision timestamps
    packages_created = {}
    packages_created_query = (
        model.Session.query(
            model.PackageRevision.id.label('id'),
            func.min(model.PackageRevision.metadata_modified).label('ts'))
        .filter(model.PackageRevision.id.in_(list(packages.keys())))
        .group_by(model.PackageRevision.id))
    for package_id, created in packages_created_query:
        packages_created[package_id] = created
    # Fetch previous revisions for the recent revisions
    packages_previous = {}
    packages_previous_query = (
        model.Session.query(model.PackageRevision.revision_id.label("rid"), model.PackageRevision)
        .from_statement(text("""
            select p.revision_id as rid, r.*
            from package_revision r
            left join (
                select l.revision_id, r.id, max(r.metadata_modified) as previous_timestamp
                from package_revision r
                join package_revision l on r.id = l.id
                where l.revision_id = ANY(:ids)
                and r.metadata_modified < l.metadata_modified
                group by l.revision_id, r.id, l.metadata_modified
            ) p on r.id = p.id
            where r.metadata_modified = p.previous_timestamp
            """))
        .params(ids=[r.revision_id for r, uid in recent_revisions]))
    for rid, package in packages_previous_query:
        packages_previous[rid] = package
    # Add support for new color for privacy-changed packages
    activity_streams.activity_stream_string_icons['changed package privacy'] = 'sitemap'
    activity_streams.activity_stream_string_functions['changed package privacy'] = \
        activity_streams.activity_stream_string_changed_package
    # Create activity objects based on revision data
    def revision_to_activity(r, uid):
        # Map a package revision to a CKAN Activity object, or None to skip it.
        pr = packages_previous.get(r.revision_id)
        if only_privatized and (pr is None or (pr.private or not r.private)):
            return None
        privacy_changed = pr is not None and pr.private != r.private
        activity_type = None
        if r.state in ('active', 'draft'):
            if packages_created[r.id] == r.metadata_modified:
                activity_type = 'new package'
            elif privacy_changed:
                activity_type = 'changed package privacy'
            else:
                activity_type = 'changed package'
        # NOTE(review): ('deleted') is a plain string, so this is a substring
        # test rather than tuple membership — likely meant ('deleted',).
        elif r.state in ('deleted'):
            activity_type = 'deleted package'
        else:
            log.warning("Unknown package state, skipping: %s" % r.state)
            return None
        d = {'package': dictization.table_dictize(
            packages[r.id], context={'model': model})}
        activity = model.Activity(uid, r.id, r.revision_id, activity_type, d)
        activity.timestamp = r.metadata_modified
        return activity
    activity_objects = (
        (r for r in
            (revision_to_activity(r, uid) for r, uid in recent_revisions)
            if r is not None))
    # Render activity list snippet
    changed_packages = model_dictize.activity_list_dictize(activity_objects, context)
    return activity_streams.activity_list_to_html(context, changed_packages, {'offset': 0})
def fetch_packageless_organizations_and_changelog(context):
    """Find organizations that currently hold no packages.

    Replays 'new package'/'deleted package' activity events in time order to
    reconstruct each organization's package-membership timeline, then returns
    a tuple ``(packageless_organizations, changelog)``:

    - packageless_organizations: org dicts with a 'packageless_since' timestamp
    - changelog: sorted (timestamp, org, flag) tuples, where the flag marks
      moments the org hit exactly zero (False) or exactly one (True) package
    """
    # Query package owners
    package_owners = dict(model.Session.query(model.Package.id, model.Package.owner_org).all())
    # Query organization data
    organizations = (model.Session.query(model.Group.id, model.Group.created, model.Group.title, model.GroupExtra.value)
                     .join(model.GroupExtra, and_(model.GroupExtra.group_id == model.Group.id,
                                                  model.GroupExtra.key == 'title_translated',
                                                  model.GroupExtra.active == True), isouter=True)  # noqa
                     .filter(model.Group.type == 'organization')
                     .all())
    # Query package new/delete activity events
    package_new_delete_activities = (model.Session.query(model.Activity.timestamp,
                                                         model.Activity.object_id, model.Activity.activity_type)
                                     .filter(or_(model.Activity.activity_type == 'new package',
                                                 model.Activity.activity_type == 'deleted package'))
                                     .order_by(model.Activity.timestamp)
                                     .all())
    # Define organization objects required for UI
    organizations_by_id = {oid: {'id': oid, 'created': created, 'title': title, 'title_translated': title_translated}
                           for oid, created, title, title_translated in organizations}
    # Initialize organization timelines with no packages at the time of their creation.
    # A timeline is a list of (timestamp, set-of-package-ids) snapshots.
    organization_timelines = {oid: [(created, set())] for oid, created, _, _ in organizations}
    # Create a timeline of contained packages for each organization
    for timestamp, package_id, activity_type in package_new_delete_activities:
        owner_id = package_owners.get(package_id)
        if not owner_id:
            log.warning('No owner found for package "%s"', package_id)
            continue
        organization_timeline = organization_timelines.get(owner_id)
        if not organization_timeline:
            log.warning('No timeline found for organization "%s"', owner_id)
            continue
        latest_timestamp, latest_package_set = organization_timeline[-1]
        if activity_type == 'new package':
            if package_id not in latest_package_set:
                new_package_set = latest_package_set.copy()
                new_package_set.add(package_id)
                organization_timeline.append((timestamp, new_package_set))
            else:
                log.warning('Adding package "%s" a second time?', package_id)
                continue
        if activity_type == 'deleted package':
            if package_id in latest_package_set:
                new_package_set = latest_package_set.copy()
                new_package_set.remove(package_id)
                organization_timeline.append((timestamp, new_package_set))
            else:
                log.warning('Removing package "%s" before adding?', package_id)
                continue
    # Produce a collective changelog for all organizations
    changelog = []
    for oid, organization_timeline in list(organization_timelines.items()):
        organization = organizations_by_id.get(oid)
        if not organization:
            log.warning('Organization "%s" not found', oid)
            continue
        for timestamp, package_set in organization_timeline:
            if len(package_set) == 0:
                changelog.append((timestamp, organization, False))
            elif len(package_set) == 1:
                changelog.append((timestamp, organization, True))
    # NOTE(review): sorting (timestamp, dict, bool) tuples raises TypeError on
    # timestamp ties in Python 3 because dicts are unorderable — confirm ties
    # cannot occur, or sort with key=itemgetter(0).
    changelog.sort()
    # Collect currently packageless organizations
    packageless_organizations = []
    for oid, organization_timeline in list(organization_timelines.items()):
        latest_timestamp, latest_package_set = organization_timeline[-1]
        if len(latest_package_set) == 0:
            organization = organizations_by_id.get(oid)
            if not organization:
                log.warning('Organization "%s" not found', oid)
                continue
            packageless_organization = organization.copy()
            packageless_organization['packageless_since'] = latest_timestamp
            packageless_organizations.append(packageless_organization)
    return (packageless_organizations, changelog)
| vrk-kpa/api-catalog | ckanext/ckanext-apicatalog_ui/ckanext/apicatalog_ui/admindashboard.py | Python | mit | 15,229 |
# coding=utf-8
jchars = u'。々ゝヽゞヾーぁァあアぃィいイぅゥうウヴぇェえエぉォおオヵかカがガきキぎギくクぐグヶけケげゲこコごゴさサざザしシじジすスず' \
u'ズせセぜゼそソぞゾたタだダちチぢヂっッつツづヅてテでデとトどドなナにニぬヌねネのノはハばバぱパひヒびビぴピふフぶブぷプへ' \
u'ヘべベぺペほホぼボぽポまマみミむムめメもモゃャやヤゅュゆユょョよヨらラりリるルれレろロゎヮわワゐヰゑヱをヲんン一丁七万-' \
u'下不与丑且世丘丙両並中丸丹主久乏乗乙九乱乳乾亀了予争事二互五井亜亡交亥亨享-亭人仁今介仏仕他付仙代-以仮仰仲件任企伊伏-休' \
u'会伝伯伴伸伺似但位-佐体何余作佳併使例侍供依価侮侯侵便係促俊俗保信修俳俵俸俺倉個倍倒候借倣値倫倹偉偏停健側-偶偽傍傑傘備催' \
u'債傷傾働像僕僚僧儀億儒償優元-兆先光克免兎児党入全八-六共兵具典兼内円冊再冒冗写冠冬冷准凍凝凡処凶凸-出刀刃分-刈刊刑列初判' \
u'別利到制-券刺刻則削前剖剛剣剤副剰割創劇力功加劣助努励労効劾勅勇勉動勘務勝募勢勤勧勲勺匁包化北匠匹-医匿十千升午半卑-協南単' \
u'博占卯-危即-卵卸厄厘厚原厳去参又及-収叔取受叙口-句叫召可台史右号司各合吉同-向君吟否含吸吹呈-告周味呼命和咲哀品員哲唆唇唐' \
u'唯唱商問啓善喚喜喝喪喫営嗣嘆嘉嘱器噴嚇囚四回因団困囲図固国圏園土圧在地坂均坊坑坪垂型垣埋城域執培基埼堀堂堅堕堤堪報場塀塁' \
u'塊塑塔塗塚塩塾境墓増墜墨墳墾壁壇壊壌士壬壮声-売変夏夕外多夜夢大天-夫央失奇-奉奏契奔奥奨奪奮女奴好如-妄妊妙妥妨妹妻姉始姓' \
u'委姫姻姿威娘娠娯婆婚婦婿媒嫁嫌嫡嬢子孔字存孝季孤学孫宅宇-安完宗-定宜宝実客-室宮宰害-家容宿寂寄-密富寒寛寝察寡寧審寮寸寺対' \
u'寿封専射将尉-尋導小少尚就尺尼-局居屈届屋展属層履屯山岐岡岩岬岳岸峠峡峰島崇崎崩川州巡巣工-巨差己巳巻市布帆希帝帥師席帯帰帳常' \
u'帽幅幕幣干-年幸幹幻-幾庁広床序底店庚府度座庫庭庶-庸廃廉廊延廷建弁弊式弐弓-引弘弟弦弧弱張強弾当形彩彫彰影役彼往征径待律後徐' \
u'徒従得御復循微徳徴徹心必忌忍志-忙応忠快念怒怖思怠急性怪恋恐恒恥恨恩恭息恵悔悟悠患悦悩悪悲悼情惑惜惨惰想愁愉意愚愛感慈態慌' \
u'慎慕慢慣慨慮慰慶憂憎憤憩憲憶憾懇懐懲懸戊戌成-戒戦戯戸戻房所扇扉手才打払扱扶批承技抄把抑投抗折抜択披抱抵抹押抽担拍拐拒拓拘' \
u'拙招拝拠拡括拷拾持指挑挙挟振挿捕捜捨据掃授掌排掘掛採探接控推措掲描提揚換握揮援揺損搬搭携搾摂摘摩撃撤撮撲擁操擦擬支改攻放政' \
u'故敏救敗教敢散敬数整敵敷文斉斎斗料斜斤斥断新方施旅旋族旗既日旧-早旬昆昇昌明易昔星映春昨昭是昼時晩普景晴晶暁暇暑暖暗暦暫暮暴' \
u'曇曜曲更書曹替最月有服朕朗望朝期木未-札朱朴机朽杉材村束条来杯東松板析林枚果枝枠枢枯架柄某染柔柱柳査栄栓校株核根格栽桃案桑' \
u'桜桟梅械棄棋棒棚棟森棺植検業極楼楽概構様槽標模権横樹橋機欄欠次欧欲欺款歌歓止正武歩歯歳歴死殉-残殖殴段殺殻殿母毎毒比毛氏民' \
u'気水氷永汁求汎汗汚江池決汽沈沖没沢河沸油治沼沿況泉泊泌法泡-泣泥注泰泳洋洗洞津洪活派流浄浅浜浦浪浮浴海浸消涙涯液涼淑淡深混' \
u'添清渇-渉渋渓減渡渦温測港湖湯湾-満源準溝溶滅滋滑滝滞滴漁漂漆漏演漠漢漫漬漸潔潜潟潤潮澄激濁濃濫濯瀬火灯灰災炉炊炎炭点為烈' \
u'無焦然焼煙照煩煮熟熱燃燥爆爵父片版牙牛牧物牲特犠犬犯状狂狩独狭猛猟猪猫献猶猿獄獣獲玄率玉王珍珠班現球理琴環璽瓶甘甚生産用田' \
u'-申男町画界畑畔留畜畝略番異畳疎疑疫疲疾病症痘痛痢痴療癒癖癸発登白百的皆皇皮皿盆益盗盛盟監盤目盲直相盾省看県真眠眺眼着睡督' \
u'瞬矛矢知短矯石砂研砕砲破硝硫硬碁碑確磁磨礁礎示礼社祈祉祖祚祝神祥票祭禁禄禅禍-福秀私秋科秒秘租秩称移程税稚種稲稼稿穀穂積穏' \
u'穫穴究空突窃窒窓窮窯立竜章童端競竹笑笛符第筆等筋筒答策箇算管箱節範築篤簡簿籍米粉粋粒粗粘粛粧精糖糧糸系糾紀約紅紋納純紙-' \
u'紛素-索紫累細紳紹紺終組経結絞絡給統絵絶絹継続維綱網綿緊総緑緒線締編緩緯練縁縄縛縦縫縮績繁繊織繕繭繰缶罪置罰署罷羅羊美群義' \
u'羽翁翌習翻翼老考者耐耕耗耳聖聞聴職肉肌肖肝肢肥肩肪肯育肺胃胆背胎胞胴胸能脂脅脈脚脱脳脹腐腕腰腸腹膚膜膨臓臣臨自臭至致興舌舎舗' \
u'舞舟航般舶船艇艦良色芋芝花芳芸芽苗若苦英茂茎茶草荒荘荷菊菌菓菜華落葉著葬蒸蓄蔵薄薦薪-薬藤藩藻虎虐虚虜虞虫蚊蚕蛇蛍蛮融血衆行' \
u'術街衛衝衡衣表衰衷袋被裁裂装裏裕補裸製複褐褒襟襲西要覆覇見規視覚覧親観角解触言訂計討訓託記訟訪設許訳訴診証詐詔評詞詠試詩' \
u'詰-詳誇誉誌認誓誕誘語誠誤説読誰課調談請論諭諮諸諾謀謁謄謙講謝謡謹識譜警議譲護谷豆豊豚象豪貝貞負-貢貧-販貫責貯貴買貸費貿賀' \
u'賃賄資賊賓賛賜賞賠賢賦質購贈赤赦走赴起超越趣足距跡路跳践踊踏躍身車軌軍軒軟転軸軽較載輝輩輪輸轄辛辞辰-農辺込迅迎近返迫迭述' \
u'迷追退送逃逆透逐逓途通逝速造連逮週進逸遂遅遇遊運遍過道-違遠遣適遭遮遵遷選遺避還邦邪邸郊郎郡部郭郵郷都酉酌配酒酔酢酪酬酵酷酸' \
u'醜醸釈里-量金針釣鈍鈴鉄鉛鉢鉱銀銃銅銑銘銭鋭鋳鋼錘錠錬錯録鍛鎖鎮鏡鐘鑑長門閉開閏閑間関閣閥閲闘阪防阻附降限陛院-陥陪陰陳陵' \
u'陶陸険陽隅隆隊階随隔際障隠隣隷隻雄-雇雉雌雑離難雨雪雰雲零雷電需震霊霜霧露青静非面革靴韓音韻響頂頃項順預-頒領頭頻頼題額顔顕' \
u'願類顧風飛食飢飯飲飼-飾養餓館首香馬駄-駆駐騎騒験騰驚骨髄高髪鬼魂魅魔魚鮮鯨鳥鳴鶏鹿麗麦麻黄黒黙鼓鼠鼻齢'
jrange = list(jchars)
| jr-garcia/Engendro3D | e3d/gui/FontRendering/japanese_range.py | Python | mit | 6,858 |
"""
Wikipedia utils.
@author: Faegheh Hasibi (faegheh.hasibi@idi.ntnu.no)
"""
from urllib import quote
class WikipediaUtils(object):
    """Static helpers for translating Wikipedia titles and URIs."""

    mongo = None

    @staticmethod
    def wiki_title_to_uri(title):
        """Convert a Wikipedia page title to a ``<wikipedia:...>`` URI.

        Spaces become underscores per Wikipedia page-name rules
        (https://en.wikipedia.org/wiki/Wikipedia:Page_name#Spaces.2C_underscores_and_character_coding)
        and percent-encoding follows DBpedia's conventions
        (http://dbpedia.org/services-resources/uri-encoding).
        Returns None for an empty/missing title.
        """
        if not title:
            return None
        encoded = quote(title, ' !$&\'()*+,-./:;=@_~').replace(' ', '_')
        return "<wikipedia:" + encoded + ">"

    @staticmethod
    def wiki_uri_to_dbp_uri(wiki_uri):
        """Convert a ``<wikipedia:...>`` URI to the matching DBpedia URI."""
        return wiki_uri.replace("<wikipedia:", "<dbpedia:")
def main():
    """Example usage: print the wiki URI for a sample page title."""
    print(WikipediaUtils.wiki_title_to_uri("Tango (genre musical)"))
if __name__ == "__main__":
main() | hasibi/TAGME-Reproducibility | nordlys/wikipedia/utils.py | Python | mit | 966 |
# -*- coding: utf-8 -*-
import os
import os.path
import re
import sys
import string
from django.apps.registry import apps
from django.core.management.base import BaseCommand, CommandError
from python_translate.extractors import base as extractors
from python_translate import operations
from python_translate.translations import MessageCatalogue
from django_translate.utils import bcolors
from django_translate import services
from django_translate import settings
class AnyFormatSpec:
    """Stand-in value whose formatted representation is always empty.

    Accepts any format specification (width, precision, type codes, ...)
    and renders as the empty string, so templates can be dry-run without
    supplying real values.
    """

    def __format__(self, format_spec):
        return ''
class Formatter(string.Formatter):
    """Template formatter that records which fields a template references.

    Each field lookup is noted in ``self.used`` and resolved to an
    ``AnyFormatSpec`` placeholder, so arbitrary templates can be analysed
    without supplying real values.
    """

    def __init__(self):
        self.used = set()

    def get_value(self, key, args, kwargs):
        # Record the referenced field name, then hand back a placeholder.
        self.used.add(key)
        return AnyFormatSpec()
class Command(BaseCommand):
help = """Extract translation strings from templates from a given location. It can display them or merge
the new ones into the translation files. When new translation strings are found it can
automatically add a prefix to the translation message.
Example running against app folder
./manage.py tranzdump -l en --path ./ --output-path ./tranz
./manage.py tranzdump -l fr --force --prefix="new_" --app website --exclude ./website/static
"""
    def __init__(self, stdout=None, stderr=None, no_color=False):
        # Instance state initialized to None here and populated from the
        # parsed command-line options when handle() runs.
        self.excluded_paths = None
        self.locale = None
        self.verbosity = None
        super(Command, self).__init__(stdout, stderr, no_color)
    def add_arguments(self, parser):
        # Register tranzdump's command-line options on Django's argparse parser.
        parser.add_argument('--locale', '-l', default='en', dest='locale', action='store',
                            help='Locale to process')
        parser.add_argument('--app', '-a', dest='app', action='store',
                            help='App to scan.')
        parser.add_argument('--path', '-p', dest='path', action='store',
                            help='Path to scan')
        parser.add_argument('--output-dir', dest='output_dir', default=None, action='store',
                            help='Override the default output dir')
        # NOTE(review): action='append' with a shared default=[] list is an
        # argparse pitfall (appended values accumulate on the shared default
        # across parses in one process); harmless for one-shot management
        # commands, but worth confirming.
        parser.add_argument('--exclude-dir', '-x', default=[], dest='excluded_paths', action='append',
                            help='Paths to exclude. Default is none. Can be used multiple times. '
                                 'Works only with ChainExtractor.')
        parser.add_argument('--prefix', dest='prefix', default="__", action='store',
                            help='Override the default prefix')
        parser.add_argument('--format', dest='format', default="yml", action='store',
                            help='Override the default output format')
        parser.add_argument('--dump-messages', dest='dump_messages', action='store_true',
                            help='Should the messages be dumped in the console')
        parser.add_argument('--force', dest='force', action='store_true',
                            help='Should the update be done')
        parser.add_argument('--no-backup', dest='no_backup', action='store_true',
                            help='Should backup be disabled')
        parser.add_argument('--clean', dest='clean', default=False, action='store_true',
                            help='Should clean not found messages',)
def handle(self, *args, **options):
if options.get('force') != True and options.get('dump_messages') != True:
print((bcolors.WARNING + 'You must choose at least one of --force or --dump-messages' + bcolors.ENDC))
return
if not (bool(options.get('app')) ^ bool(options.get('path'))):
print((bcolors.WARNING + 'You must choose only one of --app or --path' + bcolors.ENDC))
return
if not options.get('output_dir') and (not options.get('app') or not settings.TRANZ_SEARCH_LOCALE_IN_APPS):
print((bcolors.WARNING + 'You must provide an --output-dir when in --path mode, or when TRANZ_SEARCH_LOCALE_IN_APPS ' \
'settings variable is False.' + bcolors.ENDC))
return
self.excluded_paths = [os.path.abspath(path) for path in options['excluded_paths']]
self.excluded_paths += [os.path.abspath(django_translate.__path__[0])]
self.excluded_paths += settings.TRANZ_EXCLUDED_DIRS
# Find directories to scan
if options.get('app'):
for app in list(apps.app_configs.values()):
if app.name == options.get('app'):
current_name = app.name
root_path = app.path
break
else:
raise ValueError("App {0} not found".format(options.get('app')))
else:
root_path = os.path.abspath(options['path'])
current_name = root_path.split("/")[-1]
output_dir = options.get('output_dir') or os.path.join(root_path, 'tranz')
writer = services.writer
print(('Generating "{0}" translation files for "{1}"'.format(options.get('locale'), current_name)))
print("Loading existing messages")
current_catalogue = MessageCatalogue(options['locale'])
loader = services.loader
loader.load_messages(output_dir, current_catalogue)
if len(current_catalogue.messages) == 0:
print(("No messages were loaded, make sure there actually are " \
"translation file in format {{catalog}}.{{locale}}.{{format}} in {0}".format(output_dir)))
return
print("Extracting messages")
extracted_catalogue = MessageCatalogue(options['locale'])
extractor = services.extractor
extractor.set_prefix(options['prefix'])
self.extract_messages(extractor, root_path, extracted_catalogue)
print("Processing catalogues")
operation_class = operations.DiffOperation if options['clean'] else operations.MergeOperation
operation = operation_class(current_catalogue, extracted_catalogue)
if not len(operation.get_domains()):
print("No translations found")
return
if options["dump_messages"]:
for domain in operation.get_domains():
print(("Displaying messages for domain {0}".format(domain)))
new_keys = list(operation.get_new_messages(domain).keys())
all_keys = list(operation.get_messages(domain).keys())
for id in set(all_keys).difference(new_keys):
print(id)
for id in new_keys:
print((bcolors.OKGREEN + id + bcolors.ENDC))
for id in list(operation.get_obsolete_messages(domain).keys()):
print((bcolors.FAIL + id + bcolors.ENDC))
if options["no_backup"]:
writer.disable_backup()
if options["force"]:
print(("Writing files to {0}".format(output_dir)))
writer.write_translations(operation.get_result(), options['format'], {
"path": output_dir,
"default_locale": options['locale']
})
def extract_messages(self, extractor, root_path, extracted_catalogue):
if isinstance(extractor, extractors.ChainExtractor):
subextractors = list(extractor._extractors.values())
else:
subextractors = [extractor]
for subextractor in subextractors:
if not isinstance(subextractor, extractors.BaseExtractor):
subextractor.extract(root_path, extracted_catalogue)
continue
paths = subextractor.extract_files(root_path)
paths = self.filter_exluded_paths(paths)
for path in paths:
try:
subextractor.extract([path], extracted_catalogue)
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
msg = 'There was an exception in extractor {0} when processing ' \
'resource "{1}"'.format(type(subextractor).__name__, path)
msg = msg + "\nOriginal message: {0} {1}".format(exc_type.__name__, exc_value)
raise ValueError(msg).with_traceback(exc_traceback)
def filter_exluded_paths(self, paths):
valid = []
for path in paths:
for excluded in self.excluded_paths:
if path.startswith(excluded):
break
else:
valid.append(path)
return valid
| adamziel/django_translate | django_translate/management/commands/tranzdump.py | Python | mit | 8,550 |
XXXXXXXXX XXXXX
XXXXXX
XXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX XXXXXXX X XXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXX X
XXX XXXXX X XXXXXXXXXXXXXXXXXXXXXXXX X
XXXX XXXXXXXXX
X XX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX
X XX
XXXXXXXXX
XXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXX XX X XXXXXXXXXXXX XXXXXX XXXXXXX XXX XXXXXXXXXXX XXXXXXXX X XXXXX XXXXXX XX XXXXX XX XXX XXX XX X XXXXXXXXX XXXXXXX XXXXXXXXXX XXXXX XXXX XXXX
XXXXXXXXXXXX XXXXXXXX XXX XX XXXXXXX XXXXXXX XXXX XXXXX XXX XXXXXXXX XXXXXXX XX XXXXX XX XXX XXXXXXX XXXXXXXXXX
XXXXXX
XXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX
XXXX
XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXXX
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXX
XXXXX
XXXXXXXX
XXXXXXX
XXXX
XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXXX
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXX
XXXXX
XXXXXXXX
XXXXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXX
XXXXXXXXXX XXXXXXXXX XXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXX
XXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXXXXXX
XXXXXXXX
XXX XXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXX
XXXXXXXXXXXX
XXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXX
XXXXX
XXXX XXXXXXXXXXXXX
XXXX XXXXXXXXXXX
XXXXXX XXXXXXXXXX XXXXX XXXXX XX XXXX XX XXXXXXXXXX XXX XXXXX XXXXX XX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X
XXX XXXXX X XXXXXXXXXXXXXXXXXXXXXXXX X
XXXX XXXXXXXXX
X XX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XX
X XXXXXXXXX
XXXXX XXXXXXXX XX XXX XXXXX XXXXX XXX XXXXXXXXX XXXXXXXXXX XXXXXXX XXXXX XXX XXXXXX XXX XXX XX XXXX XXXXXXXXXXXX
XXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXXX
XXXXXX XXXX XXXXX XXXXX XX XXX XXX XXXX XXXXX XXXXXXXX XXXXXX XX XXX XXXX XXXXXXXX XX XXXXXXXXXXXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XXXXX
XXXXXXX XXXXXXX XXXX X XXXXXX XXX XX XXXXXXXXXX XXX XXXXXX XXXX XX XXXXXX XXXX XXX XXXXXXX XXXXX XXXXXXXX XX XXXXX XX XXXXXXXXX XXXXXXX XXX XXXXXX XXX
XXXXXXXXXX XXX XXXX XX XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXXXX XXXXXXXXX XXX XXXXXXX XXXXX XXX XXXXXX XXX XXX XX XXXX XXXXXXX XX XXXXXXX XXX XXXXXXX XX XXX XXXXXXXXXX
XXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXX
XXXXXXX XXXXX XXXXX XXXX XX XXXXX XXX XXXXXX XXXX XXXX XXX XXXX XXXXXX XX XXXXX XXXXXX XXXX XXXX XXXX XXXXXX XXXXXXXXXXXXX XX XXX XXXXXXXXXX XXXX XX
XXXXXXXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XXXXXX XXXXXX XXXX XX XXXXXXX XXX XXXXXXXXXXX XXXXXXXXXX XXX XXXX XXXXX XX XXXXX XXXXXX XXXXXX XXXX XXXX XXXX XX XXXX XX XXXXXXX XXXXXX XXXXX XXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXX XXX XX XXXXXXX XX XXX XXXXXXXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXX XX XXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XXXXXXXXXX
XXXXXX
XXXXXXXXX
XXXX XXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXX XXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXX
XXXXXXXXX XXXXX XX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXX XXXX XXXXXXXXXXX XXXXX XXX XXX XXXXXXXXXX XXX XXXXXXXXXXXX
XXXXXXXXXXXXX XXXXX XXX X XXXX XXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXX XXX XXXXXXXXXXXX XX XXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXX XXXXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXX
XXXXXXXXXX XX XXXXXXXX XXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XXXXXX
XXXXXXXXXX
XXXXXXX
XXXXXXX | dnaextrim/django_adminlte_x | adminlte/static/plugins/datatables/extensions/ColReorder/examples/fixedheader.html.py | Python | mit | 17,099 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 jem@seethis.link
# Licensed under the MIT license (http://opensource.org/licenses/MIT)
from keyplus.utility import inverse_map
# Sizes below are presumably in bytes -- verify against the firmware protocol.
AES_KEY_LEN = 16  # AES-128 key length
EP_VENDOR_SIZE = 64
VENDOR_REPORT_LEN = 64
FLASH_WRITE_PACKET_LEN = EP_VENDOR_SIZE - 5
SETTINGS_RF_INFO_SIZE = 64
SETTINGS_RF_INFO_HEADER_SIZE = (SETTINGS_RF_INFO_SIZE - AES_KEY_LEN*2)  # rf info size minus two AES keys
SETTINGS_SIZE = 512
LAYOUT_HEADER_SIZE = 1
# Capacity limits.
MAX_NUMBER_KEYBOARDS = 64
MAX_NUMBER_LAYOUTS = MAX_NUMBER_KEYBOARDS
MAX_NUMBER_DEVICES = 64
MAX_MATRIX_SIZE = 32
# Sentinel layout ids.
LAYOUT_ID_NONE = 0xfe
LAYOUT_ID_INVALID = 0xff
# Report
# Keyboard report modes.
KEYBOARD_REPORT_MODE_AUTO = 0  # 6kro -> nkro if more than 6 keys pressed
KEYBOARD_REPORT_MODE_NKRO = 1  # nkro
KEYBOARD_REPORT_MODE_6KRO = 2  # 6kro

# Human readable label for each report mode.
REPORT_MODE_STR_MAP = {
    KEYBOARD_REPORT_MODE_AUTO: "Auto NKRO",
    KEYBOARD_REPORT_MODE_6KRO: "6KRO",
    KEYBOARD_REPORT_MODE_NKRO: "NKRO",
}

def report_mode_to_str(mode):
    """Return the display name for ``mode``, or ``Unknown(<mode>)``."""
    return REPORT_MODE_STR_MAP.get(mode, "Unknown({})".format(mode))
# FEATURE_CTRL bit mask values
# One bit per feature; the *_DISABLE names indicate a set bit turns the
# corresponding feature off.
FEATURE_CTRL_USB_DISABLE = (1 << 0)
FEATURE_CTRL_WIRED_DISABLE = (1 << 1)
FEATURE_CTRL_RF_DISABLE = (1 << 2)
FEATURE_CTRL_RF_MOUSE_DISABLE = (1 << 3)
FEATURE_CTRL_BT_DISABLE = (1 << 4)
# Upper bits are reserved for future use.
FEATURE_CTRL_RESERVED_0 = (1 << 5)
FEATURE_CTRL_RESERVED_1 = (1 << 6)
FEATURE_CTRL_RESERVED_2 = (1 << 7)
###############################################################################
# firmware info constants                                                     #
###############################################################################

# Bit masks describing the firmware's supported matrix scanning options.
SUPPORT_SCANNING_MASK = 0x01
SUPPORT_SCANNING_COL_ROW_MASK = 0x02
SUPPORT_SCANNING_ROW_COL_MASK = 0x04
SUPPORT_SCANNING_PINS_MASK = 0x08
SUPPORT_SCANNING_ARBITRARY_MASK = 0x10
SUPPORT_SCANNING_BUILT_IN_MASK = 0x20

# Bit masks for supported key action types.
SUPPORT_KEY_MEDIA = 0x01
SUPPORT_KEY_MOUSE = 0x02
SUPPORT_KEY_LAYERS = 0x04
SUPPORT_KEY_STICKY = 0x08
SUPPORT_KEY_TAP = 0x10
SUPPORT_KEY_HOLD = 0x20

# Bit masks for supported key rollover modes.
SUPPORT_KRO_N = 0x01
SUPPORT_KRO_6 = 0x02

# Bit masks for supported LED features.
SUPPORT_LED_INDICATORS = 0x01
SUPPORT_LED_BACKLIGHTING = 0x02
SUPPORT_LED_WS2812 = 0x04

# Bit masks for supported connectivity/transport options.
SUPPORT_NRF24 = 0x01
SUPPORT_I2C = 0x02
SUPPORT_UNIFYING = 0x04
SUPPORT_USB = 0x08
SUPPORT_BT = 0x10

# Bit masks for firmware version flags.
VERSION_IS_STABLE = 0x01
VERSION_RESERVED_1 = 0x02
VERSION_RESERVED_2 = 0x04
VERSION_RESERVED_3 = 0x08

# Bit masks for supported mouse features.
SUPPORT_MOUSE = 0x01
SUPPORT_MOUSE_GESTURE = 0x02
# Internal matrix scanner method ids.
MATRIX_SCANNER_INTERNAL_NONE = 0x00
MATRIX_SCANNER_INTERNAL_FAST_ROW_COL = 0x01
MATRIX_SCANNER_INTERNAL_BASIC_SCAN = 0x02
MATRIX_SCANNER_INTERNAL_HARD_CODED = 0x03
MATRIX_SCANNER_INTERNAL_VIRTUAL = 0x04
MATRIX_SCANNER_INTERNAL_CUSTOM = 0xff
# Scan method name -> scan method id.
INTERNAL_SCAN_METHOD_NAME_TABLE = {
    "none": MATRIX_SCANNER_INTERNAL_NONE,
    "fast_row_col": MATRIX_SCANNER_INTERNAL_FAST_ROW_COL,
    "basic_scan": MATRIX_SCANNER_INTERNAL_BASIC_SCAN,
    "hard_coded": MATRIX_SCANNER_INTERNAL_HARD_CODED,
    "virtual": MATRIX_SCANNER_INTERNAL_VIRTUAL,
    "custom": MATRIX_SCANNER_INTERNAL_CUSTOM,
}
# Reverse lookup (id -> name) built via keyplus.utility.inverse_map.
INTERNAL_SCAN_METHOD_TABLE = inverse_map(INTERNAL_SCAN_METHOD_NAME_TABLE)
VIRTUAL_MAP_TABLE_SIZE = 0x300
| ahtn/keyplus | host-software/keyplus/constants/settings.py | Python | mit | 3,115 |
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from app.views import IndexView, RegisterView, UserProfileView
urlpatterns = [
    # Site index, served by IndexView.
    url(r'^$',
        IndexView.as_view(),
        name='index'),
    # Registration page, served by RegisterView.
    url(r'^register/$',
        RegisterView.as_view(),
        name='register'),
    # Django's built-in login view with a project-specific template.
    url(r'^login/$',
        auth_views.login,
        {'template_name': 'login.djhtml'},
        name='login'),
    # Django's built-in logout view; redirects to the index afterwards.
    url(r'^logout/$',
        auth_views.logout,
        {'next_page': '/'},
        name='logout'),
    # Profile page; the username is captured from the URL (may be empty,
    # since the pattern uses .*).
    url(r'^user/profile/(?P<username>.*)/$',
        UserProfileView.as_view(),
        name='user-profile'),
]
| BackwardSpy/leaderboard | app/urls.py | Python | mit | 645 |
from feature_extraction.pre_processing.filter_precedent import precendent_directory_cleaner
def run(command_list):
    """Entry point of the pre-processing stage.

    Delegates to the precedent-directory cleaner, forwarding *command_list*
    unchanged (presumably CLI-style arguments -- verify against the caller).
    """
    precendent_directory_cleaner.run(command_list)
| Cyberjusticelab/JusticeAI | src/ml_service/feature_extraction/pre_processing/pre_processing_driver.py | Python | mit | 168 |
#
# coding=utf-8
import cmd2_myplugin
from cmd2 import (
cmd2,
)
######
#
# define a class which uses our plugin and some convenience functions
#
######
class MyApp(cmd2_myplugin.MyPluginMixin, cmd2.Cmd):
    """Simple subclass of cmd2.Cmd with our SayMixin plugin included."""
    def __init__(self, *args, **kwargs):
        # Cooperative __init__: the mixin runs before cmd2.Cmd per the MRO.
        super().__init__(*args, **kwargs)
    @cmd2_myplugin.empty_decorator
    def do_empty(self, args):
        # Minimal command whose only purpose is to exercise empty_decorator.
        self.poutput("running the empty command")
#
# You can't use a fixture to instantiate your app if you want to use
# to use the capsys fixture to capture the output. cmd2.Cmd sets
# internal variables to sys.stdout and sys.stderr on initialization
# and then uses those internal variables instead of sys.stdout. It does
# this so you can redirect output from within the app. The capsys fixture
# can't capture the output properly in this scenario.
#
# If you have extensive initialization needs, create a function
# to initialize your cmd2 application.
def init_app():
    """Construct and return a fresh MyApp instance for a single test."""
    return MyApp()
#####
#
# unit tests
#
#####
def test_say(capsys):
    """The mixed-in 'say' command echoes its argument after the hook output."""
    # call our initialization function instead of using a fixture
    app = init_app()
    # run our mixed in command
    app.onecmd_plus_hooks('say hello')
    # use the capsys fixture to retrieve the output on stdout and stderr
    out, err = capsys.readouterr()
    # make our assertions
    assert out == 'in postparsing hook\nhello\n'
    assert not err
def test_decorator(capsys):
    """empty_decorator prints its marker line before the command body runs."""
    # call our initialization function instead of using a fixture
    app = init_app()
    # run one command in the app
    app.onecmd_plus_hooks('empty')
    # use the capsys fixture to retrieve the output on stdout and stderr
    out, err = capsys.readouterr()
    # make our assertions
    assert out == 'in postparsing hook\nin the empty decorator\nrunning the empty command\n'
    assert not err
| python-cmd2/cmd2 | plugins/template/tests/test_myplugin.py | Python | mit | 1,894 |
'''Base module to handle the collection and the output of statistical data.'''
import logging
import time
import multiprocessing as mp
import queue
from collections import Counter
log = logging.getLogger(__name__)
def current_milli_time():
    """Return the current Unix time as an integer number of milliseconds."""
    return int(round(time.time() * 1000))


def is_number(val):
    """Return True if *val* can be converted to a float, False otherwise.

    Also returns False (instead of raising TypeError) for values that do
    not support float conversion at all, e.g. None or a list -- such values
    can appear in the stats mapping and must simply be skipped.
    """
    try:
        float(val)
        return True
    except (ValueError, TypeError):
        return False
class Logstats(object):
    '''Bridges the data in input (collected on the instance) to a generic
    output (`log`, by default).

    Instances act both as a counter (via __getitem__/__setitem__/update)
    and as a callable: calling the instance emits a snapshot of the
    collected values plus per-key `<key>.speed` rates, or forwards the raw
    counters to a parent queue when used as a child (see get_child).
    '''
    def __init__(self, msg=None, emit_func=None, logger=log, level='INFO',
                 timeout=1, queue=None):
        '''Initialize the instance.
        If `emit_func` is defined, `logger` and `level` are ignored.
        Keyword arguments:
        msg -- a string to use to format the stats (by default it outputs a
               list of comma separated values)
        emit_func -- a function to emit the formatted output
                     (default: logging.log)
        logger -- the logger to use to log the formatted output (default:
                  a `log` instance)
        level -- the log level (default: INFO)
        timeout -- stored on the instance; not used by this class itself
        queue -- if given, __call__ pushes the raw counters to this queue
                 instead of emitting (child mode, see get_child)
        '''
        self.stats = Counter()
        self.msg = msg
        self.logger = logger
        self.level = level
        self.old_stats = {}
        self.emit_func = emit_func
        self.last = current_milli_time()
        self.timeout = timeout
        self.queue = queue
        self.main_queue = None
        # getLevelName maps a level name such as 'INFO' to its numeric value.
        if not logger.isEnabledFor(logging.getLevelName(level)):
            logger.warning('Logger is not enabled to log at level {}.'.format(level))
    def __getitem__(self, key):
        """Return the value currently collected for `key`."""
        return self.stats[key]
    def __setitem__(self, key, val):
        """Set the collected value for `key`."""
        self.stats[key] = val
    def update(self, *args, **kwargs):
        """Update the underlying Counter (same signature as Counter.update)."""
        self.stats.update(*args, **kwargs)
    def _get_speed(self, new, old, delta):
        """Return the per-second rate of change; `delta` is in milliseconds."""
        return int(round(float((new - old)) / (delta / 1e3)))
    def _consume_queue(self):
        """Drain counters pushed by child collectors into our own stats."""
        if self.main_queue:
            while True:
                try:
                    self.stats.update(self.main_queue.get_nowait())
                except queue.Empty:
                    return
    def get_stats(self, delta):
        """Return a stats snapshot augmented with `<key>.speed` entries.

        `delta` is the elapsed time in milliseconds since the last snapshot.
        If `self.stats` is callable it is invoked with `delta` to produce
        the values; otherwise the internal counter is copied.
        """
        self._consume_queue()
        stats = self.stats
        if hasattr(self.stats, '__call__'):
            stats = self.stats(delta)
        else:
            stats = stats.copy()
        # Compute a per-second rate for every numeric value.
        speed = dict(('{}.speed'.format(k),
                      self._get_speed(stats[k],
                                      self.old_stats.get(k, 0),
                                      delta))
                     for k in stats if is_number(stats[k]))
        self.old_stats = stats
        stats.update(speed)
        return stats
    def get_child(self):
        """Return a child Logstats that reports back through a shared queue.

        The multiprocessing queue is created lazily on first use; child
        instances push their counters to it and this instance drains it in
        _consume_queue().
        """
        if not self.main_queue:
            self.main_queue = mp.Queue()
        return Logstats(queue=self.main_queue)
    def format_msg(self, stats):
        """Format `stats` with `self.msg`, or as sorted 'key: value' pairs."""
        if self.msg:
            msg = self.msg.format(**stats)
        else:
            msg = ', '.join('{}: {}'.format(k, stats[k])
                            for k in sorted(stats))
        return msg
    def emit(self, msg):
        """Send `msg` via `emit_func` when set, else log at `self.level`."""
        if self.emit_func:
            self.emit_func(msg)
        else:
            self.logger.log(getattr(logging, self.level), msg)
    def __call__(self):
        """Emit the current snapshot, or enqueue it when in child mode."""
        if self.queue:
            # Child mode: hand the counters to the parent and start fresh.
            self.queue.put(self.stats)
            self.stats = Counter()
        else:
            delta = current_milli_time() - self.last
            stats = self.get_stats(delta)
            if stats:
                self.emit(self.format_msg(stats))
            self.last = current_milli_time()
| vrde/logstats | logstats/base.py | Python | mit | 3,761 |
from string import Template
import re
import os
import sys
import time
import json
import math
import os
import subprocess
import csv
def saveResults(resultsList, json_fname, csv_fname):
    """Persist a list of result dicts to both a JSON and a CSV file.

    Parameters
    ----------
    resultsList : list of dict
        Result records; all dicts are expected to share the keys of the
        first record (those keys become the CSV header).
    json_fname : str
        Path of the JSON file to write (pretty-printed, sorted keys).
    csv_fname : str
        Path of the CSV file to write.
    """
    # Debug output of what is about to be written.
    print(resultsList)
    print(json.dumps(resultsList, indent=4, sort_keys=True))
    with open(json_fname, 'w') as outfile:
        json.dump(resultsList, outfile, indent=4, sort_keys=True)
    if not resultsList:
        # No records: there is no header to derive, write an empty CSV
        # instead of crashing on resultsList[0].
        open(csv_fname, 'w').close()
        return
    keys = resultsList[0].keys()
    # newline='' is required by the csv module to avoid blank lines on Windows.
    with open(csv_fname, 'w', newline='') as output_file:
        dict_writer = csv.DictWriter(output_file, keys)
        dict_writer.writeheader()
        dict_writer.writerows(resultsList)
| ECP-CANDLE/Supervisor | workflows/async-search/python/utils.py | Python | mit | 594 |
from pact_test.runners import pact_tests_runner
def test_consumer_tests(mocker):
    """verify(verify_consumers=True) runs the consumer tests exactly once."""
    mocker.spy(pact_tests_runner, 'run_consumer_tests')
    pact_tests_runner.verify(verify_consumers=True)
    assert pact_tests_runner.run_consumer_tests.call_count == 1
def test_provider_tests(mocker):
    """verify(verify_providers=True) runs the provider tests exactly once."""
    mocker.spy(pact_tests_runner, 'run_provider_tests')
    pact_tests_runner.verify(verify_providers=True)
    assert pact_tests_runner.run_provider_tests.call_count == 1
def test_default_setup(mocker):
    """verify() with no flags runs neither consumer nor provider tests."""
    mocker.spy(pact_tests_runner, 'run_consumer_tests')
    mocker.spy(pact_tests_runner, 'run_provider_tests')
    pact_tests_runner.verify()
    assert pact_tests_runner.run_consumer_tests.call_count == 0
    assert pact_tests_runner.run_provider_tests.call_count == 0
| Kalimaha/pact-test | tests/runners/pact_tests_runner.py | Python | mit | 767 |
from slackminion.bot import Bot
from slackminion.webserver import Webserver
class DummyBot(Bot):
    """Bot test double: no-op start/send_message plus a local Webserver."""
    def __init__(self, *args, **kwargs):
        # The first positional Bot argument is stubbed with None
        # (presumably the config/connection object -- verify Bot.__init__).
        super(DummyBot, self).__init__(None, *args, **kwargs)
        # Replace side-effecting methods with no-ops so tests stay offline.
        setattr(self, 'start', lambda: None)
        setattr(self, 'send_message', lambda x, y, z, a: None)
        self.webserver = Webserver('127.0.0.1', '9999')
| arcticfoxnv/slackminion | slackminion/utils/test_helpers/bot.py | Python | mit | 366 |
# helloworld.py
#
# familiar test program, demonstrating py2js conversion
def helloworld(suffix):
    # Python 2 print statement: this file is deliberately py2 source,
    # serving as input for the py2js converter demo.
    print "hello world"+suffix
# Run the demo once at import/conversion time.
helloworld("!")
| treeform/pystorm | examples/helloworld.py | Python | mit | 147 |
#!/usr/bin/python
import numpy as np
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('inset', choices=['test', 'val', 'train', 'all'])
parser.add_argument('recog', choices=['clean', 'reverb', 'noisy', 'retrain', 'all'])
phase_group = parser.add_mutually_exclusive_group(required = False)
phase_group.add_argument("-gen", "--just-generate",
help="Only generate features",
action="store_true")
phase_group.add_argument("-tst", "--just-test",
help="Only generate features",
action="store_true")
parser.add_argument("-cd", "--cuda",
help="Enable cuda",
action="store_true")
parser.add_argument("testid", help="String to generate necessary folders etc.") # can potentially delete data
parser.add_argument("netname", help="Input autosave file")
parser.add_argument("-del", "--delete",
help="Delete generated features to save space",
action="store_true")
args = parser.parse_args()
#print (args)
# create and change to test directory
rootdir = "/mnt/data/Fer/diplomski/training_currennt/speech_autoencoding_chime/test/" + args.testid + "/"
import shutil as sh
import os
if not os.path.exists(rootdir):
os.makedirs(rootdir)
os.chdir(rootdir)
# setup logging
import logging
logging.basicConfig(filename='chime_scorer.log', format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG)
logging.info("=" * 20 + "Program started" + "=" * 20)
logging.info("Arguments: " + str(args))
# copy selected network file to test directory
import subprocess as sb
netname = os.path.basename(args.netname)
#netname = netname.split('.')[0] + ".json"
print(netname, args.testid)
try:
#print(["cp", args.netname, rootdir])
tmp_output = sb.check_output(["cp", args.netname, rootdir], stderr=sb.STDOUT, universal_newlines=True)
except sb.CalledProcessError as exc:
logging.error("Invalid netname. returncode: " + str(exc.returncode) + " output: " + str(exc.output))
print("Invalid netname")
exit()
else:
logging.info("Copy netname: \n{}\n".format(tmp_output))
# feature generate phase
testfeat = "output_test/"
valfeat = "output_val/"
trainfeat = "output_train/"
testfeatnorm = "output_norm_test/"
valfeatnorm = "output_norm_val/"
trainfeatnorm = "output_norm_train/"
testnc = "../../test_reverb_norm.nc"
valnc = "../../val_reverb_norm.nc"
trainnc = "../../train_reverb_norm.nc"
# for dubugging
#testnc = "../../dev2.nc"
#valnc = trainnc = "../../train2.nc"
for f in [testfeat, valfeat, trainfeat]:
if not os.path.exists(rootdir + f):
os.makedirs(rootdir + f)
logging.info("Created " + rootdir + f)
for f in [testnc, valnc, trainnc]:
if not os.path.isfile(rootdir + f):
logging.error("File doesn't exist: " + rootdir + f)
print("File doesn't exist: " + rootdir + f)
exit()
if args.delete:
for f in [testfeat, valfeat, trainfeat, testfeatnorm, valfeatnorm, trainfeatnorm]:
if os.path.exists(f):
sh.rmtree(f)
logging.info("Deleted temporary feature folders")
exit(0)
def clean_folder(foldername):
    """Recreate *foldername* as an empty directory, deleting any contents."""
    if os.path.exists(foldername):
        sh.rmtree(foldername)
    os.makedirs(foldername)
# Reference copy of a currennt config for the forward pass:
#network = autosave_run7_epoch138.autosave
#train = false
#input_noise_sigma = 0
#parallel_sequences = 2
#ff_output_format = htk
#ff_output_kind = 838
#feature_period = 10
#cuda = false
#revert_std = true
#ff_input_file = ../../test_reverb_norm.nc
#ff_output_file = ./output-test-138/
#print(os.path.basename(args.netname))
# With CUDA we can afford far more parallel sequences.
if args.cuda:
    tmp_cuda = "true"
    tmp_parallel = "100"
else:
    tmp_cuda = "false"
    tmp_parallel = "3"
# Base currennt invocation; per-dataset input/output files are appended later.
command_template = ["currennt", "--network", "./" + netname, "--train","false",
    "--input_noise_sigma", "0", "--parallel_sequences", tmp_parallel, "--ff_output_format", "htk", "--ff_output_kind", "838",
    "--feature_period", "10", "--cuda", tmp_cuda, "--revert_std", "true"]
def _run_logged(command, err_desc, ok_desc):
    """Run *command* via subprocess, logging it first.

    On failure logs and prints *err_desc* (with return code / output in the
    log) and exits the program; on success logs *ok_desc* with the output.
    All message strings match the original per-function wrappers exactly.
    """
    logging.info("Command: " + str(command))
    try:
        tmp_output = sb.check_output(command, stderr=sb.STDOUT, universal_newlines=True)
    except sb.CalledProcessError as exc:
        logging.error(err_desc + " . returncode: " + str(exc.returncode) + " output: \n" + str(exc.output))
        print(err_desc + " ")
        exit()
    else:
        logging.info(ok_desc + " : \n{}\n".format(tmp_output))

def generate_features(feat, nc):
    """Run the currennt forward pass on *nc*, writing HTK features to *feat*."""
    _run_logged(command_template + ["--ff_input_file", nc, "--ff_output_file", "./" + feat],
                "Error generating features " + feat,
                "Generated features " + feat)

rename_template = ["rename2mfcc.sh"]
def do_rename(feat):
    """Rename the generated output files in *feat* to .mfcc."""
    _run_logged(rename_template + [feat],
                "Error renaming features " + feat,
                "Renamed features " + feat)

compute_template = ["compute_output_mean_stddev.py"]
def compute_means(feat, saved_means):
    """Compute per-feature means/stddevs of *feat* into *saved_means* (JSON)."""
    _run_logged(compute_template + [feat, saved_means],
                "Error computing means and stddevs of features " + feat,
                "Computing means and stddevs of features " + feat)

normalize_template = ["normalizer.py"]
def do_normalize(feat, saved_means, outfeat):
    """Normalize *feat* using *saved_means*, writing the result to *outfeat*."""
    _run_logged(normalize_template + [feat, saved_means, outfeat],
                "Error normalizing features " + feat,
                "Normalized features " + feat)
def do_feature_work(feat, outfeat, nc, saved_means):
    """Full feature pipeline for one dataset split: recreate the output
    folder, run the network on *nc*, rename outputs to .mfcc, compute
    means/stddevs into *saved_means*, then write normalized features to
    *outfeat*."""
    clean_folder(rootdir + feat)
    generate_features(feat, nc)
    do_rename(feat)
    compute_means(feat, saved_means)
    #sb.call(["htk_mfcc_visualize.py", feat + "0dB/10_bgakzn.mfcc"])
    do_normalize(feat, saved_means, outfeat)
    #sb.call(["htk_mfcc_visualize.py", outfeat + "0dB/10_bgakzn.mfcc"])
if not args.just_test:
    logging.info("Started generating features")
    # (split name, raw folder, normalized folder, input .nc, means file);
    # replaces three copy-pasted if-branches that differed only in data.
    feature_jobs = [
        ("test", testfeat, testfeatnorm, testnc, "./test_means.json"),
        ("train", trainfeat, trainfeatnorm, trainnc, "./train_means.json"),
        ("val", valfeat, valfeatnorm, valnc, "./val_means.json"),
    ]
    for setname, feat, outfeat, nc, saved_means in feature_jobs:
        if args.inset == setname or args.inset == "all":
            do_feature_work(feat, outfeat, nc, saved_means)
    logging.info("Finished generating features")
# feature score phase
# Root of the CHiME2 evaluation tool scripts (HMM training / recognition).
evalroot = "/mnt/data/Fer/diplomski/CHiME2/eval_tools_grid/"
def do_retrain(feat, clasif):
    """Retrain the HMM recognizer *clasif* on the features in *feat*.

    Temporarily chdirs into ``evalroot`` because the training scripts use
    relative paths, then returns to ``rootdir``.
    """
    logging.info("Started retraining " + feat + " " + clasif)
    # need to chdir to /mnt/data/Fer/diplomski/CHiME2/eval_tools_grid
    os.chdir(evalroot)
    # ./do_train_all.sh processed chime2-grid/train/processed_isolated
    # TODO: copy the features into the expected directory?
    #command = ["./do_train_all.sh", clasif, rootdir + feat]
    command = ["./scripts/do_train.sh", clasif, "mfcc", rootdir + feat]
    try:
        tmp_output = sb.check_output(command, stderr=sb.STDOUT, universal_newlines=True)
    except sb.CalledProcessError as exc:
        logging.error("Error retraining a model " + clasif + " . returncode: " + str(exc.returncode) + " output: \n" + str(exc.output))
        print("Error retraining a model " + clasif + " ")
        exit()
    else:
        logging.info("Retrained model " + clasif + " : \n{}\n".format(tmp_output))
    os.chdir(rootdir)
    logging.info("Finished retraining " + feat + " " + clasif)
from save_score import save_score, parse_result
def do_score(dataset, classifier):
    """Recognize and score one *dataset* with *classifier*.

    *dataset* is a (name, path) tuple. Runs the recognition and scoring
    shell scripts from ``evalroot`` (chdir'ing there because the scripts
    use relative paths) and returns the parsed scoring result.
    """
    dsname = dataset[0]
    dspath = dataset[1]
    # Unique id for this run's result folder.
    scoreid = args.testid + "_" + dsname
    logging.debug("Do score for: " + str(dataset) + " " + classifier + " " + scoreid)
    # ./do_recog_all.sh classifier scoreid dspath
    os.chdir(evalroot)
    command = ["./do_recog_all.sh", classifier, scoreid, rootdir + dspath]
    try:
        logging.debug(str(command))
        tmp_output = sb.check_output(command, stderr=sb.STDOUT, universal_newlines=True)
    except sb.CalledProcessError as exc:
        logging.error("Error recognizing " + str(dataset) + " " + classifier + " " + scoreid + " . returncode: " + str(exc.returncode) + " output: \n" + str(exc.output))
        print("Error recognizing " + str(dataset) + " " + classifier + " " + scoreid)
        exit()
    else:
        logging.info("Succesfully recognized " + str(dataset) + " " + classifier + " " + scoreid + " : \n{}\n".format(tmp_output))
    command2 = ["./do_score_all.sh", "results/" + scoreid + "_" + classifier]
    # ./do_score_all.sh scoreid
    try:
        logging.debug(str(command2))
        tmp_output = sb.check_output(command2, stderr=sb.STDOUT, universal_newlines=True)
    except sb.CalledProcessError as exc:
        logging.error("Error scoring " + str(dataset) + " " + classifier + " " + scoreid + " . returncode: " + str(exc.returncode) + " output: \n" + str(exc.output))
        print("Error scoring " + str(dataset) + " " + classifier + " " + scoreid)
        exit()
    else:
        logging.info("Succesfully scored " + str(dataset) + " " + classifier + " " + scoreid + " : \n{}\n".format(tmp_output))
    os.chdir(rootdir)
    # Parse the scoring script's stdout into a result structure.
    return parse_result(tmp_output)
if not args.just_generate:
    logging.info("Started scoring features")
    # name of retrained hmm classifier
    retrid = "processed" + "_" + args.testid
    classifiers = []
    # not by default included in "all"
    if args.recog == "retrain": #or args.recog == "all":
        do_retrain(trainfeatnorm, retrid)
        classifiers.append(retrid)
    # 'clean', 'reverb', 'noisy', 'retrain',
    if args.recog == "clean" or args.recog == "all":
        classifiers.append("clean")
    if args.recog == "reverb" or args.recog == "all":
        classifiers.append("reverberated")
    if args.recog == "noisy" or args.recog == "all":
        classifiers.append("noisy")
    logging.info("Selected recognizer models: " + str(classifiers))
    datasets = []
    if args.inset == "test" or args.inset == "all":
        datasets.append(("test", testfeatnorm))
    # recognizing isn't implemented for training set, so it isn't included by default
    if args.inset == "train": #or args.inset == "all":
        datasets.append(("train", trainfeatnorm))
    if args.inset == "val" or args.inset == "all":
        datasets.append(("val", valfeatnorm))
    logging.info("Selected datasets: " + str(datasets))
    # Score every (dataset, classifier) combination and persist the results.
    results = {}
    for ds in datasets:
        for cl in classifiers:
            results[(ds,cl)] = do_score(ds, cl)
    save_score(args.testid, netname, results)
    logging.info("Results obtained " + str(results))
    logging.info("Finished scoring features")
# finish: save results to .ods file or something
| sthenc/nc_packer | tools/chime_scorer.py | Python | mit | 13,171 |
"""pidaemon.py
Usage:
pidaemon.py [--brightness=<b>] [--sleep=<s>] [--interval=<s>] [--wait=<s>]
pidaemon.py (-h | --help)
pidaemon.py --version
Options:
-h --help Show this screen.
--version Show version
--brightness=<b> Default brightness level 1-255 [default: 2]
--interval=<s> Default interval in seconds between each frame in jobs [default: 0.1]
--sleep=<s> Default number of seconds to pause after each job [default: 0]
--wait=<s> Time between each iteration when polling for job on an empty queue. [default: 5]
"""
import sys
import signal
import time
from docopt import docopt
from collections import defaultdict
import settings
from piqueue import piqueue
class PiDaemon():
    """Long-running daemon: polls the persistent piqueue job queue and runs
    each queued display job in turn, forever."""

    def __init__(self, opts):
        self.running = None  # job instance currently executing, if any
        self.options = self.parse_options(opts)
        self.session = piqueue.Session()
        self.setup_signal_handlers()

    def parse_options(self, opts):
        """Convert docopt string arguments into typed option values.

        Returns a defaultdict so that unknown option keys read as None.
        """
        options = defaultdict(lambda: None, {
            'brightness': int(opts['--brightness']),
            'sleep': float(opts['--sleep']),
            'interval': float(opts['--interval']),
            'wait': float(opts['--wait']),
        })
        return options

    def run(self):
        """Main loop: run the oldest queued job, re-enqueue it if marked
        'keep', then delete it; when the queue is empty, poll after a wait."""
        while True:
            job = self.next_job()
            if job is not None:
                self.run_job(job)
                # .get() instead of ['keep'] so jobs without a 'keep' option
                # are treated as one-shot instead of crashing with KeyError.
                if job.options.get('keep') == True:
                    self.add_job(job)
                self.delete_job(job)
            else:
                time.sleep(self.options['wait'])

    def run_job(self, job):
        """Instantiate and execute one job, then let it clean up."""
        self.running = job.job_instance(self.options.copy())
        self.running.run()
        self.running.sleep()
        self.running.cleanup()
        self.running = None

    def queue(self):
        """Query for all queued jobs, oldest first."""
        return self.session.query(piqueue.Job).order_by(piqueue.Job.date_created)

    def next_job(self):
        """Return the oldest queued job, or None when the queue is empty."""
        return self.queue().first()

    def add_job(self, old_job):
        """Re-enqueue a copy of *old_job* (gets a fresh creation timestamp)."""
        new_job = piqueue.Job(old_job.job_name, old_job.options)
        self.session.add(new_job)
        self.session.commit()

    def delete_job(self, job):
        """Remove *job* from the persistent queue."""
        self.session.delete(job)
        self.session.commit()

    def setup_signal_handlers(self):
        # Make sure the display job is cleaned up on Ctrl-C or SIGTERM.
        signal.signal(signal.SIGINT, self.cleanup)
        signal.signal(signal.SIGTERM, self.cleanup)

    def cleanup(self, signum, frame):
        """Signal handler: clean up any running job and exit."""
        if self.running is not None:
            self.running.cleanup()
        sys.exit(-1)
if __name__ == '__main__':
    # Parse CLI arguments (see module docstring) and run the daemon loop.
    opts = docopt(__doc__, version='PiDaemon v1.0')
    PiDaemon(opts).run()
| ollej/piapi | pidaemon.py | Python | mit | 2,568 |
import uuid
import factory.fuzzy
from django.conf import settings
from .. import models
from utils.factories import FuzzyMoney
class UserFactory(factory.django.DjangoModelFactory):
    """Factory for the project's user model.

    Uses ``factory.django.DjangoModelFactory`` (same base as the sibling
    factories below; ``factory.DjangoModelFactory`` is the deprecated
    import path) and creates users through the manager's ``create_user``
    so that the password is properly hashed.
    """

    class Meta:
        model = settings.AUTH_USER_MODEL

    username = factory.Sequence('terminator{0}'.format)
    email = factory.Sequence('terminator{0}@skynet.com'.format)
    password = 'hunter2'
    is_superuser = False
    is_staff = False

    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        # Route creation through create_user() instead of the default
        # constructor so the password is hashed.
        manager = cls._get_manager(model_class)
        return manager.create_user(*args, **kwargs)
class AccountFactory(factory.django.DjangoModelFactory):
    """Factory for Account, owned by a freshly created user."""

    class Meta:
        model = models.Account

    user = factory.SubFactory(UserFactory)


class CardFactory(factory.django.DjangoModelFactory):
    """Factory for Card with a random 32-bit card number."""

    class Meta:
        model = models.Card

    account = factory.SubFactory(AccountFactory)
    # Random unsigned 32-bit number.
    number = factory.fuzzy.FuzzyInteger(0, (1 << 32) - 1)


class PurchaseFactory(factory.django.DjangoModelFactory):
    """Factory for Purchase with a small random amount."""

    class Meta:
        model = models.Purchase

    account = factory.SubFactory(AccountFactory)
    amount = factory.fuzzy.FuzzyInteger(0, 1337)


class PurchaseItemFactory(factory.django.DjangoModelFactory):
    """Factory for a single-quantity PurchaseItem with random product/amount."""

    class Meta:
        model = models.PurchaseItem

    purchase = factory.SubFactory(PurchaseFactory)
    product_id = factory.fuzzy.FuzzyAttribute(uuid.uuid4)
    qty = 1
    amount = FuzzyMoney(0, 1000)


class PurchaseStatusFactory(factory.django.DjangoModelFactory):
    """Factory for PurchaseStatus attached to a new purchase."""

    class Meta:
        model = models.PurchaseStatus

    purchase = factory.SubFactory(PurchaseFactory)
from __future__ import print_function, unicode_literals
import os
import shutil
import zipfile
import datetime
import tempfile
import subprocess
from copy import deepcopy
import pytest
import numpy as np
from numpy.testing import assert_almost_equal
from sklearn.dummy import DummyClassifier
from destimator import DescribedEstimator, utils
@pytest.fixture
def features():
    """10 samples x 3 features, all zeros."""
    return np.zeros([10, 3])


@pytest.fixture
def labels():
    """Binary labels: first half 0.0, second half 1.0."""
    labels = np.zeros(10)
    labels[5:] = 1.0
    return labels


@pytest.fixture
def clf(features, labels):
    """Dummy classifier that always predicts the constant class 0."""
    clf = DummyClassifier(strategy='constant', constant=0.0)
    clf.fit(features, labels)
    return clf


@pytest.fixture
def clf_described(clf, features, labels, feature_names):
    """DescribedEstimator wrapping ``clf``; train and test data are identical."""
    return DescribedEstimator(clf, features, labels, features, labels, feature_names)


@pytest.fixture
def feature_names():
    return ['one', 'two', 'three']


@pytest.fixture
def metadata_v1():
    """Minimal metadata dict in schema version 1 (no performance scores)."""
    return {
        'metadata_version': 1,
        'created_at': '2016-01-01-00-00-00',
        'feature_names': ['f0', 'f1', 'f2'],
        'vcs_hash': 'deadbeef',
        'distribution_info': {
            'python': 3.5,
            'packages': [],
        },
    }


@pytest.fixture
def metadata_v2():
    """Metadata dict in schema version 2, which adds performance_scores."""
    return {
        'metadata_version': 2,
        'created_at': '2016-02-01-00-00-00',
        'feature_names': ['f0', 'f1', 'f2'],
        'vcs_hash': 'deadbeef',
        'distribution_info': {
            'python': 3.5,
            'packages': [],
        },
        'performance_scores': {
            'precision': [0.7],
            'recall': [0.8],
            'fscore': [0.9],
            'support': [100],
            'roc_auc': 0.6,
            'log_loss': 0.5,
        }
    }
class TestDescribedEstimator(object):
    """Tests for DescribedEstimator: construction, equality, metadata,
    metric accessors and zip (de)serialization."""

    def test_init(self, clf_described):
        assert clf_described.n_training_samples_ == 10
        assert clf_described.n_features_ == 3

    def test_init_error(self, clf, features, labels, feature_names):
        with pytest.raises(ValueError):
            wrong_labels = np.zeros([9, 1])
            DescribedEstimator(clf, features, wrong_labels, features, labels, feature_names)
        with pytest.raises(ValueError):
            wrong_feature_names = ['']
            DescribedEstimator(clf, features, labels, features, labels, wrong_feature_names)

    def test_eq(self, clf, features, labels, feature_names, metadata_v1, metadata_v2):
        d1 = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1)
        d1b = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1)
        assert d1 == d1b
        d2 = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v2)
        assert d1 != d2
        # A shallow copy is enough here: only a top-level key is changed.
        metadata_v1a = dict(metadata_v1)
        metadata_v1a['metadata_version'] = 3
        d1a = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1a)
        assert d1 != d1a

    def test_from_file(self, clf_described):
        save_dir = tempfile.mkdtemp()
        try:
            file_path = clf_described.save(save_dir)
            destimator = DescribedEstimator.from_file(file_path)
            assert destimator == clf_described
        finally:
            shutil.rmtree(save_dir)

    def test_is_compatible(self, clf, clf_described, features, labels):
        compatible = DescribedEstimator(clf, features, labels, features, labels, ['one', 'two', 'three'])
        assert clf_described.is_compatible(compatible)
        incompatible = DescribedEstimator(clf, features, labels, features, labels, ['one', 'two', 'boom'])
        assert not clf_described.is_compatible(incompatible)

    def test_metadata(self, clf, features, labels, feature_names):
        clf_described = DescribedEstimator(clf, features, labels, features, labels, feature_names)
        d = clf_described.metadata
        assert d['feature_names'] == feature_names
        # assert type(d['metadata_version']) == str
        assert type(datetime.datetime.strptime(d['created_at'], '%Y-%m-%d-%H-%M-%S')) == datetime.datetime
        # assert type(d['vcs_hash']) == str
        assert type(d['distribution_info']) == dict
        # assert type(d['distribution_info']['python']) == str
        assert type(d['distribution_info']['packages']) == list
        assert type(d['performance_scores']['precision']) == list
        assert type(d['performance_scores']['precision'][0]) == float
        assert type(d['performance_scores']['recall']) == list
        assert type(d['performance_scores']['recall'][0]) == float
        assert type(d['performance_scores']['fscore']) == list
        assert type(d['performance_scores']['fscore'][0]) == float
        assert type(d['performance_scores']['support']) == list
        assert type(d['performance_scores']['support'][0]) == int
        assert type(d['performance_scores']['roc_auc']) == float
        assert type(d['performance_scores']['log_loss']) == float

    def test_get_metric(self, clf_described):
        assert clf_described.recall == [1.0, 0.0]
        assert clf_described.roc_auc == 0.5
        # log_loss use epsilon 1e-15, so -log(1e-15) / 2 approximately equal 20
        assert_almost_equal(clf_described.log_loss, 17.269, decimal=3)

    def test_save_classifier(self, clf_described):
        save_dir = tempfile.mkdtemp()
        try:
            saved_name = clf_described.save(save_dir)
            assert os.path.dirname(saved_name) == save_dir
            assert os.path.isfile(saved_name)
            assert saved_name.endswith('.zip')
            zf = zipfile.ZipFile(saved_name)
            files_present = zf.namelist()
            expected_files = [
                'model.bin', 'features_train.bin', 'labels_train.bin',
                'features_test.bin', 'labels_test.bin', 'metadata.json',
            ]
            # could use a set, but this way errors are easier to read
            for f in expected_files:
                assert f in files_present
        finally:
            shutil.rmtree(save_dir)

    def test_save_classifier_with_filename(self, clf_described):
        save_dir = tempfile.mkdtemp()
        try:
            saved_name = clf_described.save(save_dir, filename='boom.pkl')
            assert os.path.basename(saved_name) == 'boom.pkl.zip'
            assert os.path.isfile(saved_name)
        finally:
            shutil.rmtree(save_dir)

    def test_save_classifier_nonexistent_path(self, clf_described):
        save_dir = tempfile.mkdtemp()
        try:
            saved_name = clf_described.save(os.path.join(save_dir, 'nope'))
            # BUG FIX: this comparison was missing its ``assert`` (it was a
            # no-op expression); the save target is the 'nope' subfolder
            # that save() creates, not save_dir itself.
            assert os.path.dirname(saved_name) == os.path.join(save_dir, 'nope')
            assert os.path.isfile(saved_name)
        finally:
            shutil.rmtree(save_dir)
class TestGetCurrentGitHash(object):
    """Tests for utils.get_current_vcs_hash with subprocess stubbed out."""

    def test_get_current_vcs_hash(self, monkeypatch):
        """Returns the decoded hash when git succeeds."""
        def fake_check_output(*args, **kwargs):
            return b'thisisagithash'
        monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
        assert utils.get_current_vcs_hash() == 'thisisagithash'

    def test_get_current_vcs_hash_no_git(self, monkeypatch):
        """Returns '' when the git binary is missing (OSError)."""
        def fake_check_output(*args, **kwargs):
            raise OSError()
        monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
        assert utils.get_current_vcs_hash() == ''

    def test_get_current_vcs_hash_git_error(self, monkeypatch):
        """Returns '' when git exits with an error status."""
        def fake_check_output(*args, **kwargs):
            raise subprocess.CalledProcessError(0, '', '')
        monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
        assert utils.get_current_vcs_hash() == ''
from core import db
from auth import models
# NOTE(review): this import re-binds the name ``models``, shadowing the
# auth import above. Both imports are presumably kept for their side
# effect of registering model classes with the db metadata before
# create_all() -- confirm.
from admin import models
# Create all registered tables.
db.create_all()
"""
WSGI config for admin_readonly_model project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "admin_readonly_model.settings")
application = get_wsgi_application()
| mozillazg/django-simple-projects | projects/admin_readonly_model/admin_readonly_model/wsgi.py | Python | mit | 417 |
from django import forms
class ExampleForm(forms.Form):
    """Demo form with a single CharField (required by default, per Django),
    whose placeholder tells the user it must not be blank."""

    non_blank_field = forms.CharField(max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': "Must not be blank!",
        }),
    )
"""
This package contains a number of utilities that are used inside of openmdao.
It does not depend on any other openmdao package.
"""
| DailyActie/Surrogate-Model | 01-codes/OpenMDAO-Framework-dev/openmdao.util/src/openmdao/util/__init__.py | Python | mit | 137 |
import json
import requests
# TODO: Complete methods.
class AttributeType():
    """HTTP client for the attribute-types REST endpoint."""

    # Service setup; override via configure().
    config = {
        'schema': 'http',
        'host': 'localhost',
        'port': '9903',
        'endpoint': 'api/v1/attribute_types'
    }

    @classmethod
    def base_url(cls):
        """Form the base url for the service."""
        return "{schema}://{host}:{port}/{endpoint}".format(**cls.config)

    @classmethod
    def configure(cls, options={}):
        """Merge *options* into the (class-level) service configuration."""
        cls.config.update(options)

    @staticmethod
    def _json_or_none(response):
        """Return the decoded JSON body for an HTTP 200 response, else None.

        Factors out the status-check boilerplate repeated by every read/write
        method below.
        """
        if response.status_code == 200:
            return response.json()
        return None

    @classmethod
    def get_all(cls):
        """Return all attribute types."""
        return cls._json_or_none(requests.get(cls.base_url()))

    @classmethod
    def get(cls, code):
        """Return the attribute type identified by *code*."""
        return cls._json_or_none(requests.get(cls.base_url() + '/' + code))

    @classmethod
    def create(cls, attrs):
        """Create an attribute type with the attributes
        passed in attrs dict."""
        return cls._json_or_none(requests.post(cls.base_url(), data=json.dumps(attrs)))

    @classmethod
    def update(cls, code, attrs):
        """Update the attribute type identified by code with attrs dict."""
        return cls._json_or_none(requests.put(cls.base_url() + '/' + code, data=json.dumps(attrs)))

    @classmethod
    def delete(cls, code):
        """Delete the attribute type identified by code; True on HTTP 204."""
        r = requests.delete(cls.base_url() + '/' + code)
        return r.status_code == 204

    @classmethod
    def delete_all(cls):
        """Delete all attribute types (for all kind of resources)."""
        r = requests.delete(cls.base_url())
        return r.status_code == 204

    @classmethod
    def bulk_load(cls, json_string):
        """Bulk loads an array of attribute types; returns the raw response."""
        h = {
            'Content-Type': 'application/json'
        }
        return requests.post(cls.base_url(), data=json_string, headers=h)
class Attribute():
    """HTTP client for the attributes REST endpoint."""

    # Service setup; override via configure().
    config = {
        'schema': 'http',
        'host': 'localhost',
        'port': '9903',
        'endpoint': 'api/v1/attributes'
    }

    @classmethod
    def base_url(cls):
        """Form the base url for the service."""
        return "{schema}://{host}:{port}/{endpoint}".format(**cls.config)

    @classmethod
    def configure(cls, options={}):
        """Merge *options* into the (class-level) service configuration."""
        cls.config.update(options)

    @staticmethod
    def _json_or_none(response):
        """Return the decoded JSON body for an HTTP 200 response, else None."""
        if response.status_code == 200:
            return response.json()
        return None

    @classmethod
    def get_all(cls):
        """Return all attributes defined."""
        return cls._json_or_none(requests.get(cls.base_url()))

    @classmethod
    def get_all_for(cls, resource_type):
        """Return all attributes for the resource type."""
        return cls._json_or_none(requests.get(cls.base_url() + '/for/' + resource_type))

    # @classmethod
    # def create(cls, attrs):
    #     """Create an classroom with the attributes passed in attrs dict."""
    #     r = requests.post(cls.base_url(), data=json.dumps(attrs))
    #     if r.status_code == 200:
    #         return r.json()
    #     else:
    #         return None

    # @classmethod
    # def delete(cls, code):
    #     """Delete the classroom identified by code."""
    #     r = requests.delete(cls.base_url() + '/' + code)
    #     return r.status_code == 204

    @classmethod
    def delete_all(cls):
        """Delete all attributes; True on HTTP 204.

        (The original docstring said "instructors" -- a copy-paste error.)
        """
        r = requests.delete(cls.base_url())
        return r.status_code == 204

    @classmethod
    def bulk_load(cls, json_string):
        """Bulk loads an array of attributes; returns the raw response."""
        h = {
            'Content-Type': 'application/json'
        }
        return requests.post(cls.base_url(), data=json_string, headers=h)
# asyncio version
# The MIT License (MIT)
#
# Copyright (c) 2016, 2017 Robert Hammelrath (basic driver)
# 2016 Peter Hinch (asyncio extension)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Class supporting the resisitve touchpad of TFT LC-displays
#
import pyb, stm
from machine import SPI, Pin
# define constants
#
# Command bytes sent to the touch controller by touch_talk() below.
T_GETX = const(0xd0) ## 12 bit resolution
T_GETY = const(0x90) ## 12 bit resolution
T_GETZ1 = const(0xb8) ## 8 bit resolution
T_GETZ2 = const(0xc8) ## 8 bit resolution
#
# Plausibility limits used by raw_touch() to reject non-touch readings.
X_LOW = const(10) ## lowest reasonable X value from the touchpad
Y_HIGH = const(4090) ## highest reasonable Y value
class TOUCH:
    """Driver for the XPT2046 resistive touch controller, supporting both
    polled (synchronous) and uasyncio-based (asynchronous) sampling."""
    #
    # Init just sets the PIN's to In / out as required
    # async: set True if asynchronous operation intended
    # confidence: confidence level - number of consecutive touches with a margin smaller than the given level
    #             which the function will sample until it accepts it as a valid touch
    # margin: Distance from mean centre at which touches are considered at the same position
    # delay: Delay between samples in ms. (n/a if asynchronous)
    #
    DEFAULT_CAL = (-3917, -0.127, -3923, -0.1267, -3799, -0.07572, -3738, -0.07814)

    def __init__(self, controller="XPT2046", asyn=False, *, confidence=5, margin=50, delay=10, calibration=None, spi=None):
        # NOTE(review): `controller` is currently unused; only the XPT2046
        # protocol is implemented.
        if spi is None:
            self.spi = SPI(-1, baudrate=1000000, sck=Pin("X12"), mosi=Pin("X11"), miso=Pin("Y2"))
        else:
            self.spi = spi
        self.recv = bytearray(3)
        self.xmit = bytearray(3)
        # set default values
        self.ready = False
        self.touched = False
        self.x = 0
        self.y = 0
        self.buf_length = 0
        cal = TOUCH.DEFAULT_CAL if calibration is None else calibration
        self.asynchronous = False
        self.touch_parameter(confidence, margin, delay, cal)
        if asyn:  # asynchronous mode: sample continuously in a background task
            self.asynchronous = True
            import uasyncio as asyncio
            loop = asyncio.get_event_loop()
            loop.create_task(self._main_thread())

    # set parameters for get_touch()
    # res: Resolution in bits of the returned values, default = 10
    # confidence: confidence level - number of consecutive touches with a margin smaller than the given level
    #             which the function will sample until it accepts it as a valid touch
    # margin: Difference from mean centre at which touches are considered at the same position
    # delay: Delay between samples in ms.
    #
    def touch_parameter(self, confidence=5, margin=50, delay=10, calibration=None):
        if not self.asynchronous:  # parameters are fixed once the async task runs
            confidence = max(min(confidence, 25), 5)
            if confidence != self.buf_length:
                self.buff = [[0, 0] for x in range(confidence)]
                self.buf_length = confidence
            self.delay = max(min(delay, 100), 5)
            margin = max(min(margin, 100), 1)
            self.margin = margin * margin  # store the square value
            if calibration:
                self.calibration = calibration

    # get_touch(): Synchronous use. get a touch value; Parameters:
    #
    # initital: Wait for a non-touch state before getting a sample.
    #           True = Initial wait for a non-touch state
    #           False = Do not wait for a release
    # wait: Wait for a touch or not?
    #       False: Do not wait for a touch and return immediately
    #       True: Wait until a touch is pressed.
    # raw: Setting whether raw touch coordinates (True) or normalized ones (False) are returned
    #      setting the calibration vector to (0, 1, 0, 1, 0, 1, 0, 1) result in a identity mapping
    # timeout: Longest time (ms, or None = 1 hr) to wait for a touch or release
    #
    # Return (x,y) or None
    #
    def get_touch(self, initial=True, wait=True, raw=False, timeout=None):
        if self.asynchronous:
            return None  # Should only be called in synchronous mode
        if timeout is None:
            timeout = 3600000  # set timeout to 1 hour
        #
        if initial:  ## wait for a non-touch state
            sample = True
            while sample and timeout > 0:
                sample = self.raw_touch()
                pyb.delay(self.delay)
                timeout -= self.delay
            if timeout <= 0:  # after timeout, return None
                return None
        #
        buff = self.buff
        buf_length = self.buf_length
        buffptr = 0
        nsamples = 0
        while timeout > 0:
            if nsamples == buf_length:
                meanx = sum([c[0] for c in buff]) // buf_length
                meany = sum([c[1] for c in buff]) // buf_length
                dev = sum([(c[0] - meanx)**2 + (c[1] - meany)**2 for c in buff]) / buf_length
                if dev <= self.margin:  # got one; compare against the square value
                    if raw:
                        return (meanx, meany)
                    else:
                        return self.do_normalize((meanx, meany))
            # get a new value
            sample = self.raw_touch()  # get a touch
            if sample is None:
                if not wait:
                    return None
                nsamples = 0  # Invalidate buff
            else:
                buff[buffptr] = sample  # put in buff
                buffptr = (buffptr + 1) % buf_length
                nsamples = min(nsamples + 1, buf_length)
            pyb.delay(self.delay)
            timeout -= self.delay
        return None

    # Asynchronous use: this thread maintains self.x and self.y
    async def _main_thread(self):
        import uasyncio as asyncio
        buff = self.buff
        buf_length = self.buf_length
        buffptr = 0
        nsamples = 0
        await asyncio.sleep(0)
        while True:
            if nsamples == buf_length:
                meanx = sum([c[0] for c in buff]) // buf_length
                meany = sum([c[1] for c in buff]) // buf_length
                dev = sum([(c[0] - meanx)**2 + (c[1] - meany)**2 for c in buff]) / buf_length
                if dev <= self.margin:  # got one; compare against the square value
                    self.ready = True
                    self.x, self.y = self.do_normalize((meanx, meany))
            sample = self.raw_touch()  # get a touch
            if sample is None:
                self.touched = False
                self.ready = False
                nsamples = 0  # Invalidate buff
            else:
                self.touched = True
                buff[buffptr] = sample  # put in buff
                buffptr = (buffptr + 1) % buf_length
                nsamples = min(nsamples + 1, buf_length)
            await asyncio.sleep(0)

    # Asynchronous get_touch: return the latest stable (x, y) once, or None.
    def get_touch_async(self):
        if self.ready:
            self.ready = False
            return self.x, self.y
        return None

    #
    # do_normalize(touch)
    # calculate the screen coordinates from the touch values, using the calibration values
    # touch must be the tuple return by get_touch
    #
    def do_normalize(self, touch):
        xmul = self.calibration[3] + (self.calibration[1] - self.calibration[3]) * (touch[1] / 4096)
        xadd = self.calibration[2] + (self.calibration[0] - self.calibration[2]) * (touch[1] / 4096)
        ymul = self.calibration[7] + (self.calibration[5] - self.calibration[7]) * (touch[0] / 4096)
        yadd = self.calibration[6] + (self.calibration[4] - self.calibration[6]) * (touch[0] / 4096)
        x = int((touch[0] + xadd) * xmul)
        y = int((touch[1] + yadd) * ymul)
        return (x, y)

    #
    # raw_touch(tuple)
    # raw read touch. Returns (x,y) or None
    #
    def raw_touch(self):
        # (removed stray ``global CONTROL_PORT`` -- that name was never
        # defined or referenced anywhere in this module)
        x = self.touch_talk(T_GETX, 12)
        y = self.touch_talk(T_GETY, 12)
        if x > X_LOW and y < Y_HIGH:  # touch pressed?
            return (x, y)
        else:
            return None

    #
    # Send a command to the touch controller and wait for the response
    # cmd: command byte
    # bits: expected data size. Reasonable values are 8 and 12
    #
    def touch_talk(self, cmd, bits):
        self.xmit[0] = cmd
        self.spi.write_readinto(self.xmit, self.recv)
        return (self.recv[1] * 256 + self.recv[2]) >> (15 - bits)
| robert-hh/XPT2046-touch-pad-driver-for-PyBoard | touch.py | Python | mit | 9,126 |
'''
@author: Sergio Rojas
@contact: rr.sergio@gmail.com
--------------------------
Contenido bajo
Atribución-NoComercial-CompartirIgual 3.0 Venezuela (CC BY-NC-SA 3.0 VE)
http://creativecommons.org/licenses/by-nc-sa/3.0/ve/
Creado en abril 19, 2016
'''
# Basic arithmetic operations.
print(3+5)
print(2-6)
print(2*7)
print(6/2)
# NOTE(review): 1/3 is integer division (0) under Python 2 but true
# division (0.333...) under Python 3 -- the next line forces float
# division in either version. Confirm which interpreter is intended.
print(1/3)
print(1.0/3)
# Operator precedence with nested parentheses; whitespace does not
# change the result.
print(((2 + 7*(234 -15)+673)*775)/(5+890.0 -(234+1)*5.0))
print(( (2.0 + 7*(234 - 15) + 673)*775 )/( 5+890.0 - (234+1)*5.0 ))
print(( (2.0 + 7*(234 - 15) + 673)*775 ) /( 5+890.0 - (234+1)*5.0 ))
# Exponentiation, including fractional exponents and large results.
print(2.5**3)
print(2.5**(3.2 + 2.1))
print(6.78**30)
# 1e+24 notation and 10**24 yield the same float value.
print(8.647504884825773*1e+24 - 8.647504884825773*10**24)
# Scientific notation literals.
print(1e+2)
print(1e2)
print(1e-2)
print(2e4)
# Roots expressed as fractional powers; 0.3333 only approximates 1/3.
print(4**(1./2.))
print(4**0.5)
print(8**(1./3.))
print(8**0.3333)
# -*- coding: utf-8 -*-
# code for console Encoding difference. Don't mind it
import sys
import imp
imp.reload(sys)
try:
    sys.setdefaultencoding('UTF8')
except Exception as E:
    pass

import testValue
from popbill import ClosedownService, PopbillException

closedownService = ClosedownService(testValue.LinkID, testValue.SecretKey)
closedownService.IsTest = testValue.IsTest
closedownService.IPRestrictOnOff = testValue.IPRestrictOnOff
closedownService.UseStaticIP = testValue.UseStaticIP
closedownService.UseLocalTimeYN = testValue.UseLocalTimeYN

'''
Returns the partner point-charge popup URL.
- Per the security policy, the returned URL is valid for 30 seconds.
- https://docs.popbill.com/closedown/python/api#GetPartnerURL
'''
try:
    print("=" * 15 + " 파트너 포인트충전 URL 확인 " + "=" * 15)

    # Popbill member's business registration number
    CorpNum = testValue.testCorpNum

    # CHRG - point-charge URL
    TOGO = "CHRG"

    url = closedownService.getPartnerURL(CorpNum, TOGO)
    print("URL: %s" % url)

except PopbillException as PE:
    print("Exception Occur : [%d] %s" % (PE.code, PE.message))
"""stockretriever"""
from setuptools import setup
setup(
name='portfolio-manager',
version='1.0',
description='a web app that keeps track of your investment portfolio',
url='https://github.com/gurch101/portfolio-manager',
author='Gurchet Rai',
author_email='gurch101@gmail.com',
license='MIT',
classifiers=[
'Intended Audience :: Financial and Insurance Industry',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
],
keywords='investment portfolio',
dependency_links=['https://github.com/gurch101/StockScraper/tarball/master#egg=stockretriever-1.0'],
zip_safe=True,
setup_requires=[
'stockretriever==1.0',
'Flask==0.10.1',
'passlib==1.6.2',
'schedule==0.3.2',
'requests==2.2.1'
]
)
| gurch101/portfolio-manager | setup.py | Python | mit | 973 |
# -*- coding: utf-8 -*-
"""
pygments.styles.colorful
~~~~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by CodeRay.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ColorfulStyle(Style):
    """
    A colorful style, inspired by CodeRay.
    """

    # Style applied to tokens with no explicit entry below.
    default_style = ""

    # Token type -> style definition string ("bold #RGB", "bg:#RGB", ...).
    # Unlisted token types inherit from their parent token type.
    styles = {
        # Whitespace and comments
        Whitespace:                "#bbbbbb",
        Comment:                   "#888",
        Comment.Preproc:           "#579",
        Comment.Special:           "bold #cc0000",

        # Keywords and operators
        Keyword:                   "bold #080",
        Keyword.Pseudo:            "#038",
        Keyword.Type:              "#339",
        Operator:                  "#333",
        Operator.Word:             "bold #000",

        # Names (identifiers)
        Name.Builtin:              "#007020",
        Name.Function:             "bold #06B",
        Name.Class:                "bold #B06",
        Name.Namespace:            "bold #0e84b5",
        Name.Exception:            "bold #F00",
        Name.Variable:             "#963",
        Name.Variable.Instance:    "#33B",
        Name.Variable.Class:       "#369",
        Name.Variable.Global:      "bold #d70",
        Name.Constant:             "bold #036",
        Name.Label:                "bold #970",
        Name.Entity:               "bold #800",
        Name.Attribute:            "#00C",
        Name.Tag:                  "#070",
        Name.Decorator:            "bold #555",

        # String literals ("bg:" with no color resets the background)
        String:                    "bg:#fff0f0",
        String.Char:               "#04D bg:",
        String.Doc:                "#D42 bg:",
        String.Interpol:           "bg:#eee",
        String.Escape:             "bold #666",
        String.Regex:              "bg:#fff0ff #000",
        String.Symbol:             "#A60 bg:",
        String.Other:              "#D20",

        # Numeric literals
        Number:                    "bold #60E",
        Number.Integer:            "bold #00D",
        Number.Float:              "bold #60E",
        Number.Hex:                "bold #058",
        Number.Oct:                "bold #40E",

        # Generic tokens (diffs, prompts, tracebacks, headings)
        Generic.Heading:           "bold #000080",
        Generic.Subheading:        "bold #800080",
        Generic.Deleted:           "#A00000",
        Generic.Inserted:          "#00A000",
        Generic.Error:             "#FF0000",
        Generic.Emph:              "italic",
        Generic.Strong:            "bold",
        Generic.Prompt:            "bold #c65d09",
        Generic.Output:            "#888",
        Generic.Traceback:         "#04D",

        # Lexer errors
        Error:                     "#F00 bg:#FAA"
    }
| tmm1/pygments.rb | vendor/pygments-main/pygments/styles/colorful.py | Python | mit | 2,778 |
from __future__ import print_function, unicode_literals
from future.builtins import open
import os
import re
import sys
from contextlib import contextmanager
from functools import wraps
from getpass import getpass, getuser
from glob import glob
from importlib import import_module
from posixpath import join
from mezzanine.utils.conf import real_project_name
from fabric.api import abort, env, cd, prefix, sudo as _sudo, run as _run, \
hide, task, local
from fabric.context_managers import settings as fab_settings
from fabric.contrib.console import confirm
from fabric.contrib.files import exists, upload_template
from fabric.contrib.project import rsync_project
from fabric.colors import yellow, green, blue, red
from fabric.decorators import hosts
################
# Config setup #
################

env.proj_app = real_project_name("electionNepal")

# Only load the project's FABRIC settings when actually invoked via fab;
# abort immediately when no deployment hosts are configured.
conf = {}
if sys.argv[0].split(os.sep)[-1] in ("fab", "fab-script.py"):
    # Ensure we import settings from the current dir
    try:
        conf = import_module("%s.settings" % env.proj_app).FABRIC
        try:
            conf["HOSTS"][0]
        except (KeyError, ValueError):
            raise ImportError
    except (ImportError, AttributeError):
        print("Aborting, no hosts defined.")
        exit()

# Credentials and SSH connection settings (fall back to the local user).
env.db_pass = conf.get("DB_PASS", None)
env.admin_pass = conf.get("ADMIN_PASS", None)
env.user = conf.get("SSH_USER", getuser())
env.password = conf.get("SSH_PASS", None)
env.key_filename = conf.get("SSH_KEY_PATH", None)
env.hosts = conf.get("HOSTS", [""])

# Remote filesystem layout for the project and its virtualenv.
env.proj_name = conf.get("PROJECT_NAME", env.proj_app)
env.venv_home = conf.get("VIRTUALENV_HOME", "/home/%s/.virtualenvs" % env.user)
env.venv_path = join(env.venv_home, env.proj_name)
env.proj_path = "/home/%s/mezzanine/%s" % (env.user, env.proj_name)
env.manage = "%s/bin/python %s/manage.py" % (env.venv_path, env.proj_path)

# Domain lists pre-rendered in the formats the config templates expect
# (nginx server_name, regex alternation, Python list literal).
env.domains = conf.get("DOMAINS", [conf.get("LIVE_HOSTNAME", env.hosts[0])])
env.domains_nginx = " ".join(env.domains)
env.domains_regex = "|".join(env.domains)
env.domains_python = ", ".join(["'%s'" % s for s in env.domains])
# SSL is only configured for a single-domain setup; "#" comments the nginx
# SSL lines out of the template when multiple domains are used.
env.ssl_disabled = "#" if len(env.domains) > 1 else ""
env.vcs_tools = ["git", "hg"]
env.deploy_tool = conf.get("DEPLOY_TOOL", "rsync")
env.reqs_path = conf.get("REQUIREMENTS_PATH", None)
env.locale = conf.get("LOCALE", "en_US.UTF-8")
env.num_workers = conf.get("NUM_WORKERS",
                           "multiprocessing.cpu_count() * 2 + 1")

env.secret_key = conf.get("SECRET_KEY", "")
env.nevercache_key = conf.get("NEVERCACHE_KEY", "")

# Remote git repos need to be "bare" and reside separated from the project
if env.deploy_tool == "git":
    env.repo_path = "/home/%s/git/%s.git" % (env.user, env.proj_name)
else:
    env.repo_path = env.proj_path
##################
# Template setup #
##################

# Each template gets uploaded at deploy time, only if their
# contents has changed, in which case, the reload command is
# also run.
# Keys per entry: local_path (template in the repo), remote_path
# (destination, %-interpolated against env), optional reload_command,
# and optional owner/mode applied after upload.
templates = {
    "nginx": {
        "local_path": "deploy/nginx.conf.template",
        "remote_path": "/etc/nginx/sites-enabled/%(proj_name)s.conf",
        "reload_command": "service nginx restart",
    },
    "supervisor": {
        "local_path": "deploy/supervisor.conf.template",
        "remote_path": "/etc/supervisor/conf.d/%(proj_name)s.conf",
        "reload_command": "supervisorctl update gunicorn_%(proj_name)s",
    },
    "cron": {
        "local_path": "deploy/crontab.template",
        "remote_path": "/etc/cron.d/%(proj_name)s",
        # cron refuses files that are group/other writable, hence root:600.
        "owner": "root",
        "mode": "600",
    },
    "gunicorn": {
        "local_path": "deploy/gunicorn.conf.py.template",
        "remote_path": "%(proj_path)s/gunicorn.conf.py",
    },
    "settings": {
        "local_path": "deploy/local_settings.py.template",
        "remote_path": "%(proj_path)s/%(proj_app)s/local_settings.py",
    },
}
######################################
# Context for virtualenv and project #
######################################
@contextmanager
def virtualenv():
    """
    Context manager that executes the wrapped commands from inside the
    project's virtualenv directory with its activate script sourced.
    """
    activate = "source %s/bin/activate" % env.venv_path
    with cd(env.venv_path), prefix(activate):
        yield
@contextmanager
def project():
    """
    Context manager that executes the wrapped commands from the project's
    root directory, inside its virtualenv.
    """
    with virtualenv(), cd(env.proj_path):
        yield
@contextmanager
def update_changed_requirements():
    """
    Checks for changes in the requirements file across an update,
    and gets new requirements if changes have occurred.

    Captures the requirements file before yielding, compares it after the
    wrapped upload, and runs ``pip -r`` unless every requirement is pinned
    and the file is unchanged.
    """
    reqs_path = join(env.proj_path, env.reqs_path)
    get_reqs = lambda: run("cat %s" % reqs_path, show=False)
    old_reqs = get_reqs() if env.reqs_path else ""
    yield
    if old_reqs:
        new_reqs = get_reqs()
        if old_reqs == new_reqs:
            # Unpinned requirements should always be checked.
            for req in new_reqs.split("\n"):
                if req.startswith("-e"):
                    if "@" not in req:
                        # Editable requirement without pinned commit.
                        break
                elif req.strip() and not req.startswith("#"):
                    if not set(">=<") & set(req):
                        # PyPI requirement without version.
                        break
            else:
                # All requirements are pinned.
                return
        # Reached when the file changed, or some requirement is unpinned.
        pip("-r %s/%s" % (env.proj_path, env.reqs_path))
###########################################
# Utils and wrappers for various commands #
###########################################
def _print(output):
print()
print(output)
print()
def print_command(command):
    """Echo a shell command about to run, colorized for the console."""
    pieces = [
        blue("$ ", bold=True),
        yellow(command, bold=True),
        red(" ->", bold=True),
    ]
    _print("".join(pieces))
@task
def run(command, show=True, *args, **kwargs):
    """
    Runs a shell command on the remote server.

    When ``show`` is true the command is echoed locally first; fabric's
    own "running" output is always suppressed.
    """
    if show:
        print_command(command)
    with hide("running"):
        return _run(command, *args, **kwargs)


@task
def sudo(command, show=True, *args, **kwargs):
    """
    Runs a command as sudo on the remote server.

    Mirrors :func:`run`, but executes via fabric's sudo.
    """
    if show:
        print_command(command)
    with hide("running"):
        return _sudo(command, *args, **kwargs)
def log_call(func):
    """Decorator that prints an underlined banner with the task's name
    before invoking it."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        bar = "-" * len(func.__name__)
        banner = "\n".join([bar, func.__name__, bar])
        _print(green(banner, bold=True))
        return func(*args, **kwargs)
    return wrapper
def get_templates():
    """Return a copy of the deploy templates with every value
    %-interpolated against the fabric env."""
    return {
        name: {key: value % env for key, value in data.items()}
        for name, data in templates.items()
    }
def upload_template_and_reload(name):
    """
    Uploads a template only if it has changed, and if so, reload the
    related service.

    ``name`` is a key into the module-level ``templates`` dict. The local
    template is rendered against the fabric env, compared (whitespace
    insensitive) with the current remote file, and only uploaded when the
    rendered content differs.
    """
    template = get_templates()[name]
    local_path = template["local_path"]
    # Fall back to a path relative to this fabfile when the template is
    # not found relative to the current working directory.
    if not os.path.exists(local_path):
        project_root = os.path.dirname(os.path.abspath(__file__))
        local_path = os.path.join(project_root, local_path)
    remote_path = template["remote_path"]
    reload_command = template.get("reload_command")
    owner = template.get("owner")
    mode = template.get("mode")
    remote_data = ""
    if exists(remote_path):
        with hide("stdout"):
            remote_data = sudo("cat %s" % remote_path, show=False)
    with open(local_path, "r") as f:
        local_data = f.read()
        # Escape all non-string-formatting-placeholder occurrences of '%':
        local_data = re.sub(r"%(?!\(\w+\)s)", "%%", local_data)
        # Only prompt for the DB password when the template needs it.
        if "%(db_pass)s" in local_data:
            env.db_pass = db_pass()
        local_data %= env
    clean = lambda s: s.replace("\n", "").replace("\r", "").strip()
    if clean(remote_data) == clean(local_data):
        # Unchanged: skip the upload and the service reload entirely.
        return
    upload_template(local_path, remote_path, env, use_sudo=True, backup=False)
    if owner:
        sudo("chown %s %s" % (owner, remote_path))
    if mode:
        sudo("chmod %s %s" % (mode, remote_path))
    if reload_command:
        sudo(reload_command)
def rsync_upload():
    """
    Uploads the project with rsync excluding some files and folders.

    Excludes VCS metadata, compiled files, local settings and collected
    static files; the trailing os.sep makes rsync copy the directory's
    contents rather than the directory itself.
    """
    excludes = ["*.pyc", "*.pyo", "*.db", ".DS_Store", ".coverage",
                "local_settings.py", "/static", "/.git", "/.hg"]
    local_dir = os.getcwd() + os.sep
    return rsync_project(remote_dir=env.proj_path, local_dir=local_dir,
                         exclude=excludes)
def vcs_upload():
    """
    Uploads the project with the selected VCS tool.

    For git: force-pushes master to a bare remote repo, then checks the
    work tree out into the project path. For hg: pushes and updates,
    tolerating hg's "nothing to push" exit code 255... any other nonzero
    push result aborts.
    """
    if env.deploy_tool == "git":
        remote_path = "ssh://%s@%s%s" % (env.user, env.host_string,
                                         env.repo_path)
        if not exists(env.repo_path):
            run("mkdir -p %s" % env.repo_path)
            with cd(env.repo_path):
                run("git init --bare")
        local("git push -f %s master" % remote_path)
        with cd(env.repo_path):
            # Materialize the pushed commit into the project directory.
            run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path)
            run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path)
    elif env.deploy_tool == "hg":
        remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string,
                                          env.repo_path)
        with cd(env.repo_path):
            if not exists("%s/.hg" % env.repo_path):
                run("hg init")
                print(env.repo_path)
            with fab_settings(warn_only=True):
                push = local("hg push -f %s" % remote_path)
                # NOTE(review): 255 is treated as fatal here -- confirm the
                # intended hg exit-code semantics for this branch.
                if push.return_code == 255:
                    abort()
            run("hg update")
def db_pass():
    """
    Prompts for the database password if unknown.

    The answer is cached on ``env.db_pass`` so the prompt only appears
    once per fab session.
    """
    if not env.db_pass:
        env.db_pass = getpass("Enter the database password: ")
    return env.db_pass
@task
def apt(packages):
    """Install the given system package(s) on the remote host via apt-get,
    non-interactively and quietly."""
    return sudo("apt-get install -y -q %s" % packages)
@task
def pip(packages):
    """Install the given Python package(s) into the project's virtualenv."""
    command = "pip install %s" % packages
    with virtualenv():
        return run(command)
def postgres(command):
    """Run *command* on the remote host as the ``postgres`` system user.

    psql invocations are not echoed; :func:`psql` prints the SQL itself.
    """
    return sudo(command, show=not command.startswith("psql"),
                user="postgres")
@task
def psql(sql, show=True):
    """
    Runs SQL against the project's database.

    Note: the SQL is echoed after it runs (so passwords can be masked by
    callers before display).
    """
    out = postgres('psql -c "%s"' % sql)
    if show:
        print_command(sql)
    return out
@task
def backup(filename):
    """
    Backs up the project database.

    Dumps with pg_dump in custom format (-Fc) and copies the result into
    the current remote directory as ``filename``.
    """
    tmp_file = "/tmp/%s" % filename
    # We dump to /tmp because user "postgres" can't write to other user folders
    # We cd to / because user "postgres" might not have read permissions
    # elsewhere.
    with cd("/"):
        postgres("pg_dump -Fc %s > %s" % (env.proj_name, tmp_file))
    run("cp %s ." % tmp_file)
    sudo("rm -f %s" % tmp_file)


@task
def restore(filename):
    """
    Restores the project database from a previous backup.

    Uses pg_restore -c to drop database objects before recreating them.
    """
    return postgres("pg_restore -c -d %s %s" % (env.proj_name, filename))
@task
def python(code, show=True):
    """
    Runs Python code in the project's virtual environment, with Django loaded.

    The snippet is executed via ``python -c`` with Django's settings module
    configured and ``django.setup()`` called first; backticks are escaped
    so the shell does not treat them as command substitution.
    """
    setup = "import os;" \
            "os.environ[\'DJANGO_SETTINGS_MODULE\']=\'%s.settings\';" \
            "import django;" \
            "django.setup();" % env.proj_app
    full_code = 'python -c "%s%s"' % (setup, code.replace("`", "\\\`"))
    with project():
        if show:
            print_command(code)
        result = run(full_code, show=False)
    return result
def static():
    """
    Returns the live STATIC_ROOT directory.

    The last output line is taken so any noise printed during Django
    startup is discarded.
    """
    return python("from django.conf import settings;"
                  "print(settings.STATIC_ROOT)", show=False).split("\n")[-1]


@task
def manage(command):
    """
    Runs a Django management command via the project's manage.py.
    """
    return run("%s %s" % (env.manage, command))
###########################
# Security best practices #
###########################
@task
@log_call
@hosts(["root@%s" % host for host in env.hosts])
def secure(new_user=env.user):
    """
    Minimal security steps for brand new servers.
    Installs system updates, creates new user (with sudo privileges) for future
    usage, and disables root login via SSH.

    Runs as root on every configured host (see the @hosts decorator).
    """
    run("apt-get update -q")
    run("apt-get upgrade -y -q")
    run("adduser --gecos '' %s" % new_user)
    run("usermod -G sudo %s" % new_user)
    # Substring match flips "PermitRootLogin yes" to "PermitRootLogin no".
    run("sed -i 's:RootLogin yes:RootLogin no:' /etc/ssh/sshd_config")
    run("service ssh restart")
    print(green("Security steps completed. Log in to the server as '%s' from "
                "now on." % new_user, bold=True))
#########################
# Install and configure #
#########################
@task
@log_call
def install():
    """
    Installs the base system and Python requirements for the entire server.
    """
    # Install system requirements
    sudo("apt-get update -y -q")
    apt("nginx libjpeg-dev python-dev python-setuptools git-core "
        "postgresql libpq-dev memcached supervisor python-pip")
    run("mkdir -p /home/%s/logs" % env.user)
    # Install Python requirements
    sudo("pip install -U pip virtualenv virtualenvwrapper mercurial")
    # Set up virtualenv
    run("mkdir -p %s" % env.venv_home)
    run("echo 'export WORKON_HOME=%s' >> /home/%s/.bashrc" % (env.venv_home,
                                                              env.user))
    run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> "
        "/home/%s/.bashrc" % env.user)
    print(green("Successfully set up git, mercurial, pip, virtualenv, "
                "supervisor, memcached.", bold=True))
@task
@log_call
def create():
    """
    Creates the environment needed to host the project.
    The environment consists of: system locales, virtualenv, database, project
    files, SSL certificate, and project-specific Python requirements.

    Returns True so ``all`` can chain into ``deploy``.
    """
    # Generate project locale
    locale = env.locale.replace("UTF-8", "utf8")
    with hide("stdout"):
        if locale not in run("locale -a"):
            sudo("locale-gen %s" % env.locale)
            sudo("update-locale %s" % env.locale)
            # Postgres must pick up the new locale before createdb runs.
            sudo("service postgresql restart")
            run("exit")

    # Create project path
    run("mkdir -p %s" % env.proj_path)

    # Set up virtual env
    run("mkdir -p %s" % env.venv_home)
    with cd(env.venv_home):
        if exists(env.proj_name):
            if confirm("Virtualenv already exists in host server: %s"
                       "\nWould you like to replace it?" % env.proj_name):
                run("rm -rf %s" % env.proj_name)
            else:
                abort()
        run("virtualenv %s" % env.proj_name)

    # Upload project files
    if env.deploy_tool in env.vcs_tools:
        vcs_upload()
    else:
        rsync_upload()

    # Create DB and DB user
    pw = db_pass()
    # BUG FIX: escape single quotes by doubling them, as SQL requires inside
    # a string literal. The previous pw.replace("'", "\'") was a no-op,
    # since "\'" is just "'" in Python.
    escaped_pw = pw.replace("'", "''")
    user_sql_args = (env.proj_name, escaped_pw)
    user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
    psql(user_sql, show=False)
    # Echo the statement with the password masked out.
    shadowed = "*" * len(pw)
    print_command(user_sql.replace("'%s'" % escaped_pw, "'%s'" % shadowed))
    psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
         "LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
         (env.proj_name, env.proj_name, env.locale, env.locale))

    # Set up SSL certificate
    if not env.ssl_disabled:
        conf_path = "/etc/nginx/conf"
        if not exists(conf_path):
            sudo("mkdir %s" % conf_path)
        with cd(conf_path):
            crt_file = env.proj_name + ".crt"
            key_file = env.proj_name + ".key"
            if not exists(crt_file) and not exists(key_file):
                try:
                    # Use a cert/key pair shipped in deploy/, if present.
                    crt_local, = glob(join("deploy", "*.crt"))
                    key_local, = glob(join("deploy", "*.key"))
                except ValueError:
                    # Otherwise generate a self-signed certificate.
                    parts = (crt_file, key_file, env.domains[0])
                    sudo("openssl req -new -x509 -nodes -out %s -keyout %s "
                         "-subj '/CN=%s' -days 3650" % parts)
                else:
                    upload_template(crt_local, crt_file, use_sudo=True)
                    upload_template(key_local, key_file, use_sudo=True)

    # Install project-specific requirements
    upload_template_and_reload("settings")
    with project():
        if env.reqs_path:
            pip("-r %s/%s" % (env.proj_path, env.reqs_path))
        pip("gunicorn setproctitle psycopg2 "
            "django-compressor python-memcached")
        # Bootstrap the DB
        manage("createdb --noinput --nodata")
        python("from django.conf import settings;"
               "from django.contrib.sites.models import Site;"
               "Site.objects.filter(id=settings.SITE_ID).update(domain='%s');"
               % env.domains[0])
        for domain in env.domains:
            python("from django.contrib.sites.models import Site;"
                   "Site.objects.get_or_create(domain='%s');" % domain)
        if env.admin_pass:
            pw = env.admin_pass
            user_py = ("from django.contrib.auth import get_user_model;"
                       "User = get_user_model();"
                       "u, _ = User.objects.get_or_create(username='admin');"
                       "u.is_staff = u.is_superuser = True;"
                       "u.set_password('%s');"
                       "u.save();" % pw)
            python(user_py, show=False)
            # Echo the snippet with the admin password masked out.
            shadowed = "*" * len(pw)
            print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))

    return True
@task
@log_call
def remove():
    """
    Blow away the current project.

    Removes the virtualenv, project files, uploaded config templates and
    the repo, then drops the project's database and DB user.
    """
    if exists(env.venv_path):
        run("rm -rf %s" % env.venv_path)
    if exists(env.proj_path):
        run("rm -rf %s" % env.proj_path)
    for template in get_templates().values():
        remote_path = template["remote_path"]
        if exists(remote_path):
            sudo("rm %s" % remote_path)
    if exists(env.repo_path):
        run("rm -rf %s" % env.repo_path)
    # Make supervisor forget the removed gunicorn config.
    sudo("supervisorctl update")
    psql("DROP DATABASE IF EXISTS %s;" % env.proj_name)
    psql("DROP USER IF EXISTS %s;" % env.proj_name)
##############
# Deployment #
##############
@task
@log_call
def restart():
    """
    Restart gunicorn worker processes for the project.
    If the processes are not running, they will be started.
    """
    pid_path = "%s/gunicorn.pid" % env.proj_path
    if exists(pid_path):
        # Graceful reload: HUP makes gunicorn re-exec its workers.
        run("kill -HUP `cat %s`" % pid_path)
    else:
        sudo("supervisorctl update")
@task
@log_call
def deploy():
    """
    Deploy latest version of the project.
    Backup current version of the project, push latest version of the project
    via version control or rsync, install new requirements, sync and migrate
    the database, collect any new static assets, and restart gunicorn's worker
    processes for the project.

    Returns True on completion.
    """
    if not exists(env.proj_path):
        if confirm("Project does not exist in host server: %s"
                   "\nWould you like to create it?" % env.proj_name):
            create()
        else:
            abort()

    # Backup current version of the project
    with cd(env.proj_path):
        backup("last.db")
    if env.deploy_tool in env.vcs_tools:
        with cd(env.repo_path):
            # Record the currently deployed revision for rollback().
            if env.deploy_tool == "git":
                run("git rev-parse HEAD > %s/last.commit" % env.proj_path)
            elif env.deploy_tool == "hg":
                run("hg id -i > last.commit")
        with project():
            static_dir = static()
            if exists(static_dir):
                run("tar -cf static.tar --exclude='*.thumbnails' %s" %
                    static_dir)
    else:
        with cd(join(env.proj_path, "..")):
            # BUG FIX: "*.pio" was a typo for "*.pyo" (compiled optimized
            # Python files), matching the excludes in rsync_upload().
            excludes = ["*.pyc", "*.pyo", "*.thumbnails"]
            exclude_arg = " ".join("--exclude='%s'" % e for e in excludes)
            run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg))

    # Deploy latest version of the project
    with update_changed_requirements():
        if env.deploy_tool in env.vcs_tools:
            vcs_upload()
        else:
            rsync_upload()
    with project():
        manage("collectstatic -v 0 --noinput")
        manage("migrate --noinput")
    for name in get_templates():
        upload_template_and_reload(name)
    restart()
    return True
@task
@log_call
def rollback():
    """
    Reverts project state to the last deploy.
    When a deploy is performed, the current state of the project is
    backed up. This includes the project files, the database, and all static
    files. Calling rollback will revert all of these to their state prior to
    the last deploy.
    """
    with update_changed_requirements():
        if env.deploy_tool in env.vcs_tools:
            with cd(env.repo_path):
                # Check out the revision recorded by the last deploy().
                if env.deploy_tool == "git":
                    run("GIT_WORK_TREE={0} git checkout -f "
                        "`cat {0}/last.commit`".format(env.proj_path))
                elif env.deploy_tool == "hg":
                    run("hg update -C `cat last.commit`")
            with project():
                with cd(join(static(), "..")):
                    run("tar -xf %s/static.tar" % env.proj_path)
        else:
            # rsync deploys: restore the tarball taken before the deploy.
            with cd(env.proj_path.rsplit("/", 1)[0]):
                run("rm -rf %s" % env.proj_name)
                run("tar -xf %s.tar" % env.proj_name)
    with cd(env.proj_path):
        restore("last.db")
    restart()
@task
@log_call
def all():
    """
    Installs everything required on a new system and deploy.
    From the base software, up to the deployed project.

    NOTE: shadows the builtin ``all`` at module level; kept as-is because
    the function name is also the fab task name used on the command line.
    """
    install()
    if create():
        deploy()
| okfnepal/election-nepal | fabfile.py | Python | mit | 21,828 |
from __future__ import absolute_import, print_function
import numpy as np
import warnings
from numpy.polynomial.hermite import hermvander
from six.moves import xrange
from .common import Baseline
try:
from cvxopt import matrix as cvx_matrix, solvers
except ImportError:
from scipy.optimize import linprog
HAS_CVXOPT = False
else:
HAS_CVXOPT = True
# Module-level side effect: compact numpy printing for the verbose output.
np.set_printoptions(precision=4, suppress=True)
# Shared mutable state used by _linprog_callback across iterations.
_callback_state = {'last_nit':0, 'last_phase':0}
def mario_baseline(bands, intensities, poly_order=10, max_iters=None,
                   verbose=False, tol=1e-2):
    '''Solves a linear program: min_u f'u s.t. -P'u <= -s
    Where u are coefficients of a Hermite polynomial.

    Returns the fitted baseline (same shape as intensities). Falls back to
    an all-zero baseline when no polynomial order yields a solution.
    '''
    bands = bands.astype(float)
    intensities = intensities.astype(float)
    if max_iters is None:
        max_iters = len(bands) * 10
    opts = dict(maxiter=max_iters, disp=verbose, tol=tol)
    callback = _linprog_callback if verbose else None
    # Flip intensities upside down.
    # The +500 margin keeps the flipped signal strictly positive;
    # NOTE(review): the margin looks empirical -- confirm its choice.
    maxval = intensities.max() + 500
    s = maxval - intensities
    # Keep trying to solve until we succeed.
    # Orders are tried from poly_order down to 1; the for/else below only
    # runs its else branch when no order produced a solution.
    for order in xrange(poly_order, 0, -1):
        result, P = _mario_helper(bands, s, order, opts, callback)
        if verbose:
            print('With order %d:' % order, result['status'])
        if result['x'] is not None:
            break
    else:
        warnings.warn('mario_baseline didnt find a fit at any order')
        return np.zeros_like(s)
    baseline = P.dot(np.array(result['x']).ravel())
    # Flip it back over.
    return maxval - baseline
def _mario_helper(bands, s, poly_order, opts, callback):
    """Solve one LP at a fixed polynomial order.

    Returns ``(res, P)`` where ``res`` is a dict with 'status' and 'x'
    ('x' is None on failure) and ``P`` is the Hermite basis matrix.
    Uses cvxopt when available, otherwise scipy's linprog.
    """
    # Build the polynomial basis over the bands.
    P = hermvander(bands, poly_order-1)
    f = P.sum(axis=0)
    if HAS_CVXOPT:
        solvers.options['show_progress'] = opts['disp']
        solvers.options['maxiters'] = opts['maxiter']
        solvers.options['abstol'] = opts['tol']
        solvers.options['reltol'] = opts['tol']
        solvers.options['feastol'] = 1e-100  # For some reason this helps.
        try:
            res = solvers.lp(cvx_matrix(f), cvx_matrix(-P), cvx_matrix(-s))
        except ValueError as e:
            # This can be thrown when poly_order is too large for the data size.
            # BUG FIX: Exception.message does not exist on Python 3;
            # str(e) works on both Python 2 and 3.
            res = {'status': str(e), 'x': None}
        return res, P

    res = linprog(f, A_ub=-P, b_ub=-s, bounds=(-np.inf,np.inf), options=opts,
                  callback=callback)
    res = {'status': res.message, 'x': res.x if res.success else None}
    return res, P
def _linprog_callback(xk, nit=0, phase=0, tableau=None, **kwargs):
    """Verbose progress printer passed as scipy.optimize.linprog callback.

    Prints a banner when the simplex phase changes and one line per
    iteration; relies on the module-level _callback_state dict to detect
    phase transitions and offset phase-2 iteration numbers.
    """
    # Current objective value sits in the bottom-right tableau cell, negated.
    obj = -tableau[-1, -1]
    new_state = False
    if _callback_state['last_phase'] != phase:
        new_state = True
        _callback_state['last_phase'] = phase
    if phase == 1:
        if new_state:
            print('--- Phase 1: Find a feasible point. ---')
            print('Iter\tObjective')
        # Remember where phase 1 ended so phase 2 can restart numbering.
        _callback_state['last_nit'] = nit
        print('%d\t%g' % (nit, obj))
    else:
        if new_state:
            print('--- Phase 2: Minimize using simplex. ---')
            print('Iter\tObjective')
        print('%d\t%g' % (nit - _callback_state['last_nit'], obj))
class Mario(Baseline):
    """Baseline-correction adapter wrapping mario_baseline().

    Trailing-underscore attributes follow the convention used by the
    Baseline base class / param_ranges machinery.
    """

    def __init__(self, poly_order=10, max_iters=None, verbose=False, tol=1e-2):
        self.poly_order_ = poly_order
        self.max_iters_ = max_iters
        self.verbose_ = verbose
        self.tol_ = tol

    def _fit_one(self, bands, intensities):
        # Delegate a single-spectrum fit to the module-level solver.
        return mario_baseline(bands, intensities, self.poly_order_,
                              self.max_iters_, self.verbose_, self.tol_)

    def param_ranges(self):
        # Tunable hyperparameter search space: (low, high, kind).
        return {'poly_order_': (1, 12, 'integer')}
| all-umass/superman | superman/baseline/mario.py | Python | mit | 3,423 |
from contentbase.upgrader import upgrade_step
@upgrade_step('evidenceScore', '1', '2')
def evidenceScore_1_2(value, system):
    # https://github.com/ClinGen/clincoded/issues/1507
    # Add affiliation property and update schema version
    # NOTE(review): the body is intentionally empty -- presumably the
    # framework bumps schema_version for registered steps; verify that the
    # affiliation property really needs no data migration here.
    return
| ClinGen/clincoded | src/clincoded/upgrade/evidenceScore.py | Python | mit | 250 |
from .. import models
import datetime
def typical_user():
    """Create and return a standard confirmed test user.

    Returns None when the user already exists -- callers relying on the
    return value should be aware of this.
    """
    username = 'alice'
    if not models.User.query.filter_by(username=username).first():
        return models.User.register(
            username=username,
            password='qqq',
            confirmed=True,
        )


def typical_dataset():
    # Placeholder fixture: not yet implemented.
    pass
| msarfati/dodecahedron | dodecahedron/tests/fixtures.py | Python | mit | 318 |
import shopify
import json
from test.test_helper import TestCase
class ApplicationCreditTest(TestCase):
    """Tests for the ApplicationCredit resource against a faked HTTP layer.

    self.fake() stubs the Shopify API endpoint with a canned fixture body.
    """

    def test_get_application_credit(self):
        # Fetch a single credit by id and check the fixture's amount.
        self.fake("application_credits/445365009", method="GET", body=self.load_fixture("application_credit"), code=200)
        application_credit = shopify.ApplicationCredit.find(445365009)
        self.assertEqual("5.00", application_credit.amount)

    def test_get_all_application_credits(self):
        # Listing returns the one credit in the fixture collection.
        self.fake("application_credits", method="GET", body=self.load_fixture("application_credits"), code=200)
        application_credits = shopify.ApplicationCredit.find()
        self.assertEqual(1, len(application_credits))
        self.assertEqual(445365009, application_credits[0].id)

    def test_create_application_credit(self):
        self.fake(
            "application_credits",
            method="POST",
            body=self.load_fixture("application_credit"),
            headers={"Content-type": "application/json"},
            code=201,
        )
        application_credit = shopify.ApplicationCredit.create(
            {"description": "application credit for refund", "amount": 5.0}
        )
        # Verify the JSON body actually sent over the wire.
        expected_body = {"application_credit": {"description": "application credit for refund", "amount": 5.0}}
        self.assertEqual(expected_body, json.loads(self.http.request.data.decode("utf-8")))
| Shopify/shopify_python_api | test/application_credit_test.py | Python | mit | 1,374 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make VisionsPage.vision_pages optional (blank=True)."""

    dependencies = [
        ('jdpages', '0003_auto_20170725_2349'),
    ]

    operations = [
        migrations.AlterField(
            model_name='visionspage',
            name='vision_pages',
            field=models.ManyToManyField(blank=True, to='jdpages.VisionPage'),
        ),
    ]
| jonge-democraten/website | website/jdpages/migrations/0004_auto_20170726_0019.py | Python | mit | 441 |
"""Django app config for the analytics app."""
from django.apps import AppConfig
class AnalyticsAppConfig(AppConfig):
    """Analytics app init code."""

    # Dotted path Django uses to locate the app, and its display label.
    name = 'readthedocs.analytics'
    verbose_name = 'Analytics'
| rtfd/readthedocs.org | readthedocs/analytics/apps.py | Python | mit | 224 |
#-*- coding: utf-8 -*-
'''
Created on 23 mar 2014
@author: mariusz
@author: tomasz
'''
import unittest
from selearea import get_ast, get_workareas
class seleareaTest(unittest.TestCase):
    """Tests for get_ast/get_workareas.

    NOTE(review): the get_*_pages helpers fetch live URLs over the network,
    so these tests require connectivity and the sites being up. The url
    collections are set literals, so duplicates are deduplicated and
    iteration order is not guaranteed -- confirm that is intended (the
    identical-pages helper in particular collapses to a single URL).
    """

    def get_fc_pages(self):
        # Three distinct pages from the fc.put.poznan.pl site.
        urls = {
            "http://fc.put.poznan.pl",
            "http://fc.put.poznan.pl/rekrutacja/post-powanie-kwalifikacyjne%2C29.html",
            "http://fc.put.poznan.pl/o-wydziale/witamy%2C39.html"
        }
        return [get_ast(url) for url in urls]

    def get_fce_pages(self):
        # Two pages from the bis.put.poznan.pl site.
        urls = {
            "http://www.bis.put.poznan.pl/",
            "http://www.bis.put.poznan.pl/evPages/show/id/182"
        }
        return [get_ast(url) for url in urls]

    def get_identical_pages(self):
        # Same URL twice; the set literal deduplicates it to one entry.
        urls = {
            "http://www.bis.put.poznan.pl/",
            "http://www.bis.put.poznan.pl/"
        }
        return [get_ast(url) for url in urls]

    def test_get_wrong_page(self):
        # A URL without a scheme should be rejected.
        url = "putpoznan.pl"
        with self.assertRaises(ValueError):
            get_ast(url)

    def test_get_none_page(self):
        with self.assertRaises(ValueError):
            get_ast(None)

    def test_get_workarea_identical_pages(self):
        # Identical pages have no differing region, hence no work areas.
        asts = self.get_identical_pages()
        workareas = get_workareas(asts)
        self.assertEqual(0, len(workareas), "AssertionFailed: work area found on identical pages.")

    def test_get_ast_fc_count(self):
        asts = self.get_fc_pages()
        self.assertEqual(3, len(asts), "AssertionFailed: count for fc pages.")

    def test_get_workarea_fc_content(self):
        # The detected work area is reported as an XPath expression.
        asts = self.get_fc_pages()
        workareas = get_workareas(asts)
        xpath = str("//html[@class='js']/body/div[@id='right']/div[@id='content']")
        self.assertEqual(xpath, workareas[0], "AssertionFailed: xpaths for fc pages.")

    def test_get_ast_fce_count(self):
        asts = self.get_fce_pages()
        self.assertEqual(2, len(asts), "AssertionFailed: count for fc pages.")

    def test_get_workarea_fce_content(self):
        asts = self.get_fce_pages()
        workareas = get_workareas(asts)
        xpath = str("//html/body/div[@id='main']/div/div[@id='left_menu']/div[@id='left_menu_box']")
        self.assertEqual(xpath, workareas[1], "AssertionFailed: xpaths for fc pages.")


if __name__ == "__main__":
    unittest.main()
| perfidia/selearea | tests/seleareaTest.py | Python | mit | 2,350 |
# Exercise 43: Basic Object-Oriented Analysis and Design
# Process to build something to evolve problems
# 1. Write or draw about the problem.
# 2. Extract key concepts from 1 and research them.
# 3. Create a class hierarchy and object map for the concepts.
# 4. Code the classes and a test to run them.
# 5. Repeat and refine.
# The Analysis of a Simple Game Engine
# Write or Draw About the Problem
"""
Aliens have invaded a space ship and our hero has to go through a maze of rooms
defeating them so he can escape into an escape pod to the planet below. The game
will be more like a Zork or Adventure type game with text outputs and funny ways
to die. The game will involve an engine that runs a map full of rooms or scenes.
Each room will print its own description when the player enters it and then tell
the engine what room to run next out of the map.
"""
# At this point I have a good idea for the game and how it would run, so now I want
# to describe each scene:
"""
Death
This is when the player dies and should be something funny.
Central Corridor
This is the starting point and has a Gothon already standing there.
They have to defeat with a joke before continuing.
Laser Weapon Armory
This is where the hero gets a neutron bomb to blow up the ship before
getting to the escape pod. It has a keypad the hero has to guess the
number for.
The Bridge
Another battle scene with a Gothon where the hero places the bomb.
Escape Pod
Where the hero escapes but only after guessing the right escape pod.
"""
# Extract Key Concepts and Research Them
# First I make a list of all the nouns:
# Alien, Player, Ship, Maze, Room, Scene, Gothon, Escape Pod, Planet, Map, Engine, Death,
# Central Corridor, Laser Weapon Armory, The Bridge
# Create a Class Hierarchy and Object Map for the Concepts
"""
Right away I see that "Room" and "Scene" are basically the same thing depending on how
I want to do things. I'm going to pick "Scene" for this game. Then I see that all the
specific rooms like "Central Corridor" are basically just Scenes. I see also that Death
is basically a Scene, which confirms my choice of "Scene" over "Room" since you can have
a death scene, but a death room is kind of odd. "Maze" and "Map" are basically the same
so I'm going to go with "Map" since I used it more often. I don't want to do a battle
system so I'm going to ignore "Alien" and "Player" and save that for later. The "Planet"
could also just be another scene instead of something specific
"""
# After all of that thoiught process I start to make a class hierarchy that looks
# like this in my text editor:
# * Map
# * Engine
# * Scene
# * Death
# * Central Corridor
# * Laser Weapon Armory
# * The Bridge
# * Escape Pod
"""
I would then go through and figure out what actions are needed on each thing based on
verbs in the description. For example, I know from the description I'm going to need a
way to "run" the engine, "get the next scene" from the map, get the "opening scene" and
"enter" a scene. I'll add those like this:
"""
# * Map
# - next_scene
# - opening_scene
# * Engine
# - play
# * Scene
# - enter
# * Death
# * Central Corridor
# * Laser Weapon Armory
# * The Bridge
# * Escape Pod
"""
Notice how I just put -enter under Scene since I know that all the scenes under it will
inherit it and have to override it later.
"""
# Code the Classes and a Test to Run Them
# The Code for "Gothons from Planet Percal #25"
from sys import exit
from random import randint
class Scene(object):
    # Base class for every game scene; concrete scenes override enter() and
    # return the name of the next scene to the Engine.
    def enter(self):
        """Fail loudly if a subclass forgot to implement enter()."""
        print "This scene is not yet configured. Subclass it and implement enter()."
        exit(1)
class Engine(object):
    """Drives the game: repeatedly enters scenes until the final one is reached."""

    def __init__(self, scene_map):
        # scene_map: a Map resolving scene names to Scene instances.
        self.scene_map = scene_map

    def play(self):
        """Run the scene loop from the map's opening scene to 'finished'."""
        current_scene = self.scene_map.opening_scene()
        last_scene = self.scene_map.next_scene('finished')
        while current_scene != last_scene:
            # Each scene returns the name of the scene that follows it.
            next_scene_name = current_scene.enter()
            current_scene = self.scene_map.next_scene(next_scene_name)
        # be sure to print out the last scene
        current_scene.enter()
class Death(Scene):
    """Terminal scene shown when the player dies: print a random taunt and exit."""

    quips = [
        "You died. You kinda suck at this.",
        "Your mom would be proud...if she were smarter.",
        "Such a luser.",
        "I have a small puppy that's better at this."
    ]

    def enter(self):
        # randint is inclusive on both ends, hence len(...) - 1.
        print Death.quips[randint(0, len(self.quips)-1)]
        exit(1)
class CentralCorridor(Scene):
    """Opening scene: confront the Gothon blocking the door to the Armory."""

    def enter(self):
        # Scene narration and the player's available actions.
        print "The Gothons of Planet Percal #25 have invaded your ship and destroyed"
        print "your entire crew. You are the last surviving member and your last"
        print "mission is to get the neutron destruct bomb from the Weapons Armory,"
        print "put it in the bridge, and blow the ship up after getting into an "
        print "escape pod."
        print "\n"
        print "You're running down the central corridor to the Weapons Armory when"
        print "a Gothon jumps out, red scaly skin, dark grimy teeth, and evil clown costume"
        print "flowing around his hate filled body. He's blocking the door to the"
        print "Armory and about to pull a weapon to blast you."
        print "What will you do?"
        print ">> shoot!"
        print ">> dodge!"
        print ">>tell a joke"
        action = raw_input("> ")
        # Each branch returns the next scene's name for the Engine/Map loop.
        if action == "shoot!":
            print "Quick on the draw you yank out your blaster and fire it at the Gothon."
            print "His clown costume is flowing and moving around his body, which throws"
            print "off your aim. Your laser hits his costume but misses him entirely. This"
            print "completely ruins his brand new costume his mother bought him, which"
            print "makes him fly into an insane rage and blast you repeatedly in the face until"
            print "you are dead. Then he eats you."
            return 'death'
        elif action == "dodge!":
            print "Like a world class boxer you dodge, weave, slip and slide right"
            print "as the Gothon's blaster cranks a laser past your head."
            print "In the middle of your artful dodge your foot slips and you"
            print "bang your head on the metal wall and pass out."
            print "You wake up shortly after only to die as the Gothon stomps on"
            print "your head and eats you."
            return 'death'
        elif action == "tell a joke":
            print "Lucky for you they made you learn Gothon insults in the academy."
            print "You tell the one Gothon joke you know: "
            # The joke line appears to be ROT13-encoded text (a deliberate gag).
            print "Lbhe zbgure vf fb sng, jura fur fvgf nebhaq gur ubhfr, fur fvgf nebhaq gur ubhfr."
            print "The Gothon stops, tries not to laugh, then busts out laughing and can't move."
            print "While he's laughing you run up and shoot him square in the head"
            print "putting him down, then jump through the Weapon Armory door."
            return 'laser_weapon_armory'
        else:
            # Unknown input: replay this scene.
            print "DOES NOT COMPUTE!"
            return 'central_corridor'
class LaserWeaponArmory(Scene):
def enter(self):
print "You do a dive roll into the Weapon Armory, crouch and scan the room"
print "for more Gothons that might be hiding. It's dead quiet, too quiet."
print "You stand up and run to the far side of the room and find the"
print "neutron bomb in its container. There's a keypad lock on the box"
print "and you need the code to get the bomb out. If you get the code"
print "wrong 10 times then the lock closes forever and you can't"
print "get the bomb. The code is 3 digits."
code = "%d%d%d" % (randint(1,9), randint(1,9), randint(1,9))
print "This is the code: %s." % code
guess = raw_input("[keypad]> ")
guesses = 0
while guess != code and guesses < 10:
print "BZZZZEDDD!"
guesses += 1
guess = raw_input("[keypad]> ")
if guess == code:
print "The container clicks open and the seal breaks, letting gas out."
print "You grab the neutron bomb and run as fast as you can to the"
print "bridge where you must place it in the right spot."
return 'the_bridge'
else:
print "The lock buzzes one last time and then you hear a sickening"
print "melting sound as the mechanism is fused together."
print "You decide to sit there, and finally the Gothons blow up the"
print "ship from their ship and you die."
return 'death'
class TheBridge(Scene):
    """Place the bomb on the bridge without provoking the Gothons into shooting."""

    def enter(self):
        print "You burst onto the Bridge with the netron destruct bomb"
        print "under your arm and surprise 5 Gothons who are trying to"
        print "take control of the ship. Each of them has an even uglier"
        print "clown costume than the last. They haven't pulled their"
        print "weapons out yet, as they see the active bomb under your"
        print "arm and don't want to set it off."
        print "What will you do?"
        print ">> throw the bomb"
        print ">>slowly place the bomb"
        action = raw_input("> ")
        # Each branch returns the next scene's name for the Engine/Map loop.
        if action == "throw the bomb":
            print "In a panic you throw the bomb at the group of Gothons"
            print "and make a leap for the door. Right as you drop it a"
            print "Gothon shoots you right in the back killing you."
            print "As you die you see another Gothon frantically try to disarm"
            print "the bomb. You die knowing they will probably blow up when"
            print "it goes off."
            return 'death'
        elif action == "slowly place the bomb":
            print "You point your blaster at the bomb under your arm"
            print "and the Gothons put their hands up and start to sweat."
            print "You inch backward to the door, open it, and then carefully"
            print "place the bomb on the floor, pointing your blaster at it."
            print "You then jump back through the door, punch the close button"
            print "and blast the lock so the Gothons can't get out."
            print "Now that the bomb is placed you run to the escape pod to"
            print "get off this tin can."
            return 'escape_pod'
        else:
            # Unknown input: replay this scene.
            print "DOES NOT COMPUTE!"
            return "the_bridge"
class EscapePod(Scene):
    """Final choice: pick the one working escape pod out of five."""

    def enter(self):
        print "You rush through the ship desperately trying to make it to"
        print "the escape pod before the whole ship explodes. It seems like"
        print "hardly any Gothons are on the ship, so your run is clear of"
        print "interference. You get to the chamber with the escape pods, and"
        print "now need to pick one to take. Some of them could be damaged"
        print "but you don't have time to look. There's 5 pods, which one"
        print "do you take?"
        # The working pod is chosen at random — and then revealed to the
        # player, which looks like a built-in cheat/debug aid.
        good_pod = randint(1,5)
        print "Fast look tells you %s is good." % good_pod
        guess = raw_input("[pod #]> ")
        # NOTE(review): int(guess) raises ValueError on non-numeric input.
        if int(guess) != good_pod:
            print "You jump into pod %s and hit the eject button." % guess
            print "The pod escapes out into the void of space, then"
            print "implodes as the hull ruptures, crushing your body"
            print "into jam jelly."
            return 'death'
        else:
            print "You jump into pod %s and hit the eject button." % guess
            print "The pod easily slides out into space heading to"
            print "the planet below. As it flies to the planet, you look"
            print "back and see your ship implode then explode like a"
            print "bright star, taking out the Gothon ship at the same"
            print "time. You won!"
            return 'finished'
class Finished(Scene):
    """Victory scene; returning 'finished' ends the Engine's play loop."""
    def enter(self):
        print "You won! Good job."
        return 'finished'
class Map(object):
    """Resolves scene names to their shared Scene instances."""

    # One instance per scene, shared by every Map (class attribute).
    scenes = {
        'central_corridor': CentralCorridor(),
        'laser_weapon_armory': LaserWeaponArmory(),
        'the_bridge': TheBridge(),
        'escape_pod': EscapePod(),
        'death': Death(),
        'finished': Finished(),
    }

    def __init__(self, start_scene):
        # Name of the scene the game begins in; resolved in opening_scene().
        self.start_scene = start_scene

    def next_scene(self, scene_name):
        """Return the Scene registered under ``scene_name`` (None if unknown)."""
        return Map.scenes.get(scene_name)

    def opening_scene(self):
        """Return the Scene instance the game starts in."""
        return self.next_scene(self.start_scene)
# Wire the map and engine together and start the game in the central corridor.
a_map = Map('central_corridor')
a_game = Engine(a_map)
a_game.play()
# Top Down vs Bottom Up
# Steps to do Bottom Up:
# 1. Take a small piece of the problem; hack on some code and get it to run barely.
# 2. Refine the code into something more formal with classes and automated tests.
# 3. Extract the key concepts you're using and try to find research for them.
# 4. Write a description of what's really going on.
# 5. Go back and refine the code, possibly throwing it out and starting over.
# 6. Repeat, moving on to some other piece of the problem.
# Study Drills:
# 1. Change it! Maybe you hate this game. Could be too violent, you aren't into sci-fi. Get the game
# working, then change it to what you like. This is your computer, you make it do what you want.
# 2. I have a bug in this code. Why is the door lock guessing 11 times?
# 3. Explain how returning the next room works.
# 4. Add cheat codes to the game so you can get past the more difficult rooms. I can do this with
# two words on one line.
# 5. Go back to my description and analysis, then try to build a small combat system for the hero
# and the various Gothons he encounters.
# 6. This is actually a small version of something called a "finite state machine". Read about them.
# They might not make sense but try anyway.
| Valka7a/python-playground | python-the-hard-way/43-basic-object-oriented-analysis-and-design.py | Python | mit | 13,790 |
import json
import logging
import socket
from contextlib import closing
from django.core.exceptions import ValidationError
from django.db import connection
from zeroconf import get_all_addresses
from zeroconf import NonUniqueNameException
from zeroconf import ServiceInfo
from zeroconf import USE_IP_OF_OUTGOING_INTERFACE
from zeroconf import Zeroconf
from kolibri.core.discovery.models import DynamicNetworkLocation
from kolibri.core.public.utils import get_device_info
logger = logging.getLogger(__name__)
SERVICE_TYPE = "Kolibri._sub._http._tcp.local."
LOCAL_DOMAIN = "kolibri.local"
ZEROCONF_STATE = {"zeroconf": None, "listener": None, "service": None}
def _id_from_name(name):
    """Extract the Kolibri instance id from a full zeroconf service name.

    E.g. "abcd1234.Kolibri._sub._http._tcp.local." -> "abcd1234".

    Raises:
        ValueError: if ``name`` is not a Kolibri service name.  The original
            used ``assert``, which is silently stripped under ``python -O``.
    """
    if not name.endswith(SERVICE_TYPE):
        raise ValueError(
            "Invalid service name; must end with '%s'" % SERVICE_TYPE
        )
    return name.replace(SERVICE_TYPE, "").strip(".")
def _is_port_open(host, port, timeout=1):
    """Return True if a TCP connection to ``(host, port)`` succeeds within ``timeout`` seconds."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    with closing(sock):
        sock.settimeout(timeout)
        # connect_ex returns 0 on success instead of raising an exception.
        return sock.connect_ex((host, port)) == 0
class KolibriZeroconfService(object):
    """Registration of this Kolibri instance as an mDNS service on the LAN."""

    # Holds the ServiceInfo once registered; None means "not registered".
    info = None

    def __init__(self, id, port=8080, data=None):
        """Prepare (but don't yet register) a service advertisement.

        BUG FIX: ``data`` previously defaulted to a mutable ``{}``, which is
        shared across all calls; use the None sentinel instead.
        """
        self.id = id
        self.port = port
        data = {} if data is None else data
        # Property values must be serialized strings for zeroconf TXT records.
        self.data = {key: json.dumps(val) for (key, val) in data.items()}

    def register(self):
        """Register this service on the network, de-duplicating the name if needed."""
        if not ZEROCONF_STATE["zeroconf"]:
            initialize_zeroconf_listener()
        if self.info is not None:
            logger.error("Service is already registered!")
            return
        i = 1
        id = self.id
        while not self.info:
            # attempt to create an mDNS service and register it on the network
            try:
                info = ServiceInfo(
                    SERVICE_TYPE,
                    name=".".join([id, SERVICE_TYPE]),
                    server=".".join([id, LOCAL_DOMAIN, ""]),
                    address=USE_IP_OF_OUTGOING_INTERFACE,
                    port=self.port,
                    properties=self.data,
                )
                ZEROCONF_STATE["zeroconf"].register_service(info, ttl=60)
                self.info = info
            except NonUniqueNameException:
                # if there's a name conflict, append incrementing integer until no conflict
                i += 1
                id = "%s-%d" % (self.id, i)
                if i > 100:
                    # Give up after 100 attempts rather than loop forever.
                    raise NonUniqueNameException()
        self.id = id
        return self

    def unregister(self):
        """Withdraw this service from the network."""
        if self.info is None:
            # BUG FIX: was ``logging.error`` (the module), inconsistent with
            # the module-level ``logger`` used everywhere else in this file.
            logger.error("Service is not registered!")
            return
        ZEROCONF_STATE["zeroconf"].unregister_service(self.info)
        self.info = None

    def cleanup(self, *args, **kwargs):
        """Unregister if currently registered; safe to call multiple times."""
        if self.info and ZEROCONF_STATE["zeroconf"]:
            self.unregister()
class KolibriZeroconfListener(object):
    """Tracks Kolibri peers appearing/disappearing on the zeroconf network."""

    # Discovered peers keyed by instance id.  NOTE: class-level dict, shared
    # by all listener instances — presumably intentional (singleton usage);
    # confirm before instantiating more than one listener.
    instances = {}

    def add_service(self, zeroconf, type, name):
        """Callback fired when a Kolibri service appears on the network."""
        timeout = 5000
        # get_service_info timeout is in milliseconds.
        info = zeroconf.get_service_info(type, name, timeout=timeout)
        if info is None:
            logger.warn(
                "Zeroconf network service information could not be retrieved within {} seconds".format(
                    str(timeout / 1000.0)
                )
            )
            return
        id = _id_from_name(name)
        ip = socket.inet_ntoa(info.address)
        base_url = "http://{ip}:{port}/".format(ip=ip, port=info.port)
        # Detect whether the announcement is our own registration.
        zeroconf_service = ZEROCONF_STATE.get("service")
        is_self = zeroconf_service and zeroconf_service.id == id
        instance = {
            "id": id,
            "ip": ip,
            # "local" flags whether the peer's IP is one of this machine's.
            "local": ip in get_all_addresses(),
            "port": info.port,
            "host": info.server.strip("."),
            "base_url": base_url,
            "self": is_self,
        }
        # TXT record properties arrive as bytes keys / JSON-encoded values.
        device_info = {
            bytes.decode(key): json.loads(val) for (key, val) in info.properties.items()
        }
        instance.update(device_info)
        self.instances[id] = instance
        if not is_self:
            # Persist remote peers so the rest of Kolibri can reach them.
            try:
                DynamicNetworkLocation.objects.update_or_create(
                    dict(base_url=base_url, **device_info), id=id
                )
                logger.info(
                    "Kolibri instance '%s' joined zeroconf network; service info: %s"
                    % (id, self.instances[id])
                )
            except ValidationError:
                import traceback
                logger.warn(
                    """
                    A new Kolibri instance '%s' was seen on the zeroconf network,
                    but we had trouble getting the information we needed about it.
                    Service info:
                    %s
                    The following exception was raised:
                    %s
                    """
                    % (id, self.instances[id], traceback.format_exc(limit=1))
                )
            finally:
                # Close the DB connection: this runs on a zeroconf thread.
                connection.close()

    def remove_service(self, zeroconf, type, name):
        """Callback fired when a Kolibri service leaves the network."""
        id = _id_from_name(name)
        logger.info("Kolibri instance '%s' has left the zeroconf network." % (id,))
        try:
            if id in self.instances:
                del self.instances[id]
        except KeyError:
            pass
        DynamicNetworkLocation.objects.filter(pk=id).delete()
        connection.close()
def register_zeroconf_service(port):
    """Advertise this Kolibri instance on the local network via mDNS.

    Clears all previously-discovered dynamic network locations (peers will be
    re-discovered), replaces any existing registration, and publishes this
    device's info on ``port``.
    """
    device_info = get_device_info()
    # Start from a clean slate; stale peers are removed and re-discovered.
    DynamicNetworkLocation.objects.all().delete()
    connection.close()
    id = device_info.get("instance_id")
    if ZEROCONF_STATE["service"] is not None:
        unregister_zeroconf_service()
    logger.info("Registering ourselves to zeroconf network with id '%s'..." % id)
    data = device_info
    ZEROCONF_STATE["service"] = KolibriZeroconfService(id=id, port=port, data=data)
    ZEROCONF_STATE["service"].register()
def unregister_zeroconf_service():
    """Withdraw our mDNS registration (if any) and shut down the zeroconf engine."""
    service = ZEROCONF_STATE["service"]
    if service is not None:
        service.cleanup()
        ZEROCONF_STATE["service"] = None
    engine = ZEROCONF_STATE["zeroconf"]
    if engine is not None:
        engine.close()
def initialize_zeroconf_listener():
    """Create the zeroconf engine and subscribe our listener for Kolibri services."""
    engine = Zeroconf()
    listener = KolibriZeroconfListener()
    ZEROCONF_STATE["zeroconf"] = engine
    ZEROCONF_STATE["listener"] = listener
    engine.add_service_listener(SERVICE_TYPE, listener)
def get_peer_instances():
    """Return the discovered peer-instance dicts, or [] if discovery never started."""
    listener = ZEROCONF_STATE["listener"]
    if listener is None:
        return []
    return listener.instances.values()
| mrpau/kolibri | kolibri/core/discovery/utils/network/search.py | Python | mit | 6,523 |
from PIL import Image
from math import ceil, floor
def load_img(src):
    """Open and return the image at path ``src`` (PIL loads pixel data lazily)."""
    return Image.open(src)
def create_master(width, height):
    """Create a transparent RGBA canvas of the given size for the sprite sheet."""
    return Image.new("RGBA", (width, height))
def closest_power_two(num):
    """Return the smallest power of two >= ``num``, with a floor of 2."""
    # (num - 1).bit_length() == ceil(log2(num)) for num >= 1; the outer max
    # preserves the original behavior of never returning less than 2.
    return max(2, 1 << max(0, num - 1).bit_length())
def create_matrix(cols, rows, images):
    """Lay ``images`` out on a ``cols`` x ``rows`` sprite sheet.

    Each cell is the closest power-of-two size that fits one image, and each
    image is centered within its cell.  Returns the combined master image.
    """
    x, y = images[0].size # We assume that all images are same size
    width = closest_power_two(x)
    height = closest_power_two(y)
    print("Width: {0} Height: {1}".format(width, height))
    # Offsets that center an image inside its power-of-two cell.
    offset_x = int((width - x) / 2)
    offset_y = int((height - y) / 2)
    master = create_master(width * cols, height * rows)
    for index, img in enumerate(images):
        row = floor(index / cols)
        col = index % cols
        # BUG FIX: the vertical offset was subtracted (height * row - offset_y),
        # shifting every sprite above its cell; centering requires adding it,
        # mirroring the x axis.
        master.paste(img, (width * col + offset_x, height * row + offset_y))
    return master
def hero_sprites(name, action, frames):
    """Load all frames of ``action`` for every facing (Back/Front/Left/Right).

    Files follow "img/png/1x/<name>/<action><facing> (<n>).png" for animations
    (frames > 1) and "<action><facing>.png" for single-frame actions.
    Returns a flat list of PIL images, grouped by facing, in frame order.
    """
    from functools import reduce
    def generator(name, action, position, frames):
        # One list of images per facing.
        if frames > 1:
            return [load_img("img/png/1x/{0}/{1}{2} ({3}).png".format(name, action, position, frame)) for frame in range(1, frames + 1)]
        else:
            return [load_img("img/png/1x/{0}/{1}{2}.png".format(name, action, position))]
    # Concatenate the per-facing lists into one flat list.
    imgs = list(reduce(lambda a, b: a + b, [generator(name, action, pos, frames) for pos in ["Back", "Front", "Left", "Right"]], []))
    return imgs
if __name__ == "__main__":
    # Build a 4x4 sheet from the hero's 3-frame "Dead" animation and save it.
    matrix = create_matrix(4, 4, hero_sprites("hero1", "Dead", 3))
    matrix.save("img/hero1_dead.png", "PNG")
| zaibacu/DamnedQuest | sprite_creator.py | Python | mit | 1,532 |
from django.conf import settings
# Each setting has a module-local default (underscore-prefixed) that can be
# overridden in the Django settings file via the corresponding OWNEY_* name.

# URL prefix for linking to a USPS package-tracking page.
_TRACKING_USPS_URL = 'http://trkcnfrm1.smi.usps.com/PTSInternetWeb/InterLabelInquiry.do?origTrackNum='
TRACKING_USPS_URL = getattr(settings, 'OWNEY_USPS_TRACKING_URL', _TRACKING_USPS_URL)
# Endpoint for the USPS shipping web API.
_USPS_API_URL = 'http://production.shippingapis.com/ShippingAPI.dll'
USPS_API_URL = getattr(settings, 'OWNEY_USPS_API_URL', _USPS_API_URL)
# USPS API credential; the default is a placeholder that must be overridden.
_USPS_API_USERID = 'Set your USPS API userid here'
USPS_API_USERID = getattr(settings, 'OWNEY_USPS_API_USERID', _USPS_API_USERID)
# Link target for the customer-service application; default is a placeholder.
_CS_URL = 'Set the URL for your Customer Service application here'
TRACKING_CS_URL = getattr(settings, 'OWNEY_TRACKING_CS_URL', _CS_URL)
| JohnSpeno/owney | owney/conf/settings.py | Python | mit | 635 |
import _plotly_utils.basevalidators
class YcalendarValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Validator for the histogram trace's ``ycalendar`` property.

    Restricts values to the calendar systems supported by Plotly.
    """

    def __init__(self, plotly_name="ycalendar", parent_name="histogram", **kwargs):
        super(YcalendarValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # "calc" edit type: changing this property triggers a full recalc.
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "info"),
            values=kwargs.pop(
                "values",
                [
                    "gregorian",
                    "chinese",
                    "coptic",
                    "discworld",
                    "ethiopian",
                    "hebrew",
                    "islamic",
                    "julian",
                    "mayan",
                    "nanakshahi",
                    "nepali",
                    "persian",
                    "jalali",
                    "taiwan",
                    "thai",
                    "ummalqura",
                ],
            ),
            **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/histogram/_ycalendar.py | Python | mit | 1,058 |
# Uppgift 4  ("Exercise 4": string representations of 0..3)
[str(i) for i in range(4)]
# Uppgift 5  ("Exercise 5": perfect squares between 1000 and 1199)
from math import sqrt # square root
list(filter(lambda x: sqrt(x).is_integer(), range(1000, 1200)))
# Uppgift 6  ("Exercise 6": closures)
# NOTE(review): everything below is a pasted interactive-session transcript
# (">>>" prompts and outputs), so this file is not runnable as-is.
>>> def make_decrementor(n):
... return lambda x: n - x
...
>>> f = make_decrementor(42)
>>> f(0)
42
>>> f(1)
41
>>> f(-3)
| simonlindblad/quizilito | build/funktionell-programmering.py | Python | mit | 295 |
import string, copy
def joinHeaders(first, second, joined, on):
    """Combine the headers of two datasets into ``joined.headers``.

    Headers from ``second`` (other than the join column ``on``) that collide
    with a header of ``first`` are renamed by repeatedly appending "_<i>"
    until the name is free.  Returns a dict mapping each renamed original
    header to its new name.
    """
    joined.headers = list(first.headers)
    renamed = {}
    for header in second.headers:
        # The join column is shared between both datasets, never duplicated.
        if header == on:
            continue
        candidate = header
        attempt = 0
        while candidate in first.headers:
            candidate = '{0}_{1}'.format(candidate, attempt)
            attempt += 1
        if attempt > 0:
            renamed[header] = candidate
        joined.headers.append(candidate)
    return renamed
def mergeRow(row, toMerge, mappedHeaders):
    """Copy every field of ``toMerge`` into ``row`` in place.

    Keys present in ``mappedHeaders`` are written under their renamed header.
    """
    for header, value in toMerge.items():
        target = mappedHeaders.get(header, header)
        row[target] = value
def mergeRows(first, second, joined, on, mappedHeaders):
    """Populate ``joined.rows`` by merging ``second``'s rows into ``first``'s.

    A row of ``second`` whose ``on`` value matches an existing joined row is
    merged into it; unmatched rows are appended as new rows.  Both inputs are
    deep-copied, so neither source dataset is mutated.
    """
    joined.rows = copy.deepcopy(first.rows)
    for incoming in copy.deepcopy(second.rows):
        key = incoming[on]
        # Rows appended earlier in this loop are legitimate merge targets too.
        target = next((r for r in joined.rows if r[on] == key), None)
        if target is not None:
            mergeRow(target, incoming, mappedHeaders)
        else:
            fresh = {}
            mergeRow(fresh, incoming, mappedHeaders)
            joined.rows.append(fresh)
class Dataset:
    """An in-memory CSV-style table: ``headers`` (list of names) plus
    ``rows`` (list of dicts mapping header -> cell value).  Python 2 code.
    """

    def __init__(self, filename = '', separator=',', header=True):
        # Load rows from *filename*; a missing/unreadable file (including the
        # default '') silently yields an empty dataset.  When header is False,
        # synthetic names V0..Vn are generated from the first data row.
        self.headers = []
        self.rows = []
        try:
            infile = file(filename, 'r')  # Python 2 built-in file()
            if header:
                self.headers = infile.readline().strip().split(separator)
            for line in infile:
                row = line.strip().split(separator)
                if not header and not self.headers:
                    self.headers = ["V{0}".format(i) for i in range(len(row))]
                self.rows.append({self.headers[i]:row[i] for i in range(len(row))})
            infile.close()
        except IOError:
            # File absent or unreadable: leave the dataset empty by design.
            pass

    def export(self, filename):
        # Write the dataset back out as comma-separated text (always ','
        # regardless of the separator used to load it).
        outfile = file(filename, 'w')
        outfile.write(','.join(self.headers))
        for row in self.rows:
            outfile.write('\n')
            outfile.write(','.join([row[x] for x in self.headers]))
        outfile.close()

    def join(self, other, on):
        """Join self dataset with another dataset, creating a new dataset.
        The original datasets remain unchanged.
        The third argument is the header on which to join"""
        # check for correct join
        # NOTE(review): this accepts `on` present in EITHER dataset, although
        # the error message says "both" — confirm intended semantics.
        if not (on in self.headers or on in other.headers):
            print "Error: header '{0}' not found in both collections".format(on)
            return None
        # create new dataset
        joined = Dataset()
        # fill new dataset with combined data
        mappedHeaders = joinHeaders(self, other, joined, on)
        mergeRows(self, other, joined, on, mappedHeaders)
        joined.ensureFilled()
        # return newly created dataset
        return joined

    def pivot(self):
        """Pivot this dataset into a new one, discarding current headers, using first column as new headers"""
        pivoted = Dataset()
        for (index, header) in enumerate(self.headers):
            for row in self.rows:
                if index == 0:
                    # The first column's values become the new header names.
                    pivoted.headers.append(row[header])
                else:
                    if len(pivoted.rows) < index:
                        # Grow the row list lazily, one dict per old column.
                        pivoted.rows.extend([{} for x in range(index - len(pivoted.rows))])
                    pivoted.rows[index - 1][row[self.headers[0]]] = row[header]
        return pivoted

    def ensureFilled(self):
        # Guarantee every row has every header, padding missing cells with None.
        for row in self.rows:
            for header in self.headers:
                if not header in row:
                    row[header] = None

    def append(self, other, ensureFilled = True):
        """Append rows of another dataset to this one, leaving the other dataset unchanged"""
        # NOTE(review): rows are appended by reference (no copy), so later
        # mutation of self's rows can affect `other` — confirm acceptable.
        self.rows.extend(other.rows)
        self.headers.extend([x for x in other.headers if not x in self.headers])
        if(ensureFilled):
            self.ensureFilled()
        return self
| dwilmer/rcpsp-testing-framework | dataset.py | Python | mit | 3,291 |
#!/bin/python3
import sys
def fact(n):
    """Return n! for integers, with fact(n) == 1 for any n <= 1.

    Rewritten from a recursive lambda (PEP 8 discourages named lambdas) to an
    iterative loop, so very large n cannot hit Python's recursion limit.
    """
    if n <= 1:
        return 1
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
# Read n from stdin, compute its factorial, and print it (HackerRank
# "Extra Long Factorials" input/output format).
n = int(input().strip())
fct = fact(n)
print(fct)
| lilsweetcaligula/Online-Judges | hackerrank/algorithms/implementation/medium/extra_long_factorials/py/solution.py | Python | mit | 134 |
################################################################################
# Copyright (C) 2012-2013 Leap Motion, Inc. All rights reserved. #
# Leap Motion proprietary and confidential. Not for distribution. #
# Use subject to the terms of the Leap Motion SDK Agreement available at #
# https://developer.leapmotion.com/sdk_agreement, or another agreement #
# between Leap Motion and you, your company or other organization. #
################################################################################
# set library path
import os, sys, inspect
# Make the bundled Leap SDK libraries importable: choose the 64- or 32-bit
# build based on the interpreter's pointer size, resolved relative to this
# file's own directory.
src_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
arch_dir = 'lib/x64' if sys.maxsize > 2**32 else 'lib/x86'
sys.path.insert(0, os.path.abspath(os.path.join(src_dir, arch_dir)))
import Leap, sys, thread, time
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
class SampleListener(Leap.Listener):
    """Leap Motion listener that logs hands, fingers, tools, and gestures
    for every tracking frame.  Python 2 code from the Leap SDK sample.
    """

    # Index-aligned lookup tables for the SDK's integer type/state codes.
    finger_names = ['Thumb', 'Index', 'Middle', 'Ring', 'Pinky']
    bone_names = ['Metacarpal', 'Proximal', 'Intermediate', 'Distal']
    state_names = ['STATE_INVALID', 'STATE_START', 'STATE_UPDATE', 'STATE_END']

    def on_init(self, controller):
        print "Initialized"

    def on_connect(self, controller):
        print "Connected"
        # Enable gestures
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE);
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP);
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP);
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE);

    def on_disconnect(self, controller):
        # Note: not dispatched when running in a debugger.
        print "Disconnected"

    def on_exit(self, controller):
        print "Exited"

    def on_frame(self, controller):
        """Log everything tracked in the most recent frame."""
        # Get the most recent frame and report some basic information
        frame = controller.frame()
        print "Frame id: %d, timestamp: %d, hands: %d, fingers: %d, tools: %d, gestures: %d" % (
            frame.id, frame.timestamp, len(frame.hands), len(frame.fingers), len(frame.tools), len(frame.gestures()))
        # Get hands
        for hand in frame.hands:
            handType = "Left hand" if hand.is_left else "Right hand"
            print " %s, id %d, position: %s" % (
                handType, hand.id, hand.palm_position)
            # Get the hand's normal vector and direction
            normal = hand.palm_normal
            direction = hand.direction
            # Calculate the hand's pitch, roll, and yaw angles
            print " pitch: %f degrees, roll: %f degrees, yaw: %f degrees" % (
                direction.pitch * Leap.RAD_TO_DEG,
                normal.roll * Leap.RAD_TO_DEG,
                direction.yaw * Leap.RAD_TO_DEG)
            # Get arm bone
            arm = hand.arm
            print " Arm direction: %s, wrist position: %s, elbow position: %s" % (
                arm.direction,
                arm.wrist_position,
                arm.elbow_position)
            # Get fingers
            for finger in hand.fingers:
                print " %s finger, id: %d, length: %fmm, width: %fmm" % (
                    self.finger_names[finger.type()],
                    finger.id,
                    finger.length,
                    finger.width)
                # Get bones (each finger has the four bones listed in bone_names)
                for b in range(0, 4):
                    bone = finger.bone(b)
                    print " Bone: %s, start: %s, end: %s, direction: %s" % (
                        self.bone_names[bone.type],
                        bone.prev_joint,
                        bone.next_joint,
                        bone.direction)
        # Get tools
        for tool in frame.tools:
            print " Tool id: %d, position: %s, direction: %s" % (
                tool.id, tool.tip_position, tool.direction)
        # Get gestures
        for gesture in frame.gestures():
            if gesture.type == Leap.Gesture.TYPE_CIRCLE:
                circle = CircleGesture(gesture)
                # Determine clock direction using the angle between the pointable and the circle normal
                if circle.pointable.direction.angle_to(circle.normal) <= Leap.PI/2:
                    clockwiseness = "clockwise"
                else:
                    clockwiseness = "counterclockwise"
                # Calculate the angle swept since the last frame
                swept_angle = 0
                if circle.state != Leap.Gesture.STATE_START:
                    previous_update = CircleGesture(controller.frame(1).gesture(circle.id))
                    swept_angle = (circle.progress - previous_update.progress) * 2 * Leap.PI
                print " Circle id: %d, %s, progress: %f, radius: %f, angle: %f degrees, %s" % (
                    gesture.id, self.state_names[gesture.state],
                    circle.progress, circle.radius, swept_angle * Leap.RAD_TO_DEG, clockwiseness)
            if gesture.type == Leap.Gesture.TYPE_SWIPE:
                swipe = SwipeGesture(gesture)
                print " Swipe id: %d, state: %s, position: %s, direction: %s, speed: %f" % (
                    gesture.id, self.state_names[gesture.state],
                    swipe.position, swipe.direction, swipe.speed)
            if gesture.type == Leap.Gesture.TYPE_KEY_TAP:
                keytap = KeyTapGesture(gesture)
                print " Key Tap id: %d, %s, position: %s, direction: %s" % (
                    gesture.id, self.state_names[gesture.state],
                    keytap.position, keytap.direction )
            if gesture.type == Leap.Gesture.TYPE_SCREEN_TAP:
                screentap = ScreenTapGesture(gesture)
                print " Screen Tap id: %d, %s, position: %s, direction: %s" % (
                    gesture.id, self.state_names[gesture.state],
                    screentap.position, screentap.direction )
        if not (frame.hands.is_empty and frame.gestures().is_empty):
            print ""

    def state_string(self, state):
        # Map a gesture state code to its name.
        # NOTE(review): returns None (implicitly) for unrecognized states.
        if state == Leap.Gesture.STATE_START:
            return "STATE_START"
        if state == Leap.Gesture.STATE_UPDATE:
            return "STATE_UPDATE"
        if state == Leap.Gesture.STATE_STOP:
            return "STATE_STOP"
        if state == Leap.Gesture.STATE_INVALID:
            return "STATE_INVALID"
def main():
    """Attach a SampleListener to the Leap controller until Enter is pressed."""
    # Create a sample listener and controller
    listener = SampleListener()
    controller = Leap.Controller()
    # Have the sample listener receive events from the controller
    controller.add_listener(listener)
    # Keep this process running until Enter is pressed
    print "Press Enter to quit..."
    try:
        sys.stdin.readline()
    except KeyboardInterrupt:
        pass
    finally:
        # Remove the sample listener when done
        controller.remove_listener(listener)


if __name__ == "__main__":
    main()
| if1live/marika | server/sample.py | Python | mit | 6,943 |
<<<<<<< HEAD
from flask import Blueprint, render_template, request, url_for, jsonify
from config import mongo
import pandas as pd
import json
from bson import json_util
import retrieve_model as rmodel
from collections import Counter
main = Blueprint('main', __name__, template_folder='templates')
@main.route('/')
def index():
    """Landing page."""
    # Commented-out MongoDB smoke test left from development.
    #mongo.db.visits.insert_one({"no":"way"})
    #visits = mongo.db.visits.find_one()
    #return str(visits)
    return render_template('index.html')
@main.route('/predict/')
def get_started():
down_list = [{'value':1,'name':'1st'},{'value':2,'name':'2nd'},{'value':3,'name':'3rd'},{'value':4,'name':'4th'}]
quarter_list = [{'value':1,'name':'1st'},{'value':2,'name':'2nd'},{'value':3,'name':'3rd'},{'value':4,'name':'4th'}]
clock_list = [{'value':15,'name':'<15'}, {'value':14,'name':'<14'}, {'value':13,'name':'<13'},
{'value':12,'name':'<12'}, {'value':11,'name':'<11'}, {'value':10,'name':'<10'},
{'value':9,'name':'<9'}, {'value':8,'name':'<8'}, {'value':7,'name':'<7'},
{'value':6,'name':'<6'}, {'value':5,'name':'<5'}, {'value':4,'name':'<4'},
{'value':3,'name':'<3'}, {'value':2,'name':'<2'}, {'value':1,'name':'<1'}]
yards_list = [{'value':0,'name':'inches'}, {'value':1,'name':'1'},
{'value':2,'name':'2'}, {'value':3,'name':'3'}, {'value':4,'name':'4'},
{'value':5,'name':'5'}, {'value':6,'name':'6'}, {'value':7,'name':'7'},
{'value':8,'name':'8'}, {'value':9,'name':'9'}, {'value':10,'name':'10'},
{'value':11,'name':'11'}, {'value':12,'name':'12'}, {'value':13,'name':'13'},
{'value':14,'name':'14'}, {'value':15,'name':'15'}, {'value':16,'name':'16'},
{'value':17,'name':'17'}, {'value':18,'name':'18'}, {'value':19,'name':'19'},
{'value':20,'name':'20'}, {'value':21,'name':'21'}, {'value':22,'name':'22'},
{'value':23,'name':'23'}, {'value':24,'name':'24'}, {'value':25,'name':'25'}]
field_list = range(0,101,1)
score_list = range(0,61,1)
down_dict = [{'value':1,'name':'1st'},{'value':2,'name':'2nd'},{'value':3,'name':'3rd'},{'value':4,'name':'4th'}]
return render_template('predict.html',
=======
from flask import Blueprint, render_template, request, url_for
from config import mongo
main = Blueprint('main', __name__, template_folder='templates')
@main.route('/')
def index():
mongo.db.visits.insert_one({"foo":"bar"})
visits = mongo.db.visits.find_one()
return str(visits)
#return render_template('index.html')
@main.route('/getstarted/')
def get_started():
down_list = ['1st','2nd','3rd','4th']
quarter_list = ['1st','2nd','3rd','4th']
clock_list = ['> 15 min', '> 10 min', '> 5 min', '> 2 min', '< 2 min', '< 1 min']
yards_list = ['inches', 'goal', '1', '2', '3', '4', '5', '6', '7' ,'8', '9', '10', '> 10']
field_list = range(0,105,5)
score_list = range(-60,61,1)
return render_template('getstarted.html',
>>>>>>> master
down_list=down_list,
quarter_list=quarter_list,
clock_list=clock_list,
yards_list=yards_list,
field_list=field_list,
<<<<<<< HEAD
score_list=score_list,
down_dict=down_dict
)
@main.route('/results/', methods=['POST'])
def results():
=======
score_list=score_list
)
@main.route('/run/', methods=['POST'])
def run():
>>>>>>> master
down = request.form['down']
quarter = request.form['quarter']
clock = request.form['clock']
yards = request.form['yards']
field = request.form['field']
score = request.form['score']
<<<<<<< HEAD
sign = request.form['sign']
guess = request.form['guess']
score = str(int(score) * int(sign))
# Store scenario in mongodb
scenario = {
'down': int(down),
'quarter': int(quarter),
'clock': int(clock),
'yards': int(yards),
'field': int(field),
'score': int(score),
'guess': guess
}
# Insert the current user's guess into the DB
print('Puting this into db:', scenario)
mongo.db.scenarios.insert_one(scenario)
# Pull User guesses from MongoDB
#scenarios = mongo.db.scenarios.find()
# Pull NFL Stats from MongoDB
#nflstats = mongo.db.nfldata.find()
guesses = {'pass':'Pass', 'run':'Run', 'punt':'Punt', 'fg':'Field Goal', 'kneel': 'QB Kneel'}
try:
return render_template('results.html',
guess_title = guesses[guess],
=======
guess = request.form['guess']
# Store scenario in mongodb
scenario = {
'down': down,
'quarter': quarter,
'clock': clock,
'yards': yards,
'field': field,
'score': score,
'guess': guess
}
mongo.db.scenarios.insert_one(scenario)
scenarios = mongo.db.scenarios.find()
try:
return render_template('results.html',
>>>>>>> master
down=down,
quarter=quarter,
clock=clock,
yards=yards,
field=field,
score=score,
guess=guess,
<<<<<<< HEAD
scenarios=[None],#scenarios,
nflstats=[None]#nflstats
)
except Exception as e:
return "Something went wrong..." + str(e)
@main.route('/stats/')
def tables():
    """Render the stats page from the prediction model's output."""
    title = 'Test Table'
    # NOTE(review): the next line immediately overwrites the title above, and
    # the same value is then used as both title and table — looks like a
    # leftover from debugging; confirm intent before cleaning up.
    title = rmodel.predict_proba(4,4,1,20,-1)
    table = title
    return render_template('stats.html', table=table, title=title)
@main.route('/data/guesses/')
def guessData():
    """Return JSON counts of stored user guesses matching the query-string filters."""
    # NOTE(review): these individual gets are never used below; the filter is
    # built from request.args.to_dict() instead.
    guess = request.args.get('guess')
    down = request.args.get('down')
    quarter = request.args.get('quarter')
    clock = request.args.get('clock')
    yards = request.args.get('yards')
    field = request.args.get('field')
    score = request.args.get('score')
    search_dict = request.args.to_dict()
    # Coerce numeric filter values to int so they match the stored documents
    # (scenarios are stored with int fields; see the /results/ handler).
    for key in search_dict:
        #if key != 'guess':
        try:
            search_dict[key] = int(search_dict[key])
        except:
            pass
    print(search_dict)
    s=[data['guess'] for data in mongo.db.scenarios.find(search_dict)]
    # Tally how often each play type was guessed under these conditions.
    options = ['pass', 'run', 'punt', 'fg', 'kneel']
    count = {option:s.count(option) for option in options}
    print(count)
    return json.dumps(count, default=json_util.default)
@main.route('/data/nfl/')
def nflData():
    """Return JSON counts of real NFL play types matching the query-string filters."""
    # NOTE(review): these individual gets are never used below; the filter is
    # built from request.args.to_dict() instead.
    playtype = request.args.get('PlayType')
    down = request.args.get('down')
    quarter = request.args.get('quarter')
    clock = request.args.get('clock')
    yards = request.args.get('yards')
    field = request.args.get('field')
    score = request.args.get('score')
    search_dict = request.args.to_dict()
    # Coerce numeric filters to int; 'playtype' is kept as a string.
    # NOTE(review): the skipped key is lowercase 'playtype' but the query arg
    # read above is 'PlayType' — confirm which casing requests actually use.
    for key in search_dict:
        if key != 'playtype':
            try:
                search_dict[key] = int(search_dict[key])
            except:
                pass
    s=[data["PlayType"] for data in mongo.db.nfldata.find(search_dict)]
    print(s)
    # Tally how often each play type occurred under these conditions.
    options = ['pass', 'run', 'punt', 'fg', 'kneel']
    count = {option:s.count(option) for option in options}
    print(count)
    return json.dumps(count, default=json_util.default)
@main.route('/api/predict/')
def apiPredict():
    """JSON API: model play-call probabilities for the requested scenario.

    Returns one grouped-probability entry per situational variable, plus a
    final 'request' entry for the exact scenario supplied in the query string.
    """
    arg_dict = request.args.to_dict()
    # Coerce numeric query-string values to int for the model.
    for key in arg_dict:
        try:
            arg_dict[key] = int(arg_dict[key])
        except:
            pass
    # One grouped prediction per model feature name.
    calculations = [
        {name:rmodel.predict_group_proba(
            arg_dict['quarter'],
            arg_dict['down'],
            arg_dict['yards'],
            arg_dict['clock'],
            arg_dict['field'],
            arg_dict['score'],
            name)
        } for name in ['quarter', 'down', 'yards', 'timeunder', 'yrdline100', 'scorediff']
    ]
    # Point prediction for the exact requested scenario.
    calculations.append({'request':rmodel.predict_proba(
        arg_dict['quarter'],
        arg_dict['down'],
        arg_dict['yards'],
        arg_dict['clock'],
        arg_dict['field'],
        arg_dict['score'],
        False)
    })
    return jsonify(calculations)
=======
scenarios=scenarios
)
except:
return "fail"
>>>>>>> master
| skrzym/monday-morning-quarterback | Application/Site/mmq/main/controllers.py | Python | mit | 7,841 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-16 16:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: relaxes Command.arguments to allow an empty
    # selection in forms (blank=True).
    # NOTE(review): null=True on a ManyToManyField has no effect (Django
    # system check fields.W340); confirm before removing it, since editing
    # an already-applied migration can confuse the autodetector.
    dependencies = [
        ('pushkin', '0024_authparam_secret'),
    ]
    operations = [
        migrations.AlterField(
            model_name='command',
            name='arguments',
            field=models.ManyToManyField(blank=True, null=True, to='pushkin.CommandArgument'),
        ),
    ]
| ilique/webpushkin | pushkin/migrations/0025_auto_20160616_1637.py | Python | mit | 495 |
def read_logfile_by_line(logfile):
    """Yield each line of *logfile* (with newlines), then a final ``None``.

    The trailing ``None`` acts as an end-of-content sentinel for consumers
    such as parse_commands().
    """
    with open(logfile, 'r') as handle:
        yield from handle
    yield None
def parse_commands(log_content):
    """
    Parse cwl docker commands from a line-by-line iterator of log content.

    Returns a list of commands; each command is a list of lines (one per
    step run), with trailing backslash continuations and padding stripped.

    A command starts at a '[job ...' line ending in 'docker \\' and continues
    while lines end with a backslash continuation.
    """
    command_list = []
    command = []
    in_command = False
    # ROBUSTNESS FIX: use the default form of next() so a plain iterator
    # that is NOT terminated by a None sentinel (unlike read_logfile_by_line)
    # ends the loop cleanly instead of raising StopIteration.
    line = next(log_content, None)
    while line:
        line = line.strip('\n')
        if '[job' in line and line.endswith('docker \\'):
            line = 'docker \\'  # drop the job prefix, keep only the command
            in_command = True
        if in_command:
            command.append(line.strip('\\').rstrip(' '))
            if not line.endswith('\\'):
                # No continuation: the current command is complete.
                in_command = False
                command_list.append(command)
                command = []
        line = next(log_content, None)
    return command_list
| 4dn-dcic/tibanna | awsf3/log.py | Python | mit | 1,003 |
__version__ = '20.0.0'
| ofek/pypinfo | pypinfo/__init__.py | Python | mit | 23 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-04 17:41
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Initial auto-generated migration for the accounts app: creates the
    # custom User model (reusing the auth_user table), Profile, ProfileType,
    # and a proxy Permission model with an extra custom permission.
    initial = True
    dependencies = [
        ('auth', '0008_alter_user_username_max_length'),
    ]
    operations = [
        # Custom user model; db_table='auth_user' keeps Django's default table.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
            ],
            options={
                'db_table': 'auth_user',
                'permissions': (('can_view_dashboard', 'Can view all dashboards'), ('can_view_store_profiles', 'Can store profiles')),
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # One-to-one profile attached to a User.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(help_text='Slug', max_length=255, unique=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('is_hidden', models.BooleanField(default=False)),
                ('is_disabled', models.BooleanField(default=False)),
                ('is_password_changed', models.BooleanField(default=False)),
                ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Role tags (admin/user/manager/other) attached to a Profile.
        migrations.CreateModel(
            name='ProfileType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_type', models.CharField(choices=[('admin', 'Admin'), ('user', 'User'), ('manager', 'Manager'), ('other', 'Other')], default='admin', max_length=255)),
                ('profile', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='profile_types', to='accounts.Profile')),
            ],
        ),
        # Proxy over auth.Permission adding the shorten_urls permission.
        migrations.CreateModel(
            name='Permission',
            fields=[
            ],
            options={
                'permissions': (('shorten_urls', 'Can shorten urls in the dashboard'),),
                'proxy': True,
                'indexes': [],
            },
            bases=('auth.permission',),
            managers=[
                ('objects', django.contrib.auth.models.PermissionManager()),
            ],
        ),
        migrations.AddField(
            model_name='user',
            name='user_permissions',
            field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
        ),
    ]
| tiagoarasilva/django-boilerplate | project_name/accounts/migrations/0001_initial.py | Python | mit | 5,076 |
"""
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
MoleculeType entity classes.
"""
from everest.entities.base import Entity
from everest.entities.utils import slug_from_string
__docformat__ = "reStructuredText en"
__all__ = ['MoleculeType',
'MOLECULE_TYPE_IDS']
class MOLECULE_TYPE_IDS(object):
    """
    Registry of the known molecule type identifiers.
    """
    # FIXME: reconcile with `thelma.data.moleculetype` # pylint:disable=W0511
    SSDNA = 'SSDNA'
    AMPLICON = 'AMPLICON'
    SIRNA = 'SIRNA'
    COMPOUND = 'COMPOUND'
    LONG_DSRNA = 'LONG_DSRNA'
    ANTI_MIR = 'ANTI_MIR'
    ESI_RNA = 'ESI_RNA'
    MIRNA_INHI = 'MIRNA_INHI'
    CLND_DSDNA = 'CLND_DSDNA'
    MIRNA_MIMI = 'MIRNA_MIMI'
    # Snapshot of every public identifier defined above.  locals() here is
    # the class namespace (the comprehension's iterable is evaluated in the
    # enclosing scope); dunder/private names are filtered out.
    __ALL = [name for name in sorted(locals().keys())
             if not name.startswith('_')]

    @classmethod
    def is_known_type(cls, molecule_type_name):
        """
        Return whether *molecule_type_name* is one of the known identifiers.
        """
        return molecule_type_name in cls.__ALL
class MoleculeType(Entity):
    """
    Instances of this class describe molecule types, such as \'siRna\'.
    """
    #: The name of the molecule type.
    name = None
    #: A more detailed description.
    description = None
    #: A number indicating the time it takes for molecules of this type to
    #: thaw.
    thaw_time = None
    #: A list of modification chemical structures
    #: (:class:`thelma.entities.chemicalstructure.ChemicalStructure`)
    #: that are associated with this molecule type.
    modifications = None
    #: The default stock concentration for this molecule type.
    default_stock_concentration = None

    def __init__(self, name, default_stock_concentration,
                 description='', thaw_time=0, modifications=None, **kw):
        if 'id' not in kw:
            # Default the entity id to the lower-cased type name.
            kw['id'] = name.lower()
        Entity.__init__(self, **kw)
        self.name = name
        self.default_stock_concentration = default_stock_concentration
        self.description = description
        self.thaw_time = thaw_time
        # BUG FIX: the original only handled the modifications == None case
        # and silently discarded a caller-supplied list, leaving the
        # class-level None in place on the instance.
        self.modifications = [] if modifications is None else modifications

    @property
    def slug(self):
        #: For instances of this class, the slug is derived from the
        #: :attr:`name`.
        return slug_from_string(self.name)

    def __str__(self):
        return self.id

    def __repr__(self):
        str_format = '<%s id: %s, name: %s, thaw_time: %s>'
        params = (self.__class__.__name__, self.id, self.name, self.thaw_time)
        return str_format % params
| helixyte/TheLMA | thelma/entities/moleculetype.py | Python | mit | 2,617 |
# <pep8 compliant>
# Package entry point: re-export the model/animation classes.
from .xmodel import Model
from .xanim import Anim
from .sanim import SiegeAnim
version = (0, 3, 0)  # Version specifier for PyCoD
| SE2Dev/PyCoD | __init__.py | Python | mit | 158 |
from setuptools import setup
import os
import martapy
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's
    directory (so setup.py works regardless of the current directory)."""
    # BUG FIX: the original did open(...).read() without ever closing the
    # handle; a context manager guarantees the file is closed.
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path) as handle:
        return handle.read()
# Packaging metadata for the martapy distribution.
setup(
    name="martapy",
    version=martapy.__version__,
    author=martapy.__author__,
    author_email="git@edward.sh",
    description="Wrapper for MARTA realtime rail/bus APIs",
    long_description=read('README.rst'),
    keywords="MARTA API rail train Atlanta Georgia GA ATL itsmarta",
    # NOTE(review): this URL points at the author's fbparser project, not
    # martapy — looks like a copy-paste leftover; confirm the correct repo.
    url="https://github.com/arcward/fbparser",
    license="MIT",
    packages=['martapy'],
    install_requires=['requests'],
    include_package_data=True
)
| arcward/martapy | setup.py | Python | mit | 599 |
from django import forms
from .models import Question, Answer, Categories, Customuser
from django.contrib import auth
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class add_Question_Form(forms.ModelForm):
    """ModelForm for posting a new Question with an optional upload and up
    to four categories."""

    question_text = forms.CharField(label=_("question_text"),
                                    widget=forms.Textarea({'cols': '40', 'rows': '5'}))

    class Meta:
        model = Question
        fields = ['question_text', 'upload',
                  'category1', 'category2',
                  'category3', 'category4']

    def clean_question_text(self):
        """Reject empty questions and return the cleaned text.

        BUG FIX: the original hook was named clean_text (never invoked by
        Django, which calls clean_<fieldname>), referenced the undefined
        bare name question_text, and returned True instead of the cleaned
        value as the field-cleaning contract requires.
        """
        question_text = self.cleaned_data.get('question_text', '')
        if question_text == "":
            raise forms.ValidationError("Need a question")
        return question_text

    def save(self, commit=True):
        """Build (and optionally persist) the Question instance."""
        question = super(add_Question_Form, self).save(commit=False)
        question.question_text = self.cleaned_data["question_text"]
        if commit:
            question.save()
        return question
class add_Answer_Form(forms.ModelForm):
    # ModelForm for posting an Answer (answer_text only).
    class Meta:
        model = Answer
        fields = ['answer_text']
    # NOTE(review): Django only calls clean_<fieldname> hooks, so this
    # clean_text method is never invoked (the field is answer_text) — it is
    # effectively dead code; confirm whether it was meant to be
    # clean_answer_text.
    def clean_text(self):
        return self.cleaned_data.get('answer_text')
class UserCreationForm(forms.ModelForm):
    """
    A form that creates a user, with no privileges, from the given username and
    password.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    # Two unbound password fields; the model's password is set explicitly in
    # save() via user.set_password (so it is hashed, never stored raw).
    password1 = forms.CharField(label=_("Password"),
        widget=forms.PasswordInput)
    password2 = forms.CharField(label=_("Password confirmation"),
        widget=forms.PasswordInput,
        help_text=_("Enter the same password as before, for verification."))
    # User's username field and our own 2 fields pass1 and pass2 are used. Later
    # we shall set the User's password by user.set_password.
    class Meta:
        model = Customuser
        fields = ("username","email","first_name","department")
    def clean_password2(self):
        # Runs after clean_password1; verifies both entries match, then runs
        # Django's password validators against the (partially built) instance.
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            )
        # The username is copied onto the instance first so validators that
        # compare the password against user attributes can see it.
        self.instance.username = self.cleaned_data.get('username')
        # To remove invalid passwords like short words, number only cases
        auth.password_validation.validate_password(self.cleaned_data.get('password2'), self.instance)
        return password2
    def save(self, commit=True):
        # Hash-and-set the password before (optionally) persisting.
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password2"])
        if commit:
            user.save()
        return user
class AuthenticationForm(forms.Form):
    """
    Base class for authenticating users. Extend this to get a form that accepts
    username/password logins.

    On successful validation the authenticated user is stored on
    ``self.user_cache``.
    """
    username = forms.CharField( max_length=254,
                               widget=forms.TextInput( attrs={'autofocus': ''}),
                               )
    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)

    error_messages = {
        'invalid_login': _("Please enter a correct username and password. "
                           "Note that both fields may be case-sensitive."),
        'inactive': _("This account is inactive."),
    }

    def clean(self):
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')
        if username and password:
            self.user_cache = auth.authenticate(username=username,
                                                password=password)
            if self.user_cache is None:
                raise forms.ValidationError(
                    self.error_messages['invalid_login'],
                    code='invalid_login',
                )
        # BUG FIX: always return cleaned_data.  The original only returned it
        # inside the if-branch, so clean() returned None whenever either
        # field was missing, wiping the form's cleaned data.
        return self.cleaned_data
class UserForm(forms.ModelForm):
    # ModelForm exposing only the Customuser.categories field.
    class Meta:
        model = Customuser
        fields = ('categories',)
class CustomuserAdminForm(forms.ModelForm):
    # Admin-side form for Customuser: account identity plus the permission
    # and status flags (no password fields).
    class Meta:
        model = Customuser
        fields = ("username","email","first_name","last_name",
            'department','groups','is_active','is_staff','is_superuser')
        # fields = ['username','password','verify,'first_name','last_name','email','batch',]
################### Django classes ##########################
| aadithyamd/QandA | QA/forms.py | Python | mit | 4,443 |
import django.conf
class AppSettings(object):
    """
    A holder for app-specific default settings that allows overriding via
    the project's settings.
    """

    def __getattribute__(self, attr):
        # Only setting-style names (fully uppercase, e.g. COUNTRIES_FLAG_URL)
        # are looked up on the Django project settings first; everything else
        # goes straight to this object.
        if attr != attr.upper():
            return super(AppSettings, self).__getattribute__(attr)
        try:
            return getattr(django.conf.settings, attr)
        except AttributeError:
            # Not overridden in the project: fall back to the default here.
            return super(AppSettings, self).__getattribute__(attr)
class Settings(AppSettings):
    """Default values for every django-countries setting; each attribute can
    be overridden in the Django project settings (see AppSettings)."""
    COUNTRIES_FLAG_URL = 'flags/{code}.gif'
    """
    The URL for a flag.
    It can either be relative to the static url, or an absolute url.
    The location is parsed using Python's string formatting and is passed the
    following arguments:
    * code
    * code_upper
    For example: ``COUNTRIES_FLAG_URL = 'flags/16x10/{code_upper}.png'``
    """
    COUNTRIES_COMMON_NAMES = True
    """
    Whether to use the common names for some countries, as opposed to the
    official ISO name.
    Some examples:
    "Bolivia" instead of "Bolivia, Plurinational State of"
    "South Korea" instead of "Korea (the Republic of)"
    "Taiwan" instead of "Taiwan (Province of China)"
    """
    COUNTRIES_OVERRIDE = {}
    """
    A dictionary of names to override the defaults.
    Note that you will need to handle translation of customised country names.
    Setting a country's name to ``None`` will exclude it from the country list.
    For example::
        COUNTRIES_OVERRIDE = {
            'NZ': _('Middle Earth'),
            'AU': None
        }
    """
    COUNTRIES_ONLY = {}
    """
    Similar to COUNTRIES_OVERRIDE
    A dictionary of names to include in selection.
    Note that you will need to handle translation of customised country names.
    For example::
        COUNTRIES_ONLY = {
            'NZ': _('Middle Earth'),
            'AU': _('Desert'),
        }
    """
    COUNTRIES_FIRST = []
    """
    Countries matching the country codes provided in this list will be shown
    first in the countries list (in the order specified) before all the
    alphanumerically sorted countries.
    """
    COUNTRIES_FIRST_REPEAT = False
    """
    Countries listed in :attr:`COUNTRIES_FIRST` will be repeated again in the
    alphanumerically sorted list if set to ``True``.
    """
    COUNTRIES_FIRST_BREAK = None
    """
    Countries listed in :attr:`COUNTRIES_FIRST` will be followed by a null
    choice with this title (if set) before all the alphanumerically sorted
    countries.
    """
    COUNTRIES_FIRST_SORT = False
    """
    Countries listed in :attr:`COUNTRIES_FIRST` will be alphanumerically
    sorted based on their translated name instead of relying on their
    order in :attr:`COUNTRIES_FIRST`.
    """
# Shared module-level instance through which settings are read.
settings = Settings()
| schinckel/django-countries | django_countries/conf.py | Python | mit | 2,772 |
from interfaces.labels_map import LabelsMap
from helpers.python_ext import to_str
class LTS:
    """A labelled transition system: a set of initial states, a transition
    function model (tau) and one output model per output signal, all over
    the given input/output signals."""
    def __init__(self,
                 init_states,
                 model_by_signal:dict,
                 tau_model:LabelsMap,
                 state_name:str,
                 input_signals,
                 output_signals):
        # Map: output signal -> its model.
        self._output_models = model_by_signal
        # Transition function model (label -> next state).
        self._tau_model = tau_model
        self._init_states = set(init_states)
        # Name of the state variable used inside labels.
        self._state_name = state_name
        self._output_signals = output_signals # TODO: duplication with _output_models?
        self._input_signals = input_signals
    @property
    def state_name(self):
        """Name of the state variable used in transition labels."""
        return self._state_name
    @property
    def input_signals(self):
        """The system's input signals."""
        return self._input_signals
    @property
    def output_signals(self):
        """The system's output signals."""
        return self._output_signals
    @property
    def init_states(self):
        """The set of initial states."""
        return self._init_states
    @property
    def states(self):
        """All reachable states: the range of tau plus the initial states."""
        # states = set(k[self._state_name] for k in self._tau_model)
        # return the range of tau \cup init_states
        states = set(map(lambda l_v: l_v[1], self._tau_model.items()))
        states.update(self.init_states)
        return states
    @property
    def tau_model(self) -> LabelsMap:
        """The transition function model."""
        return self._tau_model
    @property
    def model_by_signal(self):
        """Map of output signal -> model (alias of :attr:`output_models`)."""
        return self._output_models
    @property
    def output_models(self) -> dict:
        """Map of output signal -> model."""
        return self._output_models
    def __str__(self):
        return 'LTS:\n' \
               '  inputs: {inputs}\n' \
               '  outputs: {outputs}\n' \
               '  init_states: {init}\n' \
               '  states: {states}\n' \
               '  output_models: {output_models}'.format(init=str(self._init_states),
                                                         states=str(self.states),
                                                         output_models=str(self.model_by_signal),
                                                         inputs=to_str(self._input_signals),
                                                         outputs=to_str(self._output_signals))
| 5nizza/party-elli | interfaces/LTS.py | Python | mit | 2,142 |
# -*- coding: utf-8 -*-
#
# state_machine documentation build configuration file, created by
# sphinx-quickstart on Thu Apr 3 08:46:47 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# -- General configuration ------------------------------------------------

# Cleanup: the commented-out sphinx-quickstart defaults have been removed;
# only settings that are actually set remain.  See the Sphinx build
# configuration reference for every available option.

# No Sphinx extensions are enabled for this project.
extensions = []

# Paths (relative to this directory) that contain templates.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'state_machine'
copyright = u'2014, Jonathan Tushman'

# The short X.Y version and the full release string.
version = '0.2.9'
release = '0.2.9'

# Patterns, relative to the source directory, ignored when looking for
# source files.
exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ----------------------------------------------

html_theme = 'default'
html_static_path = ['_static']

# Output file base name for the HTML help builder.
htmlhelp_basename = 'state_machinedoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
}

# (source start file, target name, title, author, documentclass).
latex_documents = [
  ('index', 'state_machine.tex', u'state\\_machine Documentation',
   u'Jonathan Tushman', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'state_machine', u'state_machine Documentation',
     [u'Jonathan Tushman'], 1)
]

# -- Options for Texinfo output -------------------------------------------

# (source start file, target name, title, author, dir menu entry,
#  description, category).
texinfo_documents = [
  ('index', 'state_machine', u'state_machine Documentation',
   u'Jonathan Tushman', 'state_machine', 'One line description of project.',
   'Miscellaneous'),
]
| jtushman/state_machine | docs/conf.py | Python | mit | 8,221 |
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from selvbetjening.sadmin2 import menu
from selvbetjening.sadmin2.decorators import sadmin_prerequisites
from selvbetjening.sadmin2.forms import UserForm, PasswordForm
from selvbetjening.sadmin2.views.generic import generic_create_view
@sadmin_prerequisites
def user_change(request, user_pk):
    # Render/handle the edit form for an existing user (sadmin2), delegating
    # form processing to generic_create_view with the user as instance.
    user = get_object_or_404(get_user_model(), pk=user_pk)
    # Navigation state for the sadmin2 chrome (menu, breadcrumbs, tabs).
    context = {
        'sadmin2_menu_main_active': 'userportal',
        'sadmin2_breadcrumbs_active': 'user',
        'sadmin2_menu_tab': menu.sadmin2_menu_tab_user,
        'sadmin2_menu_tab_active': 'user',
        'user': user
    }
    return generic_create_view(request,
                               UserForm,
                               reverse('sadmin2:user', kwargs={'user_pk': user.pk}),
                               message_success=_('User updated'),
                               context=context,
                               instance=user)
@sadmin_prerequisites
def user_password(request, user_pk):
    # Render/handle the change-password form for an existing user (sadmin2).
    user = get_object_or_404(get_user_model(), pk=user_pk)
    # Navigation state for the sadmin2 chrome (menu, breadcrumbs, tabs).
    context = {
        'sadmin2_menu_main_active': 'userportal',
        'sadmin2_breadcrumbs_active': 'user_password',
        'sadmin2_menu_tab': menu.sadmin2_menu_tab_user,
        'sadmin2_menu_tab_active': 'password',
        'user': user
    }
    # BUG FIX: stray non-Python residue was appended after the closing
    # parenthesis of this call (a syntax error); it has been removed.
    return generic_create_view(request,
                               PasswordForm,
                               redirect_success_url=reverse('sadmin2:user_password', kwargs={'user_pk': user.pk}),
                               message_success=_('Password updated'),
                               context=context,
                               instance=user)
# -*- coding: utf-8 -*-
#
# BbQuick documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 10 20:55:10 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'BbQuick'
copyright = u'2012, Ian A Wilson'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0a'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'BbQuickdoc'  # output file base name for the HTML help builder
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  # (source start file, target name, title, author, document class)
  ('index', 'BbQuick.tex', u'BbQuick Documentation',
   u'Ian A Wilson', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    # (source start file, name, description, authors, manual section)
    ('index', 'bbquick', u'BbQuick Documentation',
     [u'Ian A Wilson'], 1)
]
| ianawilson/BbQuick | docs/conf.py | Python | mit | 6,974 |
from collections import Counter
from statistics import mode
def moda(muestras):
    """Return every value that reaches the maximum frequency in *muestras*.

    The result preserves first-occurrence order (iteration order of an
    insertion-ordered dict of counts), matching the original loop-based
    implementation.  An empty input returns an empty list instead of
    letting ``max()`` raise ``ValueError``.
    """
    if not muestras:
        return []
    # Counter replaces the hand-rolled frequency dict; like a plain dict it
    # keeps first-occurrence insertion order, so the output order is unchanged.
    frecuencias = Counter(muestras)
    frecuencia_maxima = max(frecuencias.values())
    return [valor for valor, veces in frecuencias.items()
            if veces == frecuencia_maxima]
if __name__ == '__main__':
    soluciones = []
    # Each test case begins with its sample count; a count of 0 ends the input.
    tamano_muestra = int(input())
    while tamano_muestra:
        muestras = [int(x) for x in input().split(' ')]
        # soluciones.append(moda(muestras))
        # NOTE(review): statistics.mode raises StatisticsError on multimodal
        # input before Python 3.8 — the commented-out moda() handles ties.
        soluciones.append(mode(muestras))
        tamano_muestra = int(input())
    for solucion in soluciones:
        print(solucion)
| israelem/aceptaelreto | codes/2017-10-09-modas.py | Python | mit | 790 |
from __future__ import print_function, absolute_import
import random
import unittest
from pysmoke import marshal
from pysmoke.smoke import ffi, Type, TypedValue, pystring, smokec, not_implemented, charp, dbg
from pysmoke import QtCore, QtGui
qtcore = QtCore.__binding__
qtgui = QtGui.__binding__
class MarshalTestCase(unittest.TestCase):
    """Round-trip tests for QString marshalling between Python and Smoke/Qt."""

    def setUp(self):
        # No shared fixtures; each test constructs its own objects.
        pass

    def tearDown(self):
        pass

    def test_qstring(self):
        # Python str -> QString -> Python str must round-trip unchanged.
        qstr = marshal.QString.from_py('aqstring')
        print(qstr)
        pstr = marshal.QString.to_py(qstr)
        #dbg()
        self.assertEqual(pstr, 'aqstring')
        # NOTE(review): the collection here presumably checks that qstr stays
        # usable after Python temporaries are reclaimed — confirm intent.
        import gc; gc.collect()
        qstr2 = marshal.QString.from_py(pstr)
        print('QS:', qstr, pstr, qstr2, marshal.QString.to_py(qstr))
        # QObject property set/get must survive the string marshalling layer.
        obj = QtGui.QObject()
        print('obj', obj.__cval__.value.s_voidp)
        obj.setObjectName('my_object')
        self.assertEqual(obj.objectName(), 'my_object')
if __name__ == '__main__':
    # Allow running this module directly: python test_marshal.py
    unittest.main()
| pankajp/pysmoke | pysmoke/tests/test_marshal.py | Python | mit | 987 |
from flask_simple_alchemy import Relator
from testers import db, app, FakeTable, OtherTable
# Module-level Relator used by the mixin-based model definitions below.
this_table = Relator(db)
this_table.add('FakeTable')
this_table.add('OtherTable', foreign_key='uuid')
class ThirdTable(db.Model, this_table.HasOneToOneWith.FakeTable):
    __tablename__ = 'thirdtable'
    id = db.Column(db.Integer, primary_key=True)
    # NOTE(review): db.Boolean(False) passes False positionally (SQLAlchemy's
    # create_constraint), not a default value — confirm the intent.
    elf = db.Column(db.Boolean(False))
    monkey = db.Column(db.String, default='yep')
def test_Relator_setattrs():
    """Adding tables to a Relator exposes every relationship accessor."""
    relator = Relator(db)
    relator.add('FakeTable')
    relator.add('OtherTable', foreign_key='uuid')
    accessors = (
        relator.HasForeignKeyOf,
        relator.HasOneToOneWith,
        relator.HasManyToOneWith,
    )
    # Every relationship kind is present...
    for accessor in accessors:
        assert accessor
    # ...and each one knows about both registered tables.
    for accessor in accessors:
        assert accessor.FakeTable
    for accessor in accessors:
        assert accessor.OtherTable
def test_Realtor_relationship():
    """End-to-end check that the generated FK column and relationship work."""
    assert ThirdTable.faketable_id
    assert ThirdTable.faketable
    with app.app_context():
        # Persist a parent row, then a child row pointing at it via the
        # Relator-generated foreign key.
        fk = FakeTable()
        fk.unique_name = 'gggg'
        db.session.add(fk)
        db.session.commit()
        saved = FakeTable.query.filter_by(unique_name='gggg').first()
        tt = ThirdTable()
        tt.faketable_id = saved.id
        db.session.add(tt)
        db.session.commit()
        saved2 = ThirdTable.query.filter_by(monkey='yep').first()
        assert saved
        assert tt
        assert saved2
def test_Realtor_relationship_again():
    """A custom relation_name yields a mixin under that name with a uuid FK."""
    this_table = Relator(db)
    this_table.add('FakeTable')
    this_table.add('OtherTable', foreign_key='uuid', relation_name='OtherTableUUID1')
    # Defining the model registers it with SQLAlchemy's metadata as a side
    # effect; the mixin must contribute the othertable_uuid column.
    class FourthTable(db.Model, this_table.HasManyToOneWith.OtherTableUUID1):
        __tablename__ = 'fourthtable'
        id = db.Column(db.Integer, primary_key=True)
    assert FourthTable.othertable_uuid
    assert FourthTable.othertable
def test_Realtor_relation_name():
    """Default and renamed registrations of the same table can coexist."""
    this_table = Relator(db)
    this_table.add('FakeTable')
    this_table.add('OtherTable')
    this_table.add('OtherTable', foreign_key='uuid', relation_name="OtherTableUUID")
    # Default registration: FK column named after the id key.
    class SixthTable(db.Model, this_table.HasManyToOneWith.OtherTable):
        __tablename__ = 'sixthtable'
        id = db.Column(db.Integer, primary_key=True)
    # Renamed registration: FK column named after the uuid key.
    class FifthTable(db.Model, this_table.HasManyToOneWith.OtherTableUUID):
        __tablename__ = 'fifthtable'
        id = db.Column(db.Integer, primary_key=True)
    assert SixthTable.othertable_id
    assert SixthTable.othertable
    assert FifthTable.othertable_uuid
    assert FifthTable.othertable
def test_database_creation():
    """Schema with Relator-generated columns can be created and dropped."""
    this_table = Relator(db)
    this_table.add('FakeTable')
    this_table.add('OtherTable', foreign_key='uuid')
    #class ThirdTable(db.Model, this_table.HasOneToOneWith.FakeTable):
    #    __tablename__ = 'thirdtable'
    #    id = db.Column(db.Integer, primary_key=True)
    # Drop first so create_all starts from a clean slate, then tidy up.
    db.drop_all()
    db.create_all()
    db.drop_all()
| elbow-jason/flask-simple-alchemy | tests/test_relator.py | Python | mit | 3,028 |
import time
import find_remove_find5
import sort_then_find3
import walk_through7
def time_find_two_smallest(find_func, lst):
    """Placeholder: time *find_func* over *lst* (implementation pending)."""
    ...
| JSBCCA/pythoncode | early_projects/perfcountertest.py | Python | mit | 135 |
from sys import argv
# Expects exactly one command-line argument: the user's name.
script, user_name = argv
prompt = '> '
print "Hi %s, I'm the %s script." % (user_name, script)
print "I'd like to ask you a few questions."
print "Do you like me %s?" % user_name
likes = raw_input(prompt)
print "Where do you live %s?" % user_name
lives = raw_input(prompt)
print "What kind of computer do you have?"
computer = raw_input(prompt)
# %r repeats the raw (quoted) answers back to the user.
print """
Alright, so you said %r about liking me.
You live in %r. Not sure where that is.
And you have a %r computer. Nice.
""" % (likes, lives, computer)
| Skreex/LPTHW | ex14.py | Python | mit | 529 |
"""
Script for the paper
"""
import time
import numpy as np
import snr_of_images
import urllib
import zlib
from io import BytesIO
import cv2
import h5py
import os
import matplotlib.pyplot as plt
import json
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
#Following if it gives an InsecureRequestWarning
#requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# Per-modality skip flags: set True to bypass that acquisition section below.
sbem_skip = False
atum_skip = False
fbem_skip = False
temca_skip = False
OCP_server = 'http://cloud.neurodata.io/ocp/ca/'
# Number of sample images to measure per imaging modality.
n_comp = 100
class OCP_data:
    """Download random 2000x2000 cutouts from an OCP/neurodata dataset.

    Parameters
    ----------
    key : str
        OCP dataset token (e.g. ``'kasthuri11'``).
    num : int
        Number of usable cutouts to collect.  Extra random coordinates
        (``num + 400``) are drawn so rejected cutouts can be skipped.

    Attributes
    ----------
    data : numpy.ndarray
        Accepted cutouts stacked along the second axis of the downloaded
        (1, y, x) arrays; ``data`` is the first (z) slice of that stack.
    bad : list of str
        URLs that repeatedly failed to download or decode.
    bad_state : list
        numpy RNG states recorded with each failed URL, for reproducibility.
    """
    def __init__(self, key, num):
        # Dataset geometry at the coarsest stored resolution.
        info = requests.get(OCP_server+key+'/info/', verify=False).json()['dataset']
        res = info['resolutions'][0]
        x_size, y_size, z_size = info['imagesize'][str(res)]
        x_off, y_off, z_off = info['neariso_offset'][str(res)]
        ocp_x_rand = np.random.randint(x_off, x_size-2000, num+400)
        ocp_y_rand = np.random.randint(y_off, y_size-2000, num+400)
        ocp_z_rand = np.random.randint(z_off, z_size, num+400)
        count = 0
        self.bad = []
        self.bad_state = []
        for i in range(num+400):
            print(key, ': ', count)
            try_count = 0
            # BUG FIX: try_count2 used to be reset at the top of every while
            # iteration, so the "maximum tries" guard could never fire and a
            # persistent network failure looped forever.  Initialise it once
            # per cutout instead.
            try_count2 = 0
            while try_count < 10:
                try:
                    f = requests.get("http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/", timeout=60, verify=False).content
                except Exception as e:
                    # Network-level failure: retry up to 5 times, then give up.
                    print(e)
                    print(key, ', type 1: ', count, "http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/")
                    try_count2 += 1
                    if try_count2 == 5:
                        raise IOError('Maximum tries to download exceeded')
                    continue
                try:
                    # f is already the raw response body (bytes): a zlib-
                    # compressed .npz payload.
                    zdata = f
                    datastr = zlib.decompress(zdata[:])
                    datafobj = BytesIO(datastr)
                    temp_data = np.load(datafobj)
                except:
                    # Decode failure: count against the 10-attempt budget.
                    try_count += 1
                    print(key, ', type 2: ', count, "http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/")
                    continue
                if len(temp_data) == 0:  # data failed to download correctly
                    try_count += 1
                else:
                    break
            if try_count == 10:
                # Record the failing URL and the RNG state for reproducibility.
                self.bad.append("http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/")
                self.bad_state.append(np.random.get_state())
                continue
            # Reject cutouts that are more than half zero-valued
            # (presumably off-volume padding — TODO confirm).
            if np.sum(temp_data[0] == 0) > 0.5*len(temp_data[0].flatten()):
                continue
            if count == 0:
                data = temp_data
            else:
                data = np.append(data, temp_data, axis=1)
            count += 1
            if count == num:
                break
        # NOTE(review): if every candidate cutout is rejected, `data` is never
        # bound and this raises NameError — behaviour unchanged from original.
        self.data = data[0]
# --- SBEM: measure SNR on n_comp random 640x640 mosaics of 128x128 raw tiles.
np.random.seed(20170127)
#sudo mount -t davfs https://segem.rzg.mpg.de/webdav /image/sbem
sbem_snr = np.zeros(n_comp)
count = 0
while count < n_comp:
    x = np.random.permutation(25)[0]+1 #Plus one to avoid some of the edges
    y = np.random.permutation(35)[0]+1
    z = np.random.permutation(42)[0]+1
    if sbem_skip is True:
        count+=1
        if count == n_comp:
            break
        else:
            continue
    im = np.zeros([128*5,128*5])
    for k in range(5):
        for l in range(5):
            #Construct large images by copying over to /tmp and reading the raw
            os.system('cp /image/sbem/datasets/ek0563/raw/color/1/x'+str(x+k).zfill(4)+'/y'+str(y+l).zfill(4)+'/z'+str(z).zfill(4)+'/100527_k0563_mag1_x'+str(x+k).zfill(4)+'_y'+str(y+l).zfill(4)+'_z'+str(z).zfill(4)+'.raw /tmp/tmpim.raw')
            im[l*128:(l+1)*128,k*128:(k+1)*128] = np.memmap('/tmp/tmpim.raw',dtype=np.uint8,shape=(128,128))
    sbem_snr[count] = snr_of_images.SNR(im.astype(np.uint8),mode='im_array',conv=35,hess=200)
    count += 1
    if count == n_comp:
        break
#sbem_snr.sort()
# Drop entries where the SNR computation overflowed to inf.
sbem_snr = sbem_snr[sbem_snr < np.inf]
# --- ATUM: random cutouts from the kasthuri11 OCP dataset.
atum_snr = np.zeros(n_comp)
atum= OCP_data('kasthuri11',n_comp+10)
count = 0
for i in range(n_comp+10):
    try:
        atum_snr[count] = snr_of_images.SNR(atum.data[i,:,:],mode='im_array',conv=55,hess=800)
        count += 1
        if count == n_comp:
            break
    except:
        continue
atum_snr = atum_snr[atum_snr < np.inf]
# --- FIB-SEM: random slices from pre-downloaded PNGs.
fibsem_snr = np.zeros(n_comp)
fib_random = np.random.permutation(range(1376,7651))[:300]
#fib_sem images have to be downloaded in advance
count = 0
for i,j in enumerate(fib_random):
    if fbem_skip is True:
        break
    try:
        fibsem_snr[count] = snr_of_images.SNR('fib_sem_images/grayscale-xy-'+str(j)+'.png',conv=35,hess=3200)
        im = cv2.imread('fib_sem_images/grayscale-xy-'+str(j)+'.png',cv2.IMREAD_GRAYSCALE)
        count += 1
        if count == n_comp:
            break
    except:
        continue
fibsem_snr = fibsem_snr[fibsem_snr < np.inf]
# --- TEMCA: random slices from the concatenated cremi.org HDF5 volumes.
#aplus, bplus and cplus are the cremi.org files
crop_im = h5py.File('aplus.h5','r')
temca = crop_im['volumes']['raw']
crop_im = h5py.File('bplus.h5','r')
temca = np.append(temca,crop_im['volumes']['raw'],axis=0)
crop_im = h5py.File('cplus.h5','r')
temca = np.append(temca,crop_im['volumes']['raw'],axis=0)
temca_random = np.random.permutation(len(temca))[:150]
temcas = np.zeros(n_comp)
count = 0
for i,j in enumerate(temca_random):
    if temca_skip is True:
        break
    try:
        temcas[count] = snr_of_images.SNR(temca[j,:,:],mode='im_array',conv=55,hess=200)
        # Keep a copy of every image actually used in the measurement.
        cv2.imwrite('/image/Used/temca_'+str(count).zfill(3)+'.tif',temca[j,:,:])
        count +=1
        if count == n_comp:
            break
    except:
        continue
temcas = temcas[temcas < np.inf]
# --- bock11, acardona and takemura13 OCP datasets, same measurement loop.
bock11 = OCP_data('bock11',n_comp+10)
bock_snr = np.zeros(n_comp)
count = 0
for i in range(n_comp+10):
    try:
        bock_snr[count] = snr_of_images.SNR(bock11.data[i,:,:],mode='im_array',conv=55,hess=800)
        cv2.imwrite('/image/Used/bock11_'+str(count).zfill(3)+'.tif',bock11.data[i,:,:])
        count += 1
        if count == n_comp:
            break
    except:
        continue
bock_snr = bock_snr[bock_snr < np.inf]
acardona11 = OCP_data('acardona_0111_8',n_comp+10)
acardona_snr = np.zeros(n_comp)
count = 0
for i in range(n_comp+10):
    try:
        acardona_snr[count] = snr_of_images.SNR(acardona11.data[i,:,:],mode='im_array',conv=55,hess=800)
        cv2.imwrite('/image/Used/acardona11_'+str(count).zfill(3)+'.tif',acardona11.data[i,:,:])
        count += 1
        if count == n_comp:
            break
    except:
        continue
acardona_snr = acardona_snr[acardona_snr < np.inf]
takemura13 = OCP_data('takemura13',n_comp+10)
takemura_snr = np.zeros(n_comp)
count = 0
for i in range(n_comp+10):
    try:
        takemura_snr[count] = snr_of_images.SNR(takemura13.data[i,:,:],mode='im_array',conv=55,hess=800)
        cv2.imwrite('/image/Used/takemura13_'+str(count).zfill(3)+'.tif',takemura13.data[i,:,:])
        count += 1
        if count == n_comp:
            break
    except:
        continue
takemura_snr = takemura_snr[takemura_snr < np.inf]
# Acquisition rates (um^3/s) for TEMCA, FIB-SEM, ATUM, SBEM.
times = np.array([35,1.2e-2,0.14,0.59])
means = np.array([np.nanmean(temcas),np.nanmean(fibsem_snr),np.nanmean(atum_snr),np.nanmean(sbem_snr),np.nanmean(bock_snr),np.nanmean(acardona_snr),np.nanmean(takemura_snr)])
yerr = np.array([np.nanstd(temcas),np.nanstd(fibsem_snr),np.nanstd(atum_snr),np.nanstd(sbem_snr),np.nanstd(bock_snr),np.nanstd(acardona_snr),np.nanstd(takemura_snr)])
np.savetxt('means_feature.txt',means)
np.savetxt('std_feature.txt',yerr)
# Index of the ATUM sample closest to the mean SNR.
b = np.argmin(np.abs(atum_snr-np.mean(atum_snr)))
#np.savetxt('atum_loc.txt',b)
# Hard-coded cell-edge S/N values for the four primary modalities.
means2 = np.array([ 15.7, 11.1, 9.9, 5.2])
yerr2 = np.array([ 2. , 2.6, 1.5, 0.8])
# NOTE(review): `times` holds acquisition rates for only the first four
# modalities while `means` has seven entries; the original loops indexed
# times[i] for i in range(len(means)) and raised IndexError.  Plot only the
# series with a matching rate — TODO confirm rates for bock/acardona/takemura.
n_series = min(len(times), len(means))
f = plt.figure(figsize=(10,4))
ax0 = f.add_subplot(121)
symbol = ['o','x','s','d','>','8','h','+']
colors = ['r','g','b','k','y','c','m','brown']
[ax0.loglog(times[i],means[i],'.',mfc=colors[i],marker=symbol[i],mec=colors[i]) for i in range(n_series)]
[ax0.errorbar(times[i],means[i],yerr=yerr[i],lw=1,fmt='none',ecolor=colors[i]) for i in range(n_series)]
ax0.set_title(r'${\rm a)~Feature~based~S/N}$')
ax0.set_xlim(1e-2,50)
ax0.set_ylim(1,30)
# BUG FIX: Axes objects have no .xlabel()/.ylabel() methods (those are pyplot
# functions); the Axes equivalents are set_xlabel()/set_ylabel().
ax0.set_xlabel(r'${\rm Acquisition~Rate}~[\mu{}m^3~s^{-1}]$')
ax0.set_ylabel(r'${\rm S/N}$')
ax1 = f.add_subplot(122)
symbol = ['o','x','s','d']
colors = ['r','g','b','k']
[ax1.loglog(times[i],means2[i],'.',mfc=colors[i],marker=symbol[i],mec=colors[i]) for i in range(4)]
# BUG FIX: this loop indexed the 4-element times/means2/yerr2 arrays with
# range(len(means)) (7 entries), raising IndexError.
[ax1.errorbar(times[i],means2[i],yerr=yerr2[i],lw=1,fmt='none',ecolor=colors[i]) for i in range(len(means2))]
ax1.set_title(r'${\rm b)~Cell-edge~S/N}$')
ax1.set_xlim(1e-2,50)
ax1.set_ylim(1,30)
ax1.set_xlabel(r'${\rm Acquisition~Rate}~[\mu{}m^3~s^{-1}]$')
ax1.set_ylabel(r'${\rm S/N}$')
| dbock/bocklab_public | temca2data/SNR/snrscript.py | Python | mit | 9,484 |
def ContinueCurse():
    """Ask for two test scores and report whether the course is still passable.

    The weighted test average (60%) plus a final-exam score (40%) must reach
    a final grade of 3; if the exam score required is at most 5 the student
    can still pass, otherwise dropping the course is suggested.
    (Name kept as-is — callers use ContinueCurse.)
    """
    test_note_1 = (int(input("Type first Test Value: ")))
    test_note_2 = (int(input("Type Second Test Value: ")))
    # BUG FIX: under Python 2, (a + b) / 2 is integer division and silently
    # truncates the average (e.g. 1 and 2 averaged to 1); divide by 2.0.
    tmp_note = (((test_note_1 + test_note_2) / 2.0) * 0.6)
    # Exam score needed to reach a final grade of 3 (exam weight 0.4).
    if ((3 - tmp_note) / 0.4) <= 5:
        print("Keep Studying :D")
    else:
        print("Cancel course :/")
    print("=== Program Finished ===")
# Entry point: announce the script, then run the interactive prompt.
print " ==== Starting the -ContinueCourse- Script ===="
ContinueCurse()
| 13lcp2000/pythonExercises | script3.py | Python | mit | 414 |
from draftjs_exporter.constants import BLOCK_TYPES, INLINE_STYLES
from draftjs_exporter.dom import DOM
from draftjs_exporter.types import Element, Props
def render_children(props: Props) -> Element:
    """Transparent component: emit only the children, adding no markup of
    its own for the component itself."""
    children = props["children"]
    return children
def code_block(props: Props) -> Element:
    """Render a Draft.js code block as ``<pre><code>…</code></pre>``."""
    inner = DOM.create_element("code", {}, props["children"])
    return DOM.create_element("pre", {}, inner)
# Default block map to extend.
BLOCK_MAP = {
    BLOCK_TYPES.UNSTYLED: "p",
    BLOCK_TYPES.HEADER_ONE: "h1",
    BLOCK_TYPES.HEADER_TWO: "h2",
    BLOCK_TYPES.HEADER_THREE: "h3",
    BLOCK_TYPES.HEADER_FOUR: "h4",
    BLOCK_TYPES.HEADER_FIVE: "h5",
    BLOCK_TYPES.HEADER_SIX: "h6",
    # List items render inside a shared wrapper element.
    BLOCK_TYPES.UNORDERED_LIST_ITEM: {"element": "li", "wrapper": "ul"},
    BLOCK_TYPES.ORDERED_LIST_ITEM: {"element": "li", "wrapper": "ol"},
    BLOCK_TYPES.BLOCKQUOTE: "blockquote",
    BLOCK_TYPES.PRE: "pre",
    # Code blocks and atomic blocks use component renderers, not tag names.
    BLOCK_TYPES.CODE: code_block,
    BLOCK_TYPES.ATOMIC: render_children,
}
# Default style map to extend.
# Tags come from https://developer.mozilla.org/en-US/docs/Web/HTML/Element.
# and are loosely aligned with https://github.com/jpuri/draftjs-to-html.
# Only styles that map to HTML elements are allowed as defaults.
STYLE_MAP = {
    # Each inline style maps to a single HTML element name.
    INLINE_STYLES.BOLD: "strong",
    INLINE_STYLES.CODE: "code",
    INLINE_STYLES.ITALIC: "em",
    INLINE_STYLES.UNDERLINE: "u",
    INLINE_STYLES.STRIKETHROUGH: "s",
    INLINE_STYLES.SUPERSCRIPT: "sup",
    INLINE_STYLES.SUBSCRIPT: "sub",
    INLINE_STYLES.MARK: "mark",
    INLINE_STYLES.QUOTATION: "q",
    INLINE_STYLES.SMALL: "small",
    INLINE_STYLES.SAMPLE: "samp",
    INLINE_STYLES.INSERT: "ins",
    INLINE_STYLES.DELETE: "del",
    INLINE_STYLES.KEYBOARD: "kbd",
}
| springload/draftjs_exporter | draftjs_exporter/defaults.py | Python | mit | 1,811 |
from devito.ir.iet import MapNodes, Section, TimedList, Transformer
from devito.mpi.routines import (HaloUpdateCall, HaloWaitCall, MPICall, MPIList,
HaloUpdateList, HaloWaitList, RemainderCall)
from devito.passes.iet.engine import iet_pass
from devito.passes.iet.orchestration import BusyWait
from devito.types import Timer
__all__ = ['instrument']
def instrument(graph, **kwargs):
    """Attach profiling to *graph*: first register custom sub-Sections,
    then wrap every Section with timers backed by a fresh Timer object."""
    track_subsections(graph, **kwargs)
    profiler = kwargs['profiler']
    section_names = list(profiler.all_sections)
    timer = Timer(profiler.name, section_names)
    instrument_sections(graph, timer=timer, **kwargs)
@iet_pass
def track_subsections(iet, **kwargs):
    """
    Add custom Sections to the `profiler`. Custom Sections include:

        * MPI Calls (e.g., HaloUpdateCall and HaloUpdateWait)
        * Busy-waiting on While(lock) (e.g., from host-device orchestration)
    """
    profiler = kwargs['profiler']
    sregistry = kwargs['sregistry']
    # Prefix used to generate a unique Section name for each node class.
    name_mapper = {
        HaloUpdateCall: 'haloupdate',
        HaloWaitCall: 'halowait',
        RemainderCall: 'remainder',
        HaloUpdateList: 'haloupdate',
        HaloWaitList: 'halowait',
        BusyWait: 'busywait'
    }
    mapper = {}
    for NodeType in [MPIList, MPICall, BusyWait]:
        # Map each existing Section to the matching nodes it contains.
        for k, v in MapNodes(Section, NodeType).visit(iet).items():
            for i in v:
                # Skip nodes already wrapped, or whose class the profiler
                # does not track as a subsection.
                if i in mapper or not any(issubclass(i.__class__, n)
                                          for n in profiler.trackable_subsections):
                    continue
                name = sregistry.make_name(prefix=name_mapper[i.__class__])
                mapper[i] = Section(name, body=i, is_subsection=True)
                profiler.track_subsection(k.name, name)
    iet = Transformer(mapper).visit(iet)
    return iet, {}
@iet_pass
def instrument_sections(iet, **kwargs):
    """Wrap the Sections of `iet` with start/stop timers, as dictated by
    the profiler; a no-op if the profiler leaves the IET untouched."""
    profiler, timer = kwargs['profiler'], kwargs['timer']
    instrumented = profiler.instrument(iet, timer)
    if instrumented is iet:
        # Nothing was instrumented: no extra arguments or headers needed.
        return instrumented, {}
    metadata = {
        'args': timer,
        'headers': [TimedList._start_timer_header(),
                    TimedList._stop_timer_header()],
    }
    return instrumented, metadata
| opesci/devito | devito/passes/iet/instrument.py | Python | mit | 2,261 |
# !/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2012-2015 Christian Schwarz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
def optimized(fn):
    """Decorator that calls the optimized C++ (pycastC) version of a pycast
    function when available, rather than the original pycast function.

    :param function fn: original pycast function.
    :return: the wrapped function, dispatching between pycastC and pycast.
    :rtype: function
    """
    from functools import wraps

    @wraps(fn)
    def _optimized(self, *args, **kwargs):
        """Call the pycastC counterpart of the function if optimization is
        enabled on the instance and the compiled module can be imported;
        otherwise fall back to the original implementation.

        :param PyCastObject self: calling instance; forwarded so the C++
            implementation can access its members.
        :param list args: positional arguments of the original call.
        :param dict kwargs: keyword arguments of the original call.
        :return: result of the pycast or pycastC implementation.
        """
        if self.optimizationEnabled:
            class_name = self.__class__.__name__
            # The compiled twins live under pycastC with mirrored paths.
            module = self.__module__.replace("pycast", "pycastC")
            try:
                imported = __import__("%s.%s" % (module, class_name),
                                      globals(), locals(), [fn.__name__])
                function = getattr(imported, fn.__name__)
                return function(self, *args, **kwargs)
            except ImportError:
                # Parenthesized print works under both Python 2 and 3.
                print("[WARNING] Could not enable optimization for %s, %s"
                      % (fn.__name__, self))
                return fn(self, *args, **kwargs)
        else:
            return fn(self, *args, **kwargs)

    # functools.wraps already copied __name__/__doc__ (and more); keep the
    # explicit __repr__/__str__ attributes for code that reads them directly.
    setattr(_optimized, "__repr__", fn.__repr__)
    setattr(_optimized, "__str__", fn.__str__)
    return _optimized
| T-002/pycast | pycast/common/decorators.py | Python | mit | 2,987 |
"""
Module where admin tools dashboard classes are defined.
"""
from django.template.defaultfilters import slugify
try:
from importlib import import_module
except ImportError:
# Django < 1.9 and Python < 2.7
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_text
from admin_tools.dashboard import modules
from admin_tools.utils import get_admin_site_name, uniquify
class Dashboard(object):
    """
    Base class for index dashboards.

    A dashboard is essentially a list of modules (``children``) plus a few
    presentation attributes:

    ``title``
        Heading shown above the dashboard (default: 'Dashboard').

    ``template``
        Template used to render the dashboard.

    ``columns``
        Number of columns the dashboard lays its modules out in (default: 2).

    Subclasses may declare an inner ``Media`` class with ``css``/``js``
    tuples to pull extra static files into the page, and typically append
    modules to ``self.children`` in ``__init__`` or in
    ``init_with_context``.
    """
    title = _('Dashboard')
    template = 'admin_tools/dashboard/dashboard.html'
    columns = 2
    children = None

    class Media:
        css = ()
        js = ()

    def __init__(self, **kwargs):
        # Only accept keyword arguments that correspond to declared
        # class-level attributes; anything else is silently ignored.
        for key, value in kwargs.items():
            if hasattr(self.__class__, key):
                setattr(self, key, value)
        # Give each instance its own list so modules are never shared
        # through a class-level mutable default.
        self.children = self.children or []

    def init_with_context(self, context):
        """
        Hook invoked with a ``django.template.RequestContext`` just before
        rendering, so subclasses can build their modules from context and
        request data.  The default implementation does nothing.
        """
        pass

    def get_id(self):
        """
        Internal identifier used to distinguish dashboards in js code.
        """
        return 'dashboard'

    def _prepare_children(self):
        """ Assign a unique id to every child that lacks an explicit one. """
        seen = set()
        for position, module in enumerate(self.children):
            module.id = uniquify(module.id or str(position + 1), seen)
            module._prepare_children()
class AppIndexDashboard(Dashboard):
    """
    Dashboard displayed on an application's index page.

    Works like :class:`~admin_tools.dashboard.Dashboard`, except that the
    constructor takes two extra arguments:

    ``app_title``
        The title of the application.

    ``models``
        Dotted paths of the application's models, e.g.::

            ['yourproject.app.Model1', 'yourproject.app.Model2']

    Two helper methods are provided: :meth:`get_app_model_classes` and
    :meth:`get_app_content_types`.  Custom app index dashboards should
    inherit from this class rather than from ``Dashboard``.
    """
    models = None
    app_title = None

    def __init__(self, app_title, models, **kwargs):
        # Route the two positional arguments through the generic
        # attribute-setting machinery of Dashboard.__init__.
        kwargs.update({'app_title': app_title, 'models': models})
        super(AppIndexDashboard, self).__init__(**kwargs)

    def get_app_model_classes(self):
        """
        Return the model classes for the current app, resolved from their
        dotted import paths.
        """
        classes = []
        for dotted_path in self.models:
            module_path, class_name = dotted_path.rsplit('.', 1)
            module = import_module(module_path)
            classes.append(getattr(module, class_name))
        return classes

    def get_app_content_types(self):
        """
        Return the content types of every model in this app.
        """
        return [ContentType.objects.get_for_model(model)
                for model in self.get_app_model_classes()]

    def get_id(self):
        """
        Internal identifier used to distinguish dashboards in js code.
        """
        return '%s-dashboard' % slugify(force_text(self.app_title))
class DefaultIndexDashboard(Dashboard):
    """
    The default dashboard displayed on the admin index page.
    To change the default dashboard you'll have to type the following from the
    commandline in your project root directory::

        python manage.py customdashboard

    And then set the ``ADMIN_TOOLS_INDEX_DASHBOARD`` settings variable to
    point to your custom index dashboard class.
    """
    def init_with_context(self, context):
        site_name = get_admin_site_name(context)
        # append a link list module for "quick links"
        self.children.append(modules.LinkList(
            _('Quick links'),
            layout='inline',
            draggable=False,
            deletable=False,
            collapsible=False,
            children=[
                [_('Return to site'), '/'],
                [_('Change password'),
                 reverse('%s:password_change' % site_name)],
                [_('Log out'), reverse('%s:logout' % site_name)],
            ]
        ))
        # append an app list module for "Applications"
        self.children.append(modules.AppList(
            _('Applications'),
            exclude=('django.contrib.*',),
        ))
        # append an app list module for "Administration"
        self.children.append(modules.AppList(
            _('Administration'),
            models=('django.contrib.*',),
        ))
        # append a recent actions module
        self.children.append(modules.RecentActions(_('Recent Actions'), 5))
        # append a feed module
        self.children.append(modules.Feed(
            _('Latest Django News'),
            feed_url='http://www.djangoproject.com/rss/weblog/',
            limit=5
        ))
        # append another link list module for "support".
        self.children.append(modules.LinkList(
            _('Support'),
            children=[
                {
                    'title': _('Django documentation'),
                    'url': 'http://docs.djangoproject.com/',
                    'external': True,
                },
                {
                    'title': _('Django "django-users" mailing list'),
                    'url': 'http://groups.google.com/group/django-users',
                    'external': True,
                },
                {
                    'title': _('Django irc channel'),
                    'url': 'irc://irc.freenode.net/django',
                    'external': True,
                },
            ]
        ))
class DefaultAppIndexDashboard(AppIndexDashboard):
    """
    Default dashboard for the applications index page.  Replace it by
    running ``python manage.py customdashboard`` from the project root and
    pointing the ``ADMIN_TOOLS_APP_INDEX_DASHBOARD`` setting at your own
    dashboard class.
    """
    # No title: it would duplicate the heading of the model list module.
    title = ''

    def __init__(self, *args, **kwargs):
        AppIndexDashboard.__init__(self, *args, **kwargs)
        # One module listing the app's models, one with its recent actions.
        model_list = modules.ModelList(self.app_title, self.models)
        recent = modules.RecentActions(
            _('Recent Actions'),
            include_list=self.get_app_content_types(),
            limit=5
        )
        self.children.extend([model_list, recent])
| miurahr/django-admin-tools | admin_tools/dashboard/dashboards.py | Python | mit | 10,473 |
#!/usr/bin/env python
import sys
from httplib import HTTPConnection
from urllib import urlencode
from urlparse import urljoin
from json import loads
from reportlab.pdfgen import canvas
OUTPUTFILE = 'certificate.pdf'  # destination path for the generated PDF
def get_brooklyn_integer():
    ''' Ask Brooklyn Integers for a single integer.

    Returns the integer value issued by the API (the code reads only
    data['integer'], not a permalink).  Raises an Exception on any
    non-2XX HTTP response.
    From: https://github.com/migurski/ArtisinalInts/
    '''
    body = 'method=brooklyn.integers.create'
    head = {'Content-Type': 'application/x-www-form-urlencoded'}
    conn = HTTPConnection('api.brooklynintegers.com', 80)
    try:
        conn.request('POST', '/rest/', body, head)
        resp = conn.getresponse()
        if resp.status not in range(200, 299):
            raise Exception('Non-2XX response code from Brooklyn: %d' % resp.status)
        data = loads(resp.read())
        value = data['integer']
    finally:
        # BUG FIX: always release the socket, even when the request fails.
        conn.close()
    return value
def draw_pdf(sparklydevop):
    """Render the certificate PDF to OUTPUTFILE with the given name and a
    Brooklyn Integer certificate number centred on the template image."""
    certimage = './devops.cert.png'
    # TODO make this a function of image size
    width = 1116
    height = 1553
    # Times Roman better fits the other fonts on the template
    font_name = "Times-Roman"
    # TODO make font size a function of name length
    font_size = 72
    c = canvas.Canvas(OUTPUTFILE, pagesize=(width, height))
    c.setFont(font_name, font_size)
    # Print Name
    name_offset = c.stringWidth(sparklydevop)
    try:
        c.drawImage(certimage, 1, 1)
    except IOError:
        print "I/O error trying to open %s" % certimage
    else:
        c.drawString((width-name_offset)/2, height*3/4, sparklydevop)
    # Print Certificate Number
    # NOTE(review): the certificate number is drawn even when the template
    # image failed to load — presumably a deliberate best-effort; confirm.
    cert_number = "Certificate No. " + str(get_brooklyn_integer())
    cert_offset = c.stringWidth(cert_number)
    c.drawString((width-cert_offset)/2, height*3/4-font_size*2, cert_number)
    c.showPage()
    # TODO check for write permissions/failure
    try:
        c.save()
    except IOError:
        print "I/O error trying to save %s" % OUTPUTFILE
if __name__ == "__main__":
    # Require exactly one argument: the name to print on the certificate.
    if len(sys.argv) != 2:
        print 'Usage: gendocert.py "Firstname Lastname"'
        sys.exit(1)
    else:
        # TODO if this is run as a CGI need to sanitize input
        draw_pdf(sys.argv[1])
| dmangot/devops-certifyme | gendocert.py | Python | mit | 2,175 |
# coding: utf-8
'''
Created on 2012-8-30
@author: shanfeng
'''
import smtplib
from email.mime.text import MIMEText
import urllib
import web
class XWJemail:
    '''
    Helper for sending site emails (currently only the password-reset
    notification).  All functionality is exposed as static methods.
    '''

    def __init__(self, params):
        '''
        Constructor (unused; the class is only a namespace for statics).
        '''
        pass

    @staticmethod
    def sendfindpass(user,hash):
        # Build the password-reset link back to this site, carrying the
        # user's email address and the verification hash as query params.
        link = "%s/account/newpass?%s" %(web.ctx.sitehost,urllib.urlencode({'email':user.u_email,"v":hash}))
        # HTML body (Chinese): greets the user and presents the reset link.
        mail_body = """
        <html>
        <head></head>
        <body>
        <h4>%s,你好</h4>
        您刚才在 liulin.info 申请了找回密码。<br>
        请点击下面的链接来重置密码:<br>
        <a href="%s">%s</a><br>
        如果无法点击上面的链接,您可以复制该地址,并粘帖在浏览器的地址栏中访问。<br>
        </body>
        </html>
        """ % (web.utf8(user.u_name),link,link)
        #mail_body = web.utf8(mail_body)
        if isinstance(mail_body,unicode):
            mail_body = str(mail_body)
        mail_from = "liulin.info<wukong10086@163.com>"
        mail_to = user.u_email
        mail_subject = 'liulin.info重置密码邮件'
        msg = MIMEText(mail_body,'html','utf-8')
        #msg=MIMEText(mail_body,'html')
        if not isinstance(mail_subject,unicode):
            mail_subject = unicode(mail_subject)
        msg['Subject']= mail_subject
        msg['From']=mail_from
        msg['To'] = mail_to
        msg["Accept-Language"]="zh-CN"
        msg["Accept-Charset"]="ISO-8859-1,utf-8"
        # NOTE(review): SMTP credentials are hard-coded here -- they should
        # be moved to configuration and out of version control.
        smtp=smtplib.SMTP()
        smtp.connect('smtp.163.com')
        smtp.login('wukong10086@163.com','831112')
        smtp.sendmail(mail_from,mail_to,msg.as_string())
        smtp.quit()
def sendMail(mailto,subject,body,format='plain'):
    # Generic mail-sending helper: builds a MIMEText message and sends it
    # via SMTP, returning True on success and False on any failure.
    #
    # NOTE(review): this function references several names that are not
    # defined anywhere in this module (fromMail, _mailFrom, Header, host,
    # user, password).  It looks like an incomplete copy from another
    # module and will raise NameError if called -- confirm where these
    # configuration values were meant to come from.
    if isinstance(body,unicode):
        body = str(body)
    me= ("%s<"+fromMail+">") % (Header(_mailFrom,'utf-8'),)
    msg = MIMEText(body,format,'utf-8')
    if not isinstance(subject,unicode):
        subject = unicode(subject)
    msg['Subject'] = subject
    msg['From'] = me
    msg['To'] = mailto
    msg["Accept-Language"]="zh-CN"
    msg["Accept-Charset"]="ISO-8859-1,utf-8"
    try:
        s = smtplib.SMTP()
        s.connect(host)
        s.login(user,password)
        s.sendmail(me, mailto, msg.as_string())
        s.close()
        return True
    except Exception, e:
        # Best-effort: log the error and report failure to the caller.
        print str(e)
        return False
'''
TURKSAT 4A
1 39522U 14007A 15301.78105273 .00000128 00000-0 00000+0 0 9996
2 39522 0.0299 272.9737 0004735 326.3457 120.6614 1.00271335 6265
'''
import math
import sys
import astropy.units as u
name = sys.stdin.readline().strip()
line1 = sys.stdin.readline()
line2 = sys.stdin.readline()
number = int(line1[2:7]) # + line1[7]
year = int(line1[9:11]) + 1000
if year < 1057: # 2k
year = year + 1000
launch = line1[11:14]
piece = line1[14]
epoch = line1[18:32]
i = float(line2[8:16])
raan = float(line2[17:25])
e = float("0." + line2[26:33].strip())
ap = float(line2[34:42])
ma = float(line2[43:51])
f = float(line2[52:63])
revs = int(line2[63:68])
t = 1.0 / f * u.day
print ap
print t
print t.to(u.second)
print 1.0 / t.to(u.second)
| Camiloasc1/AstronomyUNAL | IntroductionToTheSpacialVoyage/SatelliteOrbit/CelestrackNORAD.py | Python | mit | 770 |
from pprint import pprint
from Conundrum.utils import sanitize
def decrypt(msg: str, repeated_letter: str) -> str:
    """
    Extract every letter after an occurrence of the repeated letter
    """
    cleaned = sanitize(msg)
    picked = []
    previous = None
    for current in cleaned:
        # Keep this character when the one before it was the marker letter.
        if previous == repeated_letter:
            picked.append(current)
        previous = current
    return ''.join(picked)
def decrypt_try_all(msg: str) -> dict:
    """
    Run decrypt() with every distinct letter occurring in the message.

    Returns a dict mapping each candidate letter to its decrypted text
    (the previous annotation claimed ``[str]``, but the function has
    always returned a dict of letter -> decryption).
    """
    msg = sanitize(msg)
    letters_to_try = sorted({letter for letter in msg})
    return {letter: decrypt(msg, letter) for letter in letters_to_try}
if __name__ == '__main__':
    # Used in Movies 4
    # Demo: print the decryption candidate for every letter in the message.
    encrypted_msg = 'i bet pews or leisure chains can seem to stink of effort, george, under no illusions of vanity'
    pprint(decrypt_try_all(encrypted_msg))
| physicalattraction/kerstpuzzel | src/Conundrum/repeated_letters.py | Python | mit | 853 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ManagementLinkClientConfiguration
from .operations import Operations, ResourceLinksOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class ManagementLinkClient:
    """Client for creating and managing links between Azure resources.

    Resources belonging to different resource groups (but the same
    subscription) can be linked to form logical relationships.  Each
    resource can be linked to 50 other resources; if a linked resource is
    deleted or moved, the link owner must clean up the remaining link.

    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.resource.links.v2016_09_01.operations.Operations
    :ivar resource_links: ResourceLinksOperations operations
    :vartype resource_links:
     azure.mgmt.resource.links.v2016_09_01.operations.ResourceLinksOperations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription.
    :type subscription_id: str
    :param base_url: Service URL. Default value is 'https://management.azure.com'.
    :type base_url: str
    """

    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        # The configuration drives the ARM pipeline used for every request.
        self._config = ManagementLinkClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)

        # Register every generated model class with the (de)serializers.
        model_classes = {name: obj for name, obj in models.__dict__.items() if isinstance(obj, type)}
        self._serialize = Serializer(model_classes)
        self._deserialize = Deserializer(model_classes)
        self._serialize.client_side_validation = False

        # Operation groups share the pipeline, config and (de)serializers.
        shared = (self._client, self._config, self._serialize, self._deserialize)
        self.operations = Operations(*shared)
        self.resource_links = ResourceLinksOperations(*shared)

    def _send_request(
        self,
        request,  # type: HttpRequest
        **kwargs: Any
    ) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client._send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Copy so the caller's request object is never mutated.
        prepared = deepcopy(request)
        prepared.url = self._client.format_url(prepared.url)
        return self._client.send_request(prepared, **kwargs)

    def close(self):
        # type: () -> None
        """Release the underlying pipeline's resources."""
        self._client.close()

    def __enter__(self):
        # type: () -> ManagementLinkClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
| Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/links/v2016_09_01/_management_link_client.py | Python | mit | 4,330 |
"basic 2D vector geometry"
from math import acos, sqrt, sin, cos, pi
class Vec2D(object):
    """Minimal 2D vector supporting common euclidean-geometry operations."""

    # Tolerance used by is_equal when comparing vectors.
    EPSILON = 0.0001

    def __init__(self, x=0.0, y=0.0):
        self.pos_x = x
        self.pos_y = y

    def dot(self, other):
        """Return the dot product of self and other."""
        return self.pos_x * other.pos_x + self.pos_y * other.pos_y

    def cross(self, other):
        """Return the scalar (z-component) 2D cross product."""
        return self.pos_x * other.pos_y - self.pos_y * other.pos_x

    def length(self):
        """Return the euclidean length of the vector."""
        return sqrt(self.dot(self))

    def normalized(self):
        """Return a unit-length vector with the same direction as self."""
        return self * (1 / self.length())

    def rotate(self, angle, center=None):
        """Return self rotated by `angle` radians around `center` (origin by default)."""
        pivot = Vec2D() if center is None else center
        rel = self - pivot
        cos_a = cos(angle)
        sin_a = sin(angle)
        rotated = Vec2D(cos_a * rel.pos_x - sin_a * rel.pos_y,
                        sin_a * rel.pos_x + cos_a * rel.pos_y)
        return rotated + pivot

    def oriented_angle(self, other):
        """Return the counter-clockwise angle from self to other, in [0, 2*pi)."""
        unit_a = self.normalized()
        unit_b = other.normalized()
        sin_angle = unit_a.cross(unit_b)   # sin of the angle between them
        cos_angle = unit_a.dot(unit_b)     # cos of the angle between them
        # Clamp into acos's valid domain to absorb floating-point error.
        cos_angle = max(-1.0, min(1.0, cos_angle))
        angle = acos(cos_angle) if sin_angle > 0 else -acos(cos_angle)
        if angle < 0:
            angle = angle + 2 * pi
        return angle

    def __neg__(self):
        return Vec2D(-self.pos_x, -self.pos_y)

    def __add__(self, other):
        return Vec2D(self.pos_x + other.pos_x, self.pos_y + other.pos_y)

    def __sub__(self, other):
        return Vec2D(self.pos_x - other.pos_x, self.pos_y - other.pos_y)

    def __mul__(self, other):
        # Scalar multiplication only; `other` is a number.
        return Vec2D(self.pos_x * other, self.pos_y * other)

    def __str__(self):
        return "({x},{y})".format(x=self.pos_x, y=self.pos_y)

    def is_equal(self, other):
        """True when self and other are within EPSILON of each other."""
        return (self - other).length() < Vec2D.EPSILON

    @staticmethod
    def orientation(vec1, vec2, vec3):
        """Positive for a counter-clockwise point triple, negative for
        clockwise, zero for colinear points."""
        return (vec2 - vec1).cross(vec3 - vec2)
| 31415us/linda-lidar-rangefinder-playground | linda/Vec2D.py | Python | mit | 2,599 |
# -*- coding: utf-8 -*-
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import current
from gluon.storage import Storage
from gluon.validators import IS_NOT_EMPTY, IS_EMPTY_OR, IS_IN_SET
from s3 import s3_date, S3Represent
T = current.T
settings = current.deployment_settings
"""
Template settings
All settings which are to configure a specific template are located here
Deployers should ideally not need to edit any other files outside of their template folder
"""
# Pre-Populate
# http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/PrePopulate
# Configure/disable pre-population of the database.
# To pre-populate the database On 1st run should specify directory(s) in
# /private/templates/
# eg:
# ["default"] (1 is a shortcut for this)
# ["Standard"]
# ["IFRC_Train"]
# ["roles", "user"]
# Unless doing a manual DB migration, where prepopulate = 0
# In Production, prepopulate = 0 (to save 1x DAL hit every page)
settings.base.prepopulate = ["EVASS"]
# Theme (folder to use for views/layout.html)
settings.base.theme = "EVASS"
settings.ui.formstyle = "foundation"
settings.ui.filter_formstyle = "foundation_inline"
# Enable Guided Tours
#settings.base.guided_tour = True
# Authentication settings
# These settings should be changed _after_ the 1st (admin) user is
# registered in order to secure the deployment
# Should users be allowed to register themselves?
#settings.security.self_registration = False
# Do new users need to verify their email address?
#settings.auth.registration_requires_verification = True
# Do new users need to be approved by an administrator prior to being able to login?
#settings.auth.registration_requires_approval = True
# Allow a new user to be linked to a record (and a new record will be created if it doesn't already exist)
#settings.auth.registration_link_user_to = {"staff":T("Staff"),
# "volunteer":T("Volunteer"),
# "member":T("Member")}
# Always notify the approver of a new (verified) user, even if the user is automatically approved
settings.auth.always_notify_approver = False
# The name of the teams that users are added to when they opt-in to receive alerts
#settings.auth.opt_in_team_list = ["Updates"]
# Uncomment this to set the opt in default to True
#settings.auth.opt_in_default = True
# Uncomment this to request the Mobile Phone when a user registers
#settings.auth.registration_requests_mobile_phone = True
# Uncomment this to have the Mobile Phone selection during registration be mandatory
#settings.auth.registration_mobile_phone_mandatory = True
# Uncomment this to request the Organisation when a user registers
#settings.auth.registration_requests_organisation = True
# Uncomment this to have the Organisation selection during registration be mandatory
#settings.auth.registration_organisation_required = True
# Uncomment this to have the Organisation input hidden unless the user enters a non-whitelisted domain
#settings.auth.registration_organisation_hidden = True
# Uncomment this to default the Organisation during registration
#settings.auth.registration_organisation_default = "My Organisation"
# Uncomment this to request the Organisation Group when a user registers
#settings.auth.registration_requests_organisation_group = True
# Uncomment this to have the Organisation Group selection during registration be mandatory
#settings.auth.registration_organisation_group_required = True
# Uncomment this to request the Site when a user registers
#settings.auth.registration_requests_site = True
# Uncomment this to allow Admin to see Organisations in user Admin even if the Registration doesn't request this
#settings.auth.admin_sees_organisation = True
# Uncomment to set the default role UUIDs assigned to newly-registered users
# This is a dictionary of lists, where the key is the realm that the list of roles applies to
# The key 0 implies not realm restricted
# The keys "organisation_id" and "site_id" can be used to indicate the user's "organisation_id" and "site_id"
#settings.auth.registration_roles = { 0: ["STAFF", "PROJECT_EDIT"]}
# Uncomment this to enable record approval
#settings.auth.record_approval = True
# Uncomment this and specify a list of tablenames for which record approval is required
#settings.auth.record_approval_required_for = ["project_project"]
# Uncomment this to request an image when users register
#settings.auth.registration_requests_image = True
# Uncomment this to direct newly-registered users to their volunteer page to be able to add extra details
# NB This requires Verification/Approval to be Off
# @ToDo: Extend to all optional Profile settings: Homepage, Twitter, Facebook, Mobile Phone, Image
#settings.auth.registration_volunteer = True
# Terms of Service to be able to Register on the system
# uses <template>/views/tos.html
#settings.auth.terms_of_service = True
# Uncomment this to allow users to Login using Gmail's SMTP
#settings.auth.gmail_domains = ["gmail.com"]
# Uncomment this to allow users to Login using OpenID
#settings.auth.openid = True
# Uncomment this to enable presence records on login based on HTML5 geolocations
#settings.auth.set_presence_on_login = True
# Uncomment this and specify a list of location levels to be ignored by presence records
#settings.auth.ignore_levels_for_presence = ["L0", "L1", "L2", "L3"]
# Uncomment this to enable the creation of new locations if a user logs in from an unknown location. Warning: This may lead to many useless location entries
#settings.auth.create_unknown_locations = True
# L10n settings
# Default timezone for users
#settings.L10n.utc_offset = "UTC +0000"
# Uncomment these to use US-style dates in English (localisations can still convert to local format)
#settings.L10n.time_format = T("%H:%M:%S")
settings.L10n.date_format = T("%d/%m/%Y")
# Start week on Sunday
#settings.L10n.firstDOW = 0
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = ","
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = "."
# Default Country Code for telephone numbers
settings.L10n.default_country_code = +39
# Make last name in person/user records mandatory
settings.L10n.mandatory_lastname = True
# Configure the list of Religions
settings.L10n.religions = OrderedDict([("unknown", T("Unknown")),
("bahai", T("Bahai")),
("buddhist", T("Buddhist")),
("christian", T("Christian")),
("hindu", T("Hindu")),
("jewish", T("Jewish")),
("muslim", T("Muslim")),
("other", T("other"))
])
# Configure the list of measurement units
# Edit the options order to change the measurement unit.
# option "1" will be used within EDEN.
settings.L10n.measurement_lenght_m = {1: "m",
2: "yd",
}
settings.L10n.measurement_lenght_cm = {1: "cm",
2: "in",
3: "ft",
}
settings.L10n.measurement_lenght_km = {1: "Km",
2: "mi",
3: "naut mi",
}
settings.L10n.measurement_area_m = {1: "m²",
2: "yd²",
3: "ac",
}
settings.L10n.measurement_area_km = {1: "Km²",
2: "mi²",
3: "ac",
}
settings.L10n.measurement_vol_l = {1: "l",
2: "US_gal",
3: "Uk_gal",
}
settings.L10n.measurement_weight_g = {1: "g",
2: "oz",
3: "ozt",
}
settings.L10n.measurement_weight_kg = {1: "kg",
2: "lb",
}
# Configure marital status options
settings.L10n.maritalStatus_default = 1
settings.L10n.maritalStatus = {1: T("Unknown"),
2: T("Single"),
3: T("Cohabit"),
4: T("Married"),
5: T("Divorced"),
6: T("Separated"),
7: T("Widowed"),
8: T("Other"),
}
# Uncomment this to Translate CMS Series Names
#settings.L10n.translate_cms_series = True
# Uncomment this to Translate Layer Names
#settings.L10n.translate_gis_layer = True
# Uncomment this to Translate Location Names
#settings.L10n.translate_gis_location = True
# Finance settings
settings.fin.currency_default = "EUR"
settings.fin.currencies = {
"EUR": T("Euros"),
"GBP": T("Great British Pounds"),
"USD": T("United States Dollars"),
}
#settings.fin.currency_writable = False # False currently breaks things
# PDF settings
# Default page size for reports (defaults to A4)
#settings.base.paper_size = T("Letter")
# Location of Logo used in pdfs headers
#settings.ui.pdf_logo = "static/img/mylogo.png"
# GIS (Map) settings
# Size of the Embedded Map
# Change this if-required for your theme
# NB API can override this in specific modules
#settings.gis.map_height = 600
#settings.gis.map_width = 1000
# Restrict the Location Selector to just certain countries
# NB This can also be over-ridden for specific contexts later
# e.g. Activities filtered to those of parent Project
settings.gis.countries = ["IT"]
# Uncomment to pass Addresses imported from CSV to a Geocoder to try and automate Lat/Lon
#settings.gis.geocode_imported_addresses = "google"
# Hide the Map-based selection tool in the Location Selector
#settings.gis.map_selector = False
# Hide LatLon boxes in the Location Selector
#settings.gis.latlon_selector = False
# Use Building Names as a separate field in Street Addresses?
settings.gis.building_name = False
# Use a non-default fillColor for Clustered points
#settings.gis.cluster_fill = "8087ff"
# Use a non-default strokeColor for Clustered points
#settings.gis.cluster_stroke = "2b2f76"
# Use a non-default fillColor for Selected points
#settings.gis.select_fill = "ffdc33"
# Use a non-default strokeColor for Selected points
#settings.gis.select_stroke = "ff9933"
# Display Resources recorded to Admin-Level Locations on the map
# @ToDo: Move into gis_config?
# Uncomment to fall back to country LatLon to show resources, if nothing better available
#settings.gis.display_L0 = True
# Currently unused
#settings.gis.display_L1 = False
# Uncomment this to do deduplicate lookups on Imports via PCode (as an alternative to Name)
#settings.gis.lookup_pcode = True
# Set this if there will be multiple areas in which work is being done,
# and a menu to select among them is wanted.
#settings.gis.menu = "Maps"
# Maximum Marker Size
# (takes effect only on display)
#settings.gis.marker_max_height = 35
#settings.gis.marker_max_width = 30
# Duplicate Features so that they show wrapped across the Date Line?
# Points only for now
# lon<0 have a duplicate at lon+360
# lon>0 have a duplicate at lon-360
#settings.gis.duplicate_features = True
# Uncomment to use CMS to provide Metadata on Map Layers
#settings.gis.layer_metadata = True
# Uncomment to hide Layer Properties tool
#settings.gis.layer_properties = False
# Uncomment to hide the Base Layers folder in the LayerTree
#settings.gis.layer_tree_base = False
# Uncomment to hide the Overlays folder in the LayerTree
#settings.gis.layer_tree_overlays = False
# Uncomment to not expand the folders in the LayerTree by default
#settings.gis.layer_tree_expanded = False
# Uncomment to have custom folders in the LayerTree use Radio Buttons
#settings.gis.layer_tree_radio = True
# Uncomment to display the Map Legend as a floating DIV
#settings.gis.legend = "float"
# Mouse Position: 'normal', 'mgrs' or None
#settings.gis.mouse_position = "mgrs"
# Uncomment to hide the Overview map
#settings.gis.overview = False
# Uncomment to hide the permalink control
#settings.gis.permalink = False
# Uncomment to disable the ability to add PoIs to the main map
#settings.gis.pois = False
# PoIs to export in KML/OSM feeds from Admin locations
#settings.gis.poi_resources = ["cr_shelter", "hms_hospital", "org_office"]
# Uncomment to hide the ScaleLine control
#settings.gis.scaleline = False
# Uncomment to modify the Simplify Tolerance
#settings.gis.simplify_tolerance = 0.001
# Uncomment to hide the Zoom control
#settings.gis.zoomcontrol = False
# Messaging Settings
# If you wish to use a parser.py in another folder than "default"
#settings.msg.parser = "mytemplatefolder"
# Use 'soft' deletes
#settings.security.archive_not_delete = False
# AAA Settings
# Security Policy
# http://eden.sahanafoundation.org/wiki/S3AAA#System-widePolicy
# 1: Simple (default): Global as Reader, Authenticated as Editor
# 2: Editor role required for Update/Delete, unless record owned by session
# 3: Apply Controller ACLs
# 4: Apply both Controller & Function ACLs
# 5: Apply Controller, Function & Table ACLs
# 6: Apply Controller, Function, Table ACLs and Entity Realm
# 7: Apply Controller, Function, Table ACLs and Entity Realm + Hierarchy
# 8: Apply Controller, Function, Table ACLs, Entity Realm + Hierarchy and Delegations
#
settings.security.policy = 5
# Ownership-rule for records without owner:
# True = not owned by any user (strict ownership, default)
# False = owned by any authenticated user
#settings.security.strict_ownership = False
# Audit
# - can be a callable for custom hooks (return True to also perform normal logging, or False otherwise)
# NB Auditing (especially Reads) slows system down & consumes diskspace
#settings.security.audit_read = True
#settings.security.audit_write = True
# Lock-down access to Map Editing
#settings.security.map = True
# Allow non-MapAdmins to edit hierarchy locations? Defaults to True if not set.
# (Permissions can be set per-country within a gis_config)
#settings.gis.edit_Lx = False
# Allow non-MapAdmins to edit group locations? Defaults to False if not set.
#settings.gis.edit_GR = True
# Note that editing of locations used as regions for the Regions menu is always
# restricted to MapAdmins.
# Uncomment to disable that LatLons are within boundaries of their parent
#settings.gis.check_within_parent_boundaries = False
# Enable this for a UN-style deployment
#settings.ui.cluster = True
# Enable Social Media share buttons
#settings.ui.social_buttons = True
# Enable this to show pivot table options form by default
#settings.ui.hide_report_options = False
# Uncomment to show created_by/modified_by using Names not Emails
#settings.ui.auth_user_represent = "name"
# Uncomment to restrict the export formats available
#settings.ui.export_formats = ["kml", "pdf", "rss", "xls", "xml"]
# Uncomment to include an Interim Save button on CRUD forms
#settings.ui.interim_save = True
# -----------------------------------------------------------------------------
# CMS
# Uncomment to use Bookmarks in Newsfeed
#settings.cms.bookmarks = True
# Uncomment to use Rich Text editor in Newsfeed
#settings.cms.richtext = True
# Uncomment to show tags in Newsfeed
#settings.cms.show_tags = True
# -----------------------------------------------------------------------------
# Organisations
# Uncomment to use an Autocomplete for Organisation lookup fields
#settings.org.autocomplete = True
# Enable the use of Organisation Branches
settings.org.branches = True
# Enable the use of Organisation Groups & what their name is
#settings.org.groups = "Coalition"
#settings.org.groups = "Network"
# Enable the use of Organisation Regions
settings.org.regions = True
# Set the length of the auto-generated org/site code the default is 10
#settings.org.site_code_len = 3
# Set the label for Sites
#settings.org.site_label = "Facility"
# Uncomment to show the date when a Site (Facilities-only for now) was last contacted
#settings.org.site_last_contacted = True
# Uncomment to use an Autocomplete for Site lookup fields
#settings.org.site_autocomplete = True
# Extra fields to show in Autocomplete Representations
#settings.org.site_autocomplete_fields = ["instance_type", "location_id$L1", "organisation_id$name"]
# Uncomment to have Site Autocompletes search within Address fields
#settings.org.site_address_autocomplete = True
# Uncomment to hide inv & req tabs from Sites
#settings.org.site_inv_req_tabs = False
# Uncomment to add summary fields for Organisations/Offices for # National/International staff
#settings.org.summary = True
# Enable certain fields just for specific Organisations
# Requires a call to settings.set_org_dependent_field(field)
# empty list => disabled for all (including Admin)
#settings.org.dependent_fields = \
# {#"<table name>.<field name>" : ["<Organisation Name>"],
# "pr_person_details.mother_name" : [],
# "pr_person_details.father_name" : [],
# "pr_person_details.company" : [],
# "pr_person_details.affiliations" : [],
# "vol_volunteer.active" : [],
# "vol_volunteer_cluster.vol_cluster_type_id" : [],
# "vol_volunteer_cluster.vol_cluster_id" : [],
# "vol_volunteer_cluster.vol_cluster_position_id" : [],
# }
# -----------------------------------------------------------------------------
# Human Resource Management
# Uncomment to change the label for 'Staff'
#settings.hrm.staff_label = "Contacts"
# Uncomment to allow Staff & Volunteers to be registered without an email address
settings.hrm.email_required = False
# Uncomment to allow Staff & Volunteers to be registered without an Organisation
settings.hrm.org_required = False
# Uncomment to allow HR records to be deletable rather than just marking them as obsolete
settings.hrm.deletable = True
# Uncomment to filter certificates by (root) Organisation & hence not allow Certificates from other orgs to be added to a profile (except by Admin)
#settings.hrm.filter_certificates = True
# Uncomment to allow HRs to have multiple Job Titles
settings.hrm.multiple_job_titles = True
# Uncomment to hide the Staff resource
settings.hrm.show_staff = False
# Uncomment to allow hierarchical categories of Skills, which each need their own set of competency levels.
#settings.hrm.skill_types = True
# Uncomment to disable Staff experience
settings.hrm.staff_experience = False
# Uncomment to disable Volunteer experience
settings.hrm.vol_experience = False
# Uncomment to show the Organisation name in HR represents
settings.hrm.show_organisation = True
# Uncomment to consolidate tabs into a single CV
#settings.hrm.cv_tab = True
# Uncomment to consolidate tabs into Staff Record
#settings.hrm.record_tab = True
# Uncomment to disable the use of Volunteer Awards
#settings.hrm.use_awards = False
# Uncomment to disable the use of HR Certificates
#settings.hrm.use_certificates = False
# Uncomment to disable the use of HR Credentials
#settings.hrm.use_credentials = False
# Uncomment to disable the use of HR Description
#settings.hrm.use_description = False
# Uncomment to enable the use of HR Education
#settings.hrm.use_education = True
# Uncomment to disable the use of HR ID
#settings.hrm.use_id = False
# Uncomment to disable the use of HR Skills
#settings.hrm.use_skills = False
# Uncomment to disable the use of HR Teams
#settings.hrm.teams = False
# Uncomment to disable the use of HR Trainings
#settings.hrm.use_trainings = False
# -----------------------------------------------------------------------------
# Inventory Management
#settings.inv.collapse_tabs = False
# Uncomment to customise the label for Facilities in Inventory Management
#settings.inv.facility_label = "Facility"
# Uncomment if you need a simpler (but less accountable) process for managing stock levels
#settings.inv.direct_stock_edits = True
# Uncomment to call Stock Adjustments, 'Stock Counts'
#settings.inv.stock_count = True
# Use the term 'Order' instead of 'Shipment'
#settings.inv.shipment_name = "order"
# Uncomment to not track pack values
#settings.inv.track_pack_values = False
#settings.inv.show_mode_of_transport = True
#settings.inv.send_show_org = False
#settings.inv.send_show_time_in = True
#settings.inv.send_form_name = "Tally Out Sheet"
#settings.inv.send_short_name = "TO"
#settings.inv.send_ref_field_name = "Tally Out Number"
#settings.inv.recv_form_name = "Acknowledgement Receipt for Donations Received Form"
#settings.inv.recv_shortname = "ARDR"
# Types common to both Send and Receive
#settings.inv.shipment_types = {
# 0: T("-"),
# 1: T("Other Warehouse"),
# 2: T("Donation"),
# 3: T("Foreign Donation"),
# 4: T("Local Purchases"),
# 5: T("Confiscated Goods from Bureau Of Customs")
# }
#settings.inv.send_types = {
# 21: T("Distribution")
# }
#settings.inv.send_type_default = 1
#settings.inv.recv_types = {
# 32: T("Donation"),
# 34: T("Purchase"),
# }
#settings.inv.item_status = {
# 0: current.messages["NONE"],
# 1: T("Dump"),
# 2: T("Sale"),
# 3: T("Reject"),
# 4: T("Surplus")
# }
# -----------------------------------------------------------------------------
# Requests Management
# Uncomment to disable Inline Forms in Requests module
#settings.req.inline_forms = False
# Label for Inventory Requests
#settings.req.type_inv_label = "Donations"
# Label for People Requests
#settings.req.type_hrm_label = "Volunteers"
# Label for Requester
#settings.req.requester_label = "Site Contact"
#settings.req.requester_optional = True
# Uncomment if the User Account logging the Request is NOT normally the Requester
#settings.req.requester_is_author = False
# Filter Requester as being from the Site
#settings.req.requester_from_site = True
# Set the Requester as being an HR for the Site if no HR record yet & as Site contact if none yet exists
#settings.req.requester_to_site = True
#settings.req.date_writable = False
# Allow the status for requests to be set manually,
# rather than just automatically from commitments and shipments
#settings.req.status_writable = False
#settings.req.item_quantities_writable = True
#settings.req.skill_quantities_writable = True
#settings.req.show_quantity_transit = False
#settings.req.multiple_req_items = False
#settings.req.prompt_match = False
#settings.req.items_ask_purpose = False
# Uncomment to disable the Commit step in the workflow & simply move direct to Ship
#settings.req.use_commit = False
# Uncomment to have Donations include a 'Value' field
#settings.req.commit_value = True
# Uncomment to allow Donations to be made without a matching Request
#settings.req.commit_without_request = True
# Uncomment if the User Account logging the Commitment is NOT normally the Committer
#settings.req.comittter_is_author = False
# Should Requests ask whether Security is required?
#settings.req.ask_security = True
# Should Requests ask whether Transportation is required?
#settings.req.ask_transport = True
#settings.req.use_req_number = False
#settings.req.generate_req_number = False
#settings.req.req_form_name = "Request Issue Form"
#settings.req.req_shortname = "RIS"
# Restrict the type of requests that can be made, valid values in the
# list are ["Stock", "People", "Other"]. If this is commented out then
# all types will be valid.
#settings.req.req_type = ["Stock"]
# Uncomment to enable Summary 'Site Needs' tab for Offices/Facilities
#settings.req.summary = True
# Uncomment to restrict adding new commits to Completed commits
#settings.req.req_restrict_on_complete = True
# Custom Crud Strings for specific req_req types
#settings.req.req_crud_strings = dict()
#ADD_ITEM_REQUEST = T("Make a Request for Donations")
# req_req Crud Strings for Item Request (type=1)
#settings.req.req_crud_strings[1] = Storage(
# title_create = ADD_ITEM_REQUEST,
# title_display = T("Request for Donations Details"),
# title_list = T("Requests for Donations"),
# title_update = T("Edit Request for Donations"),
# title_search = T("Search Requests for Donations"),
# subtitle_create = ADD_ITEM_REQUEST,
# label_list_button = T("List Requests for Donations"),
# label_create_button = ADD_ITEM_REQUEST,
# label_delete_button = T("Delete Request for Donations"),
# msg_record_created = T("Request for Donations Added"),
# msg_record_modified = T("Request for Donations Updated"),
# msg_record_deleted = T("Request for Donations Canceled"),
# msg_list_empty = T("No Requests for Donations"))
#ADD_PEOPLE_REQUEST = T("Make a Request for Volunteers")
# req_req Crud Strings for People Request (type=3)
#settings.req.req_crud_strings[3] = Storage(
# title_create = ADD_PEOPLE_REQUEST,
# title_display = T("Request for Volunteers Details"),
# title_list = T("Requests for Volunteers"),
# title_update = T("Edit Request for Volunteers"),
# title_search = T("Search Requests for Volunteers"),
# subtitle_create = ADD_PEOPLE_REQUEST,
# label_list_button = T("List Requests for Volunteers"),
# label_create_button = ADD_PEOPLE_REQUEST,
# label_delete_button = T("Delete Request for Volunteers"),
# msg_record_created = T("Request for Volunteers Added"),
# msg_record_modified = T("Request for Volunteers Updated"),
# msg_record_deleted = T("Request for Volunteers Canceled"),
# msg_list_empty = T("No Requests for Volunteers"))
# -----------------------------------------------------------------------------
# Supply
#settings.supply.use_alt_name = False
# Do not edit after deployment
#settings.supply.catalog_default = T("Default")
# -----------------------------------------------------------------------------
# Projects
# Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR)
# Active project-module configuration for this template: full 3W/DRR mode
# with tasks, activities, hazards, milestones, themes and multiple
# budgets/organisations enabled.
settings.project.mode_3w = True
# Uncomment this to use DRR (Disaster Risk Reduction) extensions
settings.project.mode_drr = True
# Uncomment this to use settings suitable for detailed Task management
settings.project.mode_task = True
# Uncomment this to use Activities for projects
settings.project.activities = True
# Uncomment this to use Activity Types for Activities/Projects
settings.project.activity_types = True
# Uncomment this to use Codes for projects
settings.project.codes = True
# Uncomment this to call project locations 'Communities'
settings.project.community = True
# Uncomment this to enable Hazards in 3W projects
settings.project.hazards = True
# Uncomment this to enable Milestones in projects
settings.project.milestones = True
# Uncomment this to link Activities to Projects
settings.project.projects = True
# Uncomment this to disable Sectors in projects
#settings.project.sectors = False
# Uncomment this to enable Themes in 3W projects
#settings.project.themes = True
# Uncomment this to use Theme Percentages for projects
settings.project.theme_percentages = True
# Uncomment this to use multiple Budgets per project
settings.project.multiple_budgets = True
# Uncomment this to use multiple Organisations per project
settings.project.multiple_organisations = True
# Uncomment this to customise
# Links to Filtered Components for Donors & Partners
#settings.project.organisation_roles = {
# 1: T("Lead Implementer"), # T("Host National Society")
# 2: T("Partner"), # T("Partner National Society")
# 3: T("Donor"),
# 4: T("Customer"), # T("Beneficiary")?
# 5: T("Super"), # T("Beneficiary")?
#}
#settings.project.organisation_lead_role = 1
# -----------------------------------------------------------------------------
# Filter Manager
#settings.search.filter_manager = False
# if you want to have videos appearing in /default/video
#settings.base.youtube_id = [dict(id = "introduction",
# title = T("Introduction"),
# video_id = "HR-FtR2XkBU"),]
# -----------------------------------------------------------------------------
def customise_pr_person_resource(r, tablename):
    """Customise the pr_person resource for this template.

    Restricts gender to female/male (defaulting to male), makes last
    name and date of birth mandatory inside the EVR controller, hides
    physical-description fields that are not needed, and enables the
    ethnicity and place-of-birth fields.
    """
    T = current.T
    s3db = current.s3db
    table = r.resource.table
    # Disallow "unknown" gender and defaults to "male"
    # NOTE(review): assumes keys 2/3 in s3db.pr_gender_opts are
    # female/male -- confirm against modules/s3db/pr.py
    evr_gender_opts = {k: v for k, v in s3db.pr_gender_opts.items()
                       if k in (2, 3)}
    gender = table.gender
    gender.requires = IS_IN_SET(evr_gender_opts, zero=None)
    gender.default = 3
    if r.controller == "evr":
        # Last name and date of birth mandatory in EVR module
        table.last_name.requires = IS_NOT_EMPTY(
            error_message = T("Please enter a last name"))
        dob_requires = s3_date("dob",
                               future = 0,
                               past = 1320,
                               empty = False).requires
        dob_requires.error_message = T("Please enter a date of birth")
        table.date_of_birth.requires = dob_requires
    # Disable unneeded physical details
    pdtable = s3db.pr_physical_description
    for hidden_fname in ("race",
                         "complexion",
                         "height",
                         "hair_length",
                         "hair_style",
                         "hair_baldness",
                         "facial_hair_type",
                         "facial_hair_length",
                         "facial_hair_color",
                         "facial_hair_comment",
                         "body_hair",
                         "skin_marks",
                         "medical_conditions",
                         ):
        field = pdtable[hidden_fname]
        field.readable = field.writable = False
    # This set is suitable for Italy
    ethnicity_opts = ("Italian",
                      "Chinese",
                      "Albanese",
                      "Philippine",
                      "Pakistani",
                      "English",
                      "African",
                      "Other",
                      "Unknown",
                      )
    ethnicity_opts = {opt: T(opt) for opt in ethnicity_opts}
    ethnicity = pdtable.ethnicity
    ethnicity.readable = ethnicity.writable = True
    ethnicity.requires = IS_EMPTY_OR(IS_IN_SET(ethnicity_opts,
                                               sort=True))
    ethnicity.represent = S3Represent(options=ethnicity_opts,
                                      translate=True)
    # Enable place of birth
    place_of_birth = s3db.pr_person_details.place_of_birth
    place_of_birth.readable = place_of_birth.writable = True
settings.customise_pr_person_resource = customise_pr_person_resource
# -----------------------------------------------------------------------------
# Comment/uncomment modules here to disable/enable them
# @ToDo: Have the system automatically enable migrate if a module is enabled
# Modules menu is defined in modules/eden/menu.py
settings.modules = OrderedDict([
    # module_type: position of the item in the modules menu (None = hidden);
    # restricted: use ACLs to control access; access: role pattern required.
    # Core modules which shouldn't be disabled
    ("default", Storage(
            name_nice = T("Home"),
            restricted = False, # Use ACLs to control access to this module
            access = None,      # All Users (inc Anonymous) can see this module in the default menu & access the controller
            module_type = None  # This item is not shown in the menu
        )),
    ("admin", Storage(
            name_nice = T("Administration"),
            #description = "Site Administration",
            restricted = True,
            access = "|1|",     # Only Administrators can see this module in the default menu & access the controller
            module_type = None  # This item is handled separately for the menu
        )),
    ("appadmin", Storage(
            name_nice = T("Administration"),
            #description = "Site Administration",
            restricted = True,
            module_type = None  # No Menu
        )),
    ("errors", Storage(
            name_nice = T("Ticket Viewer"),
            #description = "Needed for Breadcrumbs",
            restricted = False,
            module_type = None  # No Menu
        )),
    ("sync", Storage(
            name_nice = T("Synchronization"),
            #description = "Synchronization",
            restricted = True,
            access = "|1|",     # Only Administrators can see this module in the default menu & access the controller
            module_type = None  # This item is handled separately for the menu
        )),
    ("tour", Storage(
            name_nice = T("Guided Tour Functionality"),
            module_type = None,
        )),
    ("translate", Storage(
            name_nice = T("Translation Functionality"),
            #description = "Selective translation of strings based on module.",
            module_type = None,
        )),
    # Uncomment to enable internal support requests
    #("support", Storage(
    #        name_nice = T("Support"),
    #        #description = "Support Requests",
    #        restricted = True,
    #        module_type = None  # This item is handled separately for the menu
    #    )),
    ("gis", Storage(
            name_nice = T("Map"),
            #description = "Situation Awareness & Geospatial Analysis",
            restricted = True,
            module_type = 6,     # 6th item in the menu
        )),
    ("pr", Storage(
            name_nice = T("Person Registry"),
            #description = "Central point to record details on People",
            restricted = True,
            access = "|1|",     # Only Administrators can see this module in the default menu (access to controller is possible to all still)
            module_type = 10
        )),
    ("org", Storage(
            name_nice = T("Organizations"),
            #description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
            restricted = True,
            module_type = 2
        )),
    # All modules below here should be possible to disable safely
    ("hrm", Storage(
            name_nice = T("Staff"),
            #description = "Human Resources Management",
            restricted = True,
            module_type = 3,
        )),
    ("vol", Storage(
            name_nice = T("Volunteers"),
            #description = "Human Resources Management",
            restricted = True,
            module_type = 4,
        )),
    ("cms", Storage(
            name_nice = T("Content Management"),
            #description = "Content Management System",
            restricted = True,
            module_type = 9,
        )),
    ("doc", Storage(
            name_nice = T("Documents"),
            #description = "A library of digital resources, such as photos, documents and reports",
            restricted = True,
            module_type = None,
        )),
    ("msg", Storage(
            name_nice = T("Messaging"),
            #description = "Sends & Receives Alerts via Email & SMS",
            restricted = True,
            # The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
            module_type = 1,
        )),
    ("supply", Storage(
            name_nice = T("Supply Chain Management"),
            #description = "Used within Inventory Management, Request Management and Asset Management",
            restricted = True,
            module_type = None, # Not displayed
        )),
    ("inv", Storage(
            name_nice = T("Warehouses"),
            #description = "Receiving and Sending Items",
            restricted = True,
            module_type = 4
        )),
    #("asset", Storage(
    #        name_nice = T("Assets"),
    #        ##description = "Recording and Assigning Assets",
    #        restricted = True,
    #        module_type = 5,
    #    )),
    #("req", Storage(
    #        name_nice = T("Requests"),
    #        #description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
    #        restricted = True,
    #        module_type = 10,
    #    )),
    ("cr", Storage(
            name_nice = T("Shelters"),
            #description = "Tracks the location, capacity and breakdown of victims in Shelters",
            restricted = True,
            module_type = 5
        )),
    ("hms", Storage(
            name_nice = T("Hospitals"),
            #description = "Helps to monitor status of hospitals",
            restricted = True,
            module_type = 10
        )),
    ("irs", Storage(
            name_nice = T("Incidents"),
            #description = "Incident Reporting System",
            restricted = True,
            module_type = 6
        )),
    #("dvi", Storage(
    #        name_nice = T("Disaster Victim Identification"),
    #        ##description = "Disaster Victim Identification",
    #        restricted = True,
    #        module_type = 10,
    #        ##access = "|DVI|", # Only users with the DVI role can see this module in the default menu & access the controller
    #    )),
    #("dvr", Storage(
    #        name_nice = T("Disaster Victim Registry"),
    #        ##description = "Allow affected individuals & households to register to receive compensation and distributions",
    #        restricted = True,
    #        module_type = 10,
    #    )),
    # @todo: implement evr module
    # ("evr", Storage(
    #        name_nice = T("Evacuees"),
    #        #description = "Evacuees Registry",
    #        restricted = True, # use Access Control Lists to see this module
    #        module_type = 7
    #    )),
    ("event", Storage(
            name_nice = T("Events"),
            #description = "Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities).",
            restricted = True,
            module_type = 8,
        )),
    ("stats", Storage(
            name_nice = T("Statistics"),
            #description = "Manages statistics",
            restricted = True,
            module_type = None,
        )),
    # @ToDo: Rewrite in a modern style
    #("budget", Storage(
    #        name_nice = T("Budgeting Module"),
    #        #description = "Allows a Budget to be drawn up",
    #        restricted = True,
    #        module_type = 10
    #    )),
])
| code-for-india/sahana_shelter_worldbank | private/templates/EVASS/config.py | Python | mit | 38,128 |
#Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Dubstep
#Problem level: 6 kyu
def song_decoder(song):
    """Remove every 'WUB' marker from *song* and collapse the words back
    into a single space-separated string (no leading/trailing spaces)."""
    words = song.replace('WUB', ' ').split()
    return ' '.join(words)
| Kunalpod/codewars | dubstep.py | Python | mit | 163 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')  # generic type returned by the optional `cls` response callback
# Signature of the custom deserialization hook accepted via kwargs['cls']
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkManagementClientOperationsMixin:
    """Auto-generated mixin providing client-level operations (API version
    2019-07-01). Expects the host class to supply ``self._client``,
    ``self._config``, ``self._serialize`` and ``self._deserialize``."""
    async def check_dns_name_availability(
        self,
        location: str,
        domain_name_label: str,
        **kwargs: Any
    ) -> "_models.DnsNameAvailabilityResult":
        """Checks whether a domain name in the cloudapp.azure.com zone is available for use.
        :param location: The location of the domain name.
        :type location: str
        :param domain_name_label: The domain name to be verified. It must conform to the following
         regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
        :type domain_name_label: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DnsNameAvailabilityResult, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_07_01.models.DnsNameAvailabilityResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DnsNameAvailabilityResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"
        # Construct URL
        url = self.check_dns_name_availability.metadata['url']  # type: ignore
        path_format_arguments = {
            'location': self._serialize.url("location", location, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['domainNameLabel'] = self._serialize.query("domain_name_label", domain_name_label, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        # Send the request through the client pipeline (non-streaming)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('DnsNameAvailabilityResult', pipeline_response)
        if cls:
            # Caller supplied a custom response hook -- delegate to it
            return cls(pipeline_response, deserialized, {})
        return deserialized
    check_dns_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability'}  # type: ignore
    async def supported_security_providers(
        self,
        resource_group_name: str,
        virtual_wan_name: str,
        **kwargs: Any
    ) -> "_models.VirtualWanSecurityProviders":
        """Gives the supported security providers for the virtual wan.
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param virtual_wan_name: The name of the VirtualWAN for which supported security providers are
         needed.
        :type virtual_wan_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualWanSecurityProviders, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_07_01.models.VirtualWanSecurityProviders
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualWanSecurityProviders"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"
        # Construct URL
        url = self.supported_security_providers.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Unlike check_dns_name_availability, this operation has an ARM
            # error model; deserialize it best-effort for the exception
            error = self._deserialize.failsafe_deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualWanSecurityProviders', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    supported_security_providers.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/supportedSecurityProviders'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/aio/operations/_network_management_client_operations.py | Python | mit | 7,052 |
#!/usr/bin/env python
# Django's command-line utility for administrative tasks.
import os
import sys
if __name__ == "__main__":
    # Use the project settings unless the environment already overrides them
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "drf_ember.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| riklaunim/django-examples | ember-drf-example/manage.py | Python | mit | 252 |
# all the imports
import sqlite3
from flask import Flask, request, session, g, redirect, url_for, \
abort, render_template, flash
from queries import *
from functools import wraps
from contextlib import closing
from wtforms import SelectField, PasswordField, validators
from flask_wtf import Form
from flask_wtf.file import FileField, FileRequired, FileAllowed
from os import urandom, path
import uuid
import pandas as pd
# Positional column order used when inserting applicant CSV rows into the DB
# (see file_import); index 14 is the id column, 4 the e-mail column.
PERSON_COLS = (14, 0, 1 ,4, 2, 3, 8, 5, 7, 6, 9, 10, 11)
app = Flask(__name__)
# Prefer a deployment-specific config file when present; fall back to the
# mock/development config otherwise.
if path.exists("deploy_conf.py"):
    app.config.from_pyfile("deploy_conf.py")
    print("Using deploy configuration...")
else:
    app.config.from_pyfile("config.py")
    print("Using mock configuration...")
# The forms
###########
# Shared validator: the submitted value must be one of '0'..'4'; the
# placeholder option (value '100') therefore fails validation on purpose.
rating_validator = validators.AnyOf(['0', '1', '2', '3', '4'],
    message="You forgot to select an option")
class LoginForm(Form):
    """Token-only login form; the token must be a key of app.config['USERS']."""
    token = PasswordField('Enter your Token:', [validators.Required(),
        validators.AnyOf(app.config['USERS'].keys(), message="Invalid token!")])
class PersonForm(Form):
    """Rating form for one applicant: position, institution, distance, topic."""
    pos = SelectField('What do YOU think is the position of the applicant?',
        [rating_validator], choices=[('100', 'Pick one..'), ('0', 'Undergrad'),
        ('1', 'M.Sc./Ph.D. student or health pro.'),
        ('2', 'Postdoc, Associate Prof. or M.D.'), ('3', 'Principal Investigator')])
    inst = SelectField('How do you rate the institution?', [rating_validator],
        choices=[('100', 'Pick one..'), ('0', 'dubious'), ('1', 'average'),
        ('2', 'national leader'), ('3', 'international leader')])
    dist = SelectField('How do you rate the travel distance?', [rating_validator],
        choices=[('100', 'Pick one..'), ('0', 'local'), ('1', 'national'),
        ('2', 'international')])
    topic = SelectField('How do you rate the research topic?',
        [rating_validator], choices=[('100', 'Pick one..'), ('0', '0 - bad'),
        ('1', '1 - average'), ('2', '2 - amazing')])
class AbstractForm(Form):
    """Rating form for one applicant's abstract(s): quality and English."""
    abstract = SelectField('How do you rate the abstract(s)?', [rating_validator],
        choices=[('100', 'Pick one..'), ('0', '0 - insufficient'), ('1', '1 - barely acceptable'),
        ('2', '2 - acceptable'), ('3', '3 - pretty good'), ('4', '4 - amazing')])
    english = SelectField('How do you rate the quality of English?', [rating_validator],
        choices=[('100', 'Pick one..'), ('0', 'insufficient'), ('1', 'acceptable'),
        ('2', 'fluent')])
class ImportForm(Form):
    """Upload form for the three CSV files consumed by the /import view."""
    # NOTE(review): Flask-WTF's FileAllowed normally expects bare extensions
    # (e.g. ['csv']); with the leading dot the check may never match --
    # confirm against the installed flask_wtf version.
    persons = FileField('Choose an applicant file', [FileAllowed(['.csv'])])
    posters = FileField('Choose a poster abstracts file', [FileAllowed(['.csv'])])
    talks = FileField('Choose a talk abstracts file', [FileAllowed(['.csv'])])
###########
# Utility functions
###########
def connect_db():
    """Open and return a new connection to the configured SQLite database."""
    db_path = app.config['DATABASE']
    return sqlite3.connect(db_path)
def add_fakes(db, n, base_id=1):
    """Insert n fake applicant rows (ids base_id..base_id+n-1) for testing.

    Imports the third-party `faker` package lazily so production deployments
    do not need it installed.
    """
    from faker import Faker
    fake = Faker()
    for i in range(n):
        # Column order must match the `insert_complete` query from queries.py
        # NOTE(review): `fake.military_ship()` is not a standard Faker
        # provider -- presumably supplied by a plugin/locale; verify.
        vals = (str(base_id+i), fake.first_name(), fake.last_name(), fake.email(),
            'NA', fake.date(), fake.military_ship(), fake.company(), fake.state(),
            fake.country(), 'Ph.D.', fake.job(), fake.sentence(), fake.sentence(),
            fake.text(750), fake.name(), fake.company(), fake.sentence(),
            fake.text(450), fake.name(), fake.company())
        db.execute(insert_complete, vals)
    db.commit()
def init_db(n=0):
    """(Re)create the database schema from schema.sql.

    If n > 0, additionally populate the fresh database with n fake rows.
    """
    with closing(connect_db()) as db:
        with app.open_resource('schema.sql', mode='r') as f:
            db.cursor().executescript(f.read())
        if n > 0: add_fakes(db, n)
def make_token(n, word=None):
    """Return the first n hex characters of a token.

    Without *word* the token is random (uuid4).  With *word* it is
    deterministic: a uuid5 of word + the app's secret key.
    """
    if not word:
        return uuid.uuid4().hex[:n]
    seed = word + app.config['SECRET_KEY']
    return uuid.uuid5(uuid.NAMESPACE_DNS, seed).hex[:n]
def tokenize(users):
    """Map a deterministic 16-character token to each user record."""
    token_map = {}
    for user in users:
        token_map[make_token(16, user[0])] = user
    return token_map
@app.before_request
def before_request():
    """Open a fresh database connection for the incoming request."""
    g.db = connect_db()
@app.teardown_request
def teardown_request(exception):
    """Close the per-request database connection, if one was opened."""
    db = getattr(g, 'db', None)
    if db is not None:
        db.close()
def login_required(f):
    """View decorator: redirect anonymous visitors to the login page."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if 'user' not in session:
            return redirect(url_for('login'))
        return f(*args, **kwargs)
    return wrapper
#########3
# Routes
@app.route('/login', methods=['POST', 'GET'])
def login():
    """Token-based login: on a valid token, initialise the session and
    redirect to the dashboard; otherwise render the login form."""
    form = LoginForm(request.form)
    if request.method == 'POST' and form.validate():
        token = form.token.data
        # USERS maps token -> (name, role); role gates abstract review/import
        session['user'] = app.config['USERS'][token][0]
        session['role'] = app.config['USERS'][token][1]
        session['rated'] = 0
        session['p'] = None # next applicant to review
        session['a'] = None # next abstract to review
        flash('Thank you. Logging in...')
        return redirect(url_for('show_entries'))
    return render_template('login.html', form=form, n_user=len(app.config['USERS']))
@app.route('/')
@login_required
def show_entries():
    """Dashboard: per-user and global review-progress counters."""
    data = {}
    # Applicant ratings by this reviewer
    cur = g.db.execute(review_count, (session['user'],))
    data['nrev'] = cur.fetchone()[0]
    # Total applicant ratings by everyone
    cur = g.db.execute("select count(*) from ratings")
    data['ntotrev'] = cur.fetchone()[0]
    # Total abstract ratings by everyone
    cur = g.db.execute("select count(*) from abstracts")
    data['nabstotrev'] = cur.fetchone()[0]
    # Totals of applicants / abstracts to review
    cur = g.db.execute(person_count)
    data['ntot'] = cur.fetchone()[0]
    cur = g.db.execute(abstract_count)
    data['nabstot'] = cur.fetchone()[0]
    # Abstract ratings by this reviewer
    cur = g.db.execute(abstract_rev_count, (session['user'],))
    data['nabsrev'] = cur.fetchone()[0]
    return render_template('index.html',user=session['user'],
        role=session['role'], **data)
@app.route('/logout')
def logout():
    """Drop the login state from the session and return to the login page."""
    for key in ('user', 'role', 'rated'):
        session.pop(key, None)
    flash('You were logged out')
    return redirect(url_for('login'))
@app.route('/applicants', methods=['GET', 'POST'])
@login_required
def rate_person():
    """GET: fetch the next applicant this user has not yet rated.
    POST: store the submitted rating for the applicant cached in the session."""
    if request.method == 'GET':
        cur = g.db.execute(next_person, (session['user'],))
        session['p'] = cur.fetchone()  # None when nothing is left to rate
    form = PersonForm(request.form)
    if request.method == 'POST' and form.validate() and session['p']:
        g.db.execute(insert_person_rating, (session['p'][0], session['user'],
            form.pos.data, form.inst.data, form.dist.data, form.topic.data))
        g.db.commit()
        session['rated'] += 1
        return redirect(url_for('added', type='applicant'))
    return render_template('applicants.html', form=form, p=session['p'],
        user=session['user'], role=session['role'])
@app.route('/abstracts', methods=['GET', 'POST'])
@login_required
def rate_abstract():
    """Abstract review, restricted to users with the 'all' role.
    GET: fetch the next unrated abstract; POST: store the rating."""
    if session['role'] != 'all':
        return render_template('message.html', type='error', title='Nope...',
            message='You are not allowed to review abstracts :(',
            user=session['user'], role=session['role'])
    if request.method == 'GET':
        cur = g.db.execute(next_abstract, (session['user'],))
        session['a'] = cur.fetchone()  # None when nothing is left to rate
    form = AbstractForm(request.form)
    if request.method == 'POST' and form.validate() and session['a']:
        g.db.execute(insert_abstract_rating, (session['a'][0], session['user'],
            form.abstract.data, form.english.data))
        g.db.commit()
        session['rated'] += 1
        return redirect(url_for('added', type='abstract'))
    return render_template('abstracts.html', form=form, a=session['a'],
        user=session['user'], role=session['role'])
@app.route('/added/<type>')
@login_required
def added(type):
    """Confirmation page shown after a rating ('applicant' or 'abstract')."""
    return render_template('added.html', rated=session['rated'],
        user=session['user'], role=session['role'], type=type)
@app.route('/results')
@login_required
def results():
    """Ranked results: merge applicants with average applicant/abstract
    ratings, compute a total score, export a CSV and render the table."""
    persons = pd.read_sql("select * from persons", g.db)
    ratings = pd.read_sql(average_ratings, g.db)
    abstracts = pd.read_sql(average_abstracts, g.db)
    persons = pd.merge(persons, ratings, on='pid', how='left')
    persons = pd.merge(persons, abstracts, on='pid', how='left', suffixes=('_applicant', '_abstract'))
    # Unrated components count as 0 towards the total
    persons["total"] = persons[['p_position', 'p_institution', 'p_distance',
        'p_topic', 'p_abstract']].sum(axis=1).fillna(0)
    persons = persons.sort_values(by="total", ascending=False)
    # NOTE(review): static/res.csv is served without authentication --
    # confirm exposing the full ranked export there is intended.
    persons.to_csv('static/res.csv', encoding='utf-8')
    # zip() is a one-shot iterator in Python 3; the template iterates it once
    table = zip(range(1,persons.shape[0]+1), persons['first'] + ' ' + persons['last'],
        persons['institution'], persons['country'],
        persons['nrev_applicant'].fillna(0).astype(int).astype(str) +
        ' + ' + persons['nrev_abstract'].fillna(0).astype(int).astype(str),
        persons['total'].round(2))
    return render_template('results.html', table=table, user=session['user'],
        role=session['role'])
@app.route('/import', methods=['POST', 'GET'])
@login_required
def file_import():
    """Import applicants and abstracts from three uploaded CSV files.

    Expects an applicant CSV (15 columns), a poster-abstract CSV
    (7 columns) and a talk-abstract CSV (8 columns).  Applicants are
    inserted first; abstracts are then matched to applicants via their
    normalized e-mail address.  Only users with the 'all' role may import.

    All column lookups are purely positional; the long-removed pandas
    ``.ix`` indexer has been replaced by ``.iloc`` (behaviour-identical
    for integer positions), and ``Series.append`` (removed in pandas 2.0)
    by ``pd.concat``.
    """
    if session['role'] != 'all':
        return render_template('message.html', type='error', title='Nope...',
            message='You are not allowed to import data :(',
            user=session['user'], role=session['role'])
    form = ImportForm(request.form)
    if request.method == 'POST' and form.validate():
        try:
            p = pd.read_csv(request.files['persons'], skipinitialspace=True).fillna("NA")
            a_posters = pd.read_csv(request.files['posters'])
            a_talks = pd.read_csv(request.files['talks'])
            # Basic sanity checks on the expected file layouts
            if p.shape[1] != 15:
                raise ValueError("Wrong numbers of columns in applicant data!")
            elif a_posters.shape[1] != 7:
                raise ValueError("Wrong numbers of columns in poster data!")
            elif a_talks.shape[1] != 8:
                raise ValueError("Wrong numbers of columns in talk data!")
        except BaseException as e:
            msg = 'Could not parse the files. Please ensure that the uploaded \
            files are CSV files that can be read by pandas. Error: ' + str(e)
            return render_template('message.html', type='error', title='Parse error',
                message=msg, user=session['user'], role=session['role'])
        # Normalize the e-mail column (4) for matching; force the id column
        # (14) to string so it round-trips through SQLite consistently.
        p.iloc[:, 4] = p.iloc[:, 4].str.strip().str.lower()
        p.iloc[:, 14] = p.iloc[:, 14].astype('str')
        inserter = zip(*[p.iloc[:, i] for i in PERSON_COLS])
        g.db.executemany(insert_person, inserter)
        g.db.commit()
        # Match abstracts to applicants by normalized e-mail
        cur = g.db.execute(all_emails)
        emails = [e[0] for e in cur.fetchall()]
        a_posters['Email'] = a_posters['Email'].str.strip().str.lower()
        a_talks['Email'] = a_talks['Email'].str.strip().str.lower()
        a_posters['matched'] = a_posters['Email'].isin(emails)
        a_talks['matched'] = a_talks['Email'].isin(emails)
        # The weird order and columns is due to errors in the form
        # design which we can not alter anymore
        a = a_posters.loc[a_posters.matched]
        inserter = zip(a.iloc[:, 3], a.iloc[:, 4], a.iloc[:, 6], a.iloc[:, 5], a.iloc[:, 2])
        g.db.executemany(update_poster, inserter)
        a = a_talks.loc[a_talks.matched]
        inserter = zip(a.iloc[:, 3], a.iloc[:, 4], a.iloc[:, 5], a.iloc[:, 6], a.iloc[:, 2])
        g.db.executemany(update_talk, inserter)
        g.db.commit()
        cur = g.db.execute(person_count)
        n = cur.fetchone()[0]
        # Report abstract e-mails that did not match any applicant
        emails_not_found = pd.concat([a_posters.loc[~a_posters.matched]['Email'],
                                      a_talks.loc[~a_talks.matched]['Email']])
        emails_not_found = emails_not_found.unique()
        msg = '{} applicants in the database. Unmatched Emails ({}): {}'.format(
            n, len(emails_not_found), ', '.join(emails_not_found))
        return render_template('message.html', type='good', title='Added new data',
            message=msg, user=session['user'], role=session['role'])
    return render_template('import.html', form=form, user=session['user'],
        role=session['role'])
if __name__ == '__main__':
    # NOTE(review): debug=True combined with host="0.0.0.0" exposes the
    # Werkzeug debugger to the whole network -- disable debug in production.
    app.run(host="0.0.0.0", port=8000, debug=True)
| cdiener/rater | app.py | Python | mit | 11,748 |
from django.contrib import admin
from .models import File, Link
from .forms import FileForm
class FileAdmin(admin.ModelAdmin):
    """Admin changelist for File: id/md5/file/size, md5 links to the form."""
    list_display = ('id', 'md5', 'file', 'size')
    list_per_page = 100
    list_display_links = ('md5',)
    form = FileForm  # custom validation/widgets for file uploads
class LinkAdmin(admin.ModelAdmin):
    """Admin changelist for Link: id/name/file/user, name links to the form."""
    list_display = ('id', 'name', 'file', 'user')
    list_per_page = 100
    list_display_links = ('name',)
# Register both models with the default admin site
admin.site.register(File, FileAdmin)
admin.site.register(Link, LinkAdmin)
| chaos-soft/chocola | files/admin.py | Python | mit | 479 |
"""Class definitions for Speaker Adapted Triphone trainer"""
from __future__ import annotations
import multiprocessing as mp
import os
import re
import shutil
import subprocess
import time
from queue import Empty
from typing import Dict, List, NamedTuple
import tqdm
from montreal_forced_aligner.acoustic_modeling.triphone import TriphoneTrainer
from montreal_forced_aligner.exceptions import KaldiProcessingError
from montreal_forced_aligner.utils import (
KaldiFunction,
KaldiProcessWorker,
Stopped,
log_kaldi_errors,
parse_logs,
thirdparty_binary,
)
__all__ = ["SatTrainer", "AccStatsTwoFeatsFunction", "AccStatsTwoFeatsArguments"]
class AccStatsTwoFeatsArguments(NamedTuple):
    """Arguments for :func:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsFunction`"""
    log_path: str  # path the job writes its Kaldi log to
    dictionaries: List[str]  # dictionary names handled by this job
    ali_paths: Dict[str, str]  # per-dictionary alignment archive paths
    acc_paths: Dict[str, str]  # per-dictionary output accumulator paths
    model_path: str  # acoustic model (.mdl) to accumulate stats for
    feature_strings: Dict[str, str]  # speaker-adapted feature specifiers
    si_feature_strings: Dict[str, str]  # speaker-independent feature specifiers
class AccStatsTwoFeatsFunction(KaldiFunction):
    """
    Multiprocessing function for accumulating stats across speaker-independent and
    speaker-adapted features
    See Also
    --------
    :meth:`.SatTrainer.create_align_model`
        Main function that calls this function in parallel
    :meth:`.SatTrainer.acc_stats_two_feats_arguments`
        Job method for generating arguments for this function
    :kaldi_src:`ali-to-post`
        Relevant Kaldi binary
    :kaldi_src:`gmm-acc-stats-twofeats`
        Relevant Kaldi binary
    Parameters
    ----------
    args: :class:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsArguments`
        Arguments for the function
    """
    # Matches per-utterance progress lines on gmm-acc-stats-twofeats stderr
    progress_pattern = re.compile(r"^LOG \(gmm-acc-stats-twofeats.* Average like for this file.*")
    # Matches the final summary line with utterance/error counts
    done_pattern = re.compile(
        r"^LOG \(gmm-acc-stats-twofeats.*Done (?P<utterances>\d+) files, (?P<no_posteriors>\d+) with no posteriors, (?P<no_second_features>\d+) with no second features, (?P<errors>\d+) with other errors.$"
    )
    def __init__(self, args: AccStatsTwoFeatsArguments):
        # Unpack the job's argument tuple onto the instance
        self.log_path = args.log_path
        self.dictionaries = args.dictionaries
        self.ali_paths = args.ali_paths
        self.acc_paths = args.acc_paths
        self.model_path = args.model_path
        self.feature_strings = args.feature_strings
        self.si_feature_strings = args.si_feature_strings
    def run(self):
        """Run the function"""
        # Generator: yields (utterances, no_posteriors, no_second_features,
        # errors) tuples as progress/summary lines appear on stderr.
        with open(self.log_path, "w", encoding="utf8") as log_file:
            for dict_name in self.dictionaries:
                ali_path = self.ali_paths[dict_name]
                acc_path = self.acc_paths[dict_name]
                feature_string = self.feature_strings[dict_name]
                si_feature_string = self.si_feature_strings[dict_name]
                # Pipeline: ali-to-post | gmm-acc-stats-twofeats
                ali_to_post_proc = subprocess.Popen(
                    [thirdparty_binary("ali-to-post"), f"ark:{ali_path}", "ark:-"],
                    stderr=log_file,
                    stdout=subprocess.PIPE,
                    env=os.environ,
                )
                acc_proc = subprocess.Popen(
                    [
                        thirdparty_binary("gmm-acc-stats-twofeats"),
                        self.model_path,
                        feature_string,
                        si_feature_string,
                        "ark,s,cs:-",
                        acc_path,
                    ],
                    stderr=subprocess.PIPE,
                    encoding="utf8",
                    stdin=ali_to_post_proc.stdout,
                    env=os.environ,
                )
                # Tee stderr to the log file while parsing it for progress
                for line in acc_proc.stderr:
                    log_file.write(line)
                    m = self.progress_pattern.match(line.strip())
                    if m:
                        # One more utterance processed
                        yield 1, 0, 0, 0
                    else:
                        m = self.done_pattern.match(line.strip())
                        if m:
                            yield int(m.group("utterances")), int(m.group("no_posteriors")), int(
                                m.group("no_second_features")
                            ), int(m.group("errors"))
class SatTrainer(TriphoneTrainer):
    """
    Speaker adapted trainer (SAT), inherits from TriphoneTrainer
    Parameters
    ----------
    subset : int
        Number of utterances to use, defaults to 10000
    num_leaves : int
        Number of states in the decision tree, defaults to 2500
    max_gaussians : int
        Number of gaussians in the decision tree, defaults to 15000
    power : float
        Exponent for number of gaussians according to occurrence counts, defaults to 0.2
    See Also
    --------
    :class:`~montreal_forced_aligner.acoustic_modeling.triphone.TriphoneTrainer`
        For acoustic model training parsing parameters
    Attributes
    ----------
    fmllr_iterations : list
        List of iterations to perform fMLLR calculation
    """

    def __init__(
        self,
        subset: int = 10000,
        num_leaves: int = 2500,
        max_gaussians: int = 15000,
        power: float = 0.2,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.subset = subset
        self.num_leaves = num_leaves
        self.max_gaussians = max_gaussians
        self.power = power
        # Populated by compute_calculated_properties().
        self.fmllr_iterations = []

    def acc_stats_two_feats_arguments(self) -> List[AccStatsTwoFeatsArguments]:
        """
        Generate Job arguments for :func:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsFunction`
        Returns
        -------
        list[:class:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsArguments`]
            Arguments for processing
        """
        # Both speaker-adapted and speaker-independent feature strings are
        # needed so stats can be accumulated across the two feature streams.
        feat_strings = self.worker.construct_feature_proc_strings()
        si_feat_strings = self.worker.construct_feature_proc_strings(speaker_independent=True)
        return [
            AccStatsTwoFeatsArguments(
                os.path.join(self.working_log_directory, f"acc_stats_two_feats.{j.name}.log"),
                j.current_dictionary_names,
                j.construct_path_dictionary(self.working_directory, "ali", "ark"),
                j.construct_path_dictionary(self.working_directory, "two_feat_acc", "ark"),
                self.model_path,
                feat_strings[j.name],
                si_feat_strings[j.name],
            )
            for j in self.jobs
        ]

    def calc_fmllr(self) -> None:
        """Delegate fMLLR transform estimation to the corpus worker."""
        self.worker.calc_fmllr()

    def compute_calculated_properties(self) -> None:
        """Generate realignment iterations, initial gaussians, and fMLLR iterations based on configuration"""
        super().compute_calculated_properties()
        self.fmllr_iterations = []
        # Schedule fMLLR estimation on every other early iteration (first
        # quarter of training), plus once more at the halfway point.
        max_fmllr_iter = int(self.num_iterations / 2) - 1
        for i in range(1, max_fmllr_iter):
            if i < max_fmllr_iter / 2 and i % 2 == 0:
                self.fmllr_iterations.append(i)
        self.fmllr_iterations.append(max_fmllr_iter)

    def _trainer_initialization(self) -> None:
        """Speaker adapted training initialization"""
        self.speaker_independent = False
        # A first-iteration model means initialization already completed in a
        # previous run; nothing to redo.
        if os.path.exists(os.path.join(self.working_directory, "1.mdl")):
            return
        # Carry over the LDA transform from the previous aligner, if present.
        if os.path.exists(os.path.join(self.previous_aligner.working_directory, "lda.mat")):
            shutil.copyfile(
                os.path.join(self.previous_aligner.working_directory, "lda.mat"),
                os.path.join(self.working_directory, "lda.mat"),
            )
        # Standard tree/model bootstrap shared with triphone training.
        self.tree_stats()
        self._setup_tree()
        self.compile_train_graphs()
        self.convert_alignments()
        os.rename(self.model_path, self.next_model_path)
        self.iteration = 1
        if os.path.exists(os.path.join(self.previous_aligner.working_directory, "trans.0.ark")):
            # Reuse the fMLLR transforms already computed by the previous
            # aligner instead of recomputing them.
            for j in self.jobs:
                for path in j.construct_path_dictionary(
                    self.previous_aligner.working_directory, "trans", "ark"
                ).values():
                    shutil.copy(
                        path,
                        path.replace(
                            self.previous_aligner.working_directory, self.working_directory
                        ),
                    )
        else:
            # No previous transforms available; compute fresh fMLLR transforms.
            self.worker.current_trainer = self
            self.calc_fmllr()
        parse_logs(self.working_log_directory)

    def finalize_training(self) -> None:
        """
        Finalize training and create a speaker independent model for initial alignment
        Raises
        ------
        :class:`~montreal_forced_aligner.exceptions.KaldiProcessingError`
            If there were any errors in running Kaldi binaries
        """
        try:
            self.create_align_model()
            super().finalize_training()
            # create_align_model() wrote the .alimdl next to the final model;
            # keep a canonically named copy for later alignment runs.
            # NOTE(review): assumes model_path is f"{num_iterations+1}.mdl" at
            # this point -- confirm against the base trainer.
            shutil.copy(
                os.path.join(self.working_directory, f"{self.num_iterations+1}.alimdl"),
                os.path.join(self.working_directory, "final.alimdl"),
            )
        except Exception as e:
            if isinstance(e, KaldiProcessingError):
                log_kaldi_errors(e.error_logs, self.logger)
                e.update_log_file(self.logger)
            raise

    def train_iteration(self) -> None:
        """
        Run a single training iteration
        """
        # Skip work if this iteration's model already exists (resumed run).
        if os.path.exists(self.next_model_path):
            self.iteration += 1
            return
        if self.iteration in self.realignment_iterations:
            self.align_utterances()
            if self.debug:
                self.compute_alignment_improvement()
        # Re-estimate fMLLR transforms on scheduled iterations.
        if self.iteration in self.fmllr_iterations:
            self.calc_fmllr()
        self.acc_stats()
        parse_logs(self.working_log_directory)
        if self.iteration < self.final_gaussian_iteration:
            self.increment_gaussians()
        self.iteration += 1

    @property
    def alignment_model_path(self) -> str:
        """Alignment model path"""
        # Prefer the speaker-independent .alimdl when it has been created;
        # otherwise fall back to the speaker-adapted model.
        path = self.model_path.replace(".mdl", ".alimdl")
        if os.path.exists(path):
            return path
        return self.model_path

    def create_align_model(self) -> None:
        """
        Create alignment model for speaker-adapted training that will use speaker-independent
        features in later aligning.
        See Also
        --------
        :func:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsFunction`
            Multiprocessing helper function for each job
        :meth:`.SatTrainer.acc_stats_two_feats_arguments`
            Job method for generating arguments for the helper function
        :kaldi_src:`gmm-est`
            Relevant Kaldi binary
        :kaldi_src:`gmm-sum-accs`
            Relevant Kaldi binary
        :kaldi_steps:`train_sat`
            Reference Kaldi script
        """
        self.logger.info("Creating alignment model for speaker-independent features...")
        begin = time.time()
        arguments = self.acc_stats_two_feats_arguments()
        with tqdm.tqdm(total=self.num_utterances) as pbar:
            if self.use_mp:
                # Fan the per-job accumulation out to worker processes; results
                # come back through a shared queue for progress reporting.
                manager = mp.Manager()
                error_dict = manager.dict()
                return_queue = manager.Queue()
                stopped = Stopped()
                procs = []
                for i, args in enumerate(arguments):
                    function = AccStatsTwoFeatsFunction(args)
                    p = KaldiProcessWorker(i, return_queue, function, error_dict, stopped)
                    procs.append(p)
                    p.start()
                while True:
                    try:
                        (
                            num_utterances,
                            no_posteriors,
                            no_second_features,
                            errors,
                        ) = return_queue.get(timeout=1)
                        if stopped.stop_check():
                            continue
                    except Empty:
                        # Queue is empty: exit only once every worker reports
                        # finished (for/else), otherwise keep polling.
                        for proc in procs:
                            if not proc.finished.stop_check():
                                break
                        else:
                            break
                        continue
                    # Progress counts every file, including error categories.
                    pbar.update(num_utterances + no_posteriors + no_second_features + errors)
                for p in procs:
                    p.join()
                if error_dict:
                    for v in error_dict.values():
                        raise v
            else:
                # Single-process fallback: run each job's generator inline.
                for args in arguments:
                    function = AccStatsTwoFeatsFunction(args)
                    for (
                        num_utterances,
                        no_posteriors,
                        no_second_features,
                        errors,
                    ) in function.run():
                        pbar.update(num_utterances + no_posteriors + no_second_features + errors)
        log_path = os.path.join(self.working_log_directory, "align_model_est.log")
        with open(log_path, "w", encoding="utf8") as log_file:
            # Sum all per-job accumulators and re-estimate the model into the
            # speaker-independent .alimdl, keeping gaussian counts fixed.
            acc_files = []
            for x in arguments:
                acc_files.extend(x.acc_paths.values())
            sum_proc = subprocess.Popen(
                [thirdparty_binary("gmm-sum-accs"), "-"] + acc_files,
                stderr=log_file,
                stdout=subprocess.PIPE,
                env=os.environ,
            )
            est_proc = subprocess.Popen(
                [
                    thirdparty_binary("gmm-est"),
                    "--remove-low-count-gaussians=false",
                    f"--power={self.power}",
                    self.model_path,
                    "-",
                    self.model_path.replace(".mdl", ".alimdl"),
                ],
                stdin=sum_proc.stdout,
                stderr=log_file,
                env=os.environ,
            )
            est_proc.communicate()
            parse_logs(self.working_log_directory)
            if not self.debug:
                # Accumulator files are intermediate; drop them unless debugging.
                for f in acc_files:
                    os.remove(f)
        self.logger.debug(f"Alignment model creation took {time.time() - begin}")
| MontrealCorpusTools/Montreal-Forced-Aligner | montreal_forced_aligner/acoustic_modeling/sat.py | Python | mit | 14,192 |
#!/usr/bin/env python
__author__ = "Andrew Hankinson (andrew.hankinson@mail.mcgill.ca)"
__version__ = "1.5"
__date__ = "2011"
__copyright__ = "Creative Commons Attribution"
__license__ = """The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE."""
import multiprocessing
from optparse import OptionParser
import os
import sys
import hashlib
import codecs
import re
from pybagit.exceptions import *
from functools import reduce
# declare a default hashalgorithm
HASHALG = 'sha1'  # hashlib algorithm name used for manifests ('sha1' or 'md5')
ENCODING = "utf-8"  # encoding used when decoding byte pathnames for the manifest
def write_manifest(datadir, encoding, update=False):
    """Write (or update) the checksum manifest for a bag's data directory.

    The manifest is written to ``manifest-<HASHALG>.txt`` in the bag root
    (the parent of *datadir*), one ``<checksum> <relative path>`` line per
    file, sorted by path.

    Parameters
    ----------
    datadir : str
        Path to the bag's data directory whose files are checksummed.
    encoding : str
        Text encoding used to read and write the manifest file.
    update : bool
        If True, reuse checksums already recorded in an existing manifest and
        only hash files not listed there.
    """
    bag_root = os.path.split(os.path.abspath(datadir))[0]
    manifest_file = os.path.join(bag_root, "manifest-{0}.txt".format(HASHALG))
    checksums = dict()
    files_to_checksum = set(dirwalk(datadir))
    if update and os.path.isfile(manifest_file):
        # Reuse checksums from the existing manifest so only new files get
        # re-hashed; entries for files that no longer exist are dropped.
        for line in codecs.open(manifest_file, 'rb', encoding):
            checksum, file_ = line.strip().split(' ', 1)
            full_file = os.path.join(bag_root, file_)
            if full_file in files_to_checksum:
                files_to_checksum.remove(full_file)
                checksums[full_file] = checksum
    # Hash the remaining files in parallel; csumfile yields (digest, path).
    p = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    result = p.map_async(csumfile, files_to_checksum)
    checksums.update((k, v) for v, k in result.get())
    p.close()
    p.join()
    mfile = codecs.open(manifest_file, 'wb', encoding)
    # BUG FIX: dict.iteritems() does not exist on Python 3; items() works on
    # both. Likewise str has no .decode() on Python 3, so only decode when the
    # normalized pathname is actually a byte string.
    for file_, checksum in sorted(checksums.items()):
        rp = os.path.relpath(file_, bag_root)
        fl = ensure_unix_pathname(rp)
        if isinstance(fl, bytes):
            fl = fl.decode(ENCODING, 'replace')
        mfile.write(u"{0} {1}\n".format(checksum, fl))
    mfile.close()
def dirwalk(datadir):
    """Return a list of the paths of all files under *datadir*, recursively."""
    return [
        os.path.join(parent, name)
        for parent, _subdirs, names in os.walk(datadir)
        for name in names
    ]
def csumfile(filename, algorithm=None):
    """Return ``(hexdigest, filename)`` for the contents of *filename*.

    Based on
    http://abstracthack.wordpress.com/2007/10/19/calculating-md5-checksum/

    Parameters
    ----------
    filename : str
        Path of the file to checksum.
    algorithm : str, optional
        hashlib algorithm name ('sha1' or 'md5'); defaults to the
        module-level HASHALG setting, preserving the old behaviour.
    """
    hashalg = getattr(hashlib, algorithm or HASHALG)()
    blocksize = 0x10000
    # BUG FIX: the file is opened in binary mode, so the EOF sentinel must be
    # b"" -- comparing reads against "" never matches on Python 3, and the old
    # iter()-based loop spun forever at end of file.
    with open(filename, 'rb') as fd:
        for block in iter(lambda: fd.read(blocksize), b""):
            hashalg.update(block)
    return (hashalg.hexdigest(), filename)
def ensure_unix_pathname(pathname):
    """Normalize *pathname* to use forward slashes on Windows.

    On every other platform the name is returned unchanged, since only
    Windows produces backslash-separated paths.
    """
    # it's only windows we have to worry about
    if sys.platform != "win32":
        return pathname
    # str.replace is simpler and faster than the regex substitution the
    # original used for a fixed single-character replacement.
    return pathname.replace("\\", "/")
if __name__ == "__main__":
    # Command-line entry point: write/update the bag manifest for a data dir.
    usage = "%prog [options] data_directory"
    # BUG FIX: `usage` was previously assigned after the parser was built and
    # never passed to it, so --help showed optparse's default usage string.
    parser = OptionParser(usage=usage)
    parser.add_option(
        "-a",
        "--algorithm",
        action="store",
        help="checksum algorithm to use (sha1|md5)")
    parser.add_option(
        "-c",
        "--encoding",
        action="store",
        help="File encoding to write manifest")
    parser.add_option(
        "-u",
        "--update",
        action="store_true",
        help="Only update new/removed files")
    (options, args) = parser.parse_args()
    if options.algorithm:
        if options.algorithm not in ('md5', 'sha1'):
            raise BagCheckSumNotValid(
                'You must specify either "md5" or "sha1" as the checksum algorithm')
        HASHALG = options.algorithm
    if options.encoding:
        ENCODING = options.encoding
    if len(args) < 1:
        parser.error("You must specify a data directory")
    write_manifest(args[0], ENCODING, update=options.update)
| WoLpH/pybagit | pybagit/multichecksum.py | Python | mit | 4,828 |
# -*- coding: utf-8 -*-
# Site settings for an ijd8 blog deployed on SAE (Sina App Engine).
import sae.const
DEBUG = False
SITE_TITLE = u"博客标题"  # blog title
SITE_SUB_TITLE = u"博客副标题"  # blog subtitle
SITE_KEYWORDS = u"博客关键字"  # meta keywords
SITE_DECRIPTION = u"博客描述"  # meta description
AUTHOR_NAME = u"博客作者"  # blog author; shown in the RSS feed
#CONACT_MAIL = "xxx@gmail.com"  # contact mail; not used yet
THEMES = ['octopress','admin']  # available theme directories
LINK_BROLL = [
    {'text': u"爱简单吧", 'url': "http://www.ijd8.com", 'title': u"ijd8官方博客"},
    {'text': u"YouBBS", 'url': "http://youbbs.sinaapp.com", 'title': u"ijd8支持论坛"},
]
MAJOR_DOMAIN = 'www.yourdomain.com'  # primary domain
## MySQL database settings, taken from the SAE runtime constants
MYSQL_DB = sae.const.MYSQL_DB
MYSQL_USER = sae.const.MYSQL_USER
MYSQL_PASS = sae.const.MYSQL_PASS
MYSQL_HOST = "%s:%s" % (sae.const.MYSQL_HOST_S, sae.const.MYSQL_PORT)  # slave host:port
MYSQL_HOST_M = "%s:%s" % (sae.const.MYSQL_HOST, sae.const.MYSQL_PORT)  # master host:port
JQUERY = "http://lib.sinaapp.com/js/jquery/1.9.1/jquery-1.9.1.min.js"
COOKIE_SECRET = "11orTzKXQAsaYdkL5gEtGeJJFuYh7EQnp2XdTP1o/Vo="
LANGUAGE = 'zh-CN'
EACH_PAGE_POST_NUM = 10  # posts shown per page
RECENT_POST_NUM = 10  # recent posts shown in the sidebar
RELATED_NUM = 10  # related posts shown
SIDER_TAG_NUM = 100  # tags shown in the sidebar
SIDER_CAT_NUM = 100  # categories shown in the sidebar
SHORTEN_CONTENT_WORDS = 150  # characters kept when truncating posts in list views
DESCRIPTION_CUT_WORDS = 100  # characters used for the meta description
FEED_NUM = 10  # posts included in the feed output
####### Attachment storage below: choose ONE of SAE Storage or Qiniu (free quota available)
## 1) SAE Storage must first be enabled in the SAE control panel
BUCKET = ""  # domain name, e.g. "upload"; leave empty if unused or when using Qiniu
## 2) Qiniu: registration grants 10G permanent space and 10G monthly traffic, see http://t.cn/z8h5lsg
QN_AK = ""  # Qiniu ACCESS_KEY
QN_SK = ""  # Qiniu SECRET_KEY
QN_BUCKET = ""  # bucket name, e.g. "upload"
from os.path import dirname, join
from pystacia import lena
# Directory where the generated sample images are written.
output_dir = join(dirname(__file__), '../_static/generated')
# Load the 256px Lena test image, desaturate it, and save the result.
img = lena(256)
img.desaturate()
img.write(join(output_dir, 'lena_desaturate.jpg'))
img.close()
| squeaky-pl/pystacia | doc/source/image/desaturate.py | Python | mit | 216 |
import _plotly_utils.basevalidators
class ColorbarValidator(_plotly_utils.basevalidators.CompoundValidator):
    # Validator for the ``bar.marker.colorbar`` compound property.
    # NOTE(review): this module appears to be auto-generated by Plotly's
    # codegen; changes are normally made in the generator, not by hand here.
    def __init__(self, plotly_name="colorbar", parent_name="bar.marker", **kwargs):
        # data_class_str names the compound object class this validator builds;
        # data_docs is the generated per-property help text. Both can be
        # overridden through **kwargs.
        super(ColorbarValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop("data_class_str", "ColorBar"),
            data_docs=kwargs.pop(
                "data_docs",
                """
            bgcolor
                Sets the color of padded area.
            bordercolor
                Sets the axis line color.
            borderwidth
                Sets the width (in px) or the border enclosing
                this color bar.
            dtick
                Sets the step in-between ticks on this axis.
                Use with `tick0`. Must be a positive number, or
                special strings available to "log" and "date"
                axes. If the axis `type` is "log", then ticks
                are set every 10^(n*dtick) where n is the tick
                number. For example, to set a tick mark at 1,
                10, 100, 1000, ... set dtick to 1. To set tick
                marks at 1, 100, 10000, ... set dtick to 2. To
                set tick marks at 1, 5, 25, 125, 625, 3125, ...
                set dtick to log_10(5), or 0.69897000433. "log"
                has several special values; "L<f>", where `f`
                is a positive number, gives ticks linearly
                spaced in value (but not position). For example
                `tick0` = 0.1, `dtick` = "L0.5" will put ticks
                at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
                plus small digits between, use "D1" (all
                digits) or "D2" (only 2 and 5). `tick0` is
                ignored for "D1" and "D2". If the axis `type`
                is "date", then you must convert the time to
                milliseconds. For example, to set the interval
                between ticks to one day, set `dtick` to
                86400000.0. "date" also has special values
                "M<n>" gives ticks spaced by a number of
                months. `n` must be a positive integer. To set
                ticks on the 15th of every third month, set
                `tick0` to "2000-01-15" and `dtick` to "M3". To
                set ticks every 4 years, set `dtick` to "M48"
            exponentformat
                Determines a formatting rule for the tick
                exponents. For example, consider the number
                1,000,000,000. If "none", it appears as
                1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
                "power", 1x10^9 (with 9 in a super script). If
                "SI", 1G. If "B", 1B.
            len
                Sets the length of the color bar This measure
                excludes the padding of both ends. That is, the
                color bar length is this length minus the
                padding on both ends.
            lenmode
                Determines whether this color bar's length
                (i.e. the measure in the color variation
                direction) is set in units of plot "fraction"
                or in *pixels. Use `len` to set the value.
            minexponent
                Hide SI prefix for 10^n if |n| is below this
                number. This only has an effect when
                `tickformat` is "SI" or "B".
            nticks
                Specifies the maximum number of ticks for the
                particular axis. The actual number of ticks
                will be chosen automatically to be less than or
                equal to `nticks`. Has an effect only if
                `tickmode` is set to "auto".
            orientation
                Sets the orientation of the colorbar.
            outlinecolor
                Sets the axis line color.
            outlinewidth
                Sets the width (in px) of the axis line.
            separatethousands
                If "true", even 4-digit integers are separated
            showexponent
                If "all", all exponents are shown besides their
                significands. If "first", only the exponent of
                the first tick is shown. If "last", only the
                exponent of the last tick is shown. If "none",
                no exponents appear.
            showticklabels
                Determines whether or not the tick labels are
                drawn.
            showtickprefix
                If "all", all tick labels are displayed with a
                prefix. If "first", only the first tick is
                displayed with a prefix. If "last", only the
                last tick is displayed with a suffix. If
                "none", tick prefixes are hidden.
            showticksuffix
                Same as `showtickprefix` but for tick suffixes.
            thickness
                Sets the thickness of the color bar This
                measure excludes the size of the padding, ticks
                and labels.
            thicknessmode
                Determines whether this color bar's thickness
                (i.e. the measure in the constant color
                direction) is set in units of plot "fraction"
                or in "pixels". Use `thickness` to set the
                value.
            tick0
                Sets the placement of the first tick on this
                axis. Use with `dtick`. If the axis `type` is
                "log", then you must take the log of your
                starting tick (e.g. to set the starting tick to
                100, set the `tick0` to 2) except when
                `dtick`=*L<f>* (see `dtick` for more info). If
                the axis `type` is "date", it should be a date
                string, like date data. If the axis `type` is
                "category", it should be a number, using the
                scale where each category is assigned a serial
                number from zero in the order it appears.
            tickangle
                Sets the angle of the tick labels with respect
                to the horizontal. For example, a `tickangle`
                of -90 draws the tick labels vertically.
            tickcolor
                Sets the tick color.
            tickfont
                Sets the color bar's tick label font
            tickformat
                Sets the tick label formatting rule using d3
                formatting mini-languages which are very
                similar to those in Python. For numbers, see: h
                ttps://github.com/d3/d3-format/tree/v1.4.5#d3-f
                ormat. And for dates see:
                https://github.com/d3/d3-time-
                format/tree/v2.2.3#locale_format. We add two
                items to d3's date formatter: "%h" for half of
                the year as a decimal number as well as "%{n}f"
                for fractional seconds with n digits. For
                example, *2016-10-13 09:15:23.456* with
                tickformat "%H~%M~%S.%2f" would display
                "09~15~23.46"
            tickformatstops
                A tuple of :class:`plotly.graph_objects.bar.mar
                ker.colorbar.Tickformatstop` instances or dicts
                with compatible properties
            tickformatstopdefaults
                When used in a template (as layout.template.dat
                a.bar.marker.colorbar.tickformatstopdefaults),
                sets the default property values to use for
                elements of bar.marker.colorbar.tickformatstops
            ticklabeloverflow
                Determines how we handle tick labels that would
                overflow either the graph div or the domain of
                the axis. The default value for inside tick
                labels is *hide past domain*. In other cases
                the default is *hide past div*.
            ticklabelposition
                Determines where tick labels are drawn relative
                to the ticks. Left and right options are used
                when `orientation` is "h", top and bottom when
                `orientation` is "v".
            ticklabelstep
                Sets the spacing between tick labels as
                compared to the spacing between ticks. A value
                of 1 (default) means each tick gets a label. A
                value of 2 means shows every 2nd label. A
                larger value n means only every nth tick is
                labeled. `tick0` determines which labels are
                shown. Not implemented for axes with `type`
                "log" or "multicategory", or when `tickmode` is
                "array".
            ticklen
                Sets the tick length (in px).
            tickmode
                Sets the tick mode for this axis. If "auto",
                the number of ticks is set via `nticks`. If
                "linear", the placement of the ticks is
                determined by a starting position `tick0` and a
                tick step `dtick` ("linear" is the default
                value if `tick0` and `dtick` are provided). If
                "array", the placement of the ticks is set via
                `tickvals` and the tick text is `ticktext`.
                ("array" is the default value if `tickvals` is
                provided).
            tickprefix
                Sets a tick label prefix.
            ticks
                Determines whether ticks are drawn or not. If
                "", this axis' ticks are not drawn. If
                "outside" ("inside"), this axis' are drawn
                outside (inside) the axis lines.
            ticksuffix
                Sets a tick label suffix.
            ticktext
                Sets the text displayed at the ticks position
                via `tickvals`. Only has an effect if
                `tickmode` is set to "array". Used with
                `tickvals`.
            ticktextsrc
                Sets the source reference on Chart Studio Cloud
                for `ticktext`.
            tickvals
                Sets the values at which ticks on this axis
                appear. Only has an effect if `tickmode` is set
                to "array". Used with `ticktext`.
            tickvalssrc
                Sets the source reference on Chart Studio Cloud
                for `tickvals`.
            tickwidth
                Sets the tick width (in px).
            title
                :class:`plotly.graph_objects.bar.marker.colorba
                r.Title` instance or dict with compatible
                properties
            titlefont
                Deprecated: Please use
                bar.marker.colorbar.title.font instead. Sets
                this color bar's title font. Note that the
                title's font used to be set by the now
                deprecated `titlefont` attribute.
            titleside
                Deprecated: Please use
                bar.marker.colorbar.title.side instead.
                Determines the location of color bar's title
                with respect to the color bar. Defaults to
                "top" when `orientation` if "v" and  defaults
                to "right" when `orientation` if "h". Note that
                the title's location used to be set by the now
                deprecated `titleside` attribute.
            x
                Sets the x position of the color bar (in plot
                fraction). Defaults to 1.02 when `orientation`
                is "v" and 0.5 when `orientation` is "h".
            xanchor
                Sets this color bar's horizontal position
                anchor. This anchor binds the `x` position to
                the "left", "center" or "right" of the color
                bar. Defaults to "left" when `orientation` is
                "v" and "center" when `orientation` is "h".
            xpad
                Sets the amount of padding (in px) along the x
                direction.
            y
                Sets the y position of the color bar (in plot
                fraction). Defaults to 0.5 when `orientation`
                is "v" and 1.02 when `orientation` is "h".
            yanchor
                Sets this color bar's vertical position anchor
                This anchor binds the `y` position to the
                "top", "middle" or "bottom" of the color bar.
                Defaults to "middle" when `orientation` is "v"
                and "bottom" when `orientation` is "h".
            ypad
                Sets the amount of padding (in px) along the y
                direction.
""",
            ),
            **kwargs
        )
| plotly/plotly.py | packages/python/plotly/plotly/validators/bar/marker/_colorbar.py | Python | mit | 12,732 |