repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
MaximeKjaer/dailyprogrammer-challenges | Challenge-172/02-Intermediate-2/password_maker.py | Python | mit | 774 | 0.009044 | #!/usr/bin/python
import sys
if len(sys.argv)<2:
print "\nYou need to provide an argument!\nThe syntax | is the following:\n\npython password_maker.py [password]"
exit()
import hashlib
import uuid
password = sys.argv[1]
sal | t = uuid.uuid4().hex
hashed_password = hashlib.sha512(password + salt).hexdigest()
f = open('salt.txt', 'wb')
f.write(salt)
f.close()
f = open('encrypted.txt', 'wb')
f.write(hashed_password)
f.close()
print "\n=============================================================================\n\n"
print "Your password has been hashed and salted, and is ready to use on the web interface."
print "To reset it, log in through SSH and run this script again."
print "\n\n=============================================================================\n"
|
shimpe/frescobaldi | setup.py | Python | gpl-2.0 | 3,867 | 0.00931 | import os
import sys
from frescobaldi_app import appinfo
try:
from setuptools import setup
USE_SETUPTOOLS = True
except ImportError:
from distutils.core import setup
USE_SETUPTOOLS = False
def packagelist(directory):
"""Returns a sorted list with package names for all packages under the given directory."""
return list(sorted(root.replace(os.sep, '.')
for root, dirs, files in os.walk(directory)
if '__init__.py' in files))
scripts = ['frescobaldi']
packages = packagelist('frescobaldi_app')
package_data = {
'frescobaldi_app.css': ['*.png'],
'frescobaldi_app.help': ['*.png'],
'frescobaldi_app.hyphdicts': ['*.dic'],
'frescobaldi_app.icons': [
'*.ico',
'*.svg',
'*x*/*.png',
'Tango/index.theme',
'Tango/scalable/*.svg',
'TangoExt/index.theme',
'TangoExt/scalable/*.svg',
],
'frescobaldi_app.layoutcontrol': ['*.ly', '*.ily'],
'frescobaldi_app.po': ['*.mo'],
'frescobaldi_app.scorewiz': ['*.png'],
'frescobaldi_app.splashscreen': ['*.png'],
'frescobaldi_app.svgview': ['*.js', '*.html'],
'frescobaldi_app.symbols': ['*.svg'],
'frescobaldi_app.userguide': ['*.md', '*.png'],
}
options = {
'sdist': {
'force_manifest': 1,
}
}
if sys.platform.startswith('win'):
scripts.append('windows/frescobaldi-wininst.py')
options['bdist_wininst'] = {
'install_script': 'windows/frescobaldi-wininst.py',
'bitmap': 'windows/frescobaldi-wininst.bmp',
}
data_files = []
else:
data_files = [
('share/icons/hicolor/scalable/apps', ['frescobaldi_app/icons/frescobaldi.svg']),
('share/applications', ['frescobaldi.desktop']),
('share/man/man1', ['frescobaldi.1']),
]
setup_extra_args = {}
if USE_SETUPTOOLS:
setup_extra_args['install_requires'] = ['python-ly', 'python-poppler-qt4']
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: MacOS X',
'Environment :: Win32 (MS Windows)',
'Environment :: X11 Applications :: Qt',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU General Public License (GPL)',
# Natural Language :: Chinese (Hong Kong) is not yet accepted by pypi
#'Natural Language :: Chinese (Hong Kong)',
'Natural Language :: Chinese (Simplified)',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: Czech',
'Natural Language :: Dutch',
'Natural Language :: English',
'Natural Language :: French',
'Natural Language :: Galician',
'Natural Language :: German',
'Natural Language :: Italian',
'Natural Language :: Polish',
'Natural Language :: Portuguese (Brazilian)',
'Natural Language :: Russian',
'Natural Language :: Spanish',
'Natural Language :: Turkish',
'Natural Language :: Ukranian',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Progr | amming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Multimedia :: Sound/Audio',
'Topic :: Multimedia :: Graphics',
'Topic :: Text Editors',
]
setup(
name = appinfo.name,
version = appinfo.version,
description = appinfo.description,
long_descript | ion = appinfo.long_description,
maintainer = appinfo.maintainer,
maintainer_email = appinfo.maintainer_email,
url = appinfo.url,
license = appinfo.license,
scripts = scripts,
packages = packages,
package_data = package_data,
data_files = data_files,
classifiers = classifiers,
options = options,
**setup_extra_args
)
|
Maxence1/flask-wtf | examples/recaptcha/app.py | Python | bsd-3-clause | 1,299 | 0 | from flask import Flask, render_template, flash, session, redirect, url_for
from wtforms import Te | xtAreaField
from wtforms.validators import DataRequired
from flask.ext.wtf import Form
from flask.ext.wtf.r | ecaptcha import RecaptchaField
DEBUG = True
SECRET_KEY = 'secret'
# keys for localhost. Change as appropriate.
RECAPTCHA_PUBLIC_KEY = '6LeYIbsSAAAAACRPIllxA7wvXjIE411PfdB2gt2J'
RECAPTCHA_PRIVATE_KEY = '6LeYIbsSAAAAAJezaIq3Ft_hSTo0YtyeFG-JgRtu'
app = Flask(__name__)
app.config.from_object(__name__)
class CommentForm(Form):
comment = TextAreaField("Comment", validators=[DataRequired()])
recaptcha = RecaptchaField()
@app.route("/")
def index(form=None):
if form is None:
form = CommentForm()
comments = session.get("comments", [])
return render_template("index.html",
comments=comments,
form=form)
@app.route("/add/", methods=("POST",))
def add_comment():
form = CommentForm()
if form.validate_on_submit():
comments = session.pop('comments', [])
comments.append(form.comment.data)
session['comments'] = comments
flash("You have added a new comment")
return redirect(url_for("index"))
return index(form)
if __name__ == "__main__":
app.run()
|
liam2/larray | larray/example.py | Python | gpl-3.0 | 3,569 | 0.001961 | import os
from pathlib import Path
import larray as la
_TEST_DIR = Path(__file__).parent / 'tests'
EXAMPLE_FILES_DIR = _TEST_DIR / 'data'
AVAILABLE_EXAMPLE_DATA = {
'demography': EXAMPLE_FILES_DIR / 'demography.h5',
'demography_eurostat': EXAMPLE_FILES_DIR / 'demography_eurostat.h5'
}
EXAMPLE_EXCEL_TEMPLATES_DIR = _TEST_DIR / 'excel_template'
def get_example_filepath(fname) -> Path:
r"""Return absolute path to an example file if exist.
Parameters
----------
fname : str
Filename of an existing example file.
Returns
-------
Filepath
Absolute filepath to an example file if exists.
Notes
-----
A ValueError is raised if the provided filename does not represent an existing example file.
Examples
--------
>>> fpath = get_example_filepath('examples.xlsx')
"""
fpath = (EXAMPLE_FILES_DIR / fname).absolute()
if not fpath.exists():
AVAILABLE_EXAMPLE_FILES = os.listdir(EXAMPLE_FILES_DIR)
raise ValueError(f"Example file {fname} does not exist. "
f"Available example files are: {AVAILABLE_EXAMPLE_FILES}")
return fpath
# Note that we skip doctests because they require pytables, which is only an optional dependency and its hard
# to skip doctests selectively.
# CHECK: We might want to use .csv files for the example data, so that it can be loaded with any optional dependency.
def load_example_data(name):
r"""Load arr | ays used in the tutorial so that | all examples in it can be reproduced.
Parameters
----------
name : str
Example data to load. Available example datasets are:
- demography
- demography_eurostat
Returns
-------
Session
Session containing one or several arrays.
Examples
--------
>>> demo = load_example_data('demography') # doctest: +SKIP
>>> print(demo.summary()) # doctest: +SKIP
hh: time, geo, hh_type (26 x 3 x 7) [int64]
pop: time, geo, age, sex, nat (26 x 3 x 121 x 2 x 2) [int64]
qx: time, geo, age, sex, nat (26 x 3 x 121 x 2 x 2) [float64]
>>> demo = load_example_data('demography_eurostat') # doctest: +SKIP
>>> print(demo.summary()) # doctest: +SKIP
Metadata:
title: Demographic datasets for a small selection of countries in Europe
source: demo_jpan, demo_fasec, demo_magec and migr_imm1ctz tables from Eurostat
gender: gender ['Male' 'Female'] (2)
country: country ['Belgium' 'France' 'Germany'] (3)
country_benelux: country_benelux ['Belgium' 'Luxembourg' 'Netherlands'] (3)
citizenship: citizenship ['Belgium' 'Luxembourg' 'Netherlands'] (3)
time: time [2013 2014 2015 2016 2017] (5)
even_years: time[2014 2016] >> even_years (2)
odd_years: time[2013 2015 2017] >> odd_years (3)
births: country, gender, time (3 x 2 x 5) [int32]
deaths: country, gender, time (3 x 2 x 5) [int32]
immigration: country, citizenship, gender, time (3 x 3 x 2 x 5) [int32]
pop: country, gender, time (3 x 2 x 5) [int32]
pop_benelux: country, gender, time (3 x 2 x 5) [int32]
"""
if name is None:
name = 'demography'
if not isinstance(name, str):
raise TypeError("Expected string for argument example_data")
if name not in AVAILABLE_EXAMPLE_DATA:
available_datasets = list(AVAILABLE_EXAMPLE_DATA.keys())
raise ValueError(f"example_data must be chosen from list {available_datasets}")
return la.Session(AVAILABLE_EXAMPLE_DATA[name])
|
Ultimaker/Cura | plugins/UM3NetworkPrinting/src/Models/Http/ClusterPrintJobConstraints.py | Python | lgpl-3.0 | 717 | 0.004184 | # Copyright (c) 2021 Ultimake | r B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Optional
from ..BaseModel | import BaseModel
class ClusterPrintJobConstraints(BaseModel):
"""Class representing a cloud cluster print job constraint"""
def __init__(self, require_printer_name: Optional[str] = None, **kwargs) -> None:
"""Creates a new print job constraint.
:param require_printer_name: Unique name of the printer that this job should be printed on.
Should be one of the unique_name field values in the cluster, e.g. 'ultimakersystem-ccbdd30044ec'
"""
self.require_printer_name = require_printer_name
super().__init__(**kwargs)
|
RexFuzzle/sfepy | sfepy/base/plotutils.py | Python | bsd-3-clause | 4,703 | 0.03317 | import numpy as nm
try:
import matplotlib.pyplot as plt
import matplotlib as mpl
except (ImportError, RuntimeError):
plt = mpl = None
#print 'matplotlib import failed!'
from sfepy.base.base import output, pause
def spy(mtx, eps=None, color='b', **kwargs):
"""
Show sparsity structure of a `scipy.sparse` matrix.
"""
aux = mtx.tocoo()
ij, val = nm.concatenate((aux.row[:,nm.newaxis],
aux.col[:,nm.newaxis]), 1), aux.data
n_item = aux.getnnz()
n_row, n_col = aux.shape
if eps is not None:
output('using eps =', eps)
ij = nm.compress(nm.absolute(val) > eps, ij, 0)
n_item = ij.shape[0]
else:
output('showing all')
output('n_item:', n_item)
if n_item:
args = {'marker' : '.', 'markersize' : 0.5, 'markeredgewidth' : 0.5}
args.update(kwargs)
plt.plot(ij[:,1] + 0.5, ij[:,0] + 0.5, color, linestyle='None',
**args)
plt.axis([-0.5, n_row+0.5, -0.5, n_col+0.5])
plt.axis('image')
plt.xlabel(r'%d x %d: %d nnz, %.2f%% fill'
% (n_row, n_col, n_item, 100. * n_item /
(float(n_row) * float(n_col))))
ax = plt.gca()
ax.set_ylim(ax.get_ylim()[::-1])
def spy_and_show(mtx, **kwargs):
spy(mtx, **kwargs)
plt.show()
##
# 13.12.2005, c
def print_matrix_diff( title, legend, mtx1, mtx2, mtx_da, mtx_dr, iis ):
import copy
print '%s: ir, ic, %s, %s, adiff, rdiff' % ((title,) + tuple( legend ))
aux = copy.copy(mtx_da)
aux.data = nm.ones(mtx_da.data.sha | pe[0])
irs, ics = aux.nonzero()
for ii in iis:
ir, i | c = irs[ii], ics[ii]
print '%5d %5d %11.4e %11.4e %9.2e %9.2e'\
% (ir, ic, mtx1[ir,ic], mtx2[ir,ic], mtx_da[ir,ic], mtx_dr[ir,ic] )
print 'total: %d' % len( iis )
##
# 13.12.2005, c
# 14.12.2005
# 15.12.2005
# 18.07.2007
def plot_matrix_diff( mtx1, mtx2, delta, legend, mode ):
eps = 1e-16
print nm.amin( mtx1.data ), nm.amin( mtx2.data )
print nm.amax( mtx1.data ), nm.amax( mtx2.data )
mtx_da = mtx1.copy() # To preserve structure of mtx1.
mtx_da.data[:] = nm.abs( mtx1.data - mtx2.data )
mtx_dr = mtx_da.copy()
mtx_dr.data[:] = -1
iin = nm.where( nm.abs( mtx1.data ) > eps )[0]
mtx_dr.data[iin] = mtx_da.data[iin] / nm.abs( mtx1.data[iin] )
print nm.amin( mtx_da.data ), nm.amax( mtx_da.data )
print nm.amin( mtx_dr.data ), nm.amax( mtx_dr.data )
epsilon = max( 1e-5, 10 * delta )
print 'epsilon:', epsilon
pause()
ija = nm.where( mtx_da.data > epsilon )[0]
print_matrix_diff( '--- absolute diff', legend,
mtx1, mtx2, mtx_da, mtx_dr, ija )
pause()
iin = nm.where( nm.abs( mtx1.data ) > epsilon )[0]
ij = nm.where( nm.abs( mtx_dr.data[iin] ) > epsilon )[0]
ij = iin[ij]
print_matrix_diff( '--- relative diff', legend,
mtx1, mtx2, mtx_da, mtx_dr, ij )
pause()
ijb = nm.intersect1d( ija, ij )
print_matrix_diff( '--- a-r', legend,
mtx1, mtx2, mtx_da, mtx_dr, ijb )
pause()
ii = nm.argsort( mtx_dr.data[ijb] )
n_s = min( 20, len( ii ) )
ijbs = ijb[ii[-1:-n_s-1:-1]]
print_matrix_diff( '--- a-r 20 biggest (by r)', legend,
mtx1, mtx2, mtx_da, mtx_dr, ijbs )
pause()
if mode < 2: return
h = 100
plt.figure( h ); plt.clf()
plt.axes( [0.04, 0.6, 0.3, 0.3], frameon = True )
spy( mtx_da, epsilon )
plt.title( 'absolute diff' )
plt.axes( [0.68, 0.6, 0.3, 0.3], frameon = True )
iia = nm.where( mtx_dr.data )[0]
mtx_dr.data[nm.setdiff1d( iia, iin )] = 0.0
spy( mtx_dr, epsilon )
plt.title( 'relative diff' )
plt.axes( [0.36, 0.6, 0.3, 0.3], frameon = True )
mtx = mtx_dr.copy()
mtx.data[:] = 0.0
ii = nm.intersect1d( nm.where( mtx_dr.data > epsilon )[0],
nm.where( mtx_da.data > epsilon )[0] )
mtx.data[ii] = 1.0
spy( mtx, epsilon )
plt.title( 'a-r intersection' )
plt.axes( [0.04, 0.08, 0.42, 0.42], frameon = True )
spy( mtx1, epsilon )
plt.title( legend[0] )
plt.axes( [0.54, 0.08, 0.42, 0.42], frameon = True )
spy( mtx2, epsilon )
plt.title( legend[1] )
plt.show()
##
# 02.05.2006, c
def set_axes_font_size( ax, size ):
labels = ax.get_xticklabels() + ax.get_yticklabels()
for label in labels:
label.set_size( size )
##
# 27.09.2006, c
def font_size( size ):
return mpl.font_manager.FontProperties( size = size )
##
# 28.08.2007, c
def iplot( *args, **kwargs ):
plt.ion()
plt.plot( *args, **kwargs )
plt.draw()
plt.ioff()
pause()
|
isovic/aligneval | src/plot_with_seabourne_v2.py | Python | mit | 37,633 | 0.048628 | #! /usr/bin/python
import os;
import sys;
try:
import numpy as np;
except Exception, e:
USE_MATPLOTLIB = False;
print e;
print 'Warning: NumPy not installed!';
exit(0);
USE_MATPLOTLIB = True;
try:
import matplotlib.pyplot as plt;
except Exception, e:
USE_MATPLOTLIB = False;
print e;
print 'Warning: Matplotlib not installed!';
exit(0);
try:
from matplotlib.font_manager import FontProperties;
except Exception, e:
USE_MATPLOTLIB = False;
print e;
print 'Matplotlib problem 2!';
try:
import seaborn as sns;
except Exception, e:
USE_MATPLOTLIB = False;
print e;
print 'Warning: Seaborn Python module not installed!';
HIGH_DPI_PLOT = False;
HIGH_DPI_PLOT = True;
def ReadlinesWrapper(file_path):
try:
fp = open(file_path, 'r');
lines = fp.readlines();
fp.close();
return [line.strip() for line in lines];
except IOError:
sys.stderr.write('ERROR: Could not open file "%s" for reading!' % file_path);
return [];
def WritelineWrapper(line, file_path):
try:
fp = open(file_path, 'w');
fp.write(line);
fp.close();
except IOError:
sys.stderr.write('ERROR: Could not open file "%s" for writing!' % file_path);
# LoadPlotFile parses a plot CSV file which contains lines, each with > 3 columns:
# x or y - denoting the axis of the parameter
# name of the mapper or '-' if not available
# name of the parameter
# parameter values
# Since for each parameter, there can and will be more than one mapper reported, two
# dicts are used to store the data:
# 1. dict has the key of the atribute name, it is used to retrieve all mappers
# which have this atribute reported
# 2. for each key in the atribute dict, another dict is stored where keys are
# mapper names, and values are the split components of the original line.
def LoadPlotFile(plot_file_path, suffix=''):
lines = ReadlinesWrapper(plot_file_path);
if (lines == []):
return [];
atributes = {};
for line in lines:
if (len(line) == 0):
continue;
split_line = line.split('\t');
if (len(split_line) < 4):
continue;
if (len(split_line[0]) != 1 or (split_line[0] in 'xXyYzZ') == False):
continue;
axis_name = split_line[0];
mapper_name = split_line[1];
atribute_name = split_line[2];
atribute_values = split_line[3:];
if (suffix != ''):
mapper_name = mapper_name.split('-' + suffix)[0];
try:
mappers_in_atribute = atributes[atribute_name];
mappers_in_atribute[mapper_name] = split_line;
except Exception, e:
mappers_in_attribute = {};
mappers_in_attribute[mapper_name] = split_line;
atributes[atribute_name] = mappers_in_attribute;
return atributes;
def GetAtributesForPlot(atributes, x_atribute_name, y_atribute_name):
#print atributes;
x_value_dict = atributes[x_atribute_name];
if (len(x_value_dict) != 1):
print 'ERROR: X values are not selected properly! More than one occurance of parameter in the input file!';
return [];
x_value = x_value_dict.values()[0][3:];
y_value_dict = atributes[y_atribute_name];
y_values = [];
labels = [];
for key in sorted(y_value_dict.keys()):
labels.append(key);
y_values.append(y_value_dict[key][3:]);
return [x_value, y_values, labels];
def GetAtributesForROC(atributes, x_atribute_name, y_atribute_name):
x_value_dict = atributes[x_atribute_name];
x_values = [];
labels = [];
for key in sorted(x_value_dict.keys()):
labels.append(key);
x_values.append(x_value_dict[key][3:]);
y_value_dict = atributes[y_atribute_name];
y_values = [];
for key in labels:
y_values.append(y_value_dict[key][3:]);
return [x_values, y_values, labels];
#def AutoLabel(ax, rects):
#for rect in rects:
#h = rect.get_height()
##ax.text(rect.get_x()+rect.get_width()/2., 1.05*h, '%d'%int(h),
##ha='center', va='bottom')
#ax.text(rect.get_x()+rect.get_width()/2., 1.01*h, '%d'%int(h),
#ha='center', va='bottom')
def GetMapperName(label):
mapper_name = label;
# mapper_name = label.split('-')[0];
# try:
# mapper_name = mapper_name_lookup[mapper_name];
# except:
# mapper_name = mapper_name[0].upper() + mapper_name[1:];
return mapper_name;
#try:
#mapper_name = mapper_name_lookup[label];
#except:
#split_label = label.split('-');
#mapper_name = ' '.join(split_label[0:-1]);
#mapper_name = mapper_name[0].upper() + mapper_name[1:].lower();
#if (split_label[-1].startswith('params_')):
#mapper_name += ' (%s)' % ((split_label[-1].split('params_')[-1]));
#return mapper_name;
def PlotLines(x_value, y_values, labels, x_min='', x_max='', x_title='X', y_title='Y', title='', out_png_path=''):
fig = None;
if USE_MATPLOTLIB == True:
plt.clf();
#sns.set_style("whitegrid");
sns.set_style("darkgrid");
sns.set_style("white")
sns.set_style("ticks");
x_min_index = 0;
x_max_index = len(x_value);
if (x_min != ''):
i = 0;
while i < len(x_value):
if (float(x_value[i]) >= float(x_min)):
x_min_index = i;
break;
i += 1;
if (x_ | max != ''):
i = len(x_value) - 1;
while i >= 0:
if (float(x_value[i]) < float(x_max)):
x_max_index = i + 1;
break;
i -= 1;
i = 0;
while i < len(y_values):
mapper_name = GetMapperName(labels[i]);
# linestyle or ls [ '-' | '--' | '-.' | ' | :' | 'steps' | ...]
if (i < 5):
plt.plot(x_value[x_min_index:x_max_index], y_values[i][x_min_index:x_max_index], label=mapper_name);
elif (i < 10):
plt.plot(x_value[x_min_index:x_max_index], y_values[i][x_min_index:x_max_index], '--', label=mapper_name);
else:
plt.plot(x_value[x_min_index:x_max_index], y_values[i][x_min_index:x_max_index], '-.', label=mapper_name);
i += 1;
plt.grid();
lgd = plt.legend(loc='upper center', bbox_to_anchor=(0.5, -0.15), ncol=3)
plt.xlabel(x_title);
plt.ylabel(y_title);
#plt.title(title);
# plt.text(0.5, 1.08, title,
# horizontalalignment='center',
# fontsize=12,
# transform = plt.gca().transAxes)
#sns.despine();
sns.despine(offset=10, trim=True);
if (out_png_path != ''):
if (HIGH_DPI_PLOT == False):
plt.savefig(out_png_path, bbox_inches='tight'); # , dpi=1000);
else:
plt.savefig(out_png_path, bbox_inches='tight', dpi=1000);
def AutoLabel(ax, rects, prefix='', suffix=''):
for rect in rects:
h = rect.get_height()
#ax.text(rect.get_x()+rect.get_width()/2., 1.01*h, '%s%d%s' % (prefix, int(h), suffix),
#ha='center', va='bottom', fontsize=5)
#ax.text(rect.get_x()+rect.get_width()/2., 1.00*h + (-25), '%s' % (prefix),
#ha='center', va='bottom', fontsize=5)
#ax.text(rect.get_x()+rect.get_width()/2., 1.00*h + 100, '%d%s' % (int(h), suffix),
#ha='center', va='bottom', fontsize=10)
ax.text(rect.get_x()+rect.get_width()/2., 1.00*h + (0), '%s' % (prefix),
ha='center', va='bottom', fontsize=5)
ax.text(rect.get_x()+rect.get_width()/2., 1.00*h + 1, '%d%s' % (int(h), suffix),
ha='center', va='bottom', fontsize=10)
def PlotBars(x_value, y_values, labels, x_param_name, x_title='X', y_title='Y', title='', out_png_path=''):
fig = None;
if USE_MATPLOTLIB == True:
plt.clf();
#sns.set_style("whitegrid");
sns.set_style("darkgrid");
sns.set_style("white")
sns.set_style("ticks");
multihist_width = 0.75;
bar_width = multihist_width / len(y_values);
center = (multihist_width - bar_width) / 2;
#color_list = ['b', 'g', 'r', 'm', 'k', 'y'];
x_index = 0;
if (x_param_name == ''):
return;
i = 0;
while i < len(x_value):
if (x_value[i] == x_param_name):
x_index = i;
break;
i += 1;
x_value_num = np.arange(len(x_value));
color_list = plt.rcParams['axes.color_cycle']
i = 0;
print 'y_values: ', y_values;
while i < len(y_values):
print ('y_values[%d]: ' % i), y_values[i];
y_value_num = np.array([float(value) for value in y_values[i]]);
print 'y_value_num: ', y_value_num;
mapper_name = GetMapperName(labels[i]);
rect = plt.bar(i, y_value_num[x_index], width=bar_width, color=color_list[i],align='center', label=mapper_name);
AutoLabel(plt, rect, ('%' if '%' in y_title |
Achuth17/scikit-bio | skbio/io/tests/test_fastq.py | Python | bsd-3-clause | 30,554 | 0.000425 | # ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
from future.builtins import zip
from six import StringIO
import unittest
import warnings
from functools import partial
from skbio import (read, write, Sequence, DNA, RNA, Protein,
SequenceCollection, Alignment)
from skbio.io import FASTQFormatError
from skbio.io.fastq import (
_fastq_sniffer, _fastq_to_generator, _fastq_to_sequence_collection,
_fastq_to_alignment, _generator_to_fastq, _sequence_collection_to_fastq,
_alignment_to_fastq)
from skbio.util import get_data_path
import numpy as np
# Note: the example FASTQ files with file extension .fastq are taken from the
# following open-access publication's supplementary data:
#
# P.J.A. Cock, C.J. Fields, N. Goto, M.L. Heuer and P.M. Rice (2009). The
# Sanger FASTQ file format for sequences with quality scores, and the
# Solexa/Illumina FASTQ variants.
#
# See licenses/fastq-example-files-readme.txt for the original README that
# accompanied these files, which includes the terms of use and detailed
# description of the files.
#
# The example files bearing the original filenames have not been modified from
# their original form.
def _drop_kwargs(kwargs, *args):
for arg in args:
if arg in kwargs:
kwargs.pop(arg)
class TestSniffer(unittest.TestCase):
def setUp(self):
self.positives = [get_data_path(e) for e in [
'fastq_multi_seq_sanger',
'fastq_multi_blank_between_records',
'fastq_multi_ws_lines_between_records',
'fastq_multi_blank_end_of_file',
'fastq_multi_ws_lines_end_of_file',
'fastq_multi_whitespace_stripping',
'fastq_blank_lines',
'fastq_whitespace_only_lines',
'fastq_single_seq_illumina1.3',
'fastq_wrapping_as_illumina_no_description',
'fastq_wrapping_as_sanger_no_description',
'fastq_wrapping_original_sanger_no_description',
'fastq_writer_illumina1.3_defaults',
'fastq_writer_sanger_defaults',
'fastq_writer_sanger_non_defaults',
'fastq_5_blanks_start_of_file',
'fastq_5_ws_lines_start_of_file',
'illumina_full_range_as_illumina.fastq',
'illumina_full_range_as_sanger.fastq',
'illumina_full_range_original_illumina.fastq',
'longreads_as_illumina.fastq',
'longreads_as_sanger.fastq',
'longreads_original_sanger.fastq',
'misc_dn | a_as_illumina.fastq',
| 'misc_dna_as_sanger.fastq',
'misc_dna_original_sanger.fastq',
'misc_rna_as_illumina.fastq',
'misc_rna_as_sanger.fastq',
'misc_rna_original_sanger.fastq',
'sanger_full_range_as_illumina.fastq',
'sanger_full_range_as_sanger.fastq',
'sanger_full_range_original_sanger.fastq',
'solexa_full_range_original_solexa.fastq',
'wrapping_as_illumina.fastq',
'wrapping_as_sanger.fastq',
'wrapping_original_sanger.fastq'
]]
self.negatives = [get_data_path(e) for e in [
'empty',
'whitespace_only',
'fastq_multi_blank_start_of_file',
'fastq_multi_ws_lines_start_of_file',
'fastq_invalid_blank_after_header',
'fastq_invalid_blank_after_seq',
'fastq_invalid_blank_after_plus',
'fastq_invalid_blank_within_seq',
'fastq_invalid_blank_within_qual',
'fastq_invalid_ws_line_after_header',
'fastq_invalid_ws_line_after_seq',
'fastq_invalid_ws_line_after_plus',
'fastq_invalid_ws_line_within_seq',
'fastq_invalid_ws_line_within_qual',
'fastq_invalid_missing_header',
'fastq_invalid_missing_seq_data',
'error_diff_ids.fastq',
'error_double_qual.fastq',
'error_double_seq.fastq',
'error_long_qual.fastq',
'error_no_qual.fastq',
'error_qual_del.fastq',
'error_qual_escape.fastq',
'error_qual_null.fastq',
'error_qual_space.fastq',
'error_qual_tab.fastq',
'error_qual_unit_sep.fastq',
'error_qual_vtab.fastq',
'error_short_qual.fastq',
'error_spaces.fastq',
'error_tabs.fastq',
'error_trunc_at_seq.fastq',
'error_trunc_at_plus.fastq',
'error_trunc_at_qual.fastq',
'error_trunc_in_title.fastq',
'error_trunc_in_seq.fastq',
'error_trunc_in_plus.fastq',
'error_trunc_in_qual.fastq',
]]
def test_positives(self):
for fp in self.positives:
self.assertEqual(_fastq_sniffer(fp), (True, {}))
def test_negatives(self):
for fp in self.negatives:
self.assertEqual(_fastq_sniffer(fp), (False, {}))
class TestReaders(unittest.TestCase):
def setUp(self):
self.valid_configurations = [
([get_data_path('empty'),
get_data_path('whitespace_only')],
[{},
{'variant': 'illumina1.8'},
{'phred_offset': 33,
'constructor': DNA}],
[]),
([get_data_path('fastq_single_seq_illumina1.3')], [
{'variant': 'illumina1.3'},
{'phred_offset': 64},
{'variant': 'illumina1.3',
'constructor': Protein},
], [
('', 'bar\t baz', 'aCGT', [33, 34, 35, 36])
]),
([get_data_path('fastq_multi_seq_sanger'),
get_data_path('fastq_whitespace_only_lines'),
get_data_path('fastq_blank_lines'),
get_data_path('fastq_multi_blank_between_records'),
get_data_path('fastq_multi_ws_lines_between_records'),
get_data_path('fastq_multi_blank_end_of_file'),
get_data_path('fastq_multi_ws_lines_end_of_file'),
get_data_path('fastq_multi_blank_start_of_file'),
get_data_path('fastq_multi_ws_lines_start_of_file'),
get_data_path('fastq_multi_whitespace_stripping')], [
{'variant': 'sanger'},
{'phred_offset': 33, 'seq_num': 2},
{'variant': 'sanger',
'constructor': partial(RNA, validate=False),
'seq_num': 3},
], [
('foo', 'bar baz', 'AACCGG',
[16, 17, 18, 19, 20, 21]),
('bar', 'baz foo', 'TTGGCC',
[23, 22, 21, 20, 19, 18]),
('baz', 'foo bar', 'GATTTC',
[20, 21, 22, 23, 24, 18])
]),
]
self.invalid_files = [(get_data_path(e[0]), e[1], e[2]) for e in [
('fastq_invalid_blank_after_header', FASTQFormatError,
'blank or whitespace-only line.*after header.*in FASTQ'),
('fastq_invalid_blank_after_seq', FASTQFormatError,
"blank or whitespace-only line.*before '\+' in FASTQ"),
('fastq_invalid_blank_after_plus', FASTQFormatError,
"blank or whitespace-only line.*after '\+'.*in FASTQ"),
('fastq_invalid_blank_within_seq', FASTQFormatError,
'blank or whitespace-only line.*within sequence.*FASTQ'),
('fastq_invalid_blank_within_qual', FASTQFormatError,
"blank or whitespace-only line.*within quality scores.*in FASTQ"),
('fastq_invalid_ws_line_after_header', FASTQFormatError,
'blank or whitespace-only line.*after header.*in FASTQ'),
('fastq_invalid_ws_line_after_seq', FASTQFormatError,
"blank or whitespace-onl |
bastianh/evelink | tests/cache/test_shelve.py | Python | mit | 815 | 0.006135 | import os
import tempfile
from tests.com | pat import unittest
from evelink.cache.shelf import ShelveCache
class ShelveCacheTestCase(unittest.TestCase):
def setUp(self):
self.cache_dir = tempfile.mkdtemp()
self.cache_path = os.path.join(self.cache_dir, 'shelf')
self.cache = ShelveCache(self.cache_path)
def tearDown(self):
self.cache.cache.close()
try:
os.remove(self.cache_path)
except OSError:
pass
| try:
os.rmdir(self.cache_dir)
except OSError:
pass
def test_cache(self):
self.cache.put('foo', 'bar', 3600)
self.assertEqual(self.cache.get('foo'), 'bar')
def test_expire(self):
self.cache.put('baz', 'qux', -1)
self.assertEqual(self.cache.get('baz'), None)
|
mozilla/addons-server | src/olympia/shelves/migrations/0018_delete_shelfmanagement.py | Python | bsd-3-clause | 304 | 0 | # Generated by Django 3.2.4 on 2021-07-28 17:24
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('shelves', '0017_auto_20210726_1422'),
]
operations = [
migrations.D | eleteModel(
name='ShelfManagement',
| ),
]
|
Cimbali/pympress | pympress/talk_time.py | Python | gpl-2.0 | 10,340 | 0.003967 | # -*- coding: utf-8 -*-
#
# talk_time.py
#
# Copyright 2017 Cimbali <me@cimba.li>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
:mod:`pympress.talk_time` -- Manages the clock of elapsed talk time
-------------------------------------------------------------------
"""
import logging
logger = logging.getLogger(__name__)
import time
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib
class TimeLabelColorer(object):
    """ Manage the colors of a label with a set of colors between which to fade, based on how much time remains.
    Times are given in seconds (<0 has run out of time). In between timestamps the color will interpolated linearly,
    outside of the intervals the closest color will be used.
    Args:
        label_time (:class:`Gtk.Label`): the label where the talk time is displayed
    """
    #: The :class:`Gtk.Label` whose colors need updating
    label_time = None
    #: :class:`~Gdk.RGBA` The default color of the info labels
    label_color_default = None
    #: :class:`~Gtk.CssProvider` affecting the style context of the labels
    color_override = None
    #: `list` of tuples (`int`, :class:`~Gdk.RGBA`), which are the desired colors at the corresponding timestamps.
    #: Sorted on the timestamps.
    color_map = []
    def __init__(self, label_time):
        self.label_time = label_time
        style_context = self.label_time.get_style_context()
        self.color_override = Gtk.CssProvider()
        # Priority above the application CSS so update_time_color()'s
        # load_from_data() override wins over the theme.
        style_context.add_provider(self.color_override, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION + 1)
        # Sample the label's color once per CSS class to build the fade palette.
        self.label_color_default = self.load_color_from_css(style_context)
        label_color_ett_reached = self.load_color_from_css(style_context, "ett-reached")
        label_color_ett_info = self.load_color_from_css(style_context, "ett-info")
        label_color_ett_warn = self.load_color_from_css(style_context, "ett-warn")
        # Timestamps are in strictly decreasing order; update_time_color()
        # relies on this ordering to find the two nearest entries.
        self.color_map = [
            ( 300, self.label_color_default),
            (   0, label_color_ett_reached),
            (-150, label_color_ett_info),
            (-300, label_color_ett_warn)
        ]
    def load_color_from_css(self, style_context, class_name = None):
        """ Add class class_name to the time label and return its color.
        Args:
            label_time (:class:`Gtk.Label`): the label where the talk time is displayed
            style_context (:class:`~Gtk.StyleContext`): the CSS context managing the color of the label
            class_name (`str` or `None`): The name of the class, if any
        Returns:
            :class:`~Gdk.RGBA`: The color of the label with class "class_name"
        """
        if class_name:
            style_context.add_class(class_name)
        # show() forces the widget to realize its style so get_color() below
        # reflects the freshly added class.
        self.label_time.show()
        color = style_context.get_color(Gtk.StateType.NORMAL)
        if class_name:
            style_context.remove_class(class_name)
        return color
    def default_color(self):
        """ Forces to reset the default colors on the label.
        """
        # An empty override falls back to the theme/application colors.
        self.color_override.load_from_data(''.encode('ascii'))
    def update_time_color(self, remaining):
        """ Update the color of the time label based on how much time is remaining.
        Args:
            remaining (`int`): Remaining time until estimated talk time is reached, in seconds.
        """
        # Blink ("time-warn" class on/off as the clock ticks through these
        # windows) for 5s when the estimated time is reached and again when
        # it is exceeded by 5 minutes.
        if (remaining <= 0 and remaining > -5) or (remaining <= -300 and remaining > -310):
            self.label_time.get_style_context().add_class("time-warn")
        else:
            self.label_time.get_style_context().remove_class("time-warn")
        prev_time, prev_color = None, None
        # Walk the (decreasing) timestamps: stop at the first entry at or below
        # `remaining`; `prev_*` then holds the entry just above it.
        for timestamp, color in self.color_map:
            if remaining >= timestamp:
                break
            prev_time, prev_color = (timestamp, color)
        else:
            # if remaining < all timestamps, use only last color
            prev_color = None
        if prev_color:
            # Linearly interpolate between the two surrounding colors.
            position = (remaining - prev_time) / (timestamp - prev_time)
            color_spec = '* {{color: mix({}, {}, {})}}'.format(prev_color.to_string(), color.to_string(), position)
        else:
            # Outside the mapped interval: use the nearest color as-is.
            color_spec = '* {{color: {}}}'.format(color.to_string())
        self.color_override.load_from_data(color_spec.encode('ascii'))
class TimeCounter(object):
""" A double counter, that displays the time elapsed in the talk and a clock.
Args:
buil | der (builder.Builder): The builder from which to load widgets.
ett (`int`): the estimated time for the talk, in seconds.
timing_tracker: (:class:`~pympress.extras.TimingReport`): to inform when the slides change
autoplay: (:class:`~pympress.dialog.AutoPlay`): to adjust the timer display if we’re | auto-playing/looping slides
"""
#: Elapsed time :class:`~Gtk.Label`
label_time = None
#: Clock :class:`~Gtk.Label`
label_clock = None
#: Time at which the counter was started, `int` in seconds as returned by :func:`~time.time()`
restart_time = 0
#: Time elapsed since the beginning of the presentation, `int` in seconds
elapsed_time = 0
#: Timer paused status, `bool`
paused = True
#: :class:`~TimeLabelColorer` that handles setting the colors of :attr:`label_time`
label_colorer = None
#: :class:`~pympress.editable_label.EstimatedTalkTime` that handles changing the ett
ett = None
#: The pause-timer :class:`~Gio.Action`
pause_action = None
#: The :class:`~pympress.extras.TimingReport`, needs to know when the slides change
timing_tracker = None
#: The :class:`~pympress.dialog.AutoPlay`, to adjust the timer display if we’re auto-playing/looping slides
autoplay = None
def __init__(self, builder, ett, timing_tracker, autoplay):
super(TimeCounter, self).__init__()
self.label_colorer = TimeLabelColorer(builder.get_object('label_time'))
self.ett = ett
self.timing_tracker = timing_tracker
self.autoplay = autoplay
builder.load_widgets(self)
builder.setup_actions({
'pause-timer': dict(activate=self.switch_pause, state=self.paused),
'reset-timer': dict(activate=self.reset_timer),
})
self.pause_action = builder.get_application().lookup_action('pause-timer')
# Setup timer for clocks
GLib.timeout_add(250, self.update_time)
def switch_pause(self, gaction, param=None):
""" Switch the timer between paused mode and running (normal) mode.
Returns:
`bool`: whether the clock's pause was toggled.
"""
if self.paused:
self.unpause()
else:
self.pause()
return None
def pause(self):
""" Pause the timer if it is not paused, otherwise do nothing.
Returns:
`bool`: whether the clock's pause was toggled.
"""
if self.paused:
return False
self.paused = True
self.pause_action.change_state(GLib.Variant.new_boolean(self.paused))
self.elapsed_time += time.time() - self.restart_time
self.timing_tracker.end_time = self.elapsed_time
if self.autoplay.is_looping():
self.autoplay.pause()
self.update_time()
return True
def unpause(self):
""" Unpause the timer if it is paused, otherwise do nothing.
Returns:
`bool`: whether the clock's pause |
bwasti/caffe2 | caffe2/python/operator_test/rank_loss_operator_test.py | Python | apache-2.0 | 4,147 | 0.000241 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
class TestPairWiseLossOps(hu.HypothesisTestCase):
    """Tests for the PairWiseLoss ranking operators.

    The forward pass is checked against the analytical logistic ranking loss
    log(1 + exp(sign * (x2 - x1))), and the gradient against both its
    analytical form and a central finite-difference estimate.
    """

    @given(X=hu.arrays(dims=[2, 1],
                       elements=st.floats(min_value=0.0, max_value=10.0)),
           label=hu.arrays(dims=[2, 1],
                           elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_predictions(self, X, label, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('label', label)
        # Same pair with the two rows swapped: the loss must be symmetric.
        new_label = np.array([label[1], label[0]])
        new_x = np.array([X[1], X[0]])
        workspace.FeedBlob('new_x', new_x)
        workspace.FeedBlob('new_label', new_label)
        net = core.Net('net')
        net.PairWiseLoss(['X', 'label'], ['output'])
        net.PairWiseLoss(['new_x', 'new_label'], ['new_output'])
        plan = core.Plan('predict_data')
        plan.AddStep(core.execution_step('predict_data',
                                         [net], num_iter=1))
        workspace.RunPlan(plan)
        output = workspace.FetchBlob('output')
        new_output = workspace.FetchBlob('new_output')
        sign = 1 if label[0] > label[1] else -1
        # Equal labels contribute no ranking loss.
        if label[0] == label[1]:
            self.assertEqual(np.asscalar(output), 0)
            return
        # loss = log(1 + exp(sign * (x2 - x1)))
        self.assertAlmostEqual(
            np.asscalar(output),
            np.asscalar(np.log(1 + np.exp(sign * (X[1] - X[0])))),
            delta=1e-4
        )
        # check swapping row order doesn't alter overall loss
        self.assertAlmostEqual(output, new_output)

    @given(X=hu.arrays(dims=[2, 1],
                       elements=st.floats(min_value=0.0, max_value=10.0)),
           label=hu.arrays(dims=[2, 1],
                           elements=st.integers(min_value=0, max_value=1),
                           dtype=np.float32),
           dY=hu.arrays(dims=[1],
                        elements=st.floats(min_value=1, max_value=10)),
           **hu.gcs_cpu_only)
    def test_pair_wise_loss_gradient(self, X, label, dY, gc, dc):
        workspace.FeedBlob('X', X)
        workspace.FeedBlob('dY', dY)
        workspace.FeedBlob('label', label)
        net = core.Net('net')
        net.PairWiseLossGradient(
            ['X', 'label', 'dY'],
            ['dX'],
        )
        plan = core.Plan('predict_data')
        plan.AddStep(core.execution_step('predict_data',
                                         [net], num_iter=1))
        workspace.RunPlan(plan)
        dx = workspace.FetchBlob('dX')
        sign = 1 if label[0] > label[1] else -1
        if label[0] == label[1]:
            self.assertEqual(np.asscalar(dx[0]), 0)
            return
        # Analytical gradient: dL/dx1 = -dY * sign / (1 + exp(sign * (x1 - x2)))
        self.assertAlmostEqual(
            np.asscalar(dx[0]),
            np.asscalar(-dY[0] * sign / (1 + np.exp(sign * (X[0] - X[1])))),
            delta=1e-2 * abs(np.asscalar(dx[0])))
        # The two gradient entries are exact opposites.
        self.assertEqual(np.asscalar(dx[0]), np.asscalar(-dx[1]))
        # Cross-check against a central finite-difference estimate in x1.
        delta = 1e-3
        up_x = np.array([[X[0] + delta], [X[1]]], dtype=np.float32)
        down_x = np.array([[X[0] - delta], [X[1]]], dtype=np.float32)
        workspace.FeedBlob('up_x', up_x)
        workspace.FeedBlob('down_x', down_x)
        new_net = core.Net('new_net')
        new_net.PairWiseLoss(['up_x', 'label'], ['up_output'])
        new_net.PairWiseLoss(['down_x', 'label'], ['down_output'])
        plan = core.Plan('predict_data')
        plan.AddStep(core.execution_step('predict_data', [new_net], num_iter=1))
        workspace.RunPlan(plan)
        down_output_pred = workspace.FetchBlob('down_output')
        up_output_pred = workspace.FetchBlob('up_output')
        np.testing.assert_allclose(
            np.asscalar(dx[0]),
            np.asscalar(
                0.5 * dY[0] *
                (up_output_pred[0] - down_output_pred[0]) / delta),
            rtol=1e-2, atol=1e-2)
|
openSUSE/vdsm | vdsm/SecureXMLRPCServer.py | Python | gpl-2.0 | 7,492 | 0.00307 | # Copyright 2008 Red Hat, Inc. and/or its affiliates.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""SecureXMLRPCServer.py - simple XML RPC server supporting SSL.
"""
import SimpleXMLRPCServer
import xmlrpclib
import ssl
import httplib
import socket
import SocketServer
# Re-export the stock request handler under a module-local name so callers can
# pass it (or a subclass of it) as SecureXMLRPCServer's requestHandler.
SecureXMLRPCRequestHandler = SimpleXMLRPCServer.SimpleXMLRPCRequestHandler
class SecureXMLRPCServer(SimpleXMLRPCServer.SimpleXMLRPCServer):
    """XML-RPC server whose listening socket is wrapped with SSL.

    Client certificates are required and verified against ``ca_certs``.
    The SSL handshake is deferred (do_handshake_on_connect=False) to
    finish_request() so that accept() cannot stall on a slow handshake.
    """

    def __init__(self, addr,
                 requestHandler=SimpleXMLRPCServer.SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True,
                 keyfile=None, certfile=None, ca_certs=None,
                 timeout=None):
        """Initialize a SimpleXMLRPCServer instance but wrap its .socket member with ssl."""
        # Defer bind/activate until after the socket has been wrapped.
        SimpleXMLRPCServer.SimpleXMLRPCServer.__init__(self, addr,
                                       requestHandler,
                                       logRequests, allow_none, encoding,
                                       bind_and_activate=False)
        self.socket = ssl.wrap_socket(self.socket,
                                      keyfile=keyfile, certfile=certfile,
                                      ca_certs=ca_certs, server_side=True,
                                      cert_reqs=ssl.CERT_REQUIRED,
                                      do_handshake_on_connect=False)
        if timeout is not None:
            # BUGFIX: settimeout is a method and must be *called*; the previous
            # code assigned to it (self.socket.settimeout = timeout), which
            # clobbered the method and never applied the timeout.
            self.socket.settimeout(timeout)
        if bind_and_activate:
            self.server_bind()
            self.server_activate()

    def finish_request(self, request, client_address):
        """Complete the deferred SSL handshake, then dispatch as usual."""
        request.do_handshake()
        return SimpleXMLRPCServer.SimpleXMLRPCServer.finish_request(self, request,
                                                                    client_address)

    def handle_error(self, request, client_address):
        """Log per-connection errors instead of dumping a traceback to stderr."""
        import logging
        logging.error('client %s', client_address, exc_info=True)
# Threaded variant: each XML-RPC request is handled in its own thread.
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
                                 SecureXMLRPCServer): pass
class VerifyingHTTPSConnection(httplib.HTTPSConnection):
    """HTTPSConnection that verifies the server certificate against ca_certs.

    Plain httplib.HTTPSConnection does not verify the peer; this subclass
    wraps the socket with cert_reqs/ca_certs so verification happens.
    """

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 ca_certs=None, cert_reqs=ssl.CERT_REQUIRED):
        httplib.HTTPSConnection.__init__(self, host, port, key_file, cert_file,
                                         strict, timeout)
        self.ca_certs = ca_certs
        self.cert_reqs = cert_reqs

    def connect(self):
        "Connect to a host on a given (SSL) port."
        sock = socket.create_connection((self.host, self.port), self.timeout)
        if self._tunnel_host:
            # CONNECT through an HTTP proxy before starting TLS.
            self.sock = sock
            self._tunnel()
        # DK added: pass ca_certs to the ssl socket so the peer is verified
        self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                                    ca_certs=self.ca_certs, server_side=False,
                                    cert_reqs=self.cert_reqs)
class VerifyingSafeTransport(xmlrpclib.SafeTransport):
    """xmlrpclib transport that verifies the server cert via ca_certs.

    Hands xmlrpclib a VerifyingHTTPS / VerifyingHTTPSConnection instead of
    the stock non-verifying HTTPS connection.
    """
    def __init__(self, use_datetime=0, key_file=None, cert_file=None,
                 ca_certs=None, cert_reqs=ssl.CERT_REQUIRED):
        xmlrpclib.SafeTransport.__init__(self, use_datetime)
        self.key_file = key_file
        self.cert_file = cert_file
        self.ca_certs = ca_certs
        self.cert_reqs = cert_reqs
    def make_connection(self, host):
        """Return VerifyingHTTPS object that is aware of ca_certs, and will
        create VerifyingHTTPSConnection.
        In Python 2.7, return VerifyingHTTPSConnection object"""
        chost, self._extra_headers, x509 = self.get_host_info(host)
        # Feature-detect the 2.7 transport API (single_request) rather than
        # checking the interpreter version directly.
        if hasattr(xmlrpclib.SafeTransport, "single_request"): # Python 2.7
            return VerifyingHTTPSConnection(
                chost, None, key_file=self.key_file, strict=None,
                cert_file=self.cert_file, ca_certs=self.ca_certs,
                cert_reqs=self.cert_reqs)
        else:
            # Pre-2.7 API expects the HTTP(S) wrapper object.
            return VerifyingHTTPS(
                chost, None, key_file=self.key_file,
                cert_file=self.cert_file, ca_certs=self.ca_certs,
                cert_reqs=self.cert_reqs)
class VerifyingHTTPS(httplib.HTTPS):
    """Pre-2.7 HTTPS wrapper that builds a certificate-verifying connection."""
    # httplib.HTTPS instantiates this class inside _setup().
    _connection_class = VerifyingHTTPSConnection
    def __init__(self, host='', port=None, key_file=None, cert_file=None,
                 strict=None, ca_certs=None, cert_reqs=ssl.CERT_REQUIRED):
        """A ca_cert-aware HTTPS object, that creates a VerifyingHTTPSConnection"""
        # provide a default host, pass the X509 cert info
        # urf. compensate for bad input.
        if port == 0:
            port = None
        self._setup(self._connection_class(host, port, key_file,
                                           cert_file, strict, ca_certs=ca_certs,
                                           cert_reqs=cert_reqs))
        # we never actually use these for anything, but we keep them
        # here for compatibility with post-1.5.2 CVS.
        self.key_file = key_file
        self.cert_file = cert_file
# Manual smoke test: run one process as the server and another as the client.
class __Test(object):
    """Self-signed key, generated with
    make -C /etc/pki/tls/certs /tmp/selfsign.pem
    with CN=127.0.0.1
    """
    # One PEM file acts as key, certificate and CA (self-signed).
    KEYFILE = CERTFILE = CACERT = 'selfsign.pem'
    host = '127.0.0.1'
    port = 8443
    def server(self):
        """Test xml rpc over https server"""
        class xmlrpc_registers:
            def add(self, x, y):
                return x + y
            def wait(self):
                import time
                time.sleep(10)
                return 1
        server = SecureXMLRPCServer((self.host, self.port),
            keyfile=self.KEYFILE, certfile=self.CERTFILE, ca_certs=self.CACERT)
        server.register_instance(xmlrpc_registers())
        print "Serving HTTPS on", self.host, "port", self.port
        server.serve_forever()
    def client(self):
        # Call add(2, 3) over the verifying transport and print the result.
        vtransport=VerifyingSafeTransport(key_file=self.KEYFILE,
                        cert_file=self.CERTFILE, ca_certs=self.CACERT)
        s = xmlrpclib.ServerProxy('https://%s:%s' % (self.host, self.port),
                                  transport=vtransport)
        print s.add(2, 3)
if __name__ == '__main__':
    import sys
    # No arguments: run the test client; any argument: run the test server.
    if len(sys.argv) == 1:
        __Test().client()
    else:
        __Test().server()
|
duplocloud/duploiotagent | AWSIoTPythonSDK/core/shadow/shadowManager.py | Python | apache-2.0 | 3,636 | 0.00385 | # /*
# * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# *
# * Licensed under the Apache License, Version 2.0 (the "License").
# * You may not use this file except in compliance with the License.
# * A copy of the License is located at
# *
# * http://aws.amazon.com/apache2.0
# *
# * or in the "license" file accompanying this file. This file is distributed
# * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# * express or implied. See the License for the specific language governing
# * permissions and limitations under the License.
# */
import logging
import time
from threading import Lock
class _shadowAction:
    """Internal helper mapping a (thing name, action) pair to its MQTT topics.

    For "get"/"update"/"delete", the general request topic plus the
    accepted/rejected response topics are built; "delta" is subscribe-only
    and has a single dedicated topic.
    """
    # Shadow operations accepted by AWS IoT.
    _actionType = ["get", "update", "delete", "delta"]

    def __init__(self, srcShadowName, srcActionName):
        if srcActionName is None or srcActionName not in self._actionType:
            raise TypeError("Unsupported shadow action.")
        self._shadowName = srcShadowName
        self._actionName = srcActionName
        self.isDelta = srcActionName == "delta"
        if self.isDelta:
            # Delta notifications arrive on the update/delta topic.
            self._topicDelta = "$aws/things/" + str(self._shadowName) + "/shadow/update/delta"
        else:
            self._topicGeneral = "$aws/things/" + str(self._shadowName) + "/shadow/" + str(self._actionName)
            self._topicAccept = "$aws/things/" + str(self._shadowName) + "/shadow/" + str(self._actionName) + "/accepted"
            self._topicReject = "$aws/things/" + str(self._shadowName) + "/shadow/" + str(self._actionName) + "/rejected"

    def getTopicGeneral(self):
        return self._topicGeneral

    def getTopicAccept(self):
        return self._topicAccept

    def getTopicReject(self):
        return self._topicReject

    def getTopicDelta(self):
        return self._topicDelta


class shadowManager:
    """Publish/subscribe helper for AWS IoT device-shadow topics.

    Wraps an MQTT core object (anything exposing publish/subscribe/
    unsubscribe) and serializes subscribe/unsubscribe operations with a lock.
    """
    _logger = logging.getLogger(__name__)

    def __init__(self, srcMQTTCore):
        # Load in mqttCore
        if srcMQTTCore is None:
            raise TypeError("None type inputs detected.")
        self._mqttCoreHandler = srcMQTTCore
        self._shadowSubUnsubOperationLock = Lock()

    def basicShadowPublish(self, srcShadowName, srcShadowAction, srcPayload):
        """Publish srcPayload (QoS 0, not retained) to the action's request topic."""
        currentShadowAction = _shadowAction(srcShadowName, srcShadowAction)
        self._mqttCoreHandler.publish(currentShadowAction.getTopicGeneral(), srcPayload, 0, False)

    def basicShadowSubscribe(self, srcShadowName, srcShadowAction, srcCallback):
        """Subscribe srcCallback (QoS 0) to the action's response topic(s)."""
        with self._shadowSubUnsubOperationLock:
            currentShadowAction = _shadowAction(srcShadowName, srcShadowAction)
            if currentShadowAction.isDelta:
                self._mqttCoreHandler.subscribe(currentShadowAction.getTopicDelta(), 0, srcCallback)
            else:
                self._mqttCoreHandler.subscribe(currentShadowAction.getTopicAccept(), 0, srcCallback)
                self._mqttCoreHandler.subscribe(currentShadowAction.getTopicReject(), 0, srcCallback)
            # Give the broker time to complete the subscriptions before the
            # caller publishes a request expecting responses on these topics.
            time.sleep(2)

    def basicShadowUnsubscribe(self, srcShadowName, srcShadowAction):
        """Unsubscribe from the action's response topic(s)."""
        with self._shadowSubUnsubOperationLock:
            currentShadowAction = _shadowAction(srcShadowName, srcShadowAction)
            if currentShadowAction.isDelta:
                self._mqttCoreHandler.unsubscribe(currentShadowAction.getTopicDelta())
            else:
                self._logger.debug(currentShadowAction.getTopicAccept())
                self._mqttCoreHandler.unsubscribe(currentShadowAction.getTopicAccept())
                self._logger.debug(currentShadowAction.getTopicReject())
                self._mqttCoreHandler.unsubscribe(currentShadowAction.getTopicReject())
|
ACJTeam/enigma2 | lib/python/Components/Language.py | Python | gpl-2.0 | 5,188 | 0.024752 | # -*- coding: UTF-8 -*-
import gettext
import locale
import os
from Tools.Directories import SCOPE_LANGUAGE, resolveFilename
class Language:
def __init__(self):
gettext.install('enigma2', resolveFilename(SCOPE_LANGUAGE, ""), unicode=0, codeset="utf-8")
gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE))
gettex | t.textdomain("enigma2")
self.activeLanguage = 0
self.catalog = None
self.lang = {}
self.langlist = []
# FIXME make list dynamically
# name, iso-639 language, iso-3166 country. Please don't mix language&country!
self.addLanguage("Arabic", "ar", "AE", "ISO-8859-15")
self.addLanguage("Български", "bg", "BG", "ISO-8859-15")
self.addLanguage("Català", "ca", "AD", "ISO-8859-15")
self.addLanguage("Česky", "cs", "CZ", "ISO-8859-15")
self.a | ddLanguage("Dansk", "da", "DK", "ISO-8859-15")
self.addLanguage("Deutsch", "de", "DE", "ISO-8859-15")
self.addLanguage("Ελληνικά", "el", "GR", "ISO-8859-7")
self.addLanguage("English", "en", "EN", "ISO-8859-15")
self.addLanguage("Español", "es", "ES", "ISO-8859-15")
self.addLanguage("Eesti", "et", "EE", "ISO-8859-15")
self.addLanguage("فارسی", "fa", "IR", "UTF-8")
self.addLanguage("Suomi", "fi", "FI", "ISO-8859-15")
self.addLanguage("Français", "fr", "FR", "ISO-8859-15")
self.addLanguage("Frysk", "fy", "NL", "ISO-8859-15")
self.addLanguage("Hebrew", "he", "IL", "ISO-8859-15")
self.addLanguage("TChinese", "hk", "HK", "UTF-8")
self.addLanguage("Hrvatski", "hr", "HR", "ISO-8859-15")
self.addLanguage("Magyar", "hu", "HU", "ISO-8859-15")
self.addLanguage("Indonesian", "id", "ID", "ISO-8859-15")
self.addLanguage("Íslenska", "is", "IS", "ISO-8859-15")
self.addLanguage("Italiano", "it", "IT", "ISO-8859-15")
self.addLanguage("Kurdish", "ku", "KU", "ISO-8859-15")
self.addLanguage("Lietuvių", "lt", "LT", "ISO-8859-15")
self.addLanguage("Latviešu", "lv", "LV", "ISO-8859-15")
self.addLanguage("Nederlands", "nl", "NL", "ISO-8859-15")
self.addLanguage("Norsk Bokmål","nb", "NO", "ISO-8859-15")
self.addLanguage("Norsk Nynorsk", "nn", "NO", "ISO-8859-15")
self.addLanguage("Polski", "pl", "PL", "ISO-8859-15")
self.addLanguage("Português", "pt", "PT", "ISO-8859-15")
self.addLanguage("Português do Brasil","pt", "BR", "ISO-8859-15")
self.addLanguage("Romanian", "ro", "RO", "ISO-8859-15")
self.addLanguage("Русский", "ru", "RU", "ISO-8859-15")
self.addLanguage("Slovensky", "sk", "SK", "ISO-8859-15")
self.addLanguage("Slovenščina", "sl", "SI", "ISO-8859-15")
self.addLanguage("Srpski", "sr", "YU", "ISO-8859-15")
self.addLanguage("Svenska", "sv", "SE", "ISO-8859-15")
self.addLanguage("ภาษาไทย", "th", "TH", "ISO-8859-15")
self.addLanguage("Türkçe", "tr", "TR", "ISO-8859-15")
self.addLanguage("Ukrainian", "uk", "UA", "ISO-8859-15")
self.addLanguage("SChinese", "zh", "CN", "UTF-8")
self.callbacks = []
def addLanguage(self, name, lang, country, encoding):
try:
self.lang[str(lang + "_" + country)] = ((name, lang, country, encoding))
self.langlist.append(str(lang + "_" + country))
except:
print "Language " + str(name) + " not found"
def activateLanguage(self, index):
try:
if index not in self.lang:
print "Selected language %s does not exist, fallback to en_EN!" % index
index = "en_EN"
lang = self.lang[index]
print "Activating language " + lang[0]
self.catalog = gettext.translation('enigma2', resolveFilename(SCOPE_LANGUAGE, ""), languages=[index])
self.catalog.install(names=("ngettext", "pgettext"))
self.activeLanguage = index
for x in self.callbacks:
x()
except:
print "Error in activating language!"
# NOTE: we do not use LC_ALL, because LC_ALL will not set any of the categories, when one of the categories fails.
# We'd rather try to set all available categories, and ignore the others
for category in [locale.LC_CTYPE, locale.LC_COLLATE, locale.LC_TIME, locale.LC_MONETARY, locale.LC_MESSAGES, locale.LC_NUMERIC]:
try:
locale.setlocale(category, (self.getLanguage(), 'UTF-8'))
except:
pass
# HACK: sometimes python 2.7 reverts to the LC_TIME environment value, so make sure it has the correct value
os.environ["LC_TIME"] = self.getLanguage() + '.UTF-8'
os.environ["GST_SUBTITLE_ENCODING"] = self.getGStreamerSubtitleEncoding()
def activateLanguageIndex(self, index):
if index < len(self.langlist):
self.activateLanguage(self.langlist[index])
def getLanguageList(self):
return [ (x, self.lang[x]) for x in self.langlist ]
def getActiveLanguage(self):
return self.activeLanguage
def getActiveCatalog(self):
return self.catalog
def getActiveLanguageIndex(self):
idx = 0
for x in self.langlist:
if x == self.activeLanguage:
return idx
idx += 1
return None
def getLanguage(self):
try:
return str(self.lang[self.activeLanguage][1]) + "_" + str(self.lang[self.activeLanguage][2])
except:
return ''
def getGStreamerSubtitleEncoding(self):
try:
return str(self.lang[self.activeLanguage][3])
except:
return 'ISO-8859-15'
def addCallback(self, callback):
self.callbacks.append(callback)
# Module-level singleton shared by the rest of the UI.
language = Language()
|
ZacharyJacobCollins/Fallen | tagger.py | Python | mit | 554 | 0.00361 | from sentence import Sentence
from collections import Counter
from itertools import chain

from textblob import TextBlob
def findSubject(lines):
    """Return the dominant noun across all lines, or None.

    Each line is parsed with Sentence() to extract its nouns.  The most
    common noun is returned with spaces replaced by underscores, but only
    when it occurs at least twice; otherwise None is returned.
    """
    if len(lines) == 0:
        print("messages are empty")
        return None
    # Flatten the per-sentence noun lists into one list.
    nouns = list(chain.from_iterable(Sentence(m).nouns for m in lines))
    # BUGFIX: the original checked the (always non-empty) list of per-sentence
    # noun lists, so Counter(...).most_common()[0] raised IndexError whenever
    # no line contained any noun.  Check the flattened noun list instead.
    if not nouns:
        print("No nouns")
        return None
    noun, count = Counter(nouns).most_common(1)[0]
    if count >= 2:
        return noun.replace(" ", "_")
    return None
|
cl4u2/chirp | chirpui/mainapp.py | Python | gpl-3.0 | 67,000 | 0.001866 | # Copyright 2008 Dan Smith <dsmith@danplanet.com>
# Copyright 2012 Tom Hayward <tom@tomh.us>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import tempfile
import urllib
from glob import glob
import shutil
import time
import gtk
import gobject
# GTK/GLib are used from multiple threads (radio clone jobs); enable GLib
# threading support before any other GTK calls.
gobject.threads_init()
if __name__ == "__main__":
    import sys
    # Running straight from a source checkout: make the parent directory
    # importable so the chirp/chirpui packages resolve.
    sys.path.insert(0, "..")
from chirpui import inputdialog, common
try:
    import serial
except ImportError,e:
    # pyserial is required to talk to radios; show a friendly dialog instead
    # of a raw traceback when it is missing.  (NOTE(review): `e` is unused.)
    common.log_exception()
    common.show_error("\nThe Pyserial module is not installed!")
from chirp import platform, generic_xml, generic_csv, directory, util
from chirp import ic9x, kenwood_live, idrp, vx7, vx5, vx6
from chirp import CHIRP_VERSION, chirp_common, detect, errors
from chirp import icf, ic9x_icf
from chirpui import editorset, clone, miscwidgets, config, reporting, fips
from chirpui import bandplans
CONF = config.get()
# Number of entries kept in the "recent files" menu.
KEEP_RECENT = 8
# RepeaterBook band filter: menu label -> numeric band code passed in the
# query (0 means no band filtering).
RB_BANDS = {
    "--All--" : 0,
    "10 meters (29MHz)" : 29,
    "6 meters (54MHz)" : 5,
    "2 meters (144MHz)" : 14,
    "1.25 meters (220MHz)" : 22,
    "70 centimeters (440MHz)" : 4,
    "33 centimeters (900MHz)" : 9,
    "23 centimeters (1.2GHz)" : 12,
    }
def key_bands(band):
    """Sort key ordering RB_BANDS labels by ascending frequency.

    The "--All--" entry sorts first; labels like "2 meters (144MHz)" are
    ranked by wavelength, with meters scaled so they sort before centimeters.
    """
    if band.startswith("-"):
        return -1
    amount, units, _mhz = band.split(" ")
    scale = 100 if units == "meters" else 1
    return 100000 - float(amount) * scale
# Raised to abort actions on modified, unsaved data -- the raising sites are
# not visible in this chunk of the file.
class ModifiedError(Exception):
    pass
class ChirpMain(gtk.Window):
def get_current_editorset(self):
page = self.tabs.get_current_page()
if page is not None:
return self.tabs.get_nth_page(page)
else:
return None
def ev_tab_switched(self, pagenum=None):
def set_action_sensitive(action, sensitive):
self.menu_ag.get_action(action).set_sensitive(sensitive)
if pagenum is not None:
eset = self.tabs.get_nth_page(pagenum)
else:
eset = self.get_current_editorset()
upload_sens = bool(eset and
isinstance(eset.radio, chirp_common.CloneModeRadio))
if not eset or isinstance(eset.radio, chirp_common.LiveRadio):
save_sens = False
elif isinstance(eset.radio, chirp_common.NetworkSourceRadio):
save_sens = False
else:
save_sens = True
for i in ["import", "importsrc", "stock"]:
set_action_sensitive(i,
eset is not None and not eset.get_read_only())
for i in ["save", "saveas"]:
set_action_sensitive(i, save_sens)
for i in ["upload"]:
set_action_sensitive(i, upload_sens)
for i in ["cancelq"]:
set_action_sensitive(i, eset is not None and not save_sens)
for i in ["export", "close", "columns", "irbook", "irfinder",
"move_up", "move_dn", "exchange", "iradioreference",
"cut", "copy", "paste", "delete", "viewdeveloper"]:
| set_action_sensitive(i, eset is not None)
    def ev_status(self, editorset, msg):
        # Replace (pop, then push) the message shown in the radio statusbar.
        self.sb_radio.pop(0)
        self.sb_radio.push(0, msg)
| def ev_usermsg(self, editorset, msg):
self.sb_general.pop(0)
self.sb_general.push(0, msg)
def ev_editor_selected(self, editorset, editortype):
mappings = {
"memedit" : ["view", "edit"],
}
for _editortype, actions in mappings.items():
for _action in actions:
action = self.menu_ag.get_action(_action)
action.set_sensitive(editortype.startswith(_editortype))
    def _connect_editorset(self, eset):
        # Wire the editorset's signals to this window's handler methods.
        eset.connect("want-close", self.do_close)
        eset.connect("status", self.ev_status)
        eset.connect("usermsg", self.ev_usermsg)
        eset.connect("editor-selected", self.ev_editor_selected)
    def do_diff_radio(self):
        """Prompt for two open tabs (and optional channel numbers) and show a
        diff of either one raw memory from each or the whole clone images.

        A channel of -1 selects "whole radio" for that side.
        """
        if self.tabs.get_n_pages() < 2:
            common.show_error("Diff tabs requires at least two open tabs!")
            return
        esets = []
        for i in range(0, self.tabs.get_n_pages()):
            esets.append(self.tabs.get_nth_page(i))
        d = gtk.Dialog(title="Diff Radios",
                       buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
                                gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL),
                       parent=self)
        choices = []
        for eset in esets:
            choices.append("%s %s (%s)" % (eset.rthread.radio.VENDOR,
                                           eset.rthread.radio.MODEL,
                                           eset.filename))
        # Side A: tab selector plus channel spinner (-1 = whole radio).
        choice_a = miscwidgets.make_choice(choices, False, choices[0])
        choice_a.show()
        chan_a = gtk.SpinButton()
        chan_a.get_adjustment().set_all(1, -1, 999, 1, 10, 0)
        chan_a.show()
        hbox = gtk.HBox(False, 3)
        hbox.pack_start(choice_a, 1, 1, 1)
        hbox.pack_start(chan_a, 0, 0, 0)
        hbox.show()
        d.vbox.pack_start(hbox, 0, 0, 0)
        # Side B: same widgets, defaulting to the second tab.
        choice_b = miscwidgets.make_choice(choices, False, choices[1])
        choice_b.show()
        chan_b = gtk.SpinButton()
        chan_b.get_adjustment().set_all(1, -1, 999, 1, 10, 0)
        chan_b.show()
        hbox = gtk.HBox(False, 3)
        hbox.pack_start(choice_b, 1, 1, 1)
        hbox.pack_start(chan_b, 0, 0, 0)
        hbox.show()
        d.vbox.pack_start(hbox, 0, 0, 0)
        r = d.run()
        sel_a = choice_a.get_active_text()
        sel_chan_a = chan_a.get_value()
        sel_b = choice_b.get_active_text()
        sel_chan_b = chan_b.get_value()
        d.destroy()
        if r == gtk.RESPONSE_CANCEL:
            return
        if sel_a == sel_b:
            common.show_error("Can't diff the same tab!")
            return
        print "Selected %s@%i and %s@%i" % (sel_a, sel_chan_a,
                                            sel_b, sel_chan_b)
        eset_a = esets[choices.index(sel_a)]
        eset_b = esets[choices.index(sel_b)]
        # The per-memory diff is fetched asynchronously on each radio thread,
        # chaining callbacks: get memory A -> get memory B -> show the diff.
        def _show_diff(mem_b, mem_a):
            # Step 3: Show the diff
            diff = common.simple_diff(mem_a, mem_b)
            common.show_diff_blob("Differences", diff)
        def _get_mem_b(mem_a):
            # Step 2: Get memory b
            job = common.RadioJob(_show_diff, "get_raw_memory", int(sel_chan_b))
            job.set_cb_args(mem_a)
            eset_b.rthread.submit(job)
        if sel_chan_a >= 0 and sel_chan_b >= 0:
            # Diff numbered memory
            # Step 1: Get memory a
            job = common.RadioJob(_get_mem_b, "get_raw_memory", int(sel_chan_a))
            eset_a.rthread.submit(job)
        elif isinstance(eset_a.rthread.radio, chirp_common.CloneModeRadio) and\
             isinstance(eset_b.rthread.radio, chirp_common.CloneModeRadio):
            # Diff whole (can do this without a job, since both are clone-mode)
            a = util.hexprint(eset_a.rthread.radio._mmap.get_packed())
            b = util.hexprint(eset_b.rthread.radio._mmap.get_packed())
            common.show_diff_blob("Differences", common.simple_diff(a, b))
        else:
            common.show_error("Cannot diff whole live-mode radios!")
def do_new(self):
eset = editorset.EditorSet(_("Untitled") + ".csv", self)
self._connect_editorset(eset)
eset.prime()
eset.show()
tab = self.tabs.append_page(eset, eset.get_tab_label())
self.tabs.set_current_page(tab)
def _do_manual_select(self, filename):
radiolist = {}
for drv, radio in directory.DRV_TO_RADIO.items():
|
datastax/python-driver | tests/integration/standard/test_udts.py | Python | apache-2.0 | 31,537 | 0.002568 | # Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from collections import namedtuple
from functools import partial
import six
from cassandra import InvalidRequest
from cassandra.cluster import UserTypeDoesNotExist, ExecutionProfile, EXEC_PROFILE_DEFAULT
from cassandra.query import dict_factory
from cassandra.util import OrderedMap
from tests.integration import use_singledc, execute_until_pass, \
BasicSegregatedKeyspaceUnitTestCase, greaterthancass20, lessthancass30, greaterthanorequalcass36, TestCluster
from tests.integration.datatype_utils import update_datatypes, PRIMITIVE_DATATYPES, PRIMITIVE_DATATYPES_KEYS, \
COLLECTION_TYPES, get_sample, get_collection_sample
# Python-side mappings for the nested-collection UDTs created by tests below.
nested_collection_udt = namedtuple('nested_collection_udt', ['m', 't', 'l', 's'])
nested_collection_udt_nested = namedtuple('nested_collection_udt_nested', ['m', 't', 'l', 's', 'u'])
def setup_module():
    # Bring up the single-DC test cluster and refresh the datatype lists once
    # before any test in this module runs.
    use_singledc()
    update_datatypes()
@greaterthancass20
class UDTTests(BasicSegregatedKeyspaceUnitTestCase):
    @property
    def table_name(self):
        # Derive a unique, lower-case table name from the running test method.
        return self._testMethodName.lower()
    def setUp(self):
        # Base class creates the per-test keyspace; then make it the default.
        super(UDTTests, self).setUp()
        self.session.set_keyspace(self.keyspace_name)
    @greaterthanorequalcass36
    def test_non_frozen_udts(self):
        """
        Test to ensure that non frozen udt's work with C* >3.6.
        @since 3.7.0
        @jira_ticket PYTHON-498
        @expected_result Non frozen UDT's are supported
        @test_category data_types, udt
        """
        self.session.execute("USE {0}".format(self.keyspace_name))
        self.session.execute("CREATE TYPE user (state text, has_corn boolean)")
        self.session.execute("CREATE TABLE {0} (a int PRIMARY KEY, b user)".format(self.function_table_name))
        User = namedtuple('user', ('state', 'has_corn'))
        self.cluster.register_user_type(self.keyspace_name, "user", User)
        self.session.execute("INSERT INTO {0} (a, b) VALUES (%s, %s)".format(self.function_table_name), (0, User("Nebraska", True)))
        # Field-level update is only legal on a non-frozen UDT column.
        self.session.execute("UPDATE {0} SET b.has_corn = False where a = 0".format(self.function_table_name))
        result = self.session.execute("SELECT * FROM {0}".format(self.function_table_name))
        self.assertFalse(result[0].b.has_corn)
        # The generated schema CQL must not render the column as frozen.
        table_sql = self.cluster.metadata.keyspaces[self.keyspace_name].tables[self.function_table_name].as_cql_query()
        self.assertNotIn("<frozen>", table_sql)
def test_can_insert_unprepared_registered_udts(self):
"""
Test the | insertion of unprepared, registered UDTs
"""
c = TestCluster()
s = c.connect(self.keyspace_name, wait_for_all_pools=True)
s.execute("CREATE TYPE user (age int, name text)")
s.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)")
Use | r = namedtuple('user', ('age', 'name'))
c.register_user_type(self.keyspace_name, "user", User)
s.execute("INSERT INTO mytable (a, b) VALUES (%s, %s)", (0, User(42, 'bob')))
result = s.execute("SELECT b FROM mytable WHERE a=0")
row = result[0]
self.assertEqual(42, row.b.age)
self.assertEqual('bob', row.b.name)
self.assertTrue(type(row.b) is User)
# use the same UDT name in a different keyspace
s.execute("""
CREATE KEYSPACE udt_test_unprepared_registered2
WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1' }
""")
s.set_keyspace("udt_test_unprepared_registered2")
s.execute("CREATE TYPE user (state text, is_cool boolean)")
s.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)")
User = namedtuple('user', ('state', 'is_cool'))
c.register_user_type("udt_test_unprepared_registered2", "user", User)
s.execute("INSERT INTO mytable (a, b) VALUES (%s, %s)", (0, User('Texas', True)))
result = s.execute("SELECT b FROM mytable WHERE a=0")
row = result[0]
self.assertEqual('Texas', row.b.state)
self.assertEqual(True, row.b.is_cool)
self.assertTrue(type(row.b) is User)
s.execute("DROP KEYSPACE udt_test_unprepared_registered2")
c.shutdown()
    def test_can_register_udt_before_connecting(self):
        """
        Test the registration of UDTs before session creation
        """
        c = TestCluster()
        s = c.connect(wait_for_all_pools=True)
        # First pass: define two keyspaces, each with its own "user" type.
        s.execute("""
            CREATE KEYSPACE udt_test_register_before_connecting
            WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1' }
            """)
        s.set_keyspace("udt_test_register_before_connecting")
        s.execute("CREATE TYPE user (age int, name text)")
        s.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)")
        s.execute("""
            CREATE KEYSPACE udt_test_register_before_connecting2
            WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1' }
            """)
        s.set_keyspace("udt_test_register_before_connecting2")
        s.execute("CREATE TYPE user (state text, is_cool boolean)")
        s.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)")
        # now that types are defined, shutdown and re-create Cluster
        c.shutdown()
        c = TestCluster()
        # Register both mappings BEFORE any connection exists -- this is the
        # behaviour under test.
        User1 = namedtuple('user', ('age', 'name'))
        User2 = namedtuple('user', ('state', 'is_cool'))
        c.register_user_type("udt_test_register_before_connecting", "user", User1)
        c.register_user_type("udt_test_register_before_connecting2", "user", User2)
        s = c.connect(wait_for_all_pools=True)
        s.set_keyspace("udt_test_register_before_connecting")
        s.execute("INSERT INTO mytable (a, b) VALUES (%s, %s)", (0, User1(42, 'bob')))
        result = s.execute("SELECT b FROM mytable WHERE a=0")
        row = result[0]
        self.assertEqual(42, row.b.age)
        self.assertEqual('bob', row.b.name)
        self.assertTrue(type(row.b) is User1)
        # use the same UDT name in a different keyspace
        s.set_keyspace("udt_test_register_before_connecting2")
        s.execute("INSERT INTO mytable (a, b) VALUES (%s, %s)", (0, User2('Texas', True)))
        result = s.execute("SELECT b FROM mytable WHERE a=0")
        row = result[0]
        self.assertEqual('Texas', row.b.state)
        self.assertEqual(True, row.b.is_cool)
        self.assertTrue(type(row.b) is User2)
        s.execute("DROP KEYSPACE udt_test_register_before_connecting")
        s.execute("DROP KEYSPACE udt_test_register_before_connecting2")
        c.shutdown()
def test_can_insert_prepared_unregistered_udts(self):
"""
Test the insertion of prepared, unregistered UDTs
"""
c = TestCluster()
s = c.connect(self.keyspace_name, wait_for_all_pools=True)
s.execute("CREATE TYPE user (age int, name text)")
s.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)")
User = namedtuple('user', ('age', 'name'))
insert = s.prepare("INSERT INTO mytable (a, b) VALUES (?, ?)")
s.execute(insert, (0, User(42, 'bob')))
select = s.prepare("SELECT b FROM mytable WHERE a=?")
result = s.execute(select, (0,))
row = result[0]
self.assertEqual(42, row.b.age)
self.assertEqual('bob', row.b.name)
# use the same UDT name in a different keyspace
s.execute("""
CREATE KEYSPACE udt_test_prepared_unregistered2
WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1' }
|
moonso/vcf_parser | vcf_parser/utils/__init__.py | Python | mit | 481 | 0 | from __future__ import absolute_import
from .check_info import check_info_annotation
from .rank_scores import build_rank_score_dict
from .build_info import (build | _info_string, bu | ild_info_dict)
from .build_compounds import build_compounds_dict
from .build_models import build_models_dict
from .build_vep import (build_vep_string, build_vep_annotation)
from .split_genotype import split_genotype
from .format_variant import format_variant
from .split_variants import split_variants
|
mdcic/ssp | setup.py | Python | gpl-3.0 | 1,270 | 0.002362 | # -*- coding: utf-8 -*-
from setuptools import setup
import sys, os
here = os.path.abspath(os.path.dirname(__file__))

# Read the long description from the DESCRIPTION file that sits next to this
# script; a context manager closes the handle promptly (the original left it
# open).
with open(os.path.join(here, 'DESCRIPTION')) as _description_file:
    DESCRIPTION = _description_file.read()

version = '0.0.1'

setup(name='ssp',
      version=version,
      description="System Service Processor communication library",
      long_description=DESCRIPTION,
      classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        'Operating System :: POSIX :: Linux',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Topic :: System :: Systems Administration',
        ],
      author='Yury Konovalov',
      author_email='YKonovalov@gmail.com',
      url='https://github.com/mdcic/ssp',
      license='GPLv3+',
      packages=[
          'ssp', 'ssp.remote', 'ssp.chassis',
          'ssp.chassis.common', 'ssp.chassis.dell',
          'ssp.chassis.ibm', 'ssp.chassis.wbem',
          'ssp.chassis.ipmi'
      ],
      scripts=['tools/ssp-chassis-scanner'],
      include_package_data=True,
      install_requires=[
          'pywbem',
          'paramiko'
      ]
      )
|
jhogan/boog | boog.py | Python | unlicense | 23,069 | 0.013308 | #! /bin/python
# jessehogandeliamariahogan
#vim: set ts=4 sw=4 et
import copy
import os
import curses
import time
import sys
false=False
true=True
class char:
    """One board cell: a letter together with its (x, y) grid position."""

    def __init__(self, x, y, char):
        self.x = x
        self.y = y
        self.letter = char

    def str(self):
        """Render the cell as "x,y letter" for debugging output."""
        return "{0},{1} {2}".format(self.x, self.y, self.letter)
class board:
    """A 5x5 grid of cells, addressed as self.chars[y][x]."""

    def __init__(self):
        # Create a 5x5 matrix to hold char objects; every slot starts
        # empty (None).  (An unreachable `if false:` loading stub was
        # removed from the original.)
        self.chars = [None] * 5
        for i in range(5):
            self.chars[i] = [None] * 5

    def isvalid(self):
        """True when every cell is filled and every letter is lowercase,
        'Q' being the single allowed uppercase letter (it spells "qu")."""
        for x in range(5):
            for y in range(5):
                c = self.getchar(x, y)
                if c is None or (c.letter.isupper() and c.letter != 'Q'):
                    return False
        return True

    def getchar(self, x, y):
        """Return the cell at column x, row y."""
        return self.chars[y][x]

    def setchar(self, x, y, c):
        """Place letter c at column x, row y, wrapping it in a char cell."""
        self.chars[y][x] = char(x, y, c)

    def str(self):
        # NOTE(review): this references self.words, which board never
        # defines -- calling it raises AttributeError.  It looks like a
        # copy-paste of words.str(); kept as-is to preserve behaviour.
        r = ''
        for w in self.words:
            r += "%s\n" % w.str()
        return r
class word:
    """An ordered sequence of board cells spelling one candidate word."""

    def __init__(self):
        self.word = []

    def str(self):
        """Return the spelled-out string; a 'Q' cell expands to "qu"."""
        return "".join('qu' if c.letter == 'Q' else c.letter
                       for c in self.word)

    def append(self, c):
        """Add one cell to the end of the word."""
        self.word.append(c)

    def contains(self, char):
        """True if this exact cell object (identity, not value) is used."""
        return any(c is char for c in self.word)

    def pop(self):
        """Drop the most recently appended cell."""
        self.word.pop()

    def len(self):
        """Number of cells (not rendered characters) in the word."""
        return len(self.word)

    def graph(self, board):
        """Render the 5x5 board with this word's cells uppercased.

        The outer loop runs over x, so rows of the output correspond to
        board columns (matches the original rendering).
        """
        rows = []
        for x in range(5):
            line = ""
            for y in range(5):
                c = board.getchar(x, y)
                used = any(c.x == c0.x and c.y == c0.y for c0 in self.word)
                line += c.letter.upper() if used else c.letter.lower()
            rows.append(line + "\n")
        return "".join(rows)
class words:
    """A collection of found word objects with duplicate suppression and an
    observer hook.

    NOTE(review): ``onappend`` is never defined here; callers must assign a
    callable to it before using append()/uniqappend().
    """

    def __init__(self):
        self.words = []

    def clear(self):
        """Forget all collected words."""
        self.words = []

    def append(self, word):
        """Add *word* unconditionally and notify the onappend observer."""
        self.words.append(word)
        self.raiseonappend(word)

    def raiseonappend(self, word):
        # Indirection point so subclasses can override event dispatch.
        self.onappend(word)

    def uniqappend(self, word):
        """Add *word* only if an equal spelling is not already stored."""
        if not self.contains(word):
            self.append(word)

    def contains(self, word):
        """True if a word with the same rendered string is already stored."""
        for w in self.words:
            if word.str() == w.str():
                return True
        return False

    def str(self):
        """One rendered word per line."""
        r = ''
        for w in self.words:
            r += "%s\n" % w.str()
        return r

    def graph(self, board):
        """Concatenated board renderings, one per stored word."""
        r = ''
        for w in self.words:
            r += "%s\n\n\n\n" % w.graph(board)
        return r

    def sort(self):
        # Longest words first.  sorted() is O(n log n), replacing the old
        # O(n^2) selection sort; the stable ascending sort followed by a
        # reverse reproduces the old ordering exactly (equal-length words
        # end up in reverse insertion order, as before).
        self.words = sorted(self.words, key=lambda w: w.len())[::-1]
class finger:
    """Cursor that walks the board cell-by-cell, accumulating the current
    candidate word and enforcing the no-cell-reuse rule."""
    def __init__(self, board):
        self.b=board
        self.word=word()
        self.reset()
    def raiseonboardupd(self):
        # Fire the externally-assigned onboardupd callback (observer hook);
        # assumes the caller set self.onboardupd beforehand.
        self.onboardupd(self)
    def reset(self):
        # Forget the current start cell so iteration restarts at (0,0).
        self.startchar=None
    def nextstart(self):
        # Advance the start cell left-to-right, top-to-bottom and begin a
        # fresh one-letter word there; returns false once all 25 start
        # positions have been used.
        if self.startchar == None:
            self.startchar = self.b.getchar(0,0)
        else:
            x=self.startchar.x
            y=self.startchar.y
            if x < 4:
                x += 1
            elif y < 4:
                x = 0
                y += 1
            else:
                return false # we would be at the end
            self.startchar = self.b.getchar(x,y)
        self.x=self.startchar.x
        self.y=self.startchar.y
        self.word=word()
        self.word.append(self.b.getchar(self.x, self.y))
        return true
    def mv(self, direction):
        # Try to step one cell in `direction`: 'u', 'd', 'l', 'r' or a
        # two-letter diagonal such as 'dl'.  On success the new cell is
        # appended to the word; stepping off the board or onto a cell
        # already used in this word is rejected and the position restored.
        xincr=0
        yincr=0
        d0=direction[0]
        if len(direction) == 2:
            if direction[1] == 'l':
                xincr=-1
            else:
                xincr=1 # assume 'r'
        if d0 == 'u':
            yincr=-1
        elif d0 == 'd':
            yincr=1
        elif d0 == 'l':
            xincr=-1
        elif d0 == 'r':
            xincr=1
        prevchar = self.b.getchar(self.x, self.y)
        self.x = self.x + xincr
        self.y = self.y + yincr
        if self.x < 0 or self.y < 0 or self.x > 4 or self.y > 4:
            # fell off the board edge: undo the move
            self.x=prevchar.x
            self.y=prevchar.y
            return false
        char = self.b.getchar(self.x, self.y)
        if self.word.contains(char):
            # cell already used in this word: undo the move
            self.x=prevchar.x
            self.y=prevchar.y
            return False
        self.word.append(char)
        return true
    def curchar(self):
        # Cell currently under the finger.
        return self.b.getchar(self.x, self.y)
    def revert(self):
        # Backtrack one step: drop the last cell and move the finger to the
        # new tail (or to no position at all if the word becomes empty).
        self.word.word.pop()
        if len(self.word.word) > 0:
            c = self.word.word[-1]
            self.x = c.x
            self.y = c.y
        else:
            self.x = None
            self.y = None
    def strword(self):
        # Render the current word as a string, expanding 'Q' to "qu".
        r=""
        for i in range(self.word.len()):
            l=self.word.word[i].letter
            if l == 'Q':
                l='qu'
            r += l
        return r
    def str(self):
        # Render the whole board, uppercasing the cells of the current word.
        r=""
        for y in range(5):
            for x in range(5):
                char = self.b.getchar(x,y)
                letter = char.letter
                for c in self.word.word:
                    if c is char:
                        letter = letter.upper()
                r += letter + ' '
            r += "\n"
        return r
class boogler:
    """Search driver: depth-first search with backtracking over the board,
    collecting dictionary words longer than three characters."""
    def __init__(self, dict, board):
        self.words=words()
        self.dict = dict
        self.b = board
        self.f = finger(self.b)
        self.depth = 0
    def find(self):
        # Run a fresh search from every start cell; results accumulate in
        # self.words.
        self.words.clear()
        self.f.reset()
        while self.f.nextstart():
            self.find_()
    def find_(self):
        # Recursive DFS step; prunes whenever the current prefix cannot
        # start any dictionary word.
        self.depth +=1
        if self.dict.startswith(self.f.strword()):
            for d in ('d', 'u', 'l', 'r', 'dl', 'dr', 'ul', 'ur'):
                if self.f.mv(d):
                    strword = self.f.strword()
                    if len(strword) > 3:
                        # record only words longer than three characters
                        if self.dict.find(strword):
                            self.words.uniqappend(copy.deepcopy(self.f.word))
                    self.find_()
                    self.f.revert()
            self.depth -=1
    def str(self):
        # All found words, one per line.
        return self.words.str()
    def graph(self):
        # Board renderings highlighting each found word.
        return self.words.graph(self.b)
class dict:
def __init__(self, file):
self.d={}
self.l=[]
f = open(file)
try:
for w in f:
if w[0].islower():
self.d[w.rstrip()] = ''
self.l.append(w.rstrip())
self.l.sort()
finally:
f.close()
def find(self, k):
return (k in self.d)
def len(self):
return len(self.d)
def startswith(self, str):
hi=len(self.l)
lo=0
while lo < hi:
mid = (lo+hi)//2
word=self.l[mid]
if word.startswith(str):
return true
elif str < word:
hi = mid
else:
|
ikn/wearhouse | game/engine/gfx/graphic.py | Python | gpl-3.0 | 51,464 | 0.002856 | """Basic graphic representing an image.
---NODOC---
TODO:
- use subsurface for crop transform (but requires rect to be within surface)
- GraphicView probably doesn't work if in different manager - need to have own _dirty?
- something that wraps a Graphic to be a copy of it, like Animation does, and has .graphic setter
- use in Animation, etc.
---NODOC---
"""
from math import sin, cos, pi
import pygame as pg
from pygame import Rect
from ..conf import conf
from ..util import (ir, pos_in_rect, align_rect, normalise_colour, has_alpha,
blank_sfc, combine_drawn)
class Graphic (object):
"""Something that can be drawn to the screen.
Graphic(img, pos=(0, 0), layer=0, pool=conf.DEFAULT_RESOURCE_POOL,
res_mgr=conf.GAME.resources)
:arg img: surface or filename (under :data:`conf.IMG_DIR`) to load. If a
surface, it should be already converted for blitting.
:arg pos: initial ``(x, y)`` position. The existence of a default is because
you might use :meth:`align` immediately on adding to a
:class:`GraphicsManager <engine.gfx.container.GraphicsManager>`.
:arg layer: the layer to draw in, lower being closer to the 'front'. This can
actually be any hashable object except ``None``, as long as all
layers used in the same
:class:`GraphicsManager <engine.gfx.container.GraphicsManager>` can
be ordered with respect to each other.
:arg pool: :class:`ResourceManager <engine.res.ResourceManager>` resource pool
name to cache any loaded images in.
:arg res_mgr: :class:`ResourceManager <engine.res.ResourceManager>` instance to
use to load any images.
Many properties of a graphic, such as :attr:`pos` and :attr:`size`, can be
changed in two main ways: by setting the attribute directly, or by calling the
corresponding method. The former is more natural, and is useful for
:meth:`sched.Scheduler.interp() <engine.sched.Scheduler.interp>`, while the
latter all return the graphic, and so can be chained together.
Position and size can also be retrieved and altered using list indexing, like
with Pygame rects. Altering size in any way applies the :meth:`resize`
transformation.
:meth:`resize`, :meth:`crop`, :meth:`flip`, :meth:`opacify` and :meth:`rotate`
correspond to builtin transforms (see :meth:`transform`).
"""
is_view = False
_builtin_transforms = ('crop', 'flip', 'tint', 'resize', 'rotate')
    def __init__ (self, img, pos=(0, 0), layer=0,
                  pool=conf.DEFAULT_RESOURCE_POOL, res_mgr=None):
        """Initialise resource, rect, transform and draw-state bookkeeping."""
        self._resource_pool = pool
        self._resource_manager = res_mgr
        # A string argument is treated as a filename to load through the
        # resource manager; anything else is assumed to be a ready surface.
        if isinstance(img, basestring):
            #: Filename of the loaded image, or ``None`` if a surface was
            #: given.
            self.fn = img
            img = self._load_img(img)
        else:
            self.fn = None
        self._orig_sfc = self._surface = img
        # postrot is the rect drawn in
        self._postrot_rect = self._rect = Rect(pos, img.get_size())
        self._last_postrot_rect = Rect(self._postrot_rect)
        #: :attr:`rect` at the time of the last draw.
        self.last_rect = Rect(self._rect)
        self._anchor = (0, 0)
        self._rot_anchor = 'center'
        self._rot_offset = (0, 0) # postrot_pos = pos + rot_offset
        self._must_apply_rot = False
        #: A list of transformations applied to the graphic. Always contains
        #: the builtin transforms as strings (though they do nothing
        #: by default); other transforms are added through :meth:`transform`,
        #: and are functions.
        self.transforms = list(self._builtin_transforms)
        self._last_transforms = list(self.transforms)
        # Applied transforms:
        # {function: (args, previous_surface, resulting_surface, apply_fn,
        #  undo_fn)} -- last 2 None for non-builtins
        self._transforms = {}
        # Pending transforms, applied lazily on next render:
        # {function: (args, previous_size, resulting_size, apply_fn, undo_fn)}
        # -- last 4 None for non-builtins
        self._queued_transforms = {}
        #: Whether the graphic is completely opaque; do not change.
        self.opaque = not has_alpha(img)
        self._manager = None
        self._mgr_requires = False
        self._layer = layer
        #: When blitting the surface, this is passed as the ``special_flags``
        #: argument.
        self._last_blit_flags = self.blit_flags = 0
        #: Whether currently (supposed to be) visible on-screen.
        self.visible = True
        #: Whether this graphic was visible at the time of the last draw; do
        #: not change.
        self.was_visible = False
        # Defaults for the builtin transforms (no scale/crop/flip/tint/rotate).
        self._scale = (1, 1)
        self._cropped_rect = None
        self._flipped = (False, False)
        self._tint_colour = (255, 255, 255, 255)
        self._angle = 0
        self._scale_fn = pg.transform.smoothscale
        # rotozoom takes degrees; angles here are stored in radians.
        self._rotate_fn = lambda sfc, angle: \
            pg.transform.rotozoom(sfc, angle * 180 / pi, 1)
        self._rotate_threshold = 2 * pi / 500
        self._orig_dirty = False # where original surface is changed
        # where final surface is changed; gets used (and reset) by manager
        self._dirty = []
def __getitem__ (self, i):
if isinstance(i, slice):
# Rect is weird and only accepts slices through slice syntax
# this is the easiest way around it (and slicing doesn't work with
# Python 3 anyway)
r = self._rect
return [r[i] for i in range(4)[i]]
else:
return self._rect[i]
def __setitem__ (self, i, v):
r = Rect(self._rect)
if isinstance(i, slice):
for v_i, r_i in enumerate(range(4)[i]):
r[r_i] = v[v_i]
else:
r[i] = v
self.rect = r
    @property
    def orig_sfc (self):
        """The surface before any transforms.

        When setting this, the surface should be already converted for
        blitting.
        """
        return self._orig_sfc
    @orig_sfc.setter
    def orig_sfc (self, sfc):
        # Swap in the new source surface; if its size differs from the old
        # one, notify size-dependent state via size_changed() before
        # flagging the original surface as dirty.
        size = sfc.get_size()
        old_sfc = self._orig_sfc
        self._orig_sfc = sfc
        if size != old_sfc.get_size():
            self.size_changed(size)
        self._orig_dirty = True
    @property
    def surface (self):
        """The (possibly transformed) surface that will be used for drawing.

        Accessing this will cause all queued transformations to be applied.
        """
        self.render()
        return self._surface
# appearance properties
@property
def rect (self):
"""``pygame.Rect`` giving the on-screen area covered.
May be set directly, but not altered in-place.
This is actually the rect before rotation, which is probably what you want,
really. To get the real rect, use :attr:`postrot_rect`.
"""
return self._rect
@rect.setter
def rect (self, rect):
# need to set dirty in old and new rects (if changed)
rect = Rect(rect)
old_rect = self._rect
self._rect = Rect(rect.topleft, self._rect.size)
if rect.size != old_rect.size:
| self.resize(*rect.size)
@property
def x (self):
"""``x`` co-ordinate of the top-left corner of :attr:`rect`."""
return self._rect[0]
@x.setter
def x (self, x):
r = Rect(self._rect)
r[0] = x
self.rect = r
@property
def y (self):
"""``y`` co-ordinate of the top-left corner of :attr:`rect`."""
return self._rect[1]
@y.setter
def y (self, y):
r = Rect(sel | f._rect)
r[1] = y
self.rect = r
    @property
    def pos (self):
        """``(``:attr:`x` ``,`` :attr:`y` ``)``."""
        return self._rect.topleft
    @pos.setter
    def pos (self, pos):
        # Keep the current size; the rect property handles the move.
        self.rect = (pos, self._rect.size)
    @property
    def w (self):
        """Width of :attr:`rect`; uses :meth:`resize`."""
        return self._rect[2]
    @w.setter
    def w (self, w):
        # Route through the rect property, which applies the resize
        # transform when the size changes.
        r = Rect(self._rect)
        r[2] = w
        self.rect = r
@property
def h (self):
"""Height of :attr:`rect`; uses :meth:`resize`."""
return self._rect[3]
@h.setter
def h ( |
sniemi/SamPy | sandbox/src1/TCSE3-3rd-examples/src/py/examples/simviz/simviz1cpGUI_unit.py | Python | bsd-2-clause | 1,881 | 0.006911 | #!/usr/bin/env python
"""
As simviz1cpGUI.py, but the parameters have dimensions.
Only the import statement for SimViz differs from simviz1cpGUI.py.
"""
import sys, os, math
from scitools.ParameterInterface import Parameters, AutoSimVizGUI
from simviz1cp_unit import SimViz
class SimVizGUI(SimViz):
    """GUI front-end for SimViz: builds the parameter widgets and the
    Simulate/Visualize buttons inside the given Tk parent widget."""

    def __init__(self, parent, layout='sort'):
        self.cwd = os.getcwd()
        self.p = Parameters(interface='GUI')
        self.master = parent
        self.initialize()
        self.GUI = AutoSimVizGUI()
        if layout == 'sort':
            # widgets sorted in columns:
            self.GUI.make_prmGUI(self.master, self.p,
                                 sort_widgets=1,
                                 height=300, pane=1)
        else:
            # only one column of input parameters:
            self.GUI.make_prmGUI(self.master, self.p,
                                 sort_widgets=0,
                                 height=300, pane=0)

        help = """\
Simulate: run oscillator code for solving the
differential equation for the spring system.

Visualize: run Gnuplot to make plots in PNG and PostScript
format and on the screen (optional). Plots are stored
in the subdirectory with name equal to 'case'.
"""
        # NOTE(review): the help text above is currently unused -- the
        # button bar is built with help=None (see the commented argument).
        self.GUI.make_buttonGUI(self.master,
                                buttons=[('Simulate', self.simulate),
                                         ('Visualize', self.visualize)],
                                logo=os.path.join(os.environ['scripting'],
                                    'src','misc','figs','simviz2.xfig.t.gif'),
                                help=None)
                                # help=help)
if __name__ == '__main__':
    from Tkinter import *
    import Pmw
    root = Tk()
    Pmw.initialise(root)
    import scitools.misc; scitools.misc.fontscheme2(root)
    root.title('Oscillator GUI')
    # Optional command-line argument selects the widget layout; default to
    # the single-column layout when no argument is given.  (The original
    # bare `except:` also hid unrelated errors.)
    try:
        layout = sys.argv[1]
    except IndexError:
        layout = 'nosort'
    widget = SimVizGUI(root, layout)
    root.mainloop()
|
Carryzhou/MyPython- | dir_walk1.py | Python | gpl-3.0 | 388 | 0.012887 | #!/ | usr/bin/python
#use recursion to read a dir
import os
def dir_walk(path):
    """Recursively print the path of every non-directory entry under *path*.

    Entries are visited depth-first in os.listdir() order.  Uses the
    parenthesised print form so the code runs under both Python 2 and 3
    (the original print statement was Python-2-only).
    """
    li = os.listdir(path)
    for p in li:
        pathname = os.path.join(path, p)
        if not os.path.isfile(pathname):
            # not a regular file: descend into it
            dir_walk(pathname)
        else:
            print(pathname)
def test(path):
    # Thin driver used by the __main__ block below; simply delegates to the
    # recursive walker.
    dir_walk(path)
if __name__ == "__main__":
    # Demo entry point: walk a hard-coded directory.  (The module-name
    # string was corrupted to "__main | __" in the original.)
    path = "/home/zhoulihua/kk"
    test(path)
|
unnikrishnankgs/va | venv/lib/python3.5/site-packages/html5lib/treewalkers/lxmletree.py | Python | bsd-2-clause | 5,980 | 0.000836 | from __future__ import absolute_import, division, unicode_literals
from six import text_type
from lxml import etree
from ..treebuilders.etree import tag_regexp
from . import _base
from .. import ihatexml
def ensure_str(s):
    """Coerce *s* to text: pass None and text strings through unchanged,
    and strictly UTF-8-decode byte strings."""
    if s is None or isinstance(s, text_type):
        return s
    return s.decode("utf-8", "strict")
class Root(object):
    """Tree-walker wrapper for a whole lxml ElementTree: exposes a doctype
    node (when an internal DTD exists) plus all top-level siblings of the
    root element as its children."""
    def __init__(self, et):
        self.elementtree = et
        self.children = []
        # Synthesise a Doctype child from the document's internal DTD, if any.
        if et.docinfo.internalDTD:
            self.children.append(Doctype(self,
                                         ensure_str(et.docinfo.root_name),
                                         ensure_str(et.docinfo.public_id),
                                         ensure_str(et.docinfo.system_url)))
        root = et.getroot()
        node = root
        # Rewind to the first top-level node (comments/PIs may precede the
        # root element), then collect every top-level sibling in order.
        while node.getprevious() is not None:
            node = node.getprevious()
        while node is not None:
            self.children.append(node)
            node = node.getnext()
        # The document node itself carries no character data.
        self.text = None
        self.tail = None
    def __getitem__(self, key):
        return self.children[key]
    def getnext(self):
        # The document node has no siblings.
        return None
    def __len__(self):
        return 1
class Doctype(object):
    """Synthetic node representing a document's DOCTYPE declaration."""

    def __init__(self, root_node, name, public_id, system_id):
        self.root_node = root_node
        self.name = name
        self.public_id = public_id
        self.system_id = system_id
        # Doctype nodes never carry character data.
        self.text = self.tail = None

    def getnext(self):
        """The doctype is always child 0 of the root, so its successor is
        the root's second child."""
        return self.root_node.children[1]
class FragmentRoot(Root):
    """Synthetic root wrapping a list of fragment nodes."""

    def __init__(self, children):
        # Deliberately does not call Root.__init__: a fragment has no
        # backing ElementTree or doctype.  Each child is wrapped so it can
        # report this object as its root.
        self.children = []
        for child in children:
            self.children.append(FragmentWrapper(self, child))
        self.text = self.tail = None

    def getnext(self):
        return None
class FragmentWrapper(object):
    """Adapter giving a raw fragment node the sibling-navigation interface
    the tree walker expects, delegating everything else to the wrapped
    object.  (The original's ``__len__`` name was corrupted.)"""
    def __init__(self, fragment_root, obj):
        self.root_node = fragment_root
        self.obj = obj
        # Cache decoded text/tail when the wrapped object provides them.
        if hasattr(self.obj, 'text'):
            self.text = ensure_str(self.obj.text)
        else:
            self.text = None
        if hasattr(self.obj, 'tail'):
            self.tail = ensure_str(self.obj.tail)
        else:
            self.tail = None

    def __getattr__(self, name):
        # Delegate any other attribute access to the wrapped object.
        return getattr(self.obj, name)

    def getnext(self):
        # Successor among the fragment root's children, or None at the end.
        siblings = self.root_node.children
        idx = siblings.index(self)
        if idx < len(siblings) - 1:
            return siblings[idx + 1]
        else:
            return None

    def __getitem__(self, key):
        return self.obj[key]

    def __bool__(self):
        return bool(self.obj)

    def getparent(self):
        return None

    def __str__(self):
        return str(self.obj)

    def __unicode__(self):
        return str(self.obj)

    def __len__(self):
        return len(self.obj)
class TreeWalker(_base.NonRecursiveTreeWalker):
    """Non-recursive html5lib tree walker over lxml trees.

    Text is represented as ``(element, "text"|"tail")`` tuples so that an
    element's text and tail can be walked as separate nodes.  (The original
    ``getNodeDetails`` method name was corrupted.)
    """
    def __init__(self, tree):
        # Normalise the input: a full ElementTree gets a Root wrapper, a
        # list of nodes becomes a FragmentRoot.
        if hasattr(tree, "getroot"):
            tree = Root(tree)
        elif isinstance(tree, list):
            tree = FragmentRoot(tree)
        _base.NonRecursiveTreeWalker.__init__(self, tree)
        self.filter = ihatexml.InfosetFilter()

    def getNodeDetails(self, node):
        """Classify *node* and return the token tuple html5lib expects."""
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            return _base.TEXT, ensure_str(getattr(node, key))

        elif isinstance(node, Root):
            return (_base.DOCUMENT,)

        elif isinstance(node, Doctype):
            return _base.DOCTYPE, node.name, node.public_id, node.system_id

        elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"):
            # A wrapped bare string at fragment top level.
            return _base.TEXT, node.obj

        elif node.tag == etree.Comment:
            return _base.COMMENT, ensure_str(node.text)

        elif node.tag == etree.Entity:
            return _base.ENTITY, ensure_str(node.text)[1:-1]  # strip &;

        else:
            # This is assumed to be an ordinary element.
            match = tag_regexp.match(ensure_str(node.tag))
            if match:
                namespace, tag = match.groups()
            else:
                namespace = None
                tag = ensure_str(node.tag)
            # Split Clark-notation "{ns}name" attributes into (ns, name)
            # pairs as html5lib expects.
            attrs = {}
            for name, value in list(node.attrib.items()):
                name = ensure_str(name)
                value = ensure_str(value)
                match = tag_regexp.match(name)
                if match:
                    attrs[(match.group(1), match.group(2))] = value
                else:
                    attrs[(None, name)] = value
            return (_base.ELEMENT, namespace, self.filter.fromXmlName(tag),
                    attrs, len(node) > 0 or node.text)

    def getFirstChild(self, node):
        assert not isinstance(node, tuple), "Text nodes have no children"
        assert len(node) or node.text, "Node has no children"
        # An element's leading text is walked before its first child element.
        if node.text:
            return (node, "text")
        else:
            return node[0]

    def getNextSibling(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            if key == "text":
                # XXX: we cannot use a "bool(node) and node[0] or None" construct here
                # because node[0] might evaluate to False if it has no child element
                if len(node):
                    return node[0]
                else:
                    return None
            else:  # tail
                return node.getnext()
        # For an element, its tail text (if any) is its next "sibling".
        return (node, "tail") if node.tail else node.getnext()

    def getParentNode(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            if key == "text":
                return node
            # else: fallback to "normal" processing
        return node.getparent()
|
Stargrazer82301/CAAPR | CAAPR/CAAPR_AstroMagic/PTS/pts/core/tools/serialization.py | Python | mit | 1,823 | 0.004391 | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.tools.filesystem Provides useful functions for manipulating the local file system.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
import json
import pickle
# -----------------------------------------------------------------
def to_json(object):
    """Serialize *object* to a pretty-printed JSON string.

    Values that are not natively JSON-serializable are encoded via their
    ``__dict__``, so plain class instances serialize as their attribute
    mapping.  Keys are sorted for deterministic output.

    :param object: the object to serialize
    :return: the JSON text
    """
    # (The original's ``indent=4`` keyword was corrupted, and its docstring
    # documented a nonexistent ``self`` parameter.)
    return json.dumps(object, default=lambda o: o.__dict__, sort_keys=True, indent=4)
# -----------------------------------------------------------------
def dump(object, path, method="pickle", protocol=None):
    """Serialize *object* to the file at *path*.

    :param object: the object to serialize
    :param path: destination file path
    :param method: "pickle" (binary) or "json" (text)
    :param protocol: pickle protocol number (pickle method only)
    :raises ValueError: if *method* is not a valid serialization method
    """
    # Serialize using pickle
    if method == "pickle":
        # Use a context manager so the handle is closed even on error (the
        # original leaked the file object it opened inline).
        with open(path, 'wb') as out_file:
            pickle.dump(object, out_file, protocol=protocol)

    # Serialize to the json format
    elif method == "json":
        with open(path, 'w') as out_file:
            json.dump(object, out_file, default=lambda o: o.__dict__, sort_keys=True, indent=4)

    # Not a valid serialization method
    else: raise ValueError("Not a valid method")
# -----------------------------------------------------------------
def load(path):
    """Deserialize and return the pickled object stored at *path*.

    :param path: path of a file previously written by :func:`dump` with
        the "pickle" method
    :return: the unpickled object
    """
    # Pickle data is binary: open in 'rb' (the original's text-mode 'r'
    # fails under Python 3) and close the handle via a context manager.
    with open(path, 'rb') as in_file:
        return pickle.load(in_file)
|
infinyte/oppia | core/controllers/reader.py | Python | apache-2.0 | 18,063 | 0.000111 | # Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the Oppia learner view."""
__author__ = 'Sean Lip'
import copy
import logging
from core.controllers import base
from core.controllers import pages
from core.domain import config_domain
from core.domain import dependency_registry
from core.domain import event_services
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import fs_domain
from core.domain import interaction_registry
from core.domain import param_domain
from core.domain import rating_services
from core.domain import rights_manager
from core.domain import rte_component_registry
from core.domain import rule_domain
from core.domain import skins_services
import feconf
import jinja_utils
import utils
import jinja2
# Admin-configurable flags controlling which social-sharing buttons
# (Google+, Facebook, Twitter) are shown in the learner view; all are
# disabled by default.
SHARING_OPTIONS = config_domain.ConfigProperty(
    'sharing_options', {
        'type': 'dict',
        'properties': [{
            'name': 'gplus',
            'schema': {
                'type': 'bool',
            }
        }, {
            'name': 'facebook',
            'schema': {
                'type': 'bool',
            }
        }, {
            'name': 'twitter',
            'schema': {
                'type': 'bool',
            }
        }]
    },
    'Sharing options to display in the learner view',
    default_value={
        'gplus': False,
        'facebook': False,
        'twitter': False,
    })
# Admin-configurable default message used when sharing a lesson on Twitter.
SHARING_OPTIONS_TWITTER_TEXT = config_domain.ConfigProperty(
    'sharing_options_twitter_text', {
        'type': 'unicode',
    },
    'Default text for the Twitter share message',
    default_value=(
        'Check out this interactive lesson from Oppia - a free, open-source '
        'learning platform!'))
def require_playable(handler):
    """Decorator that checks if the user can play the given exploration."""
    def test_can_play(self, exploration_id, **kwargs):
        """Raise a 404 unless the current user may play this exploration."""
        # Guard clause: reject unauthorized users before touching the
        # wrapped handler.
        if not rights_manager.Actor(self.user_id).can_play(exploration_id):
            raise self.PageNotFoundException
        return handler(self, exploration_id, **kwargs)

    return test_can_play
def _get_updated_param_dict(param_dict, param_changes, exp_param_specs):
"""Updates a param dict using the given list of param_changes.
Note that the list of parameter changes is ordered. Parameter
changes later in the list may depend on parameter changes that have
been set earlier in the same list.
"""
new_param_dict = copy.deepcopy(param_dict)
for pc in param_changes:
try:
obj_type = exp_param_specs[pc.name].obj_type
except:
raise Exception('Parameter %s not found' % pc.name)
new_param_dict[pc.name] = pc.get_normalized_value(
obj_type, new_param_dict)
return new_param_dict
def classify(
        exp_id, exp_param_specs, state, handler_name, answer, params):
    """Normalize the answer and return the first rulespec that it satisfies.

    Raises an Exception when no rule in the named handler matches the
    normalized answer.
    """
    interaction_instance = interaction_registry.Registry.get_interaction_by_id(
        state.interaction.id)
    # Normalize the raw answer into the handler's expected object type.
    normalized_answer = interaction_instance.normalize_answer(
        answer, handler_name)
    # Locate the handler on the state's interaction by name.
    handler = next(
        h for h in state.interaction.handlers if h.name == handler_name)
    fs = fs_domain.AbstractFileSystem(fs_domain.ExplorationFileSystem(exp_id))
    input_type = interaction_instance.get_handler_by_name(
        handler_name).obj_type
    # Rules are evaluated in declaration order; the first match wins.
    for rule_spec in handler.rule_specs:
        if rule_domain.evaluate_rule(
                rule_spec.definition, exp_param_specs, input_type, params,
                normalized_answer, fs):
            return rule_spec
    raise Exception(
        'No matching rule found for handler %s. Rule specs are %s.' % (
            handler.name,
            [rule_spec.to_dict() for rule_spec in handler.rule_specs]
        )
    )
class ExplorationPage(base.BaseHandler):
"""Page describing a single exploration."""
PAGE_NAME_FOR_CSRF = 'player'
def _make_first_letter_uppercase(self, s):
"""Converts the first letter of a string to its uppercase equivalent,
and returns the result.
"""
# This guards against empty strings.
if s:
return s[0].upper() + s[1:]
else:
return s
@require_playable
def get(self, exploration_id):
"""Handles GET requests."""
version = self.request.get('v')
if not version:
# The default value for a missing parameter seems to be ''.
version = None
else:
version = int(version)
try:
exploration = exp_services.get_exploration_by_id(
exploration_id, version=version)
except Exception as e:
raise self.PageNotFoundException(e)
version = exploration.version
if not rights_manager.Actor(self.user_id).can_view(exploration_id):
raise self.PageNotFoundException
is_iframed = (self.request.get('iframed') == 'true')
# TODO(sll): Cache these computations.
interaction_ids = exploration.get_interaction_ids()
dependency_ids = (
interaction_registry.Registry.get_deduplicated_dependency_ids(
interaction_ids))
dependencies_html, additional_angular_modules = (
dependency_registry.Registry.get_deps_html_and_angular_modules(
dependency_ids))
interaction_templates = (
rte_component_registry.Registry.get_html_for_all_components() +
interaction_registry.Registry.get_interaction_html(
interaction_ids))
self.values.update({
| 'INTERACTION_SPECS': interaction_registry.Registry.get_all_specs(),
'SHARING_OPTIONS': | SHARING_OPTIONS.value,
'SHARING_OPTIONS_TWITTER_TEXT': SHARING_OPTIONS_TWITTER_TEXT.value,
'additional_angular_modules': additional_angular_modules,
'can_edit': (
bool(self.username) and
self.username not in config_domain.BANNED_USERNAMES.value and
rights_manager.Actor(self.user_id).can_edit(exploration_id)
),
'dependencies_html': jinja2.utils.Markup(
dependencies_html),
'exploration_title': exploration.title,
'exploration_version': version,
'iframed': is_iframed,
'interaction_templates': jinja2.utils.Markup(
interaction_templates),
'is_private': rights_manager.is_exploration_private(
exploration_id),
# Note that this overwrites the value in base.py.
'meta_name': exploration.title,
# Note that this overwrites the value in base.py.
'meta_description': self._make_first_letter_uppercase(
exploration.objective),
'nav_mode': feconf.NAV_MODE_EXPLORE,
'skin_templates': jinja2.utils.Markup(
skins_services.Registry.get_skin_templates(
[exploration.default_skin])),
'skin_js_url': skins_services.Registry.get_skin_js_url(
exploration.default_skin),
'skin_tag': jinja2.utils.Markup(
skins_services.Registry.get_skin_tag(exploration.default_skin)
),
})
if is_iframed:
self.render_template(
'player/exploration_player.html', iframe_restriction=None)
else:
self.ren |
wesm/arrow | python/pyarrow/json.py | Python | apache-2.0 | 858 | 0 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from pyarrow._json import ReadOptions, ParseOptions, read_json # noqa
|
gluke77/rally | tests/unit/task/test_context.py | Python | apache-2.0 | 9,745 | 0 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import jsonschema
import mock
from rally import exceptions
from rally.task import context
from tests.unit import fakes
from tests.unit import test
@ddt.ddt
class BaseContextTestCase(test.TestCase):
    """Unit tests for the base Context plugin behaviour."""

    # NOTE(review): the first two data cases are identical duplicates;
    # kept as-is to preserve the original test matrix.
    @ddt.data({"config": {"bar": "spam"}, "expected": {"bar": "spam"}},
              {"config": {"bar": "spam"}, "expected": {"bar": "spam"}},
              {"config": {}, "expected": {}},
              {"config": None, "expected": None},
              {"config": 42, "expected": 42},
              {"config": "foo str", "expected": "foo str"},
              {"config": [], "expected": ()},
              {"config": [11, 22, 33], "expected": (11, 22, 33)})
    @ddt.unpack
    def test_init(self, config, expected):
        ctx = {"config": {"foo": 42, "fake": config}, "task": "foo_task"}
        ins = fakes.FakeContext(ctx)
        self.assertEqual(ins.config, expected)
        self.assertEqual(ins.task, "foo_task")
        self.assertEqual(ins.context, ctx)

    def test_init_with_default_config(self):
        @context.configure(name="foo", order=1)
        class FooContext(fakes.FakeContext):
            DEFAULT_CONFIG = {"alpha": "beta", "delta": "gamma"}

        ctx = {"config": {"foo": {"ab": "cd"}, "bar": 42}, "task": "foo_task"}
        ins = FooContext(ctx)
        self.assertEqual({"ab": "cd", "alpha": "beta", "delta": "gamma"},
                         ins.config)

    def test_init_empty_context(self):
        ctx0 = {
            "task": mock.MagicMock(),
            "config": {"fake": {"foo": 42}}
        }
        ctx = fakes.FakeContext(ctx0)
        self.assertEqual(ctx.config, ctx0["config"]["fake"])
        self.assertEqual(ctx.task, ctx0["task"])
        self.assertEqual(ctx.context, ctx0)

    def test_validate__context(self):
        fakes.FakeContext.validate({"test": 2})

    def test_validate__wrong_context(self):
        self.assertRaises(jsonschema.ValidationError,
                          fakes.FakeContext.validate, {"nonexisting": 2})

    def test_validate__hidden(self):
        fakes.FakeHiddenContext.validate({"test": 2})
        self.assertRaises(exceptions.PluginNotFound,
                          fakes.FakeHiddenContext.validate,
                          {"test": 2}, non_hidden=True)

    def test_setup_is_abstract(self):
        @context.configure("test_abstract_setup", 0)
        class A(context.Context):
            def cleanup(self):
                pass

        self.assertRaises(TypeError, A)

    def test_cleanup_is_abstract(self):
        @context.configure("test_abstract_cleanup", 0)
        class A(context.Context):
            def setup(self):
                pass

        self.assertRaises(TypeError, A)

    def test_with_statement(self):
        ctx0 = {
            "task": mock.MagicMock()
        }
        ctx = fakes.FakeContext(ctx0)
        # Repaired: this attribute access was corrupted to 'ct | x.setup'.
        ctx.setup = mock.MagicMock()
        ctx.cleanup = mock.MagicMock()

        with ctx as entered_ctx:
            self.assertEqual(ctx, entered_ctx)

        ctx.cleanup.assert_called_once_with()

    def test_lt(self):
        # Repaired: the method name was corrupted to 'tes | t_lt'.
        @context.configure(name="lt", order=fakes.FakeContext.get_order() - 1)
        class FakeLowerContext(fakes.FakeContext):
            pass

        ctx = mock.MagicMock()
        self.assertTrue(FakeLowerContext(ctx) < fakes.FakeContext(ctx))
        self.assertFalse(fakes.FakeContext(ctx) < FakeLowerContext(ctx))
        self.assertFalse(fakes.FakeContext(ctx) < fakes.FakeContext(ctx))

    def test_gt(self):
        @context.configure(name="f", order=fakes.FakeContext.get_order() + 1)
        class FakeBiggerContext(fakes.FakeContext):
            pass

        ctx = mock.MagicMock()
        self.assertTrue(FakeBiggerContext(ctx) > fakes.FakeContext(ctx))
        self.assertFalse(fakes.FakeContext(ctx) > FakeBiggerContext(ctx))
        self.assertFalse(fakes.FakeContext(ctx) > fakes.FakeContext(ctx))

    def test_eq(self):
        @context.configure(name="fake2",
                           order=fakes.FakeContext.get_order() + 1)
        class FakeOtherContext(fakes.FakeContext):
            pass

        ctx = mock.MagicMock()
        self.assertFalse(FakeOtherContext(ctx) == fakes.FakeContext(ctx))
        self.assertTrue(FakeOtherContext(ctx) == FakeOtherContext(ctx))
class ContextManagerTestCase(test.TestCase):
@mock.patch("rally.task.context.Context.get")
def test_validate(self, mock_context_get):
config = {
"ctx1": mock.MagicMock(),
"ctx2": mock.MagicMock()
}
context.ContextManager.validate(config)
for ctx in ("ctx1", "ctx2"):
mock_context_get.assert_has_calls([
mock.call(ctx),
mock.call().validate(config[ctx], non_hidden=False),
])
@mock.patch("rally.task.context.Context.get")
def test_validate_non_hidden(self, mock_context_get):
config = {
"ctx1": mock.MagicMock(),
"ctx2": mock.MagicMock()
}
context.ContextManager.validate(config, non_hidden=True)
for ctx in ("ctx1", "ctx2"):
mock_context_get.assert_has_calls([
mock.call(ctx),
mock.call().validate(config[ctx], non_hidden=True),
])
def test_validate__non_existing_context(self):
config = {
"nonexisting": {"nonexisting": 2}
}
self.assertRaises(exceptions.PluginNotFound,
context.ContextManager.validate, config)
@mock.patch("rally.task.context.Context.get")
def test_setup(self, mock_context_get):
mock_context = mock.MagicMock()
mock_context.return_value = mock.MagicMock(__lt__=lambda x, y: True)
mock_context_get.return_value = mock_context
ctx_object = {"config": {"a": [], "b": []}}
manager = context.ContextManager(ctx_object)
result = manager.setup()
self.assertEqual(result, ctx_object)
mock_context_get.assert_has_calls(
[mock.call("a"), mock.call("b")], any_order=True)
mock_context.assert_has_calls(
[mock.call(ctx_object), mock.call(ctx_object)], any_order=True)
self.assertEqual([mock_context(), mock_context()], manager._visited)
mock_context.return_value.assert_has_calls(
[mock.call.setup(), mock.call.setup()], any_order=True)
@mock.patch("rally.task.context.Context.get")
def test_cleanup(self, mock_context_get):
mock_context = mock.MagicMock()
mock_context.return_value = mock.MagicMock(__lt__=lambda x, y: True)
mock_context_get.return_value = mock_context
ctx_object = {"config": {"a": [], "b": []}}
manager = context.ContextManager(ctx_object)
manager.cleanup()
mock_context_get.assert_has_calls(
[mock.call("a"), mock.call("b")], any_order=True)
mock_context.assert_has_calls(
[mock.call(ctx_object), mock.call(ctx_object)], any_order=True)
mock_context.return_value.assert_has_calls(
[mock.call.cleanup(), mock.call.cleanup()], any_order=True)
@mock.patch("rally.task.context.Context.get")
def test_cleanup_exception(self, mock_context_get):
mock_context = mock.MagicMock()
mock_context.return_value = mock.MagicMock(__lt__=lambda x, y: True)
mock_context.cleanup.side_effect = Exception()
mock_context_get.return_value = mock_context
ctx_object = {"config": {"a": [], "b": []}}
manager = context.ContextManager(ctx_object) |
rustychris/stompy | setup.py | Python | mit | 603 | 0.001658 | from distutils.core import setup
# Package metadata; repaired two extraction artifacts ('nam | e=' and a
# stray '| ' before 'stompy.plot' in the packages list).
setup(
    name='stompy',
    version='0.1',
    packages=['stompy', 'stompy.grid', 'stompy.io', 'stompy.io.local',
              'stompy.model', 'stompy.model.delft', 'stompy.model.fvcom',
              'stompy.model.pypart', 'stompy.model.suntans',
              'stompy.plot', 'stompy.plot.cmaps',
              'stompy.spatial'],
    package_data={'stompy': ['tide_consts.txt']},
    license='MIT',
    url="https://github.com/rustychris/stompy",
    author="Rusty Holleman",
    author_email="rustychris@gmail.com",
    long_description=open('README.md').read(),
)
|
diegojromerolopez/djshop | src/djshop/apps/club/migrations/0008_auto_20170523_1218.py | Python | mit | 752 | 0.00266 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-05-23 12:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds `email` and `telephone_number` fields to the Member model."""

    dependencies = [
        ('club', '0007_member_autocheckout_secret_code_checksum'),
    ]

    operations = [
        migrations.AddField(
            model_name='member',
            name='email',
            field=models.EmailField(blank=True, default='', max_length=254, verbose_name='Email of the member'),
        ),
        migrations.AddField(
            model_name='member',
            # Repaired extraction artifact: 'models.Ch | arField'.
            # NOTE(review): verbose_name looks copy-pasted from the last-name
            # field; kept byte-identical because changing a historical
            # migration alters recorded model state — fix in a new migration.
            name='telephone_number',
            field=models.CharField(blank=True, default='', max_length=32, verbose_name='Last name of the member'),
        ),
    ]
|
dotKom/onlineweb4 | apps/mailinglists/appconfig.py | Python | mit | 138 | 0 | from django.apps import AppConfi | g
class MailinglistsConfig(AppConfig):
    # Django application config; base-class name repaired from the
    # corrupted 'AppC | onfig' token.
    name = 'apps.mailinglists'
    verbose_name = 'Mailinglists'
|
Jufianto/LearnPython | fistLearn/konsidi.py | Python | mit | 165 | 0.042424 | a = raw_input("Yakin ? | (y/t) : ")
list = ["y","Y"]
| if a in list :
print "baiklah"
elif a in ["t","T"]:
print "Sayang Sekali"
else:
print "Liat Pilihan OON" |
ehiggs/hanythingondemand | hod/utils.py | Python | gpl-2.0 | 2,111 | 0.004263 | #!/usr/bin/env python
# ##
# Copyright 2009-2015 Ghent University
#
# This file is part of hanythingondemand
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/hanythingondemand
#
# hanythingondemand is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# hanythingondemand is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with hanythingondemand. If not, see <http://www.gnu.org/licenses/>.
"""
Utility functions for hanythingondemand
@author: Kenneth Hoste (Universiteit Gent)
"""
import os
def only_if_module_is_available(modname):
    """Decorator to guard functions/methods against missing required module with specified name."""
    def wrap(orig):
        """Decorated function, raises ImportError if specified module is not available."""
        try:
            __import__(modname)
            return orig
        except ImportError as err:
            # Build the message now: Python 3 unbinds 'err' when the except
            # block exits, so referencing it later from the closure would
            # raise NameError instead of the intended ImportError.
            msg = "%s; required module '%s' is not available" % (err, modname)

            def error(*args, **kwargs):
                # Accept **kwargs too, so keyword calls fail with the
                # informative ImportError rather than a TypeError.
                raise ImportError(msg)
            return error
    return wrap
def setup_diagnostic_environment():
    """
    When we run diagnostic functions (e.g. genconfig, help-template), we need to
    pretend we are in a job so we poke some values into the environment.

    (The `def` line was split in two by an extraction artifact; rejoined.)
    """
    if 'PBS_DEFAULT' not in os.environ:
        os.environ['PBS_DEFAULT'] = 'pbs-master'
        # NOTE: PBS_JOBID is only set when PBS_DEFAULT was missing too,
        # matching the original control flow.
        os.environ['PBS_JOBID'] = '123.%s' % os.environ['PBS_DEFAULT']
|
anthonyfok/frescobaldi | frescobaldi_app/snippet/edit.py | Python | gpl-2.0 | 8,855 | 0.004178 | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
The dialog for editing a snippet
"""
import re
from PyQt5.QtCore import QItemSelectionModel, QSize
from PyQt5.QtGui import QIcon, QKeySequence, QTextCharFormat
from PyQt5.QtWidgets import (
QAction, QDialog, QDialogButtonBox, QGridLayout, QLabel, QLineEdit,
QMessageBox, QPushButton, QTextEdit, QVBoxLayout)
import actioncollectionmanager
import app
import qutil
import userguide
import arrowkeys
import icons
import textformats
import wordboundary
import gadgets.indenter
import gadgets.matcher
import gadgets.homekey
import widgets
from . import model
from . import snippets
from . import builtin
from . import expand
from . import highlight
from . import completer
class Edit(QDialog):
    """Dialog for editing a snippet. It is used for one edit.

    Use None as the name to create a new snippet. In that case, text
    is set as a default in the text edit.

    """
    def __init__(self, widget, name, text=""):
        super(Edit, self).__init__(widget)

        self._name = name

        layout = QVBoxLayout()
        self.setLayout(layout)

        self.topLabel = QLabel()
        self.text = QTextEdit(cursorWidth=2, acceptRichText=False)
        self.titleLabel = QLabel()
        self.titleEntry = QLineEdit()
        self.shortcutLabel = QLabel()
        self.shortcutButton = ShortcutButton(clicked=self.editShortcuts)

        layout.addWidget(self.topLabel)
        layout.addWidget(self.text)

        grid = QGridLayout()
        layout.addLayout(grid)

        grid.addWidget(self.titleLabel, 0, 0)
        grid.addWidget(self.titleEntry, 0, 1)
        grid.addWidget(self.shortcutLabel, 1, 0)
        grid.addWidget(self.shortcutButton, 1, 1)

        layout.addWidget(widgets.Separator())

        b = QDialogButtonBox(accepted=self.accept, rejected=self.reject)
        layout.addWidget(b)
        buttons = QDialogButtonBox.Ok | QDialogButtonBox.Cancel
        # Built-in snippets get an extra "Restore Defaults" button.
        if name and name in builtin.builtin_snippets:
            b.setStandardButtons(buttons | QDialogButtonBox.RestoreDefaults)
            b.button(QDialogButtonBox.RestoreDefaults).clicked.connect(self.slotDefaults)
        else:
            b.setStandardButtons(buttons)
        userguide.addButton(b, "snippet_editor")

        # PyQt5.10 and sip4.14.5 delete the Highlighter, even though it is
        # constructed with a parent, that's why we save it in an unused attribute.
        self._highlighter = highlight.Highlighter(self.text.document())
        Matcher(self.text)
        gadgets.indenter.Indenter(self.text)
        self.text.installEventFilter(gadgets.homekey.handler)
        # Repaired extraction artifact: 'self.text.i | nstallEventFilter(...)'.
        self.text.installEventFilter(arrowkeys.handler)
        wordboundary.handler.install_textedit(self.text)
        completer.Completer(self.text)

        if name:
            self.titleEntry.setText(snippets.title(name, False) or '')
            self.text.setPlainText(snippets.text(name))
            ac = self.parent().parent().snippetActions
            self.setShortcuts(ac.shortcuts(name))
        else:
            self.text.setPlainText(text)
            self.setShortcuts(None)

        app.translateUI(self)

        self.readSettings()
        app.settingsChanged.connect(self.readSettings)
        qutil.saveDialogSize(self, "snippettool/editor/size", QSize(400, 300))

        self.show()

    def translateUI(self):
        title = _("Edit Snippet") if self._name else _("New Snippet")
        self.setWindowTitle(app.caption(title))
        self.topLabel.setText(_("Snippet Text:"))
        self.titleLabel.setText(_("Title:"))
        self.shortcutLabel.setText(_("Shortcut:"))
        self.shortcutButton.updateText()

    def done(self, result):
        """Accept (saving, after validation) or reject, confirming discards."""
        if result:
            if not self.text.toPlainText():
                QMessageBox.warning(self,
                    _("Empty Snippet"),
                    _("A snippet can't be empty."))
                return
            self.saveSnippet()
        elif self.text.document().isModified():
            res = QMessageBox.warning(self, self.windowTitle(),
                _("The snippet has been modified.\n"
                  "Do you want to save your changes or discard them?"),
                QMessageBox.Save | QMessageBox.Discard | QMessageBox.Cancel)
            if res == QMessageBox.Cancel:
                return
            elif res != QMessageBox.Discard:
                self.saveSnippet()
        super(Edit, self).done(result)

    def readSettings(self):
        data = textformats.formatData('editor')
        self.text.setFont(data.font)
        self.text.setPalette(data.palette())

    def shortcuts(self):
        return self.shortcutButton.shortcuts()

    def setShortcuts(self, shortcuts):
        self.shortcutButton.setShortcuts(shortcuts)

    def editShortcuts(self):
        """Open the shortcut editor dialog for this snippet's action."""
        from widgets import shortcuteditdialog
        ac = self.parent().parent().snippetActions
        action = QAction(None)
        if self._name:
            action.setShortcuts(self.shortcuts())
            action.setIcon(snippets.icon(self._name) or QIcon())
            default = ac.defaults().get(self._name)
            text = snippets.title(self._name)
        else:
            default = None
            text = self.titleEntry.text() or _("Untitled")
        action.setText(text.replace('&', '&&'))
        cb = self.actionManager().findShortcutConflict
        skip = (self.parent().parent().snippetActions, self._name)
        dlg = shortcuteditdialog.ShortcutEditDialog(self, cb, skip)
        if dlg.editAction(action, default):
            self.setShortcuts(action.shortcuts())

    def saveSnippet(self):
        index = model.model().saveSnippet(self._name,
            self.text.toPlainText(), self.titleEntry.text())
        # set snippet current in the editor that called us
        self.parent().treeView.selectionModel().setCurrentIndex(
            index, QItemSelectionModel.SelectCurrent | QItemSelectionModel.Rows)
        # remove the shortcuts conflicts
        self.actionManager().removeShortcuts(self.shortcuts())
        self.parent().treeView.update()
        # get the name that was used
        name = model.model().name(index)
        self.parent().parent().snippetActions.setShortcuts(name, self.shortcuts())
        self.text.document().setModified(False)

    def actionManager(self):
        mainwindow = self.parent().parent().mainwindow()
        return actioncollectionmanager.manager(mainwindow)

    def slotDefaults(self):
        t = builtin.builtin_snippets[self._name]
        self.text.setPlainText(t.text)
        self.titleEntry.setText(t.title() if t.title else '')
        self.setShortcuts(self.parent().parent().snippetActions.defaults().get(self._name))
class ShortcutButton(QPushButton):
def __init__(self, **args):
super(ShortcutButton, self).__init__(**args)
self.setIcon(icons.get("preferences-desktop-keyboard-shortcuts"))
self._shortcuts = []
def shortcuts(self):
return self._shortcuts
def setShortcuts(self, shortcuts):
self._shortcuts = shortcuts or []
self.updateText()
def updateText(self):
if not self._shortcu |
ikreymer/pywb | pywb/utils/loaders.py | Python | gpl-3.0 | 14,279 | 0.00021 | from __future__ import absolute_import
"""
This module provides loaders for local file system and over http
local and remote access
"""
import os
import hmac
import requests
import yaml
import six
from six.moves.urllib.parse import unquote_plus, urlsplit, urlencode
import time
import pkgutil
import base64
import cgi
import re
from io import open, BytesIO
from warcio.limitreader import LimitReader
from pywb.utils.io import no_except_close, StreamClosingReader
try:
import boto3
from botocore import UNSIGNED
from botocore.client import Config
s3_avail = True
except ImportError: # pragma: no cover
s3_avail = False
# ============================================================================
def init_yaml_env_vars():
    """Initializes the yaml parser to be able to set
    the value of fields from environment variables

    :rtype: None
    """
    def envvar_constructor(loader, node):
        raw = loader.construct_scalar(node)
        return os.path.expandvars(raw)

    # Any scalar containing ${...} is implicitly tagged !envvar and expanded.
    pattern = re.compile(r'\$\{[^}]+\}')
    yaml.add_implicit_resolver('!envvar', pattern)
    yaml.add_constructor('!envvar', envvar_constructor)
# ============================================================================
def load_py_name(string):
    """Resolve a 'package.module:attribute' string to the named object."""
    import importlib

    parts = string.split(':', 1)
    module = importlib.import_module(parts[0])
    return getattr(module, parts[1])
# =================================================================
def is_http(filename):
    """Return True if *filename* is an http:// or https:// url."""
    return filename.startswith('http://') or filename.startswith('https://')
# =================================================================
def to_file_url(filename):
    """ Convert a filename to a file:// url
    """
    absolute = os.path.abspath(filename)
    return 'file://' + absolute.replace(os.path.sep, '/')
# =================================================================
def from_file_url(url):
    """ Convert from file:// url to file path
    """
    prefix = 'file://'
    if not url.startswith(prefix):
        # Anything else is passed through unchanged.
        return url
    return url[len(prefix):].replace('/', os.path.sep)
# =================================================================
def load(filename):
    # Convenience wrapper: a fresh BlockLoader picks the concrete loader
    # (file/http/s3/pkg/webhdfs) based on the url scheme and streams the file.
    return BlockLoader().load(filename)
# =============================================================================
def load_yaml_config(config_file):
    """Load *config_file* (any scheme BlockLoader supports) and parse it
    as YAML, always closing the source stream.

    Repaired extraction artifact: 'co | nfig = yaml.load(...)'.
    """
    config = None
    configdata = None
    try:
        configdata = load(config_file)
        config = yaml.load(configdata, Loader=yaml.Loader)
    finally:
        # Close best-effort even if loading or parsing raised.
        no_except_close(configdata)

    return config
# =============================================================================
def load_overlay_config(main_env_var, main_default_file='',
                        overlay_env_var='', overlay_file=''):
    """Load a main YAML config (path from env var or default) and merge an
    optional overlay config on top of it; env vars in paths are expanded.

    Repaired extraction artifact: 'configfile = | os.path.expandvars(...)'.
    """
    configfile = os.environ.get(main_env_var, main_default_file)
    config = None
    if configfile:
        configfile = os.path.expandvars(configfile)

        config = load_yaml_config(configfile)

    config = config or {}

    overlay_configfile = os.environ.get(overlay_env_var, overlay_file)

    if overlay_configfile:
        overlay_configfile = os.path.expandvars(overlay_configfile)
        config.update(load_yaml_config(overlay_configfile))

    return config
# =================================================================
def extract_client_cookie(env, cookie_name):
    """Return the value of *cookie_name* from the WSGI environ's Cookie
    header, or None if the header or cookie is absent.

    Fixes two defects of the substring-scan version: a name that is a
    substring of another cookie name matched incorrectly, and values
    containing '=' (e.g. base64 padding) were truncated by split('=').
    """
    cookie_header = env.get('HTTP_COOKIE')
    if not cookie_header:
        return None

    for pair in cookie_header.split(';'):
        name, sep, value = pair.strip().partition('=')
        if sep and name.strip() == cookie_name:
            return value.strip()

    return None
# =================================================================
def read_last_line(fh, offset=256):
    """ Read last line from a seekable file. Start reading
    from buff before end of file, and double backwards seek
    until line break is found. If reached beginning of file
    (no lines), just return whole file
    """
    fh.seek(0, 2)
    size = fh.tell()

    window = offset
    while window < size:
        fh.seek(-window, 2)
        candidates = fh.readlines()
        if len(candidates) > 1:
            # A line break fell inside the window: last element is the line.
            return candidates[-1]
        window *= 2

    # Window covers the whole file: read from the start.
    fh.seek(0, 0)
    return fh.readlines()[-1]
# =================================================================
class BaseLoader(object):
    """Abstract interface for block loaders; subclasses implement load()."""
    def __init__(self, **kwargs):
        pass

    def load(self, url, offset=0, length=-1):
        # Was 'raise NotImplemented()': NotImplemented is a comparison
        # sentinel, not an exception, so calling it raised a confusing
        # "'NotImplementedType' object is not callable" TypeError.
        raise NotImplementedError()
# =================================================================
class BlockLoader(BaseLoader):
    """
    a loader which can stream blocks of content
    given a uri, offset and optional length.
    Currently supports: http/https and file/local file system
    """
    # scheme name -> loader class registry (populated by init_default_loaders)
    loaders = {}
    # optional callable(profile_name, scheme) -> kwargs for the loader class
    profile_loader = None

    def __init__(self, **kwargs):
        super(BlockLoader, self).__init__()
        # per-instance cache of constructed loaders, keyed by full type
        # (including any 'profile+' prefix)
        self.cached = {}
        self.kwargs = kwargs

    def load(self, url, offset=0, length=-1):
        # Delegate to the scheme-specific loader; url may be rewritten
        # (profile prefix stripped).
        loader, url = self._get_loader_for_url(url)
        return loader.load(url, offset, length)

    def _get_loader_for_url(self, url):
        """
        Determine loading method based on uri
        """
        parts = url.split('://', 1)
        if len(parts) < 2:
            # no scheme at all: treat as a local file path
            type_ = 'file'
        else:
            type_ = parts[0]

        if '+' in type_:
            # 'profile+scheme://...' selects a named credential/config profile
            profile_name, scheme = type_.split('+', 1)
            if len(parts) == 2:
                url = scheme + '://' + parts[1]
        else:
            profile_name = ''
            scheme = type_

        # reuse a previously constructed loader for this exact type
        loader = self.cached.get(type_)
        if loader:
            return loader, url

        loader_cls = self._get_loader_class_for_type(scheme)

        if not loader_cls:
            raise IOError('No Loader for type: ' + scheme)

        profile = self.kwargs
        if self.profile_loader:
            profile = self.profile_loader(profile_name, scheme)

        loader = loader_cls(**profile)

        self.cached[type_] = loader
        return loader, url

    def _get_loader_class_for_type(self, type_):
        # Lookup in the class-level registry; None if unregistered.
        loader_cls = self.loaders.get(type_)
        return loader_cls

    @staticmethod
    def init_default_loaders():
        # Register the built-in scheme handlers (classes defined elsewhere
        # in this module).
        BlockLoader.loaders['http'] = HttpLoader
        BlockLoader.loaders['https'] = HttpLoader
        BlockLoader.loaders['s3'] = S3Loader
        BlockLoader.loaders['file'] = LocalFileLoader
        BlockLoader.loaders['pkg'] = PackageLoader
        BlockLoader.loaders['webhdfs'] = WebHDFSLoader

    @staticmethod
    def set_profile_loader(src):
        BlockLoader.profile_loader = src

    @staticmethod
    def _make_range_header(offset, length):
        # HTTP Range header for [offset, offset+length); open-ended when
        # length <= 0.
        if length > 0:
            range_header = 'bytes={0}-{1}'.format(offset, offset + length - 1)
        else:
            range_header = 'bytes={0}-'.format(offset)

        return range_header
# =================================================================
class PackageLoader(BaseLoader):
    """Load data files bundled inside a Python package via
    'pkg://package.name/relative/path' urls.
    """
    def load(self, url, offset=0, length=-1):
        if url.startswith('pkg://'):
            url = url[len('pkg://'):]

        # then, try as package.path/file
        pkg_split = url.split('/', 1)
        if len(pkg_split) == 1:
            # Was a bare 'raise' with no active exception, which surfaced
            # as 'RuntimeError: No active exception to re-raise'.
            raise IOError('invalid package url: ' + url)

        data = pkgutil.get_data(pkg_split[0], pkg_split[1])
        if offset > 0:
            data = data[offset:]
        if length > -1:
            data = data[:length]
        buff = BytesIO(data)
        buff.name = url
        return buff
# =================================================================
class LocalFileLoader(PackageLoader):
def load(self, url, offset=0, length=-1):
"""
Load a file-like reader from the local file system
"""
# if starting with . or /, can only be a file path..
file_only = url.startswith(('/', '.'))
# convert to filename
filename = from_file_url(url)
if filename != url:
file_onl |
camilortte/Recomendador | manage.py | Python | mit | 255 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Repaired extraction artifacts: the settings-module string was split
    # ('Recomendad | or.settings') and so was 'execute_from_command_line'.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Recomendador.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
MrTheodor/espressopp | testsuite/interaction_potentials/unittest/PTestFENE.py | Python | gpl-3.0 | 1,424 | 0.007725 | # Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
#  ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import espressopp.unittest
from espressopp.interaction.FENE import *
from espressopp import Real3D, infinity
class Test0FENE(espressopp.unittest.TestCase) :
    # Checks the FENE bond potential at its equilibrium distance r = r0.
    def test0Energy(self) :
        fene=FENE(K=1.0, r0=1.0, rMax=0.5)

        # root = minimum
        # Energy and force should both vanish exactly at r0 = 1.0.
        self.assertAlmostEqual(fene.computeEnergy(1.0), 0.0)
        self.assertAlmostEqual(fene.computeEnergy(1.0, 0.0, 0.0), 0.0)
        self.assertAlmostEqual((fene.computeForce(1.0, 0.0, 0.0) - Real3D(0.0, 0.0, 0.0)).sqr(), 0.0)

# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
pism/pism | util/plot_profiling.py | Python | gpl-3.0 | 4,816 | 0.001869 | #!/usr/bin/env python3
import pylab as plt
import numpy as np
from argparse import ArgumentParser
import importlib
import sys
import os.path
""" Produce pie charts using PISM's profiling output produced using
the -profile option. """
parser = ArgumentParser()
parser.add_argument("FILE", nargs=1)
options = parser.parse_args()
filename = options.FILE[0]
dirname, basename = os.path.split(filename)
sys.path.insert(0, dirname)
modulename = os.path.splitext(basename)[0]
r = importlib.import_module(modulename)
# ColorBrewer 'Set3' qualitative palette (12 RGB triples, 0-255); two
# values were corrupted by extraction delimiters and restored from the
# published palette (251 and 98).
colors = [(141, 211, 199), (255, 255, 179), (190, 186, 218), (251, 128, 114),
          (128, 177, 211), (253, 180, 98), (179, 222, 105), (252, 205, 229),
          (217, 217, 217), (188, 128, 189), (204, 235, 197), (255, 237, 111)]

# normalize to 0-1 for matplotlib
colors = np.array(colors) / 255.0
n_procs = r.size
s = r.Stages["time-stepping loop"]
# Top-level event names shown in the main "time-stepping loop" pie chart.
big_events = ["basal_yield_stress",
              "stress_balance",
              "surface",
              "ocean",
              "age",
              "energy",
              "basal_hydrology",
              "fracture_density",
              "mass_transport",
              "calving",
              "bed_deformation",
              "io"]

# Sub-events used for drill-down charts, keyed by their parent event.
small_events = {}
small_events["energy"] = ["ice_energy", "btu"]
small_events["stress_balance"] = ["stress_balance.shallow", "stress_balance.modifier",
                                  "stress_balance.strain_heat", "stress_balance.vertical_velocity"]
small_events["stress_balance.modifier"] = ["sia.bed_smoother",
                                           "sia.gradient", "sia.flux", "sia.3d_velocity"]
small_events["io"] = ["io.backup", "io.extra_file", "io.model_state"]

# Human-friendly labels for selected event names in chart legends.
better_names = {"stress_balance.shallow": "SSA",
                "stress_balance.modifier": "SIA",
                "stress_balance.strain_heat": "Strain heating",
                "stress_balance.vertical_velocity": "Vertical velocity"}
def get_event_times(event, n_procs):
    """Return (max, min, min/max) of *event*'s time across all ranks.

    Reads the module-level stage dict 's'; the ratio is 0.0 when max is 0.
    """
    # NOTE: 'max'/'min' shadow the builtins within this function.
    result = [s[event][j]["time"] for j in range(n_procs)]
    max = np.max(result)
    min = np.min(result)
    if max > 0:
        return max, min, min / max
    else:
        return max, min, 0.0
# Wall-clock time of the whole time-stepping loop (max across ranks).
total_time = np.max([s["summary"][j]["time"] for j in range(n_procs)])

def get_data(event_list):
    "Get event data from the time-stepping loop stage."
    # Silently skips events that are absent from the profiling output.
    return {e: get_event_times(e, n_procs) for e in event_list if e in list(s.keys())}
def aggregate(data, total_time):
    """Combine small events.

    Events taking under 1% of *total_time* are removed and lumped into a
    single 'other' entry of the form [max_sum, min_sum, min_sum/max_sum].
    Removed the dead 'other_label' accumulator and moved the ratio
    computation out of the loop (its final value is unchanged).
    """
    d = data.copy()
    other = [0, 0, 0]
    for event in data:
        if data[event][0] / float(total_time) < 0.01:
            print("Lumping '%s' (%f%%) with others..." % (event,
                                                          100.0 * data[event][0] / total_time))
            del d[event]
            other[0] += data[event][0]
            other[1] += data[event][1]

    other[2] = other[1] / other[0] if other[0] > 0 else 0.0
    # NOTE: 'other' is added even when nothing was lumped, matching the
    # original behavior.
    d["other"] = other
    return d
def plot(data, total, grand_total):
    """Draw an exploded pie chart of event times.

    *data* maps event names to (max, min, ratio) tuples; slice sizes are
    the max times relative to *total*.  When *grand_total* is given, each
    label also shows the event's share of the grand total.
    """
    def pretty(name):
        return better_names.get(name, name)

    # Smallest slice first; sorting is stable so ties keep dict order.
    ordered = sorted(data.items(), key=lambda item: item[1][0])

    fractions = []
    labels = []
    for name, values in ordered:
        seconds = values[0]
        fractions.append(100.0 * seconds / float(total))
        if grand_total is not None:
            note = "(%3.1f s, %3.1f%%)" % (seconds, 100.0 * seconds / grand_total)
        else:
            note = "(%3.1f s)" % seconds
        labels.append(pretty(name) + " " + note)

    plt.pie(fractions, autopct="%3.1f%%", labels=labels, colors=colors,
            startangle=0.0, explode=[0.05] * len(fractions))
    plt.margins(x=0.2, y=0.1)
    plt.axis('equal')
def figure(title, event_list, total, grand_total=None):
    """Open a new pie-chart figure for *event_list* and return its data."""
    plt.figure(figsize=(10, 5))
    plt.title("%s (%s)" % (title, filename))
    fig_data = get_data(event_list)
    plot(aggregate(fig_data, total), total, grand_total)
    return fig_data
# Top-level chart of the whole time-stepping loop, then drill-down charts
# for the big stages; each figure() call returns the event data so the
# drill-downs can use the parent stage's total time.
big = figure("Time-stepping loop",
             big_events,
             total_time)
energy = figure("Energy step",
                small_events["energy"],
                big["energy"][0], total_time)
stressbalance = figure("Stress balance",
                       small_events["stress_balance"],
                       big["stress_balance"][0], total_time)
sia = figure("SIA",
             small_events["stress_balance.modifier"],
             stressbalance["stress_balance.modifier"][0], total_time)
# NOTE(review): this local name shadows the stdlib ``io`` module name.
io = figure("I/O during run",
            small_events["io"],
            big["io"][0], total_time)
plt.show()
|
atvcaptain/enigma2 | lib/python/Plugins/Extensions/AtileHD/plugin.py | Python | gpl-2.0 | 28,142 | 0.028889 | # -*- coding: utf-8 -*-
#This plugin is free software, you are allowed to
#modify it (if you keep the license),
#but you are not allowed to distribute/publish
#it without source code (this version and your modifications).
#This means you also have to distribute
#source code of your modifications.
from __future__ import print_function
from enigma import eTimer
from Components.ActionMap import ActionMap
from Components.config import config, getConfigListEntry, ConfigSubsection, ConfigSelection, ConfigYesNo, NoSave, ConfigNothing, ConfigNumber
from Components.ConfigList import ConfigListScreen
from Components.Label import Label
from Components.MenuList import MenuList
from Components.Pixmap import Pixmap
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from Plugins.Plugin import PluginDescriptor
from Screens.SkinSelector import SkinSelector
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.Screen import Screen
from Screens.Standby import TryQuitMainloop
from Tools.Directories import *
from Tools.LoadPixmap import LoadPixmap
from Tools.WeatherID import get_woeid_from_yahoo
import Tools.Notifications
from os import listdir, remove, rename, system, path, symlink, chdir, makedirs, mkdir
import shutil
cur_skin = config.skin.primary_skin.value.replace('/skin.xml', '')
# Atile
config.plugins.AtileHD = ConfigSubsection()
config.plugins.AtileHD.refreshInterval = ConfigNumber(default=10)
config.plugins.AtileHD.woeid = ConfigNumber(default = 638242)
config.plugins.AtileHD.tempUnit = ConfigSelection(default="Celsius", choices = [
("Celsius", _("Celsius")),
("Fahrenheit", _("Fahrenheit"))
])
def Plugins(**kwargs):
return [PluginDescriptor(name=_("%s Setup") % cur_skin, description=_("Personalize your Skin"), where = PluginDescriptor.WHERE_MENU, icon="plugin.png", fnc=menu)]
def menu(menuid, **kwargs):
if menuid == "system" and not config.skin.primary_skin.value == "MetrixHD/skin.MySkin.xml" and not config.skin.primary_skin.value == "MetrixHD/skin.xml" and not config.skin.primary_skin.value =="SevenHD/skin.xml" and not config.skin.primary_skin.value == "KravenVB/skin.xml":
return [(_("Setup - %s") % cur_skin, main, "atilehd_setup", None)]
else:
pass
return [ ]
def main(session, **kwargs):
print("[%s]: Config ..." % cur_skin)
session.open(AtileHD_Config)
def isInteger(s):
try:
int(s)
return True
except ValueError:
return False
class WeatherLocationChoiceList(Screen):
    """Screen showing a menu of candidate weather locations.

    Closes with the hidden value of the selected row, or None on cancel.
    Repairs two extraction-garbled tokens of the original (``se | lf`` and
    ``s | tr``) and renames a local that shadowed the builtin ``list``.
    """
    skin = """
        <screen name="WeatherLocationChoiceList" position="center,center" size="1280,720" title="Location list" >
            <widget source="Title" render="Label" position="70,47" size="950,43" font="Regular;35" transparent="1" />
            <widget name="choicelist" position="70,115" size="700,480" scrollbarMode="showOnDemand" scrollbarWidth="6" transparent="1" />
            <eLabel position=" 55,675" size="290, 5" zPosition="-10" backgroundColor="red" />
            <eLabel position="350,675" size="290, 5" zPosition="-10" backgroundColor="green" />
            <eLabel position="645,675" size="290, 5" zPosition="-10" backgroundColor="yellow" />
            <eLabel position="940,675" size="290, 5" zPosition="-10" backgroundColor="blue" />
            <widget name="key_red" position="70,635" size="260,25" zPosition="1" font="Regular;20" halign="left" foregroundColor="foreground" transparent="1" />
            <widget name="key_green" position="365,635" size="260,25" zPosition="1" font="Regular;20" halign="left" foregroundColor="foreground" transparent="1" />
        </screen>
        """

    def __init__(self, session, location_list):
        # location_list: sequence of pairs; item[1] is displayed and
        # item[0] is returned from keyOk (see createChoiceList) --
        # presumably (woeid, name); confirm against callers.
        self.session = session
        self.location_list = location_list
        entries = []
        Screen.__init__(self, session)
        self.title = _("Location list")
        self["choicelist"] = MenuList(entries)
        self["key_red"] = Label(_("Cancel"))
        self["key_green"] = Label(_("OK"))
        self["myActionMap"] = ActionMap(["SetupActions", "ColorActions"],
        {
            "ok": self.keyOk,
            "green": self.keyOk,
            "cancel": self.keyCancel,
            "red": self.keyCancel,
        }, -1)
        self.createChoiceList()

    def createChoiceList(self):
        # Build (displayed_text, returned_value) tuples for the MenuList.
        entries = []
        print(self.location_list)
        for x in self.location_list:
            entries.append((str(x[1]), str(x[0])))
        self["choicelist"].l.setList(entries)

    def keyOk(self):
        # Close with the hidden value of the selection; fall back to
        # cancel when there is no usable selection.
        returnValue = self["choicelist"].l.getCurrentSelection()[1]
        if returnValue is not None:
            self.close(returnValue)
        else:
            self.keyCancel()

    def keyCancel(self):
        self.close(None)
class AtileHD_Config(Screen, ConfigListScreen):
skin = """
<screen name="AtileHD_Config" position="center,center" size="1280,720" title="AtileHD Setup" >
<widget source="Title" render="Label" position="70,47" size="950,43" font="Regular;35" transparent="1" />
<widget name="config" position="70,115" size="700,480" scrollbarMode="showOnDemand" scrollbarWidth="6" transparent="1" />
<widget name="Picture" position="808,342" size="400,225" alphatest="on" />
<eLabel position=" 55,675" size="290, 5" zPosition="-10" backgroundColor="red" />
<eLabel position="350,675" size="290, 5" zPosition="-10" backgroundColor="green" />
<eLabel position="645,675" size="290, 5" zPosition="-10" backgroundColor="yellow" />
<eLabel position="940,675" size="290, 5" zPosition="-10" backgroundColor="blue" />
<widget name="key_red" position="70,635" size="260,25" zPosition="1" font="Regular;20" halign="left" foregroundColor="foreground" transparent="1" />
<widget name="key_green" position="365,635" size="260,25" zPosition="1" font="Regular;20" halign="left" foregroundColor="foreground" transparent="1" />
<widget name="key_yellow" position="660,635" size="260,25" zPosition="1" font="Regular;20" halign="left" foregroundColor="foreground" transparent="1" />
<widget name="key_blue" position="955,635" size="260,25" zPosition="0" font="Regular;20" halign="left" foregroundColor="foreground" transparent="1" />
</screen>
"""
def __init__(self, session, args = 0):
self.session = session
self.skin_lines = []
self.changed_screens = False
Screen.__init__(self, session)
self.start_skin = config.skin.primary_skin.value
if self.start_skin != "skin.xml":
self.getInitConfig()
self.list = []
ConfigListScreen.__init__(self, self.list, session = self.session, on_change = self.changedEntry)
self["key_red"] = Label(_("Cancel"))
self["key_green"] = Label(_("OK"))
self["key_yellow"] = Label()
self["key_blue"] = Label(_("About"))
self["setupActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"green": self.keyGreen,
"red": self.cancel,
"yellow": self.keyYellow,
"blue": self.about,
"cancel": self.cancel,
"ok": self.keyOk,
"menu": self.setWeather,
}, -2)
self["Picture"] = Pixmap()
if not self.selectionChanged in self["config"].onSelectionChanged:
self["config"].onSelectionChanged.append(self.selectionChanged)
if self.start_skin == "skin.xml":
self.onLayoutFinish.append(self.openSkinSelectorDelayed)
else:
self.createConfigList()
def setWeather(self):
try:
from Plugins.Extensions.WeatherPlugin.setup import MSNWeatherPluginEntriesListConfigScreen
self.session.open(MSNWeatherPluginEntriesListConfigScreen)
except:
self.session.open(MessageBox, _("'weatherplugin' is not installed!"), MessageBox.TYPE_INFO)
def getInitConfig(self):
global cur_skin
self.is_atile = False
if cur_skin == 'AtileHD':
self.is_atile = True
self.title = _("%s - Setup") % cur_skin
self.skin_base_dir = "/usr/share/enigma2/%s/" % cur_skin
if self.is_atile:
self.default_font_file = "font_atile_Roboto.xml"
self.default_color_file = "colors_atile_Grey_transparent.xml"
else:
self.default_font_file = "font_Original.xml"
self.default_color_file = "colors_Original.xml"
self.default_background_file = "background_Original.xml"
self.default_sb_file = "sb_Original.xml"
self.default_infobar_file = "infobar_Original.xml"
self.default_sib_file = "sib_Original.xml"
self.default_ch_se_file = "ch_se_Original.xml"
self.default_ev_file = "ev_Original.xml"
self.default_clock_file = "clock_Original.xml"
self.default_ul_file = "ul_Original.xml"
self.color_file = "skin_user_ |
sjl767/woo | examples/shapepack.py | Python | gpl-2.0 | 1,342 | 0.046945 | import woo.core, woo.dem
from woo.dem import *
import woo.utils
from minieigen import *
from math import *
from woo import utils
m=woo.utils.defaultMaterial()
zeroSphere=woo.utils.sphere((0,0,0),.4) # sphere which is entirely inside the thing
for p in [woo.utils.sphere((0,0,0),1,mat=m),woo.utils.ellipsoid((0,0,0),semiAxes=(.8,1,1.2),mat=m),woo.utils.ellipsoid((0,0,0),semiAxes=(1.,1.,1.),mat=m),woo.utils.capsule((0,0,0),radius=.8,shaft=.6,mat=m)]:
print 100*'#'
print p.shape
#S=woo.core.Scene(fields=[DemField()])
#S.dem.par.add(p)
| sp=woo.dem.ShapePack()
sp.add([p.shape,zeroSphere.shape])
r=sp.raws[0]
if isinstance(r,SphereClumpGeom):
for i in range | (len(r.radii)): print r.centers[i],r.radii[i]
else:
for rr in r.rawShapes: print rr,rr.className,rr.center,rr.radius,rr.raw
# print [i for i in r.rawShapes]
r.recompute(div=10)
print 'equivRad',r.equivRad,p.shape.equivRadius
print 'volume',r.volume,p.mass/m.density
print 'inertia',r.inertia,p.inertia/m.density
print 'pos',r.pos,p.pos
print 'ori',r.ori,p.ori
print 50*'='
ee=p.shape
print ee
print 'volume',ee.volume
print 'equivRadius',ee.equivRadius
rr=(ee.volume/((4/3.)*pi))**(1/3.)
print 'sphere radius of the same volume',rr
print 'sphere volume',(4/3.)*pi*rr**3
|
GiovanniConserva/TestDeploy | venv/Lib/site-packages/clyent/errors.py | Python | bsd-3-clause | 150 | 0.013333 | from __future__ | import absolute_import, print_function, unicode_literals
class ShowHelp(Exception):
    """Control-flow exception; judging by the name it signals that help
    text should be displayed -- confirm against callers."""
    pass
class ClyentError(Exception):
    """Base exception type for errors raised by this package.

    Repairs the extraction-garbled class header of the original
    (``class ClyentError | (Exception):``).
    """
    pass
|
GoogleCloudPlatform/rad-lab | radlab-launcher/radlab.py | Python | apache-2.0 | 44,933 | 0.010816 | #!/usr/bin/env python3
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# PREREQ: installer_prereq.py
import os
import re
import sys
import json
import glob
import shutil
import string
import random
import requests
import argparse
import platform
import subprocess
from art import *
from os import path
from pprint import pprint
from google.cloud import storage
from googleapiclient import discovery
from colorama import Fore, Back, Style
from python_terraform import Terraform
from oauth2client.client import GoogleCredentials
ACTION_CREATE_DEPLOYMENT = "1"
ACTION_UPDATE_DEPLOYMENT = "2"
ACTION_DELETE_DEPLOYMENT = "3"
ACTION_LIST_DEPLOYMENT = "4"
def main(varcontents=None, module_name=None, action=None, projid=None, tfbucket=None):
    """Entry point: drive a RAD Lab module deployment end to end.

    Authenticates the user, selects project / module / action, validates
    the user-supplied Terraform variables and hands off to the Terraform
    wrapper.  All arguments are optional; missing ones are prompted for
    interactively.  Repairs two extraction-garbled tokens of the original
    and replaces the shared mutable default ``varcontents={}``.
    """
    if varcontents is None:
        varcontents = {}

    orgid = ""
    folderid = ""
    billing_acc = ""
    currentusr = ""
    setup_path = os.getcwd()

    # Setting "gcloud auth application-default" to deploy RAD Lab Modules
    currentusr = radlabauth(currentusr)

    # Setting up Project-ID
    projid = set_proj(projid)

    # Checking for User Permissions
    launcherperm(projid, currentusr)

    # Listing / Selecting from available RAD Lab modules
    if module_name is None:
        module_name = list_modules()

    # Checking Module specific permissions
    moduleperm(projid, module_name, currentusr)

    # Validating user input Terraform variables against selected module
    validate_tfvars(varcontents, module_name)

    # Select Action to perform
    if action is None or action == "":
        action = select_action().strip()

    # Setting up required attributes for any RAD Lab module deployment
    env_path, tfbucket, orgid, billing_acc, folderid, randomid = \
        module_deploy_common_settings(action, module_name, setup_path,
                                      varcontents, projid, tfbucket)

    # Utilizing Terraform Wrapper for init / apply / destroy
    env(action, orgid, billing_acc, folderid, env_path, randomid, tfbucket, projid)

    print("\nGCS Bucket storing Terrafrom Configs: "+ tfbucket +"\n")
    print("\nTERRAFORM DEPLOYMENT COMPLETED!!!\n")
def radlabauth(currentusr):
    """Ensure gcloud application-default credentials exist and return the
    authenticated user's email; may interactively re-login the user."""
    try:
        # Probe the current ADC token and resolve it to an email via the
        # Google OAuth2 tokeninfo endpoint.
        token = subprocess.Popen(["gcloud auth application-default print-access-token"],shell=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout.read().strip().decode('utf-8')
        r = requests.get('https://www.googleapis.com/oauth2/v3/tokeninfo?access_token='+token)
        currentusr = r.json()["email"]
        # Setting Credentials for non Cloud Shell CLI
        if(platform.system() != 'Linux' and platform.processor() !='' and not platform.system().startswith('cs-')):
            # countdown(5)
            x = input("\nWould you like to proceed the RAD Lab deployment with user - " + Fore.YELLOW + currentusr + Style.RESET_ALL + ' ?\n[1] Yes\n[2] No\n'+ Fore.YELLOW + Style.BRIGHT + 'Choose a number : ' + Style.RESET_ALL ).strip()
            if(x == '1'):
                pass
            elif(x == '2'):
                print("\nLogin with User account with which you would like to deploy RAD Lab Modules...\n")
                os.system("gcloud auth application-default login")
            else:
                # Sentinel: any other answer aborts in the finally block.
                currentusr = '0'
    # NOTE(review): bare except silently swallows all errors (including
    # KeyboardInterrupt) and falls back to an interactive login.
    except:
        print("\nLogin with User account with which you would like to deploy RAD Lab Modules...\n")
        os.system("gcloud auth application-default login")
    finally:
        if(currentusr == '0'):
            sys.exit(Fore.RED + "\nError Occured - INVALID choice.\n")
        else:
            # Re-read the (possibly refreshed) token to report the final user.
            token = subprocess.Popen(["gcloud auth application-default print-access-token"],shell=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout.read().strip().decode('utf-8')
            r = requests.get('https://www.googleapis.com/oauth2/v3/tokeninfo?access_token='+token)
            currentusr = r.json()["email"]
            print("\nUser to deploy RAD Lab Modules (Selected) : " + Fore.GREEN + Style.BRIGHT + currentusr + Style.RESET_ALL )
    return currentusr
def set_proj(projid):
    """Select and activate the RAD Lab management project.

    When *projid* is None, offers the currently configured gcloud project
    or prompts for a different one; always runs ``gcloud config set
    project`` on the chosen id and returns it.
    """
    if projid is None:
        # Current project from the active gcloud configuration (may be "").
        projid = os.popen("gcloud config list --format 'value(core.project)' 2>/dev/null").read().strip()
        if(projid != ""):
            select_proj = input("\nWhich Project would you like to use for RAD Lab management (Example - Creating/Utilizing GCS bucket where Terraform states will be stored) ? :" + "\n[1] Currently set project - " + Fore.GREEN + projid + Style.RESET_ALL + "\n[2] Enter a different Project ID" +Fore.YELLOW + Style.BRIGHT + "\nChoose a number for the RAD Lab management Project" + Style.RESET_ALL + ': ').strip()
            if(select_proj == '2'):
                projid = input(Fore.YELLOW + Style.BRIGHT + "Enter the Project ID" + Style.RESET_ALL + ': ').strip()
                os.system("gcloud config set project " + projid)
            elif(select_proj != '1' and select_proj != '2'):
                sys.exit(Fore.RED + "\nError Occured - INVALID choice.\n")
        else:
            projid = input(Fore.YELLOW + Style.BRIGHT + "\nEnter the Project ID for RAD Lab management" + Style.RESET_ALL + ': ').strip()
            os.system("gcloud config set project " + projid)
    else:
        os.system("gcloud config set project " + projid)
    print("\nProject ID (Selected) : " + Fore.GREEN + Style.BRIGHT + projid + Style.RESET_ALL)
    return projid
def launcherperm(projid,currentusr):
# Hardcoded Project level required RAD Lab Launcher roles
launcherprojroles = ['roles/storage.admin','roles/serviceusage.serviceUsageConsumer']
# Hardcoded Org level required RAD Lab Launcher roles
launcherorgroles = ['roles/iam.organizationRoleViewer']
credentials = GoogleCredentials.get_application_default()
service0 = discovery.build('cloudresourcemanager', 'v3', credentials=credentials)
request0 = service0.projects().getIamPolicy(resource='projects/'+projid)
response0 = request0.execute()
projiam = True
for role in launcherprojroles:
rolefound = False
for y in range(len(response0['bindings'])):
# print("ROLE --->")
# print(response0['bindings'][y]['role'])
# print("MEMBERS --->")
# print(response0['bindings'][y]['members'])
if(role == response0['bindings'][y]['role']):
rolefound = True
if('user:'+currentusr not in response0['bindings'][y]['members']):
projiam = False
sys.exit(Fore.RED + "\nError Occured - RADLAB LAUNCHER PERMISSION ISSUE | " + role + " permission missing...\n(Review https://github.com/GoogleCloudPlatform/rad-lab/tree/main/radlab-launcher#iam-permissions-prerequisites for more details)\n" +Style.RESET_ALL )
else:
pass
if rolefound == False:
sys.exit(Fore.RED + "\nError Occured - RADLAB LAUNCHER PERMISSION ISSUE | " + role + " permission missing...\n(Review https://github.com/GoogleCloudPlatform/rad-lab/tree/main/radlab-launcher#iam-permissions-prerequisites for more details)\n" +Style.RESET_ALL )
if projiam == True:
print(Fore.GREEN + '\nRADLAB LAUNCHER - Project Permission check passed' + Style.RESET_ALL)
service1 = discovery.build('cloudresourcemanager', 'v3', credentials=credentials)
request1 = service1.projects().get(name='projects/'+projid)
response1 = request1.execute()
if 'parent' in response1.keys():
service2 = discovery.build('cloudresourcemanager', 'v3', credentials=credentials)
org = findorg(response1['parent'])
request2 = service2.organizations().getIamPolicy(resource=org)
response2 = request2.execute |
rawjam/django-allauth | allauth/socialaccount/providers/twitter/provider.py | Python | mit | 2,869 | 0.005228 | from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
from allauth.socialaccount.models import SocialApp, SocialToken
import oauth2 as oauth
import urllib, urllib2, json
class TwitterAccount(ProviderAccount):
    """Provider account wrapper exposing Twitter profile data and signed
    API access for a linked social account.

    Repairs two stray extraction markers of the original and replaces the
    shared mutable default ``args={}`` in request_url.
    """

    def get_screen_name(self):
        """Return the stored Twitter screen name, or None."""
        return self.account.extra_data.get('screen_name')

    def get_profile_url(self):
        """Return the public twitter.com profile URL, or None."""
        ret = None
        screen_name = self.get_screen_name()
        if screen_name:
            ret = 'http://twitter.com/' + screen_name
        return ret

    def get_avatar_url(self):
        """Return a full-size avatar URL, or None."""
        ret = None
        profile_image_url = self.account.extra_data.get('profile_image_url')
        if profile_image_url:
            # Hmm, hack to get our hands on the large image. Not
            # really documented, but seems to work.
            ret = profile_image_url.replace('_normal', '')
        return ret

    def has_valid_authentication(self):
        """Check the stored OAuth token against Twitter's
        verify_credentials endpoint; True when the call succeeds."""
        account = self.account
        app = SocialApp.objects.get_current(self.account.get_provider().id)
        tokens = SocialToken.objects.filter(app=app, account=account).order_by('-id')
        if tokens:
            token = tokens[0]
            consumer = oauth.Consumer(key=app.key, secret=app.secret)
            access_token = oauth.Token(key=token.token, secret=token.token_secret)
            client = oauth.Client(consumer, access_token)
            try:
                response, data = client.request('https://api.twitter.com/1.1/account/verify_credentials.json')
                return True
            except urllib2.HTTPError:
                return False
        return False

    def request_url(self, url, args=None, callback=None):
        """Issue a signed GET to *url* with query *args* and return the
        parsed JSON response (None when no token is stored).  *callback*,
        if given, is invoked with (full_url, raw_data)."""
        if args is None:
            args = {}
        account = self.account
        app = SocialApp.objects.get_current(self.account.get_provider().id)
        tokens = SocialToken.objects.filter(app=app, account=account).order_by('-id')
        if tokens:
            token = tokens[0]
            consumer = oauth.Consumer(key=app.key, secret=app.secret)
            access_token = oauth.Token(key=token.token, secret=token.token_secret)
            client = oauth.Client(consumer, access_token)
            full_url = '%s?%s' % (url, urllib.urlencode(args))
            response, data = client.request(full_url)
            if callback: callback(full_url, data)
            return json.loads(data)
        return None

    def __unicode__(self):
        screen_name = self.get_screen_name()
        return screen_name or super(TwitterAccount, self).__unicode__()
class TwitterProvider(OAuthProvider):
    """OAuth provider descriptor for Twitter, pairing the provider id and
    package with the TwitterAccount wrapper above."""
    id = 'twitter'
    name = 'Twitter'
    package = 'allauth.socialaccount.providers.twitter'
    account_class = TwitterAccount
# Make the provider discoverable through allauth's global registry.
providers.registry.register(TwitterProvider)
|
thingsboard/thingsboard-gateway | tests/converters/test_bytes_can_uplink_converter.py | Python | apache-2.0 | 7,578 | 0.00066 | # Copyright 2020. ThingsBoard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import _struct
import unittest
from math import isclose
from random import randint, uniform, choice
from string import ascii_lowercase
from thingsboard_gateway.connectors.can.bytes_can_uplink_converter import BytesCanUplinkConverter
class BytesCanUplinkConverterTests(unittest.TestCase):
    """Unit tests for BytesCanUplinkConverter: decoding of bool / int /
    float / string values (byte order, sign, encoding) and of expression
    evaluation from raw CAN frames into attribute/telemetry dicts.

    Fix: the three expression tests used ``randint(-128, 256)``, whose
    upper bound 256 does not fit in one byte and made
    ``to_bytes(1, ...)`` raise OverflowError intermittently; the bound
    is now 255.
    """

    def setUp(self):
        self.converter = BytesCanUplinkConverter()

    def _has_no_data(self, data):
        # True when conversion yielded neither attributes nor telemetry.
        return bool(data is None or not data.get("attributes", []) and not data.get("telemetry", []))

    def test_wrong_type(self):
        can_data = [0, 1, 0, 0, 0]
        configs = [{
            "key": "var",
            "is_ts": True,
            "type": "wrong_type"
        }]
        tb_data = self.converter.convert(configs, can_data)
        self.assertTrue(self._has_no_data(tb_data))

    def test_bool_true(self):
        can_data = [0, 1, 0, 0, 0]
        configs = [{
            "key": "boolVar",
            "is_ts": True,
            "type": "bool",
            "start": 1
        }]
        tb_data = self.converter.convert(configs, can_data)
        self.assertTrue(tb_data["telemetry"]["boolVar"])

    def test_bool_false(self):
        can_data = [1, 0, 1, 1, 1]
        configs = [{
            "key": "boolVar",
            "is_ts": False,
            "type": "bool",
            "start": 1
        }]
        tb_data = self.converter.convert(configs, can_data)
        self.assertFalse(tb_data["attributes"]["boolVar"])

    # NOTE(review): the parameter name `type` shadows the builtin; kept
    # for interface stability.
    def _test_int(self, type, byteorder):
        int_value = randint(-32768, 32767)
        int_size = 2
        can_data = [0, 0]
        configs = [{
            "key": type + "Var",
            "is_ts": True,
            "type": type,
            "start": len(can_data),
            "length": int_size,
            "byteorder": byteorder,
            "signed": int_value < 0
        }]
        can_data.extend(int_value.to_bytes(int_size, byteorder, signed=(int_value < 0)))
        tb_data = self.converter.convert(configs, can_data)
        self.assertEqual(tb_data["telemetry"][type + "Var"], int_value)

    def test_int_big_byteorder(self):
        self._test_int("int", "big")

    def test_int_little_byteorder(self):
        self._test_int("int", "little")

    def test_long_big_byteorder(self):
        self._test_int("long", "big")

    def test_long_little_byteorder(self):
        self._test_int("long", "little")

    def _test_float_point_number(self, type, byteorder):
        float_value = uniform(-3.1415926535, 3.1415926535)
        can_data = [0, 0]
        configs = [{
            "key": type + "Var",
            "is_ts": True,
            "type": type,
            "start": len(can_data),
            "length": 4 if type[0] == "f" else 8,
            "byteorder": byteorder
        }]
        can_data.extend(_struct.pack((">" if byteorder[0] == "b" else "<") + type[0],
                                     float_value))
        tb_data = self.converter.convert(configs, can_data)
        self.assertTrue(isclose(tb_data["telemetry"][type + "Var"], float_value, rel_tol=1e-05))

    def test_float_big_byteorder(self):
        self._test_float_point_number("float", "big")

    def test_float_little_byteorder(self):
        self._test_float_point_number("float", "little")

    def test_double_big_byteorder(self):
        self._test_float_point_number("double", "big")

    def test_double_little_byteorder(self):
        self._test_float_point_number("double", "little")

    def _test_string(self, encoding="ascii"):
        str_length = randint(1, 8)
        str_value = ''.join(choice(ascii_lowercase) for _ in range(str_length))
        configs = [{
            "key": "stringVar",
            "is_ts": True,
            "type": "string",
            "start": 0,
            "length": str_length,
            "encoding": encoding
        }]
        can_data = str_value.encode(encoding)
        tb_data = self.converter.convert(configs, can_data)
        self.assertEqual(tb_data["telemetry"]["stringVar"], str_value)

    def test_string_default_ascii_encoding(self):
        self._test_string()

    def test_string_utf_8_string(self):
        self._test_string("utf-8")

    def _test_eval_int(self, number, strict_eval, expression):
        can_data = number.to_bytes(1, "big", signed=(number < 0))
        # By default the strictEval flag is True
        configs = [{
            "key": "var",
            "is_ts": True,
            "type": "int",
            "start": 0,
            "length": 1,
            "byteorder": "big",
            "signed": number < 0,
            "expression": expression,
            "strictEval": strict_eval
        }]
        return self.converter.convert(configs, can_data)

    def test_strict_eval_violation(self):
        # Upper bound 255 (was 256): one byte holds at most 255 unsigned.
        number = randint(-128, 255)
        tb_data = self._test_eval_int(number, True, "pow(value, 2)")
        self.assertTrue(self._has_no_data(tb_data))

    def test_strict_eval(self):
        number = randint(-128, 255)
        tb_data = self._test_eval_int(number, True, "value * value")
        self.assertEqual(tb_data["telemetry"]["var"], number * number)

    def test_no_strict_eval(self):
        number = randint(-128, 255)
        tb_data = self._test_eval_int(number, False, "pow(value, 2)")
        self.assertEqual(tb_data["telemetry"]["var"], number * number)

    def test_multiple_valid_configs(self):
        bool_value = True
        int_value = randint(0, 256)
        can_data = [0, int(bool_value), int_value, 0, 0, 0]
        configs = [
            {
                "key": "boolVar",
                "type": "boolean",
                "is_ts": True,
                "start": 1
            },
            {
                "key": "intVar",
                "type": "int",
                "is_ts": False,
                "start": 2,
                "length": 4,
                "byteorder": "little",
                "signed": False
            }
        ]
        tb_data = self.converter.convert(configs, can_data)
        self.assertEqual(tb_data["telemetry"]["boolVar"], bool_value)
        self.assertEqual(tb_data["attributes"]["intVar"], int_value)

    def test_multiple_configs_one_invalid(self):
        bool_value = True
        invalid_length = 3  # Float requires 4 bytes
        can_data = [0, int(bool_value), randint(0, 256), 0, 0, 0]
        configs = [
            {
                "key": "validVar",
                "type": "boolean",
                "is_ts": True,
                "start": 1
            },
            {
                "key": "invalidVar",
                "type": "float",
                "is_ts": False,
                "start": 2,
                "length": invalid_length
            }
        ]
        tb_data = self.converter.convert(configs, can_data)
        self.assertEqual(tb_data["telemetry"]["validVar"], bool_value)
        self.assertIsNone(tb_data["attributes"].get("invalidVar"))
if __name__ == '__main__':
unittest.main()
|
bonyuta0204/NetDissec | src/adeseg.py | Python | mit | 5,296 | 0.000189 | import colorname
import glob
import os
import re
import numpy
from loadseg import AbstractSegmentation
from scipy.io import loadmat
from scipy.misc import imread
from collections import namedtuple
class AdeSegmentation(AbstractSegmentation):
    """Loader for the ADE20K scene-parsing dataset: images, object/part
    segmentations (decoded from RGB-encoded PNGs) and scene labels.

    Repairs two extraction-garbled tokens of the original and fixes a
    tuple-vs-int comparison in resolve_segmentation.
    """

    def __init__(self, directory=None, version=None):
        # Default to value of ADE20_ROOT env variable
        if directory is None:
            directory = os.environ['ADE20K_ROOT']
        directory = os.path.expanduser(directory)
        # Default to the latest version present in the directory
        if version is None:
            contents = os.listdir(directory)
            if not list(c for c in contents if re.match('^index.*mat$', c)):
                version = sorted(c for c in contents if os.path.isdir(
                    os.path.join(directory, c)))[-1]
            else:
                version = ''
        self.root = directory
        self.version = version
        # The index .mat file describes every image, scene and object name.
        mat = loadmat(self.expand(self.version, 'index*.mat'), squeeze_me=True)
        index = mat['index']
        Ade20kIndex = namedtuple('Ade20kIndex', index.dtype.names)
        self.index = Ade20kIndex(
            **{name: index[name][()] for name in index.dtype.names})
        # Scene 0 is the placeholder '-'; the rest are normalized names.
        self.scenes = ['-'] + [
            norm_name(s) for s in sorted(set(self.index.scene))]
        self.scene_map = dict((s, i) for i, s in enumerate(self.scenes))
        # Zero out special ~ scene names, which mean unlabeled.
        for k in self.scene_map:
            if k.startswith('~'):
                self.scene_map[k] = 0
        self.raw_mat = mat

    def all_names(self, category, j):
        """Return the list of names for label *j* in *category* ([] for 0)."""
        if j == 0:
            return []
        if category == 'color':
            return [colorname.color_names[j - 1] + '-c']
        if category == 'scene':
            return [self.scenes[j] + '-s']
        # Object names may be comma-separated synonym lists.
        result = self.index.objectnames[j - 1]
        return re.split(r',\s*', result)

    def size(self):
        '''Returns the number of images in this dataset.'''
        return len(self.index.filename)

    def filename(self, n):
        '''Returns the filename for the nth dataset image.'''
        filename = self.index.filename[n]
        folder = self.index.folder[n]
        return self.expand(folder, filename)

    def metadata(self, i):
        '''Returns an object that can be used to create all segmentations.'''
        return dict(
            filename=self.filename(i),
            seg_filename=self.seg_filename(i),
            part_filenames=self.part_filenames(i),
            scene=self.scene_map[norm_name(self.index.scene[i])]
        )

    @classmethod
    def resolve_segmentation(cls, m, categories=None):
        """Build the {category: data} dict for metadata *m* and return it
        together with the common (rows, cols) shape."""
        result = {}
        if wants('scene', categories):
            result['scene'] = m['scene']
        if wants('part', categories):
            result['part'] = load_parts(m)
        if wants('object', categories):
            result['object'] = load_segmentation(m)
        if wants('color', categories):
            result['color'] = colorname.label_major_colors(load_image(m)) + 1
        # Bug fix: the original tested `numpy.shape(a) >= 2`, comparing a
        # shape *tuple* with an int (TypeError on Python 3).  The intent
        # is a rank check, so compare the length of the shape instead.
        arrs = [a for a in result.values() if len(numpy.shape(a)) >= 2]
        shape = arrs[0].shape[-2:] if arrs else (1, 1)
        return result, shape

    # End of contract for AbstractSegmentation

    def seg_filename(self, n):
        '''Returns the segmentation filename for the nth dataset image.'''
        return re.sub(r'\.jpg$', '_seg.png', self.filename(n))

    def part_filenames(self, n):
        '''Returns all the subpart images for the nth dataset image.'''
        filename = self.filename(n)
        level = 1
        result = []
        # Part files are numbered _parts_1.png, _parts_2.png, ... until
        # the first missing level.
        while True:
            probe = re.sub(r'\.jpg$', '_parts_%d.png' % level, filename)
            if not os.path.isfile(probe):
                break
            result.append(probe)
            level += 1
        return result

    def expand(self, *path):
        '''Expands a filename and directories with the ADE dataset'''
        result = os.path.join(self.root, *path)
        # Resolve glob patterns (e.g. 'index*.mat') to the first match.
        if '*' in result or '?' in result:
            globbed = glob.glob(result)
            if len(globbed):
                return globbed[0]
        return result
def norm_name(s):
    """Normalize a scene name: collapse ' - ' separators and replace '/'."""
    normalized = s.replace(' - ', '-')
    return normalized.replace('/', '-')
def load_image(m):
    '''Returns the dataset image described by metadata *m* as a numpy array.'''
    image_path = m['filename']
    return imread(image_path)
def load_segmentation(m):
    '''Returns the object-class segmentation for metadata *m* as a numpy
    array, where each entry at a pixel is an object class value.
    '''
    return decodeClassMask(imread(m['seg_filename']))
def load_parts(m):
    '''Returns part segmentations for metadata *m*, stacked into a single
    array with one leading axis per level; [] when the image has no part
    levels.  (The original generator expression reused the name ``m`` for
    each level mask, shadowing the metadata parameter; renamed.)
    '''
    levels = [decodeClassMask(imread(fn)) for fn in m['part_filenames']]
    if not levels:
        return []
    return numpy.concatenate(tuple(mask[numpy.newaxis] for mask in levels))
def decodeClassMask(im):
    '''Decodes pixel-level object/part class and instance data from
    the given image, previously encoded into RGB channels.

    The class index is (R // 10) * 256 + G.  The red channel is widened
    to int32 before the multiply: for a uint8 image, `(R // 10) * 256`
    would overflow the 8-bit dtype under NumPy 2.x promotion rules and
    silently zero out the high byte.
    '''
    # Classes are a combination of RG channels (dividing R by 10)
    red = im[:, :, 0].astype(numpy.int32)
    return red // 10 * 256 + im[:, :, 1]
def wants(what, option):
    '''True when category *what* is requested: *option* is None (meaning
    "everything") or contains *what*.'''
    return option is None or what in option
|
washimimizuku/tools-for-biology | toolsforbiology/urls.py | Python | mit | 306 | 0.013072 | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
| # Examples:
# url(r'^$', 'toolsforbiology.views.home', name='home'),
# url | (r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2018_03_01/operations/_policy_assignments_operations.py | Python | mit | 47,343 | 0.004964 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_delete_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_request(
scope: str,
policy_assignment_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
scope: str,
policy_assignment_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/{scope}/providers/Microsoft.Authorization/polic | yAssignments/{policyAssignmentName}')
path_format_arguments = {
"scope": _SERIALIZER.url("scope", scope, 'str', skip_quote=True),
"policyAssignmentName": _SERIALIZER.url("policy_assignment_name", policy_assignment_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
| query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_group_request(
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_for_resource_request(
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyAssignments')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
"parentResourcePath": _SERIALIZER.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
"resourceType": _SERIALIZER.url("resource_type", resource_type, 'str', skip_quote=True),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any |
dask/zict | zict/common.py | Python | bsd-3-clause | 1,419 | 0.001409 | try:
from collections.abc import Mapping, MutableMapping
except ImportError:
from collections import Mapping, MutableMapping
class ZictBase(MutableMapping):
"""
Base class for zict mappings.
"""
def update(*args, **kwds):
# Boilerplate for implementing an update() method
if not args:
raise TypeError(
"descriptor 'update' of MutableMapping object " "needs an argument"
)
self = args[0]
args = args[1:]
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, got %d" % len(args))
items = []
if args:
other = ar | gs[0]
if isinstance(other, Mapping) or hasattr(other, "items"):
items += other.items()
else:
# Assuming (key, value) pairs
items += other
if kwds:
items += kwds.items()
self._do_update(items)
def _do_update(self, items):
# Default implementation, can be overriden for speed
for k, v in ite | ms:
self[k] = v
def close(self):
"""
Release any system resources held by this object.
"""
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def close(z):
"""
Close *z* if possible.
"""
if hasattr(z, "close"):
z.close()
|
pudo/jsonmapping | jsonmapping/transforms.py | Python | mit | 2,402 | 0 | import re
import six
from hashlib import sha1
from unidecode import unidecode
import normality
COLLAPSE = re.compile(r'\s+')
def transliterate(text):
""" Utility to properly transliterate text. """
text = unidecode(six.text_type(text))
text = text.replace('@', 'a')
return text
def coalesce(mapping, bind, values):
""" Given a list of values, return the first non-null value. """
for value in values:
if value is not None:
return [value]
return []
def slugify(mapping, bind, values):
""" Transform all values into URL-capable slugs. """
for value in values:
if isinstance(value, six.string_types):
value = transliterate(value)
value = normality.slugify(value)
yield value
def latinize(mapping, bind, values):
""" Transliterate a given string into the latin alphabet. """
for v in values:
if isinstance(v, six.string_types):
v = transliterate(v)
yield v
def join(mapping, bind, values):
""" Merge all the strings. Put space between them. """
return [' '.join([six.text_type(v) for v in values if v is not None])]
def str_func(name):
""" Apply f | unctions like upper(), lower() and strip(). """
def func(mapping, bind, values):
for v in values:
if isinstance(v, six.string_types):
v = getattr(v, name)()
yield v
return func
def hash(mapping, bind, values):
""" Generate a sha1 for each of the given values. """
for v in values:
if v is None:
continue
if not isinstance(v, six.string_types):
v = six.text_type(v)
| yield sha1(v.encode('utf-8')).hexdigest()
def clean(mapping, bind, values):
""" Perform several types of string cleaning for titles etc.. """
categories = {'C': ' '}
for value in values:
if isinstance(value, six.string_types):
value = normality.normalize(value, lowercase=False, collapse=True,
decompose=False,
replace_categories=categories)
yield value
TRANSFORMS = {
'coalesce': coalesce,
'slugify': slugify,
'clean': clean,
'latinize': latinize,
'join': join,
'upper': str_func('upper'),
'lower': str_func('lower'),
'strip': str_func('strip'),
'hash': hash
}
|
vivekanand1101/neutron | neutron/tests/api/test_allowed_address_pair.py | Python | apache-2.0 | 5,586 | 0 | # Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://w | ww.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or i | mplied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from neutron.tests.api import base
from neutron.tests.tempest import config
from neutron.tests.tempest import test
CONF = config.CONF
class AllowedAddressPairTestJSON(base.BaseNetworkTest):
"""
Tests the Neutron Allowed Address Pair API extension using the Tempest
REST client. The following API operations are tested with this extension:
create port
list ports
update port
show port
v2.0 of the Neutron API is assumed. It is also assumed that the following
options are defined in the [network-feature-enabled] section of
etc/tempest.conf
api_extensions
"""
@classmethod
def resource_setup(cls):
super(AllowedAddressPairTestJSON, cls).resource_setup()
if not test.is_extension_enabled('allowed-address-pairs', 'network'):
msg = "Allowed Address Pairs extension not enabled."
raise cls.skipException(msg)
cls.network = cls.create_network()
cls.create_subnet(cls.network)
port = cls.create_port(cls.network)
cls.ip_address = port['fixed_ips'][0]['ip_address']
cls.mac_address = port['mac_address']
@test.attr(type='smoke')
@test.idempotent_id('86c3529b-1231-40de-803c-00e40882f043')
def test_create_list_port_with_address_pair(self):
# Create port with allowed address pair attribute
allowed_address_pairs = [{'ip_address': self.ip_address,
'mac_address': self.mac_address}]
body = self.client.create_port(
network_id=self.network['id'],
allowed_address_pairs=allowed_address_pairs)
port_id = body['port']['id']
self.addCleanup(self.client.delete_port, port_id)
# Confirm port was created with allowed address pair attribute
body = self.client.list_ports()
ports = body['ports']
port = [p for p in ports if p['id'] == port_id]
msg = 'Created port not found in list of ports returned by Neutron'
self.assertTrue(port, msg)
self._confirm_allowed_address_pair(port[0], self.ip_address)
@test.attr(type='smoke')
def _update_port_with_address(self, address, mac_address=None, **kwargs):
# Create a port without allowed address pair
body = self.client.create_port(network_id=self.network['id'])
port_id = body['port']['id']
self.addCleanup(self.client.delete_port, port_id)
if mac_address is None:
mac_address = self.mac_address
# Update allowed address pair attribute of port
allowed_address_pairs = [{'ip_address': address,
'mac_address': mac_address}]
if kwargs:
allowed_address_pairs.append(kwargs['allowed_address_pairs'])
body = self.client.update_port(
port_id, allowed_address_pairs=allowed_address_pairs)
allowed_address_pair = body['port']['allowed_address_pairs']
self.assertEqual(allowed_address_pair, allowed_address_pairs)
@test.attr(type='smoke')
@test.idempotent_id('9599b337-272c-47fd-b3cf-509414414ac4')
def test_update_port_with_address_pair(self):
# Update port with allowed address pair
self._update_port_with_address(self.ip_address)
@test.attr(type='smoke')
@test.idempotent_id('4d6d178f-34f6-4bff-a01c-0a2f8fe909e4')
def test_update_port_with_cidr_address_pair(self):
# Update allowed address pair with cidr
cidr = str(netaddr.IPNetwork(CONF.network.tenant_network_cidr))
self._update_port_with_address(cidr)
@test.attr(type='smoke')
@test.idempotent_id('b3f20091-6cd5-472b-8487-3516137df933')
def test_update_port_with_multiple_ip_mac_address_pair(self):
# Create an ip _address and mac_address through port create
resp = self.client.create_port(network_id=self.network['id'])
newportid = resp['port']['id']
self.addCleanup(self.client.delete_port, newportid)
ipaddress = resp['port']['fixed_ips'][0]['ip_address']
macaddress = resp['port']['mac_address']
# Update allowed address pair port with multiple ip and mac
allowed_address_pairs = {'ip_address': ipaddress,
'mac_address': macaddress}
self._update_port_with_address(
self.ip_address, self.mac_address,
allowed_address_pairs=allowed_address_pairs)
def _confirm_allowed_address_pair(self, port, ip):
msg = 'Port allowed address pairs should not be empty'
self.assertTrue(port['allowed_address_pairs'], msg)
ip_address = port['allowed_address_pairs'][0]['ip_address']
mac_address = port['allowed_address_pairs'][0]['mac_address']
self.assertEqual(ip_address, ip)
self.assertEqual(mac_address, self.mac_address)
class AllowedAddressPairIpV6TestJSON(AllowedAddressPairTestJSON):
_ip_version = 6
|
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/moto/ec2/models.py | Python | agpl-3.0 | 37,412 | 0.001123 | import copy
import itertools
from collections import defaultdict
from boto.ec2.instance import Instance as BotoInstance, Reservation
from moto.core import BaseBackend
from .exceptions import (
InvalidIdError,
DependencyViolationError,
InvalidDHCPOptionsIdError
)
from .utils import (
random_ami_id,
random_dhcp_option_id,
random_eip_allocation_id,
random_eip_association_id,
random_gateway_id,
random_instance_id,
random_ip,
random_key_pair,
random_reservation_id,
random_route_table_id,
random_security_group_id,
random_snapshot_id,
random_spot_request_id,
random_subnet_id,
random_volume_id,
random_vpc_id,
)
class InstanceState(object):
def __init__(self, name='pending', code=0):
self.name = name
self.code = code
class Instance(BotoInstance):
def __init__(self, image_id, user_data, security_groups):
super(Instance, self).__init__()
self.id = random_instance_id()
self.image_id = image_id
self._state = InstanceState("running", 16)
self.user_data = user_data
self.security_groups = security_groups
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json):
properties = cloudformation_json['Properties']
security_group_ids = properties.get('SecurityGroups', [])
group_names = [ec2_backend.get_security_group_from_id(group_id).name for group_id in security_group_ids]
reservation = ec2_backend.add_instances(
image_id=properties['ImageId'],
user_data=properties.get('UserData'),
count=1,
security_group_names=group_names,
)
return reservation.instances[0]
@property
def physical_resource_id(self):
return self.id
def start(self, *args, **kwargs):
self._state.name = "running"
self._state.code = 16
def stop(self, *args, **kwargs):
self._state.name = "stopped"
self._state.code = 80
def terminate(self, *args, **kwargs):
self._state.name = "terminated"
self._state.code = 48
def reboot(self, *args, **kwargs):
self._state.name = "running"
self._state.code = 16
def get_tags(self, *args, **kwargs):
tags = ec2_backend.describe_tags(self.id)
return tags
class InstanceBackend(object):
def __init__(self):
self.reservations = {}
super(InstanceBackend, self).__init__()
def get_instance(self, instance_id):
for instance in self.all_instances():
if instance.id == instance_id:
return instance
def add_instances(self, image_id, count, user_data, security_group_names):
new_reservation = Reservation()
new_reservation.id = random_reservation_id()
security_groups = [self.get_security_group_from_name(name) for name in security_group_names]
for index in range(count):
new_instance = Instance(
image_id,
user_data,
security_groups,
)
new_reservation.instances.append(new_instance)
self.reservations[new_reservation.id] = new_reservation
return new_reservation
def start_instances(self, instance_ids):
started_instances = []
for instance in self.all_instances():
if instance.id in instance_ids:
instance.start()
started_instances.append(instance)
return started_instances
def stop_instances(self, instance_ids):
stopped_instances = []
for instance in self.all_instances():
if instance.id in instance_ids:
instance.stop()
stopped_instances.append(instance)
return stopped_instances
def terminate_instances(self, instance_ids):
terminated_instances = []
for instance in self.all_instances():
if instance.id in instance_ids:
instance.terminate()
terminated_instances.append(instance)
return terminated_instances
def reboot_instances(self, instance_ids):
rebooted_instances = []
for instance in self.all_instances():
if instance.id in instance_ids:
instance.reboot()
rebooted_instances.append(instance)
return rebooted_instances
def modify_instance_attribute(self, instance_id, key, value):
instance = self.get_instance(instance_id)
setattr(instance, key, value)
return instance
def describe_instance_attribute(self, instance_id, key):
instance = self.get_instance(instance_id)
value = getattr(instance, key)
return instance, value
def all_instances(self):
instances = []
for reservation in self.all_reservations():
for instance in reservation.instances:
instances.append(instance)
return instances
def get_instance_by_id(self, instance_id):
for reservation in self.all_reservations():
for instance i | n reservation.instances:
if instance.id == instance_id:
return instance
def get_reservations_by_instance_ids(self, instance_ids):
""" Go through all of the reservations and filter to onl | y return those
associated with the given instance_ids.
"""
reservations = []
for reservation in self.all_reservations(make_copy=True):
reservation_instance_ids = [instance.id for instance in reservation.instances]
matching_reservation = any(instance_id in reservation_instance_ids for instance_id in instance_ids)
if matching_reservation:
# We need to make a copy of the reservation because we have to modify the
# instances to limit to those requested
reservation.instances = [instance for instance in reservation.instances if instance.id in instance_ids]
reservations.append(reservation)
found_instance_ids = [instance.id for reservation in reservations for instance in reservation.instances]
if len(found_instance_ids) != len(instance_ids):
invalid_id = list(set(instance_ids).difference(set(found_instance_ids)))[0]
raise InvalidIdError(invalid_id)
return reservations
def all_reservations(self, make_copy=False):
if make_copy:
# Return copies so that other functions can modify them with changing
# the originals
return [copy.deepcopy(reservation) for reservation in self.reservations.values()]
else:
return [reservation for reservation in self.reservations.values()]
class KeyPairBackend(object):
def __init__(self):
self.keypairs = defaultdict(dict)
super(KeyPairBackend, self).__init__()
def create_key_pair(self, name):
if name in self.keypairs:
raise InvalidIdError(name)
self.keypairs[name] = keypair = random_key_pair()
keypair['name'] = name
return keypair
def delete_key_pair(self, name):
if name in self.keypairs:
self.keypairs.pop(name)
return True
def describe_key_pairs(self, filter_names=None):
results = []
for name, keypair in self.keypairs.iteritems():
if not filter_names or name in filter_names:
keypair['name'] = name
results.append(keypair)
return results
class TagBackend(object):
def __init__(self):
self.tags = defaultdict(dict)
super(TagBackend, self).__init__()
def create_tag(self, resource_id, key, value):
self.tags[resource_id][key] = value
return value
def delete_tag(self, resource_id, key):
return self.tags[resource_id].pop(key)
def describe_tags(self, filter_resource_ids=None):
results = []
for resource_id, tags in self.tags.iteritems():
ami = 'ami' in resource_id
for key, value in tags.iteritems():
if not filter_ |
xpharry/Udacity-DLFoudation | tutorials/reinforcement/gym/gym/scoreboard/client/util.py | Python | mit | 1,383 | 0.003615 | import functools
import logging
import os
import random
import sys
import time
from gym import error
logger = logging.getLogger(__name__)
def utf8(value):
if isinstance(value, unicode) and sys.version_info < (3, 0):
return value.encode('utf-8')
else:
return value
def file_size(f):
return os.fstat(f.fileno()).st | _size
def retry_exponential_backoff(f, errors, max_retries=5, interval=1):
@functools.wraps(f)
def wrapped(*args, **kwargs):
num_retries = 0
caught_errors = []
while True:
try:
result = f(*args, **kwargs)
except errors as e:
logger.error("Caught error in %s: %s" % (f.__name__, e))
| caught_errors.append(e)
if num_retries < max_retries:
backoff = random.randint(1, 2 ** num_retries) * interval
logger.error("Retrying in %.1fs..." % backoff)
time.sleep(backoff)
num_retries += 1
else:
msg = "Exceeded allowed retries. Here are the individual error messages:\n\n"
msg += "\n\n".join("%s: %s" % (type(e).__name__, str(e)) for e in caught_errors)
raise error.RetriesExceededError(msg)
else:
break
return result
return wrapped
|
twitter/pants | tests/python/pants_test/base/test_deprecated.py | Python | apache-2.0 | 7,818 | 0.012024 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import warnings
from builtins import object, str
from contextlib import contextmanager
import mock
from packaging.version import Version
from pants.base.deprecated import (BadDecoratorNestingError, BadSemanticVersionError,
CodeRemovedError, InvalidSemanticVersionOrderingError,
MissingSemanticVersionError, NonDevSemanticVersionError,
deprecated, deprecated_conditional, deprecated_module,
warn_or_error)
from pants.util.collections import assert_single_element
from pants_test.test_base import TestBase
_FAKE_CUR_VERSION = '2.0.0.dev0'
class DeprecatedTest(TestBase):
FUTURE_VERSION = '9999.9.9.dev0'
@contextmanager
def _test_deprecation(self, deprecation_expected=True):
with warnings.catch_warnings(record=True) as seen_warnings:
def assert_deprecation_warning():
if deprecation_expected:
warning = assert_single_element(seen_warnings)
self.assertEqual(warning.category, DeprecationWarning)
return warning.message
else:
self.assertEqual(0, len(seen_warnings))
warnings.simplefilter('always')
self.assertEqual(0, len(seen_warnings))
yield assert_deprecation_warning
assert_deprecation_warning()
def test_deprecated_function(self):
expected_return = 'deprecated_function'
@deprecated(self.FUTURE_VERSION)
def deprecated_function():
return expected_return
with self._test_deprecation():
self.assertEqual(expected_return, deprecated_function())
def test_deprecated_method(self):
expected_return = 'deprecated_method'
class Test(object):
@deprecated(self.FUTURE_VERSION)
def deprecated_method(self):
return expected_return
with self._test_deprecation():
self.assertEqual(expected_return, Test().deprecated_method())
def test_deprecated_conditional_true(self):
predicate = lambda: True
with self._test_deprecation():
deprecated_conditional(predicate, self.FUTURE_VERSION, "test hint message", stacklevel=0)
def test_deprecated_conditional_false(self):
predicate = lambda: False
with self._test_deprecation(deprecation_expected=False):
deprecated_conditional(predicate, self.FUTURE_VERSION, "test hint message", stacklevel=0)
def test_deprecated_property(self):
expected_return = 'deprecated_property'
class Test(object):
@property
@deprecated(self.FUTURE_VERSION)
def deprecated_property(self):
return expected_return
with self._test_deprecation():
self.assertEqual(expected_return, Test().deprecated_property)
def test_deprecated_module(self):
with self._test_deprecation() as extract_deprecation_warning:
# Note: Attempting to import here a dummy module that just calls deprecated_module() does not
# properly trigger the deprecation, due to a bad interaction with pytest that I've not fully
# understood. But we trust python to correctly execute modules on import, so just testing a
# direct call of deprecated_module() here is fine.
deprecated_module(self.FUTURE_VERSION, hint_message='Do not use me.')
warning_message = str(extract_deprecation_warning())
self.assertIn('module will be removed', warning_message)
self.assertIn('Do not use me', warning_message)
def test_deprecation_hint(self):
hint_message = 'Find the foos, fast!'
expected_return = 'deprecated_function'
@deprecated(self.FUTURE_VERSION, hint_message=hint_message)
def deprecated_function():
return expected_return
with self._test_deprecation() as extract_deprecation_warning:
self.assertEqual(expected_return, deprecated_function())
self.assertIn(hint_message, str(extract_deprecation_warning()))
def test_deprecation_subject(self):
subject = '`./pants blah`'
expected_return = 'deprecated_function'
| @deprecated(self.FUTURE_VERSION, subject=subject)
d | ef deprecated_function():
return expected_return
with self._test_deprecation() as extract_deprecation_warning:
self.assertEqual(expected_return, deprecated_function())
self.assertIn(subject, str(extract_deprecation_warning()))
def test_removal_version_required(self):
with self.assertRaises(MissingSemanticVersionError):
@deprecated(None)
def test_func():
pass
def test_removal_version_bad(self):
with self.assertRaises(BadSemanticVersionError):
warn_or_error('a.a.a', 'dummy description')
with self.assertRaises(BadSemanticVersionError):
@deprecated('a.a.a')
def test_func0():
pass
with self.assertRaises(BadSemanticVersionError):
warn_or_error(1.0, 'dummy description')
with self.assertRaises(BadSemanticVersionError):
@deprecated(1.0)
def test_func1():
pass
with self.assertRaises(BadSemanticVersionError):
warn_or_error('1.a.0', 'dummy description')
with self.assertRaises(BadSemanticVersionError):
@deprecated('1.a.0')
def test_func1a():
pass
def test_removal_version_non_dev(self):
with self.assertRaises(NonDevSemanticVersionError):
@deprecated('1.0.0')
def test_func1a():
pass
@mock.patch('pants.base.deprecated.PANTS_SEMVER', Version(_FAKE_CUR_VERSION))
def test_removal_version_same(self):
with self.assertRaises(CodeRemovedError):
warn_or_error(_FAKE_CUR_VERSION, 'dummy description')
@deprecated(_FAKE_CUR_VERSION)
def test_func():
pass
with self.assertRaises(CodeRemovedError):
test_func()
def test_removal_version_lower(self):
with self.assertRaises(CodeRemovedError):
warn_or_error('0.0.27.dev0', 'dummy description')
@deprecated('0.0.27.dev0')
def test_func():
pass
with self.assertRaises(CodeRemovedError):
test_func()
def test_bad_decorator_nesting(self):
with self.assertRaises(BadDecoratorNestingError):
class Test(object):
@deprecated(self.FUTURE_VERSION)
@property
def test_prop(this):
pass
def test_deprecation_start_version_validation(self):
with self.assertRaises(BadSemanticVersionError):
warn_or_error(removal_version='1.0.0.dev0',
deprecated_entity_description='dummy',
deprecation_start_version='1.a.0')
with self.assertRaises(InvalidSemanticVersionOrderingError):
warn_or_error(removal_version='0.0.0.dev0',
deprecated_entity_description='dummy',
deprecation_start_version='1.0.0.dev0')
@mock.patch('pants.base.deprecated.PANTS_SEMVER', Version(_FAKE_CUR_VERSION))
def test_deprecation_start_period(self):
with self.assertRaises(CodeRemovedError):
warn_or_error(removal_version=_FAKE_CUR_VERSION,
deprecated_entity_description='dummy',
deprecation_start_version='1.0.0.dev0')
with self.warnings_catcher() as w:
warn_or_error(removal_version='999.999.999.dev999',
deprecated_entity_description='dummy',
deprecation_start_version=_FAKE_CUR_VERSION)
self.assertWarning(w, DeprecationWarning,
'DEPRECATED: dummy will be removed in version 999.999.999.dev999.')
self.assertIsNone(
warn_or_error(removal_version='999.999.999.dev999',
deprecated_entity_description='dummy',
deprecation_start_version='500.0.0.dev0'))
|
eaobservatory/hedwig | test/test_file_poll.py | Python | gpl-3.0 | 6,323 | 0 | # Copyright (C) 2016-2018 East Asian Observatory
# All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful,but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,51 Franklin
# Street, Fifth Floor, Boston, MA 02110-1301, USA
from __future__ import absolute_import, division, print_function, \
unicode_literals
from os.path import exists
from pymoc import MOC
from hedwig.config import get_config
from hedwig.file.poll import process_moc, \
process_proposal_figure, process_proposal_pdf, process_review_figure
from hedwig.type.enum import AttachmentState, BaseTextRole, \
FigureType, FormatType
from .dummy_db import DBTestCase
from .dummy_file import example_png, example_pdf
class FilePollTestCase(DBTestCase):
    """Exercise the background file-processing poll functions.

    Each test follows the same pattern: insert an attachment (MOC,
    figure or PDF) so it is in state NEW, run the corresponding
    ``process_*`` poller once and check it reports one item handled,
    then verify the attachment moved to state READY.
    """

    def test_poll_process_moc(self):
        """MOC coverage maps should be processed NEW -> READY."""
        facility_id = self.db.ensure_facility('test')
        # Should initially find nothing to process.
        self.assertEqual(process_moc(self.db), 0)
        # Insert a new MOC.
        moc = MOC(order=10, cells=(1234, 4321))
        moc_id = self.db.add_moc(
            facility_id, 'test', 'Test', FormatType.PLAIN, True, moc)
        # The MOC should be marked NEW.
        mocs = self.db.search_moc(facility_id, None, moc_id=moc_id)
        self.assertEqual(list(mocs.keys()), [moc_id])
        self.assertEqual(mocs[moc_id].state, AttachmentState.NEW)
        # Should now find 1 MOC to process.
        self.assertEqual(process_moc(self.db), 1)
        # The MOC should now be marked READY.
        mocs = self.db.search_moc(facility_id, None, moc_id=moc_id)
        self.assertEqual(list(mocs.keys()), [moc_id])
        self.assertEqual(mocs[moc_id].state, AttachmentState.READY)

    def test_poll_proposal_figure(self):
        """Proposal figures should be processed NEW -> READY."""
        # Should initially find nothing to process.
        self.assertEqual(process_proposal_figure(self.db), 0)
        # Create a proposal and add a figure.
        proposal_id = self._create_test_proposal()
        person_id = self.db.add_person('Figure Uploader')
        (link_id, figure_id) = self.db.add_proposal_figure(
            BaseTextRole, proposal_id, BaseTextRole.TECHNICAL_CASE,
            FigureType.PNG, example_png, 'Dummy Caption',
            'dummy.png', person_id)
        # The figure should initially be marked NEW.
        figures = self.db.search_proposal_figure(proposal_id=proposal_id)
        self.assertEqual(list(figures.keys()), [link_id])
        self.assertEqual(figures[link_id].state, AttachmentState.NEW)
        # Should now find 1 figure to process.
        self.assertEqual(process_proposal_figure(self.db), 1)
        # The figure should now be marked READY.
        figures = self.db.search_proposal_figure(proposal_id=proposal_id)
        self.assertEqual(list(figures.keys()), [link_id])
        self.assertEqual(figures[link_id].state, AttachmentState.READY)

    def test_poll_review_figure(self):
        """Review figures should be processed NEW -> READY."""
        # Should initially find nothing to process.
        self.assertEqual(process_review_figure(self.db), 0)
        # Create a proposal and add a figure.
        proposal_id = self._create_test_proposal()
        reviewer_id = self._create_test_reviewer(proposal_id)
        person_id = self.db.add_person('Figure Uploader')
        (link_id, figure_id) = self.db.add_review_figure(
            reviewer_id, FigureType.PNG, example_png, 'Dummy Caption',
            'dummy.png', person_id)
        # The figure should initially be marked NEW.
        figures = self.db.search_review_figure(reviewer_id=reviewer_id)
        self.assertEqual(list(figures.keys()), [link_id])
        self.assertEqual(figures[link_id].state, AttachmentState.NEW)
        # Should now find 1 figure to process.
        self.assertEqual(process_review_figure(self.db), 1)
        # The figure should now be marked READY.
        figures = self.db.search_review_figure(reviewer_id=reviewer_id)
        self.assertEqual(list(figures.keys()), [link_id])
        self.assertEqual(figures[link_id].state, AttachmentState.READY)

    def test_poll_proposal_pdf(self):
        """Proposal PDFs should be processed NEW -> READY.

        Skipped when no PDF renderer is configured/installed, since
        processing shells out to an external rendering utility.
        """
        # Should initially find nothing to process.
        self.assertEqual(process_proposal_pdf(self.db), 0)
        # Determine if we have the configured PDF processing application so
        # that the test can be skipped if it is not present.
        config = get_config()
        pdf_renderer = config.get('proposal_pdf', 'renderer')
        if not pdf_renderer:
            self.skipTest('Proposal PDF renderer not configured')
        if pdf_renderer == 'ghostscript':
            if not exists(get_config().get('utilities', 'ghostscript')):
                self.skipTest('Ghostscript not available')
        elif pdf_renderer == 'pdftocairo':
            if not exists(get_config().get('utilities', 'pdftocairo')):
                self.skipTest('pdftocairo not available')
        else:
            self.fail('Unexpected proposal PDF renderer')
        # Create a proposal and add a PDF.
        proposal_id = self._create_test_proposal()
        person_id = self.db.add_person('PDF Uploader')
        (link_id, pdf_id) = self.db.set_proposal_pdf(
            BaseTextRole, proposal_id, BaseTextRole.TECHNICAL_CASE,
            example_pdf, 1, 'dummy.pdf', person_id)
        # The PDF should initially be marked NEW.
        pdfs = self.db.search_proposal_pdf(proposal_id=proposal_id)
        self.assertEqual(list(pdfs.keys()), [link_id])
        self.assertEqual(pdfs[link_id].state, AttachmentState.NEW)
        # Should now find 1 PDF to process.
        self.assertEqual(process_proposal_pdf(self.db), 1)
        # The PDF should now be marked READY.
        pdfs = self.db.search_proposal_pdf(proposal_id=proposal_id)
        self.assertEqual(list(pdfs.keys()), [link_id])
        self.assertEqual(pdfs[link_id].state, AttachmentState.READY)
|
castlecms/castle.cms | castle/cms/browser/survey.py | Python | gpl-2.0 | 849 | 0.002356 | from zope.interface import Interface
from zope import schema
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from z3c.form import form
class ICastleSurvey(Interface):
    """Zope schema describing the GovSurvey integration settings."""

    # Base endpoint of the external GovSurvey service.
    survey_api_url = schema.TextLine(
        title=u'GovSurvey API URL',
        description=u'GovSurvey.us API URL; https://govsurvey.us for more information.',
        required=False
    )
    # Account identifier used to authenticate against the GovSurvey API.
    survey_account_id = schema.TextLine(
        title=u'GovSurvey API Account ID',
        description=u'Account ID from your account on the GovSurvey system API',
        required=False
    )
class CastleSurvey(form.Form):
    """z3c.form form that renders a GovSurvey survey via survey.pt."""

    label = u"Survey"
    description = u"A GovSurvey survey"
    formErrorsMessage = 'There were errors in a Castle Survey'
    ignoreContext = True  # form data is not bound to a content object
    schema = ICastleSurvey
    template = ViewPageTemplateFile("templates/survey.pt")
|
girder/girder_worker | tests/integration/common_tasks/common_tasks/test_tasks/fib.py | Python | apache-2.0 | 440 | 0 | from girder | _worker.utils import girder_job
from girder_worker.app import app
@girder_job(title='Fibonacci Job')
@app.task
def fibonacci(n, **kwargs):
    """Return the n-th Fibonacci number (fib(1) == fib(2) == 1).

    Computed iteratively in O(n) time and O(1) space.  The previous
    recursive implementation was O(2**n) and recursed without bound
    (until a stack overflow) for n < 1; the iterative form simply
    returns 1 for n <= 2.
    """
    a, b = 1, 1
    for _ in range(n - 2):
        a, b = b, a + b
    return b
@app.task(bind=True)
def fib_seq(self, n, **kwargs):
    """Print the first *n* Fibonacci numbers, one per line as "i: fib(i)".

    Raises a plain Exception when *n* is negative.
    """
    if n < 0:
        raise Exception('Must pass in positive integer!')
    for position in range(n):
        index = position + 1
        print('%s: %s' % (index, fibonacci(index)))
|
xorpaul/shinken | shinken/dispatcher.py | Python | agpl-3.0 | 29,613 | 0.004424 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
"""
This is the class of the dispatcher. Its role is to dispatch
configurations to other elements like schedulers, reactionner,
pollers, receivers and brokers. It is responsible for high availability part. If an
element dies and the element type has a spare, it sends the config of the
dead one to the spare
"""
import time
import random
import itertools
from shinken.util import alive_then_spare_then_deads
from shinken.log import logger
# Always initialize random :)
random.seed()
# Dispatcher Class
class Dispatcher:
# Load all elements, set them as not assigned
# and add them to elements, so loop will be easier :)
    def __init__(self, conf, arbiter):
        """Index the global configuration and every satellite for dispatching.

        :param conf: fully loaded global configuration object
        :param arbiter: the arbiter daemon we run in (may be None)
        """
        self.arbiter = arbiter
        # Pointer to the whole conf
        self.conf = conf
        self.realms = conf.realms
        # Direct pointer to important elements for us
        for sat_type in ('arbiters', 'schedulers', 'reactionners', 'brokers', 'receivers', 'pollers'):
            setattr(self, sat_type, getattr(self.conf, sat_type))
            # for each satellite, we look if current arbiter have a specific satellitemap value setted for this satellite
            # if so, we give this map to the satellite (used to build satellite URI later)
            if arbiter is None:
                continue
            key = sat_type[:-1] + '_name'  # i.e: schedulers -> scheduler_name
            for satellite in getattr(self, sat_type):
                sat_name = getattr(satellite, key)
                satellite.set_arbiter_satellitemap(arbiter.satellitemap.get(sat_name, {}))
        self.dispatch_queue = {'schedulers': [], 'reactionners': [],
                               'brokers': [], 'pollers': [], 'receivers': []}
        self.elements = []  # all elements, sched and satellites
        self.satellites = []  # only satellites not schedulers
        for cfg in self.conf.confs.values():
            cfg.is_assigned = False
            cfg.assigned_to = None
            # We try to remember each "push", so we
            # can know with configuration ids+flavor
            # if a satellite already got it or not :)
            cfg.push_flavor = 0
        # Add satellites in the good lists
        self.elements.extend(self.schedulers)
        # Others are in 2 lists
        self.elements.extend(self.reactionners)
        self.satellites.extend(self.reactionners)
        self.elements.extend(self.pollers)
        self.satellites.extend(self.pollers)
        self.elements.extend(self.brokers)
        self.satellites.extend(self.brokers)
        self.elements.extend(self.receivers)
        self.satellites.extend(self.receivers)
        # Some flag about dispatch need or not
        self.dispatch_ok = False
        self.first_dispatch_done = False
        # Prepare the satellites confs
        for satellite in self.satellites:
            satellite.prepare_for_conf()
        # Some properties must be given to satellites from global
        # configuration, like the max_plugins_output_length to pollers
        parameters = {'max_plugins_output_length': self.conf.max_plugins_output_length}
        for poller in self.pollers:
            poller.add_global_conf_parameters(parameters)
        # Reset need_conf for all schedulers.
        for s in self.schedulers:
            s.need_conf = True
        # Same for receivers
        for rec in self.receivers:
            rec.need_conf = True
# checks alive elements
    def check_alive(self):
        """Poll every element (and spare arbiters) for liveness.

        Dead elements, or alive ones that lost their configuration, get
        ``need_conf`` set so the next dispatch round reassigns them.
        """
        for elt in self.elements:
            #print "Updating elements", elt.get_name(), elt.__dict__
            elt.update_infos()
            # Not alive needs new need_conf
            # and spare too if they do not have already a conf
            # REF: doc/shinken-scheduler-lost.png (1)
            # Note: `or` binds looser than `and`, so this reads as:
            # dead, OR (has a conf attribute and that conf is None).
            if not elt.alive or hasattr(elt, 'conf') and elt.conf is None:
                elt.need_conf = True
        for arb in self.arbiters:
            # If not me, but not the master too
            if arb != self.arbiter and arb.spare:
                arb.update_infos()
                #print "Arb", arb.get_name(), "alive?", arb.alive, arb.__dict__
# Check if all active items are still alive
# the result goes into self.dispatch_ok
# TODO: finish need conf
def check_dispatch(self):
# Check if the other arbiter has a conf, but only if I am a master
for arb in self.arbiters:
# If not me and I'm a master
if arb != self.arbiter and self.arbiter and not self.arbiter.spare:
if not arb.have_conf(self.conf.magic_hash):
if not hasattr(self.conf, 'whole_conf_pack'):
logger.error('CRITICAL: the arbiter try to send a configureion but it is not a MASTER one?? Look at your configuration.')
continue
arb.put_conf(self.conf.whole_conf_pack)
# Remind it that WE are the master here!
arb.do_not_run()
else:
# Ok, it already has the conf. I remember that
# it does not have to run, I'm still alive!
arb.do_not_run()
# We check for confs to be dispatched on alive scheds. If not dispatched, need dispatch :)
# and if dispatch on a failed node, remove the association, and need a new dispatch
for r in self.realms:
for cfg_id in r.confs:
push_flavor = r.confs[cfg_id].push_flavor
sched = r.confs[cfg_id].assigned_to
if sched is None:
if self.first_dispatch_done:
logger.info("Scheduler configuration %d is unmanaged!!" % cfg_id)
self.dispatch_ok = False
else:
if not sched.alive:
self.dispatch_ok = False # so we ask a new dispatching
logger.warning("Scheduler %s had the configuration %d but is dead, I am not happy." % (sched.get_name(), cfg_id))
sched.conf.assigned_to = None
sched.conf.is_assigned = False
sched.conf.push_flavor = 0
sched.push_flavor = 0
sched.conf = None
# Maybe the scheduler restarts, so is alive but without the conf we think it was managing
# so ask it what it is really managing, and if not, put the conf unassigned
if not sched.do_i_manage(cfg_id, push_flavor):
self.dispatch_ok = False # so we ask a new dispatching
logger.warning("Scheduler %s did not managed its configuration %d,I am not happy." % (sched.get_name(), cfg_id))
if sched.conf:
sched.conf.assigned_to = None
sched.conf.is_assigned = False
sched.conf.push_flavor = 0
sched.push_flavor = 0
sched.need_conf = True
sched.conf = None
# Else: ok the conf is managed by a living scheduler
# Maybe satellites are alive, but do not have a c |
lduarte1991/edx-platform | openedx/core/djangoapps/schedules/migrations/0001_initial.py | Python | agpl-3.0 | 1,410 | 0.004965 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the Schedule model with
    # django-model-utils audit timestamps and a one-to-one link to a
    # student.CourseEnrollment.  Generated field definitions should not
    # be edited by hand.

    dependencies = [
        ('student', '0010_auto_20170207_0458'),
    ]
    operations = [
        migrations.CreateModel(
            name='Schedule',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('active', models.BooleanField(default=True, help_text='Indicates if this schedule is actively used')),
                ('start', models.DateTimeField(help_text='Date this schedule went into effect')),
                ('upgrade_deadline', models.DateTimeField(help_text='Deadline by which the learner must upgrade to a verified seat', null=True, blank=True)),
                ('enrollment', models.OneToOneField(to='student.CourseEnrollment')),
            ],
            options={
                'verbose_name': 'Schedule',
                'verbose_name_plural': 'Schedules',
            },
        ),
    ]
|
owlabs/incubator-airflow | airflow/settings.py | Python | apache-2.0 | 13,677 | 0.001975 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import atexit
import json
import logging
import os
import pendulum
import sys
from typing import Any
from sqlalchemy import create_engine, exc
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import NullPool
from airflow.configuration import conf, AIRFLOW_HOME, WEBSERVER_CONFIG # NOQA F401
from airflow.logging_config import configure_logging
from airflow.utils.sqlalchemy import setup_event_handlers
log = logging.getLogger(__name__)
# Whether the role-based-access-control web UI is enabled.
RBAC = conf.getboolean('webserver', 'rbac')

# Default timezone used for naive datetimes; falls back to UTC when the
# configured value is missing or invalid.
TIMEZONE = pendulum.timezone('UTC')
try:
    tz = conf.get("core", "default_timezone")
    if tz == "system":
        TIMEZONE = pendulum.local_timezone()
    else:
        TIMEZONE = pendulum.timezone(tz)
except Exception:
    # Best-effort: keep the UTC default when the option is absent/bad.
    pass
# Use lazy %-style arguments instead of eager string interpolation so the
# formatting cost is only paid when the record is actually emitted.
log.info("Configured default timezone %s", TIMEZONE)
class DummyStatsLogger(object):
    """No-op Stats backend used when statsd metric reporting is disabled."""

    @classmethod
    def incr(cls, stat, count=1, rate=1):
        """Increment counter ``stat`` (no-op)."""
        pass

    @classmethod
    def decr(cls, stat, count=1, rate=1):
        """Decrement counter ``stat`` (no-op)."""
        pass

    @classmethod
    def gauge(cls, stat, value, rate=1, delta=False):
        """Record gauge ``stat`` (no-op)."""
        pass

    @classmethod
    def timing(cls, stat, dt):
        """Record timing ``stat`` (no-op)."""
        pass
class AllowListValidator:
    """Validate statsd metric names against a comma-separated allow-list."""

    def __init__(self, allow_list=None):
        """``allow_list``: comma-separated name prefixes, or None to allow all."""
        if allow_list:
            self.allow_list = tuple(
                entry.strip().lower() for entry in allow_list.split(','))
        else:
            self.allow_list = None

    def test(self, stat):
        """Return True when *stat* is permitted by the configured prefixes."""
        if self.allow_list is None:
            return True  # default is all metrics allowed
        return stat.strip().lower().startswith(self.allow_list)
class SafeStatsdLogger:
    """Statsd facade that forwards only metrics accepted by the allow-list."""

    def __init__(self, statsd_client, allow_list_validator=AllowListValidator()):
        self.statsd = statsd_client
        self.allow_list_validator = allow_list_validator

    def incr(self, stat, count=1, rate=1):
        """Increment *stat* when permitted; otherwise drop it."""
        if not self.allow_list_validator.test(stat):
            return None
        return self.statsd.incr(stat, count, rate)

    def decr(self, stat, count=1, rate=1):
        """Decrement *stat* when permitted; otherwise drop it."""
        if not self.allow_list_validator.test(stat):
            return None
        return self.statsd.decr(stat, count, rate)

    def gauge(self, stat, value, rate=1, delta=False):
        """Record gauge *stat* when permitted; otherwise drop it."""
        if not self.allow_list_validator.test(stat):
            return None
        return self.statsd.gauge(stat, value, rate, delta)

    def timing(self, stat, dt):
        """Record timing *stat* when permitted; otherwise drop it."""
        if not self.allow_list_validator.test(stat):
            return None
        return self.statsd.timing(stat, dt)
# Module-level Stats facade: replaced by a SafeStatsdLogger below when
# statsd reporting is switched on in the scheduler config.
Stats = DummyStatsLogger  # type: Any
if conf.getboolean('scheduler', 'statsd_on'):
    from statsd import StatsClient
    statsd = StatsClient(
        host=conf.get('scheduler', 'statsd_host'),
        port=conf.getint('scheduler', 'statsd_port'),
        prefix=conf.get('scheduler', 'statsd_prefix'))
    allow_list_validator = AllowListValidator(conf.get('scheduler', 'statsd_allow_list', fallback=None))
    Stats = SafeStatsdLogger(statsd, allow_list_validator)
else:
    # Redundant with the initial assignment above; kept for symmetry.
    Stats = DummyStatsLogger
# ASCII-art banner printed at daemon start-up.
HEADER = '\n'.join([
    r' ____________ _____________',
    r' ____ |__( )_________ __/__ /________ __',
    r'____ /| |_ /__ ___/_ /_ __ /_ __ \_ | /| / /',
    r'___ ___ | / _ / _ __/ _ / / /_/ /_ |/ |/ /',
    r' _/_/ |_/_/ /_/ /_/ /_/ \____/____/|__/',
])
LOGGING_LEVEL = logging.INFO
# the prefix to append to gunicorn worker processes after init
GUNICORN_WORKER_READY_PREFIX = "[ready] "
LOG_FORMAT = conf.get('core', 'log_format')
SIMPLE_LOG_FORMAT = conf.get('core', 'simple_log_format')
# Globals populated later by configure_vars() / configure_orm() /
# configure_logging().
SQL_ALCHEMY_CONN = None
DAGS_FOLDER = None
PLUGINS_FOLDER = None
LOGGING_CLASS_PATH = None
engine = None
Session = None
# The JSON library to use for DAG Serialization and De-Serialization
json = json
def policy(task_instance):
    """
    This policy setting allows altering task instances right before they
    are executed. It allows administrator to rewire some task parameters.
    Note that the ``TaskInstance`` object has an attribute ``task`` pointing
    to its related task object, that in turns has a reference to the DAG
    object. So you can use the attributes of all of these to define your
    policy.
    To define policy, add a ``airflow_local_settings`` module
    to your PYTHONPATH that defines this ``policy`` function. It receives
    a ``TaskInstance`` object and can alter it where needed.
    Here are a few examples of how this can be useful:
    * You could enforce a specific queue (say the ``spark`` queue)
      for tasks using the ``SparkOperator`` to make sure that these
      task instances get wired to the right workers
    * You could force all task instances running on an
      ``execution_date`` older than a week old to run in a ``backfill``
      pool.
    * ...
    """
    # Intentionally a no-op here; a user-supplied implementation from
    # airflow_local_settings (imported elsewhere) overrides this hook.
def pod_mutation_hook(pod):
    """
    This setting allows altering ``Pod`` objects before they are passed to
    the Kubernetes client by the ``PodLauncher`` for scheduling.
    To define a pod mutation hook, add a ``airflow_local_settings`` module
    to your PYTHONPATH that defines this ``pod_mutation_hook`` function.
    It receives a ``Pod`` object and can alter it where needed.
    This could be used, for instance, to add sidecar or init containers
    to every worker pod launched by KubernetesExecutor or KubernetesPodOperator.
    """
    # Intentionally a no-op here; a user-supplied implementation from
    # airflow_local_settings (imported elsewhere) overrides this hook.
def configure_vars():
    """Populate module-level connection/path globals from the Airflow config."""
    global SQL_ALCHEMY_CONN
    global DAGS_FOLDER
    global PLUGINS_FOLDER
    SQL_ALCHEMY_CONN = conf.get('core', 'SQL_ALCHEMY_CONN')
    DAGS_FOLDER = os.path.expanduser(conf.get('core', 'DAGS_FOLDER'))
    # plugins_folder may be absent from older configs; default to a
    # "plugins" directory beneath AIRFLOW_HOME in that case.
    PLUGINS_FOLDER = conf.get(
        'core',
        'plugins_folder',
        fallback=os.path.join(AIRFLOW_HOME, 'plugins')
    )
def configure_orm(disable_connection_pool=False):
log.debug("Setting up DB connection pool (PID %s)" % os.getpid())
global engine
global Session
engine_args = {}
pool_connections = conf.getboolean('core', 'SQL_ALCHEMY_POOL_ENABLED')
if disable_connection_pool or not pool_connections:
engine_args['poolclass'] = NullPool
log.debug("settings.configure_orm(): Using NullPool")
elif 'sqlite' not in SQL_ALCHEMY_CONN:
# Pool size engine args not supported by sqlite.
# If no config value is defined for the pool size, select a reasonable value.
# 0 means no limit, which could lead to exceeding the Database connection limit.
pool_size = conf.getint('core', 'SQL_ALCHEMY_POOL_SIZE', fallback=5)
# The maximum overflow size of the pool.
# When the number of checked-out connections reaches the size set in pool_size,
# additional connections will be returned up to this limit.
# When those additional connections are returned to the pool, they are disconnected and discarded.
# It follows then that the total number of simultane | ous connections
# the pool will allow is pool_size + max_overflow,
# and the total number of “sleeping” connections the pool will allow is pool_size.
# max_overflow can be set to -1 to indicate no overflow limit;
# no limit will be placed on the total number
# of concurrent connections. Defaults to 10.
max_overflow = conf.g | etint('core', 'SQL_ALCHEMY_MAX_OVERFLOW', fallback=10)
# The DB server already has a value for wait_timeout (number of seconds |
zaqwes8811/ml-cv | ml_tests.py | Python | apache-2.0 | 241 | 0.020747 | from sklearn import preprocessing
import numpy as np
X = np.array([[ 1., -1., 2.],
[ 2., 0., | 0.],
[ 2., 0., | 0.],
[ 0., 1., -1.]])
print X
X_scaled = preprocessing.scale(X)
print X_scaled
|
jbs1/jhack14 | rooms.py | Python | gpl-2.0 | 1,739 | 0.016676 | from classes import *
# --- World layout -----------------------------------------------------------
# A small static map: Room objects wired together by compass directions,
# plus the Objects/Items/Entities each room contains.
sign = Object('sign', 'To the left is a sign.', 'The sign says: Den of Evil')
opening = Room('opening', 'You are standing in front of a cave.', {}, {'sign' : sign})
#sign.set_room(opening)
# Dead-end rooms to the west and east of the opening.
opening_w = Room('opening_w', 'You are standing in front of an impassable jungle. There is nothing here you can do.')
opening_w.add_room('east', opening)
opening.add_room('west', opening_w)
opening_e = Room('opening_e', 'You are standing in front of an impassable jungle. There is nothing here you can do.')
opening_e.add_room('west', opening)
opening.add_room('east', opening_e)
# The shed (south-west) holds the lamp; its entrance is blocked by a door
# that must be broken (see the Object's "explodes" action text).
lamp = Item('Lamp', 'On the ground is an old and rusty oil lamp.', False)
shed = Room('shed', 'In the dim light from outside you can see a small and dirty room.', {'lamp':lamp}, {}, {}, 'The broken door blocks the entrance')
door=Object('door','The door barely hangs in its place', None, 'The door explodes under your force.', None, shed)
opening_s = Room('opening_s', 'There is a small shed at west side of the road. A path leads back to the north.', {}, {'door':door})
#door.set_room(opening_s)
opening_s.add_room('north', opening)
opening_s.add_room('west', shed)
opening.add_room('south', opening_s)
shed.add_room('east',opening_s)
# The cave to the north; dark unless the player carries the lit lamp.
thief = Entity('thief','A filthy looking thief stands at the wall.')
cave_entrance = Room('cave_entrance', 'You are entering a long tunnel going north, that is dimly lit by the light of your lamp.', {}, {}, {'thief':thief}, 'It is to dark to see anything.')
cave_entrance.add_room('south', opening)
opening.add_room('north', cave_entrance)
# Lookup table used by the game engine to resolve rooms by name.
rooms = {
    'opening' : opening,
    'opening_w' : opening_w,
    'opening_e' : opening_e,
    'opening_s' : opening_s,
    'cave_entrance' : cave_entrance,
    'shed' : shed
}
|
khchine5/django-shop | shop/urls/rest_api.py | Python | bsd-3-clause | 688 | 0.001453 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from rest_framework import routers
from shop.views.cart import CartViewSet, WatchViewSet
from | shop.views.checkout import CheckoutViewSet
from shop.views.catalog import ProductSelectView
router = routers.DefaultRouter() # TODO: try with trailing_slash=False
router.register(r'cart', CartViewSe | t, base_name='cart')
router.register(r'watch', WatchViewSet, base_name='watch')
router.register(r'checkout', CheckoutViewSet, base_name='checkout')
urlpatterns = [
url(r'^select_product/?$', ProductSelectView.as_view(), name='select-product'),
url(r'^', include(router.urls)),
]
|
CollabNet/puppet-bucky | bucky/collectd.py | Python | apache-2.0 | 13,071 | 0.00023 | # -*- coding: utf-8 -
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os
import six
import copy
import struct
import logging
from bucky.errors import ConfigError, ProtocolError
from bucky.udpserver import UDPServer
log = logging.getLogger(__name__)
class CPUConverter(object):
    """Name collectd "cpu" samples as cpu.<core>.<state>."""
    PRIORITY = -1

    def __call__(self, sample):
        parts = ["cpu"]
        parts.append(sample["plugin_instance"])
        parts.append(sample["type_instance"])
        return parts
class InterfaceConverter(object):
    """Name collectd "interface" samples, dropping empty instance parts."""
    PRIORITY = -1

    def __call__(self, sample):
        # Return a real list on both Python 2 and 3.  The original used
        # filter(), which yields a lazy iterator on Python 3 — unlike every
        # other converter in this module, which returns a list.
        candidates = [
            "interface",
            sample.get("plugin_instance", ""),
            sample.get("type_instance", ""),
            sample["type"],
            sample["value_name"],
        ]
        return [part for part in candidates if part]
class MemoryConverter(object):
    """Name collectd "memory" samples as memory.<type_instance>."""
    PRIORITY = -1

    def __call__(self, sample):
        prefix = "memory"
        return [prefix, sample["type_instance"]]
class DefaultConverter(object):
    """Fallback naming: plugin[.instance][.type][.type_instance][.value]."""
    PRIORITY = -1

    def __call__(self, sample):
        # Start from the plugin name; every later component is optional.
        name = [sample["plugin"].strip()]
        plugin_instance = sample.get("plugin_instance")
        if plugin_instance:
            name.append(plugin_instance.strip())
        # The generic type/value names "value" carry no information; skip.
        sample_type = sample.get("type", "").strip()
        if sample_type not in ("", "value"):
            name.append(sample_type)
        type_instance = sample.get("type_instance", "").strip()
        if type_instance:
            name.append(type_instance)
        value_name = sample.get("value_name").strip()
        if value_name not in ("", "value"):
            name.append(value_name)
        return name
# Maps a collectd plugin name to its metric-name converter; "_default"
# handles every plugin without a dedicated converter.
DEFAULT_CONVERTERS = {
    "cpu": CPUConverter(),
    "interface": InterfaceConverter(),
    "memory": MemoryConverter(),
    "_default": DefaultConverter(),
}
class CollectDTypes(object):
    """Registry of collectd value specifications loaded from types.db.

    ``self.types`` maps a type name (e.g. "if_octets") to a list of
    (value_name, value_type_code) pairs; ``self.type_ranges`` maps it to
    the (min, max) bounds of each value (None for unbounded "U").
    """

    def __init__(self, types_dbs=None):
        """Load the given types.db paths, or probe the standard locations.

        :param types_dbs: optional list of types.db file paths
        :raises ConfigError: if no types.db file can be located
        """
        self.types = {}
        self.type_ranges = {}
        if not types_dbs:
            # Materialize as a list so the emptiness check below also works
            # on Python 3, where filter() returns an always-truthy iterator
            # and the original guard could never fire.
            types_dbs = [path for path in (
                "/usr/share/collectd/types.db",
                "/usr/local/share/collectd/types.db",
            ) if os.path.exists(path)]
            if not types_dbs:
                raise ConfigError("Unable to locate types.db")
        self.types_dbs = types_dbs
        self._load_types()

    def get(self, name):
        """Return the value spec list for type *name*.

        :raises ProtocolError: if the type name is unknown
        """
        t = self.types.get(name)
        if t is None:
            raise ProtocolError("Invalid type name: %s" % name)
        return t

    def _load_types(self):
        """Parse every configured types.db file into the registries."""
        for types_db in self.types_dbs:
            with open(types_db) as handle:
                for line in handle:
                    if line.lstrip()[:1] == "#":
                        continue  # comment line
                    if not line.strip():
                        continue  # blank line
                    self._add_type_line(line)
            log.info("Loaded collectd types from %s", types_db)

    def _add_type_line(self, line):
        """Parse one "name  vname:vtype:min:max, ..." line from types.db."""
        types = {
            "COUNTER": 0,
            "GAUGE": 1,
            "DERIVE": 2,
            "ABSOLUTE": 3
        }
        name, spec = line.split(None, 1)
        self.types[name] = []
        self.type_ranges[name] = {}
        vals = spec.split(", ")
        for val in vals:
            vname, vtype, minv, maxv = val.strip().split(":")
            type_code = types.get(vtype)
            if type_code is None:
                # Keep the raw token for the message; the original looked up
                # the code first and so always reported "None" here.
                raise ValueError("Invalid value type: %s" % vtype)
            minv = None if minv == "U" else float(minv)
            maxv = None if maxv == "U" else float(maxv)
            self.types[name].append((vname, type_code))
            self.type_ranges[name][vname] = (minv, maxv)
class CollectDParser(object):
    """Parser for the collectd binary network protocol.

    A datagram is a sequence of typed "parts"; metadata parts accumulate
    into a sample dict, and each value part yields one sample per value.
    """

    def __init__(self, types_dbs=[]):
        # NOTE(review): mutable default argument; the list is never mutated
        # here, so it is harmless in practice.
        self.types = CollectDTypes(types_dbs=types_dbs)

    def parse(self, data):
        """Yield one sample dict per value decoded from datagram *data*."""
        for sample in self.parse_samples(data):
            yield sample

    def parse_samples(self, data):
        """Walk the datagram parts, folding metadata into a running sample.

        Part types 0x0000-0x0005 and 0x0007-0x0009 set host/plugin/type and
        time/interval fields; part 0x0006 carries the values and triggers
        one (deep-copied) sample yield per value.
        """
        types = {
            0x0000: self._parse_string("host"),
            0x0001: self._parse_time("time"),
            0x0008: self._parse_time_hires("time"),
            0x0002: self._parse_string("plugin"),
            0x0003: self._parse_string("plugin_instance"),
            0x0004: self._parse_string("type"),
            0x0005: self._parse_string("type_instance"),
            0x0006: None,  # handle specially
            0x0007: self._parse_time("interval"),
            0x0009: self._parse_time_hires("interval")
        }
        sample = {}
        for (ptype, data) in self.parse_data(data):
            if ptype not in types:
                log.debug("Ignoring part type: 0x%02x", ptype)
                continue
            if ptype != 0x0006:
                types[ptype](sample, data)
                continue
            for vname, vtype, val in self.parse_values(sample["type"], data):
                sample["value_name"] = vname
                sample["value_type"] = vtype
                sample["value"] = val
                yield copy.deepcopy(sample)

    def parse_data(self, data):
        """Yield (part_type, payload) tuples from the raw datagram bytes.

        :raises ProtocolError: on truncated headers/payloads or unknown
            part types (the set below includes signature/encryption parts).
        """
        types = set([
            0x0000, 0x0001, 0x0002, 0x0003, 0x0004,
            0x0005, 0x0006, 0x0007, 0x0008, 0x0009,
            0x0100, 0x0101, 0x0200, 0x0210
        ])
        while len(data) > 0:
            if len(data) < 4:
                raise ProtocolError("Truncated header.")
            (part_type, part_len) = struct.unpack("!HH", data[:4])
            data = data[4:]
            if part_type not in types:
                raise ProtocolError("Invalid part type: 0x%02x" % part_type)
            part_len -= 4  # includes four header bytes we just parsed
            if len(data) < part_len:
                raise ProtocolError("Truncated value.")
            part_data, data = data[:part_len], data[part_len:]
            yield (part_type, part_data)

    def parse_values(self, stype, data):
        """Decode a value part (0x0006) against the types.db spec for *stype*.

        Layout: a big-endian count, then one type byte per value, then one
        8-byte value each (gauges are little-endian doubles; counters,
        derives and absolutes are 64-bit big-endian integers).
        """
        types = {0: "!Q", 1: "<d", 2: "!q", 3: "!Q"}
        (nvals,) = struct.unpack("!H", data[:2])
        data = data[2:]
        if len(data) != 9 * nvals:
            raise ProtocolError("Invalid value structure length.")
        vtypes = self.types.get(stype)
        if nvals != len(vtypes):
            raise ProtocolError("Values different than types.db info.")
        for i in range(nvals):
            if six.PY3:
                vtype = data[i]
            else:
                (vtype,) = struct.unpack("B", data[i])
            if vtype != vtypes[i][1]:
                raise ProtocolError("Type mismatch with types.db")
        data = data[nvals:]
        for i in range(nvals):
            vdata, data = data[:8], data[8:]
            (val,) = struct.unpack(types[vtypes[i][1]], vdata)
            yield vtypes[i][0], vtypes[i][1], val

    def _parse_string(self, name):
        """Return a setter storing a NUL-terminated string part as *name*."""
        def _parser(sample, data):
            if six.PY3:
                data = data.decode()
            if data[-1] != '\0':
                raise ProtocolError("Invalid string detected.")
            sample[name] = data[:-1]
        return _parser

    def _parse_time(self, name):
        """Return a setter for a classic (whole seconds, uint64) time part."""
        def _parser(sample, data):
            if len(data) != 8:
                raise ProtocolError("Invalid time data length.")
            (val,) = struct.unpack("!Q", data)
            sample[name] = float(val)
        return _parser

    def _parse_time_hires(self, name):
        """Return a setter for a high-resolution time part (2**-30 s units)."""
        def _parser(sample, data):
            if len(data) != 8:
                raise ProtocolError("Invalid hires time data length.")
            (val,) = struct.unpack("!Q", data)
            sample[name] = val * (2 ** -30)
        return _parser
class CollectDConverter(object):
def __init__(self, cfg) | :
self.converters = dict(DEFAULT_CONVERTERS)
| self._load_converters(cfg)
def convert(self, sample):
default = self.converters["_default"]
handler = self.converters.get(sample["plugin"], default)
try:
name = '.'.join(handler(sample) |
jcassee/django-analytical | tests/unit/test_tag_woopra.py | Python | mit | 3,458 | 0.000289 | """
Tests for the Woopra template tags and filters.
"""
import pytest
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from utils import TagTestCase
from analytical.templatetags.woopra import WoopraNode
from analytical.utils import AnalyticalException
@override_settings(WOOPRA_DOMAIN='example.com')
class WoopraTagTestCase(TagTestCase):
"""
Tests for the ``woopra`` template tag.
"""
def test_tag(self):
r = self.render_tag('woopra', 'woopra')
assert 'var woo_settings = {"domain": "example.com"};' in r
def test_node(self):
r = WoopraNode().render(Context({}))
assert 'var woo_settings = {"domain": "example.com"};' in r
@override_settings(WOOPRA_DOMAIN=None)
def test_no_domain(self):
with pytest.raises(AnalyticalException):
WoopraNode()
@override_settings(WOOPRA_DOMAIN='this is not a domain')
def test_wrong_domain(self):
with pytest.raises(AnalyticalException):
WoopraNode()
@override_settings(WOOPRA_IDLE_TIMEOUT=1234)
def test_idle_timeout(self):
r = WoopraNode().render(Context({}))
assert 'var woo_settings = {"domain": "example.com", "idle_timeout": "1234"};' in r
def test_custom(self):
r = WoopraNode().render(Context({
'woopra_var1': 'val1',
'woopra_var2': 'val2',
}))
assert 'var woo_visitor = {"var1": "val1", "var2": "val2"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_name_and_email(self):
r = WoopraNode().render(Context({
'user': User(username='test',
first_name='Firstname | ',
last_name='Lastname',
email="test@example.com"),
}))
assert 'var woo_visitor = '
'{"email": "test@example.com", "name": "Firstname Lastname"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_username_no_email(self):
r = WoopraNode().render(Context | ({'user': User(username='test')}))
assert 'var woo_visitor = {"name": "test"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_no_identify_when_explicit_name(self):
r = WoopraNode().render(Context({
'woopra_name': 'explicit',
'user': User(username='implicit'),
}))
assert 'var woo_visitor = {"name": "explicit"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_no_identify_when_explicit_email(self):
r = WoopraNode().render(Context({
'woopra_email': 'explicit',
'user': User(username='implicit'),
}))
assert 'var woo_visitor = {"email": "explicit"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_anonymous_user(self):
r = WoopraNode().render(Context({'user': AnonymousUser()}))
assert 'var woo_visitor = {};' in r
    @override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
    def test_render_internal_ip(self):
        """Tracking code is commented out for requests from internal IPs."""
        req = HttpRequest()
        req.META['REMOTE_ADDR'] = '1.1.1.1'
        context = Context({'request': req})
        r = WoopraNode().render(context)
        assert r.startswith('<!-- Woopra disabled on internal IP address')
        assert r.endswith('-->')
|
StackPointCloud/libcloud | libcloud/compute/drivers/gridspot.py | Python | apache-2.0 | 4,135 | 0.000242 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.compute.base import NodeDriver, Node
from libcloud.compute.base import NodeState
from libcloud.common.base import ConnectionKey, JsonResponse
from libcloud.compute.types import Provider
from libcloud.common.types import InvalidCredsError
class GridspotAPIException(Exception):
    """Raised when a Gridspot API response body reports an exception."""

    def __str__(self):
        # The exception name/message is carried as the first positional arg.
        return self.args[0]

    def __repr__(self):
        return "<GridspotAPIException '{}'>".format(self.args[0])
class GridspotResponse(JsonResponse):
    """
    Response class for Gridspot
    """
    def parse_body(self):
        data = super(GridspotResponse, self).parse_body()
        # The API flags failures by populating 'exception_name' in the body.
        exception_name = None
        if 'exception_name' in data:
            exception_name = data['exception_name']
        if exception_name:
            raise GridspotAPIException(exception_name)
        return data

    def parse_error(self):
        # Gridspot 404s on invalid api key or instance_id
        raise InvalidCredsError("Invalid api key/instance_id")
class GridspotConnection(ConnectionKey):
    """
    Connection class to connect to Gridspot's API servers
    """
    host = 'gridspot.com'
    responseCls = GridspotResponse

    def add_default_params(self, params):
        # Every request authenticates with the account's API key.
        params.update(api_key=self.key)
        return params
class GridspotNodeDriver(NodeDriver):
    """
    Gridspot (http://www.gridspot.com/) node driver.
    """
    type = Provider.GRIDSPOT
    name = 'Gridspot'
    website = 'http://www.gridspot.com/'
    connectionCls = GridspotConnection
    NODE_STATE_MAP = {
        'Running': NodeState.RUNNING,
        'Starting': NodeState.PENDING
    }

    def list_nodes(self):
        """Return all Gridspot instances as Node objects."""
        data = self.connection.request(
            '/compute_api/v1/list_instances').object
        return [self._to_node(n) for n in data['instances']]

    def destroy_node(self, node):
        """Stop the instance behind `node`.

        Always returns True; API-level failures surface as exceptions from
        the response class.
        """
        data = {'instance_id': node.id}
        self.connection.request('/compute_api/v1/stop_instance', data).object
        return True

    def _get_node_state(self, state):
        """Map a Gridspot state string to a NodeState (UNKNOWN if unmapped)."""
        return self.NODE_STATE_MAP.get(state, NodeState.UNKNOWN)

    def _add_int_param(self, params, data, field):
        """Copy data[field] into params as an int when present and numeric.

        The API spec does not guarantee these fields, so a missing key is
        ignored (the old code indexed `data[field]` directly and would raise
        KeyError).  The bare `except:` is narrowed to the two exceptions
        int() can actually raise here.
        """
        value = data.get(field)
        if value:
            try:
                params[field] = int(value)
            except (TypeError, ValueError):
                # Non-numeric value: skip the field rather than fail the node.
                pass

    def _to_node(self, data):
        """Build a Node from one entry of the list_instances response."""
        port = None
        ip = None
        state = self._get_node_state(data['current_state'])
        # The endpoint is the literal string 'null' until SSH is reachable.
        if data['vm_ssh_wan_ip_endpoint'] != 'null':
            parts = data['vm_ssh_wan_ip_endpoint'].split(':')
            ip = parts[0]
            port = int(parts[1])
        extra_params = {
            'winning_bid_id': data['winning_bid_id'],
            'port': port
        }
        # Spec is vague and doesn't indicate if these will always be present
        self._add_int_param(extra_params, data, 'vm_num_logical_cores')
        self._add_int_param(extra_params, data, 'vm_num_physical_cores')
        self._add_int_param(extra_params, data, 'vm_ram')
        self._add_int_param(extra_params, data, 'start_state_time')
        self._add_int_param(extra_params, data, 'ended_state_time')
        self._add_int_param(extra_params, data, 'running_state_time')
        # NOTE(review): ip may still be None here; public_ips=[None] matches
        # the original behavior -- confirm callers tolerate it.
        return Node(
            id=data['instance_id'],
            name=data['instance_id'],
            state=state,
            public_ips=[ip],
            private_ips=[],
            driver=self.connection.driver,
            extra=extra_params)
|
benhoff/vexbot | vexbot/extensions/system.py | Python | gpl-3.0 | 1,216 | 0.001645 | import psutil
# Bytes per megabyte (binary); divide byte counts by this to get Mb.
_mb_conversion = 1024 * 1024
def cpu_times(*args, **kwargs):
    """Return system CPU times as reported by psutil (args are ignored)."""
    return psutil.cpu_times()
def cpu_count(logical=True, *args, **kwargs):
    """Return a human-readable core count, e.g. '4 CPU Cores'."""
    total = psutil.cpu_count(logical)
    label = 'Core' if total == 1 else 'Cores'
    return '{} CPU {}'.format(total, label)
def cpu_frequency(*args, **kwargs):
    """Return the max CPU frequency values, or an explanatory message.

    NOTE(review): iterating the psutil.cpu_freq() return value assumes a
    per-CPU list of entries with a `.max` attribute -- confirm against the
    psutil version in use (the non-percpu form returns a single namedtuple).
    """
    freq = psutil.cpu_freq()
    if freq is None:
        return ('CPU frequency file moved or not present. See: '
                'https://stackoverflow.com/questions/42979943/python3-psutils')
    return [x.max for x in freq]
def virtual_memory_percent(*arg, **kwargs):
    """Return RAM usage as a percentage string, e.g. '42.1%'."""
    return '{}%'.format(psutil.virtual_memory().percent)
def virtua | l_memory_total(*args, **kwargs):
total = int(psutil.virtual_memory().tot | al / _mb_conversion)
return '{} Mb'.format(total)
def virtual_memory_used(*args, **kwargs):
    """Return RAM currently in use as a whole-megabyte string."""
    mb = int(psutil.virtual_memory().used / _mb_conversion)
    return '{} Mb'.format(mb)
def swap(*args, **kwargs):
    """Return swap usage as 'Used: <Mb> | Total: <Mb>'."""
    info = psutil.swap_memory()
    used_mb = int(info.used / _mb_conversion)
    total_mb = int(info.total / _mb_conversion)
    return 'Used: {} | Total: {}'.format(used_mb, total_mb)
|
takeshik/metatweet-old | resource/configuration/global.py | Python | gpl-3.0 | 522 | 0.015326 | import clr
clr.AddRefer | ence("System")
clr.AddReference("System.Core")
clr.AddReference("Linx")
clr.AddReference("MetaTweetObjectModel")
clr.AddReference("MetaTweetFoundation")
from System import *
from System.Collections.Generic import *
from XSpect import *
from XSpect.MetaTweet import *
from XSpect.MetaTweet.Modul | es import *
from XSpect.MetaTweet.Requesting import *
def Create(body):
    """Copy the mapping *body* into a new .NET ExpandoObject.

    NOTE(review): this runs under IronPython; `Dynamic`, `IDictionary`,
    `String` and `Object` come from the CLR star-imports above -- confirm
    `Dynamic` resolves via `from System import *`.
    """
    child = Dynamic.ExpandoObject()
    for e in body:
        # ExpandoObject implements IDictionary<String, Object>; add each pair
        # through the interface so arbitrary keys are accepted.
        IDictionary[String, Object].Add(child, e, body[e])
    return child
|
astrilchuk/sd2xmltv | libschedulesdirect/common/program.py | Python | mit | 6,469 | 0.001082 | import logging
from datetime import date
from util import parse_date
from programtitles import ProgramTitles
from programeventdetails import ProgramEventDetails
from programdescriptionlist import ProgramDescriptionList
from programmetadata import ProgramMetadata
from programcast import ProgramCast
from programcrew import ProgramCrew
from programcontentrating import ProgramContentRating
from programrecommendation import ProgramRecommendation
from programmovie import ProgramMovie
from programkeywords import ProgramKeywords
from image import Image
from programaward import ProgramAward
class Program(object):
    """A Schedules Direct program record (show, episode, movie or sports).

    Instances are populated from the JSON dict returned by the Schedules
    Direct API via :meth:`from_dict`; all attributes default to empty/None.
    """
    def __init__(self):
        self.program_id = None  # type: unicode
        self.md5 = None  # type: unicode
        self.titles = None  # type: ProgramTitles
        self.event_details = None  # type: ProgramEventDetails
        self.descriptions = None  # type: ProgramDescriptionList
        self.original_air_date = None  # type: date
        self.genres = []  # type: List[unicode]
        self.episode_title = None  # type: unicode
        self.metadata = None  # type: ProgramMetadata
        self.cast = []  # type: List[ProgramCast]
        self.crew = []  # type: List[ProgramCrew]
        self.show_type = None  # type: unicode
        self.has_image_artwork = False  # type: bool
        self.content_ratings = []  # type: List[ProgramContentRating]
        self.content_advisories = []  # type: List[unicode]
        self.recommendations = []  # type: List[ProgramRecommendation]
        self.movie = None  # type: ProgramMovie
        self.episode_num = None  # type: int
        self.animation = None  # type: unicode
        self.audience = None  # type: unicode
        self.holiday = None  # type: unicode
        self.keywords = None  # type: ProgramKeywords
        self.official_url = None  # type: unicode
        self.entity_type = None  # type: unicode
        self.resource_id = None  # type: unicode
        self.episode_image = None  # type: Image
        self.duration = None  # type: int
        self.awards = None  # type: List[ProgramAward]

    @property
    def artwork_id(self):  # type: () -> unicode
        """First ten characters of the program id, or None without artwork."""
        if not self.has_image_artwork:
            return None
        return self.program_id[0:10]

    @property
    def is_show_entity(self):  # type: () -> bool
        return self.entity_type == u"Show"

    @property
    def is_episode_entity(self):  # type: () -> bool
        return self.entity_type == u"Episode"

    @property
    def is_movie_entity(self):  # type: () -> bool
        return self.entity_type == u"Movie"

    @property
    def is_sports_entity(self):  # type: () -> bool
        return self.entity_type == u"Sports"

    def __unicode__(self):  # type: () -> unicode
        return u"{0.program_id} '{1.title120}'".format(self, self.titles)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def get_content_rating(self, body):
        """Return the first content rating issued by rating body *body*, or None."""
        return next((content_rating for content_rating in self.content_ratings if content_rating.body == body), None)

    def get_cast(self, in_roles):  # type: (List[unicode]) -> List[ProgramCast]
        """Return cast members whose role is in *in_roles*."""
        return [cast for cast in self.cast if cast.role in in_roles]

    def get_crew(self, in_roles):  # type: (List[unicode]) -> List[ProgramCrew]
        """Return crew members whose role is in *in_roles*."""
        return [crew for crew in self.crew if crew.role in in_roles]

    @staticmethod
    def from_dict(dct):  # type: (dict) -> Program
        """Build a Program from a Schedules Direct JSON dict.

        Keys are popped from *dct* as they are consumed; any leftovers are
        logged as a warning.  If *dct* lacks 'programID' or 'md5' it is
        returned unchanged (it is not a program payload).
        """
        if "programID" not in dct or "md5" not in dct:
            return dct
        program = Program()
        program.program_id = dct.pop("programID")
        # "EP..." ids encode the episode number in their last four digits.
        if program.program_id[:2] == "EP":
            program.episode_num = int(program.program_id[-4:])
        program.titles = ProgramTitles.from_iterable(dct.pop("titles"))
        program.md5 = dct.pop("md5")
        if "eventDetails" in dct:
            program.event_details = ProgramEventDetails.from_dict(dct.pop("eventDetails"))
        if "descriptions" in dct:
            program.descriptions = ProgramDescriptionList.from_dict(dct.pop("descriptions"))
        if "originalAirDate" in dct:
            program.original_air_date = parse_date(dct.pop("originalAirDate"))
        if "genres" in dct:
            program.genres = dct.pop("genres")
        if "episodeTitle150" in dct:
            program.episode_title = dct.pop("episodeTitle150")
        if "metadata" in dct:
            program.metadata = ProgramMetadata.from_iterable(dct.pop("metadata"))
        if "cast" in dct:
            program.cast = ProgramCast.from_iterable(dct.pop("cast"))
        if "crew" in dct:
            program.crew = ProgramCrew.from_iterable(dct.pop("crew"))
        if "showType" in dct:
            program.show_type = dct.pop("showType")
        if "hasImageArtwork" in dct:
            program.has_image_artwork = dct.pop("hasImageArtwork")
        if "contentRating" in dct:
            program.content_ratings = ProgramContentRating.from_iterable(dct.pop("contentRating"))
        if "contentAdvisory" in dct:
            program.content_advisories = dct.pop("contentAdvisory")
        if "recommendations" in dct:
            program.recommendations = ProgramRecommendation.from_iterable(dct.pop("recommendations"))
        if "movie" in dct:
            program.movie = ProgramMovie.from_dict(dct.pop("movie"))
        if "animation" in dct:
            program.animation = dct.pop("animation")
        if "audience" in dct:
            program.audience = dct.pop("audience")
        if "holiday" in dct:
            program.holiday = dct.pop("holiday")
        if "keyWords" in dct:
            program.keywords = ProgramKeywords.from_dict(dct.pop("keyWords"))
        if "officialURL" in dct:
            program.official_url = dct.pop("officialURL")
        if "entityType" in dct:
            program.entity_type = dct.pop("entityType")
        if "resourceID" in dct:
            program.resource_id = dct.pop("resourceID")
        if "episodeImage" in dct:
            program.episode_image = Image.from_dict(dct.pop("episodeImage"))
        if "duration" in dct:
            program.duration = dct.pop("duration")
        if "awards" in dct:
            program.awards = ProgramAward.from_iterable(dct.pop("awards"))
        # Anything left over is an API field this client does not know about.
        if len(dct) != 0:
            logging.warn("Key(s) not processed for Program: %s", ", ".join(dct.keys()))
        return program
|
amaozhao/algorithms | algorithms/dp/fib.py | Python | mit | 1,513 | 0.003966 | def fib_recursive(n):
"""[summary]
Computes the n-th fibonacci number recursive.
Problem: This implementation is very slow.
approximate O(2^n)
Arguments:
n {[int]} -- [description]
Returns:
[int] -- [description]
"""
# precondition
assert n >= 0, 'n must be a positive integer'
if n <= 1:
return n
else:
return fib_recursive(n-1) + fib_recursive(n-2)
# print(fib_recursive(35)) # => 9227465 (slow)
def fib_list(n):
    """Return the n-th Fibonacci number via bottom-up dynamic programming.

    Builds a table of every Fibonacci number up to index n, so it runs in
    O(n) time and O(n) space.

    Arguments:
        n {[int]} -- index of the Fibonacci number, must be >= 0
    Returns:
        [int] -- the n-th Fibonacci number
    """
    # precondition
    assert n >= 0, 'n must be a positive integer'
    table = [0, 1]
    for idx in range(2, n + 1):
        table.append(table[idx - 2] + table[idx - 1])
    return table[n]
# print(fib_list(100)) # => 35422484817926 | 1915075
def fib_iter(n):
    """Return the n-th Fibonacci number iteratively.

    O(n) time, O(1) space.  The original kept a running `sum` variable,
    shadowing the `sum` builtin; the tuple-swap form below avoids that and
    drops the extra temporary.

    Arguments:
        n {[int]} -- index of the Fibonacci number, must be >= 0
    Returns:
        [int] -- the n-th Fibonacci number
    """
    # precondition
    assert n >= 0, 'n must be positive integer'
    if n <= 1:
        return n
    fib_1, fib_2 = 0, 1
    for _ in range(n - 1):
        fib_1, fib_2 = fib_2, fib_1 + fib_2
    return fib_2
# => 354224848179261915075
# print(fib_iter(100))
|
johnwlockwood/txt2vote | txttovote/info_extractors/tests/test_info_extractors.py | Python | apache-2.0 | 5,895 | 0.00017 | from operator import itemgetter
import unittest
from karld.conversion_operators import join_stripped_values
from karld.conversion_operators import join_stripped_gotten_value
from info_extractors import get_full_name
from info_extractors import get_phone
from info_extra | ctors import get_zip
from info_ | extractors import number_getter
from info_extractors import lower_getter
from info_extractors import lower_list_getter
from info_extractors import title_getter
from info_extractors import title_list_getter
from info_extractors import get_number_prefix
class TestValueJoiner(unittest.TestCase):
    """Tests for karld's join_stripped_values helper."""

    def test_join_stripped_values(self):
        """
        Ensure joiner gets the values from
        data with the getter, coerce to str,
        strips padding whitespace and join
        with the separator.
        """
        getter = itemgetter(0, 1, 2, 3)
        data = (" A", "B ", 2, "D")
        separator = "+"
        self.assertEqual(join_stripped_values(separator, getter, data),
                         "A+B+2+D")
class TestGettersValueJoiner(unittest.TestCase):
    """Tests for karld's join_stripped_gotten_value helper (multi-getter form)."""

    def test_join_stripped_gotten_value(self):
        """
        Ensure joiner gets the values from
        data with the getters, coerce to str,
        strips padding whitespace and join
        with the separator.
        """
        getters = (itemgetter(0), itemgetter(1), itemgetter(2), itemgetter(3))
        data = (" A", "B ", 2, "D")
        separator = "+"
        self.assertEqual(join_stripped_gotten_value(separator, getters, data),
                         "A+B+2+D")
class TestFullName(unittest.TestCase):
    """Tests for get_full_name."""

    def test_full_name_builds_full_name(self):
        """
        Ensure name parts of the data are extracted
        and joined with a space, excluding empty parts,
        and stripping padding spaces.
        """
        data = [
            ("Jack ", "12356", "Danger", "Johnson"),
            (" Mary", "54354", "", "Hill"),
        ]
        name_parts_getter = itemgetter(0, 2, 3)
        self.assertEqual(get_full_name(name_parts_getter, data[0]),
                         "Jack Danger Johnson")
        self.assertEqual(get_full_name(name_parts_getter, data[1]),
                         "Mary Hill")
class TestPhone(unittest.TestCase):
    """Tests for get_phone and get_zip."""

    def test_get_phone_builds_phone(self):
        """
        Ensure phone parts of the data are extracted
        and joined with a space, excluding empty parts,
        and stripping padding spaces.
        """
        data = [
            ("Jack ", "3801", "479", 981, "Johnson"),
            (" Mary", "1234", "555", "155", "Hill"),
            ("Bob", "2234", "", "155", "Ted"),
        ]
        phone_parts_getter = itemgetter(2, 3, 1)
        self.assertEqual(get_phone(phone_parts_getter, data[0]),
                         "479-981-3801")
        self.assertEqual(get_phone(phone_parts_getter, data[1]),
                         "555-155-1234")
        self.assertEqual(get_phone(phone_parts_getter, data[2]),
                         "155-2234")

    def test_get_zip(self):
        """
        Ensure get_zip calls uses - for the separator and
        gets all the benefits of join_stripped_values.
        """
        self.assertEqual(get_zip(itemgetter(0, 1), (" 2", 3)), "2-3")
class TestGetterTransformers(unittest.TestCase):
    """Tests for the getter-transforming helpers in info_extractors."""

    def test_get_number_prefix_none(self):
        """
        Ensure get_number_prefix returns an empty
        string given None
        """
        self.assertEqual("", get_number_prefix(None))

    def test_get_number_prefix_number(self):
        """
        Ensure get_number_prefix returns the same
        string when it is given one with all digits
        """
        self.assertEqual("1234", get_number_prefix("1234"))

    def test_get_number_prefix_float(self):
        """
        Ensure get_number_prefix returns the
        first digits when it is given a float
        """
        self.assertEqual("123", get_number_prefix("123.99"))

    def test_get_number_prefix_letter(self):
        """
        Ensure get_number_prefix returns the leading
        digits when the string ends with letters
        """
        self.assertEqual("99", get_number_prefix("99b"))

    def test_get_number_prefix_only(self):
        """
        Ensure get_number_prefix returns an empty
        string when the string does not start with a digit
        """
        self.assertEqual("", get_number_prefix("b99"))

    def test_number_getter(self):
        """
        Ensure number_getter takes only the first digits
        of a string.
        """
        data = ("123b",)
        getter = itemgetter(0)
        self.assertEqual("123", number_getter(getter, data))

    def test_lower_getter(self):
        """
        Ensure lower_getter returns the lower case of
        the results of the getter given data.
        """
        data = ("HELLO WOrld",)
        getter = itemgetter(0)
        self.assertEqual("hello world", lower_getter(getter, data))

    def test_lower_list_getter(self):
        """
        Ensure lower_list_getter returns the lower case of
        each of the results of the getter given data.
        """
        data = ("HELLO", "WOrld",)
        getter = itemgetter(0, 1)
        self.assertEqual(["hello", "world"],
                         lower_list_getter(getter, data))

    def test_title_getter(self):
        """
        Ensure title_getter returns the title case of
        the results of the getter given data.
        """
        data = ("HELLO WOrld",)
        getter = itemgetter(0)
        self.assertEqual("Hello World", title_getter(getter, data))

    def test_title_list_getter(self):
        """
        Ensure title_list_getter returns the title case of
        each of the results of the getter given data.
        """
        data = ("HELLO", "WOrld",)
        getter = itemgetter(0, 1)
        self.assertEqual(["Hello", "World"],
                         title_list_getter(getter, data))
|
gghezzo/prettypython | PythonEveryDay2015/MontyHall/tkintermh.py | Python | mit | 1,654 | 0.036276 | # Monty Hall and the gaot - GUI
# Adding a Tkinter Gui (pass 2 )
# Typer: Ginny C Ghezzo
# What I learned:
import random
from tkinter import *
import webbrowser
def playVideo():
    """Open the goat video page in a new browser window (webbrowser new=1)."""
    url = "http://9gag.com/gag/6681409"
    webbrowser.open(url, 1)
# todo: Wow this is crazy ugly
# steal this http://www.python-course.eu/tkinter_entry_widgets.php
def iuGrid(root):
    """Lay out the door-picker UI inside `root` and return the Entry widget.

    The original assigned the result of every `.grid()` call (which is
    always None) to variables -- losing the Entry widget reference -- and
    shadowed the `next` builtin with the Button.  Widget creation is now
    separated from geometry management.

    NOTE(review): the Entry's value is never read by the console game loop
    below -- confirm whether the GUI input is actually meant to drive the
    game.
    """
    Label(root, text="Which door do you pick? ").grid(row=0)
    entry = Entry(root)
    entry.grid(row=0, column=1)
    Label(root, text="Door 1", font=('times', 20), bg='red').grid(row=1, column=0, padx=5, pady=5)
    Label(root, text="Door 2", font=('times', 20, 'bold'), bg='blue').grid(row=1, column=1, pady=5)
    Label(root, text="Door 3", font=('times', 20, 'bold'), bg='green').grid(row=1, column=2, pady=5, padx=5)
    Button(root, text="Ready?", command=playVideo).grid(row=2, columnspan=2)
    return entry
doors = ["A", "B", "C"]
totWins = 0
again = 'y'
# BUG FIX: `root = Tk().wm_title("Monty Hall ")` bound wm_title()'s return
# value (the empty string) instead of the window, so root.mainloop() at the
# bottom crashed with AttributeError.  Create the window, then set its title.
root = Tk()
root.wm_title("Monty Hall ")
iuGrid(root)
# NOTE(review): this console loop runs *before* mainloop(), so the GUI stays
# unresponsive until the player answers 'n' -- confirm that is intended.
while again == 'y':
    initial = input("Door A, B or C : ").upper()  # fix bad selection
    action = input("Switch or Hold (S,H):").upper()
    placed = random.choice(doors)
    chosen = initial
    used = [chosen, placed]
    # Monty opens a random door that is neither the pick nor the prize.
    avail = [door for door in doors if door not in used]
    opened = random.choice(avail)
    if action == "S":
        # Switch to the remaining unopened, unchosen door.
        avail = [door for door in doors if door not in [chosen, opened]]
        chosen = avail[0]
    youWin = (chosen == placed)
    if youWin:
        print("Congratulations! You win! It was in ", placed)
    else:
        print("Sorry it was in ", placed)
    again = input("Do you want to play again? (y,n)").lower()
root.mainloop()
treverhines/RBF | rbf/pde/nodes.py | Python | mit | 22,858 | 0.000044 | '''
This module provides functions for generating nodes used for solving PDEs with
the RBF and RBF-FD method.
'''
from __future__ import division
import logging
import numpy as np
from scipy.sparse import csc_matrix
from scipy.sparse.csgraph import reverse_cuthill_mckee
from rbf.utils import assert_shape, KDTree
from rbf.pde.domain import as_domain
from rbf.pde.sampling import rejection_sampling, poisson_discs
logger = logging.getLogger(__name__)
def _disperse(nodes, rho, fixed_nodes, neighbors, delta):
    '''
    Returns the new position of the free nodes after a dispersal step. This
    does not handle node intersections with the boundary.

    Parameters
    ----------
    nodes : (n, d) float array of free node positions
    rho : callable mapping (m, d) positions to (m,) repulsion densities
    fixed_nodes : (k, d) float array of immovable repelling nodes
    neighbors : int, number of nearest nodes contributing to the force
    delta : float, step size as a fraction of nearest-neighbor distance
    '''
    # form collection of all nodes
    all_nodes = np.vstack((nodes, fixed_nodes))
    # find index and distance to nearest nodes
    d, i = KDTree(all_nodes).query(nodes, neighbors)
    # dont consider a node to be one of its own nearest neighbors
    d, i = d[:, 1:], i[:, 1:]
    # compute the force proportionality constant between each node
    # based on their charges
    c = 1.0/(rho(all_nodes)[i, None]*rho(nodes)[:, None, None])
    # calculate forces on each node resulting from the `m` nearest nodes.
    # Inverse-cube of distance times the unit separation vector gives an
    # inverse-square repulsion.
    forces = c*(nodes[:, None, :] - all_nodes[i, :])/d[:, :, None]**3
    # sum up all the forces for each node
    direction = np.sum(forces, axis=1)
    # normalize the net forces to one
    direction /= np.linalg.norm(direction, axis=1)[:, None]
    # in the case of a zero vector replace nans with zeros
    direction = np.nan_to_num(direction)
    # move in the direction of the force by an amount proportional to the
    # distance to the nearest neighbor
    step = delta*d[:, 0, None]*direction
    # new node positions
    out = nodes + step
    return out
def disperse(nodes, domain,
             rho=None,
             fixed_nodes=None,
             neighbors=None,
             delta=0.1):
    '''
    Slightly disperses the nodes within the domain. The disperson is analogous
    to electrostatic repulsion, where neighboring node exert a repulsive force
    on eachother. If a node is repelled into a boundary then it bounces back
    in.

    Parameters
    ----------
    nodes : (n, d) float array
        Initial node positions

    domain : (p, d) float array and (q, d) int array
        Vertices of the domain and connectivity of the vertices.

    rho : callable, optional
        Takes an (n, d) array as input and returns the repulsion force for a
        node at those position.

    fixed_nodes : (k, d) float array, optional
        Nodes which do not move and only provide a repulsion force

    neighbors : int, optional
        The number of adjacent nodes used to determine repulsion forces for
        each node

    delta : float, optional
        The step size. Each node moves in the direction of the repulsion force
        by a distance `delta` times the distance to the nearest neighbor.

    Returns
    -------
    (n, d) float array

    '''
    domain = as_domain(domain)
    nodes = np.asarray(nodes, dtype=float)
    assert_shape(nodes, (None, domain.dim), 'nodes')
    if rho is None:
        # default to uniform repulsion density
        def rho(x):
            return np.ones(x.shape[0])

    if fixed_nodes is None:
        fixed_nodes = np.zeros((0, domain.dim), dtype=float)
    else:
        fixed_nodes = np.asarray(fixed_nodes)
        assert_shape(fixed_nodes, (None, domain.dim), 'fixed_nodes')

    if neighbors is None:
        # the default number of neighboring nodes to use when computing the
        # repulsion force is 4 for 2D and 5 for 3D
        # NOTE(review): if domain.dim is neither 2 nor 3, `neighbors` stays
        # None and the min() below fails on Python 3 -- confirm intended dims.
        if domain.dim == 2:
            neighbors = 4

        elif domain.dim == 3:
            neighbors = 5

    # ensure that the number of neighboring nodes used for the repulsion force
    # is less than or equal to the total number of nodes
    neighbors = min(neighbors, nodes.shape[0] + fixed_nodes.shape[0])
    # if m is 0 or 1 then the nodes remain stationary
    if neighbors <= 1:
        return np.array(nodes, copy=True)

    # node positions after repulsion
    out = _disperse(nodes, rho, fixed_nodes, neighbors, delta)
    # indices of nodes which are now outside the domain
    crossed = domain.intersection_count(nodes, out) > 0
    crossed, = crossed.nonzero()
    # points where nodes intersected the boundary and the simplex they
    # intersected at
    intr_pnt, intr_idx = domain.intersection_point(
        nodes[crossed], out[crossed])
    # normal vector to intersection points
    intr_norms = domain.normals[intr_idx]
    # distance that the node wanted to travel beyond the boundary
    res = out[crossed] - intr_pnt
    # bounce node off the boundary (reflect the overshoot about the normal)
    out[crossed] -= 2*intr_norms*np.sum(res*intr_norms, 1)[:, None]
    # check to see if the bounced nodes still intersect the boundary. If they
    # do, then set them back to their original position
    still_crossed = domain.intersection_count(nodes[crossed], out[crossed]) > 0
    out[crossed[still_crossed]] = nodes[crossed[still_crossed]]
    return out
def neighbor_argsort(nodes, m=None):
    '''
    Returns a permutation array that sorts `nodes` so that each node and its
    `m` nearest neighbors are close together in memory. This is done through
    the use of a KD Tree and the Reverse Cuthill-McKee algorithm.

    Parameters
    ----------
    nodes : (n, d) float array

    m : int, optional
        Number of nearest neighbors to keep close in memory; defaults to
        5**d, roughly the RBF-FD stencil size.

    Returns
    -------
    (N,) int array

    Examples
    --------
    >>> nodes = np.array([[0.0, 1.0],
                          [2.0, 1.0],
                          [1.0, 1.0]])
    >>> idx = neighbor_argsort(nodes, 2)
    >>> nodes[idx]
    array([[ 2., 1.],
           [ 1., 1.],
           [ 0., 1.]])

    '''
    nodes = np.asarray(nodes, dtype=float)
    assert_shape(nodes, (None, None), 'nodes')
    if m is None:
        # this should be roughly equal to the stencil size for the RBF-FD
        # problem
        m = 5**nodes.shape[1]

    m = min(m, nodes.shape[0])
    # find the indices of the nearest m nodes for each node
    _, idx = KDTree(nodes).query(nodes, m)
    # efficiently form adjacency matrix (one True entry per node/neighbor pair)
    col = idx.ravel()
    row = np.repeat(np.arange(nodes.shape[0]), m)
    data = np.ones(nodes.shape[0]*m, dtype=bool)
    mat = csc_matrix((data, (row, col)), dtype=bool)
    permutation = reverse_cuthill_mckee(mat)
    return permutation
def _check_spacing(nodes, rho=None):
    '''
    Check if any nodes are unusually close to eachother. If so, a warning will
    be printed.

    Parameters
    ----------
    nodes : (n, d) float array of node positions
    rho : callable, optional
        Expected node density; defaults to uniform.
    '''
    n, dim = nodes.shape
    if rho is None:
        def rho(x):
            return np.ones(x.shape[0])

    # distance to nearest neighbor
    dist = KDTree(nodes).query(nodes, 2)[0][:, 1]
    if np.any(dist == 0.0):
        is_zero = (dist == 0.0)
        indices, = is_zero.nonzero()
        for idx in indices:
            logger.warning(
                'Node %s (%s) is in the same location as another node.'
                % (idx, nodes[idx]))

    # NOTE(review): a zero nearest-neighbor distance makes `density` infinite
    # below (after the warning above) -- confirm that is acceptable here.
    density = 1.0/dist**dim
    normalized_density = np.log10(density / rho(nodes))
    # flag nodes whose local density is far above the median (closer than
    # expected), using the 10-90 interdecile range as the spread measure
    percs = np.percentile(normalized_density, [10, 50, 90])
    med = percs[1]
    idr = percs[2] - percs[0]
    is_too_close = normalized_density < (med - 2*idr)
    if np.any(is_too_close):
        indices, = is_too_close.nonzero()
        for idx in indices:
            logger.warning(
                'Node %s (%s) is unusually close to a neighboring node.'
                % (idx, nodes[idx]))
def prepare_nodes(nodes, domain,
rho=None,
iterations=20,
neighbors=None,
dispersion_delta=0.1,
pinned_nodes=None,
snap_delta=0.5,
boundary_groups=None,
boundary_groups_with_ghosts=None,
include_vertices=False,
orient_simplices=True):
'''
Prepares a set of nodes for solving PDEs with the RBF and RBF-FD method.
This includes: dispersing the nodes away from eachother to ensure a more
even spacing, snapping nodes to the boundary, determining the normal
vectors for each node, determining the group that each node belongs to,
creating ghost nodes, sorting the nodes so that adja |
WPI-ARC/lightning_ros | scripts/RR_action_server.py | Python | bsd-3-clause | 22,385 | 0.004423 | #!/usr/bin/env python
"""
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, University of California, Berkeley
# All rights reserved.
# Authors: Cameron Lee (cameronlee@berkeley.edu) and Dmitry Berenson (
berenson@eecs.berkeley.edu)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of University of California, Berkeley nor the names
of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
"""
This node advertises an action which is used by the main lightning node
(see run_lightning.py) to run the Retrieve and Repair portion of LightningROS.
This node relies on a planner_stoppable type node to repair the paths, the
PathTools library to retrieve paths from the library (this is not a separate
node; just a python library that it calls), and the PathTools python library
which calls the collision_checker service and advertises a topic for displaying
stuff in RViz.
"""
import roslib
import rospy
import actionlib
import threading
from tools.PathTools import PlanTrajectoryWrapper, InvalidSectionWrapper, DrawPointsWrapper
from pathlib.PathLibrary import *
from lightning.msg import Float64Array, RRAction, RRResult
from lightning.msg import StopPlanning, RRStats
from lightning.srv import ManagePathLibrary, ManagePathLibraryResponse
import sys
import pickle
import time
# Name of this node.
RR_NODE_NAME = "rr_node"
# Name to use for stopping the repair planner. Published from this node.
STOP_PLANNER_NAME = "stop_rr_planning"
# Topic to subscribe to for stopping the whole node in the middle of processing.
STOP_RR_NAME = "stop_all_rr"
# Name of library managing service run from this node.
MANAGE_LIBRARY = "manage_path_library"
STATE_RETRIEVE, STATE_REPAIR, STATE_RETURN_PATH, STATE_FINISHED, STATE_FINISHED = (0, 1, 2, 3, 4)
class RRNode:
def __init__(self):
# Retrieve ROS parameters and configuration and cosntruct various objects.
self.robot_name = rospy.get_param("robot_name")
self.planner_config_name = rospy.get_param("planner_config_name")
self.current_joint_names = []
self.current_group_name = ""
self.plan_trajectory_wrapper = PlanTrajectoryWrapper("rr", int(rospy.get_param("~num_rr_planners")))
self.invalid_section_wrapper = InvalidSectionWrapper()
self.path_library = PathLibrary(rospy.get_param("~path_library_dir"), rospy.get_param("step_size"), node_size=int(rospy.get_param("~path_library_path_node_size")), sg_node_size=int(rospy.get_param("~path_library_sg_node_size")), dtw_dist=float(rospy.get_param("~dtw_distance")))
self.num_paths_checked = int(rospy.get_param("~num_paths_to_collision_check"))
self.stop_lock = threading.Lock()
self.stop = True
self.rr_server = actionlib.SimpleActionServer(RR_NODE_NAME, RRAction, execute_cb=self._retrieve_repair, auto_start=False)
self.rr_server.start()
self.stop_rr_subscriber = rospy.Subscriber(STOP_RR_NAME, StopPlanning, self._stop_rr_planner)
self.stop_rr_planner_publisher = rospy.Publisher(STOP_PLANNER_NAME, StopPlanning, queue_size=10)
self.manage_library_service = rospy.Service(MANAGE_LIBRARY, ManagePathLibrary, self._do_manage_action)
self.stats_pub = rospy.Publisher("rr_stats", RRStats, queue_size=10)
self.repaired_sections_lock = threading.Lock()
self.repaired_sections = []
self.working_lock = threading.Lock() #to ensure that node is not doing RR and doing a library management action at the same time
#if draw_points is True, then display points in rviz
self.draw_points = rospy.get_param("draw_points")
if self.draw_points:
self.draw_points_wrapper = DrawPointsWrapper()
def _set_repaired_section(self, index, section):
"""
After you have done the path planning to repair a section, store
the repaired path section.
Args:
index (int): the index corresponding to the section being repaired.
section (path, list of list of float): A path to store.
"""
self.repaired_sections_lock.acquire()
self.repaired_sections[index] = section
self.repaired_sections_lock.release()
def _call_planner(self, start, goal, planning_time):
"""
Calls a standard planner to plan between two points | with an allowed
planning time.
Args:
start (list of float): A joint configuration corresponding to the
start position of the path.
goal (list of float): The jount configuration corresponding to the
goal position for the path.
Returns:
path: A | list of joint configurations corresponding to the planned
path.
"""
ret = None
planner_number = self.plan_trajectory_wrapper.acquire_planner()
if not self._need_to_stop():
ret = self.plan_trajectory_wrapper.plan_trajectory(start, goal, planner_number, self.current_joint_names, self.current_group_name, planning_time, self.planner_config_name)
self.plan_trajectory_wrapper.release_planner(planner_number)
return ret
def _repair_thread(self, index, start, goal, start_index, goal_index, planning_time):
"""
Handles repairing a portion of the path.
All that this function really does is to plan from scratch between
the start and goal configurations and then store the planned path
in the appropriate places and draws either the repaired path or, if
the repair fails, the start and goal.
Args:
index (int): The index to pass to _set_repaired_section(),
corresponding to which of the invalid sections of the path we are
repairing.
start (list of float): The start joint configuration to use.
goal (list of float): The goal joint configuration to use.
start_index (int): The index in the overall path corresponding to
start. Only used for debugging info.
goal_index (int): The index in the overall path corresponding to
goal. Only used for debugging info.
planning_time (float): Maximum allowed time to spend planning, in
seconds.
"""
repaired_path = self._call_planner(start, goal, planning_time)
if self.draw_points:
if repaired_path is not None and len(repaired_path) > 0:
rospy.loginfo("RR action server: got repaired section with start = %s, goal = %s" % (repaired_path[0], repaired_path[-1]))
self.draw_points_wrapper.draw_points(repaired_path, self.current_group_name, "repaired"+str(start_index)+"_"+str(goal_index), DrawPointsWrapper.ANGLES, DrawPointsWrapper.GREENBLUE, 1.0, 0.01)
else:
|
atumanov/ray | python/ray/rllib/evaluation/postprocessing.py | Python | apache-2.0 | 2,480 | 0 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import scipy.signal
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.annotations import DeveloperAPI
def discount(x, gamma):
    """Return the reverse discounted cumulative sum of *x*.

    out[i] = x[i] + gamma * x[i+1] + gamma**2 * x[i+2] + ...
    """
    reversed_x = x[::-1]
    filtered = scipy.signal.lfilter([1], [1, -gamma], reversed_x, axis=0)
    return filtered[::-1]
class Postprocessing(object):
    """Constant definitions for postprocessing."""

    # SampleBatch key under which per-timestep advantage estimates are stored.
    ADVANTAGES = "advantages"
    # SampleBatch key under which value-function regression targets are stored.
    VALUE_TARGETS = "value_targets"
@DeveloperAPI
def compute_advantages(rollout, last_r, gamma=0.9, lambda_=1.0, use_gae=True):
    """Given a rollout, compute its value targets and the advantage.

    Args:
        rollout (SampleBatch): SampleBatch of a single trajectory
        last_r (float): Value estimation for last observation
        gamma (float): Discount factor.
        lambda_ (float): Parameter for GAE
        use_gae (bool): Using Generalized Advantage Estimation

    Returns:
        SampleBatch (SampleBatch): Object with experience from rollout and
            processed rewards.
    """
    traj = {}
    trajsize = len(rollout[SampleBatch.ACTIONS])
    # Stack each column into a contiguous array for vectorized math below.
    for key in rollout:
        traj[key] = np.stack(rollout[key])
    if use_gae:
        assert SampleBatch.VF_PREDS in rollout, "Values not found!"
        # Bootstrap the value sequence with the estimate for the state after
        # the last observation (last_r).
        vpred_t = np.concatenate(
            [rollout[SampleBatch.VF_PREDS],
             np.array([last_r])])
        # One-step TD residuals: r_t + gamma * V(s_{t+1}) - V(s_t).
        delta_t = (
            traj[SampleBatch.REWARDS] + gamma * vpred_t[1:] - vpred_t[:-1])
        # This formula for the advantage comes from
        # "Generalized Advantage Estimation": https://arxiv.org/abs/1506.02438
        traj[Postprocessing.ADVANTAGES] = discount(delta_t, gamma * lambda_)
        # Value targets are the GAE advantages plus the value predictions.
        traj[Postprocessing.VALUE_TARGETS] = (
            traj[Postprocessing.ADVANTAGES] +
            traj[SampleBatch.VF_PREDS]).copy().astype(np.float32)
    else:
        # Without GAE: advantage is the discounted return minus nothing
        # (no baseline), computed by appending last_r and discounting.
        rewards_plus_v = np.concatenate(
            [rollout[SampleBatch.REWARDS],
             np.array([last_r])])
        traj[Postprocessing.ADVANTAGES] = discount(rewards_plus_v, gamma)[:-1]
        # TODO(ekl): support using a critic without GAE
        traj[Postprocessing.VALUE_TARGETS] = np.zeros_like(
            traj[Postprocessing.ADVANTAGES])
    traj[Postprocessing.ADVANTAGES] = traj[
        Postprocessing.ADVANTAGES].copy().astype(np.float32)
    assert all(val.shape[0] == trajsize for val in traj.values()), \
        "Rollout stacked incorrectly!"
    return SampleBatch(traj)
|
XeCycle/indico | indico/MaKaC/webinterface/pages/abstracts.py | Python | gpl-3.0 | 69,950 | 0.008806 | # This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from flask import session
from xml.sax.saxutils import quoteattr
import urllib
from pytz import timezone
from MaKaC.common.search import get_authors_from_author_index
import MaKaC.webinterface.wcomponents as wcomponents
import MaKaC.webinterface.urlHandlers as urlHandlers
import MaKaC.webinterface.navigation as navigation
import MaKaC.review as review
from MaKaC.webinterface.pages.conferences import WPConferenceModifBase, WPConferenceDefaultDisplayBase, WPConferenceModifAbstractBase
from MaKaC.webinterface.pages.conferences import WConfDisplayBodyBase
from indico.core.config import Config
from MaKaC.webinterface.common.abstractStatusWrapper import AbstractStatusList
from MaKaC.i18n import _
from indico.util.i18n import i18nformat
from indico.util.date_time import format_time, format_date, format_datetime
from MaKaC.common.timezoneUtils import nowutc, getAdjustedDate, DisplayTZ
from MaKaC.common.fossilize import fossilize
from MaKaC.fossils.conference import ILocalFileAbstractMaterialFossil
from MaKaC.review import AbstractStatusSubmitted
from MaKaC.review import AbstractTextField
from MaKaC.common.TemplateExec import render
from indico.util.string import render_markdown, natural_sort_key
class WConfCFADeactivated(WConfDisplayBodyBase):
    """Display body shown when the call for abstracts is deactivated."""

    _linkname = 'call_for_abstracts'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def getVars(self):
        template_vars = wcomponents.WTemplated.getVars(self)
        template_vars["body_title"] = self._getTitle()
        return template_vars
class WPCFAInactive(WPConferenceDefaultDisplayBase):
def _getBody(self, params):
wc = WConfCFADeactivated(self._getAW(), self._conf)
return wc.getHTML()
class WCFANotYetOpened(WConfDisplayBodyBase):
    """Display body shown before the abstract submission period opens."""

    _linkname = 'abstract_submission'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def getVars(self):
        template_vars = wcomponents.WTemplated.getVars(self)
        manager = self._conf.getAbstractMgr()
        template_vars["body_title"] = self._getTitle()
        template_vars["start_date"] = format_date(manager.getStartSubmissionDate(), "long")
        return template_vars
class WPCFANotYetOpened(WPConferenceDefaultDisplayBase):
menu_entry_name = 'abstract_submission'
def _getBody(self, params):
wc = WCFANotYetOpened(self._getAW(), self._conf)
return wc.getHTML()
class WCFAClosed(WConfDisplayBodyBase):
    """Display body shown after the abstract submission deadline has passed."""

    _linkname = 'abstract_submission'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def getVars(self):
        template_vars = wcomponents.WTemplated.getVars(self)
        manager = self._conf.getAbstractMgr()
        template_vars["body_title"] = self._getTitle()
        template_vars["end_date"] = format_date(manager.getEndSubmissionDate(), "long")
        return template_vars
class WPCFAClosed(WPConferenceDefaultDisplayBase):
    """Display page wrapping WCFAClosed; remembers if it was a modification."""

    menu_entry_name = 'abstract_submission'

    def __init__(self, rh, conf, is_modif):
        WPConferenceDefaultDisplayBase.__init__(self, rh, conf)
        self._is_modif = is_modif

    def _getBody(self, params):
        body = WCFAClosed(self._getAW(), self._conf)
        return body.getHTML({'is_modif': self._is_modif})
class WConfCFA(WConfDisplayBodyBase):
    """Display body for the call-for-abstracts overview page."""

    _linkname = 'call_for_abstracts'

    def __init__(self, aw, conf):
        self._conf = conf
        self._aw = aw

    def _getActionsHTML(self):
        """Return the HTML list of actions available to the current user.

        Empty before the submission period starts; once it has started the
        list always offers viewing/modifying one's abstracts, and offers a
        "submit" link only while the submission period is still open.
        """
        html = ""
        cfa = self._conf.getAbstractMgr()
        if nowutc() < cfa.getStartSubmissionDate():
            # Submission has not started yet: no actions to offer.
            return html
        else:
            submitOpt = ""
            if cfa.inSubmissionPeriod():
                submitOpt = i18nformat("""<li><a href="%s"> _("Submit a new abstract")</a></li>""") % (
                    urlHandlers.UHAbstractSubmission.getURL(self._conf))
            html = i18nformat("""
                <b> _("Possible actions you can carry out"):</b>
                <ul>
                    %s
                    <li><a href="%s"> _("View or modify your already submitted abstracts")</a></li>
                </ul>
                """) % (submitOpt, urlHandlers.UHUserAbstracts.getURL(self._conf))
        return html

    def getVars(self):
        """Fill the template variables: status, dates, actions, announcement."""
        wvars = wcomponents.WTemplated.getVars(self)
        cfa = self._conf.getAbstractMgr()
        if cfa.inSubmissionPeriod():
            wvars["status"] = _("OPENED")
        else:
            wvars["status"] = _("CLOSED")
        wvars["startDate"] = cfa.getStartSubmissionDate().strftime("%d %B %Y")
        wvars["endDate"] = cfa.getEndSubmissionDate().strftime("%d %B %Y")
        wvars["actions"] = self._getActionsHTML()
        wvars["announcement"] = cfa.getAnnouncement()
        wvars["body_title"] = self._getTitle()
        return wvars
class WPConferenceCFA( WPConferenceDefaultDisplayBase ):
navigationEntry = navigation.NEConferenceCFA
menu_entry_name = 'call_for_abstracts'
def _getBody(self, params):
wc = WConfCFA(self._getAW(), self._conf)
return wc.getHTML()
class WPAbstractSubmission( WPConferenceDefaultDisplayBase ):
navigationEntry = navigation.NEAbstractSubmission
menu_entry_name = 'abstract_submission'
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + \
self._asset_env['contributions_sass'].urls()
def getJSFiles(self):
return WPConferenceDefaultDisplayBase.getJSFiles(self) + \
self._includeJSPackage('Management') + \
self._asset_env['abstracts_js'].urls()
def _getHeadContent(self):
return WPConferenceDefaultDisplayBase._getHeadContent(self) + render('js/mathjax.config.js.tpl') + \
'\n'.join(['<script src="{0}" type="text/javascript"></script>'.format(url)
for url in self._asset_env['mathjax_js'].urls()])
def _getBody( self, params ):
params["postURL"] = urlHandlers.UHAbstractSubmission.getURL( self._conf )
params["origin"] = "display"
wc = WAbstractDataModification( self._conf )
return wc.getHTML( params )
class WUserAbstracts(WConfDisplayBodyBase):
_linkname = 'user_abstracts'
def __init__(self, aw, conf):
self._aw = aw
self._conf = conf
def _getAbstractStatus(self, abstract):
status = abstract.getCurrentStatus()
if isinstance(status, review.AbstractStatusAccepted):
statusLabel = _("Accepted")
if status.getType() is not None and status.getType() != "":
return "%s as %s" % (statusLabel, status.getType().getName())
elif isinstance(status, review.AbstractStatusRejected):
return _("Rejected") |
elif isinstance(status, review.AbstractStatusWithdrawn):
return _("Withdrawn")
elif isinstance(status, review.AbstractStatusDuplicated):
return _("Duplicated")
elif isinstance(status, review.AbstractStatusMerged):
| return _("Merged")
elif isinstance(status, (review.AbstractStatusProposedToAccept, review.AbstractStatusProposedToReject)):
return _("Under Review")
elif isinstance(status, (review.AbstractInConflict)):
return _("In Conflict")
return _("Submitted")
def getVars(self):
wvars = wcomponents.WTemplated.getVars(self)
cfaMgr = self._conf.getAbstractMgr()
abstracts = cfaMgr.getAbstractListForAvatar(self._aw.getUser())
abstracts += cfaMgr.getAbstractListForAuthorEmail(self._aw. |
mariano/snakefire | snakefire/dialogs.py | Python | mit | 26,264 | 0.003884 | from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import QtWebKit
from renderers import MessageRenderer
from qtx import ClickableQLabel, IdleTimer, RowPushButton, SpellTextEditor
class AboutDialog(QtGui.QDialog):
    """Modal "About" dialog showing the application logo, version and links."""

    def __init__(self, mainFrame):
        super(AboutDialog, self).__init__(mainFrame)
        self._mainFrame = mainFrame
        self.setWindowTitle(self._mainFrame._("About {name}").format(name=self._mainFrame.NAME))
        self._setupUI()

    def _website(self):
        """Open the project home page in the default browser."""
        QtGui.QDesktopServices.openUrl(QtCore.QUrl("http://{url}".format(url=self._mainFrame.DOMAIN)))

    def _setupUI(self):
        """Build the dialog: logo, version label, website/twitter rows, OK button."""
        label = ClickableQLabel()
        label.setPixmap(QtGui.QPixmap(":/images/snakefire-big.png"))
        label.setAlignment(QtCore.Qt.AlignCenter)
        # Clicking the logo also opens the project website.
        self.connect(label, QtCore.SIGNAL("clicked()"), self._website)
        urlLabel = QtGui.QLabel("<a href=\"http://{url}\">{name}</a>".format(
            url=self._mainFrame.DOMAIN,
            name=self._mainFrame.DOMAIN
        ))
        urlLabel.setOpenExternalLinks(True)
        websiteBox = QtGui.QHBoxLayout()
        websiteBox.addWidget(QtGui.QLabel(self._mainFrame._("Website:")))
        websiteBox.addWidget(urlLabel)
        websiteBox.addStretch(1)
        twitterLabel = QtGui.QLabel("<a href=\"http://twitter.com/snakefirelinux\">@snakefirelinux</a>")
        twitterLabel.setOpenExternalLinks(True)
        twitterBox = QtGui.QHBoxLayout()
        twitterBox.addWidget(QtGui.QLabel(self._mainFrame._("Twitter:")))
        twitterBox.addWidget(twitterLabel)
        twitterBox.addStretch(1)
        layout = QtGui.QVBoxLayout()
        layout.addWidget(label)
        layout.addStretch(0.5)
        layout.addWidget(QtGui.QLabel("<strong>{name} v{version}</strong>".format(
            name=self._mainFrame.NAME,
            version=self._mainFrame.VERSION
        )))
        layout.addStretch(0.5)
        layout.addLayout(websiteBox)
        layout.addLayout(twitterBox)
        # Buttons
        self._okButton = QtGui.QPushButton(self._mainFrame._("&OK"), self)
        self.connect(self._okButton, QtCore.SIGNAL('clicked()'), self.close)
        # Main layout
        hbox = QtGui.QHBoxLayout()
        hbox.addStretch(1)
        hbox.addWidget(self._okButton)
        vbox = QtGui.QVBoxLayout()
        vbox.addLayout(layout)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
class AlertsDialog(QtGui.QDialog):
    """Settings dialog for message-match alerts and notification options."""

    def __init__(self, mainFrame):
        super(AlertsDialog, self).__init__(mainFrame)
        self._mainFrame = mainFrame
        self.setWindowTitle(self._mainFrame._("Alerts"))
        self._setupUI()

    def ok(self):
        """Persist the settings and close the dialog."""
        self._save()
        self.close()

    def cancel(self):
        """Discard any changes and close the dialog."""
        self.close()

    def add(self, match=None):
        """Append a match row to the table; *match* (dict) pre-fills it when given."""
        row = self._table.rowCount()
        self._table.insertRow(row)
        column = QtGui.QTableWidgetItem()
        column.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsEditable)
        if match:
            column.setText(match['match'])
        self._table.setItem(row, 0, column)
        checkbox = QtGui.QCheckBox(self._table)
        checkbox.setChecked(match['regex'] if match else False)
        self._table.setCellWidget(row, 1, checkbox)
        button = RowPushButton(row, self._mainFrame._("Delete"), self._table)
        self.connect(button, QtCore.SIGNAL('clicked(int)'), self.delete)
        self._table.setCellWidget(row, 2, button)
        self._table.setCurrentCell(row, 0)

    def delete(self, row):
        """Remove the given row and re-validate the remaining entries."""
        self._table.removeRow(row)
        self.validate()

    def validate(self):
        """Enable the Add/OK buttons only when every match cell is non-empty."""
        isValid = True
        rowCount = self._table.rowCount()
        for i in range(rowCount):
            # QString API (PyQt4): trimmed()/isEmpty() instead of str methods.
            match = self._table.item(i, 0).text().trimmed()
            if match.isEmpty():
                isValid = False
                break
        self._addButton.setEnabled(isValid)
        self._okButton.setEnabled(isValid)
        return isValid

    def _save(self):
        """Write the table rows and checkbox options back to the settings store."""
        matches = []
        for i in range(self._table.rowCount()):
            matches.append({
                'match': str(self._table.item(i, 0).text().trimmed()),
                'regex': self._table.cellWidget(i, 1).isChecked()
            })
        self._mainFrame.setSettings("matches", matches)
        alertsSettings = {
            "notify_ping": self._notifyOnPingField.isChecked(),
            "notify_inactive_tab": self._notifyOnInactiveTabField.isChecked(),
            "notify_blink": self._notifyBlinkField.isChecked(),
            "notify_notify": self._notifyNotifyField.isChecked()
        }
        self._mainFrame.setSettings("alerts", alertsSettings)

    def _setupUI(self):
        """Build the match table, option checkboxes and dialog buttons, then load settings."""
        self._addButton = QtGui.QPushButton(self._mainFrame._("Add"), self)
        self.connect(self._addButton, QtCore.SIGNAL('clicked()'), self.add)
        addBox = QtGui.QHBoxLayout()
        addBox.addStretch(1)
        addBox.addWidget(self._addButton)
        headers = QtCore.QStringList()
        headers.append(QtCore.QString(self._mainFrame._("Search text")))
        headers.append(QtCore.QString(self._mainFrame._("RegEx")))
        headers.append(QtCore.QString(self._mainFrame._("Delete")))
        self._table = QtGui.QTableWidget(self)
        self._table.setColumnCount(3)
        self._table.setHorizontalHeaderLabels(headers)
        self._table.resizeColumnsToContents()
        self._table.horizontalHeader().setResizeMode(0, QtGui.QHeaderView.Stretch)
        tableBox = QtGui.QVBoxLayout()
        tableBox.addWidget(self._table)
        tableBox.addLayout(addBox)
        # Options
        self._notifyOnPingField = QtGui.QCheckBox(self._mainFrame._("Alert me whenever I get a &direct message"), self)
        self._notifyOnInactiveTabField = QtGui.QCheckBox(self._mainFrame._("Notify me of every message sent while I'm &inactive"), self)
        optionsGrid = QtGui.QGridLayout()
        optionsGrid.addWidget(self._notifyOnPingField, 1, 0)
        optionsGrid.addWidget(self._notifyOnInactiveTabField, 2, 0)
        optionsGroupBox = QtGui.QGroupBox(self._mainFrame._("Alerts && Notifications"))
        optionsGroupBox.setLayout(optionsGrid)
        # Methods
        self._notifyBlinkField = QtGui.QCheckBox(self._mainFrame._("&Blink the systray icon when notifying"), self)
        self._notifyNotifyField = QtGui.QCheckBox(self._mainFrame._("Trigger a &Notification using the OS notification system"), self)
        methodsGrid = QtGui.QGridLayout()
        methodsGrid.addWidget(self._notifyBlinkField, 1, 0)
        methodsGrid.addWidget(self._notifyNotifyField, 2, 0)
        methodsGroupBox = QtGui.QGroupBox(self._mainFrame._("Notification methods"))
        methodsGroupBox.setLayout(methodsGrid)
        # Buttons
        self._okButton = QtGui.QPushButton(self._mainFrame._("&OK"), self)
        self._cancelButton = QtGui.QPushButton(self._mainFrame._("&Cancel"), self)
        self.connect(self._okButton, QtCore.SIGNAL('clicked()'), self.ok)
        self.connect(self._cancelButton, QtCore.SIGNAL('clicked()'), self.cancel)
        # Main layout
        hbox = QtGui.QHBoxLayout()
        hbox.addStretch(1)
        hbox.addWidget(self._okButton)
        hbox.addWidget(self._cancelButton)
        vbox = QtGui.QVBoxLayout()
        vbox.addLayout(tableBox)
        vbox.addWidget(optionsGroupBox)
        vbox.addWidget(methodsGroupBox)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
        # Load settings
        alertsSettings = self._mainFrame.getSettings("alerts")
        matches = self._mainFrame.getSettings("matches")
        self._notifyOnPingField.setChecked(alertsSettings["notify_ping"])
        self._notifyOnInactiveTabField.setChecked(alertsSettings["notify_inactive_tab"])
        self._notifyBlinkField.setChecked(alertsSettings["notify_blink"])
        self._notifyNotifyField.setChecked(alertsSettings["notify_notify"])
        if matches:
            for match in matches:
                self.add(match)
        # Only connect to signal after adding rows
        self.connect(self._table, QtCore.SIGNAL('cellChanged(int,int)'), self.validate)
        self.validate()
class OptionsDialog(QtGui.Q |
gem/oq-hazardlib | openquake/hmtk/sources/source_conversion_utils.py | Python | agpl-3.0 | 5,833 | 0.001372 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (c) 2010-2017, GEM Foundation, G. Weatherill, M. Pagani,
# D. Monelli.
#
# The Hazard Modeller's Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>
#
# DISCLAIMER
#
# The software Hazard Modeller's Toolkit (openquake.hmtk) provided herein
# is released as a prototype implementation on behalf of
# scientists and engineers working within the GEM Foundation (Global
# Earthquake Model).
#
# It is distributed for the purpose of open collaboration and in the
# hope that it will be useful to the scientific, engineering, disaster
# risk and software design communities.
#
# The software is NOT distributed as part of GEM's OpenQuake suite
# (http://www.globalquakemodel.org/openquake) and must be considered as a
# separate entity. The software provided herein is designed and implemented
# by scientific staff. It is not developed to the design standards, nor
# subject to same level of critical review by professional software
# developers, as GEM's OpenQuake software suite.
#
# Feedback and contribution to the software is welcome, and can be
# directed to the hazard scientific staff of the GEM Model Facility
# (hazard@globalquakemodel.org).
#
# The Hazard Modeller's Toolkit (openquake.hmtk) is therefore distributed WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# The GEM Foundation, and the authors of the software, assume no
# liability for use of the software.
# -*- coding: utf-8 -*-
import abc
from decimal import Decimal
from openquake.hazardlib. | pmf import PMF
from openquake.hazardlib.geo.nodalplane | import NodalPlane
from openquake.hazardlib import mfd
from openquake.hazardlib.scalerel import get_available_scalerel
from openquake.hazardlib.scalerel.base import BaseMSR
from openquake.hazardlib.scalerel.wc1994 import WC1994
SCALE_RELS = get_available_scalerel()
def render_aspect_ratio(aspect_ratio, use_default=False):
    '''
    Returns the aspect ratio if one is defined for the source, otherwise
    if defaults are accepted a default value of 1.0 is returned or else
    a ValueError is raised

    :param float aspect_ratio:
        Ratio of along strike-length to down-dip width of the rupture
    :param bool use_default:
        If true, when aspect_ratio is undefined will return default value of
        1.0, otherwise will raise an error.
    '''
    # Guard clause: an undefined (falsy) aspect ratio either falls back to
    # the default of 1.0 or is an error, depending on use_default.
    if not aspect_ratio:
        if use_default:
            return 1.0
        raise ValueError('Rupture aspect ratio not defined!')
    assert aspect_ratio > 0.
    return aspect_ratio
def mag_scale_rel_to_hazardlib(mag_scale_rel, use_default=False):
    """
    Returns the magnitude scaling relation in a format readable by
    openquake.hazardlib

    :param mag_scale_rel:
        Either an instance of a hazardlib scaling relation (returned
        unchanged) or the name of a supported scaling relation.
    :param bool use_default:
        If True and ``mag_scale_rel`` is undefined, returns the
        Wells & Coppersmith (1994) relation; otherwise raises ValueError.
    """
    if isinstance(mag_scale_rel, BaseMSR):
        return mag_scale_rel
    elif isinstance(mag_scale_rel, str):
        # Idiomatic membership test (was: "not x in d.keys()").
        if mag_scale_rel not in SCALE_RELS:
            raise ValueError('Magnitude scaling relation %s not supported!'
                             % mag_scale_rel)
        return SCALE_RELS[mag_scale_rel]()
    else:
        if use_default:
            # Returns the Wells and Coppersmith relation
            return WC1994()
        raise ValueError('Magnitude Scaling Relation Not Defined!')
def npd_to_pmf(nodal_plane_dist, use_default=False):
    """
    Returns the nodal plane distribution as an instance of the PMF class
    """
    if isinstance(nodal_plane_dist, PMF):
        # Already in the target representation - pass through unchanged.
        return nodal_plane_dist
    if use_default:
        # Default: a single vertical plane (dip 90) striking north, rake 0.
        return PMF([(1.0, NodalPlane(0.0, 90.0, 0.0))])
    raise ValueError('Nodal Plane distribution not defined')
def hdd_to_pmf(hypo_depth_dist, use_default=False):
    """
    Returns the hypocentral depth distribution as an instance of the
    :class: openquake.hazardlib.pmf.
    """
    if isinstance(hypo_depth_dist, PMF):
        # Already a PMF - nothing to convert.
        return hypo_depth_dist
    if use_default:
        # Accept the conventional default depth of 10 km.
        return PMF([(1.0, 10.0)])
    # Out of options - raise error!
    raise ValueError('Hypocentral depth distribution not defined!')
def simple_trace_to_wkt_linestring(trace):
    '''
    Converts a simple fault trace to well-known text format

    :param trace:
        Fault trace as instance of :class: openquake.hazardlib.geo.line.Line
    :returns:
        Well-known text (WKT) Linstring representation of the trace
    '''
    # Build the coordinate list in one pass with str.join instead of the
    # previous quadratic "+=" concatenation followed by lstrip/rstrip.
    coords = ', '.join('%s %s' % (point.longitude, point.latitude)
                       for point in trace)
    return 'LINESTRING (' + coords + ')'
def simple_edge_to_wkt_linestring(edge):
    '''
    Converts a simple fault edge (with depths) to well-known text format

    :param edge:
        Fault edge as instance of :class: openquake.hazardlib.geo.line.Line
    :returns:
        Well-known text (WKT) Linstring representation of the edge
    '''
    # Same join-based construction as simple_trace_to_wkt_linestring, with
    # the point depth as a third coordinate.
    coords = ', '.join('%s %s %s' % (point.longitude, point.latitude,
                                     point.depth)
                       for point in edge)
    return 'LINESTRING (' + coords + ')'
|
OpenNumismat/open-numismat | OpenNumismat/ListView.py | Python | gpl-3.0 | 51,632 | 0.000775 | import operator
import pickle
import os.path
from PyQt5 import QtCore
from PyQt5.QtCore import Qt, pyqtSignal, QSortFilterProxyModel
from PyQt5.QtCore import QCollator, QLocale
from PyQt5.QtCore import QAbstractProxyModel, QModelIndex, QItemSelectionModel
from PyQt5.QtCore import QRectF, QRect
from PyQt5.QtSql import QSqlQuery
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.Qt import QMargins
import OpenNumismat
from OpenNumismat.EditCoinDialog.EditCoinDialog import EditCoinDialog
from OpenNumismat.Collection.CollectionFields import FieldTypes as Type
from OpenNumismat.Collection.CollectionFields import StatusesOrder
from OpenNumismat.SelectColumnsDialog import SelectColumnsDialog
from OpenNumismat.Collection.HeaderFilterMenu import FilterMenuButton
from OpenNumismat.Tools import Gui, TemporaryDir
from OpenNumismat.Reports.Report import Report
from OpenNumismat.Reports.Preview import PreviewDialog
from OpenNumismat.Settings import Settings
from OpenNumismat.Reports.ExportList import ExportToExcel, ExportToHtml, ExportToCsv, ExportToCsvUtf8
from OpenNumismat.Tools.Gui import getSaveFileName
from OpenNumismat.Collection.HeaderFilterMenu import ColumnFilters, ValueFilter, DataFilter, BlankFilter
def textToClipboard(text):
    """Quote *text* CSV-style when it contains a tab or line break.

    Embedded double quotes are doubled inside the quoted form; text without
    any of the special characters is returned unchanged.
    """
    if any(special in text for special in '\t\n\r'):
        return '"' + text.replace('"', '""') + '"'
    return text
def clipboardToText(text):
    """Inverse of textToClipboard(): strip the surrounding quotes and
    collapse doubled quotes when the text contains a tab or line break."""
    if any(special in text for special in '\t\n\r'):
        return text[1:-1].replace('""', '"')
    return text
class BaseTableView(QTableView):
rowChanged = pyqtSignal(object)
# TODO: Changes mime type
MimeType = 'num/data'
    def __init__(self, listParam, parent=None):
        """Create the coin table view.

        Args:
            listParam: per-list column/parameter settings object.
            parent: optional parent widget.
        """
        super().__init__(parent)
        self.proxyModel = None
        # True once the user applies a custom sort order; checked by
        # tryDragMode() because drag-reordering needs the default order.
        self.sortingChanged = False
        self.searchText = ''
        self.listParam = listParam
        # id of the currently focused coin record, updated in currentChanged().
        self.selectedId = None
        # Status-bar labels: shown/total coin count and selection count.
        self.listCountLabel = QLabel()
        self.listSelectedLabel = QLabel(QApplication.translate('BaseTableView', "0 coins selected"))
def _sortChangedMessage(self):
return QMessageBox.information(
self, QApplication.translate('BaseTableView', "Custom sorting"),
QApplication.translate('BaseTableView',
"Default sort order changed.\n"
"Changing item position avalaible only on default "
"sort order. Clear sort order now?"),
QMessageBox.Yes | QMessageBox.Cancel,
QMessageBox.Cancel)
    def tryDragMode(self):
        """Try to switch the view into internal drag & drop (row reorder) mode.

        Reordering is only meaningful in the default sort order, so when a
        custom sort is active the user is asked whether to clear it first.

        Returns:
            bool: True if drag mode was enabled, False if the user declined
            to clear a custom sort order.
        """
        if self.sortingChanged:
            result = self._sortChangedMessage()
            if result == QMessageBox.Yes:
                self.clearSorting()
            else:
                return False
        self.setSelectionMode(QAbstractItemView.SingleSelection)
        self.setDragDropMode(QAbstractItemView.InternalMove)
        self.setDragEnabled(True)
        self.setAcceptDrops(True)
        self.setDragDropOverwriteMode(False)
        self.setDropIndicatorShown(True)
        return True
def selectMode(self):
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.setDragEnabled(False)
self.setAcceptDrops(False)
def isDragMode(self):
return self.dragDropMode() == QAbstractItemView.InternalMove
def modelChanged(self):
# Fetch all selected records
while self.model().canFetchMore():
self.model().fetchMore()
newCount = self.model().rowCount()
# Show updated coins count
sql = "SELECT count(*) FROM coins"
query = QSqlQuery(sql, self.model().database())
query.first()
totalCount = query.record().value(0)
labelText = QApplication.translate('BaseTableView', "%d/%d coins") % (newCount, totalCount)
self.listCountLabel.setText(labelText)
def itemDClicked(self, _index):
self._edit(self.currentIndex())
    def keyPressEvent(self, event):
        """Keyboard shortcuts for the coin list.

        Enter edits the selection (single or batch), the standard
        copy/paste/delete sequences act on the selected coins, and the
        move-to-start/end-of-document sequences jump to the first/last row.
        """
        key = event.key()
        if (key == Qt.Key_Return) or (key == Qt.Key_Enter):
            indexes = self.selectedCoins()
            if len(indexes) == 1:
                # Single coin: open the full edit dialog.
                self._edit(indexes[0])
            elif len(indexes) > 1:
                # Several coins: open the multi-record edit dialog.
                self._multiEdit(indexes)
        elif event.matches(QKeySequence.Copy):
            self._copy(self.selectedCoins())
        elif event.matches(QKeySequence.Paste):
            self._paste()
        elif event.matches(QKeySequence.Delete):
            self._delete(self.selectedCoins())
        elif event.matches(QKeySequence.MoveToStartOfDocument):
            # Jump to the first row and drop the selection.
            index = self.model().index(0, 0)
            self.scrollToIndex(index)
            self.clearSelection()
        elif event.matches(QKeySequence.MoveToEndOfDocument):
            # Jump to the last row and drop the selection.
            index = self.model().index(self.model().rowCount() - 1, 0)
            self.scrollToIndex(index)
            self.clearSelection()
        else:
            return super().keyPressEvent(event)
def contextMenuEvent(self, pos):
raise NotImplementedError
def currentChanged(self, current, previous):
index = self.currentIndex()
if index.isValid():
id_col = self.model().fieldIndex('id')
id_index = self.model().index(index.row(), id_col)
self.selectedId = self.model().dataDisplayRole(id_index)
return super().currentChanged(current, previous)
def selectionChanged(self, selected, deselected):
count = len(self.selectedCoins())
label = QApplication.translate('BaseTableView', "%n coin(s) selected",
'', count)
self.listSelectedLabel.setText(label)
return super().selectionChanged(selected, deselected)
def _mapToSource(self, index):
return self.proxyModel.mapToSource(index)
def currentIndex(self):
index = super().currentIndex()
return self._mapToSource(index)
def selectedCoins(self):
raise NotImplementedError
def clearSorting(self):
sort_column_id = self.model().fields.sort_id.id
self.sortByColumn(sort_column_id, Qt.AscendingOrder)
self.sortingChanged = False
def saveSorting(self):
pass
def report(self):
indexes = []
for i in range(self.model().rowCount()):
index = self.proxyModel.index(i, 0)
real_index = self.proxyModel.mapToSource(index)
indexes.append(real_index)
if indexes:
preview = PreviewDialog(self.model(), indexes, self)
preview.exec_()
else:
QMessageBox.information(
self, QApplication.translate('BaseTableView', "Report preview"),
QApplication.translate('BaseTableView', "Nothing selected"))
def viewInBrowser(self, template=None):
if not template:
template = Settings()['template']
template_name = os.path.basename(template)
dstPath = os.path.join(TemporaryDir.path(), template_name + '.htm')
report = Report(self.model(), template, dstPath, self)
indexes = []
for i in range(self.model().rowCount()):
index = self.proxyModel.index(i, 0)
real_index = self.proxyModel.mapToSource(index)
indexes.append(real_index)
if indexes:
fileName = report.generate(indexes)
if fileName:
executor = QDesktopServices()
executor.openUrl(QtCore.QUrl.fromLocalFile(fileName))
else:
QMessageBox.information(
self, QApplication.translate('BaseTableView', "Report preview"),
QApplication.translate('BaseTableView', "Nothing selected"))
def saveTable(self):
filters = (QApplication.translate('BaseTableView', "Excel document (*.xls)"),
QApplication.translate('BaseTableView', "Web page (*.htm *.html)"),
QApplication.translate('BaseTableView', "Text file (*.csv)"),
QApplication.translate('BaseTableView', "Text file UTF-8 (*.csv)"))
if not ExportToExcel.isAvailable():
availableFilters = filters[1:]
else:
availableFilt |
mbdriscoll/asp-old | tests/asp_module_tests.py | Python | bsd-3-clause | 18,180 | 0.00495 | import unittest2 as unittest
import asp.jit.asp_module as asp_module
import asp.codegen.cpp_ast as cpp_ast
from mock import Mock
class TimerTest(unittest.TestCase):
    def test_timer(self):
        # NOTE(review): this test body is disabled; it previously built an
        # ASPModule, added a trivial function and checked that a timing entry
        # appeared in mod.times. Left as a no-op placeholder.
        pass
        # mod = asp_module.ASPModule()
        # mod.add_function("void test(){;;;;}", "test")
        # # mod.test()
        # self.failUnless("test" in mod.times.keys())
class ASPDBTests(unittest.TestCase):
    """Tests for ASPDB; the real sqlite connection is replaced with a Mock
    so the SQL each operation issues can be asserted on directly."""

    def test_creating_db(self):
        db = asp_module.ASPDB("test_specializer")

    def test_create_db_if_nonexistent(self):
        db = asp_module.ASPDB("test")
        self.assertTrue(db.connection)

    def test_create_table(self):
        db = asp_module.ASPDB("test")
        db.close() # close the real connection so we can mock it out
        db.connection = Mock()
        db.create_specializer_table()
        db.connection.execute.assert_called_with(
            'create table test (fname text, variant text, key text, perf real)')

    def test_insert(self):
        db = asp_module.ASPDB("test")
        db.close() # close the real connection so we can mock it out
        db.connection = Mock()
        db.table_exists = Mock(return_value = True)
        db.create_specializer_table()
        db.insert("func", "func", "KEY", 4.321)
        db.connection.execute.assert_called_with(
            'insert into test values (?,?,?,?)', ("func", "func", "KEY", 4.321))

    def test_create_if_insert_into_nonexistent_table(self):
        db = asp_module.ASPDB("test")
        db.close() # close the real connection so we can mock it out
        db.connection = Mock()
        # this is kind of a complicated situation. we want the cursor to
        # return an array when fetchall() is called on it, and we want this
        # cursor to be created when the mock connection is asked for a cursor
        mock_cursor = Mock()
        mock_cursor.fetchall.return_value = []
        db.connection.cursor.return_value = mock_cursor
        db.create_specializer_table = Mock()
        db.insert("func", "v1", "KEY", 4.321)
        self.assertTrue(db.create_specializer_table.called)

    def test_get(self):
        db = asp_module.ASPDB("test")
        db.close() # close the real connection so we can mock it out
        db.connection = Mock()
        db.table_exists = Mock(return_value = True)
        db.create_specializer_table()
        # see note about mocks in test_create_if...
        mock_cursor = Mock()
        mock_cursor.fetchall.return_value = ['hello']
        db.connection.cursor.return_value = mock_cursor
        db.create_specializer_table = Mock()
        db.get("func")
        mock_cursor.execute.assert_called_with("select * from test where fname=?",
                                               ("func",))

    def test_update(self):
        db = asp_module.ASPDB("test")
        db.close() # close the real connection so we can mock it out
        db.connection = Mock()
        db.table_exists = Mock(return_value = True)
        db.update("foo", "foo_v1", "KEY", 3.21)
        db.connection.execute.assert_called_with("update test set perf=? where fname=? and variant=? and key=?",
                                                 (3.21, "foo", "foo_v1", "KEY"))

    def test_delete(self):
        db = asp_module.ASPDB("test")
        db.close() # close the real connection so we can mock it out
        db.connection = Mock()
        db.table_exists = Mock(return_value = True)
        db.delete("foo", "foo_v1", "KEY")
        db.connection.execute.assert_called_with("delete from test where fname=? and variant=? and key=?",
                                                 ("foo", "foo_v1", "KEY"))
class SpecializedFunctionTests(unittest.TestCase):
    """Tests for SpecializedFunction: variant management and variant selection."""

    def test_creating(self):
        a = asp_module.SpecializedFunction("foo", None, Mock())

    def test_add_variant(self):
        mock_backend = asp_module.ASPBackend(Mock(), None, Mock())
        a = asp_module.SpecializedFunction("foo", mock_backend, Mock())
        a.add_variant("foo_1", "void foo_1(){return;}")
        self.assertEqual(a.variant_names[0], "foo_1")
        self.assertEqual(len(a.variant_funcs), 1)
        # also check to make sure the backend added the function
        self.assertTrue(mock_backend.module.add_to_module.called)
        # adding a variant with a duplicate name must fail
        self.assertRaises(Exception, a.add_variant, "foo_1", None)

    def test_add_variant_at_instantiation(self):
        mock_backend = asp_module.ASPBackend(Mock(), None, Mock())
        a = asp_module.SpecializedFunction("foo", mock_backend, Mock(),
                                           ["foo_1"], ["void foo_1(){return;}"])
        self.assertEqual(len(a.variant_funcs), 1)
        self.assertTrue(mock_backend.module.add_to_module.called)

    def test_call(self):
        # this is a complicated situation. we want the backend to have a fake
        # module, and that fake module should return a fake compiled module.
        # we'll cheat by just returning itself.
        mock_backend_module = Mock()
        mock_backend_module.compile.return_value = mock_backend_module
        mock_backend = asp_module.ASPBackend(mock_backend_module, None, Mock())
        mock_db = Mock()
        mock_db.get.return_value = []
        a = asp_module.SpecializedFunction("foo", mock_backend, mock_db)
        a.add_variant("foo_1", "void foo_1(){return;}")
        # test a call
        a()
        # it should call foo() on the backend module
        self.assertTrue(mock_backend_module.foo_1.called)

    def test_calling_with_multiple_variants(self):
        # this is a complicated situation. we want the backend to have a fake
        # module, and that fake module should return a fake compiled module.
        # we'll cheat by just returning itself.
        mock_backend_module = Mock()
        mock_backend_module.compile.return_value = mock_backend_module
        mock_backend = asp_module.ASPBackend(mock_backend_module, None, Mock())
        mock_db = Mock()
        mock_db.get.return_value = []
        a = asp_module.SpecializedFunction("foo", mock_backend, mock_db)
        a.add_variant("foo_1", "void foo_1(){return;}")
        a.add_variant("foo_2", "void foo_2(){}")
        # test 2 calls
        a()
        # ensure the second one sees that foo_1 was called the first time
        mock_db.get.return_value = [["foo", "foo_1", None, None]]
        a()
        # it should call both variants on the backend module
        self.assertTrue(mock_backend_module.foo_1.called)
        self.assertTrue(mock_backend_module.foo_2.called)

    def test_pick_next_variant(self):
        mock_db = Mock()
        mock_db.get.return_value = []
        a = asp_module.SpecializedFunction("foo", Mock(), mock_db)
        a.add_variant("foo_1", "void foo_1(){return;}")
        a.add_variant("foo_2", "void foo_2(){}")
        # with no timing data, the first untried variant is chosen
        self.assertEqual(a.pick_next_variant(), "foo_1")
        # now if one has run
        mock_db.get.return_value = [[None, "foo_1", None, None]]
        self.assertEqual(a.pick_next_variant(), "foo_2")
        # now if both have run
        mock_db.get.return_value = [[None, "foo_1", None, 1.0],
                                    [None, "foo_2", None, 2.0]]
        self.assertEqual(a.pick_next_variant(), "foo_1")
class HelperFunctionTests(unittest.TestCase):
    """Tests for HelperFunction, an unspecialized function attached to a backend."""
    def test_creating(self):
        """Constructing a HelperFunction from name, source, and backend must not raise."""
        f = asp_module.HelperFunction("foo", "void foo(){}", Mock())
    def test_call(self):
        # this is a complicated situation. we want the backend to have a fake
        # module, and that fake module should return a fake compiled module.
        # we'll cheat by just returning itself.
        mock_backend_module = Mock()
        mock_backend_module.compile.return_value = mock_backend_module
        mock_backend = asp_module.ASPBackend(mock_backend_module, None, Mock())
        a = asp_module.HelperFunction("foo", "void foo(){}", mock_backend)
        # test a call
        a()
        # it should call foo() on the backend module
        self.assertTrue(mock_backend_module.foo.called)
class ASPModuleMiscTests(unittest.TestCase):
def test_generate(self):
a = asp_module.ASPModule()
mock_backend = Mo |
janusnic/21v-python | unit_03/16.py | Python | mit | 1,009 | 0.011893 | L = ['SPAM!', 'eat', 'more', 'please']
del L[0] # delete one item
print L
del L[1:] # delete an entire section
print L # same as L[1:] = []
li = ['a', 'b', 'new', 'mpilgrim', 'z', 'example', 'new', 'two', 'elements']
print li.remove("z")
print li
li.remove("new")
print li
li.remove("z") |
print li
inventory = ["sword", "armor", "shield", "healing potion"]
print "In a great battle, your shield is destroyed."
del inventory[2]
print "Your inventory is now:"
print inventory
inventory = ["sword", "armor", "shield", "healing potion"]
del inventory[:2]
print inventory
scores = ["1","2","3"]
# delete a score
score = int(raw_input("Delete which score?: "))
if score in scores:
scores.remove(score)
else:
print score, "i | sn't in the high scores list."
# list high-score table
for score in scores:
print score |
ClearCorp/odoo-costa-rica | l10n_cr_hr_holidays/models/__init__.py | Python | agpl-3.0 | 140 | 0 | # -*- coding: utf-8 -*- |
# © 2016 ClearCorp
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
i | mport l10n_cr_hr_holidays
|
brownhead/different-logger | dlogger/_ansify.py | Python | unlicense | 2,079 | 0 | ANSI_ESCAPE_CODES = {
"reset": 0,
"bold": 1,
"faint": 2,
"italic": 3,
"underline": 4,
"blink-slow": 5,
"blink-fast": 6,
"inverse": 7,
"conceal": 8,
"strike-through": 9,
"font-default": 10,
"font-1": 11,
"font-2": 12,
"font-3": 13,
"font-4": 14,
"font-5": 15,
"font-6": 16,
"font-7": 17,
"font-8": 18,
"font-9": 19,
"fraktur": 20,
| "normal-intensity": 22,
"no-italic": 23,
"no-underline": 24,
"no-blink": 25,
"no-inverse": 27,
"no-conceal": 28,
"no-strike-through": 29,
"black": 30,
"red": 31,
"green": 32,
"yellow": 33,
"blue": 34,
"magenta": 35,
"cyan": 36,
"white": 3 | 7,
"color-default": 39,
"background-black": 40,
"background-red": 41,
"background-green": 42,
"background-yellow": 43,
"background-blue": 44,
"background-magenta": 45,
"background-cyan": 46,
"background-white": 47,
"background-default": 49,
"frame": 51,
"encircle": 52,
"overline": 53,
"no-frame": 54,
"no-overline": 55,
"bright-black": 90,
"bright-red": 91,
"bright-green": 92,
"bright-yellow": 93,
"bright-blue": 94,
"bright-magenta": 95,
"bright-cyan": 96,
"bright-white": 97,
"background-bright-black": 100,
"background-bright-red": 101,
"background-bright-green": 102,
"background-bright-yellow": 103,
"background-bright-blue": 104,
"background-bright-magenta": 105,
"background-bright-cyan": 106,
"background-bright-white": 107,
}
def ansify(code_names):
"""Returns an ANSI escape sequence for coloring text.
Given some code_names (see ANSI_ESCAPE_CODES) this function will return a
string you can print to the terminal that will apply the given styles to
any following text.
Ex:
print ansify(["red"]), "this text is red", ansify(["reset"]), \\
"not anymore!"
"""
codes = (str(ANSI_ESCAPE_CODES[code_name]) for code_name in code_names)
return u"\x1B[" + u";".join(codes) + u"m"
|
root-mirror/root | bindings/pyroot/pythonizations/test/import_load_libs.py | Python | lgpl-2.1 | 3,705 | 0.004318 | import unittest
import re
import os
class ImportLoadLibs(unittest.TestCase):
    """
    Test which libraries are loaded during importing ROOT
    """

    # The whitelist is a list of regex expressions that mark wanted libraries.
    # Note that the regex has to result in an exact match with the library name.
    known_libs = [
        # libCore and dependencies
        'libCore',
        'libm',
        'liblz4',
        'libxxhash',
        'liblzma',
        'libzstd',
        'libz',
        'libpthread',
        'libc',
        'libdl',
        'libpcre',
        # libCling and dependencies
        'libCling.*',
        'librt',
        'libncurses.*',
        'libtinfo',  # by libncurses (on some older platforms)
        # libTree and dependencies
        'libTree',
        'libThread',
        'libRIO',
        'libNet',
        'libImt',
        'libMathCore',
        'libMultiProc',
        'libssl',
        'libcrypt.*',  # by libssl
        'libtbb',
        'liburing',  # by libRIO if uring option is enabled
        # On centos7 libssl links against kerberos pulling in all dependencies below, removed with libssl1.1.0
        'libgssapi_krb5',
        'libkrb5',
        'libk5crypto',
        'libkrb5support',
        'libselinux',
        'libkeyutils',
        'libcom_err',
        'libresolv',
        # cppyy and Python libraries
        'libcppyy.*',
        'libROOTPythonizations.*',
        'libpython.*',
        'libutil.*',
        '.*cpython.*',
        '_.*',
        '.*module',
        'operator',
        'cStringIO',
        'binascii',
        'libbz2',
        'libexpat',
        'ISO8859-1',
        # System libraries and others
        'libnss_.*',
        'ld.*',
        'libffi',
    ]

    # Verbose mode of the test
    verbose = False

    def test_import(self):
        """
        Test libraries loaded after importing ROOT
        """
        import ROOT
        libs = str(ROOT.gSystem.GetLibraries())
        if self.verbose:
            print("Initial output from ROOT.gSystem.GetLibraries():\n" + libs)

        # Split paths
        libs = libs.split(' ')

        # Get library name without full path and .so* suffix
        libs = [os.path.basename(l).split('.so')[0] for l in libs \
                if not l.startswith('-l') and not l.startswith('-L')]

        # Check that the loaded libraries are white listed
        bad_libs = []
        good_libs = []
        matched_re = []
        for l in libs:
            matched = False
            for r in self.known_libs:
                m = re.match(r, l)
                if m:
                    # re.match anchors only at the start; require a full match
                    if m.group(0) == l:
                        matched = True
                        good_libs.append(l)
                        matched_re.append(r)
                        break
            if not matched:
                bad_libs.append(l)

        if self.verbose:
            print('Found whitelisted libraries after importing ROOT with the shown regex match:')
            for l, r in zip(good_libs, matched_re):
                print(' - {} ({})'.format(l, r))
            import sys
            sys.stdout.flush()

        if bad_libs:
            raise Exception('Found not whitelisted libraries after importing ROOT: ' \
                    + '\n - ' + '\n - '.join(bad_libs) \
                    + '\nIf the test fails with a library that is loaded on purpose, please add it to the whitelist.')
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
|
googleads/googleads-adxbuyer-examples | python/samples/v2_x/create_bidder_level_filter_set.py | Python | apache-2.0 | 7,717 | 0.005702 | #!/usr/bin/python
#
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a bidder-level filter set.
A bidder-level filter set can be used to retrieve aggregated data for all
Authorized Buyers accounts under the given bidder account, including the bidder
account itself.
"""
import argparse
from datetime import date
from datetime import datetime
from datetime import timedelta
import os
import pprint
import sys
import uuid
sys.path.insert(0, os.path.abspath('..'))
from googleapiclient.errors import HttpError
import samples_util
_DATE_FORMAT = '%Y%m%d'
_FILTER_SET_NAME_TEMPLATE = ('bidders/{bidders_resource_id}/'
                             'filterSets/{filtersets_resource_id}')
_OWNER_NAME_TEMPLATE = 'bidders/{bidders_resource_id}'
_TODAY = date.today()

# Closed sets of values accepted by the argparse "type" validators below.
_VALID_ENVIRONMENTS = ('WEB', 'APP')
_VALID_FORMATS = ('DISPLAY', 'VIDEO')
_VALID_PLATFORMS = ('DESKTOP', 'TABLET', 'MOBILE')
_VALID_TIME_SERIES_GRANULARITIES = ('HOURLY', 'DAILY')

DEFAULT_BIDDER_RESOURCE_ID = 'ENTER_BIDDER_RESOURCE_ID_HERE'
# A random suffix keeps the default filter set name unique across runs.
DEFAULT_FILTER_SET_RESOURCE_ID = f'FilterSet_{uuid.uuid4()}'
DEFAULT_END_DATE = _TODAY.strftime(_DATE_FORMAT)
# Default reporting window starts one week before today.
DEFAULT_START_DATE = (_TODAY - timedelta(days=7)).strftime(_DATE_FORMAT)
def main(ad_exchange_buyer, owner_name, body, is_transient):
    """Create a bidder-level filter set and print the API response.

    Args:
        ad_exchange_buyer: Initialized Ad Exchange Buyer II service object.
        owner_name: str, resource name of the bidder owning the filter set.
        body: dict, the filter set definition to create.
        is_transient: bool, whether the filter set is transient or persisted.
    """
    try:
        # Construct and execute the request.
        filter_set = ad_exchange_buyer.bidders().filterSets().create(
            ownerName=owner_name, isTransient=is_transient, body=body).execute()
        print(f'FilterSet created for bidder: "{owner_name}".')
        pprint.pprint(filter_set)
    except HttpError as e:
        # Print API errors (e.g. invalid body) instead of crashing the sample.
        print(e)
if __name__ == '__main__':
    # argparse "type" validators for the enum-like and date-valued flags.
    def time_series_granularity_type(s):
        """Validate that s is one of the accepted TimeSeriesGranularity values."""
        if s not in _VALID_TIME_SERIES_GRANULARITIES:
            raise argparse.ArgumentTypeError('Invalid TimeSeriesGranularity '
                                             f'specified: "{s}".')
        return s

    def environment_type(s):
        """Validate that s is one of the accepted Environment values."""
        if s not in _VALID_ENVIRONMENTS:
            raise argparse.ArgumentTypeError(
                f'Invalid Environment specified: "{s}".')
        return s

    def format_type(s):
        """Validate that s is one of the accepted Format values."""
        if s not in _VALID_FORMATS:
            raise argparse.ArgumentTypeError(f'Invalid Format specified: "{s}".')
        return s

    def platform_type(s):
        """Validate that s is one of the accepted Platform values."""
        if s not in _VALID_PLATFORMS:
            raise argparse.ArgumentTypeError(f'Invalid Platform specified: "{s}".')
        return s

    def valid_date(s):
        """Parse a YYYYMMDD string into a datetime.date."""
        try:
            return datetime.strptime(s, _DATE_FORMAT).date()
        except ValueError:
            raise argparse.ArgumentTypeError(f'Invalid date specified: "{s}".')

    parser = argparse.ArgumentParser(
        description=('Creates a bidder-level filter set with the specified '
                     'options.'))
    # Required fields.
    parser.add_argument(
        '-b', '--bidder_resource_id', default=DEFAULT_BIDDER_RESOURCE_ID,
        help=('The resource ID of the bidders resource for which the filter set '
              'is being created. This will be used to construct the ownerName '
              'used as a path parameter for filter set requests. For additional '
              'information on how to configure the ownerName path parameter, '
              'see: https://developers.google.com/authorized-buyers/apis/'
              'reference/rest/v2beta1/bidders.filterSets/create'
              '#body.PATH_PARAMETERS.owner_name'))
    parser.add_argument(
        '-r', '--resource_id', default=DEFAULT_FILTER_SET_RESOURCE_ID,
        help=('The resource ID of the filter set. Note that this must be '
              'unique. This will be used to construct the filter set\'s name. '
              'For additional information on how to configure a filter set\'s '
              'name, see: https://developers.google.com/authorized-buyers/apis/'
              'reference/rest/v2beta1/bidders.filterSets#FilterSet.FIELDS.name'))
    parser.add_argument(
        '--end_date', default=DEFAULT_END_DATE, type=valid_date,
        help=('The end date for the filter set\'s absoluteDateRange field, which '
              'will be accepted in this example in YYYYMMDD format.'))
    parser.add_argument(
        '--start_date', default=DEFAULT_START_DATE, type=valid_date,
        help=('The start date for the filter set\'s time_range field, which '
              'will be accepted in this example in YYYYMMDD format.'))
    # Optional fields.
    parser.add_argument(
        '-e', '--environment', required=False,
        type=environment_type,
        help=('The environment on which to filter.'))
    parser.add_argument(
        '-f', '--format', required=False,
        type=format_type,
        help=('The format on which to filter.'))
    parser.add_argument(
        '-p', '--platforms', required=False, nargs='*', type=platform_type,
        help=('The platforms on which to filter. The filters represented by '
              'multiple platforms are ORed together. Note that you may specify '
              'more than one using a space as a delimiter.'))
    parser.add_argument(
        '-s', '--seller_network_ids', required=False, nargs='*', type=int,
        help=('The list of IDs for seller networks on which to filter. The '
              'filters represented by multiple seller network IDs are ORed '
              'together. Note that you may specify more than one using a space '
              'as a delimiter.'))
    parser.add_argument(
        '-t', '--time_series_granularity', required=False,
        type=time_series_granularity_type,
        help=('The granularity of time intervals if a time series breakdown is '
              'desired.'))
    parser.add_argument(
        '--is_transient', required=False, default=True, type=bool,
        help=('Whether the filter set is transient, or should be persisted '
              'indefinitely. In this example, this will default to True.'))

    args = parser.parse_args()

    # Build the time_range as an AbsoluteDateRange.
    time_range = {
        'startDate': {
            'year': args.start_date.year,
            'month': args.start_date.month,
            'day': args.start_date.day
        },
        'endDate': {
            'year': args.end_date.year,
            'month': args.end_date.month,
            'day': args.end_date.day
        }
    }

    # Create a body containing the required fields.
    BODY = {
        'name': _FILTER_SET_NAME_TEMPLATE.format(
            bidders_resource_id=args.bidder_resource_id,
            filtersets_resource_id=args.resource_id),
        # Note: You may alternatively specify relativeDateRange or
        # realtimeTimeRange.
        'absoluteDateRange': time_range
    }

    # Add optional fields to body if specified.
    if args.environment:
        BODY['environment'] = args.environment
    if args.format:
        BODY['format'] = args.format
    if args.platforms:
        BODY['platforms'] = args.platforms
    if args.seller_network_ids:
        BODY['sellerNetworkIds'] = args.seller_network_ids
    if args.time_series_granularity:
        BODY['timeSeriesGranularity'] = args.time_series_granularity

    try:
        service = samples_util.GetService('v2beta1')
    except IOError as ex:
        print(f'Unable to create adexchangebuyer service - {ex}')
        print('Did you specify the key file in samples_util.py?')
        sys.exit(1)

    main(service, _OWNER_NAME_TEMPLATE.format(
             bidders_resource_id=args.bidder_resource_id),
         BODY, args.is_transient)
|
PuffinWare/pistat | com/puffinware/pistat/db.py | Python | gpl-3.0 | 767 | 0.019557 | """
Copyright 2016 Puffin Software. All rights reserved.
"""
from com.puffinware.pistat.models import User, Location, Category, Thermostat, Sensor, Reading
from com.puffinware.pist | at import DB
from logging import getLogger
log = getLogger(__name__)
def setup_db(app):
  """Create the model tables and register per-request connection hooks on app.

  `app` is presumably a Flask application object -- TODO confirm; the
  before_request/teardown_request decorators match Flask's API.
  """
  DB.create_tables([User, Location, Category, Thermostat, Sensor, Reading], safe=True)
  # This hook ensures that a connection is opened to handle any queries
  # generated by the request.
  @app.before_request
  def _db_connect():
    log.debug('DB Connect')
    DB.connect()
  # This hook ensures that the connection is closed when we've finished
  # processing the request.
  @app.teardown_request
  def _db_close(exc):
    if not DB.is_closed():
      log.debug('DB Close')
      DB.close()
SINGROUP/pycp2k | pycp2k/classes/_screening3.py | Python | lgpl-3.0 | 306 | 0.003268 | from | pycp2k.inputsection import InputSection
class _screening3(InputSection):
    """CP2K input section ``&SCREENING`` exposing RC_TAPER and RC_RANGE.

    Attribute-to-keyword mapping is declared in ``_keywords``; presumably
    consumed by InputSection's input generator -- TODO confirm.
    """
    def __init__(self):
        InputSection.__init__(self)
        # Keyword values; None means "not set / not written to the input".
        self.Rc_taper = None
        self.Rc_range = None
        self._name = "SCREENING"
        self._keywords = {'Rc_range': 'RC_RANGE', 'Rc_taper': 'RC_TAPER'}
tboyce1/home-assistant | homeassistant/components/tado/entity.py | Python | apache-2.0 | 1,664 | 0 | """Base class for Tado entity."""
from homeassistant.helpers.entity import Entity
from .const import DEFAULT_NAME, DOMAIN, TADO_ZONE
class TadoDeviceEntity(Entity):
    """Base implementation for Tado device."""

    def __init__(self, device_info):
        """Initialize a Tado device.

        device_info is a dict from the Tado API; this class reads the
        shortSerialNo, serialNo, currentFwVersion, and deviceType keys.
        """
        super().__init__()
        self._device_info = device_info
        # The short serial doubles as the human-readable device name.
        self.device_name = device_info["shortSerialNo"]
        self.device_id = device_info["serialNo"]

    @property
    def device_info(self):
        """Return the device_info of the device."""
        return {
            "identifiers": {(DOMAIN, self.device_id)},
            "name": self.device_name,
            "manufacturer": DEFAULT_NAME,
            "sw_version": self._device_info["currentFwVersion"],
            "model": self._device_info["deviceType"],
            "via_device": (DOMAIN, self._device_info["serialNo"]),
        }

    @property
    def should_poll(self):
        """Do not poll."""
        return False
class TadoZoneEntity(Entity):
    """Base implementation for Tado zone."""

    def __init__(self, zone_name, home_id, zone_id):
        """Initialize a Tado zone."""
        super().__init__()
        # Combined key keeps zone ids unique across multiple Tado homes.
        self._device_zone_id = f"{home_id}_{zone_id}"
        self.zone_name = zone_name

    @property
    def device_info(self):
        """Return the device_info of the device."""
        return {
            "identifiers": {(DOMAIN, self._device_zone_id)},
            "name": self.zone_name,
            "manufacturer": DEFAULT_NAME,
            "model": TADO_ZONE,
        }

    @property
    def should_poll(self):
        """Do not poll."""
        return False
|
mongodb-labs/disasm | app/disasm_demangler.py | Python | apache-2.0 | 654 | 0.001529 | # Copyright 2016 MongoDB Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance | with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See | the License for the specific language governing permissions and
# limitations under the License.
# Prefer the pure-Python demangler; fall back to the c++filt-based one.
# BUG FIX: the bare "except:" also swallowed SystemExit/KeyboardInterrupt and
# any real error raised while importing demangler; catch ImportError only.
try:
    from demangler import demangle
except ImportError:
    from cppfilt import demangle
joopert/home-assistant | tests/components/nsw_rural_fire_service_feed/test_geo_location.py | Python | apache-2.0 | 8,960 | 0.00067 | """The tests for the NSW Rural Fire Service Feeds platform."""
import datetime
from unittest.mock import ANY
from aio_geojson_nsw_rfs_incidents import NswRuralFireServiceIncidentsFeed
from asynctest.mock import patch, MagicMock, call
from homeassistant.components import geo_location
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.components.nsw_rural_fire_service_feed.geo_location import (
ATTR_EXTERNAL_ID,
SCAN_INTERVAL,
ATTR_CATEGORY,
ATTR_FIRE,
ATTR_LOCATION,
ATTR_COUNCIL_AREA,
ATTR_STATUS,
ATTR_TYPE,
ATTR_SIZE,
ATTR_RESPONSIBLE_AGENCY,
ATTR_PUBLICATION_DATE,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.setup import async_setup_component
from tests.common import assert_setup_component, async_fire_time_changed
import homeassistant.util.dt as dt_util
# Platform config using the default (home) location with a 200 km radius.
CONFIG = {
    geo_location.DOMAIN: [{"platform": "nsw_rural_fire_service_feed", CONF_RADIUS: 200}]
}

# Same platform config, but centred on an explicit latitude/longitude.
CONFIG_WITH_CUSTOM_LOCATION = {
    geo_location.DOMAIN: [
        {
            "platform": "nsw_rural_fire_service_feed",
            CONF_RADIUS: 200,
            CONF_LATITUDE: 15.1,
            CONF_LONGITUDE: 25.2,
        }
    ]
}
def _generate_mock_feed_entry(
external_id,
title,
distance_to_home,
coordinates,
category=None,
location=None,
attribution=None,
publication_date=None,
council_area=None,
status=None,
entry_type=None,
fire=True,
size=None,
responsible_agency=None,
):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
feed_entry.category = category
feed_entry.location = location
feed_entry.attribution = attribution
feed_entry.publication_date = publication_date
feed_entry.council_area = council_area
feed_entry.status = status
feed_entry.type = entry_type
feed_entry.fire = fire
feed_entry.size = size
feed_entry.responsible_agency = responsible_agency
return feed_entry
async def test_setup(hass):
    """Test the general setup of the platform."""
    # Set up some mock feed entries for this test.
    mock_entry_1 = _generate_mock_feed_entry(
        "1234",
        "Title 1",
        15.5,
        (-31.0, 150.0),
        category="Category 1",
        location="Location 1",
        attribution="Attribution 1",
        publication_date=datetime.datetime(
            2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
        ),
        council_area="Council Area 1",
        status="Status 1",
        entry_type="Type 1",
        size="Size 1",
        responsible_agency="Agency 1",
    )
    mock_entry_2 = _generate_mock_feed_entry(
        "2345", "Title 2", 20.5, (-31.1, 150.1), fire=False
    )
    mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (-31.2, 150.2))
    mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (-31.3, 150.3))

    # Patching 'utcnow' to gain more control over the timed update.
    utcnow = dt_util.utcnow()
    with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
        "aio_geojson_client.feed.GeoJsonFeed.update"
    ) as mock_feed_update:
        mock_feed_update.return_value = (
            "OK",
            [mock_entry_1, mock_entry_2, mock_entry_3],
        )
        with assert_setup_component(1, geo_location.DOMAIN):
            assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
            # Artificially trigger update.
            hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
            # Collect events.
            await hass.async_block_till_done()

            all_states = hass.states.async_all()
            assert len(all_states) == 3

            # Entry 1: all attributes populated, fire icon.
            state = hass.states.get("geo_location.title_1")
            assert state is not None
            assert state.name == "Title 1"
            assert state.attributes == {
                ATTR_EXTERNAL_ID: "1234",
                ATTR_LATITUDE: -31.0,
                ATTR_LONGITUDE: 150.0,
                ATTR_FRIENDLY_NAME: "Title 1",
                ATTR_CATEGORY: "Category 1",
                ATTR_LOCATION: "Location 1",
                ATTR_ATTRIBUTION: "Attribution 1",
                ATTR_PUBLICATION_DATE: datetime.datetime(
                    2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
                ),
                ATTR_FIRE: True,
                ATTR_COUNCIL_AREA: "Council Area 1",
                ATTR_STATUS: "Status 1",
                ATTR_TYPE: "Type 1",
                ATTR_SIZE: "Size 1",
                ATTR_RESPONSIBLE_AGENCY: "Agency 1",
                ATTR_UNIT_OF_MEASUREMENT: "km",
                ATTR_SOURCE: "nsw_rural_fire_service_feed",
                ATTR_ICON: "mdi:fire",
            }
            assert round(abs(float(state.state) - 15.5), 7) == 0

            # Entry 2: non-fire entry gets the alarm-light icon.
            state = hass.states.get("geo_location.title_2")
            assert state is not None
            assert state.name == "Title 2"
            assert state.attributes == {
                ATTR_EXTERNAL_ID: "2345",
                ATTR_LATITUDE: -31.1,
                ATTR_LONGITUDE: 150.1,
                ATTR_FRIENDLY_NAME: "Title 2",
                ATTR_FIRE: False,
                ATTR_UNIT_OF_MEASUREMENT: "km",
                ATTR_SOURCE: "nsw_rural_fire_service_feed",
                ATTR_ICON: "mdi:alarm-light",
            }
            assert round(abs(float(state.state) - 20.5), 7) == 0

            state = hass.states.get("geo_location.title_3")
            assert state is not None
            assert state.name == "Title 3"
            assert state.attributes == {
                ATTR_EXTERNAL_ID: "3456",
                ATTR_LATITUDE: -31.2,
                ATTR_LONGITUDE: 150.2,
                ATTR_FRIENDLY_NAME: "Title 3",
                ATTR_FIRE: True,
                ATTR_UNIT_OF_MEASUREMENT: "km",
                ATTR_SOURCE: "nsw_rural_fire_service_feed",
                ATTR_ICON: "mdi:fire",
            }
            assert round(abs(float(state.state) - 25.5), 7) == 0

            # Simulate an update - one existing, one new entry,
            # one outdated entry
            mock_feed_update.return_value = (
                "OK",
                [mock_entry_1, mock_entry_4, mock_entry_3],
            )
            async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
            await hass.async_block_till_done()

            all_states = hass.states.async_all()
            assert len(all_states) == 3

            # Simulate an update - empty data, but successful update,
            # so no changes to entities.
            mock_feed_update.return_value = "OK_NO_DATA", None
            async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
            await hass.async_block_till_done()

            all_states = hass.states.async_all()
            assert len(all_states) == 3

            # Simulate an update - empty data, removes all entities
            mock_feed_update.return_value = "ERROR", None
            async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
            await hass.async_block_till_done()

            all_states = hass.states.async_all()
            assert len(all_states) == 0

            # Artificially trigger update.
            hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
            # Collect events.
            await hass.async_block_till_done()
async def test_setup_with_custom_location(hass):
"""Test the setup with a custom location."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 20.5, (-31.1, 150.1))
with patch(
"aio_geojson_nsw_rfs_incidents.feed_manager.NswRuralFireServiceIncidentsFeed",
wraps=NswRuralFireServiceIncidentsFeed,
) as |
Cisco-Talos/pyrebox | mw_monitor2/ida_scripts/mw_monitor_binary_log_reader.py | Python | gpl-2.0 | 1,419 | 0.001409 | # -------------------------------------------------------------------------
#
# Copyright (C) 2018 Cisco Talos Security Intelligence and Research Group
#
# PyREBox: Python scriptable Reverse Engineering Sandbox
# Author: Xabier Ugarte-Pedrero
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is dis | tributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURP | OSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# -------------------------------------------------------------------------
#!/usr/bin/python
import pickle
import sys
def read_stream(f_in):
    """Unpickle one object from f_in and print each contained process.

    SECURITY NOTE: pickle.load() executes arbitrary code embedded in the
    input; only use this on trusted mw_monitor log files.
    """
    data = pickle.load(f_in)
    if type(data) is list:
        # The pickle may be a bare list of process objects...
        for proc in data:
            print proc.__str__()
    else:
        # ...or a container object exposing the processes via .procs.
        for proc in data.procs:
            print proc.__str__()
if __name__ == "__main__":
if len(sys.argv) != 2:
print "Usage: %s <file>" % (sys.argv[0])
with open(sys.argv[1], "rb") as f:
read_stream(f)
|
citrix-openstack-build/neutron-fwaas | neutron_fwaas/openstack/common/eventlet_backdoor.py | Python | apache-2.0 | 4,924 | 0 | # Copyright (c) 2012 OpenStack Foundation.
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import copy
import errno
import gc
import os
import pprint
import socket
import sys
import traceback
import eventlet
import eventlet.backdoor
import greenlet
from oslo.config import cfg
from neutron_fwaas.openstack.common._i18n import _LI
from neutron_fwaas.openstack.common import log as logging
help_for_backdoor_port = (
"Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
"in listening on a random tcp port number; <port> results in listening "
"on the specified port number (and not enabling backdoor if that port "
"is in use); and <start>:<end> results in listening on the smallest "
"unused port number within the specified range of port numbers. The "
"chosen port is displayed in the service's log file.")
eventlet_backdoor_opts = [
cfg.StrOpt('backdoor_port',
help="Enable eventlet backdoor. %s" % help_for_backdoor_port)
]
CONF = cfg.CONF
CONF.register_opts(eventlet_backdoor_opts)
LOG = logging.getLogger(__name__)
def list_opts():
    """Entry point for oslo.config-generator.

    Returns a list of (group, options) tuples; options are deep-copied so
    callers cannot mutate the module-level defaults.
    """
    return [(None, copy.deepcopy(eventlet_backdoor_opts))]
class EventletBackdoorConfigValueError(Exception):
    """Raised when the backdoor_port option cannot be parsed."""

    def __init__(self, port_range, help_msg, ex):
        message = (
            'Invalid backdoor_port configuration %(range)s: %(ex)s. %(help)s'
            % {'range': port_range, 'ex': ex, 'help': help_msg}
        )
        super(EventletBackdoorConfigValueError, self).__init__(message)
        # Keep the offending value around for callers that want to report it.
        self.port_range = port_range
def _dont_use_this():
    # Bound to exit/quit in the backdoor shell so an interactive session
    # cannot accidentally terminate the whole service process.
    print("Don't use this, just disconnect instead")
def _find_objects(t):
return [o for o in gc.get_objects() if isinstance(o, t)]
def _print_greenthreads():
    """Print every live greenlet with its index and current stack."""
    for i, gt in enumerate(_find_objects(greenlet.greenlet)):
        print(i, gt)
        traceback.print_stack(gt.gr_frame)
        print()
def _print_nativethreads():
    """Print every native thread id with its current stack."""
    for threadId, stack in sys._current_frames().items():
        print(threadId)
        traceback.print_stack(stack)
        print()
def _parse_port_range(port_range):
if ':' not in port_range:
start, end = port_range, port_range
else:
start, end = port_range.split(':', 1)
try:
start, end = int(start), int(end)
if end < start:
raise ValueError
return start, end
except ValueError as ex:
raise EventletBackdoorConfigValueError(port_range, ex,
help_for_backdoor_port)
def _listen(host, start_port, end_port, listen_func):
try_port = start_port
while True:
try:
return listen_func((host, try_port))
except socket.error as exc:
if (exc.errno != errno.EADDRINUSE or
try_port >= end_port):
raise
try_port += 1
def initialize_if_enabled():
    """Start the eventlet backdoor server when backdoor_port is configured.

    Returns the TCP port the backdoor listens on, or None when disabled.
    """
    backdoor_locals = {
        'exit': _dont_use_this,      # So we don't exit the entire process
        'quit': _dont_use_this,      # So we don't exit the entire process
        'fo': _find_objects,
        'pgt': _print_greenthreads,
        'pnt': _print_nativethreads,
    }

    if CONF.backdoor_port is None:
        return None

    start_port, end_port = _parse_port_range(str(CONF.backdoor_port))

    # NOTE(johannes): The standard sys.displayhook will print the value of
    # the last expression and set it to __builtin__._, which overwrites
    # the __builtin__._ that gettext sets. Let's switch to using pprint
    # since it won't interact poorly with gettext, and it's easier to
    # read the output too.
    def displayhook(val):
        if val is not None:
            pprint.pprint(val)
    sys.displayhook = displayhook

    sock = _listen('localhost', start_port, end_port, eventlet.listen)

    # In the case of backdoor port being zero, a port number is assigned by
    # listen(). In any case, pull the port number out here.
    port = sock.getsockname()[1]
    LOG.info(
        _LI('Eventlet backdoor listening on %(port)s for process %(pid)d') %
        {'port': port, 'pid': os.getpid()}
    )
    eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
                     locals=backdoor_locals)
    return port
|
miaerbus/timebank | user/models.py | Python | agpl-3.0 | 4,968 | 0.017519 | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Eduardo Robles Elvira <edulix AT gmail DOT com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.models import User, UserManager
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.db.models import signals, Avg, Q
from datetime import date
import os
from django.conf import settings
def create_profile_for_user(sender, **kwargs):
    '''
    Create a Profile row whenever a new User is saved.

    Intended as a receiver for the User post_save signal; only acts when
    the save actually created the row (kwargs['created'] is True).  Fields
    missing from the new instance get sensible defaults before the copy.
    '''
    if kwargs['created']:
        profile = Profile()
        instance_dict = kwargs['instance'].__dict__
        # dict.has_key() is deprecated (and removed in Python 3); the "in"
        # operator is the equivalent membership test in both Python 2 and 3.
        if "birth_date" not in instance_dict:
            profile.birth_date = date.today()
        if "address" not in instance_dict:
            profile.address = _("address")
        profile.__dict__.update(instance_dict)
        profile.save()
#signals.post_save.connect(create_profile_for_user, sender=User)
class Profile(User):
    '''
    User with timebank settings.

    Extends django.contrib.auth User with organisation/contact data, a
    credit balance stored in minutes, and notification preferences.
    '''
    # NOTE(review): this class contained unresolved merge-conflict markers
    # (<<<<<<< / ======= / >>>>>>>) which made the module unimportable; the
    # HEAD (timebank, Slovenian-label) side of each conflict was kept.
    photo = models.ImageField(_("Avatar"), blank=True, null=True,
        upload_to=os.path.join(settings.STATIC_DOC_ROOT, "photos"))
    # Pass the callable itself (not date.today()) so the default is
    # evaluated each time a row is saved, not once at import time.
    birth_date = models.DateField(_("Rojstni datum"), default=date.today)
    address = models.CharField(_("Naslov"), max_length=100, default=_("address"))
    org_name = models.CharField(_("Ime organizacije"), max_length=30, default=_("org_name"))
    first_name1 = models.CharField(_("Ime zastopnika"), max_length=30, default=_("first_name"))
    last_name1 = models.CharField(_("Priimek zastopnika"), max_length=30, default=_("last_name"))
    email1 = models.CharField(_("E-mail zastopnika"), max_length=30, default=_("email"))
    # credits in minutes
    balance = models.IntegerField(default=600)

    def balance_hours(self):
        """Return the balance in hours; an int when it divides evenly."""
        if self.balance % 60 == 0:
            return self.balance / 60
        return self.balance / 60.0

    description = models.TextField(_("Opis"), max_length=300,
        blank=True)
    land_line = models.CharField(_("Stacionarni telefon"), max_length=20)
    mobile_tlf = models.CharField(_("Mobilni telefon"), max_length=20)
    email_updates = models.BooleanField(_(u"Želim prejemati novice Časovne banke"),
        default=True)
    # Saving the user language allows sending emails to him in his desired
    # language (among other things)
    lang_code = models.CharField(_("Jezik"), max_length=10, default='')

    class Meta:
        verbose_name = _("user")
        verbose_name_plural = _("users")

    def __unicode__(self):
        return self.username

    # Use UserManager to get the create_user method, etc.
    objects = UserManager()

    def __eq__(self, value):
        return value and self.id == value.id or False

    def transfers_pending(self):
        '''
        Transfers from this user which are not in a final state
        '''
        from serv.models import Transfer
        # NOTE(review): both Q objects filter on credits_payee, so the OR is
        # redundant; one side was probably meant to be the payer -- confirm
        # against serv.models.Transfer before changing.
        return Transfer.objects.filter(Q(credits_payee=self) \
            | Q(credits_payee=self)).filter(status__in=['r', 'd'])

    def karma(self):
        '''
        Average of the user's transfer scores
        '''
        karma = self.transfers_received.aggregate(Avg('rating_score'))
        if karma['rating_score__avg']:
            return int(karma['rating_score__avg'])
        else:
            return 0
|
xbmc/atv2 | xbmc/lib/libPython/Python/Lib/test/test_dummy_thread.py | Python | gpl-2.0 | 7,139 | 0.005043 | """Generic thread tests.
Meant to be used by dummy_thread and thread. To allow for different modules
to be used, test_main() can be called with the module to use as the thread
implementation as its sole argument.
"""
import dummy_thread as _thread
import time
import Queue
import random
import unittest
from test import test_support
DELAY = 0 # Set > 0 when testing a module other than dummy_thread, such as
# the 'thread' module.
class LockTests(unittest.TestCase):
    """Test lock objects returned by the thread module under test."""
    def setUp(self):
        # Create a fresh lock for each test
        self.lock = _thread.allocate_lock()
    def test_initlock(self):
        # Make sure locks start out *unlocked*
        self.failUnless(not self.lock.locked(),
                        "Lock object is not initialized unlocked.")
    def test_release(self):
        # Test self.lock.release() after an acquire
        self.lock.acquire()
        self.lock.release()
        self.failUnless(not self.lock.locked(),
                        "Lock object did not release properly.")
    def test_improper_release(self):
        # Make sure releasing an unlocked lock raises _thread.error
        self.failUnlessRaises(_thread.error, self.lock.release)
    def test_cond_acquire_success(self):
        # Make sure the conditional (non-blocking) acquiring of the lock works.
        self.failUnless(self.lock.acquire(0),
                        "Conditional acquiring of the lock failed.")
    def test_cond_acquire_fail(self):
        # Test that conditionally acquiring an already-locked lock returns False
        self.lock.acquire(0)
        self.failUnless(not self.lock.acquire(0),
                        "Conditional acquiring of a locked lock incorrectly "
                        "succeeded.")
    def test_uncond_acquire_success(self):
        # Make sure unconditional acquiring of a lock works.
        self.lock.acquire()
        self.failUnless(self.lock.locked(),
                        "Uncondional locking failed.")
    def test_uncond_acquire_return_val(self):
        # Make sure that an unconditional locking returns True.
        self.failUnless(self.lock.acquire(1) is True,
                        "Unconditional locking did not return True.")
    def test_uncond_acquire_blocking(self):
        # Make sure that unconditional acquiring of a locked lock blocks
        # until the holder (a second thread) releases it.
        def delay_unlock(to_unlock, delay):
            """Hold on to lock for a set amount of time before unlocking."""
            time.sleep(delay)
            to_unlock.release()
        self.lock.acquire()
        start_time = int(time.time())
        _thread.start_new_thread(delay_unlock,(self.lock, DELAY))
        if test_support.verbose:
            print
            print "*** Waiting for thread to release the lock "\
                  "(approx. %s sec.) ***" % DELAY
        self.lock.acquire()
        end_time = int(time.time())
        if test_support.verbose:
            print "done"
        # The second acquire must have waited at least DELAY seconds.
        self.failUnless((end_time - start_time) >= DELAY,
                        "Blocking by unconditional acquiring failed.")
class MiscTests(unittest.TestCase):
    """Miscellaneous tests: exit, ident, LockType and interrupt_main."""
    def test_exit(self):
        # Make sure _thread.exit() raises SystemExit
        self.failUnlessRaises(SystemExit, _thread.exit)
    def test_ident(self):
        # Test sanity of _thread.get_ident(): a non-zero integer
        self.failUnless(isinstance(_thread.get_ident(), int),
                        "_thread.get_ident() returned a non-integer")
        self.failUnless(_thread.get_ident() != 0,
                        "_thread.get_ident() returned 0")
    def test_LockType(self):
        # Make sure _thread.LockType is the same type as what
        # _thread.allocate_lock() returns
        self.failUnless(isinstance(_thread.allocate_lock(), _thread.LockType),
                        "_thread.LockType is not an instance of what is "
                        "returned by _thread.allocate_lock()")
    def test_interrupt_main(self):
        # Calling start_new_thread with a function that executes interrupt_main
        # should raise KeyboardInterrupt upon completion.
        def call_interrupt():
            _thread.interrupt_main()
        self.failUnlessRaises(KeyboardInterrupt, _thread.start_new_thread,
                              call_interrupt, tuple())
    def test_interrupt_in_main(self):
        # Make sure that if interrupt_main is called in the main thread,
        # KeyboardInterrupt is raised instantly.
        self.failUnlessRaises(KeyboardInterrupt, _thread.interrupt_main)
class ThreadTests(unittest.TestCase):
    """Test thread creation and argument passing."""
    def test_arg_passing(self):
        # Make sure that parameter passing works for positional args,
        # keyword args, and a mix of both.
        def arg_tester(queue, arg1=False, arg2=False):
            """Use to test _thread.start_new_thread() passes args properly."""
            queue.put((arg1, arg2))
        testing_queue = Queue.Queue(1)
        _thread.start_new_thread(arg_tester, (testing_queue, True, True))
        result = testing_queue.get()
        self.failUnless(result[0] and result[1],
                        "Argument passing for thread creation using tuple failed")
        _thread.start_new_thread(arg_tester, tuple(), {'queue':testing_queue,
                                                       'arg1':True, 'arg2':True})
        result = testing_queue.get()
        self.failUnless(result[0] and result[1],
                        "Argument passing for thread creation using kwargs failed")
        _thread.start_new_thread(arg_tester, (testing_queue, True), {'arg2':True})
        result = testing_queue.get()
        self.failUnless(result[0] and result[1],
                        "Argument passing for thread creation using both tuple"
                        " and kwargs failed")
    def test_multi_creation(self):
        # Make sure multiple threads can be created; each worker drops its
        # thread id into the queue after a short (possibly zero) delay.
        def queue_mark(queue, delay):
            """Wait for ``delay`` seconds and then put something into ``queue``"""
            time.sleep(delay)
            queue.put(_thread.get_ident())
        thread_count = 5
        testing_queue = Queue.Queue(thread_count)
        if test_support.verbose:
            print
            print "*** Testing multiple thread creation "\
                  "(will take approx. %s to %s sec.) ***" % (DELAY, thread_count)
        for count in xrange(thread_count):
            if DELAY:
                local_delay = round(random.random(), 1)
            else:
                local_delay = 0
            _thread.start_new_thread(queue_mark,
                                     (testing_queue, local_delay))
        time.sleep(DELAY)
        if test_support.verbose:
            print 'done'
        self.failUnless(testing_queue.qsize() == thread_count,
                        "Not all %s threads executed properly after %s sec." %
                        (thread_count, DELAY))
def test_main(imported_module=None):
    """Run the lock/misc/thread test suites.

    When *imported_module* is given it replaces the dummy_thread module
    under test (e.g. the real ``thread`` module) and a non-zero DELAY is
    used so genuinely concurrent behaviour can be observed.
    """
    global _thread, DELAY
    if imported_module:
        _thread = imported_module
        DELAY = 2
    if test_support.verbose:
        print
        print "*** Using %s as _thread module ***" % _thread
    test_support.run_unittest(LockTests, MiscTests, ThreadTests)

if __name__ == '__main__':
    test_main()
|
YangLiu928/NDP_Projects | Python_Projects/Python_MySQL/compare_FTD_code/scripts/merchandise_trade_exports.py | Python | mit | 1,458 | 0.009602 | import process_data
# process_data.process_concord(data_folder, output_folder)
# process_data.process_country(data_folder, output_folder)
# process_data.process_district(data_folder, output_folder)
# process_data.process_enduse(data_folder, output_folder)
# process_data.process_exp_comm(data_folder, output_folder)
# process_data.process_exp_cty(data_folder, output_folder)
# process_data.process_exp_detl(data_folder, output_folder)
# process_data.process_exp_dist(data_folder, output_folder)
# process_data.process_hitech(data_folder, output_folder)
# process_data.process_naics(data_folder, output_folder)
# process_data.process_sitc(data_folder, output_folder)
folders = ['2009_import','2009_export','2015_import','2015_export']
for folder in folders:
data_folder = '../data/{0}/'.format(folder)
output_folder = '../output/{0}/'.format(folder)
process_data.process_hsdesc(data_folder, output_folder)
results = []
for folder in folders:
output_folder = '../outpu | t/{0}/'.format(folder)
file = open(output_folder + 'HSDESC.js')
results.append(file.read())
# the following means, the export and import are same in the | same year
print results[0]==results[1] and results[2]==results[3]
file2009 = open('../output/{0}/'.format(results[0]))
file2015 = open('../output/{0}/'.format(results[2]))
line1 = ''
line2 = ''
while (line1==line2):
line1 = file2009.readline()
line2 = file2015.readline()
print line1
print line2
print 'work completed'
|
mxamin/youtube-dl | youtube_dl/extractor/playwire.py | Python | unlicense | 2,407 | 0.001662 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
dict_get,
float_or_none,
)
class PlaywireIE(InfoExtractor):
    """Extractor for videos hosted via the Playwire CDN/config service."""
    _VALID_URL = r'https?://(?:config|cdn)\.playwire\.com(?:/v2)?/(?P<publisher_id>\d+)/(?:videos/v2|embed|config)/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://config.playwire.com/14907/videos/v2/3353705/player.json',
        'md5': 'e6398701e3595888125729eaa2329ed9',
        'info_dict': {
            'id': '3353705',
            'ext': 'mp4',
            'title': 'S04_RM_UCL_Rus',
            'thumbnail': 're:^https?://.*\.png$',
            'duration': 145.94,
        },
    }, {
        # m3u8 in f4m
        'url': 'http://config.playwire.com/21772/videos/v2/4840492/zeus.json',
        'info_dict': {
            'id': '4840492',
            'ext': 'mp4',
            'title': 'ITV EL SHOW FULL',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }, {
        # Multiple resolutions while bitrates missing
        'url': 'http://cdn.playwire.com/11625/embed/85228.html',
        'only_matching': True,
    }, {
        'url': 'http://config.playwire.com/12421/videos/v2/3389892/zeus.json',
        'only_matching': True,
    }, {
        'url': 'http://cdn.playwire.com/v2/12342/config/1532636.json',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        publisher_id = mobj.group('publisher_id')
        video_id = mobj.group('id')

        # The zeus.json config describes the player settings and media.
        player_data = self._download_json(
            'http://config.playwire.com/%s/videos/v2/%s/zeus.json'
            % (publisher_id, video_id), video_id)

        title = player_data['settings']['title']
        duration = float_or_none(player_data.get('duration'), 1000)

        content = player_data['content']
        formats = self._extract_f4m_formats(
            content['media']['f4m'], video_id, m3u8_id='hls')
        for fmt in formats:
            # When neither bitrate nor resolution is known, rank the "-hd"
            # variant above the rest so sorting still prefers it.
            if not dict_get(fmt, ['tbr', 'width', 'height']):
                fmt['quality'] = 1 if '-hd.' in fmt['url'] else 0
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'thumbnail': content.get('poster'),
            'duration': duration,
            'formats': formats,
        }
|
SecondLiners/GO2 | tests/sort_test.py | Python | gpl-3.0 | 3,787 | 0.003433 | import unittest
import webapp2
import main
import band
import gig
import datetime
from google.appengine.ext import testbed
class GigSortTestCase(unittest.TestCase):
    """Verifies that same-day gigs are ordered by call time / set time."""

    TEST_BAND = 'Wild Rumpus'

    # Needed to get webapp2 to use a WSGIApplication application instance,
    # which is easier to configure in setUp().
    webapp2._local = None

    def setUp(self):
        # Stand up the App Engine service stubs the models depend on.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        # Need to set up a global application and Request instance for
        # the i18n module to work. There's probably a more elegant way
        # of doing this.
        # (These two lines were garbled by an extraction artifact and have
        # been reconstructed: request_stub.user / request_stub.app.)
        self.request_stub = webapp2.Request.blank("/")
        self.request_stub.user = None
        self.request_stub.app = main.APPLICATION
        webapp2.WSGIApplication.app = main.APPLICATION
        webapp2.get_app().set_globals(main.APPLICATION, self.request_stub)

    def tearDown(self):
        self.testbed.deactivate()

    def assertNotEmpty(self, obj):
        self.assertTrue(obj is not None and len(obj) > 0)

    def assertEmpty(self, obj):
        self.assertTrue(obj is not None and len(obj) == 0)

    def _create_test_band(self):
        """Create and return the test band, failing the test on error."""
        the_band = band.new_band(self.TEST_BAND)
        self.assertIsNotNone(the_band, "did not create band")
        return (the_band)

    def _create_test_gig(self, band, name):
        """Create and return a gig named *name* for *band*."""
        the_gig = gig.new_gig(band, name, None)
        self.assertIsNotNone(the_gig, "did not create gig " + name)
        return (the_gig)

    def assertGigOrder(self, band, total, first):
        """Assert the band has *total* gigs and that *first* sorts first."""
        all_gigs = gig.get_sorted_gigs_from_band_keys([band.key])
        self.assertEqual(len(all_gigs), total, "did not get {0} gigs".format(total))
        self.assertTrue(all_gigs[0].title == first, "'{0}' did not come first".format(first))

    def test_gig_sort(self):
        the_band = self._create_test_band()
        gigA = self._create_test_gig(the_band, "A")
        gigA.date = datetime.datetime(2020, 8, 16)
        gigA.put()
        gigB = self._create_test_gig(the_band, "B")
        gigB.date = datetime.datetime(2020, 8, 16)
        gigB.put()
        # set gig A to come before gig B
        gigA.set_calltime("2:00am")
        gigA.put()
        gigB.set_calltime("3:00am")
        gigB.put()
        self.assertGigOrder(the_band, total=2, first="A")
        # now make the calltime later for gigA and show that gigB is first
        gigA.set_calltime("4:00am")
        gigA.put()
        self.assertGigOrder(the_band, total=2, first="B")
        # now make the calltime later for gigB again
        gigB.set_calltime("5:00am")
        gigB.put()
        self.assertGigOrder(the_band, total=2, first="A")
        # check am/pm issues
        gigA.set_calltime("12:00pm")
        gigA.set_settime(None)
        gigA.put()
        gigB.set_calltime("1:00pm")
        gigB.set_settime(None)
        gigB.put()
        self.assertGigOrder(the_band, total=2, first="A")
        # check "none" call times: a missing call time falls back to set time
        gigA.set_calltime(None)
        gigA.set_settime("1:00 pm")
        gigA.put()
        gigB.set_calltime("12:00pm")
        gigB.set_settime(None)
        gigB.put()
        self.assertGigOrder(the_band, total=2, first="B")
        # check out other formats of time or not-time
        gigA.set_calltime("early")
        gigA.set_settime(None)
        gigA.put()
        gigB.set_calltime("1:00")
        gigB.set_settime(None)
        gigB.put()
        self.assertGigOrder(the_band, total=2, first="A")
        gigA.set_calltime("2:30pm")
        gigA.set_settime(None)
        gigA.put()
        gigB.set_calltime("early")
        gigB.set_settime(None)
        gigB.put()
        self.assertGigOrder(the_band, total=2, first="B")
|
rosarior/rua | rua/apps/icons/registry.py | Python | gpl-3.0 | 617 | 0.004862 | from __future__ import absolute_import
from django.utils.translation import ugettext_lazy as _
from smart_settings import LocalScope
from .icons im | port icon_icons_app
from .literals import DEFAULT_ICON_SET
name = 'icons'
label = _(u'Icons')
description = _(u'Handles the registration and rend | ering of icons and sprites.')
dependencies = ['app_registry']
icon = icon_icons_app
settings = [
{
'name': 'ICON_SET',
'default': DEFAULT_ICON_SET,
'description': _(u'Icon set to use to render all the icon in the project.'),
'scopes': [LocalScope()] # TODO: Cluster, Org, User
}
]
|
xranby/apitrace | glproc.py | Python | mit | 12,065 | 0.000663 | ##########################################################################
#
# Copyright 2010 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""Generated an header, glproc.hpp, which does pretty much what GLEW does, but
covers all the functions we support.
"""
import specs.stdapi as stdapi
from dispatch import Dispatcher
from specs.glapi import glapi
from specs.glxapi import glxapi
from specs.wglapi import wglapi
from specs.cglapi import cglapi
from specs.eglapi import eglapi
from specs.glesapi import glesapi
# See http://www.opengl.org/registry/ABI/
public_symbols = set([
# GL 1.2 and ARB_multitexture
"glAccum",
"glAlphaFunc",
"glAreTexturesResident",
"glArrayElement",
"glBegin",
"glBindTexture",
"glBitmap",
"glBlendFunc",
"glCallList",
"glCallLists",
"glClear",
"glClearAccum",
"glClearColor",
"glClearDepth",
| "glClearIndex",
"glClearStencil",
"glClipPlane",
"glColor3b",
"glColor3bv",
"glColor3d",
"glColor3dv",
"glColor3f",
"glColor3fv",
"glColor3i",
"glColor3iv",
"glColor3s",
"glColor3sv",
"glColor3ub",
"glColor3ubv",
"glColor3ui",
"glColor3uiv",
"glColor3us",
"glColor3usv",
"glColor4b",
"glColor4bv",
"glColor4d",
"glColor4dv",
"glColor4f",
"glColor4fv",
"glColor4i",
"glColor4iv",
"glColor4s",
"glColor4sv",
"glColor4ub",
"glColor4ubv",
"glColor4ui",
"glColor4uiv",
"glColor4us",
"glColor4usv",
"glColorMask",
"glColorMaterial",
"glColorPointer",
"glCopyPixels",
"glCopyTexImage1D",
"glCopyTexImage2D",
"glCopyTexSubImage1D",
"glCopyTexSubImage2D",
"glCullFace",
"glDeleteLists",
"glDeleteTextures",
"glDepthFunc",
"glDepthMask",
"glDepthRange",
"glDisable",
"glDisableClientState",
"glDrawArrays",
"glDrawBuffer",
"glDrawElements",
"glDrawPixels",
"glEdgeFlag",
"glEdgeFlagPointer",
"glEdgeFlagv",
"glEnable",
"glEnableClientState",
"glEnd",
"glEndList",
"glEvalCoord1d",
"glEvalCoord1dv",
"glEvalCoord1f",
"glEvalCoord1fv",
"glEvalCoord2d",
"glEvalCoord2dv",
"glEvalCoord2f",
"glEvalCoord2fv",
"glEvalMesh1",
"glEvalMesh2",
"glEvalPoint1",
"glEvalPoint2",
"glFeedbackBuffer",
"glFinish",
"glFlush",
"glFogf",
"glFogfv",
"glFogi",
"glFogiv",
"glFrontFace",
"glFrustum",
"glGenLists",
"glGenTextures",
"glGetBooleanv",
"glGetClipPlane",
"glGetDoublev",
"glGetError",
"glGetFloatv",
"glGetIntegerv",
"glGetLightfv",
"glGetLightiv",
"glGetMapdv",
"glGetMapfv",
"glGetMapiv",
"glGetMaterialfv",
"glGetMaterialiv",
"glGetPixelMapfv",
"glGetPixelMapuiv",
"glGetPixelMapusv",
"glGetPointerv",
"glGetPolygonStipple",
"glGetString",
"glGetTexEnvfv",
"glGetTexEnviv",
"glGetTexGendv",
"glGetTexGenfv",
"glGetTexGeniv",
"glGetTexImage",
"glGetTexLevelParameterfv",
"glGetTexLevelParameteriv",
"glGetTexParameterfv",
"glGetTexParameteriv",
"glHint",
"glIndexMask",
"glIndexPointer",
"glIndexd",
"glIndexdv",
"glIndexf",
"glIndexfv",
"glIndexi",
"glIndexiv",
"glIndexs",
"glIndexsv",
"glIndexub",
"glIndexubv",
"glInitNames",
"glInterleavedArrays",
"glIsEnabled",
"glIsList",
"glIsTexture",
"glLightModelf",
"glLightModelfv",
"glLightModeli",
"glLightModeliv",
"glLightf",
"glLightfv",
"glLighti",
"glLightiv",
"glLineStipple",
"glLineWidth",
"glListBase",
"glLoadIdentity",
"glLoadMatrixd",
"glLoadMatrixf",
"glLoadName",
"glLogicOp",
"glMap1d",
"glMap1f",
"glMap2d",
"glMap2f",
"glMapGrid1d",
"glMapGrid1f",
"glMapGrid2d",
"glMapGrid2f",
"glMaterialf",
"glMaterialfv",
"glMateriali",
"glMaterialiv",
"glMatrixMode",
"glMultMatrixd",
"glMultMatrixf",
"glNewList",
"glNormal3b",
"glNormal3bv",
"glNormal3d",
"glNormal3dv",
"glNormal3f",
"glNormal3fv",
"glNormal3i",
"glNormal3iv",
"glNormal3s",
"glNormal3sv",
"glNormalPointer",
"glOrtho",
"glPassThrough",
"glPixelMapfv",
"glPixelMapuiv",
"glPixelMapusv",
"glPixelStoref",
"glPixelStorei",
"glPixelTransferf",
"glPixelTransferi",
"glPixelZoom",
"glPointSize",
"glPolygonMode",
"glPolygonOffset",
"glPolygonStipple",
"glPopAttrib",
"glPopClientAttrib",
"glPopMatrix",
"glPopName",
"glPrioritizeTextures",
"glPushAttrib",
"glPushClientAttrib",
"glPushMatrix",
"glPushName",
"glRasterPos2d",
"glRasterPos2dv",
"glRasterPos2f",
"glRasterPos2fv",
"glRasterPos2i",
"glRasterPos2iv",
"glRasterPos2s",
"glRasterPos2sv",
"glRasterPos3d",
"glRasterPos3dv",
"glRasterPos3f",
"glRasterPos3fv",
"glRasterPos3i",
"glRasterPos3iv",
"glRasterPos3s",
"glRasterPos3sv",
"glRasterPos4d",
"glRasterPos4dv",
"glRasterPos4f",
"glRasterPos4fv",
"glRasterPos4i",
"glRasterPos4iv",
"glRasterPos4s",
"glRasterPos4sv",
"glReadBuffer",
"glReadPixels",
"glRectd",
"glRectdv",
"glRectf",
"glRectfv",
"glRecti",
"glRectiv",
"glRects",
"glRectsv",
"glRenderMode",
"glRotated",
"glRotatef",
"glScaled",
"glScalef",
"glScissor",
"glSelectBuffer",
"glShadeModel",
"glStencilFunc",
"glStencilMask",
"glStencilOp",
"glTexCoord1d",
"glTexCoord1dv",
"glTexCoord1f",
"glTexCoord1fv",
"glTexCoord1i",
"glTexCoord1iv",
"glTexCoord1s",
"glTexCoord1sv",
"glTexCoord2d",
"glTexCoord2dv",
"glTexCoord2f",
"glTexCoord2fv",
"glTexCoord2i",
"glTexCoord2iv",
"glTexCoord2s",
"glTexCoord2sv",
"glTexCoord3d",
"glTexCoord3dv",
"glTexCoord3f",
"glTexCoord3fv",
"glTexCoord3i",
"glTexCoord3iv",
"glTexCoord3s",
"glTexCoord3sv",
"glTexCoord4d",
"glTexCoord4dv",
"glTexCoord4f",
"glTexCoord4fv",
"glTexCoord4i",
"glTexCoord4iv",
"glTexCoord4s",
"glTexCoord4sv",
"glTexCoordPointer",
"glTexEnvf",
"glTexEnvfv",
"glTexEnvi",
"glTexEnviv",
"glTexGend",
"glTexGendv",
"glTexGenf",
"glTexGenfv",
"glTexGeni",
"glTexGeniv",
"glTexImage1D",
"glTexImage2D",
"glTexParameterf",
"glTexParameterfv",
"glTexParameteri",
"glTexParameteriv",
"glTexSubImage1D",
"glTexSubImage2D",
"glTranslated",
"glTranslatef",
"glVertex2d",
"glVertex2dv",
"glVertex2f",
"glVertex2fv",
"glVertex2i",
"glVertex2iv",
"glVertex2s",
"glVertex2sv",
"glVertex3d",
"glVertex3dv",
"glVertex3f",
"glVertex3fv",
"glVertex3i",
"glVertex3iv",
"glVertex3s",
"glVertex3sv",
"glV |
def detect_single_character_xor(ciphertext):
    """Recover the plaintext of a hex-encoded single-byte-XOR ciphertext.

    Tries every possible one-byte key, scores each ASCII-decodable
    candidate by how much it resembles English text (common letters score
    1, spaces score 2), and returns the best-scoring plaintext.

    Args:
        ciphertext: hex-encoded string of the XOR-encrypted bytes.

    Returns:
        The highest-scoring decrypted plaintext, or "" when the input is
        empty, not valid hex, or no candidate decodes as ASCII (matching
        the previous stub's "" return for unusable input).
    """
    try:
        data = bytes.fromhex(ciphertext)
    except (ValueError, TypeError):
        return ""
    if not data:
        return ""

    def english_score(text):
        # Spaces are the strongest single signal of English prose.
        common = "etaoin shrdlu"
        return sum(2 if ch == ' ' else 1
                   for ch in text.lower() if ch in common)

    best_plaintext = ""
    best_score = -1
    for key in range(256):
        candidate = bytes(b ^ key for b in data)
        try:
            text = candidate.decode('ascii')
        except UnicodeDecodeError:
            continue
        score = english_score(text)
        if score > best_score:
            best_score = score
            best_plaintext = text
    return best_plaintext
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.