repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
yes7rose/maya_utils | python/maya_utils/singleton.py | Python | mit | 330 | 0.010274 | # -*- coding: utf-8 -*-
# 只是个模板,继承还是有问题,不用于继承
class Singleton(o | bject):
def __init__(self):
globals()[self.__class__.__name__] = sel | f
def __call__(self):
return self
# class Singleton:
# def __call__(self):
# return self
#
# Singleton = Singleton() |
henniggroup/MPInterfaces | mpinterfaces/mat2d/electronic_structure/analysis.py | Python | mit | 32,249 | 0.000868 | from __future__ import print_function, division, unicode_literals
import os
import matplotlib as mpl
mpl.us | e('Agg')
import matplotlib.pyplot as plt
from matplotlib.font_m | anager import FontProperties
import numpy as np
from scipy.spatial.distance import euclidean
from pymatgen.core.structure import Structure
from pymatgen.io.vasp.outputs import Vasprun, Locpot, VolumetricData
from pymatgen.io.vasp.inputs import Incar
from pymatgen.electronic_structure.plotter import BSPlotter, BSPlotterProjected
from pymatgen.electronic_structure.core import Spin
from mpinterfaces.utils import is_converged
__author__ = "Michael Ashton"
__copyright__ = "Copyright 2017, Henniggroup"
__maintainer__ = "Michael Ashton"
__email__ = "joshgabriel92@gmail.com"
__status__ = "Production"
__date__ = "March 3, 2017"
def get_band_edges():
"""
Calculate the band edge locations relative to the vacuum level
for a semiconductor. For a metal, returns the fermi level.
Returns:
edges (dict): {'up_cbm': , 'up_vbm': , 'dn_cbm': , 'dn_vbm': , 'efermi'}
"""
# Vacuum level energy from LOCPOT.
locpot = Locpot.from_file('LOCPOT')
evac = max(locpot.get_average_along_axis(2))
vasprun = Vasprun('vasprun.xml')
bs = vasprun.get_band_structure()
eigenvals = vasprun.eigenvalues
efermi = vasprun.efermi - evac
if bs.is_metal():
edges = {'up_cbm': None, 'up_vbm': None, 'dn_cbm': None, 'dn_vbm': None,
'efermi': efermi}
elif bs.is_spin_polarized:
up_cbm = min(
[min([e[0] for e in eigenvals[Spin.up][i] if not e[1]])
for i in range(len(eigenvals[Spin.up]))]) - evac
up_vbm = max(
[max([e[0] for e in eigenvals[Spin.up][i] if e[1]])
for i in range(len(eigenvals[Spin.up]))]) - evac
dn_cbm = min(
[min([e[0] for e in eigenvals[Spin.down][i] if not e[1]])
for i in range(len(eigenvals[Spin.down]))]) - evac
dn_vbm = max(
[max([e[0] for e in eigenvals[Spin.down][i] if e[1]])
for i in range(len(eigenvals[Spin.down]))]) - evac
edges = {'up_cbm': up_cbm, 'up_vbm': up_vbm, 'dn_cbm': dn_cbm,
'dn_vbm': dn_vbm, 'efermi': efermi}
else:
cbm = bs.get_cbm()['energy'] - evac
vbm = bs.get_vbm()['energy'] - evac
edges = {'up_cbm': cbm, 'up_vbm': vbm, 'dn_cbm': cbm, 'dn_vbm': vbm,
'efermi': efermi}
return edges
def plot_band_alignments(directories, run_type='PBE', fmt='pdf'):
"""
Plot CBM's and VBM's of all compounds together, relative to the band
edges of H2O.
Args:
directories (list): list of the directory paths for materials
to include in the plot.
run_type (str): 'PBE' or 'HSE', so that the function knows which
subdirectory to go into (pbe_bands or hse_bands).
fmt (str): matplotlib format style. Check the matplotlib
docs for options.
"""
if run_type == 'HSE':
subdirectory = 'hse_bands'
else:
subdirectory = 'pbe_bands'
band_gaps = {}
for directory in directories:
sub_dir = os.path.join(directory, subdirectory)
if is_converged(sub_dir):
os.chdir(sub_dir)
band_structure = Vasprun('vasprun.xml').get_band_structure()
band_gap = band_structure.get_band_gap()
# Vacuum level energy from LOCPOT.
locpot = Locpot.from_file('LOCPOT')
evac = max(locpot.get_average_along_axis(2))
if not band_structure.is_metal():
is_direct = band_gap['direct']
cbm = band_structure.get_cbm()
vbm = band_structure.get_vbm()
else:
cbm = None
vbm = None
is_direct = False
band_gaps[directory] = {'CBM': cbm, 'VBM': vbm,
'Direct': is_direct,
'Metal': band_structure.is_metal(),
'E_vac': evac}
os.chdir('../../')
ax = plt.figure(figsize=(16, 10)).gca()
x_max = len(band_gaps) * 1.315
ax.set_xlim(0, x_max)
# Rectangle representing band edges of water.
ax.add_patch(plt.Rectangle((0, -5.67), height=1.23, width=len(band_gaps),
facecolor='#00cc99', linewidth=0))
ax.text(len(band_gaps) * 1.01, -4.44, r'$\mathrm{H+/H_2}$', size=20,
verticalalignment='center')
ax.text(len(band_gaps) * 1.01, -5.67, r'$\mathrm{O_2/H_2O}$', size=20,
verticalalignment='center')
x_ticklabels = []
y_min = -8
i = 0
# Nothing but lies.
are_directs, are_indirects, are_metals = False, False, False
for compound in [cpd for cpd in directories if cpd in band_gaps]:
x_ticklabels.append(compound)
# Plot all energies relative to their vacuum level.
evac = band_gaps[compound]['E_vac']
if band_gaps[compound]['Metal']:
cbm = -8
vbm = -2
else:
cbm = band_gaps[compound]['CBM']['energy'] - evac
vbm = band_gaps[compound]['VBM']['energy'] - evac
# Add a box around direct gap compounds to distinguish them.
if band_gaps[compound]['Direct']:
are_directs = True
linewidth = 5
elif not band_gaps[compound]['Metal']:
are_indirects = True
linewidth = 0
# Metals are grey.
if band_gaps[compound]['Metal']:
are_metals = True
linewidth = 0
color_code = '#404040'
else:
color_code = '#002b80'
# CBM
ax.add_patch(plt.Rectangle((i, cbm), height=-cbm, width=0.8,
facecolor=color_code, linewidth=linewidth,
edgecolor="#e68a00"))
# VBM
ax.add_patch(plt.Rectangle((i, y_min),
height=(vbm - y_min), width=0.8,
facecolor=color_code, linewidth=linewidth,
edgecolor="#e68a00"))
i += 1
ax.set_ylim(y_min, 0)
# Set tick labels
ax.set_xticks([n + 0.4 for n in range(i)])
ax.set_xticklabels(x_ticklabels, family='serif', size=20, rotation=60)
ax.set_yticklabels(ax.get_yticks(), family='serif', size=20)
# Add a legend
height = y_min
if are_directs:
ax.add_patch(plt.Rectangle((i*1.165, height), width=i*0.15,
height=(-y_min*0.1), facecolor='#002b80',
edgecolor='#e68a00', linewidth=5))
ax.text(i*1.24, height - y_min * 0.05, 'Direct', family='serif',
color='w', size=20, horizontalalignment='center',
verticalalignment='center')
height -= y_min * 0.15
if are_indirects:
ax.add_patch(plt.Rectangle((i*1.165, height), width=i*0.15,
height=(-y_min*0.1), facecolor='#002b80',
linewidth=0))
ax.text(i*1.24, height - y_min * 0.05, 'Indirect', family='serif',
size=20, color='w', horizontalalignment='center',
verticalalignment='center')
height -= y_min * 0.15
if are_metals:
ax.add_patch(plt.Rectangle((i*1.165, height), width=i*0.15,
height=(-y_min*0.1), facecolor='#404040',
linewidth=0))
ax.text(i*1.24, height - y_min * 0.05, 'Metal', family='serif',
size=20, color='w', horizontalalignment='center',
verticalalignment='center')
# Who needs axes?
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.set_ylabel('eV', family='serif', size=24)
if fmt == "None":
return ax
else:
plt.savefig('band_alignments.{}'.format(fmt) |
kb8u/ZenPacks.Merit.AdvaFSP3000R7 | ZenPacks/Merit/AdvaFSP3000R7/FSP3000R7NCU.py | Python | gpl-2.0 | 933 | 0.006431 | ######################################################################
#
# FSP3000R7NCU object class
#
# Copyright (C) 2011 Russe | ll Dwarshuis, Merit Network, Inc.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
######################################################################
__doc__="""FSP3000R7NCU
FSP3000R7NCU is a component of a FSP3000R7Device Device
"""
from ZenPacks.Merit.AdvaFSP3000R7.lib.FSP3000R7Component import *
import logging
log = logging.getLogger('FSP3000R7NCU')
class FSP3000R7NCU(FSP3000R7C | omponent):
"""FSP3000R7NCU object"""
portal_type = meta_type = 'FSP3000R7NCU'
_relations = (("FSP3000R7Dev",
ToOne(ToManyCont,
"ZenPacks.Merit.AdvaFSP3000R7.FSP3000R7Device",
"FSP3000R7Ncu")),
)
InitializeClass(FSP3000R7NCU)
|
dangoldin/python-tools | backup.py | Python | mit | 1,007 | 0.005958 | #!/usr/bin/env python
import datetime
start = datetime.datetime.strptime("2012-11-16", "%Y-%m-%d")
end = datetime.datetime.strptime("2013-10-25", "%Y-%m-%d")
table_name = 'sql_table_name_here';
dates = []
while start < end:
s = start.strftime('%Y-%m-%d')
start += datetime.timedelta(days=1)
e = start.strftime('%Y-%m-%d')
dates.append((s,e))
HOST = 'host'
PORT = '3306'
USER = 'user'
PASS = 'password'
DB = 'database'
DATE_FIELD = 'ymd' # Maybe timestamp? datetime?
S3_FOLDER = 's3_backup_folder'
for s,e in dates:
if True:
print "/usr/local/mysql/bin/mysql -h{} --port={} -u{} -p{} {} -e \"SELECT * from {} where {} >= '{}' and {} < '{}'\" > {}_{}_{}.csv".format(HOST, PORT, USER, PASS, DB, table_name, DATE_FIELD, s, DATE_FIELD, e, table_name, s, e)
print "gzip {}_{}_{}.csv".format(table_name, s, e)
print "aws s3 c | p {}_{}_{}.csv.gz s3://{}/{}/".format(table_name, s, e, S3_FOLDER, table_name)
print "rm {}_{}_{}.cs | v.gz".format(table_name, s, e)
|
dustin/couchapp | Couchapp.py | Python | apache-2.0 | 219 | 0.004566 | #!/us | r/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of couchapp released under the Apache 2 license.
# See the NOTICE for more information.
if __name__ == "__main__":
from couchapp import dispa | tch
|
korvyashka/pykongregate | pykongregate/api.py | Python | mit | 1,946 | 0 | import requests
import simplejson
from simplejson.scanner import JSONDecodeError
from .exceptions import NullResponseException
GET_USER_ITEMS_URL = "http://www.kongregate.com/api/user_items.json"
GET_ITEMS_URL = "http://www.kongregate.com/api/items.json"
USER_INFO_URL = "http://www.kongregate.com/api/user_info.json"
def _handle_request(url, params):
response = requests.get(
GET_USER_ITEMS_URL, params=params
)
try:
return simplejson.loads(response.text)
except JSONDecodeError:
raise NullResponseException(
"request_url: {url}. params: {params}. "
"Got unparsable response: {response}".format(
url=url,
params=params,
response=response.text,
)
)
def get_user_items_api(user_id, api_key):
"""
wrapper on API method:
http | ://developers.kongregate.com/docs/rest/user-items
:param user_id:
:type user_id: int
:param api_key:
:type api_key: str
:rtype: dict
:return: loaded json from response
"""
params = {
'api_key': api_key,
| 'user_id': user_id,
}
url = GET_USER_ITEMS_URL
return _handle_request(url, params)
def get_items_api(api_key):
"""
wrapper on API method:
http://developers.kongregate.com/docs/rest/items
:param api_key:
:type api_key: str
:rtype: dict
:return: loaded json from response
"""
params = {
'api_key': api_key,
}
url = GET_ITEMS_URL
return _handle_request(url, params)
def get_user_info_api(username):
"""
wrapper on API method:
http://developers.kongregate.com/docs/rest/user_info
:param username:
:type username: str
:rtype: dict
:return: loaded json from response
"""
# TODO: implement all params
params = {
'username': username
}
url = USER_INFO_URL
return _handle_request(url, params)
|
ahmfrz/Let-s-Catalog | Lets_Catalog/models/category.py | Python | mit | 664 | 0.003012 | """Defines category table"""
import datetime
from database_setup import Base
from user import User
from sqlalchemy import Column, String, | ForeignKey, Integer, DateTime
from sqlalchemy.orm import relationship
class Category(Base):
''' Defines category table and columns '''
__tablename__ = "category"
id = Column(Integer, primary_key=True)
name = Column(String(150), nullable=False)
created_date = Column(DateTime, default=datetime.datetime.utcnow)
user_id = Column(Integer, ForeignKey('user.id'))
user = relationship(User)
def serialize(self):
return {
'id': self.id,
| 'name': self.name
} |
niwinz/tornado-webtools | webtools/session/backend/database.py | Python | bsd-3-clause | 1,894 | 0.00528 | # -*- coding: utf-8 -*-
from sqlalchemy import Column, Table, select
from sqlalchemy.types import Integer, PickleType, DateTime, Unicode
from webtools.database import Base
from webtools.utils import timezone
from .base import BaseSessionEngine
session = Table("webtools_session", Base.metadata,
Column("id", Integer, primary_key=True, autoincrement=False),
Column("last_modify", DateTime(timezone=True), index=True),
Column("key", Unicode(length=100), unique=True, index=True),
Column("data", PickleType, unique=True),
)
class DatabaseEngine(BaseSessionEngine):
@property
def db(self):
return self._application.db
def load(self):
if self._current_session_key is None:
self._current_session_key = self.random_session_key()
sql = session.insert().values(
last_modify = timezone.now(),
key = self._current_session_key,
data = {}
)
self.db.execute(sql)
self._session_data = {}
self._modified = False
else:
sql = select([session.c.key, session.c.data]).where(session.c.key == self._current_session_key)
res_proxy = self.db.execute(sql).first()
if res_proxy:
self._session_data | = res_proxy['data']
self._modified = False
def save(self):
sql = session.update()\
.where(session.c.key == self._current_session_key)\
.values(data=self._session_data)
self.db.execute(sql)
self.db.commit()
self._modified = False
def delete(self):
| if self._current_session_key is None:
return
self.db.execute(session.delete().where(session.c.key == self._current_session_key))
self.db.commit()
self._current_session_key = None
self._session_data = None
|
ptosco/rdkit | rdkit/sping/SVG/__init__.py | Python | bsd-3-clause | 44 | 0 | # simp | le __init__.py
from .pidSVG import *
| |
danmackinlay/branching_process | branching_process/cts/background.py | Python | mit | 6,771 | 0 | """
Background kernels are *like* InfluenceKernel, but not necessarily integrable
and there is no support for differentiationg with respect to time.
"""
from .influence import InfluenceKernel
have_autograd = False
try:
import autograd.numpy as np
have_autograd = True
except ImportError as e:
import numpy as np
class ConstKernel(InfluenceKernel):
def __init__(
self,
n_bases=0,
kappa=None,
eps=1e-8,
*args, **fixed_kwargs):
self._fixed_kwargs = fixed_kwargs
if kappa is None and n_bases > 0:
kappa = np.zeros(n_bases)
if kappa is not None:
self._fixed_kwargs.setdefault(
'kappa', kappa
)
self.eps = eps
self.n_bases = n_bases
# super(ConstKernel, self).__init__(*args)
def mu_bounds(self):
return [(self.eps, None)]
def kappa_bounds(self):
return [(None, None)] * self.n_bases
def f_kappa(self, **kwargs):
mu = self.get_param('mu', 0.0, **kwargs)
return np.maximum(mu, self.eps)
def guess_params(self, **kwargs):
# from IPython.core.debugger import Tracer; Tracer()()
return self.guess_params_intensity(self.f_kappa(**kwargs))
def guess_params_intensity(self, f_kappa_hat):
med = np.mean(f_kappa_hat)
return dict(
mu=med
)
def __call__(self, t, *args, **kwargs):
mu = self.get_params(**kwargs)['mu']
return np.ones_like(t) * mu
def integrate(self, t, *args, **kwargs):
mu = self.get_params(**kwargs)['mu']
return t * mu
class StepKernel(ConstKernel):
"""
Piecewise-constant rate.
This is presumably for background rate modelling.
"""
def __init__(
self,
t_end=None,
n_bases=None,
*args,
**fixed_kwargs
):
if t_end is None:
t_end = fixed_kwargs.get('tau', [0, 100])[-1]
self.t_end = t_end
if n_bases is None:
if fixed_kwargs.get('tau', None) is not None:
n_bases = np.asarray(fixed_kwargs.get('tau')).size - 1
elif fixed_kwargs.get('kappa', None) is not None:
n_bases = np.asarray(fixed_kwargs.get('kappa')).size
else:
n_bases = 100
self.n_bases = n_bases
fixed_kwargs.setdefault(
'tau',
np.linspace(0, t_end, n_bases+1, endpoint=True)
)
super(StepKernel, self).__init__(
n_bases=n_bases,
*args, **fixed_kwargs)
def f_kappa(self, **kwargs):
kappa = self.get_param('kappa', **kwargs)
mu = self.get_param('mu', 0.0, **kwargs)
return np.maximum(kappa + mu, self.eps)
def guess_params_intensity(self, f_kappa_hat):
med = np.median(f_kappa_hat)
return dict(
mu=med,
kappa=f_kappa_hat-med
)
def __call__(self, t, *args, **kwargs):
"""
"""
tau = self.get_param('tau', **kwargs)
kappa = self.get_param('kappa', **kwargs)
mu = self.get_param('mu', 0.0, **kwargs)
f_kappa = self.f_kappa(kappa=kappa, mu=mu)
tt = np.reshape(t, (-1, 1))
stepwise_mask = (
(tt >= tau[:-1].reshape(1, -1)) *
(tt < tau[1:].reshape(1, -1))
)
outside = (t < tau[0]) + (t >= tau[-1])
# from IPython.core.debugger import Tracer; Tracer()()
return np.sum(
stepwise_mask * np.reshape(f_kappa, (1, -1)),
1
) + outside * mu
def integrate(self, t, *args, **kwargs):
"""
This integral is a simple linear interpolant,
which I would like to do as a spline.
However, I need to do it manually, since
it needs to be autograd differentiable, which splines are not.
The method here is not especially efficent.
"""
tau = self.get_param('tau', **kwargs)
kappa = self.get_param('kappa', **kwargs)
mu = self.get_param('mu', 0.0, **kwargs)
f_kappa = self.f_kappa(kappa=kappa, mu=mu)
t = np.reshape(t, (-1, 1))
delta = np.diff(tau)
each = np.maximum(
0, (t - tau[:-1].reshape(1, -1))
)
each = np.minimum(
each,
delta.reshape(1, -1)
)
return np.sum(
each * np.reshape(f_kappa, (1, -1)),
1
) + (mu * t.ravel())
def majorant(self, t, *args, **kwargs):
kappa = self.get_param('kappa', **kwargs)
mu = self.get_param('mu', 0.0, **kwargs)
kappa = np.maximum(kappa, -mu)
return np.ones_like(t) * (mu + np.amax(kappa))
def count(self, t, *args, **kwargs):
tau = self.get_param('tau', **kwargs)
return np.histogram(t, tau, density=False)
class MultiplicativeStepKernel(StepKernel):
"""
Piecewise-constant rate.
| This is presumably for background rate modelling.
"""
# def kappa_bounds(self):
# return [(-1, None)] * self.n_bases
def f_kappa(self, **kwargs):
kappa = self.get_param('kappa', **kwargs)
mu = self.get_param('mu', 0.0, **kwargs)
return (np.maximum(kappa + 1, self.eps)) * mu
def guess_params_intensity(self, f_kappa_h | at):
# Is this correct?
med = np.median(f_kappa_hat)
return dict(
mu=med,
kappa=f_kappa_hat/med - 1
)
class LogStepKernel(StepKernel):
"""
Piecewise-constant rate.
This is presumably for background rate modelling.
"""
def kappa_bounds(self):
return [(None, None)] * self.n_bases
def f_kappa(self, **kwargs):
kappa = self.get_param('kappa', **kwargs)
mu = self.get_param('mu', 0.0, **kwargs)
return mu * np.exp(kappa)
def guess_params_intensity(self, f_kappa_hat):
# Is this correct?
med = np.median(f_kappa_hat)
return dict(
mu=med,
kappa=np.log(f_kappa_hat/med)
)
def as_background_kernel(
function,
majorant=None,
integral=None,
n_bases=0,
t_start=0,
t_end=100,
**kwargs
):
if hasattr(function, 'majorant'):
return function
elif function is None:
# a number or None?
return ConstKernel(
**kwargs
)
elif n_bases == 0:
# a number or None?
return ConstKernel(
mu=function,
**kwargs
)
else:
return StepKernel(
mu=function,
t_start=t_start,
t_end=t_end,
n_bases=n_bases,
**kwargs
)
|
anant-dev/django | django/template/engine.py | Python | bsd-3-clause | 7,528 | 0 | from django.core.exceptions import ImproperlyConfigured
from django.utils import lru_cache, six
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from .base import Context, Template
from .context import _builtin_context_processors
from .exceptions import TemplateDoesNotExist
from .library import import_library
class Engine(object):
default_builtins = [
'django.template.defaulttags',
'django.template.defaultfilters',
'django.template.loader_tags',
]
def __init__(self, dirs=None, app_dirs=False, context_processors=None,
debug=False, loaders=None, string_if_invalid='',
file_charset='utf-8', libraries=None, builtins=None):
if dirs is None:
dirs = []
if context_processors is None:
context_processors = []
if loaders is None:
loaders = ['django.template.loaders.filesystem.Loader']
if app_dirs:
loaders += ['django.template.loaders.app_directories.Loader']
else:
if app_dirs:
raise ImproperlyConfigured(
"app_dirs must not be set when loaders is defined.")
if libraries is None:
libraries = {}
if builtins is None:
builtins = []
self.dirs = dirs
self.app_dirs = app_dirs
self.context_processors = context_processors
self.debug = debug
self.loaders = loaders
self.string_if_invalid = string_if_invalid
self.file_charset = file_charset
self.libraries = libraries
self.template_libraries = self.get_template_libraries(libraries)
self.builtins = self.default_builtins + builtins
self.template_builtins = self.get_template_builtins(self.builtins)
@staticmethod
@lru_cache.lru_cache()
def get_default():
"""
When only one DjangoTemplates backend is configured, returns it.
Raises ImproperlyConfigured otherwise.
This is required for preserving historical APIs that rely on a
globally available, implicitly configured engine such as:
>>> from django.template import Context, Template
>>> template = Template("Hello {{ name }}!")
>>> context = Context({'name': "world"})
>>> template.render(context)
'Hello world!'
"""
# Since Engine is imported in django.template and since
# DjangoTemplates is a wrapper around this Engine class,
# local imports are required to avoid import loops.
from django.template import engines
from django.template.backends.django import DjangoTemplates
django_engines = [engine for engine in engines.all()
if isinstance(engine, DjangoTemplates)]
if len(django_engines) == 1:
# Unwrap the Engine instance inside DjangoTemplates
return django_engines[0].engine
elif len(django_engines) == 0:
raise ImproperlyConfigured(
"No DjangoTemplates backend is configured.")
else:
raise ImproperlyConfigured(
"Several DjangoTemplates backends are configured. "
"You must select one explicitly.")
@cached_property
def template_context_processors(self):
context_processors = _builtin_context_processors
context_processors += tuple(self.context_processors)
return tuple(import_string(path) for path in context_processors)
def get_template_builtins(self, builtins):
return [import_library(x) for x in builtins]
def get_template_libraries(self, libraries):
loaded = {}
for name, path in libraries.items():
loaded[name] = import_library(path)
return loaded
@cached_property
def template_loaders(self):
return self.get_template_loaders(self.loaders)
def get_template_loaders(self, template_loaders):
loaders = []
for template_loader in template_loaders:
loader = self.find_template_loader(template_loader)
if loader is not None:
loaders.append(loader)
return loaders
def find_template_loader(self, loader):
if isinstance(loader, (tuple, list)):
args = list(loader[1:])
loader = loader[0]
else:
args = []
if isinstance(loader, six.string_types):
loader_class = import_string(loader)
return loader_class(self, *args)
else:
raise ImproperlyConfigured(
"Invalid value in template loaders configuration: %r" % loader)
def find_template(self, name, dirs=None, skip=None):
tried = []
for loader in self.template_loaders:
if loader.supports_recursion:
try:
template = loader.get_template(
name, template_dirs=dirs, skip=skip,
)
return template, template.origin
except TemplateDoesNotExist as e:
tried.extend(e.tried)
else:
# RemovedInDjango20Warning: Use old api for non-recursive
# loaders.
try:
return loader(name, dirs)
except TemplateDoesNotExist:
pass
raise TemplateDoesNotExist(name, tried=tried)
def from_string(self, template_code):
"""
Returns a compiled Template object for the given template code,
handling template inheritance recursively.
"""
return Template(template_code, engine=self)
def get_template(self, template_name):
"""
Returns a compiled Template object for the given template name,
han | dling template inheritance recursively.
"""
template, origin = self.find_template(template_name)
if not hasattr(template, 'render'):
# template needs to be compiled
template = Template(template, origin, template_name, engine=self)
return template
def render_to_string(self, template_name, c | ontext=None):
"""
Render the template specified by template_name with the given context.
For use in Django's test suite.
"""
if isinstance(template_name, (list, tuple)):
t = self.select_template(template_name)
else:
t = self.get_template(template_name)
# Django < 1.8 accepted a Context in `context` even though that's
# unintended. Preserve this ability but don't rewrap `context`.
if isinstance(context, Context):
return t.render(context)
else:
return t.render(Context(context))
def select_template(self, template_name_list):
"""
Given a list of template names, returns the first that can be loaded.
"""
if not template_name_list:
raise TemplateDoesNotExist("No template names provided")
not_found = []
for template_name in template_name_list:
try:
return self.get_template(template_name)
except TemplateDoesNotExist as exc:
if exc.args[0] not in not_found:
not_found.append(exc.args[0])
continue
# If we get here, none of the templates could be loaded
raise TemplateDoesNotExist(', '.join(not_found))
|
keshashah/GCompris | src/sudoku-activity/sudoku.py | Python | gpl-2.0 | 57,045 | 0.097607 | # gcompris - sudoku
#
# Copyright (C) 2005, 2008 Bruno Coudoin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
import goocanvas
import gcompris
import gcompris.utils
import gcompris.skin
import gcompris.bonus
import gcompris.score
import gobject
import gtk
import gtk.gdk
import random
import pango
from gcompris import gcompris_gettext as _
class Gcompris_sudoku:
"""Sudoku game"""
def __init__(self, gcomprisBoard):
self.gcomprisBoard = gcomprisBoard
self.gcomprisBoard.disable_im_context = True
# These are used to let us restart only after the bonus is displayed.
# When the bonus is displayed, it call us first with pause(1) and then with pause(0)
self.board_paused = 0;
self.gamewon = 0;
# It holds the canvas items for each square
self.sudo_square = [] # The square Rect Item
self.sudo_number = [] # The square Text Item
self.sudo_symbol = [] # The square Symbol Item
self.valid_chars = [] # The valid chars for the sudoku are calculated from the dataset
# Holds the coordinate of the current square
self.cursqre = None
self.normal_square_color = 0x33CCFFAAL
self.highl_square_color = 0xFFCC33AAL
self.focus_square_color = 0xFF6633AAL
self.fixed_square_c | olor = 0x3366FFAAL
self.error_square_color = 0xFF3366AAL
self.lines_color = 0xFF3366ffL
self.fixed_number_color = 0x000000FFL
self.user_number_color = 0x000000FFL
self.root_sudo = None
self.sudoku = None # The current sud | oku data
self.sudo_size = 0 # the size of the current sudoku
self.sudo_region = None # the modulo region in the current sudoku
self.timer = 0 # The timer that highlights errors
self.symbolize_level_max = 8 # Last level in which we set symbols
self.symbols = [
(gcompris.utils.load_pixmap("sudoku/rectangle.png"),
gcompris.utils.load_pixmap("sudoku/rectangle_grey.png")),
(gcompris.utils.load_pixmap("sudoku/circle.png"),
gcompris.utils.load_pixmap("sudoku/circle_grey.png")),
(gcompris.utils.load_pixmap("sudoku/rhombus.png"),
gcompris.utils.load_pixmap("sudoku/rhombus_grey.png")),
(gcompris.utils.load_pixmap("sudoku/triangle.png"),
gcompris.utils.load_pixmap("sudoku/triangle_grey.png")),
(gcompris.utils.load_pixmap("sudoku/star.png"),
gcompris.utils.load_pixmap("sudoku/star_grey.png"))
]
def start(self):
# Init the sudoku dataset
self.sudoku = self.init_item_list()
self.gcomprisBoard.level=1
self.gcomprisBoard.maxlevel=len(self.sudoku)
self.gcomprisBoard.sublevel=1
gcompris.bar_set(gcompris.BAR_LEVEL|gcompris.BAR_REPEAT)
gcompris.set_background(self.gcomprisBoard.canvas.get_root_item(),
"sudoku/background.jpg")
gcompris.bar_set_level(self.gcomprisBoard)
# Create our rootitem. We put each canvas item in it so at the end we
# only have to kill it. The canvas deletes all the items it contains automaticaly.
self.rootitem = goocanvas.Group(parent = self.gcomprisBoard.canvas.get_root_item())
self.next_level()
self.pause(0);
def end(self):
# Remove the root item removes all the others inside it
self.rootitem.remove()
self.rootitem = None
gcompris.score.end()
def ok(self):
print("Gcompris_sudoku ok.")
def repeat(self):
self.display_sudoku(self.sudoku[self.gcomprisBoard.level-1][self.gcomprisBoard.sublevel-1])
def config(self):
print("Gcompris_sudoku config.")
def key_press(self, keyval, commit_str, preedit_str):
if(self.cursqre == None):
return False
utf8char = gtk.gdk.keyval_to_unicode(keyval)
strn = u'%c' % utf8char
if(strn in self.valid_chars):
if self.is_legal(strn):
self.sudo_number[self.cursqre[0]][self.cursqre[1]].props.text = \
strn.encode('UTF-8')
# Maybe it's all done
if self.is_solved():
self.cursqre = None
self.gamewon = 1
gcompris.bonus.display(gcompris.bonus.WIN, gcompris.bonus.FLOWER)
else:
# Erase the old number there
if ((keyval == gtk.keysyms.BackSpace) or
(keyval == gtk.keysyms.Delete) or
(keyval == gtk.keysyms.space)):
self.sudo_number[self.cursqre[0]][self.cursqre[1]].props.text = ""
else:
# No key processing done
return False
# Return True if you did process a key
# Return False if you did not processed a key
# (gtk need to send it to next widget)
return True
def pause(self, pause):
self.board_paused = pause
# When the bonus is displayed, it call us first
# with pause(1) and then with pause(0)
# the game is won
if(self.gamewon == 1 and pause == 0):
self.gamewon = 0
if(self.increment_level()):
self.next_level()
return
def set_level(self, level):
self.gcomprisBoard.level = level;
self.gcomprisBoard.sublevel = 1;
gcompris.bar_set_level(self.gcomprisBoard)
self.next_level()
# ---- End of Initialisation
# Code that increments the sublevel and level
# And bail out if no more levels are available
# return True if continue, False if bail out
def next_level(self):
# Randomize symbols
for j in range(0, len(self.symbols)):
# Select a random new position to set the J symbol
old_symbol = self.symbols[j]
new_pos = random.randint(0,len(self.symbols)-1)
self.symbols[j] = self.symbols[new_pos]
self.symbols[new_pos] = old_symbol
self.display_sudoku(self.sudoku[self.gcomprisBoard.level-1][self.gcomprisBoard.sublevel-1])
gcompris.score.start(gcompris.score.STYLE_NOTE, 610, 485,
len(self.sudoku[self.gcomprisBoard.level-1]))
gcompris.score.set(self.gcomprisBoard.sublevel)
return True
# Code that increments the sublevel and level
# And bail out if no more levels are available
# return True if continue, False if bail out
def increment_level(self):
self.gcomprisBoard.sublevel += 1
if(self.gcomprisBoard.sublevel > len(self.sudoku[self.gcomprisBoard.level-1])):
# Try the next level
self.gcomprisBoard.sublevel=1
self.gcomprisBoard.level += 1
# Set the level in the control bar
gcompris.bar_set_level(self.gcomprisBoard);
if(self.gcomprisBoard.level > self.gcomprisBoard.maxlevel):
self.gcomprisBoard.level = self.gcomprisBoard.maxlevel
return True
#
# Set a symbol in the sudoku
#
def set_sudoku_symbol(self, text, x, y):
(pixmap, pixmap_grey) = self.get_pixmap_symbol(self.valid_chars, text)
self.sudo_symbol[x][y].props.pixbuf = pixmap
self.sudo_symbol[x][y].props.visibility = goocanvas.ITEM_VISIBLE
self.sudo_number[x][y].props.text = text
#
# Event on a placed symbol. Means that we remove it
#
def hide_symbol_event(self, item, target, event, data):
if(self.gamewon):
return False
if event.type == gtk.gdk.BUTTON_PRESS:
item.props.visibility = goocanvas.ITEM_INVISIBLE
self.sudo_number[data[0]][data[1]].props.text = ""
#
# This function is being called uppon a click on a symbol on the left
# If a square has the focus, then the clicked square is assigned there
#
def symbol_item_event(self, item, target, event, text):
if(self.gamewon):
return False
if self.selected_item == item:
return False
item.props.pixbuf = item.get_data("pixmap")
if self.selected_item: |
openstack/trove | trove/tests/unittests/cluster/test_models.py | Python | apache-2.0 | 9,636 | 0 | # Copyright 2016 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest.mock import ANY
from unittest.mock import call
from unittest.mock import DEFAULT
from unittest.mock import MagicMock
from unittest.mock import Mock
from unittest.mock import patch
from unittest.mock import PropertyMock
from neutronclient.common import exceptions as neutron_exceptions
from trove.cluster import models
from trove.common import clients
from trove.common import exception
from trove.tests.unittests import trove_testtools
class TestModels(trove_testtools.TestCase):
@patch.object(clients, 'create_nova_client', return_value=MagicMock())
def test_validate_instance_flavors(self, create_nova_cli_mock):
patch.object(
create_nova_cli_mock.return_value, 'flavors',
new_callable=PropertyMock(return_value=Mock()))
mock_flv = create_nova_cli_mock.return_value.flavors.get.return_value
mock_flv.ephemeral = 0
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 1.5,
'region_name': 'home'},
{'flavor_id': 2, 'volume_size': 3,
'region_name': 'work'}]
models.validate_instance_flavors(Mock(), test_instances,
True, True)
create_nova_cli_mock.assert_has_calls([call(ANY, None),
call(ANY, 'home'),
call(ANY, 'work')])
self.assertRaises(exception.LocalStorageNotSpecified,
models.validate_instance_flavors,
Mock(), test_instances, False, True)
mock_flv.ephemeral = 1
models.validate_instance_flavors(Mock(), test_instances,
False, True)
def test_validate_volume_size(self):
self.patch_conf_property('max_accepted_volume_size', 10)
models.validate_volume_size(9)
models.validate_volume_size(10)
self.assertRaises(exception.VolumeQuotaExceeded,
models.validate_volume_size, 11)
self.assertRaises(exception.VolumeSizeNotSpecified,
models.validate_volume_size, None)
@patch.object(models, 'validate_volume_size')
def test_get_required_volume_size(self, vol_size_validator_mock):
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 1.5},
{'flavor_id': 1, 'volume_size': 3}]
total_size = models.get_required_volume_size(test_instances, True)
self.assertEqual(14.5, total_size)
vol_size_validator_mock.assert_has_calls([call(10),
call(1.5),
call(3)], any_order=True)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 1.5},
{'flavor_id': 1, 'volume_size': None}]
self.assertRaises(exception.ClusterVolumeSizeRequired,
models.get_required_volume_size,
test_instances, True)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 1.5},
{'flavor_id': 1}]
self.assertRaises(exception.ClusterVolumeSizeRequired,
models.get_required_volume_size,
test_instances, True)
test_instances = [{'flavor_id': 1},
{'flavor_id': 1},
{'flavor_id': 1}]
total_size = models.get_required_volume_size(test_instances, False)
self.assertIsNone(total_size)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 1.5}]
self.assertRaises(exception.VolumeNotSupported,
models.get_required_volume_size,
test_instances, False)
def test_assert_same_instance_volumes(self):
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
models.assert_same_instance_volumes(test_instances)
|
test_instances = [{'flavor_id': 1, 'volume_size': 5},
{'flavor_id': 1, 'volume_size': 5},
{'flavor_id': 1, 'volume_size': 5}]
models.assert_same_instance_volumes(test_instances, required_size=5)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 1.5},
{'flavor_id': 1, 'volume_size': 10}]
self.assertRaises(excep | tion.ClusterVolumeSizesNotEqual,
models.assert_same_instance_volumes,
test_instances)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
self.assertRaises(exception.ClusterVolumeSizesNotEqual,
models.assert_same_instance_volumes,
test_instances, required_size=5)
def test_assert_same_instance_flavors(self):
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
models.assert_same_instance_flavors(test_instances)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
models.assert_same_instance_flavors(test_instances, required_flavor=1)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 2, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
self.assertRaises(exception.ClusterFlavorsNotEqual,
models.assert_same_instance_flavors,
test_instances)
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
self.assertRaises(exception.ClusterFlavorsNotEqual,
models.assert_same_instance_flavors,
test_instances, required_flavor=2)
@patch.multiple(models, assert_same_instance_flavors=DEFAULT,
assert_same_instance_volumes=DEFAULT)
def test_assert_homogeneous_cluster(self, assert_same_instance_flavors,
assert_same_instance_volumes):
test_instances = [{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10},
{'flavor_id': 1, 'volume_size': 10}]
required_flavor = Mock()
required_volume_size = Mock()
models.assert_homogeneous_cluster(
test_instances, required_flavor=required_flavor,
required_volume_size=required_volume_size)
assert_same_instance_flavors.assert_called_once_with(
test_instances, required_flavor=required_flavor)
assert_s |
mosquito/LXC-Web-Panel | lwp/lxc/__init__.py | Python | mit | 13,591 | 0.000368 | # LXC Python Library
# for compatibility with LXC 0.8 and 0.9
# on Ubuntu 12.04/12.10/13.04
# Author: Elie Deloumeau
# Contact: elie@deloumeau.fr
# The MIT License (MIT)
# Copyright (c) 2013 Elie Deloumeau
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from ..lxclite import exists, stopped, ContainerDoesntExists
import os
import platform
import re
import subprocess
import time
from io import StringIO
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
try:
import configparser
except ImportError:
import ConfigParser as configparser
class CalledProcessError(Exception):
    # Module-specific stand-in for subprocess.CalledProcessError.
    # NOTE(review): it is not raised anywhere in the visible code --
    # confirm callers still expect to catch it before removing.
    pass
# Map of short option names to the LXC configuration keys they control
# (network, rootfs and cgroup resource limits).  A single dict literal
# replaces the original fifteen repeated item assignments.
cgroup = {
    'type': 'lxc.network.type',
    'link': 'lxc.network.link',
    'flags': 'lxc.network.flags',
    'hwaddr': 'lxc.network.hwaddr',
    'rootfs': 'lxc.rootfs',
    'utsname': 'lxc.utsname',
    'arch': 'lxc.arch',
    'ipv4': 'lxc.network.ipv4',
    'memlimit': 'lxc.cgroup.memory.limit_in_bytes',
    'swlimit': 'lxc.cgroup.memory.memsw.limit_in_bytes',
    'cpus': 'lxc.cgroup.cpuset.cpus',
    'shares': 'lxc.cgroup.cpu.shares',
    'deny': 'lxc.cgroup.devices.deny',
    'allow': 'lxc.cgroup.devices.allow',
}
def FakeSection(fp):
    """Wrap a section-less config file in a fake [DEFAULT] section.

    ConfigParser refuses files without a section header; LXC default
    files have none, so prepend one and return a file-like StringIO.
    """
    return StringIO(u"[DEFAULT]\n%s" % fp.read())
def DelSection(filename=None):
    """Remove the first '[DEFAULT]' line from *filename* in place.

    Undoes what FakeSection() prepends when the file is written back.
    Does nothing when *filename* is None/empty.
    """
    if not filename:
        return
    # Context managers close the handles even on error; the original
    # leaked the file objects if an exception occurred mid-way.
    with open(filename, 'r') as source:
        lines = source.readlines()
    for index, line in enumerate(lines):
        if '[DEFAULT]' in line:
            del lines[index]
            break
    with open(filename, 'w') as target:
        target.writelines(lines)
def file_exist(filename):
    '''
    Return True when *filename* can be opened for reading, else False.
    '''
    try:
        open(filename).close()
    except IOError:
        return False
    return True
def ls_auto():
    '''
    Return the list of autostart container links, or [] when the
    /etc/lxc/auto directory does not exist.
    '''
    try:
        return os.listdir('/etc/lxc/auto/')
    except OSError:
        return []
def memory_usage(name):
    '''
    Return the memory used by container *name* in MB (0 when stopped
    or when the cgroup value cannot be read).

    Raises ContainerDoesntExists for an unknown container.
    '''
    if not exists(name):
        raise ContainerDoesntExists(
            "The container (%s) does not exist!" % name)
    if name in stopped():
        return 0
    # NOTE(review): *name* is interpolated into a shell command line;
    # it must be validated upstream (it comes from the lxc inventory).
    cmd = ['lxc-cgroup -n %s memory.usage_in_bytes' % name]
    try:
        out = subprocess.check_output(cmd, shell=True,
                                      universal_newlines=True).splitlines()
        return int(out[0]) / 1024 / 1024
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any lxc-cgroup
        # failure or unparsable output still yields 0.
        return 0
def host_memory_usage():
    '''
    returns a dict of host memory usage values
    {'percent': int((used/total)*100),
     'percent_cached': int((cached/total)*100),
     'used': int(used/1024),
     'total': int(total/1024)}
    '''
    # Single pass over /proc/meminfo, collecting just the four fields we
    # need; the file handle is closed via the context manager (the
    # original also left variables unbound if a field was missing).
    wanted = {'MemTotal:': 0.0, 'MemFree:': 0.0,
              'Buffers:': 0.0, 'Cached:': 0.0}
    with open('/proc/meminfo') as meminfo:
        for line in meminfo:
            fields = line.split()
            if fields and fields[0] in wanted:
                wanted[fields[0]] = float(fields[1])
    total = wanted['MemTotal:']
    used = total - (wanted['MemFree:'] + wanted['Buffers:'] +
                    wanted['Cached:'])
    return {'percent': int((used / total) * 100),
            'percent_cached': int((wanted['Cached:'] / total) * 100),
            'used': int(used / 1024),
            'total': int(total / 1024)}
def host_cpu_percent():
    '''
    Return overall CPU usage as a percent string with one decimal,
    sampled over a 0.1 second interval.
    '''
    def snapshot():
        # First line of /proc/stat: "cpu user nice system idle ...".
        with open('/proc/stat', 'r') as stat:
            data = stat.readlines()[0].split()
        idle = float(data[4])
        total = (float(data[1]) + float(data[2]) +
                 float(data[3]) + float(data[4]))
        return idle, total

    # The duplicated read/parse code of the original is factored into
    # snapshot(); files are closed via context managers.
    previdle, prevtotal = snapshot()
    time.sleep(0.1)
    idle, total = snapshot()
    intervaltotal = total - prevtotal
    percent = 100 * (intervaltotal - (idle - previdle)) / intervaltotal
    return str('%.1f' % percent)
def host_disk_usage(directory='/var/lib/lxc'):
    '''
    Return disk usage of the filesystem holding *directory*:
    {'total': ..., 'used': ..., 'free': ..., 'percent': ...}
    (human-readable strings as printed by ``df -h``).
    '''
    # Pass argv as a list with the default shell=False: the original
    # interpolated *directory* into a shell command line, which was
    # vulnerable to shell injection via crafted paths.
    output = subprocess.check_output(['df', '-h', directory],
                                     universal_newlines=True)
    usage = output.split('\n')[1].split()
    return {'total': usage[1],
            'used': usage[2],
            'free': usage[3],
            'percent': usage[4]}
def host_uptime():
    '''
    returns a dict of the system uptime
    {'day': days,
     'time': '%d:%02d' % (hours, minutes)}
    '''
    with open('/proc/uptime') as uptime_file:
        uptime = int(uptime_file.readlines()[0].split('.')[0])
    # Floor division: under Python 3 the original '/' produced floats
    # (e.g. 'day': 1.5) instead of whole days/hours/minutes.
    minutes = uptime // 60 % 60
    hours = uptime // 60 // 60 % 24
    days = uptime // 60 // 60 // 24
    return {'day': days,
            'time': '%d:%02d' % (hours, minutes)}
def check_ubuntu():
    '''
    Return the host distribution name and release as one string.
    '''
    try:
        name, version = platform.linux_distribution()[:2]
    except AttributeError:
        # platform.linux_distribution() was deprecated in Python 3.5 and
        # removed in 3.8; fall back to parsing /etc/os-release.
        name = version = ''
        try:
            with open('/etc/os-release') as release:
                for line in release:
                    if line.startswith('NAME='):
                        name = line.split('=', 1)[1].strip().strip('"')
                    elif line.startswith('VERSION_ID='):
                        version = line.split('=', 1)[1].strip().strip('"')
        except IOError:
            pass
    return '%s %s' % (name, version)
def get_templates_list():
    '''
    Return the sorted list of installed lxc template names
    (the 'lxc-' filename prefix is stripped).  Returns [] when no
    template directory exists.
    '''
    entries = []
    for candidate in ('/usr/share/lxc/templates', '/usr/lib/lxc/templates'):
        try:
            entries = os.listdir(candidate)
            break
        except OSError:
            # Try the next known location; the original left the second
            # listdir unguarded and crashed when neither dir existed.
            continue
    return sorted(entry.replace('lxc-', '') for entry in entries)
def check_version():
    '''
    returns latest LWP version (dict with current and latest)
    '''
    # Remote version lookup is disabled; 'current' is always None.
    return dict(current=None)
def get_net_settings():
    '''
    Return a dict of the known LXC networking settings, read from
    /etc/default/lxc-net (or /etc/default/lxc), or False when neither
    file exists.
    '''
    filename = '/etc/default/lxc-net'
    if not file_exist(filename):
        filename = '/etc/default/lxc'
    if not file_exist(filename):
        return False
    config = configparser.SafeConfigParser()
    config.readfp(FakeSection(open(filename)))
    # Short key -> shell variable name in the defaults file.
    options = (('use', 'USE_LXC_BRIDGE'),
               ('bridge', 'LXC_BRIDGE'),
               ('address', 'LXC_ADDR'),
               ('netmask', 'LXC_NETMASK'),
               ('network', 'LXC_NETWORK'),
               ('range', 'LXC_DHCP_RANGE'),
               ('max', 'LXC_DHCP_MAX'))
    cfg = {}
    for key, option in options:
        # Stripping '"' once is equivalent to the original double strip.
        cfg[key] = config.get('DEFAULT', option).strip('"')
    return cfg
def get_container_settings(name):
'''
returns a dict of all utils settings for a container
'''
if os.geteuid():
filename = os.path.expanduser('~/.local/share/lxc/%s/config' % name)
else:
|
OCA/stock-logistics-warehouse | stock_location_empty/tests/__init__.py | Python | agpl-3.0 | 40 | 0 | from . import test_stock_locati | on_emp | ty
|
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.1/Lib/encodings/cp861.py | Python | mit | 7,192 | 0.039627 | """ Python Character Mapping Codec generated from 'CP861.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless CP861 codec backed by the charmap tables below."""
    def encode(self,input,errors='strict'):
        # Unicode text -> CP861 bytes via the module-level encoding_map.
        return codecs.charmap_encode(input,errors,encoding_map)
    def decode(self,input,errors='strict'):
        # CP861 bytes -> Unicode text via the module-level decoding_map.
        return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer: charmap translation from Codec, buffering from
    # codecs.StreamWriter; no extra state needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader: charmap translation from Codec, buffering from
    # codecs.StreamReader; no extra state needed.
    pass
### encodings module API
def getregentry():
    # Registration hook used by the encodings package:
    # (encoder, decoder, stream reader class, stream writer class).
    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
# Non-identity positions only: CP861 bytes 0x80-0xFF mapped to their
# Unicode code points (generated from the CP861.TXT mapping table).
decoding_map.update({
    0x0080: 0x00c7,  # LATIN CAPITAL LETTER C WITH CEDILLA
    0x0081: 0x00fc,  # LATIN SMALL LETTER U WITH DIAERESIS
    0x0082: 0x00e9,  # LATIN SMALL LETTER E WITH ACUTE
    0x0083: 0x00e2,  # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x0084: 0x00e4,  # LATIN SMALL LETTER A WITH DIAERESIS
    0x0085: 0x00e0,  # LATIN SMALL LETTER A WITH GRAVE
    0x0086: 0x00e5,  # LATIN SMALL LETTER A WITH RING ABOVE
    0x0087: 0x00e7,  # LATIN SMALL LETTER C WITH CEDILLA
    0x0088: 0x00ea,  # LATIN SMALL LETTER E WITH CIRCUMFLEX
    0x0089: 0x00eb,  # LATIN SMALL LETTER E WITH DIAERESIS
    0x008a: 0x00e8,  # LATIN SMALL LETTER E WITH GRAVE
    0x008b: 0x00d0,  # LATIN CAPITAL LETTER ETH
    0x008c: 0x00f0,  # LATIN SMALL LETTER ETH
    0x008d: 0x00de,  # LATIN CAPITAL LETTER THORN
    0x008e: 0x00c4,  # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x008f: 0x00c5,  # LATIN CAPITAL LETTER A WITH RING ABOVE
    0x0090: 0x00c9,  # LATIN CAPITAL LETTER E WITH ACUTE
    0x0091: 0x00e6,  # LATIN SMALL LIGATURE AE
    0x0092: 0x00c6,  # LATIN CAPITAL LIGATURE AE
    0x0093: 0x00f4,  # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x0094: 0x00f6,  # LATIN SMALL LETTER O WITH DIAERESIS
    0x0095: 0x00fe,  # LATIN SMALL LETTER THORN
    0x0096: 0x00fb,  # LATIN SMALL LETTER U WITH CIRCUMFLEX
    0x0097: 0x00dd,  # LATIN CAPITAL LETTER Y WITH ACUTE
    0x0098: 0x00fd,  # LATIN SMALL LETTER Y WITH ACUTE
    0x0099: 0x00d6,  # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x009a: 0x00dc,  # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x009b: 0x00f8,  # LATIN SMALL LETTER O WITH STROKE
    0x009c: 0x00a3,  # POUND SIGN
    0x009d: 0x00d8,  # LATIN CAPITAL LETTER O WITH STROKE
    0x009e: 0x20a7,  # PESETA SIGN
    0x009f: 0x0192,  # LATIN SMALL LETTER F WITH HOOK
    0x00a0: 0x00e1,  # LATIN SMALL LETTER A WITH ACUTE
    0x00a1: 0x00ed,  # LATIN SMALL LETTER I WITH ACUTE
    0x00a2: 0x00f3,  # LATIN SMALL LETTER O WITH ACUTE
    0x00a3: 0x00fa,  # LATIN SMALL LETTER U WITH ACUTE
    0x00a4: 0x00c1,  # LATIN CAPITAL LETTER A WITH ACUTE
    0x00a5: 0x00cd,  # LATIN CAPITAL LETTER I WITH ACUTE
    0x00a6: 0x00d3,  # LATIN CAPITAL LETTER O WITH ACUTE
    0x00a7: 0x00da,  # LATIN CAPITAL LETTER U WITH ACUTE
    0x00a8: 0x00bf,  # INVERTED QUESTION MARK
    0x00a9: 0x2310,  # REVERSED NOT SIGN
    0x00aa: 0x00ac,  # NOT SIGN
    0x00ab: 0x00bd,  # VULGAR FRACTION ONE HALF
    0x00ac: 0x00bc,  # VULGAR FRACTION ONE QUARTER
    0x00ad: 0x00a1,  # INVERTED EXCLAMATION MARK
    0x00ae: 0x00ab,  # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb,  # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591,  # LIGHT SHADE
    0x00b1: 0x2592,  # MEDIUM SHADE
    0x00b2: 0x2593,  # DARK SHADE
    0x00b3: 0x2502,  # BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524,  # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x2561,  # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
    0x00b6: 0x2562,  # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
    0x00b7: 0x2556,  # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
    0x00b8: 0x2555,  # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
    0x00b9: 0x2563,  # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551,  # BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557,  # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d,  # BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x255c,  # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
    0x00be: 0x255b,  # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
    0x00bf: 0x2510,  # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514,  # BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534,  # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c,  # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c,  # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500,  # BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c,  # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x255e,  # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
    0x00c7: 0x255f,  # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
    0x00c8: 0x255a,  # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554,  # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569,  # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566,  # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560,  # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550,  # BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c,  # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x2567,  # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
    0x00d0: 0x2568,  # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
    0x00d1: 0x2564,  # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
    0x00d2: 0x2565,  # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
    0x00d3: 0x2559,  # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
    0x00d4: 0x2558,  # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
    0x00d5: 0x2552,  # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
    0x00d6: 0x2553,  # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
    0x00d7: 0x256b,  # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
    0x00d8: 0x256a,  # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
    0x00d9: 0x2518,  # BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c,  # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588,  # FULL BLOCK
    0x00dc: 0x2584,  # LOWER HALF BLOCK
    0x00dd: 0x258c,  # LEFT HALF BLOCK
    0x00de: 0x2590,  # RIGHT HALF BLOCK
    0x00df: 0x2580,  # UPPER HALF BLOCK
    0x00e0: 0x03b1,  # GREEK SMALL LETTER ALPHA
    0x00e1: 0x00df,  # LATIN SMALL LETTER SHARP S
    0x00e2: 0x0393,  # GREEK CAPITAL LETTER GAMMA
    0x00e3: 0x03c0,  # GREEK SMALL LETTER PI
    0x00e4: 0x03a3,  # GREEK CAPITAL LETTER SIGMA
    0x00e5: 0x03c3,  # GREEK SMALL LETTER SIGMA
    0x00e6: 0x00b5,  # MICRO SIGN
    0x00e7: 0x03c4,  # GREEK SMALL LETTER TAU
    0x00e8: 0x03a6,  # GREEK CAPITAL LETTER PHI
    0x00e9: 0x0398,  # GREEK CAPITAL LETTER THETA
    0x00ea: 0x03a9,  # GREEK CAPITAL LETTER OMEGA
    0x00eb: 0x03b4,  # GREEK SMALL LETTER DELTA
    0x00ec: 0x221e,  # INFINITY
    0x00ed: 0x03c6,  # GREEK SMALL LETTER PHI
    0x00ee: 0x03b5,  # GREEK SMALL LETTER EPSILON
    0x00ef: 0x2229,  # INTERSECTION
    0x00f0: 0x2261,  # IDENTICAL TO
    0x00f1: 0x00b1,  # PLUS-MINUS SIGN
    0x00f2: 0x2265,  # GREATER-THAN OR EQUAL TO
    0x00f3: 0x2264,  # LESS-THAN OR EQUAL TO
    0x00f4: 0x2320,  # TOP HALF INTEGRAL
    0x00f5: 0x2321,  # BOTTOM HALF INTEGRAL
    0x00f6: 0x00f7,  # DIVISION SIGN
    0x00f7: 0x2248,  # ALMOST EQUAL TO
    0x00f8: 0x00b0,  # DEGREE SIGN
    0x00f9: 0x2219,  # BULLET OPERATOR
    0x00fa: 0x00b7,  # MIDDLE DOT
    0x00fb: 0x221a,  # SQUARE ROOT
    0x00fc: 0x207f,  # SUPERSCRIPT LATIN SMALL LETTER N
    0x00fd: 0x00b2,  # SUPERSCRIPT TWO
    0x00fe: 0x25a0,  # BLACK SQUARE
    0x00ff: 0x00a0,  # NO-BREAK SPACE
})
### Encoding Map
# Invert the decoding table: encoding_map[unicode] -> CP861 byte.
# (Old-style loop kept deliberately: this generated module predates
# dict comprehensions.)
encoding_map = {}
for byte_value, code_point in decoding_map.items():
    encoding_map[code_point] = byte_value
|
fnp/redakcja | src/documents/management/commands/prune_audience.py | Python | agpl-3.0 | 2,111 | 0.001422 | # This file is part of FNP-Redakcja, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
import sys
from django.contrib.auth.models import User
from lxml import etree
from django.core.management import BaseCommand
from documents.models import Book
from librarian import DCNS
class Command(BaseCommand):
    """Strip dc:audience metadata from every published book.

    Every published book whose slug is not listed in *exclude_file*
    gets a new commit (attributed to --username) with all
    ``dc:audience`` nodes removed from its first chunk.
    """
    args = 'exclude_file'

    def add_arguments(self, parser):
        parser.add_argument(
            '-u', '--username', dest='username', metavar='USER',
            help='Assign commits to this user (required, preferably yourself).')

    def handle(self, exclude_file, **options):
        username = options.get('username')
        if username:
            user = User.objects.get(username=username)
        else:
            print('Please provide a username.')
            sys.exit(1)
        # Read the exclude file as text: the original opened it in 'rb'
        # mode, producing ``bytes`` slugs that can never match the
        # ``str`` slugs stored in the database under Python 3, so the
        # exclusion silently did nothing.  The context manager also
        # closes the file, which the original leaked.
        with open(exclude_file, encoding='utf-8') as excluded:
            excluded_slugs = [line.strip() for line in excluded
                              if line.strip()]
        books = Book.objects.exclude(slug__in=excluded_slugs)
        for book in books:
            if not book.is_published():
                continue
            print('processing %s' % book.slug)
            chunk = book.chunk_set.first()
            old_head = chunk.head
            src = old_head.materialize()
            tree = etree.fromstring(src)
            audience_nodes = tree.findall('.//' + DCNS("audience"))
            if not audience_nodes:
                print('%s has no audience, skipping' % book.slug)
                continue
            for node in audience_nodes:
                node.getparent().remove(node)
            chunk.commit(
                etree.tostring(tree, encoding='unicode'),
                author=user,
                description='automatyczne skasowanie audience',
                publishable=old_head.publishable
            )
            print('committed %s' % book.slug)
            if not old_head.publishable:
                print('Warning: %s not publishable, last head: %s, %s' % (
                    book.slug, old_head.author.username, old_head.description[:40].replace('\n', ' ')))
|
hoh/reloadr | examples/04_both.py | Python | lgpl-3.0 | 573 | 0 |
from time import sleep
from reloadr import reloadr
@reloadr
def move_z(target, dz):
    """Shift *target* along the z axis by *dz* (hot-reloadable)."""
    target.z = target.z + dz
@reloadr
class Car:
    """A toy vehicle with a hot-reloadable 3D position."""

    x = 0
    y = 0
    z = 0

    def __init__(self, x=0, y=0, z=0):
        self.x, self.y, self.z = x, y, z

    def move(self, dx=0, dy=0):
        """Translate the car within the x/y plane."""
        self.x = self.x + dx
        self.y = self.y + dy

    def position(self):
        """Human-readable description of the current position."""
        return 'Car on {} {} {}'.format(self.x, self.y, self.z)
# Demo loop: keep moving the car forever while reloading both the class
# and the function from disk, so edits to this file take effect live.
car = Car(1000, 3000)
while True:
    car.move(1, 1)
    move_z(car, 1)
    print(car.position())
    sleep(0.3)
    # Pick up any source changes made while the loop was sleeping.
    Car._reload()
    move_z._reload()
|
kobotoolbox/kobo_selenium_tests | kobo_selenium_tests/selenium_ide_exported/download_entered_data_test.py | Python | gpl-3.0 | 3,987 | 0.00928 | # -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class DownloadEnteredDataTest(unittest.TestCase):
    """Browser test: export a KoBoCAT form's data as XLS and download it."""

    # Seconds each polling loop below waits (one-second steps) before
    # failing with "time out".
    # NOTE: the original referenced self.DEFAULT_WAIT_SECONDS and called
    # self.check_timeout() without defining either, so every wait loop
    # died with AttributeError before it could poll even once.
    DEFAULT_WAIT_SECONDS = 30

    def check_timeout(self, message):
        # Hook invoked on every polling iteration.  The surrounding
        # loops already bound the wait via their iteration counts, so
        # this is just an extension point (e.g. for logging slow steps).
        pass

    def setUp(self):
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(30)
        self.base_url = "http://kc.kbtdev.org/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def test_download_entered_data(self):
        # Open KoBoCAT.
        driver = self.driver
        driver.get(self.base_url + "")
        # Assert that our form's title is in the list of projects and follow its link.
        self.assertTrue(self.is_element_present(By.LINK_TEXT, "Selenium test form title."))
        driver.find_element_by_link_text("Selenium test form title.").click()
        # Wait for and click the "Download data" link.
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for "Download data" link.')
            try:
                if self.is_element_present(By.LINK_TEXT, "Download data"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_link_text("Download data").click()
        # Wait for and click the "XLS" link.
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for "XLS" link.')
            try:
                if self.is_element_present(By.LINK_TEXT, "XLS"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_link_text("XLS").click()
        # Wait for the download page's header and ensure it contains the word "excel" (case insensitive).
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for download page\'s header.')
            try:
                if self.is_element_present(By.CSS_SELECTOR, ".data-page__header"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        self.assertIsNotNone(re.compile('excel', re.IGNORECASE).search(driver.find_element_by_css_selector(".data-page__header").text))
        # Wait for the export progress status.
        for _ in xrange(self.DEFAULT_WAIT_SECONDS):
            self.check_timeout('Waiting for the export progress status.')
            try:
                if self.is_element_present(By.CSS_SELECTOR, ".refresh-export-progress"): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        # Wait (a little more than usual) for the export's download link and click it.
        for _ in xrange(30):
            self.check_timeout('Waiting for the export\'s download link.')
            try:
                if re.search(r"^Selenium_test_form_title_[\s\S]*$", driver.find_element_by_css_selector("#forms-table a").text): break
            except: pass
            time.sleep(1)
        else: self.fail("time out")
        driver.find_element_by_css_selector("#forms-table a").click()

    def is_element_present(self, how, what):
        # Probe for an element; implicit waits still apply.
        try: self.driver.find_element(by=how, value=what)
        except NoSuchElementException: return False
        return True

    def is_alert_present(self):
        try: self.driver.switch_to_alert()
        except NoAlertPresentException: return False
        return True

    def close_alert_and_get_its_text(self):
        # Accept or dismiss the current alert (per accept_next_alert)
        # and return its message text.
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally: self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
    # Allow running this module directly as a standalone test script.
    unittest.main()
|
ipython-contrib/IPython-notebook-extensions | setup.py | Python | bsd-3-clause | 4,617 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup script for jupyter_contrib_nbextensions."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
from __future__ import print_function
import os
from glob import glob
from setuptools import find_packages, setup
# -----------------------------------------------------------------------------
# main setup call
# -----------------------------------------------------------------------------
def main():
    """Declare the jupyter_contrib_nbextensions package via setuptools.

    All metadata, dependencies, entry points and scripts live in this
    single setup() call.
    """
    setup(
        name='jupyter_contrib_nbextensions',
        description="A collection of Jupyter nbextensions.",
        long_description="""
Contains a collection of extensions that add functionality to the Jupyter
notebook. These extensions are mostly written in Javascript, and are loaded
locally in the browser.

Read
`the documentation <https://jupyter-contrib-nbextensions.readthedocs.io>`_
for more information.

The
`jupyter-contrib repository <https://github.com/ipython-contrib/jupyter_contrib_nbextensions>`_
is maintained independently by a group of users and developers, and is not
officially related to the Jupyter development team.

The maturity of the provided extensions varies, so please check
`the repository issues page <https://github.com/ipython-contrib/jupyter_contrib_nbextensions/issues>`_
if you encounter any problems, and create a new issue if needed!
""",  # noqa: E501
        version='0.5.1',
        author='ipython-contrib and jupyter-contrib developers',
        author_email='jupytercontrib@gmail.com',
        url=('https://github.com/'
             'ipython-contrib/jupyter_contrib_nbextensions.git'),
        download_url=('https://github.com/'
                      'ipython-contrib/jupyter_contrib_nbextensions'
                      '/tarball/0.5.1'),
        keywords=['IPython', 'Jupyter', 'notebook'],
        license='BSD',
        platforms=['Any'],
        # src/ layout: packages and top-level modules live under src/.
        packages=find_packages('src'),
        package_dir={'': 'src'},
        include_package_data=True,
        py_modules=[
            os.path.splitext(os.path.basename(path))[0]
            for path in glob('src/*.py')
        ],
        install_requires=[
            'ipython_genutils',
            'jupyter_contrib_core >=0.3.3',
            'jupyter_core',
            'jupyter_highlight_selected_word >=0.1.1',
            'jupyter_latex_envs >=1.3.8',
            'jupyter_nbextensions_configurator >=0.4.0',
            'nbconvert >=4.2',
            'notebook >=4.0',
            'pyyaml',
            'tornado',
            'traitlets >=4.1',
            'lxml'
        ],
        extras_require={
            'test': [
                'nbformat',
                'nose',
                'pip',
                'requests',
            ],
            'test:python_version == "2.7"': [
                'mock',
            ],
        },
        # we can't be zip safe as we require templates etc to be accessible to
        # jupyter server
        zip_safe=False,
        entry_points={
            'console_scripts': [
                'jupyter-contrib-nbextension = jupyter_contrib_nbextensions.application:main',  # noqa: E501
            ],
            'jupyter_contrib_core.app.subcommands': [
                'nbextension = jupyter_contrib_nbextensions.application:jupyter_contrib_core_app_subcommands',  # noqa: E501
            ],
            # Custom nbconvert exporters shipped with the extensions.
            'nbconvert.exporters': [
                'html_toc = jupyter_contrib_nbextensions.nbconvert_support.toc2:TocExporter',  # noqa: E501
                'selectLanguage = jupyter_contrib_nbextensions.nbconvert_support.nbTranslate:NotebookLangExporter',  # noqa: E501
                'html_embed = jupyter_contrib_nbextensions.nbconvert_support.embedhtml:EmbedHTMLExporter',  # noqa: E501
                'html_ch = jupyter_contrib_nbextensions.nbconvert_support.collapsible_headings:ExporterCollapsibleHeadings',  # noqa: E501
            ],
        },
        scripts=[os.path.join('scripts', p) for p in [
            'jupyter-contrib-nbextension',
        ]],
        classifiers=[
            'Development Status :: 1 - Planning',
            'Intended Audience :: End Users/Desktop',
            'Intended Audience :: Science/Research',
            'License :: OSI Approved :: BSD License',
            'Natural Language :: English',
            'Operating System :: OS Independent',
            'Programming Language :: JavaScript',
            'Programming Language :: Python',
            'Topic :: Utilities',
        ],
    )
if __name__ == '__main__':
    # Standard setup.py entry point.
    main()
|
cynapse/cynin | products/CMFNotification/tests/testInstallation.py | Python | gpl-3.0 | 4,873 | 0.002257 | """Tests for CMFNotification installation ad uninstallation.
$Id: testInstallation.py 65679 2008-05-25 23:45:26Z dbaty $
"""
from zope.component import getUtility
from zope.component import getMultiAdapter
from AccessControl.PermissionRole import rolesForPermissionOn
from plone.portlets.interfaces import IPortletManager
from plone.portlets.interfaces import IPortletAssignmentMapping
from Products.CMFCore.utils import getToolByName
from Products.CMFNotification.config import LAYER_NAME
from Products.CMFNotification.config import PORTLET_NAME
from Products.CMFNotification.NotificationTool import ID as TOOL_ID
from Products.CMFNotification.permissions import SUBSCRIBE_PERMISSION
from Products.CMFNotification.tests.plonetestbrowser import Browser
from Products.CMFNotification.tests.base import CMFNotificationTestCase
class TestInstallation(CMFNotificationTestCase):
    """Make sure that the product is properly installed."""

    def afterSetUp(self):
        # Nothing extra to prepare; the base class installs the product.
        pass

    def testToolIsThere(self):
        # The notification tool must be registered in the portal.
        portal = self.portal
        tool = getToolByName(self.portal, TOOL_ID)
        self.failUnless(tool is not None)

    def testSkinLayerIsThere(self):
        # The skin layer must be present in every skin selection.
        stool = getToolByName(self.portal, 'portal_skins')
        for skin, layers in stool._getSelections().items():
            layers = layers.split(',')
            self.failUnless(LAYER_NAME in layers)
        self.failUnless(LAYER_NAME in stool.objectIds())

    def testPortletCanBeAdded(self):
        # The portlet must be addable in both columns and already
        # assigned in the right column.  (An unused ``base_url`` local
        # has been removed here.)
        for name in ('plone.leftcolumn', 'plone.rightcolumn'):
            manager = getUtility(IPortletManager,
                                 name=name,
                                 context=self.portal)
            titles = [p.title for p in manager.getAddablePortletTypes()]
            self.failUnless(PORTLET_NAME in titles)
        manager = getUtility(IPortletManager,
                             name='plone.rightcolumn',
                             context=self.portal)
        right_portlets = getMultiAdapter((self.portal, manager),
                                         IPortletAssignmentMapping,
                                         context=self.portal)
        right_portlets = right_portlets.keys()
        self.failUnless(PORTLET_NAME in right_portlets)

    def testPermissionHasBeenSet(self):
        # Subscription permission granted to Manager and Member only.
        roles = set(rolesForPermissionOn(SUBSCRIBE_PERMISSION, self.portal))
        self.failUnlessEqual(roles, set(('Manager', 'Member')))

    def testConfigletHasBeenAdded(self):
        cptool = getToolByName(self.portal, 'portal_controlpanel')
        configlets = [c.getId() for c in cptool.listActions()]
        self.failUnless('cmfnotification_configuration' in configlets)
class TestUnInstallation(CMFNotificationTestCase):
    """Test that the product has been properly uninstalled."""
    def afterSetUp(self):
        """Uninstall the product before running each test."""
        qtool = getToolByName(self.portal, 'portal_quickinstaller')
        self.setRoles(['Manager'])
        qtool.uninstallProducts(['CMFNotification'])
    def testToolIsNotThere(self):
        """The notification tool must be gone from the portal."""
        tool = getToolByName(self.portal, TOOL_ID, None)
        self.failUnless(tool is None)
    def testSkinLayerIsNotThere(self):
        """The skin layer must be removed from every skin selection."""
        stool = getToolByName(self.portal, 'portal_skins')
        for skin, layers in stool._getSelections().items():
            layers = layers.split(',')
            self.failUnless (LAYER_NAME not in layers)
        self.failUnless(LAYER_NAME not in stool.objectIds())
    def testPortletDoNoExist(self):
        """The portlet must no longer be addable nor assigned on the right column."""
        base_url = self.portal.absolute_url()
        for name in ('plone.leftcolumn', 'plone.rightcolumn'):
            manager = getUtility(IPortletManager,
                                 name=name,
                                 context=self.portal)
            titles = [p.title for p in manager.getAddablePortletTypes()]
            self.failUnless(PORTLET_NAME not in titles)
        manager = getUtility(IPortletManager,
                             name='plone.rightcolumn',
                             context=self.portal)
        right_portlets = getMultiAdapter((self.portal, manager),
                                         IPortletAssignmentMapping,
                                         context=self.portal)
        right_portlets = right_portlets.keys()
        self.failUnless(PORTLET_NAME not in right_portlets)
    def testConfigletDoNotExist(self):
        """The control-panel configlet must be unregistered."""
        cptool = getToolByName(self.portal, 'portal_controlpanel')
        configlets = [c.getId() for c in cptool.listActions()]
        self.failUnless('cmfnotification_configuration' not in configlets)
def test_suite():
    """Assemble the installation/uninstallation test cases into one suite."""
    from unittest import TestSuite, makeSuite
    suite = TestSuite()
    for case_class in (TestInstallation, TestUnInstallation):
        suite.addTest(makeSuite(case_class))
    return suite
|
luckielordie/conan | conans/client/tools/scm.py | Python | mit | 5,881 | 0.00204 | import os
import re
import subprocess
from six.moves.urllib.parse import urlparse, quote_plus
from subprocess import CalledProcessError, PIPE, STDOUT
from conans.client.tools.env import no_op, environment_append
from conans.client.tools.files import chdir
from conans.errors import ConanException
from conans.util.files import decode_text, to_file_bytes
class Git(object):
    """
    Thin wrapper around the ``git`` command line used by the scm feature.

    All commands run inside ``folder`` (created if missing) and, unless a
    custom ``runner`` callable is supplied, go through
    ``subprocess.check_output``.
    """

    def __init__(self, folder=None, verify_ssl=True, username=None, password=None,
                 force_english=True, runner=None):
        self.folder = folder or os.getcwd()
        if not os.path.exists(self.folder):
            os.makedirs(self.folder)
        self._verify_ssl = verify_ssl
        self._force_eng = force_english
        self._username = username
        self._password = password
        self._runner = runner

    def run(self, command):
        """Run ``git <command>`` in self.folder and return stripped stdout."""
        command = "git %s" % command
        with chdir(self.folder) if self.folder else no_op():
            # Force English output so text parsing (e.g. get_branch) is stable.
            # Fixed: a stray separator had corrupted this nested 'with' line.
            with environment_append({"LC_ALL": "en_US.UTF-8"}) if self._force_eng else no_op():
                if not self._runner:
                    return subprocess.check_output(command, shell=True).decode().strip()
                else:
                    return self._runner(command)

    def get_repo_root(self):
        """Return the top-level directory of the working tree."""
        return self.run("rev-parse --show-toplevel")

    def get_url_with_credentials(self, url):
        """Insert URL-encoded ``user:password@`` into *url*.

        Returns *url* unchanged when credentials were not configured or the
        URL already carries a password.
        """
        if not self._username or not self._password:
            return url
        if urlparse(url).password:
            return url
        user_enc = quote_plus(self._username)
        pwd_enc = quote_plus(self._password)
        url = url.replace("://", "://" + user_enc + ":" + pwd_enc + "@", 1)
        return url

    def _configure_ssl_verify(self):
        # Maps the boolean flag onto git's http.sslVerify config value.
        return self.run("config http.sslVerify %s" % ("true" if self._verify_ssl else "false"))

    def clone(self, url, branch=None):
        """Clone *url* into self.folder.

        When the destination is non-empty a branch is mandatory and the
        clone is emulated with init + fetch + ``checkout -t``.
        """
        url = self.get_url_with_credentials(url)
        if os.path.exists(url):
            url = url.replace("\\", "/")  # Windows local directory
        if os.path.exists(self.folder) and os.listdir(self.folder):
            if not branch:
                raise ConanException("The destination folder '%s' is not empty, "
                                     "specify a branch to checkout (not a tag or commit) "
                                     "or specify a 'subfolder' "
                                     "attribute in the 'scm'" % self.folder)
            output = self.run("init")
            output += self._configure_ssl_verify()
            output += self.run('remote add origin "%s"' % url)
            output += self.run("fetch ")
            output += self.run("checkout -t origin/%s" % branch)
        else:
            branch_cmd = "--branch %s" % branch if branch else ""
            output = self.run('clone "%s" . %s' % (url, branch_cmd))
            output += self._configure_ssl_verify()
        return output

    def checkout(self, element, submodule=None):
        """Checkout a tag/branch/commit; optionally init submodules
        ('shallow' or 'recursive')."""
        self._check_git_repo()
        output = self.run('checkout "%s"' % element)
        if submodule:
            if submodule == "shallow":
                output += self.run("submodule sync")
                output += self.run("submodule update --init")
            elif submodule == "recursive":
                output += self.run("submodule sync --recursive")
                output += self.run("submodule update --init --recursive")
            else:
                raise ConanException("Invalid 'submodule' attribute value in the 'scm'. "
                                     "Unknown value '%s'. Allowed values: ['shallow', 'recursive']" % submodule)
        # Element can be a tag, branch or commit
        return output

    def excluded_files(self):
        """Return the paths under self.folder that git ignores (via
        ``git check-ignore --stdin``); empty list on failure."""
        try:
            file_paths = [os.path.normpath(os.path.join(os.path.relpath(folder, self.folder), el)).replace("\\", "/")
                          for folder, dirpaths, fs in os.walk(self.folder)
                          for el in fs + dirpaths]
            p = subprocess.Popen(['git', 'check-ignore', '--stdin'],
                                 stdout=PIPE, stdin=PIPE, stderr=STDOUT, cwd=self.folder)
            paths = to_file_bytes("\n".join(file_paths))
            grep_stdout = decode_text(p.communicate(input=paths)[0])
            tmp = grep_stdout.splitlines()
        except CalledProcessError:
            tmp = []
        return tmp

    def get_remote_url(self, remote_name=None):
        """Return the URL of *remote_name* (default 'origin'), or None."""
        self._check_git_repo()
        remote_name = remote_name or "origin"
        try:
            remotes = self.run("remote -v")
            for remote in remotes.splitlines():
                try:
                    # Lines look like: "origin  https://host/repo (fetch)"
                    name, url = remote.split(None, 1)
                    url, _ = url.rsplit(None, 1)
                    if name == remote_name:
                        return url
                except Exception:
                    pass
        except subprocess.CalledProcessError:
            pass
        return None

    def get_commit(self):
        """Return the current HEAD commit hash."""
        self._check_git_repo()
        try:
            commit = self.run("rev-parse HEAD")
            commit = commit.strip()
            return commit
        except Exception as e:
            raise ConanException("Unable to get git commit from %s\n%s" % (self.folder, str(e)))

    get_revision = get_commit

    def _check_git_repo(self):
        # Cheap probe: any git command fails outside a working tree.
        try:
            self.run("status")
        except Exception:
            raise ConanException("Not a valid git repository")

    def get_branch(self):
        """Return the current local branch name parsed from ``status -bs``."""
        self._check_git_repo()
        try:
            status = self.run("status -bs --porcelain")
            # ## feature/scm_branch...myorigin/feature/scm_branch
            branch = status.splitlines()[0].split("...")[0].strip("#").strip()
            return branch
        except Exception as e:
            raise ConanException("Unable to get git branch from %s\n%s" % (self.folder, str(e)))
|
dex4er/django | tests/admin_views/urls.py | Python | bsd-3-clause | 617 | 0.001621 | from __future__ import absolute_import
# Fixed: stray dataset separators had split 'patterns' and the admindocs
# include() call across tokens.
from django.conf.urls import patterns, include

from . import views, customadmin, admin

# URL routes exercised by the admin_views test suite: one docs mount, one
# plain view, and several admin-site instances (default, custom, simple,
# and a second site).
urlpatterns = patterns('',
    (r'^test_admin/admin/doc/', include('django.contrib.admindocs.urls')),
    (r'^test_admin/admin/secure-view/$', views.secure_view),
    (r'^test_admin/admin/', include(admin.site.urls)),
    (r'^test_admin/admin2/', include(customadmin.site.urls)),
    (r'^test_admin/admin3/', include(admin.site.urls), dict(form_url='pony')),
    (r'^test_admin/admin4/', include(customadmin.simple_site.urls)),
    (r'^test_admin/admin5/', include(admin.site2.urls)),
)
|
pombredanne/metamorphosys-desktop | metamorphosys/META/analysis_tools/PYTHON_RICARDO/output_ingress_egress/scripts/ingress_egress.py | Python | mit | 19,842 | 0.005393 | """
Run test benches related to ingress/egress for a vehicle assembly model specified in settings.
"""
import sys
import os
import _winreg
from scipy.ndimage import measurements as meas
def query_analysis_tools():
    """
    Read the META AnalysisTools registry key and return a mapping of
    {tool_name: {value_name: value}} for every registered tool.
    """
    key_path = r'SOFTWARE\Wow6432Node\META\AnalysisTools'
    root_key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key_path, 0,
                               _winreg.KEY_READ | _winreg.KEY_WOW64_32KEY)
    info = {}
    num_sub_keys = _winreg.QueryInfoKey(root_key)[0]  # index 0: sub-key count
    for key_idx in range(0, num_sub_keys):
        tool_name = _winreg.EnumKey(root_key, key_idx)
        tool_key = _winreg.OpenKey(root_key, tool_name)
        values = {}
        num_values = _winreg.QueryInfoKey(tool_key)[1]  # index 1: value count
        for value_idx in range(0, num_values):
            value_name, value_data = _winreg.EnumValue(tool_key, value_idx)[:2]
            values[value_name] = value_data
        info[tool_name] = values
    return info
tools = query_analysis_tools()
sys.path.append(os.path.join(tools['ingress_egress']['InstallLocation']))
## Add the scripts folder itself to the python path.
#sys.path.append(os.path.split(__file__)[0])
import logging
import numpy as np
from rpl.tools.api import test_bench_api as tba
from rpl.tools.geometry import geom_utils
import uniform_grid, check_intersection, best_poses_at_position, exit_time_nx, escape_routes_ball
import data_io
from voxel_methods import Vehicle
def get_parts_of_interest():
    """
    Describe the datums/properties the test bench must fetch from the API.
    """
    # Both hatch types share the same query (aperture datum + all geometry).
    hatch_query = {"Datum": ["EXT_HULL_APERTURE"],
                   "Required": "{*}"}
    # TODO: ask for litter_open csys. TB should fail if there are no litters in model.
    return {
        "Manikin": {"Datum": ["SWRI_H_POINT", "BOOT_LEFT_CSYS"],
                    "Property": ["vehicle_role"]},
        "Hatch_Assembly_Rear_Ramp": hatch_query,
        "Hatch_Assembly_Personnel_Door": hatch_query,
    }
def get_parts_excluded():
    """
    Parts explicitly excluded from loaded geometry (e.g. no seated manikins
    while simulating troops exiting the vehicle).  Parameters and datums can
    still be requested for these parts.
    """
    excluded_parts = {'Manikin'}
    return excluded_parts
class Assembly_Info(object):
"""
Perform api calls and make vehicle/door/troop data available
"""
HULLS = {"Hull_Assembly_Parametric", 'Hull_Assembly_Example_With_Connector'}
DOORS = {'Hatch_Assembly_Rear_Ramp', 'Hatch_Assembly_Personnel_Door'}
HATCHES = {'Hatch_Assembly_Driver_Commander', 'Hatch_Assembly_Cargo'}
MANIKINS = {'Manikin'}
LITTERS = {'Litter_Open'}
# Special labels applied to specific types of voxels
VOXEL_LABELS = {2: HULLS,
4: DOORS,
8: HATCHES,
16: MANIKINS,
32: LITTERS}
    def __init__(self, settings_dict):
        # Fetch all requested datums/properties up front through the test
        # bench API; everything else below is a lazily-filled cache.
        self.settings_dict = settings_dict
        self.desired_params = tba.get_data(get_parts_of_interest())
        self._up_vector = None      # unit "up" vector (see get_vertical)
        self._tr_mat = None         # rotation to z-up (see get_vehicle_csys)
        self._vehicle_stl = None    # cached vehicle geometry (get_geometry)
        self._litter_stl = None     # cached litter geometry (get_litter)
        self._doors_xyz = None      # cached door points (get_doors_xyz)
        self.voxel_data = None
    def get_doors_xyz(self):
        """
        Provide the xyz coordinates for each door point found.

        Results are cached; raises AssertionError when the vehicle has no
        doors, because egress cannot be evaluated without one.
        """
        # iterate over self.doors and get ext_hull_aperture csys for each point; convert to 3d
        # coord vector
        if self._doors_xyz is None:
            self._doors_xyz = []
            for d in self.DOORS:
                # Get every door in every type
                for k in self.desired_params[d]:
                    # Convert 4x4 matrix to 3d point coord vector (last column of trans matrix)
                    tr_mat = self.desired_params[d][k]['datums']['EXT_HULL_APERTURE']['global']
                    pt = tr_mat[:-1, 3]
                    self._doors_xyz.append(pt)
            assert len(self._doors_xyz) > 0, "No doors found in vehicle; cannot perform egress test"
        return self._doors_xyz
def get_geometry(self):
"""
Get relevant geometry for vehicle
"""
if self._vehicle_stl is None:
all_geom = tba.get_all_geom_set()
desired_parts = (all_geom - get_parts_excluded()
- tba.geom_sets["never_in_cabin"]
- self.LITTERS
| self.DOORS)
self._vehicle_stl = tba.load_geometry(desired_parts,
single_file=False)
return self._vehicle_stl
def get_litter(self):
"""
Get geometry of just the litter
"""
if self._litter_stl is None:
self._litter_stl = tba.load_geometry(self.LITTERS,
single_file=True)
return self._litter_stl
    def get_vertical(self):
        """
        Return a vector corresponding to the up vector in the vehicle. Determined from calculated
        metrics file. Normalize it to unit length to be safe.
        """
        if self._up_vector is None:
            try:
                self._up_vector, _, _ = tba.get_up_vector()
                logging.debug("Used up vector based on ComputedValues.xml from GME assembly")
            except ValueError:
                # Fallback: assume +Y is up when the metrics file lacks the vector.
                wrn = """
                     WARNING: Requires a known vector pointing up. Hard-coded value is (0,1,0),
                     implying that the up direction lies along y axis.
                  """
                self._up_vector = np.array([0, 1, 0])
                logging.warning(wrn)
            # Normalize once on first computation; guards against a non-unit
            # vector coming back from the metrics file.
            self._up_vector /= np.linalg.norm(self._up_vector)
        return self._up_vector
def get_manikin_positions(self, datum_name="SWRI_H_POINT", veh_role=None):
"""
Process the test bench data to extract manikin positions.
Rotation should be handled by voxel_methods objects, so this outputs orig vehicle coords.
:param datum_name: Get positions based on a specific datum
:param veh_role: Get only manikins with a specific vehicle role
Common vehicle roles to narrow things down:
("driver", "gunner", "troop", "troop_commander", "vehicle_commander")
"""
manikins = | self.desired_params['Manikin']
mani_list = [{'name': k,
'vehicle_role': manikins[k]['properties']['vehicle_role'],
datum_name: manikins[k]['datums'][datum_name]['global']}
for k in manikins]
# API returns a 4x4 matrix marking a csys. The last column (translation) is manikin pos.
if veh_role is not None:
# | If vehicle role specified, filter list by role
return [m[datum_name][:-1, 3] for m in mani_list if m['vehicle_role'] == veh_role]
else:
return [m[datum_name][:-1, 3] for m in mani_list]
    def get_vehicle_csys(self):
        """
        Create a mock vehicle coordinate system based on the calculated properties/metrics file
        Represents the rotation that would be required to align vehicle csys with +z for occupied
        voxel grid. (no translation component)
        """
        if self._tr_mat is None:
            # Rotate around a vector mutually orthogonal to the current and desired "up" vectors
            upvec = self.get_vertical()
            rot_around = np.cross(upvec, np.array([0, 0, 1]))
            rot_ang = -np.arccos(upvec[2])
            # NOTE(review): rot_around is the zero vector when the vehicle is
            # already z-up; presumably geom_utils handles that case — confirm.
            self._tr_mat = geom_utils.rotation_about_vector(rot_around, rot_ang)
        return self._tr_mat
def voxelate_geometry(self):
"""
Perform voxeliza |
Kaezon/allianceauth | allianceauth/services/modules/openfire/views.py | Python | gpl-2.0 | 7,676 | 0.004039 | import datetime
import logging
from django.contrib import messages
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import Group
from django.shortcuts import render, redirect
from allianceauth.services.forms import ServicePasswordForm
from .forms import JabberBroadcastForm
from .manager import OpenfireManager, PingBotException
from .models import OpenfireUser
from .tasks import OpenfireTasks
logger = logging.getLogger(__name__)
ACCESS_PERM = 'openfire.access_openfire'
@login_required
@permission_required(ACCESS_PERM)
def activate_jabber(request):
    """Create a jabber account for the requesting user and show its credentials."""
    logger.debug("activate_jabber called by user %s" % request.user)
    character = request.user.profile.main_character
    logger.debug("Adding jabber user for user %s with main character %s" % (request.user, character))
    info = OpenfireManager.add_user(OpenfireTasks.get_username(request.user))
    # If our username is blank means we already had a user
    # Fixed: `is not ""` compared identity, not equality (SyntaxWarning on
    # modern Python and implementation-dependent behavior); use != instead.
    if info[0] != "":
        OpenfireUser.objects.update_or_create(user=request.user, defaults={'username': info[0]})
        logger.debug("Updated authserviceinfo for user %s with jabber credentials. Updating groups." % request.user)
        OpenfireTasks.update_groups.delay(request.user.pk)
        logger.info("Successfully activated jabber for user %s" % request.user)
        messages.success(request, 'Activated jabber account.')
        credentials = {
            'username': info[0],
            'password': info[1],
        }
        return render(request, 'services/service_credentials.html',
                      context={'credentials': credentials, 'service': 'Jabber'})
    else:
        logger.error("Unsuccessful attempt to activate jabber for user %s" % request.user)
        messages.error(request, 'An error occurred while processing your jabber account.')
        return redirect("services:services")
@login_required
@permission_required(ACCESS_PERM)
def deactivate_jabber(request):
    """Delete the requesting user's jabber account, then return to services."""
    logger.debug("deactivate_jabber called by user %s" % request.user)
    removed = OpenfireTasks.has_account(request.user) and OpenfireTasks.delete_user(request.user)
    if removed:
        logger.info("Successfully deactivated jabber for user %s" % request.user)
        messages.success(request, 'Deactivated jabber account.')
    else:
        logger.error("Unsuccessful attempt to deactivate jabber for user %s" % request.user)
        messages.error(request, 'An error occurred while processing your jabber account.')
    return redirect("services:services")
@login_required
@permission_required(ACCESS_PERM)
def reset_jabber_password(request):
    """Generate a fresh random jabber password and display it to the user."""
    logger.debug("reset_jabber_password called by user %s" % request.user)
    if OpenfireTasks.has_account(request.user):
        result = OpenfireManager.update_user_pass(request.user.openfire.username)
        # If our username is blank it means we failed
        # Fixed: a stray separator had corrupted the log string below.
        if result != "":
            logger.info("Successfully reset jabber password for user %s" % request.user)
            messages.success(request, 'Reset jabber password.')
            credentials = {
                'username': request.user.openfire.username,
                'password': result,
            }
            return render(request, 'services/service_credentials.html',
                          context={'credentials': credentials, 'service': 'Jabber'})
    logger.error("Unsuccessful attempt to reset jabber for user %s" % request.user)
    messages.error(request, 'An error occurred while processing your jabber account.')
    return redirect("services:services")
@login_required
@permission_required('auth.jabber_broadcast')
def jabber_broadcast_view(request):
    """Render the broadcast form and, on valid POST, send the jabber ping.

    Users with the jabber_broadcast_all permission may target 'all' or any
    group; other users may only target groups they belong to.
    """
    logger.debug("jabber_broadcast_view called by user %s" % request.user)
    allchoices = []
    # Build the (value, label) choices for the 'group' field based on perms.
    if request.user.has_perm('auth.jabber_broadcast_all'):
        allchoices.append(('all', 'all'))
        for g in Group.objects.all():
            allchoices.append((str(g.name), str(g.name)))
    else:
        for g in request.user.groups.all():
            allchoices.append((str(g.name), str(g.name)))
    if request.method == 'POST':
        form = JabberBroadcastForm(request.POST)
        # Choices must be injected before validation so the selection passes.
        form.fields['group'].choices = allchoices
        logger.debug("Received POST request containing form, valid: %s" % form.is_valid())
        if form.is_valid():
            main_char = request.user.profile.main_character
            logger.debug("Processing jabber broadcast for user %s with main character %s" % (request.user, main_char))
            try:
                # The sender's identity and a UTC timestamp are appended to
                # the message; senders without a main character are labeled.
                if main_char is not None:
                    message_to_send = form.cleaned_data[
                                          'message'] + "\n##### SENT BY: " + "[" + main_char.corporation_ticker + "]" + \
                                      main_char.character_name + " TO: " + \
                                      form.cleaned_data['group'] + " WHEN: " + datetime.datetime.utcnow().strftime(
                        "%Y-%m-%d %H:%M:%S") + " #####\n##### Replies are NOT monitored #####\n"
                    group_to_send = form.cleaned_data['group']
                else:
                    message_to_send = form.cleaned_data[
                                          'message'] + "\n##### SENT BY: " + "No character but can send pings?" + " TO: " + \
                                      form.cleaned_data['group'] + " WHEN: " + datetime.datetime.utcnow().strftime(
                        "%Y-%m-%d %H:%M:%S") + " #####\n##### Replies are NOT monitored #####\n"
                    group_to_send = form.cleaned_data['group']
                OpenfireManager.send_broadcast_message(group_to_send, message_to_send)
                messages.success(request, 'Sent jabber broadcast to %s' % group_to_send)
                logger.info("Sent jabber broadcast on behalf of user %s" % request.user)
            except PingBotException as e:
                messages.error(request, e)
    else:
        form = JabberBroadcastForm()
        form.fields['group'].choices = allchoices
        logger.debug("Generated broadcast form for user %s containing %s groups" % (
            request.user, len(form.fields['group'].choices)))
    context = {'form': form}
    return render(request, 'services/openfire/broadcast.html', context=context)
@login_required
@permission_required(ACCESS_PERM)
def set_jabber_password(request):
    """Let the user choose a custom jabber password via ServicePasswordForm."""
    logger.debug("set_jabber_password called by user %s" % request.user)
    if request.method == 'POST':
        logger.debug("Received POST request with form.")
        form = ServicePasswordForm(request.POST)
        logger.debug("Form is valid: %s" % form.is_valid())
        if form.is_valid() and OpenfireTasks.has_account(request.user):
            password = form.cleaned_data['password']
            logger.debug("Form contains password of length %s" % len(password))
            result = OpenfireManager.update_user_pass(request.user.openfire.username, password=password)
            # An empty result string signals failure in the Openfire manager.
            if result != "":
                logger.info("Successfully set jabber password for user %s" % request.user)
                messages.success(request, 'Set jabber password.')
            else:
                logger.error("Failed to install custom jabber password for user %s" % request.user)
                messages.error(request, 'An error occurred while processing your jabber account.')
            return redirect("services:services")
    else:
        logger.debug("Request is not type POST - providing empty form.")
        form = ServicePasswordForm()
    logger.debug("Rendering form for user %s" % request.user)
    context = {'form': form, 'service': 'Jabber'}
    return render(request, 'services/service_password.html', context=context)
|
amremam2004/pygtk | examples/gtk/scrollable.py | Python | lgpl-2.1 | 8,475 | 0.001416 | #
# Reimplementation of gtk.Layout in python
# Example on how to implement a scrollable container in python
#
# Johan Dahlin <johan@gnome.org>, 2006
#
# Requires PyGTK 2.8.0 or later
import pygtk
pygtk.require('2.0')
import gobject
import gtk
from gtk import gdk
class Child:
    # Record pairing a child widget with its fixed (x, y) position inside
    # the Layout's bin window.
    widget = None
    x = 0
    y = 0
def set_adjustment_upper(adj, upper, always_emit):
    """
    Set adj.upper, clamping adj.value so the visible page stays in range.

    Emits 'changed' when upper actually changed (or always_emit is set) and
    'value-changed' when the value had to be clamped.
    """
    changed = False
    value_changed = False

    # Largest permitted value: the page must still fit below the new upper.
    # (Renamed from 'min', which shadowed the builtin.)
    max_value = max(0.0, upper - adj.page_size)
    if upper != adj.upper:
        adj.upper = upper
        changed = True
    if adj.value > max_value:
        adj.value = max_value
        value_changed = True

    if changed or always_emit:
        adj.changed()
    if value_changed:
        adj.value_changed()
def new_adj():
    # Fresh all-zero adjustment: (value, lower, upper, step_inc, page_inc,
    # page_size) — upper/page get filled in later by do_size_allocate.
    return gtk.Adjustment(0.0, 0.0, 0.0,
                          0.0, 0.0, 0.0)
class Layout(gtk.Container):
    """
    Pure-python reimplementation of gtk.Layout: an "infinite" scrollable
    container whose children sit at fixed x/y offsets inside an off-screen
    bin window that the two scroll adjustments slide around.
    """
    # Fixed: stray dataset separators had corrupted the signal tuple below
    # and the first line of __init__.
    __gsignals__ = dict(set_scroll_adjustments=
                        (gobject.SIGNAL_RUN_LAST, None,
                         (gtk.Adjustment, gtk.Adjustment)))

    def __init__(self):
        self._children = []
        self._width = 100
        self._height = 100
        self._hadj = None
        self._vadj = None
        self._bin_window = None
        self._hadj_changed_id = -1
        self._vadj_changed_id = -1
        gtk.Container.__init__(self)
        if not self._hadj or not self._vadj:
            # NOTE(review): _set_adjustments takes (hadj, vadj) but the
            # arguments here are (vadj, hadj).  Harmless at init time (both
            # are fresh identical adjustments) but worth confirming.
            self._set_adjustments(self._vadj or new_adj(),
                                  self._hadj or new_adj())

    # Public API

    def put(self, widget, x=0, y=0):
        """Add *widget* at the fixed position (x, y)."""
        child = Child()
        child.widget = widget
        child.x = x
        child.y = y
        self._children.append(child)
        if self.flags() & gtk.REALIZED:
            widget.set_parent_window(self._bin_window)
        widget.set_parent(self)

    def set_size(self, width, height):
        """Set the size of the scrollable area and sync adjustments/window."""
        if self._width != width:
            self._width = width
        if self._height != height:
            self._height = height
        if self._hadj:
            set_adjustment_upper(self._hadj, self._width, False)
        if self._vadj:
            set_adjustment_upper(self._vadj, self._height, False)
        if self.flags() & gtk.REALIZED:
            self._bin_window.resize(max(width, self.allocation.width),
                                    max(height, self.allocation.height))

    # GtkWidget

    def do_realize(self):
        # Create the visible clipping window plus the (possibly larger)
        # bin window that actually holds the children.
        self.set_flags(gtk.REALIZED)
        self.window = gdk.Window(
            self.get_parent_window(),
            window_type=gdk.WINDOW_CHILD,
            x=self.allocation.x,
            y=self.allocation.y,
            width=self.allocation.width,
            height=self.allocation.height,
            wclass=gdk.INPUT_OUTPUT,
            colormap=self.get_colormap(),
            event_mask=gdk.VISIBILITY_NOTIFY_MASK)
        self.window.set_user_data(self)
        self._bin_window = gdk.Window(
            self.window,
            window_type=gdk.WINDOW_CHILD,
            x=int(-self._hadj.value),
            y=int(-self._vadj.value),
            width=max(self._width, self.allocation.width),
            height=max(self._height, self.allocation.height),
            colormap=self.get_colormap(),
            wclass=gdk.INPUT_OUTPUT,
            event_mask=(self.get_events() | gdk.EXPOSURE_MASK |
                        gdk.SCROLL_MASK))
        self._bin_window.set_user_data(self)
        self.set_style(self.style.attach(self.window))
        self.style.set_background(self.window, gtk.STATE_NORMAL)
        self.style.set_background(self._bin_window, gtk.STATE_NORMAL)
        for child in self._children:
            child.widget.set_parent_window(self._bin_window)
        self.queue_resize()

    def do_unrealize(self):
        self._bin_window.set_user_data(None)
        self._bin_window.destroy()
        self._bin_window = None
        gtk.Container.do_unrealize(self)

    def _do_style_set(self, style):
        # NOTE(review): the leading underscore means this is NOT the
        # GtkWidget do_style_set vfunc override — presumably disabled on
        # purpose; confirm before renaming.
        gtk.Widget.do_style_set(self, style)
        if self.flags() & gtk.REALIZED:
            self.style.set_background(self._bin_window, gtk.STATE_NORMAL)

    def do_expose_event(self, event):
        # Only the bin window carries child drawing; ignore other windows.
        if event.window != self._bin_window:
            return False
        gtk.Container.do_expose_event(self, event)
        return False

    def do_map(self):
        self.set_flags(gtk.MAPPED)
        for child in self._children:
            flags = child.widget.flags()
            if flags & gtk.VISIBLE:
                if not (flags & gtk.MAPPED):
                    child.widget.map()
        self._bin_window.show()
        self.window.show()

    def do_size_request(self, req):
        # A layout requests no size of its own; children are still queried
        # so their size requests are cached for allocation time.
        req.width = 0
        req.height = 0
        for child in self._children:
            child.widget.size_request()

    def do_size_allocate(self, allocation):
        self.allocation = allocation
        for child in self._children:
            self._allocate_child(child)
        if self.flags() & gtk.REALIZED:
            self.window.move_resize(*allocation)
            self._bin_window.resize(max(self._width, allocation.width),
                                    max(self._height, allocation.height))
        # Page geometry tracks the visible allocation; 0.9 leaves overlap
        # between successive page scrolls.
        self._hadj.page_size = allocation.width
        self._hadj.page_increment = allocation.width * 0.9
        self._hadj.lower = 0
        set_adjustment_upper(self._hadj,
                             max(allocation.width, self._width), True)
        self._vadj.page_size = allocation.height
        self._vadj.page_increment = allocation.height * 0.9
        self._vadj.lower = 0
        self._vadj.upper = max(allocation.height, self._height)
        set_adjustment_upper(self._vadj,
                             max(allocation.height, self._height), True)

    def do_set_scroll_adjustments(self, hadj, vadj):
        self._set_adjustments(hadj, vadj)

    # GtkContainer

    def do_forall(self, include_internals, callback, data):
        for child in self._children:
            callback(child.widget, data)

    def do_add(self, widget):
        self.put(widget)

    def do_remove(self, widget):
        child = self._get_child_from_widget(widget)
        self._children.remove(child)
        widget.unparent()

    # Private

    def _set_adjustments(self, hadj, vadj):
        # Replace either adjustment, reconnecting value-changed handlers and
        # scrolling once if anything actually changed.
        if not hadj and self._hadj:
            hadj = new_adj()
        if not vadj and self._vadj:
            vadj = new_adj()
        if self._hadj and self._hadj != hadj:
            self._hadj.disconnect(self._hadj_changed_id)
        if self._vadj and self._vadj != vadj:
            self._vadj.disconnect(self._vadj_changed_id)
        need_adjust = False
        if self._hadj != hadj:
            self._hadj = hadj
            set_adjustment_upper(hadj, self._width, False)
            self._hadj_changed_id = hadj.connect(
                "value-changed",
                self._adjustment_changed)
            need_adjust = True
        if self._vadj != vadj:
            self._vadj = vadj
            set_adjustment_upper(vadj, self._height, False)
            self._vadj_changed_id = vadj.connect(
                "value-changed",
                self._adjustment_changed)
            need_adjust = True
        if need_adjust and vadj and hadj:
            self._adjustment_changed()

    def _adjustment_changed(self, adj=None):
        # Scrolling == moving the bin window opposite to the adjustments.
        if self.flags() & gtk.REALIZED:
            self._bin_window.move(int(-self._hadj.value),
                                  int(-self._vadj.value))
            self._bin_window.process_updates(True)

    def _get_child_from_widget(self, widget):
        for child in self._children:
            if child.widget == widget:
                return child
        else:
            raise AssertionError

    def _allocate_child(self, child):
        allocation = gdk.Rectangle()
        allocation.x = child.x
        allocation.y = child.y
        req = child.widget.get_child_requisition()
        allocation.width = req[0]
        allocation.height = req[1]
        child.widget.size_allocate(allocation)
Layout.set_set_scroll_adjustments_signal('set-scroll-adjustments')
def main():
window = gtk.Window()
window.set_size_request(300, 300)
window.connect('delete-event', gtk.main_quit)
sw = gtk.Scroll |
dshpet/nure_TPDS | PrefixSum/PrefixSumParallelExtraNaive.py | Python | mit | 1,237 | 0.021827 | #!/usr/bin/env python
'''
Naive parallel algorithm of prefix sum
http://people.cs.vt.edu/yongcao/teaching/cs5234/spring2013/slides/Lect | ure10.pdf
'''
import threading
# look maybe multiprocessing lib
import TestFunction
test_data = [2,6,2,3,5]
'''
Generic sum function
'''
def accumulate(in_list, amount, out_list, out_index):
    """
    Sum the first *amount* elements of in_list and store the result at
    out_list[out_index].  Written as a thread target, so it reports its
    result through the mutable output list instead of a return value.
    """
    # Built-in sum over a slice replaces the manual index loop.
    out_list[out_index] = sum(in_list[:amount])
'''
What is prefix sum?
A = [2,6,2,3,5], then R = AllPrefixSum(A) = [0,2,8,10,13,18]
''' |
def prefixSum(num_list):
    """
    Compute inclusive prefix sums of num_list in parallel: out[i] is the
    sum of num_list[0..i].  One thread is spawned per element (the naive
    O(n^2)-work algorithm).

    Bug fix: the original passed ``target=accumulate(...)``, which CALLED
    the function immediately on the main thread and handed every Thread a
    target of None — the threads did no work.  The target must be passed
    uncalled, with its arguments supplied separately.
    """
    out = [0] * len(num_list)

    def worker(idx):
        # Each worker independently sums the prefix ending at idx.
        out[idx] = sum(num_list[:idx + 1])

    jobs = [threading.Thread(target=worker, args=(i,))
            for i in range(len(num_list))]
    for job in jobs:
        job.start()
    for job in jobs:
        job.join()
    return out
result = prefixSum(test_data)
# print(result)
# Benchmark the naive parallel prefix sum over (roughly) doubling input
# sizes.  Note 256 appears twice in the original sweep.
TestFunction.Test(prefixSum, 64)
TestFunction.Test(prefixSum, 128)
TestFunction.Test(prefixSum, 256)
TestFunction.Test(prefixSum, 256)
TestFunction.Test(prefixSum, 512)
TestFunction.Test(prefixSum, 1024)
TestFunction.Test(prefixSum, 2048)
TestFunction.Test(prefixSum, 4096)
TestFunction.Test(prefixSum, 8192)
mathemage/h2o-3 | h2o-py/tests/testdir_munging/pyunit_categories.py | Python | apache-2.0 | 456 | 0.015351 | import sys
sys.path.insert(1,"../ | ../")
import h2o
from tests import pyunit_utils
def pyunit_categories():
    """Check that the iris response column exposes the three species levels."""
    iris_frame = h2o.import_file(pyunit_utils.locate("smalldata/iris/iris.csv"))
    levels = iris_frame['C5'].categories()
    print(levels)
    expected = {'Iris-setosa', 'Iris-versicolor', 'Iris-virginica'}
    assert set(levels) == expected
# Fixed: a stray dataset separator had corrupted the else branch's call.
if __name__ == "__main__":
    pyunit_utils.standalone_test(pyunit_categories)
else:
    pyunit_categories()
google/embedding-tests | eval_word_embedding.py | Python | apache-2.0 | 2,379 | 0.004624 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl import app
import os
from data.commo | n import MODEL_DIR
from gensim.models import Word2Vec, FastText
from utils.word_utils import load_glove_model, load_tf_embedding
# Command-line flags selecting the embedding model and, for the DP-trained
# TF variant, the training hyper-parameters used to locate its checkpoint.
# Fixed: a stray dataset separator had corrupted the l2_norm_clip line.
flags.DEFINE_string('model', 'w2v', 'Word embedding model')
flags.DEFINE_float('noise_multiplier', 0.,
                   'Ratio of the standard deviation to the clipping norm')
flags.DEFINE_float('l2_norm_clip', 0., 'Clipping norm')
flags.DEFINE_integer('epoch', 4, 'Load model trained this epoch')
flags.DEFINE_integer('microbatches', 128, 'microbatches')
flags.DEFINE_integer('exp_id', 0, 'Experiment trial number')
flags.DEFINE_string('save_dir', os.path.join(MODEL_DIR, 'w2v'),
                    'Model directory for embedding model')
FLAGS = flags.FLAGS
def main(_):
    """Load the embedding model selected by the flags and run the Google
    analogy (questions-words) evaluation on it.

    Supported FLAGS.model values: 'ft' (FastText), 'w2v' (Word2Vec),
    'glove', and 'tfw2v' (TensorFlow-trained, possibly with DP noise).
    """
    emb_model = FLAGS.model
    save_dir = FLAGS.save_dir
    model_name = 'wiki9_{}_{}.model'.format(emb_model, FLAGS.exp_id)
    model_path = os.path.join(save_dir, model_name)
    if emb_model == 'ft':
        model = FastText.load(model_path)
    elif emb_model == 'w2v':
        model = Word2Vec.load(model_path)
    elif emb_model == 'glove':
        model = load_glove_model(model_path)
    elif emb_model == 'tfw2v':
        # TF embeddings are reconstructed from checkpoints, so the DP
        # training parameters are needed to locate the right file.
        model = load_tf_embedding(FLAGS.exp_id, save_dir=save_dir,
                                  epoch=FLAGS.epoch,
                                  noise_multiplier=FLAGS.noise_multiplier,
                                  l2_norm_clip=FLAGS.l2_norm_clip,
                                  microbatches=FLAGS.microbatches)
    else:
        raise ValueError('No such embedding model: {}'.format(emb_model))
    eval_data_path = './data/questions-words.txt'
    model.accuracy(eval_data_path)
if __name__ == '__main__':
app.run(main)
|
OSURoboticsClub/Rover2015 | software/ros-packages/motor_controller/scripts/motor_queue_proc.py | Python | mit | 2,620 | 0.035496 | #!/usr/bin/env python
import itertools
import time
import rospy
from std_msgs.msg import String
class QueueProc(object):
    """
    Consumes motor command strings from /motor/commands, queues the parsed
    actions, and publishes state changes on /motor/state_change, advancing
    through the queue each time the active action's timer expires.
    """
    def __init__(self):
        rospy.init_node('motor_queue_proc', anonymous=True)
        self.state_change = rospy.Publisher('/motor/state_change', String, queue_size=10)
        rospy.Subscriber("/motor/commands", String, self.parse_string)
        self.queue = []
        self.timer_class = TimeTimer
    #
    # Parses published strings and loads the next item into the queue
    #
    def parse_string(self, data):
        # Split into alternating alpha/numeric tokens, e.g. "f10r90" ->
        # ["f", "10", "r", "90"].
        commands = ["".join(x) for _, x in itertools.groupby(data.data, key=str.isdigit)]
        queue_start = len(self.queue)
        i = 0
        while i < len(commands):
            action = commands[i]
            if action == "flush":
                # "flush" carries no numeric argument; the original read
                # commands[i+1] first, raising IndexError when "flush" was
                # the final token.
                self.queue = []
                i += 1
                continue
            val = commands[i+1]
            if action == "f":
                self.queue.append(["forward", int(val)])
            elif action == "b":
                self.queue.append(["backward", int(val)])
            elif action == "r":
                # Normalize the angle; turns over 180 degrees go left the
                # short way around instead.
                rounded = int(val) % 360
                if rounded > 180:
                    rounded = 360 - rounded
                    self.queue.append(["left", rounded])
                else:
                    self.queue.append(["right", rounded])
            i += 2
        # If the queue was empty before this message, kick off processing.
        if queue_start == 0:
            self.timer_expire()
    #
    # changes the state and sets a timer based on the next item in the queue
    #
    def timer_expire(self):
        if len(self.queue) == 0:
            self.state_change.publish("stop")
            return
        nxt = self.queue.pop(0)
        # Turns get a 2-second settle-stop before the rotation begins.
        if (nxt[0] == "left" or nxt[0] == "right"):
            self.state_change.publish("stop")
            time.sleep(2)
        self.state_change.publish(nxt[0])
        # Fixed: a stray separator had corrupted the "right" literal below.
        tmer = self.timer_class(nxt[1], self.timer_expire, (nxt[0] == "left" or nxt[0] == "right"))
        tmer.start()
#
# Base timer interface
#
class GeneralTimer():
    """Degenerate timer: fires its callback immediately when started."""

    def __init__(self, distance, callback, is_angle):
        # distance and is_angle are accepted only for interface
        # compatibility with subclasses; the base class ignores them.
        self.callback = callback

    def start(self):
        # No delay at all.
        self.callback()

    def get_time(self):
        """Return the wait duration in seconds (always 0 here)."""
        return 0
#
# A time based timer
#
class TimeTimer(GeneralTimer):
    """Timer that fires the callback after an estimated travel time."""

    def __init__(self, distance, callback, is_angle):
        self.callback = callback
        # distance in m*10 (or degrees when is_angle is True)
        self.distance = distance
        self.is_angle = is_angle
        # meters per second
        self.mps = 1
        # amount of angles turned per second
        self.aps = 40

    def start(self):
        # One-shot ROS timer: calls timer_callback once after get_time() s.
        rospy.Timer(rospy.Duration(self.get_time()), self.timer_callback, True)

    def timer_callback(self, tEvent):
        self.callback()

    def get_time(self):
        """Return the estimated duration in seconds for this move."""
        if not self.is_angle:
            return float(self.distance) / (self.mps * 10)
        # float() mirrors the linear branch above; without it, Python 2
        # integer division truncated small turns (e.g. 30/40 -> 0 s timer).
        return float(self.distance) / self.aps
if __name__ == "__main__":
proc = QueueProc()
rospy.spin()
|
Arlefreak/ApiArlefreak | portfolio/migrations/0017_auto_20170606_0238.py | Python | mit | 546 | 0.001832 | # -*- coding: utf | -8 -*-
# Generated by Django 1.11.1 on 2017-06-06 02:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11 makemigrations: adds an explicit
    # on_delete=CASCADE to Link.category (required from Django 2.0 onward).

    dependencies = [
        ('portfolio', '0016_auto_20170519_1725'),
    ]

    operations = [
        migrations.AlterField(
            model_name='link',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='portfolio.LinkCategory'),
        ),
    ]
|
sahat/bokeh | examples/plotting/cloud/glucose.py | Python | bsd-3-clause | 1,424 | 0.007725 |
import pandas as pd
from bokeh.sampledata.glucose import data
from bokeh.plotting import *
output_cloud("glucose")
hold()
dates = data.index.to_series()
figure(x_axis_type="datetime", tools="pan,wheel_zoom,box_zoom,reset,previewsave")
line(dates, data['glucose'], color='red', legend='glucose')
line(dates, data['isig'], color='blue', legend='isig')
curplot().title = "Glucose Measurements"
day = data.ix['2010-10-06']
highs = day[day['glucose'] > 180]
lows = day[day['glucose'] < 80]
figure(x_axis_type="datetime", tools="pan,wheel_zoom,box_zoom,reset,previewsave")
| line(day.index.to_series(), day['glucose'],
line_color="gray", line_dash="4 4", line_width=1, legend="glucose")
scatter(highs.index.to_series(), highs['glucose'] | , size=6, color='tomato', legend="high")
scatter(lows.index.to_series(), lows['glucose'], size=6, color='navy', legend="low")
curplot().title = "Glucose Range"
xgrid()[0].grid_line_color=None
ygrid()[0].grid_line_alpha=0.5
data['inrange'] = (data['glucose'] < 180) & (data['glucose'] > 80)
window = 30.5*288 #288 is average number of samples in a month
inrange = pd.rolling_sum(data.inrange, window)
inrange = inrange.dropna()
inrange = inrange/float(window)
figure(x_axis_type="datetime", tools="pan,wheel_zoom,box_zoom,reset,previewsave")
line(inrange.index.to_series(), inrange, line_color="navy")
curplot().title = "Glucose In-Range Rolling Sum"
# open a browser
show()
|
madisona/django-image-helper | image_helper/tests/test_app/models.py | Python | bsd-3-clause | 216 | 0 | from django.db import models
from image_helper.fields import SizedImag | eField
class TestModel(models.Model):
    # Fixture model exercising SizedImageField: uploads land under
    # MEDIA_ROOT/test_images, resized to 220x150 with a 100x100 thumbnail.
    image = SizedImageField(
        upload_to='test_images', size=(220, 150), thumbnail_size=(100, 100))
|
Elico-Corp/odoo-addons | business_requirement_deliverable_project_task_mandatory/models/__init__.py | Python | agpl-3.0 | 203 | 0 | # -*- coding: utf-8 -*-
# © 2017 Elico Corp (www.elico-corp.com).
# License AGPL-3.0 or later (http://www.gnu.org/ | licenses/agpl.html).
from . import project_project_category
from . import project_ | task
|
forter/boten | test/test_botparse.py | Python | apache-2.0 | 2,177 | 0.000459 | from boten.core import BaseBot
import payloads
class TestBot(BaseBot):
    """Fixture bot exposing one command_* method per argument signature."""

    def command_no_arg_bot(self):
        # Takes no arguments at all.
        yield "hello"

    def command_arg_bot(self, user_name):
        # Requires exactly one positional argument.
        yield "hello {}".format(user_name)

    def command_optional_arg_bot(self, optional="default"):
        # Single optional argument with a default value.
        yield "hello {}".format(optional)

    def command_two_message_bot(self):
        # Emits two replies.
        yield "message1"
        yield "message2"

    def foo(self):
        # No command_ prefix, so it must not be exposed as a command.
        pass
def test_available_commands():
    """Only command_-prefixed methods are listed, with the prefix stripped."""
    available = TestBot({}).commands
    for expected in ("arg_bot", "no_arg_bot", "optional_arg_bot", "two_message_bot"):
        assert expected in available
    assert "foo" not in available
def test_arg_bot_with_arg():
    """The positional argument from the payload reaches the command."""
    replies = list(TestBot({}).run_command(payloads.arg_bot_with_arg))
    assert replies[0] == "hello derp"
def test_arg_bot_with_no_args():
    """Omitting a required argument produces the TypeError help message."""
    replies = list(TestBot({}).run_command(payloads.arg_bot_with_no_args))
    assert replies[0].startswith("Got TypeError")  # Help message
def test_no_arg_bot_without_arg():
    """A zero-argument command runs when invoked bare."""
    replies = list(TestBot({}).run_command(payloads.no_arg_bot_without_arg))
    assert replies[0] == "hello"
def test_no_arg_bot_with_arg():
    """Passing an argument to a zero-argument command triggers the help text."""
    replies = list(TestBot({}).run_command(payloads.no_arg_bot_with_arg))
    assert replies[0].startswith("Got TypeError")  # Help message
def test_optional_arg_bot_with_optional_arg():
    """An explicitly supplied optional argument overrides the default."""
    replies = list(TestBot({}).run_command(payloads.optional_arg_bot_with_optional_arg))
    assert replies[0] == 'hello derp'
def test_optional_arg_bot_with_no_arg():
    """When the optional argument is omitted, the default is used."""
    replies = list(TestBot({}).run_command(payloads.optional_arg_bot_with_no_arg))
    assert replies[0] == 'hello default'
def test_two_message_bot():
    """A command may yield several replies; all of them are delivered."""
    replies = list(TestBot({}).run_command(payloads.two_message_bot))
    assert len(replies) == 2
def test_help_subcommand():
    # NOTE(review): this is an exact duplicate of test_no_arg_bot_with_arg and
    # never sends a "help" payload - it presumably should use a dedicated help
    # fixture from payloads; confirm and update.
    bot = TestBot({})
    response = list(bot.run_command(payloads.no_arg_bot_with_arg))
    assert response[0].startswith("Got TypeError")  # Help message
|
ubivar/ubivar-python | setup.py | Python | mit | 2,448 | 0.006536 | import os
import sys
import warnings
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
try:
| from distutils. | command.build_py import build_py_2to3 as build_py
except ImportError:
from distutils.command.build_py import build_py
# Run from the directory containing this script so relative paths resolve.
path, script = os.path.split(sys.argv[0])
os.chdir(os.path.abspath(path))

install_requires = []

# Python 2.5 and older need a capped requests plus the external ssl backport.
if sys.version_info < (2, 6):
    warnings.warn(
        'Python 2.5 is not supported by Ubivar. If you have any question,'
        'please file an issue on Github or contact us at support@ubivar.com.',
        DeprecationWarning)
    install_requires.append('requests >= 0.8.8, < 0.10.1')
    install_requires.append('ssl')
else:
    install_requires.append('requests >= 0.8.8')

with open('LONG_DESCRIPTION.rst') as f:
    long_description = f.read()

# Don't import ubivar module here, since deps may not be installed
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'ubivar'))

# Get simplejson if we don't already have json
if sys.version_info < (3, 0):
    try:
        from util import json
    except ImportError:
        install_requires.append('simplejson')

# NOTE(review): use_2to3 was removed in setuptools 58; building this sdist
# requires an older setuptools - confirm the toolchain pin before upgrading.
setup(name='ubivar',
      cmdclass={'build_py': build_py},
      version='0.9',
      description='Ubivar python bindings',
      author='Ubivar',
      author_email='support@ubivar.com',
      url='http://github.com/ubivar/ubivar-python',
      packages=['ubivar','ubivar.test','ubivar.test.resources'],
      package_data={'ubivar': ['data/ca-certificates.crt']},
      install_requires=install_requires,
      test_suite='ubivar.test.all',
      tests_require=['unittest2', 'mock'],
      use_2to3=True,
      classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        # "Programming Language :: Python",
        # "Programming Language :: Python :: 2",
        # "Programming Language :: Python :: 2.6",
        # "Programming Language :: Python :: 2.7",
        # "Programming Language :: Python :: 3",
        # "Programming Language :: Python :: 3.3",
        # "Programming Language :: Python :: 3.4",
        # "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: Libraries :: Python Modules"
      ])
|
jdob/bonkbot | plugins/ops.py | Python | gpl-2.0 | 720 | 0 | #!/usr/bin/python
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, inc | luding the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from bonkbot.bot.decorators import join
def init_plugin(config, irc_client):
    """Plugin entry point: return the list of command handlers to register."""
    # config and irc_client are unused; only the `give` handler is exposed.
    handlers = [give]
    return handlers
@join
def give(message):
    """Grant channel operator status to whoever issued the command."""
    requester = message.author()
    # Don't try to op yourself
    if requester == message.config.get('bot', 'nick'):
        return
    # channel()[1:] drops the leading character - presumably the '#'
    # channel prefix; confirm against the IRC client API.
    message.irc_client.give_ops(message.channel()[1:], message.author())
|
nathangeffen/tbonline-old | tbonlineproject/gallery/admin.py | Python | mit | 1,624 | 0.009852 | '''Admin interface registers Image model with admin.site.
'''
from django.contrib import admin
from django.contrib.contenttypes import generic
from django.forms.widgets import HiddenInput
from django.db.models import PositiveIntegerField
from credit.admin import OrderedCreditInline
from gallery.models import Gallery, Image
from archive.admin import TaggedItemInline
class ImageInline(admin.TabularInline):
    """Inline editor for a gallery's ordered images (the Gallery.images
    through table), with drag-and-drop ordering on the hidden position field."""
    # Fixed: ('collapse open') was a plain string - the trailing comma was
    # missing - while InlineModelAdmin.classes expects a list/tuple of
    # CSS class strings.
    classes = ('collapse open',)
    model = Gallery.images.through
    extra = 0
    raw_id_fields = ('image',)
    formfield_overrides = {
        # Hide the ordering field; Grappelli drives it via drag-and-drop.
        PositiveIntegerField: {'widget': HiddenInput},
    }
    sortable_field_name = 'position'
class GalleryAdmin(admin.ModelAdmin):
    """Admin for galleries: searchable, title editable from the list view,
    with tags and member images edited inline."""
    search_fields = ('title','description',)
    list_display = ('id', 'title',)
    list_editable = ('title',)
    inlines = [TaggedItemInline, ImageInline,]

    class Media:
        # Load TinyMCE so rich-text fields get a WYSIWYG editor.
        js = [
            'grappelli/tinymce/jscripts/tiny_mce/tiny_mce.js',
            'grappelli/tinymce_setup/tinymce_setup.js',
        ]
class ImageAdmin(admin.ModelAdmin):
    """Admin for individual images, with thumbnail preview in the list view
    and slug auto-filled from the title."""
    search_fields = ('title','caption','description')
    list_display = ('id', 'title', 'image_thumbnail','file','date_added', 'last_modified')
    list_editable = ('title',)
    prepopulated_fields = {"slug": ("title",)}
    inlines = [TaggedItemInline, OrderedCreditInline, ]

    class Media:
        # Load TinyMCE so rich-text fields get a WYSIWYG editor.
        js = [
            'grappelli/tinymce/jscripts/tiny_mce/tiny_mce.js',
            'grappelli/tinymce_setup/tinymce_setup.js',
        ]
admin.site.register(Image, ImageAdmi | n)
admin.site.register(Gallery, GalleryAdmin)
|
johnwiseheart/HangoutsBot | hangupsbot/plugins/image_links.py | Python | gpl-3.0 | 2,109 | 0.003793 | """
Identify images, upload them to google plus, pos | t in hangouts
"""
import aiohttp, asyncio, io, logging, os, re
import plugins
logger = logging.getLogger(__name__)
def _initialise():
    """Plugin-loader hook: watch every chat message for image links."""
    plugins.register_handler(_watch_image_link, type="message")
@asyncio.coroutine
def _watch_image_link(bot, event, command):
    """Detect bare image URLs in chat, download them, and re-post them as
    native image attachments in the same conversation."""
    # Don't handle events caused by the bot himself
    if event.user.is_self:
        return

    if " " in event.text:
        """immediately reject anything with spaces, must be a link"""
        return

    probable_image_link = False

    event_text_lower = event.text.lower()

    if re.match("^(https?://)?([a-z0-9.]*?\.)?imgur.com/", event_text_lower, re.IGNORECASE):
        """imgur links can be supplied with/without protocol and extension"""
        probable_image_link = True

    elif event_text_lower.startswith(("http://", "https://")) and event_text_lower.endswith((".png", ".gif", ".gifv", ".jpg")):
        """other image links must have protocol and end with valid extension"""
        probable_image_link = True

    if probable_image_link and "googleusercontent" in event_text_lower:
        """reject links posted by google to prevent endless attachment loop"""
        logger.debug("rejected link {} with googleusercontent".format(event.text))
        return

    if probable_image_link:
        link_image = event.text

        if "imgur.com" in link_image:
            """special imgur link handling"""
            if not link_image.endswith((".jpg", ".gif", "gifv", "png")):
                link_image = link_image + ".gif"
            # Fetch from the direct image host; swap .gifv for .gif since
            # gifv is an HTML wrapper, not an image.
            link_image = "https://i.imgur.com/" + os.path.basename(link_image)
            link_image = link_image.replace(".gifv",".gif")

        logger.info("getting {}".format(link_image))
        filename = os.path.basename(link_image)

        # Download, upload to the chat service, then send an image-only message.
        r = yield from aiohttp.request('get', link_image)
        raw = yield from r.read()
        image_data = io.BytesIO(raw)
        image_id = yield from bot._client.upload_image(image_data, filename=filename)
        yield from bot.coro_send_message(event.conv.id_, None, image_id=image_id)
|
auag92/n2dm | Asap-3.8.4/Test/testMonteCarloEMT.py | Python | mit | 1,391 | 0.007189 | print
print "This test runs Verlet dynamics with the MonteCarloEMT potential instead"
print "of the usual EMT potential. The result must be the same, but the"
print "performance will be slightly worse."
print
from Asap import *
from Asap.Dynamics.VelocityVerlet import VelocityVerlet
from cPickle import *
from Numeric import *
from Asap.testtools import ReportTest
PrintVersion(1)
data = | load(file("testVerlet.pickle"))
init_pos = array(data["initial"])
init_pos.shape = (-1,3)
init_box = array(data["box"])
init_box.shape = (3,3)
atoms = ListOfAtoms(positions=init_pos, cell=init_box)
atoms.SetAtomicNumbers(47)
atoms.SetCa | lculator(MonteCarloEMT())
dyn = VelocityVerlet(atoms, 2 * femtosecond)
for i in range(20):
dyn.Run(10)
epot = atoms.GetPotentialEnergy() / len(atoms)
ekin = atoms.GetKineticEnergy() / len(atoms)
print "E_pot = %-12.5f E_kin = %-12.5f E_tot = %-12.5f" % (epot, ekin,
epot+ekin)
final_pos = array(data["final"])
diff = max(abs(atoms.GetCartesianPositions().flat - final_pos))
print "Maximal deviation of positions:", diff
ReportTest("Maximal deviation of positions", diff, 0, 1e-9)
diff = max(abs(atoms.GetStresses().flat - array(data["stress"])))
print "Maximal deviation of stresses:", diff
ReportTest("Maximal deviation of stresses", diff, 0, 1e-9)
ReportTest.Summary()
|
xcgspring/dependence_repo | manager/dependence_repo_manager/ui/resources_rc.py | Python | apache-2.0 | 443,165 | 0.000011 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed Jun 17 16:47:37 2015
# by: The Resource Compiler for PyQt (Qt v4.8.6)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x14\x89\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x01\x71\x00\x00\x01\x90\x08\x04\x00\x00\x00\xf2\x96\x98\x08\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\
\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x02\
\x62\x4b\x47\x44\x00\x00\xaa\x8d\x23\x32\x00\x00\x00\x09\x70\x48\
\x59\x73\x00\x00\x01\x2c\x00\x00\x01\x2c\x00\x73\x88\xe9\x52\x00\
\x00\x11\x87\x49\x44\x41\x54\x78\xda\xed\xdd\xcb\x6f\x5f\xe5\x9d\
\xc7\xf1\xb7\x3d\xc4\x0e\x24\xe4\x22\xb5\x49\x48\x9c\xae\x20\x40\
\x02\x52\x01\x89\x09\xcd\x05\xb1\xa6\xa4\x10\xe2\x94\x19\x69\x66\
\x45\x4b\xcb\x3d\x2c\xca\x2c\xdb\xd2\x4b\x80\x40\xa9\x68\x17\xd3\
\x2e\x4a\x0b\x6d\xb9\xc4\x6e\x05\xfc\x01\xa3\x9a\xeb\x14\x08\x90\
\x36\x91\x3a\xcb\x8a\x4a\xa5\x03\xa9\x21\x81\x38\x61\xe4\x59\x38\
\x37\xd3\xe3\xdf\xe3\xc7\x3e\xe7\x7c\x9f\xef\xf7\x7c\x5e\xcf\x2e\
\x8b\xf8\xfc\xce\xf9\xc8\xfa\xbd\x6d\xc7\x01\x69\xcf\x42\x6e\x62\
\x92\x9b\x58\x68\x7d\x21\x22\xf5\x5b\xce\x3d\x4c\x9e\x3a\xf7\xb0\
\xdc\xfa\x82\x44\xea\xb3\x96\x47\xce\x98\xf7\xc9\xf3\x08\x6b\xad\
\x2f\x4c\x64\xfe\x2e\xe5\xd9\x8a\x79\x9f\x3c\xcf\x72\xa9\xf5\x05\
\x8a\xcc\x55\x3f\xd7\x70\xb0\xc7\xbc\x4f\x9e\x83\x5c\x43\xbf\xf5\
\xc5\x8a\xe4\x99\x0a\xcb\x9c\xa3\x08\x15\x37\xa6\x87\x65\xce\x51\
\x84\x4a\xf1\xaa\xc3\x32\xe7\x28\x42\xa5\x58\xbd\xc3\x32\xe7\x28\
\x42\xa5\x30\xb3\x0d\xcb\x9c\xa3\x08\x95\x42\xe4\x87\xa5\x22\x54\
\xdc\x98\x7b\x58\x2a\x42\xa5\x78\xf3\x0f\x4b\x45\xa8\x14\xab\xbe\
\xb0\x54\x84\x4a\x61\x9a\x08\xcb\x9c\xa3\x08\x95\x06\x35\x1b\x96\
\x39\x47\x11\x2a\xb5\x6b\x27\x2c\x73\x8e\x22\x54\x6a\xd3\x6e\x58\
\xe6\x1c\x45\xa8\xcc\x9b\x4d\x58\xe6\x1c\x45\xa8\xcc\x91\x75\x58\
\xe6\x1c\x45\xa8\x64\x2a\x27\x2c\x73\x8e\x22\x54\x66\xa5\xbc\xb0\
\xcc\x39\x8a\x50\xe9\xa9\xdc\xb0\xcc\x39\x8a\x50\xa9\x54\x7e\x58\
\xe6\x1c\x45\xa8\x9c\xc1\x53\x58\xe6\x1c\x45\xa8\xe0\x35\x2c\x73\
\x8e\x22\xb4\xc3\x7c\x87\x65\xce\x51\x84\x76\x50\x8c\xb0\xcc\x39\
\x8a\xd0\x0e\x89\x15\x96\x39\x47\x11\x1a\x5e\xd4\xb0\xcc\x39\x8a\
\xd0\xb0\xe2\x87\x65\xce\x51\x84\x06\xd3\x9d\xb0\xcc\x39\x8a\xd0\
\x20\xba\x17\x96\x39\x47\x11\xea\x5c\x77\xc3\x32\xe7\x28\x42\x5d\
\x52\x58\xe6\x1d\x45\xa8\x2b\x0a\xcb\xb9\x1e\x45\xa8\x03\x0a\xcb\
\xf9\x1e\x45\x68\xc1\x7c\x84\xe5\x5d\xe6\x57\x90\x3e\x8a\xd0\x02\
\x79\x08\xcb\x63\xec\x60\x10\x18\x64\x98\x63\xe6\x57\x93\x3a\x8a\
\xd0\x62\xf8\x08\xcb\xb7\xd8\x3a\x2d\xe7\xfa\xd9\xca\xdb\xe6\x57\
\x95\x3a\x8a\x50\x73\x3e\xc2\x72\x84\x0d\x33\x5c\xff\x06\x46\xcc\
\xaf\x2e\x7d\x14\xa1\x46\x7c\x84\xe5\x1e\xd6\x24\x5e\xc7\x1a\xf6\
\x98\x5f\x65\xfa\x28\x42\x5b\xe6\x23\x2c\xef\x64\xe9\x2c\x5f\xcf\
\x52\x45\xa8\x9c\xe6\x21\x2c\x27\x4e\x84\x65\x0e\x45\xa8\x38\x0d\
\xcb\xbc\x57\xa8\x08\xed\x2c\x1f\x61\xb9\x77\xc6\xb0\xcc\xa1\x08\
\xed\x1c\x1f\x61\xf9\x60\x32\x2c\x73\x28\x42\x3b\x23\x5a\x58\xe6\
\x50\x84\x86\xe7\x21\x2c\x8f\xb2\x83\x81\x06\xef\x81\x22\x34\x28\
\x1f\x61\xf9\x26\x5b\xe8\x6b\xe5\x6e\x28\x42\x43\xf1\x12\x96\xeb\
\x5b\xbe\x2f\x1b\x18\x35\x7f\xd5\xe9\xa3\x08\x4d\xe8\x62\x58\xe6\
\x18\x52\x84\x7a\xe6\x23\x2c\xef\x68\x24\x2c\x73\x28\x42\x5d\xf2\
\x11\x96\x37\x36\x1a\x96\x39\x14\xa1\x8e\x28\x2c\xe7\x7e\xe7\x14\
\xa1\xc5\xf3\x11\x96\xcf\xb4\x1e\x96\x39\x14\xa1\xc5\xf2\x11\x96\
\x0f\x98\x85\x65\x8e\x21\x1e\x32\xbf\x5 | 3\xe9\xd3\xa9\x0 | 8\x55\x58\
\xd6\x4f\x11\x5a\x0c\x0f\x61\xf9\x71\x41\x61\x99\x43\x11\x6a\xcc\
\x47\x58\xee\x2b\x30\x2c\xf3\xee\xb2\x22\xd4\x84\xc2\xb2\x5d\x8a\
\xd0\x56\x79\x09\xcb\xd5\xd6\x37\xaa\x66\x8a\xd0\x56\x28\x2c\x6d\
\x29\x42\x1b\xe5\x21\x2c\x3f\x72\x1a\x96\x39\x06\x19\xe6\xb8\xf9\
\x9d\x4e\x1d\x67\x11\xea\x25\x2c\x37\xbb\x0e\xcb\xbc\x27\xb2\x95\
\xfd\xe6\x77\x3c\x75\x9c\x44\xa8\x8f\xb0\x7c\x3a\x4c\x58\xe6\x50\
\x84\xce\x9b\x8f\xb0\xbc\x3f\x5c\x58\xe6\x50\x84\xce\x99\x8f\xb0\
\xbc\x9d\x25\xd6\x37\xaa\x00\xcb\x14\xa1\xb9\x7c\x84\xe5\xf6\xf0\
\x61\x99\x43\x11\x3a\x4b\x3e\xc2\xf2\x8d\x0e\x85\x65\x0e\x45\x68\
\x82\x97\xb0\xbc\xd8\x7a\x49\x85\x53\x84\x56\x52\x58\xc6\xa2\x08\
\x9d\x46\x61\x19\x93\x22\x14\xf0\x11\x96\x47\x14\x96\x73\xd6\xe9\
\x08\xf5\x11\x96\xaf\x2b\x2c\xe7\xad\x9f\xab\xbb\x17\xa1\x3e\xc2\
\xf2\x29\x85\x65\x8d\x3a\x14\xa1\x0a\xcb\xee\xea\x40\x84\xfa\x08\
\xcb\xdb\x14\x96\x0d\x0a\x1c\xa1\x0a\x4b\x39\x29\x5c\x84\x7a\x09\
\xcb\x4d\x0a\xcb\x16\x85\x89\x50\x85\xa5\xf4\xe2\x3c\x42\x7d\x84\
\xe5\x6e\x85\xa5\x31\xa7\x11\xaa\xb0\x94\x1c\xcb\xb8\xdb\x7c\x0d\
\xe9\x73\x2a\x42\x97\x3b\x08\xcb\xc3\xdc\xa0\xb0\x2c\x8c\x97\x08\
\x5d\x0e\x43\xe6\x97\xd1\xfb\xbc\xa6\xb0\x2c\x96\x87\x08\x1d\x2a\
\x7b\xe2\x4f\x72\x91\xf5\x53\x94\xa4\x4b\x8a\x8e\xd0\x82\x27\xbe\
\x9b\xf3\xac\x9f\x9d\xcc\x5a\xb9\x11\x5a\xe8\xc4\x15\x96\x1e\x95\
\x19\xa1\xc5\x4d\xfc\x43\x85\xa5\x6b\xe5\x45\x68\x51\x13\x57\x58\
\xc6\x50\x56\x84\x16\x33\x71\x85\x65\x34\x97\xf0\x1b\xf3\x55\x15\
\x33\xf1\xef\x2b\x2c\x83\x2a\x21\x42\xcd\x27\x7e\xab\xc2\x32\x38\
\xeb\x08\x1d\x72\xf0\x6b\x12\x45\xe6\x47\x6f\x54\xa4\x29\x7a\xa3\
\x72\xc6\x51\x6e\x46\xa3\xdc\xac\x38\xfa\xa2\x61\x0c\xfa\xa2\x61\
\xcf\xa3\x9f\x29\xf4\x6d\x90\x9d\x7c\x62\xbe\xa2\x69\x13\x2f\x2d\
\x37\x17\x31\xca\x84\xbe\x81\xef\xd2\x32\xee\xe6\x28\x4f\xf1\x4f\
\xd6\x17\xf2\x69\x65\x7d\x16\x3f\x7d\xf4\x63\x58\x9e\x94\x10\x96\
\x33\x7c\x16\x2f\x77\xe2\x93\x4c\xf2\xa4\xfe\x95\xa6\x03\xa5\x84\
\xa5\xcb\x89\x4f\xa2\x7f\x6b\x5f\xb2\xb2\xc2\xd2\xed\xc4\x27\x51\
\x84\x96\xa8\xbc\xb0\x9c\x61\xe2\xa5\xe5\x66\x35\x45\x68\x59\x8a\
\x0d\xcb\x6a\x1e\x3e\x8b\x9f\x3e\xfa\x25\x13\xd6\xca\x0d\xcb\x19\
\x3e\x8b\x7b\x9b\xf8\x24\xfa\x55\x41\x76\xca\x0e\xcb\x40\x13\x9f\
\x44\xbf\x49\xbc\x6d\x1e\xc2\x72\x86\x89\xfb\x78\x2f\xfe\x8f\x2e\
\x67\x8c\xc3\xfa\xb5\x9d\xad\x18\x64\x27\xc7\xf8\x2f\x2e\xb1\xbe\
\x90\xb9\xf1\x3a\x71\x80\x73\x18\x61\x42\xff\xab\x4f\xa3\x9c\x85\
\x65\x35\x9f\x6f\x54\xa6\x1f\xfd\x0a\xfd\x26\x0c\xf1\xb0\xf9\x93\
\xad\xe1\x8d\x4a\x8c\x89\x4f\xa2\xff\x61\xb3\x5e\x1e\xc3\x72\x86\
\x89\x7b\x7e\xa3\x32\xdd\x30\x07\xf4\xff\x24\xd7\x60\x2a\x2c\xf7\
\x73\xbd\xf5\x85\xd4\xf7\x82\x22\xb9\x8c\x31\xfd\xdf\x11\xf3\xe0\
\x3c\x2c\xab\xc5\x9a\x38\xc0\xd9\x8a\xd0\x39\x09\x11\x96\x55\xe2\
\x4d\x7c\xca\xa3\x8c\xf3\x80\x22\x74\x96\x86\x78\x98\x43\xfc\xc0\
\xfa\x32\x9a\x7b\x79\xd6\x41\xd0\x6c\x84\xae\xb7\xbe\xc1\x85\x8b\
\x13\x96\xe1\x73\xb3\xda\x30\x7f\x64\x9f\x22\xb4\x52\xb8\xb0\x9c\
\xe9\x65\xc6\xf7\x79\xc6\xf8\x88\x1b\x15\xa1\x67\x08\x19\x96\xd5\
\xea\x9a\xf8\x6a\x76\x5b\xbf\x94\x9e\x16\xb2\x97\x09\xee\x60\xa9\
\xf5\x85\x14\xc0\x47\x58\x7e\xaf\xbe\x7f\xd8\x58\xcf\x7b\x71\x80\
\x25\xdc\x6a\xfe\xce\x2b\x7d\xba\x1d\xa1\x3e\xbe\x63\xf9\x75\xce\
\x05\xa8\xe7\xbd\x78\x9d\x13\x07\x18\xe0\x7a\x3e\x30\xbf\x45\xa9\
\xf3\x4c\x27\x23\xd4\x43\x58\x8e\xf3\x25\x16\x9c\xba\xe2\x22\x27\
\x0e\xd0\xc7\x17\xf8\xbd\xf9\xcd\x4a\x9d\x37\xd9\xd2\x99\x08\xf5\
\xf1\xa3\xb0\xaf\x72\xd5\xa7\x9e\x48\xb1\x13\x9f\x72\x11\xbf\x36\
\xbf\x69\xa9\xf3\x71\x07\x22\xd4\xc7\xbf\xb1\xfc\x25\x17\x56\x5c\
\x7b\xe1\x13\x07\x38\x8f\xef\x99\xdf\xbc\xf4\x89\x1b\xa1\xd6\xbf\
\xf8\x78\x76\xe7\xbb\xac\x9a\xe1\xfa\x1d\x4c\x1c\xe0\x5c\xbe\x6e\
\x7e\x13\xd3\xe7\x41\xd6\x58\xef\xb1\x66\x3e\xc2\xf2\x6b\x27\xc2\
\xd2\xf5\xc4\x01\x16\x70\x3d\xe3\xe6\xb7\x33\x75\xf6\x86\x89\x50\
\x0f\x61\xf9\xf7\x69\x61\xe9\x7e\xe2\x30\x15\xa1\xff\x6d\x7e\x63\
\x53\xc7\x7b\x84\x7a\x0d\xcb\x10\x13\x9f\x72\x21\xbf\x32\xbf\xc1\
\xa9\x73\x94\x1d\x2e\x23\xd4\x73\x58\x06\x9a\x38\x78\x89\xd0\x3b\
\x5d\x45\xa8\x8f\xb0\xfc\xce\x8c\x61\x19\x6c\xe2\xa0\x08\xad\x53\
\x84\xb0\x0c\x38\x71\x80\x05\x7c\xc9\x45\x84\x6e\xb0\xde\x70\x0f\
\x1e\xc2\xf2\x10\xdb\x92\x61\x19\x74\xe2\x00\x7d\x5c\xc5\xab\xe6\
\x0f\x21\x75\xde\x62\x6b\x71\x3f\x93\xe9\x23\x2c\x5f\x99\x65\x58\
\x06\x9e\xf8\x94\x0b\xf9\xa5\xf9\xc3\x48\x9d\x09\x76\x30\x68\xbd\
\xeb\x13\x7c\x84\xe5\x13\x19\x61\x19\x7e\xe2\x00\xab\xf8\xae\xf9\
\x43\x49\x1f\xfb\x08\x8d\x19\x96\x9d\x98\x38\xc0\xb9\x7c\xcd\xfc\
\xe1\xa4\xcf\x1e\xb3\x08\xf5\x11\x96\xb7\xcc\x21\x2c\x3b\x33\x71\
\x98\x8a\xd0\xbf\x9b\x3f\xa6\xd4\x19\x69\x3d\x42\x3d\x84 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.0/Lib/lib2to3/fixes/fix_urllib.py | Python | mit | 7,255 | 0.000276 | """Fix changes imports of urllib which are now incompatible.
This is rather similar to fix_imports, but because of the more
complex nature of the fixing for urllib, | it has its own fixer.
"""
# Author: Nick Edds
# Local imports
from .fix_imports import alternates, FixImport | s
from .. import fixer_base
from ..fixer_util import Name, Comma, FromImport, Newline, attr_chain
# Maps each Python 2 module to the Python 3 modules its members moved to.
# Each entry is a (new_module, [member names found there]) pair; order
# matters because transform_import emits the replacements in this order.
# Fixed member-name typos: 'pahtname2url' -> 'pathname2url',
# 'HTTPDigestAuthHander' -> 'HTTPDigestAuthHandler', and the Python 2
# spellings 'URLopener'/'FancyURLopener' (lowercase "o").
MAPPING = {'urllib':  [
                ('urllib.request',
                    ['URLopener', 'FancyURLopener', 'urlretrieve',
                     '_urlopener', 'urlcleanup']),
                ('urllib.parse',
                    ['quote', 'quote_plus', 'unquote', 'unquote_plus',
                     'urlencode', 'pathname2url', 'url2pathname']),
                ('urllib.error',
                    ['ContentTooShortError'])],
           'urllib2' : [
                ('urllib.request',
                    ['urlopen', 'install_opener', 'build_opener',
                     'Request', 'OpenerDirector', 'BaseHandler',
                     'HTTPDefaultErrorHandler', 'HTTPRedirectHandler',
                     'HTTPCookieProcessor', 'ProxyHandler',
                     'HTTPPasswordMgr',
                     'HTTPPasswordMgrWithDefaultRealm',
                     'AbstractBasicAuthHandler',
                     'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
                     'AbstractDigestAuthHandler',
                     'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
                     'HTTPHandler', 'HTTPSHandler', 'FileHandler',
                     'FTPHandler', 'CacheFTPHandler',
                     'UnknownHandler']),
                ('urllib.error',
                    ['URLError', 'HTTPError'])],
}
# def alternates(members):
# return "(" + "|".join(map(repr, members)) + ")"
def build_pattern():
    """Yield lib2to3 match patterns for every way a mapped urllib/urllib2
    name can appear: plain import, from-import (single, aliased, or list),
    star import, aliased module import, and dotted attribute access.

    (Removed the unused local ``bare = set()`` that was never read.)
    """
    for old_module, changes in MAPPING.items():
        for change in changes:
            new_module, members = change
            members = alternates(members)
            yield """import_name< 'import' (module=%r
                                  | dotted_as_names< any* module=%r any* >) >
                  """ % (old_module, old_module)
            yield """import_from< 'from' mod_member=%r 'import'
                       ( member=%s | import_as_name< member=%s 'as' any > |
                         import_as_names< members=any*  >) >
                  """ % (old_module, members, members)
            yield """import_from< 'from' module_star=%r 'import' star='*' >
                  """ % old_module
            yield """import_name< 'import'
                                  dotted_as_name< module_as=%r 'as' any > >
                  """ % old_module
            yield """power< module_dot=%r trailer< '.' member=%s > any* >
                  """ % (old_module, members)
class FixUrllib(FixImports):
    # 2to3 fixer rewriting Python 2 urllib/urllib2 usage to the split
    # Python 3 urllib.* modules, driven by the module-level MAPPING.

    def build_pattern(self):
        # Join every generated alternative into a single fixer pattern.
        return "|".join(build_pattern())

    def transform_import(self, node, results):
        """Transform for the basic import case. Replaces the old
           import name with a comma separated list of its
           replacements.
        """
        import_mod = results.get('module')
        pref = import_mod.get_prefix()

        names = []

        # create a Node list of the replacement modules
        for name in MAPPING[import_mod.value][:-1]:
            names.extend([Name(name[0], prefix=pref), Comma()])
        names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref))
        import_mod.replace(names)

    def transform_member(self, node, results):
        """Transform for imports of specific module elements. Replaces
           the module to be imported from with the appropriate new
           module.
        """
        mod_member = results.get('mod_member')
        pref = mod_member.get_prefix()
        member = results.get('member')

        # Simple case with only a single member being imported
        if member:
            # this may be a list of length one, or just a node
            if isinstance(member, list):
                member = member[0]
            new_name = None
            # Look up which new module now provides this member.
            for change in MAPPING[mod_member.value]:
                if member.value in change[1]:
                    new_name = change[0]
                    break
            if new_name:
                mod_member.replace(Name(new_name, prefix=pref))
            else:
                self.cannot_convert(node,
                                    'This is an invalid module element')

        # Multiple members being imported
        else:
            # a dictionary for replacements, order matters
            modules = []
            mod_dict = {}
            members = results.get('members')
            for member in members:
                member = member.value
                # we only care about the actual members
                if member != ',':
                    for change in MAPPING[mod_member.value]:
                        if member in change[1]:
                            if change[0] in mod_dict:
                                mod_dict[change[0]].append(member)
                            else:
                                mod_dict[change[0]] = [member]
                                # First sighting of this module: record it so
                                # generated imports come out in stable order.
                                modules.append(change[0])

            new_nodes = []
            # One from-import node per destination module.
            for module in modules:
                elts = mod_dict[module]
                names = []
                for elt in elts[:-1]:
                    names.extend([Name(elt, prefix=pref), Comma()])
                names.append(Name(elts[-1], prefix=pref))
                new_nodes.append(FromImport(module, names))
            if new_nodes:
                nodes = []
                # Separate the generated imports with newlines.
                for new_node in new_nodes[:-1]:
                    nodes.extend([new_node, Newline()])
                nodes.append(new_nodes[-1])
                node.replace(nodes)
            else:
                self.cannot_convert(node, 'All module elements are invalid')

    def transform_dot(self, node, results):
        """Transform for calls to module members in code."""
        module_dot = results.get('module_dot')
        member = results.get('member')
        # this may be a list of length one, or just a node
        if isinstance(member, list):
            member = member[0]
        new_name = None
        for change in MAPPING[module_dot.value]:
            if member.value in change[1]:
                new_name = change[0]
                break
        if new_name:
            module_dot.replace(Name(new_name,
                                    prefix=module_dot.get_prefix()))
        else:
            self.cannot_convert(node, 'This is an invalid module element')

    def transform(self, node, results):
        # Dispatch on which named pattern alternative matched.
        if results.get('module'):
            self.transform_import(node, results)
        elif results.get('mod_member'):
            self.transform_member(node, results)
        elif results.get('module_dot'):
            self.transform_dot(node, results)
        # Renaming and star imports are not supported for these modules.
        elif results.get('module_star'):
            self.cannot_convert(node, 'Cannot handle star imports.')
        elif results.get('module_as'):
            self.cannot_convert(node, 'This module is now multiple modules')
|
zadgroup/edx-platform | openedx/core/lib/logsettings.py | Python | agpl-3.0 | 5,765 | 0 | """Get log settings."""
import os
import platform
import sys
from logging.handlers import SysLogHandler
# Accepted values for the *_loglevel arguments, most to least verbose.
LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
def get_logger_config(log_dir,
                      logging_env="no_env",
                      tracking_filename="tracking.log",
                      edx_filename="edx.log",
                      dev_env=False,
                      syslog_addr=None,
                      debug=False,
                      local_loglevel='INFO',
                      console_loglevel=None,
                      service_variant=None):
    """
    Return the appropriate logging config dictionary. You should assign the
    result of this to the LOGGING var in your settings. The reason it's done
    this way instead of registering directly is because I didn't want to worry
    about resetting the logging state if this is called multiple times when
    settings are extended.

    If dev_env is set to true logging will not be done via local rsyslogd,
    instead, tracking and application logs will be dropped in log_dir.

    "tracking_filename" and "edx_filename" are ignored unless dev_env
    is set to true since otherwise logging is handled by rsyslogd.
    """

    # Revert to INFO if an invalid string is passed in
    if local_loglevel not in LOG_LEVELS:
        local_loglevel = 'INFO'

    if console_loglevel is None or console_loglevel not in LOG_LEVELS:
        console_loglevel = 'DEBUG' if debug else 'INFO'

    if service_variant is None:
        # default to a blank string so that if SERVICE_VARIANT is not
        # set we will not log to a sub directory
        service_variant = ''

    # Short hostname (domain stripped) tags every syslog line.
    hostname = platform.node().split(".")[0]
    syslog_format = ("[service_variant={service_variant}]"
                     "[%(name)s][env:{logging_env}] %(levelname)s "
                     "[{hostname}  %(process)d] [%(filename)s:%(lineno)d] "
                     "- %(message)s").format(service_variant=service_variant,
                                             logging_env=logging_env,
                                             hostname=hostname)

    # Everything goes to the console and the local handler; a remote
    # syslogger is added only when an address is supplied.
    handlers = ['console', 'local']
    if syslog_addr:
        handlers.append('syslogger-remote')

    logger_config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'standard': {
                'format': '%(asctime)s %(levelname)s %(process)d '
                          '[%(name)s] %(filename)s:%(lineno)d - %(message)s',
            },
            'syslog_format': {'format': syslog_format},
            'raw': {'format': '%(message)s'},
        },
        'filters': {
            'require_debug_false': {
                '()': 'django.utils.log.RequireDebugFalse',
            }
        },
        'handlers': {
            'console': {
                'level': console_loglevel,
                'class': 'logging.StreamHandler',
                'formatter': 'standard',
                'stream': sys.stderr,
            },
            'mail_admins': {
                'level': 'ERROR',
                'filters': ['require_debug_false'],
                'class': 'django.utils.log.AdminEmailHandler'
            },
            'newrelic': {
                'level': 'ERROR',
                'class': 'lms.lib.newrelic_logging.NewRelicHandler',
                'formatter': 'raw',
            }
        },
        'loggers': {
            # The tracking logger bypasses the root handlers entirely.
            'tracking': {
                'handlers': ['tracking'],
                'level': 'DEBUG',
                'propagate': False,
            },
            '': {
                'handlers': handlers,
                'level': 'DEBUG',
                'propagate': False
            },
            'django.request': {
                'handlers': ['mail_admins'],
                'level': 'ERROR',
                'propagate': True,
            },
        }
    }

    if syslog_addr:
        logger_config['handlers'].update({
            'syslogger-remote': {
                'level': 'INFO',
                'class': 'logging.handlers.SysLogHandler',
                'address': syslog_addr,
                'formatter': 'syslog_format',
            },
        })

    if dev_env:
        # Development: rotate plain files in log_dir instead of syslog.
        tracking_file_loc = os.path.join(log_dir, tracking_filename)
        edx_file_loc = os.path.join(log_dir, edx_filename)
        logger_config['handlers'].update({
            'local': {
                'class': 'logging.handlers.RotatingFileHandler',
                'level': local_loglevel,
                'formatter': 'standard',
                'filename': edx_file_loc,
                'maxBytes': 1024 * 1024 * 2,
                'backupCount': 5,
            },
            'tracking': {
                'level': 'DEBUG',
                'class': 'logging.handlers.RotatingFileHandler',
                'filename': tracking_file_loc,
                'formatter': 'raw',
                'maxBytes': 1024 * 1024 * 2,
                'backupCount': 5,
            },
        })
    else:
        # for production environments we will only
        # log INFO and up
        logger_config['loggers']['']['level'] = 'INFO'
        # Production: hand everything to the local rsyslogd via /dev/log.
        logger_config['handlers'].update({
            'local': {
                'level': local_loglevel,
                'class': 'logging.handlers.SysLogHandler',
                'address': '/dev/log',
                'formatter': 'syslog_format',
                'facility': SysLogHandler.LOG_LOCAL0,
            },
            'tracking': {
                'level': 'DEBUG',
                'class': 'logging.handlers.SysLogHandler',
                'address': '/dev/log',
                'facility': SysLogHandler.LOG_LOCAL1,
                'formatter': 'raw',
            },
        })

    return logger_config
|
motmot/wxglvideo | motmot/wxglvideo/wxglvideo.py | Python | bsd-3-clause | 4,761 | 0.008402 | import wx
import wx.glcanvas
import pyglet
import pyglet.gl as gl
import pyglet.gl
import motmot.wxvideo.wxvideo as wxvideo
# XXX TODO:
# check off-by-one error in width/coordinate settings (e.g. glOrtho call)
# allow sharing of OpenGL context between instances
# Custom wx event type: posted by the grab thread and bound to OnDraw below.
NewImageReadyEvent = wx.NewEventType() # use to trigger GUI thread action from grab thread
class PygWxContext:
    # Minimal adapter letting pyglet treat a wx GLCanvas as its GL context.
    # NOTE(review): pokes pyglet private state (_contexts, _current_context) -
    # tested only on pyglet 1.0/1.1 per the comments below.
    _gl_begin = False
    _workaround_unpack_row_length = False
    def __init__(self, glcanvas ):
        # glcanvas is instance of wx.glcanvas.GLCanvas
        self.glcanvas = glcanvas
        # Register with pyglet so its GL state tracking knows this context.
        pyglet.gl._contexts.append( self )
    def SetCurrent(self):
        self.glcanvas.GetParent().Show()
        if pyglet.version[:3] >= '1.1':
            # tested on 1.1beta1
            pyglet.gl.current_context = self
        else:
            # tested on 1.0
            pyglet.gl._current_context = self
        self.glcanvas.SetCurrent()
class DynamicImageCanvas(wx.glcanvas.GLCanvas):
    """Display image data to OpenGL using as few resources as possible"""
    def _setcurrent(self,hack_ok=True):
        # Delegate to the pyglet/wx context wrapper.
        self.wxcontext.SetCurrent()
    def __init__(self, *args, **kw):
        # Supply default GL attributes unless the caller provided some.
        attribList = kw.get('attribList',None)
        if attribList is None:
            attribList = [
                wx.glcanvas.WX_GL_RGBA,
                wx.glcanvas.WX_GL_DOUBLEBUFFER, # force double buffering
                wx.glcanvas.WX_GL_DEPTH_SIZE, 16,]
            kw['attribList']=attribList
        super(DynamicImageCanvas, self).__init__(*args,**kw)
        self.init = False
        # OnDraw runs in the GUI thread when a grab thread posts
        # a NewImageReadyEvent (see update_image).
        self.Connect( -1, -1, NewImageReadyEvent, self.OnDraw )
        # Display-orientation flags (see the set_* methods below).
        self.flip_lr = False
        self.fullcanvas = False
        self.rotate_180 = False
        wx.EVT_ERASE_BACKGROUND(self, self.OnEraseBackground)
        wx.EVT_SIZE(self, self.OnSize)
        wx.EVT_PAINT(self, self.OnPaint)
        self._pygimage = None
        self.wxcontext = PygWxContext( self )
        self.wxcontext.SetCurrent()
    def OnEraseBackground(self, event):
        pass # Do nothing, to avoid flashing on MSW. (inherited from wxDemo)
    def set_flip_LR(self,value):
        self.flip_lr = value
        self._reset_projection()
    set_flip_LR.__doc__ = wxvideo.DynamicImageCanvas.set_flip_LR.__doc__
    def set_fullcanvas(self,value):
        # When True the projection matches the image size, not the window.
        self.fullcanvas = value
        self._reset_projection()
    def set_rotate_180(self,value):
        self.rotate_180 = value
        self._reset_projection()
    set_rotate_180.__doc__ = wxvideo.DynamicImageCanvas.set_rotate_180.__doc__
    def OnSize(self, event):
        size = self.GetClientSize()
        if self.GetContext():
            self.wxcontext.SetCurrent()
            gl.glViewport(0, 0, size.width, size.height)
        event.Skip()
    def OnPaint(self, event):
        dc = wx.PaintDC(self)  # required by wx even though unused
        self.wxcontext.SetCurrent()
        if not self.init:
            self.InitGL()
            self.init = True
        self.OnDraw()
    def InitGL(self):
        self.wxcontext.SetCurrent()
        gl.glEnable(gl.GL_BLEND)
        gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
        self._reset_projection()
        self.extra_initgl()
    def extra_initgl(self):
        # Hook for subclasses to run additional GL setup.
        pass
    def _reset_projection(self):
        # Choose ortho projection extents from either the image size
        # (fullcanvas mode) or the widget's client size.
        if self.fullcanvas:
            if self._pygimage is None:
                return
            width, height = self._pygimage.width, self._pygimage.height
        else:
            size = self.GetClientSize()
            width, height = size.width, size.height
        b = 0
        t = height
        # Mirroring/rotation is done by swapping projection edges rather
        # than transforming the image data.
        if self.flip_lr:
            l = width
            r = 0
        else:
            l = 0
            r = width
        if self.rotate_180:
            l,r=r,l
            b,t=t,b
        if width==0 or height==0:
            # prevent OpenGL error
            return
        self.wxcontext.SetCurrent()
        gl.glMatrixMode(gl.GL_PROJECTION)
        gl.glLoadIdentity()
        gl.glOrtho(l,r,b,t, -1, 1)
        gl.glMatrixMode(gl.GL_MODELVIEW)
    def new_image(self, image):
        self._pygimage = image
        self._reset_projection() # always trigger re-calculation of projection - necessary if self.fullcanvas
    def update_image(self, image):
        """update the image to be displayed"""
        self.wxcontext.SetCurrent()
        self._pygimage.view_new_array( image )
        # Hand off to the GUI thread; OnDraw runs when the event arrives.
        event = wx.CommandEvent(NewImageReadyEvent)
        event.SetEventObject(self)
        wx.PostEvent(self, event)
    def core_draw(self):
        if self._pygimage is not None:
            self._pygimage.blit(0, 0, 0)
    def OnDraw(self,event=None):
        self.wxcontext.SetCurrent()
        gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
        self.core_draw()
        self.SwapBuffers()
|
vasco-santos/CertificationService | certModule/CertAuthority.py | Python | mit | 6,803 | 0.001617 | # -*- coding: utf-8 -*-
"""
This module provides the necessary methods for a Certification Authority.
For creating the self signed Certificate for the CA, use the following command:
$ openssl req -x509 -newkey rsa:2048 -keyout ca_priv.pem -out ca_cert.pem
@author: Vasco Santos
"""
import time
from M2Crypto import X509, RSA, EVP, BIO, ASN1
class CertificationAuthority(object):
    """ Class responsible for keeping the CA self-signed certificate,
    as well as, its private key.

    Certificates and public keys exchanged through this API travel as
    hex-encoded strings; signatures and ciphertext travel base64-encoded.

    NOTE(review): signing and verification use SHA-1 throughout, which is
    considered weak for certificate signatures -- confirm whether SHA-256
    can be adopted without breaking existing peers.
    """
    def __init__(self, cert, priv_key, passphrase):
        """ Create a Certification Authority Object.
        Arguments:
            cert: file system path of the CA's self-signed certificate.
            priv_key: file system path of the CA's private key (encrypted).
            passphrase: Symmetric key for priv_key decryption.
        """
        def getPassphrase(*args):
            """ Callback for private key decrypting.
            """
            return str(passphrase.encode('utf-8'))
        self.cert = X509.load_cert(cert.encode('utf-8'))
        self.priv_key = RSA.load_key(priv_key.encode('utf-8'), getPassphrase)
        # Private key for signing
        self.signEVP = EVP.PKey()
        self.signEVP.assign_rsa(self.priv_key)
    def createSignedCertificate(self, subj_id, pub_key, expiration_time):
        """ Create a certificate for a subject public key, signed by the CA.
        Arguments:
            subj_id: certificate subject identifier.
            pub_key: public key of the subject (hex-encoded PEM).
            expiration_time: certificate life time in minutes.
        Returns:
            Certificate in PEM Format (hex-encoded).
        """
        # Public Key to certificate
        bio = BIO.MemoryBuffer(str(pub_key.decode('hex')))
        pub_key = RSA.load_pub_key_bio(bio)
        pkey = EVP.PKey()
        pkey.assign_rsa(pub_key)
        # Certificate Fields
        cur_time = ASN1.ASN1_UTCTIME()
        cur_time.set_time(int(time.time()))
        expire_time = ASN1.ASN1_UTCTIME()
        expire_time.set_time(int(time.time()) + expiration_time * 60) # In expiration time minutes
        # Certification Creation
        cert = X509.X509()
        cert.set_pubkey(pkey)
        s_name = X509.X509_Name()
        s_name.C = "PT"
        s_name.CN = str(subj_id)
        cert.set_subject(s_name)
        i_name = X509.X509_Name()
        i_name.C = "PT"
        i_name.CN = "Register Server"
        cert.set_issuer_name(i_name)
        cert.set_not_before(cur_time)
        cert.set_not_after(expire_time)
        cert.sign(self.signEVP, md="sha1")
        #cert.save_pem("peer_CA.pem")
        return cert.as_pem().encode('hex')
    def decryptData(self, data):
        """ Decrypt the intended data with the entity private key.
        Arguments:
            data: data to be decrypted (base64-encoded).
        """
        return self.priv_key.private_decrypt(data.decode('base64'), RSA.pkcs1_padding)
    def encryptData(self, data, certificate):
        """ Encrypt the intended data with the public key contained in the certificate.
        Arguments:
            data: data to be encrypted.
            certificate: subject certificate (hex-encoded PEM).
        """
        cert = X509.load_cert_string(certificate.decode('hex'))
        return cert.get_pubkey().get_rsa().public_encrypt(str(data), RSA.pkcs1_padding).encode('base64')
    def getPublicKey(self):
        """ Get the CA Public Key.
        Returns:
            CA Public Key in PEM Format (hex-encoded).
        """
        return self.cert.get_pubkey().get_rsa().as_pem().encode('hex')
    def signData(self, data):
        """ Sign a received String.
        Arguments:
            data: string to sign.
        Returns:
            signature of the received data (base64-encoded).
        """
        # Sign the SHA-1 digest of the data with the CA private key.
        msgDigest = EVP.MessageDigest('sha1')
        msgDigest.update(str(data))
        self.signEVP.sign_init()
        self.signEVP.sign_update(msgDigest.digest())
        return self.signEVP.sign_final().encode('base64')
    def signEncryptedData(self, cipherData):
        """ Sign encrypted data.
        Arguments:
            cipherData: data encrypted (base64 format).
        """
        # Same as signData but digests the raw ciphertext bytes.
        msgDigest = EVP.MessageDigest('sha1')
        msgDigest.update(cipherData.decode('base64'))
        self.signEVP.sign_init()
        self.signEVP.sign_update(msgDigest.digest())
        return self.signEVP.sign_final().encode('base64')
    def validCertificate(self, certificate):
        """ Verify if a certificate of a subject was issued by this CA.
        Arguments:
            certificate: subject certificate (hex-encoded PEM).
        Returns:
            true if the certificate was issued by this CA. false otherwise.
        """
        cert = X509.load_cert_string(certificate.decode('hex'))
        # Data Analysis
        # Subject confirmation: signature must verify with the CA key.
        return cert.verify(self.cert.get_pubkey())
    def validSelfSignedCertificate(self):
        """ Verify if the self-signed CA certificate was not corrupted.
        Returns:
            true if the self signed certificate is valid, false otherwise.
        """
        return self.cert.check_ca() and self.cert.verify(self.cert.get_pubkey())
    def validSignedData(self, data, signature, certificate):
        """ Verify if the received data was signed by the owner of the certificate.
        Arguments:
            data: received data.
            signature: digital signature of the data (base64-encoded).
            certificate: certificate of the data issuer (hex-encoded PEM).
        Returns:
            true if the data maintains its integrity, false otherwise.
        """
        msgDigest = EVP.MessageDigest('sha1')
        msgDigest.update(str(data))
        # Verify with the public key taken from the issuer certificate.
        pub_key = X509.load_cert_string(certificate.decode('hex')).get_pubkey().get_rsa()
        verifyEVP = EVP.PKey()
        verifyEVP.assign_rsa(pub_key)
        verifyEVP.verify_init()
        verifyEVP.verify_update(msgDigest.digest())
        return verifyEVP.verify_final(str(signature.decode('base64')))
    def validSignedEncryptedData(self, cipherData, signature, certificate):
        """ Verify if the received data was signed by the owner of the certificate.
        Arguments:
            cipherData: data encrypted (base64 format).
            signature: digital signature of the data (base64-encoded).
            certificate: certificate of the data issuer (hex-encoded PEM).
        Returns:
            true if the data maintains its integrity, false otherwise.
        """
        # NOTE(review): near-duplicate of validSignedData; differs only in
        # digesting decoded ciphertext -- candidate for consolidation.
        msgDigest = EVP.MessageDigest('sha1')
        msgDigest.update(cipherData.decode('base64'))
        pub_key = X509.load_cert_string(certificate.decode('hex')).get_pubkey().get_rsa()
        verifyEVP = EVP.PKey()
        verifyEVP.assign_rsa(pub_key)
        verifyEVP.verify_init()
        verifyEVP.verify_update(msgDigest.digest())
        return verifyEVP.verify_final(str(signature.decode('base64')))
|
resulto/django-celery-fulldbresult | django_celery_fulldbresult/__init__.py | Python | bsd-3-clause | 2,722 | 0 | from __future__ import absolute_import, unicode_literals
from celery import current_app
from celery.states import PENDING
from celery.app.task import Context, Task
from celery.signals import before_task_publish
from django_celery_fulldbresult.errors import SchedulingStopPublishing
from django.conf import settings
from django.utils.timezone import now
schedule_eta = getattr(
settings, "DJANGO_CELERY_FULLDBRESULT_SCHEDULE_ETA", False)
track_publish = getattr(
settings, "DJANGO_CELERY_FULLDBRESULT_TRACK_PUBLISH", False)
monkey_patch_async = getattr(
settings, "DJANGO_CELERY_FULLDBRESULT_MONKEY_PATCH_ASYNC", False)
old_apply_async = Task.apply_async
def new_apply_async(self, *args, **kwargs):
    """Replacement for ``Task.apply_async`` that understands scheduling.

    When the before-publish handler raises ``SchedulingStopPublishing``
    (an ETA was present and a scheduled task was stored instead of being
    sent to the broker), return a normal ``AsyncResult`` for that task id.
    """
    try:
        return old_apply_async(self, *args, **kwargs)
    except SchedulingStopPublishing as exc:
        # There was an ETA and the task was not sent to the broker.
        # A scheduled task was created instead.
        return self.AsyncResult(exc.task_id)
def apply_async_monkey_patch():
    # Install the scheduling-aware wrapper on all Celery tasks.
    Task.apply_async = new_apply_async
def unapply_async_monkey_patch():
    # Restore Celery's original Task.apply_async.
    Task.apply_async = old_apply_async
if monkey_patch_async:
    apply_async_monkey_patch()
if track_publish or schedule_eta:
    @before_task_publish.connect
    def update_sent_state(sender=None, body=None, exchange=None,
                          routing_key=None, **kwargs):
        """Store a result row before a task message is published.

        Stores SCHEDULED status (and aborts publishing) for ETA tasks when
        scheduling is enabled, or PENDING status when publish tracking is
        enabled and results are not ignored.
        """
        # App may not be loaded on init
        from django_celery_fulldbresult.models import SCHEDULED
        task = current_app.tasks.get(sender)
        save = False
        status = None
        # Re-read the settings here so runtime changes are honoured.
        schedule_eta = getattr(
            settings, "DJANGO_CELERY_FULLDBRESULT_SCHEDULE_ETA", False)
        track_publish = getattr(
            settings, "DJANGO_CELERY_FULLDBRESULT_TRACK_PUBLISH", False)
        ignore_result = getattr(task, "ignore_result", False) or\
            getattr(settings, "CELERY_IGNORE_RESULT", False)
        # Chord/taskset members are excluded from ETA scheduling.
        if schedule_eta and body.get("eta") and not body.get("chord")\
                and not body.get("taskset"):
            status = SCHEDULED
            save = True
        elif track_publish and not ignore_result:
            status = PENDING
            save = True
        if save:
            backend = task.backend if task else current_app.backend
            request = Context()
            request.update(**body)
            request.date_submitted = now()
            request.delivery_info = {
                "exchange": exchange,
                "routing_key": routing_key
            }
            backend.store_result(
                body["id"], None, status, traceback=None, request=request)
            if status == SCHEDULED:
                # Abort publishing: new_apply_async turns this into an
                # AsyncResult for the stored scheduled task.
                raise SchedulingStopPublishing(task_id=body["id"])
|
aksinghdce/aksinghdce.github.io | students/subjects/Programming/projects/knowledge-tree/server/knowledgetree/lib/python3.6/site-packages/werkzeug/datastructures.py | Python | mit | 100,480 | 0.000418 | # -*- coding: utf-8 -*-
"""
werkzeug.datastructures
~~~~~~~~~~~~~~~~~~~~~~~
This module provides mixins and classes with an immutable interface.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
import codecs
import mimetypes
import re
from copy import deepcopy
from itertools import repeat
from . import exceptions
from ._compat import BytesIO
from ._compat import collections_abc
from ._compat import fspath
from ._compat import integer_types
from ._compat import iteritems
from ._compat import iterkeys
from ._compat import iterlists
from ._compat import itervalues
from ._compat import make_literal_wrapper
from ._compat import PY2
from ._compat import string_types
from ._compat import text_type
from ._compat import to_native
from ._internal import _missing
from .filesystem import get_filesystem_encoding
def is_immutable(self):
    """Raise :exc:`TypeError` reporting that *self*'s type is read-only."""
    type_name = self.__class__.__name__
    raise TypeError("%r objects are immutable" % type_name)
def iter_multi_items(mapping):
    """Iterates over the items of a mapping yielding keys and values
    without dropping any from more complex structures.
    """
    if isinstance(mapping, MultiDict):
        # MultiDict knows how to enumerate repeated keys itself.
        for pair in iteritems(mapping, multi=True):
            yield pair
        return
    if not isinstance(mapping, dict):
        # Assume a plain iterable of (key, value) pairs.
        for pair in mapping:
            yield pair
        return
    # Plain dict: expand list/tuple values into one pair per element.
    for key, value in iteritems(mapping):
        if isinstance(value, (tuple, list)):
            for single in value:
                yield key, single
        else:
            yield key, value
def native_itermethods(names):
    """Class decorator providing Python 2 iterator-method aliases.

    On Python 3 the class is returned unchanged.  On Python 2, for each
    method name in *names* the decorator renames the method to
    ``iter<name>``, installs a list-returning ``<name>`` and a
    ``view<name>`` accessor (backed by ``ViewItems``, defined elsewhere
    in this module).
    """
    if not PY2:
        return lambda x: x
    def setviewmethod(cls, name):
        # Build and attach a view<name>() accessor.
        viewmethod_name = "view%s" % name
        repr_name = "view_%s" % name
        def viewmethod(self, *a, **kw):
            return ViewItems(self, name, repr_name, *a, **kw)
        viewmethod.__name__ = viewmethod_name
        viewmethod.__doc__ = "`%s()` object providing a view on %s" % (
            viewmethod_name,
            name,
        )
        setattr(cls, viewmethod_name, viewmethod)
    def setitermethod(cls, name):
        # Alias the original generator method as iter<name>, then replace
        # <name> with a version that materializes the result as a list.
        itermethod = getattr(cls, name)
        setattr(cls, "iter%s" % name, itermethod)
        def listmethod(self, *a, **kw):
            return list(itermethod(self, *a, **kw))
        listmethod.__name__ = name
        listmethod.__doc__ = "Like :py:meth:`iter%s`, but returns a list." % name
        setattr(cls, name, listmethod)
    def wrap(cls):
        for name in names:
            setitermethod(cls, name)
            setviewmethod(cls, name)
        return cls
    return wrap
class ImmutableListMixin(object):
    """Mixin that turns every mutating :class:`list` operation into a
    :exc:`TypeError` and makes instances hashable.

    .. versionadded:: 0.5

    :private:
    """

    # Cached hash value; computed lazily on first use.
    _hash_cache = None

    def __hash__(self):
        cached = self._hash_cache
        if cached is None:
            cached = self._hash_cache = hash(tuple(self))
        return cached

    def __reduce_ex__(self, protocol):
        # Pickle as a plain, mutable list.
        return type(self), (list(self),)

    def __delitem__(self, key):
        is_immutable(self)

    def __iadd__(self, other):
        is_immutable(self)

    def __imul__(self, other):
        is_immutable(self)

    def __setitem__(self, key, value):
        is_immutable(self)

    def append(self, item):
        is_immutable(self)

    def remove(self, item):
        is_immutable(self)

    def extend(self, iterable):
        is_immutable(self)

    def insert(self, pos, value):
        is_immutable(self)

    def pop(self, index=-1):
        is_immutable(self)

    def reverse(self):
        is_immutable(self)

    def sort(self, cmp=None, key=None, reverse=None):
        is_immutable(self)
class ImmutableList(ImmutableListMixin, list):
    """An immutable :class:`list`.

    .. versionadded:: 0.5

    :private:
    """

    def __repr__(self):
        # Render as e.g. ImmutableList([1, 2, 3]).
        return "{0}({1})".format(self.__class__.__name__, list.__repr__(self))
class ImmutableDictMixin(object):
    """Mixin that blocks every mutating :class:`dict` operation and makes
    instances hashable.

    .. versionadded:: 0.5

    :private:
    """

    # Cached hash value; computed lazily on first use.
    _hash_cache = None

    @classmethod
    def fromkeys(cls, keys, value=None):
        # Bypass __init__ restrictions by constructing via __new__ and
        # feeding the pairs straight to the dict initializer.
        instance = super(cls, cls).__new__(cls)
        instance.__init__(zip(keys, repeat(value)))
        return instance

    def __reduce_ex__(self, protocol):
        # Pickle as a plain, mutable dict.
        return type(self), (dict(self),)

    def _iter_hashitems(self):
        # Subclasses override this to include repeated keys in the hash.
        return iteritems(self)

    def __hash__(self):
        cached = self._hash_cache
        if cached is None:
            cached = self._hash_cache = hash(frozenset(self._iter_hashitems()))
        return cached

    def setdefault(self, key, default=None):
        is_immutable(self)

    def update(self, *args, **kwargs):
        is_immutable(self)

    def pop(self, key, default=None):
        is_immutable(self)

    def popitem(self):
        is_immutable(self)

    def __setitem__(self, key, value):
        is_immutable(self)

    def __delitem__(self, key):
        is_immutable(self)

    def clear(self):
        is_immutable(self)
class ImmutableMultiDictMixin(ImmutableDictMixin):
    """Makes a :class:`MultiDict` immutable.

    .. versionadded:: 0.5

    :private:
    """

    def __reduce_ex__(self, protocol):
        # Pickle as the full multi-item list so duplicate keys survive.
        return type(self), (list(iteritems(self, multi=True)),)

    def _iter_hashitems(self):
        # Hash over every (key, value) pair, including repeated keys.
        return iteritems(self, multi=True)

    def add(self, key, value):
        is_immutable(self)

    def popitemlist(self):
        is_immutable(self)

    def poplist(self, key):
        is_immutable(self)

    def setlist(self, key, new_list):
        is_immutable(self)

    def setlistdefault(self, key, default_list=None):
        is_immutable(self)
class UpdateDictMixin(object):
    """Makes dicts call `self.on_update` on modifications.

    .. versionadded:: 0.5

    :private:
    """

    # Callback invoked with ``self`` after every mutating operation.
    on_update = None

    def calls_update(name):  # noqa: B902
        # Factory executed at class-creation time: builds a method that
        # forwards to the underlying dict implementation of ``name`` and
        # then fires the ``on_update`` callback.
        def oncall(self, *args, **kw):
            rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw)
            if self.on_update is not None:
                self.on_update(self)
            return rv

        oncall.__name__ = name
        return oncall

    def setdefault(self, key, default=None):
        # Only notify when the key was actually inserted.
        modified = key not in self
        rv = super(UpdateDictMixin, self).setdefault(key, default)
        if modified and self.on_update is not None:
            self.on_update(self)
        return rv

    def pop(self, key, default=_missing):
        # Only notify when an existing key was removed.
        modified = key in self
        if default is _missing:
            rv = super(UpdateDictMixin, self).pop(key)
        else:
            rv = super(UpdateDictMixin, self).pop(key, default)
        if modified and self.on_update is not None:
            self.on_update(self)
        return rv

    __setitem__ = calls_update("__setitem__")
    __delitem__ = calls_update("__delitem__")
    clear = calls_update("clear")
    popitem = calls_update("popitem")
    update = calls_update("update")
    # The factory is only needed while the class body executes.
    del calls_update
class TypeConversionDict(dict):
"""Works like a regular dict but the :meth:`get` method can perform
type conversions. :class:`MultiDict` and :class:`CombinedMultiDict`
are subclasses of this class and provide the same feature.
.. versionadded:: 0.5
"""
def get(self, key, default=None, type=None):
"""Return the default value if the requested data doesn't exist.
If `type` is provided and is a callable it should convert the value,
return it or raise a :exc:`ValueError` if that is not possible. In
this case the function will return the default as if the value was not
found:
>>> d = TypeConversionDict(foo='42', bar='blub')
>>> d.get('foo', type=int)
42
>>> d.get('bar', -1, type=int)
-1
:param key: The key to be looked up.
:param default: The default value to be returned if the key can't
be looked up. If not further specified `None` is
returned.
:param type: A callable that is used to cast the value in the
:class:`MultiDict`. If a :exc:`ValueError` is raised
by this callable the default value is returned.
"""
try:
rv = self[key]
except KeyError:
r |
readw/210CT-Coursework | Basic-Py/11-TreeNodeDelete.py | Python | gpl-3.0 | 3,026 | 0.018506 | # | Week 5 - 11) Based on the Python Code or the C++ Code Provided in class as a
# starting point, implement the binary search tree node, delete
# function.
class Node(object):
    """A single element of a doubly linked list."""

    def __init__(self, value):
        # Payload plus the two neighbour links, empty until inserted.
        self.value = value
        self.next = None
        self.prev = None
class List(object):
    """A minimal doubly linked list built from ``Node``-like objects
    (anything with ``value``, ``next`` and ``prev`` attributes)."""

    def __init__(self):
        self.head = None
        self.tail = None

    def insert(self, n, x):
        """Insert node *x* directly after node *n*.

        Pass ``n=None`` only on an empty list (this implementation cannot
        prepend in front of an existing head -- known limitation).
        """
        if n is not None:
            # Splice x between n and n.next.
            x.next = n.next
            n.next = x
            x.prev = n
            if x.next is not None:
                x.next.prev = x
        if self.head is None:
            # First element: it is both head and tail.
            self.head = self.tail = x
            x.prev = x.next = None
        elif self.tail is n:
            # Appended after the old tail, so x becomes the new tail.
            self.tail = x

    def remove(self, node):
        """Remove the first node whose ``value`` equals *node*.

        A value that is not present (including on an empty list) is a
        no-op.  Bug fix: the previous implementation walked past the end
        of the list and raised ``AttributeError`` in that case.
        """
        cursor = self.head
        while cursor is not None and cursor.value != node:
            cursor = cursor.next
        if cursor is None:
            return  # value not found: nothing to remove
        # Unlink cursor, patching head/tail when it sits at an end.
        if cursor.prev is not None:
            cursor.prev.next = cursor.next
        else:
            self.head = cursor.next
        if cursor.next is not None:
            cursor.next.prev = cursor.prev
        else:
            self.tail = cursor.prev

    def display(self):
        """Print the list values as a comma-separated line."""
        values = []
        n = self.head
        while n is not None:
            values.append(str(n.value))
            n = n.next
        print("List: ", ",".join(values))
if __name__ == '__main__':
    # Demo: build the list 4,6,8,10,13, show it, delete 10, show it again.
    l=List()
    l.insert(None, Node(4))
    l.insert(l.tail,Node(6))
    l.insert(l.tail,Node(8))
    l.insert(l.tail,Node(10))
    l.insert(l.tail,Node(13))
    l.display()
    l.remove(10)
    l.display()
|
penguinmenac3/GraphProgramming | cluster_server/cluster_host.py | Python | gpl-2.0 | 446 | 0.006726 | # This cluster host should listen on the ne | twork for graph execution requests.
# Such requests consist of a language identifier string (lua, python, ...) and a graph to execute, as well as optional arguments for the graph.
# The graph is executed in is_cluster_server mode, which may restrict some features to protect the cluster provider.
# Outputs of the graph will be reported back over a TCP connection; each line will be one packet.
|
santosfamilyfoundation/SantosGUI | application/custom/videographicsitem.py | Python | mit | 7,446 | 0.005372 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
| ##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## | met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import QDir, QSize, QSizeF, Qt, QUrl, QRectF
from PyQt5.QtGui import QTransform
from PyQt5.QtMultimedia import QMediaContent, QMediaPlayer
from PyQt5.QtMultimediaWidgets import QGraphicsVideoItem
from PyQt5.QtWidgets import (QApplication, QFileDialog, QGraphicsScene,
QGraphicsView, QHBoxLayout, QPushButton, QSlider, QStyle, QVBoxLayout,
QWidget, QSizePolicy, QPlainTextEdit)
import os
import sys
# add parent folder to python path
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from video import get_video_resolution
WIDTH = 600.0
WIGGLE = 40.0
class VideoPlayer(QWidget):
    """
    Video playback widget: a QMediaPlayer rendering into a
    QGraphicsVideoItem, with a play/pause button and a seek slider.

    Arguments
    ---------
    parent: QWidget, the parent widget of VideoPlayer
    display_status: bool, default False, will show the status of the media player in the gui
    """
    def __init__(self, parent=None, display_status=False):
        super(VideoPlayer, self).__init__(parent)
        self.display_status = display_status
        self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.VideoSurface)
        self.videoItem = QGraphicsVideoItem()
        # The video item lives inside a graphics scene/view pair.
        scene = QGraphicsScene(self)
        graphicsView = QGraphicsView(scene)
        scene.addItem(self.videoItem)
        self.playButton = QPushButton()
        self.playButton.setEnabled(False)
        self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
        self.playButton.clicked.connect(self.play)
        self.positionSlider = QSlider(Qt.Horizontal)
        self.positionSlider.setRange(0, 0)
        self.positionSlider.sliderMoved.connect(self.setPosition)
        if self.display_status:
            # Human-readable labels for QMediaPlayer.MediaStatus values.
            self.status_mapping = {
                QMediaPlayer.UnknownMediaStatus: "UnknownMediaStatus",
                QMediaPlayer.NoMedia: "NoMedia",
                QMediaPlayer.LoadingMedia: "LoadingMedia",
                QMediaPlayer.LoadedMedia: "LoadedMedia",
                QMediaPlayer.StalledMedia: "StalledMedia",
                QMediaPlayer.BufferingMedia: "BufferingMedia",
                QMediaPlayer.BufferedMedia: "BufferedMedia",
                QMediaPlayer.EndOfMedia: "EndOfMedia",
                QMediaPlayer.InvalidMedia: "InvalidMedia"
            }
            self.statusText = QPlainTextEdit()
            self.statusText.setReadOnly(True)
            self.statusText.setFixedHeight(25)
            self.statusText.setFixedWidth(150)
            self.mediaPlayer.mediaStatusChanged.connect(self.mediaStatusChanged)
        # Controls row: play button, seek slider, optional status box.
        controlLayout = QHBoxLayout()
        controlLayout.setContentsMargins(0, 0, 0, 0)
        controlLayout.addWidget(self.playButton)
        controlLayout.addWidget(self.positionSlider)
        if self.display_status:
            controlLayout.addWidget(self.statusText)
        layout = QVBoxLayout()
        layout.addWidget(graphicsView)
        layout.addLayout(controlLayout)
        self.setFixedWidth(WIDTH + WIGGLE)
        self.setLayout(layout)
        self.mediaPlayer.setVideoOutput(self.videoItem)
        self.mediaPlayer.stateChanged.connect(self.mediaStateChanged)
        self.mediaPlayer.positionChanged.connect(self.positionChanged)
        self.mediaPlayer.durationChanged.connect(self.durationChanged)
    def openFile(self, fileName):
        """Load *fileName*, size the widget to its aspect ratio and show
        the first frame (via a play/pause trick)."""
        if fileName != '' or fileName is not None:
            self.mediaPlayer.setMedia(
                    QMediaContent(QUrl.fromLocalFile(fileName)))
            # set resolution
            res_orig = get_video_resolution(fileName)
            self.aspect_ratio = float(res_orig[0]) / res_orig[1]
            self.videoItem.setSize(QSizeF(WIDTH,
                                          WIDTH / self.aspect_ratio))
            self.setFixedHeight(WIDTH / self.aspect_ratio + 2*WIGGLE)
            self.playButton.setEnabled(True)
            # trick to show screenshot of the first frame of video
            self.mediaPlayer.play()
            self.mediaPlayer.pause()
    def closeFile(self):
        # Clearing the media releases the loaded file.
        self.mediaPlayer.setMedia(QMediaContent())
    def play(self):
        # Toggle play/pause.
        if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
            self.mediaPlayer.pause()
        else:
            self.mediaPlayer.play()
    def mediaStateChanged(self, state):
        # Keep the button icon in sync with the player state.
        if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
            self.playButton.setIcon(
                    self.style().standardIcon(QStyle.SP_MediaPause))
        else:
            self.playButton.setIcon(
                    self.style().standardIcon(QStyle.SP_MediaPlay))
    def mediaStatusChanged(self, status):
        self.statusText.setPlaceholderText(self.status_mapping[status])
    def positionChanged(self, position):
        self.positionSlider.setValue(position)
        # if position slider has reached the end, let's stop the video
        if self.positionSlider.value() >= self.positionSlider.maximum() - 1:
            self.mediaPlayer.stop()
            # play/pause hack to show the first frame of video
            self.mediaPlayer.play()
            self.mediaPlayer.pause()
    def durationChanged(self, duration):
        self.positionSlider.setRange(0, duration)
    def setPosition(self, position):
        self.mediaPlayer.setPosition(position)
if __name__ == '__main__':
    # Standalone demo: show an empty player window.
    import sys  # NOTE(review): shadows the module-level import; redundant but harmless
    app = QApplication(sys.argv)
    player = VideoPlayer()
    player.show()
    sys.exit(app.exec_())
|
arcyfelix/Courses | 18-04-18-REST APIs with Flask and Python/Section 5 - Storing resources in a SQL database/2_Logging in and retrieving Users from a database/app.py | Python | apache-2.0 | 2,324 | 0.034423 | from flask import Flask, request
from flask_restplus import Resource, Api, reqparse
from flask_jwt import JWT, jwt_required
from security import authenticate, identity
app = Flask(__name__)
app.secret_key = 'Wojciech'
api = Api(app)
# JWT creates a new endpoint /auth
jwt = JWT(app, authenticate, identity)
# List for storing the data.
items = []
class Item(Resource):
    """REST resource for one named item, backed by the in-memory ``items``
    list. GET requires a JWT; POST/PUT/DELETE are unauthenticated here."""
    # Adding parser as part of the class: shared by post() and put().
    parser = reqparse.RequestParser()
    parser.add_argument('price',
                        type = float,
                        required = True,
                        help = "Price is required!" )
    @jwt_required()
    def get(self, name):
        """Return the item named *name*, or 404 when absent."""
        # Code quality improvement:
        # 'next' normally throws an error when the filter yields nothing;
        # the second argument replaces that with a default, here 'None'.
        item = next(filter(lambda x: x['name'] == name, items), None)
        # If the item is not found
        return {'item' : item}, 200 if item is not None else 404
    def post(self, name):
        """Create a new item; 400 when the name already exists."""
        # Ensuring that there is only one item with that name:
        # the lookup is None only when no item named *name* exists yet.
        if next(filter(lambda x: x['name'] == name, items), None) is not None:
            # NOTE(review): message is missing the closing quote after {} --
            # confirm and fix the format string.
            return {'message' : "An item with name '{} already exists!".format(name)}, 400
        request_data = Item.parser.parse_args()
        item = {'name' : name,
                'price' : request_data['price']}
        items.append(item)
        return item, 201
    def delete(self, name):
        """Delete the item named *name* (idempotent)."""
        # Deleting by filtering out the list of items and overwriting it.
        # The global declaration is required because we rebind ``items``;
        # without it Python would treat it as an unassigned local.
        global items
        items = list(filter(lambda x: x['name'] != name, items))
        return {'message' : 'Item deleted.'}
    def put(self, name):
        """Create the item when missing, otherwise update its price."""
        request_data = Item.parser.parse_args()
        # Check if the item exists
        item = next(filter(lambda x: x['name'] == name, items), None)
        # If the item does not exist, create it.
        if item is None:
            item = {'name' : name,
                    'price' : request_data['price']}
            items.append(item)
        else:
            item.update(request_data)
        return item
class ItemList(Resource):
    """REST resource listing every stored item."""
    def get(self):
        # Return the whole in-memory item list.
        return {'items' : items}
# Route registration: /item/<name> for single items, /items for the list.
api.add_resource(Item, '/item/<string:name>')
api.add_resource(ItemList, '/items')
# Development server only; debug=True must not be used in production.
app.run(port = 5000, debug = True)
UCRoboticsLab/BaxterTictactoe | src/baxter_interface/src/baxter_dataflow/__init__.py | Python | apache-2.0 | 1,613 | 0 | # Copyright (c) 2013-2015, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRI | GHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO | EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from .wait_for import wait_for
from .signals import Signal
|
ryfeus/lambda-packs | Tensorflow_Pandas_Numpy/source3.6/tensorflow/core/util/memmapped_file_system_pb2.py | Python | mit | 4,351 | 0.006665 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/util/memmapped_file_system.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/util/memmapped_file_system.proto',
package='tensorflow',
syntax='proto3',
serialized_pb=_b('\n0tensorflow/core/util/memmapped_file_system.proto\x12\ntensorflow\"C\n#MemmappedFileSystemDirectoryElement\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\t\"`\n\x1cMemmappedFileSystemDirectory\x12@\n\x07\x65lement\x18\x01 \x03(\x0b\x32/.tensorflow.MemmappedFileSystemDirectoryElementB\x03\xf8\x01\x01\x62\x06proto3')
)
# Generated protobuf descriptor for MemmappedFileSystemDirectoryElement.
# Repaired two corrupted keyword arguments ("file | name" -> "filename",
# "is_exte | nsion" -> "is_extension") so the module imports again; no other
# generated content was altered.
_MEMMAPPEDFILESYSTEMDIRECTORYELEMENT = _descriptor.Descriptor(
  name='MemmappedFileSystemDirectoryElement',
  full_name='tensorflow.MemmappedFileSystemDirectoryElement',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='offset', full_name='tensorflow.MemmappedFileSystemDirectoryElement.offset', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name', full_name='tensorflow.MemmappedFileSystemDirectoryElement.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=64,
  serialized_end=131,
)
_MEMMAPPEDFILESYSTEMDIRECTORY = _descriptor.Descriptor(
name='MemmappedFileSystemDirectory',
full_name='tensorflow.MemmappedFileSystemDirectory',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='element', full_name='tensorflow.MemmappedFileSystemDirectory.element', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=133,
serialized_end=229,
)
_MEMMAPPEDFILESYSTEMDIRECTORY.fields_by_name['element'].message_type = _MEMMAPPEDFILESYSTEMDIRECTORYELEMENT
DESCRIPTOR.message_types_by_name['MemmappedFileSystemDirectoryElement'] = _MEMMAPPEDFILESYSTEMDIRECTORYELEMENT
DESCRIPTOR.message_types_by_name['MemmappedFileSystemDirectory'] = _MEMMAPPEDFILESYSTEMDIRECTORY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MemmappedFileSystemDirectoryElement = _reflection.GeneratedProtocolMessageType('MemmappedFileSystemDirectoryElement', (_message.Message,), dict(
DESCRIPTOR = _MEMMAPPEDFILESYSTEMDIRECTORYELEMENT,
__module__ = 'tensorflow.core.util.memmapped_file_system_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.MemmappedFileSystemDirectoryElement)
))
_sym_db.RegisterMessage(MemmappedFileSystemDirectoryElement)
MemmappedFileSystemDirectory = _reflection.GeneratedProtocolMessageType('MemmappedFileSystemDirectory', (_message.Message,), dict(
DESCRIPTOR = _MEMMAPPEDFILESYSTEMDIRECTORY,
__module__ = 'tensorflow.core.util.memmapped_file_system_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.MemmappedFileSystemDirectory)
))
_sym_db.RegisterMessage(MemmappedFileSystemDirectory)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
# @@protoc_insertion_point(module_scope)
|
cloud4life/targetcli-fb | targetcli/ui_root.py | Python | apache-2.0 | 7,712 | 0.002464 | '''
Implements the targetcli root UI.
This file is part of targetcli.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from datetime import datetime
from glob import glob
import os
import shutil
import stat
from configshell_fb import ExecutionError
from rtslib_fb import RTSRoot
from rtslib_fb.utils import ignored
from .ui_backstore import complete_path, UIBackstores
from .ui_node import UINode
from .ui_target import UIFabricModule
default_save_file = "/etc/target/saveconfig.json"
kept_backups = 10
class UIRoot(UINode):
    '''
    The targetcli hierarchy root node.

    Exposes the top-level commands (saveconfig, restoreconfig, clearconfig,
    version, sessions) and owns the RTSRoot handle used to talk to the
    kernel target subsystem.
    '''

    def __init__(self, shell, as_root=False):
        UINode.__init__(self, '/', shell=shell)
        # as_root controls whether privileged details (e.g. auth state in
        # `sessions detail`) are displayed.
        self.as_root = as_root
        self.rtsroot = RTSRoot()

    def refresh(self):
        '''
        Refreshes the tree of target fabric modules.
        '''
        self._children = set([])
        UIBackstores(self)
        # only show fabrics present in the system
        for fm in self.rtsroot.fabric_modules:
            if fm.wwns == None or any(fm.wwns):
                UIFabricModule(fm, self)

    def ui_command_saveconfig(self, savefile=default_save_file):
        '''
        Saves the current configuration to a file so that it can be restored
        on next boot.
        '''
        self.assert_root()
        savefile = os.path.expanduser(savefile)
        # Only save backups if saving to default location
        if savefile == default_save_file:
            backup_dir = os.path.dirname(savefile) + "/backup"
            backup_name = "saveconfig-" + \
                datetime.now().strftime("%Y%m%d-%H:%M:%S") + ".json"
            backupfile = backup_dir + "/" + backup_name
            with ignored(IOError):
                shutil.copy(savefile, backupfile)
            # Kill excess backups, keeping only the newest kept_backups files.
            backups = sorted(glob(os.path.dirname(savefile) + "/backup/*.json"))
            files_to_unlink = list(reversed(backups))[kept_backups:]
            for f in files_to_unlink:
                os.unlink(f)
            self.shell.log.info("Last %d configs saved in %s." % \
                                (kept_backups, backup_dir))
        self.rtsroot.save_to_file(savefile)
        self.shell.log.info("Configuration saved to %s" % savefile)

    def ui_command_restoreconfig(self, savefile=default_save_file, clear_existing=False):
        '''
        Restores configuration from a file.
        '''
        self.assert_root()
        savefile = os.path.expanduser(savefile)
        if not os.path.isfile(savefile):
            self.shell.log.info("Restore file %s not found" % savefile)
            return
        errors = self.rtsroot.restore_from_file(savefile, clear_existing)
        self.refresh()
        if errors:
            # Partial restore: report each recoverable error to the user.
            raise ExecutionError("Configuration restored, %d recoverable errors:\n%s" % \
                                 (len(errors), "\n".join(errors)))
        self.shell.log.info("Configuration restored from %s" % savefile)

    def ui_complete_saveconfig(self, parameters, text, current_param):
        '''
        Auto-completes the file name
        '''
        if current_param != 'savefile':
            return []
        completions = complete_path(text, stat.S_ISREG)
        if len(completions) == 1 and not completions[0].endswith('/'):
            completions = [completions[0] + ' ']
        return completions

    # restoreconfig takes the same savefile parameter, so share the completer.
    ui_complete_restoreconfig = ui_complete_saveconfig

    def ui_command_clearconfig(self, confirm=None):
        '''
        Removes entire configuration of backstores and targets
        '''
        self.assert_root()
        confirm = self.ui_eval_param(confirm, 'bool', False)
        self.rtsroot.clear_existing(confirm=confirm)
        self.shell.log.info("All configuration cleared")
        self.refresh()

    def ui_command_version(self):
        '''
        Displays the targetcli and support libraries versions.
        '''
        from targetcli import __version__ as targetcli_version
        self.shell.log.info("targetcli version %s" % targetcli_version)

    def ui_command_sessions(self, action="list", sid=None):
        '''
        Displays a detailed list of all open sessions.

        PARAMETERS
        ==========

        I{action}
        ---------
        The I{action} is one of:
            - B{list} gives a short session list
            - B{detail} gives a detailed list

        I{sid}
        ------
        You can specify an I{sid} to only list this one,
        with or without details.

        SEE ALSO
        ========
        status
        '''
        indent_step = 4
        base_steps = 0
        action_list = ("list", "detail")

        if action not in action_list:
            raise ExecutionError("action must be one of: %s" %
                                 ", ".join(action_list))
        if sid is not None:
            try:
                int(sid)
            except ValueError:
                raise ExecutionError("sid must be a number, '%s' given" % sid)

        def indent_print(text, steps):
            # Display helper: indent by steps * indent_step spaces.
            console = self.shell.con
            console.display(console.indent(text, indent_step * steps),
                            no_lf=True)

        def print_session(session):
            acl = session['parent_nodeacl']
            indent_print("alias: %(alias)s\tsid: %(id)i type: " \
                         "%(type)s session-state: %(state)s" % session,
                         base_steps)
            if action == 'detail':
                # Authentication state is only shown to privileged users.
                if self.as_root:
                    if acl.authenticate_target:
                        auth = " (authenticated)"
                    else:
                        auth = " (NOT AUTHENTICATED)"
                else:
                    auth = ""
                indent_print("name: %s%s" % (acl.node_wwn, auth),
                             base_steps + 1)
                for mlun in acl.mapped_luns:
                    plugin = mlun.tpg_lun.storage_object.plugin
                    name = mlun.tpg_lun.storage_object.name
                    if mlun.write_protect:
                        mode = "r"
                    else:
                        mode = "rw"
                    indent_print("mapped-lun: %d backstore: %s/%s mode: %s" %
                                 (mlun.mapped_lun, plugin, name, mode),
                                 base_steps + 1)
                for connection in session['connections']:
                    indent_print("address: %(address)s (%(transport)s) cid: " \
                                 "%(cid)i connection-state: %(cstate)s" % \
                                 connection, base_steps + 1)

        if sid:
            printed_sessions = [x for x in self.rtsroot.sessions if x['id'] == int(sid)]
        else:
            printed_sessions = list(self.rtsroot.sessions)

        if len(printed_sessions):
            for session in printed_sessions:
                print_session(session)
        else:
            if sid is None:
                indent_print("(no open sessions)", base_steps)
            else:
                raise ExecutionError("no session found with sid %i" % int(sid))
|
cmelange/ansible | lib/ansible/modules/network/eos/eos_user.py | Python | gpl-3.0 | 11,890 | 0.002019 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: eos_user
version_added: "2.3"
author: "Peter Sprygada (@privateip)"
short_description: Manage the collection of local users on EOS devices
description:
- This module provides declarative management of the local usernames
configured on Arista EOS devices. It allows playbooks to manage
either individual usernames or the collection of usernames in the
current running config. It also supports purging usernames from the
configuration that are not explicitly defined.
extends_documentation_fragment: eos
options:
users:
description:
- The set of username objects to be configured on the remote
Arista EOS device. The list entries can either be the username
or a hash of username and properties. This argument is mutually
exclusive with the C(username) argument.
required: false
default: null
username:
description:
- The username to be configured on the remote Arista EOS
        device. This argument accepts a string value and is mutually
exclusive with the C(users) argument.
required: false
default: null
update_password:
description:
- Since passwords are encrypted in the device run | ning config, this
argument will instruct the module when to change the password. When
set to C(always), the password will always be updated in the device
and when set to C(on_create) the password will be updated only if
the username is created.
required: false
default: always
choices: ['on_create', 'always']
privilege:
description:
- The C(privilege) argument configures the privilege level of the
        user when logged into the system. This argument accepts integer
values in the range of 1 to 15.
required: false
default: null
role:
description:
- The C(role) argument configures the role for the username in the
device running configuration. The argument accepts a string value
defining the role name. This argument does not check if the role
has been configured on the device.
required: false
default: null
sshkey:
description:
- The C(sshkey) argument defines the SSH public key to configure
for the username. This argument accepts a valid SSH key value.
required: false
default: null
nopassword:
description:
- The C(nopassword) argument defines the username without assigning
a password. This will allow the user to login to the system
without being authenticated by a password. This argument accepts
boolean values.
required: false
default: null
choices: ['true', 'false']
purge:
description:
- The C(purge) argument instructs the module to consider the
resource definition absolute. It will remove any previously
configured usernames on the device with the exception of the
`admin` user which cannot be deleted per EOS constraints.
required: false
default: false
state:
description:
- The C(state) argument configures the state of the uername definition
as it relates to the device operational configuration. When set
to I(present), the username(s) should be configured in the device active
configuration and when set to I(absent) the username(s) should not be
in the device active configuration
required: false
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: create a new user
eos_user:
username: ansible
sshkey: "{{ lookup('file', '~/.ssh/id_rsa.pub') }}"
state: present
- name: remove all users except admin
eos_user:
purge: yes
- name: set multiple users to privilege level
users:
- username: netop
- username: netend
privilege: 15
state: present
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- username ansible secret password
- username admin secret admin
session_name:
description: The EOS config session name used to load the configuration
returned: when changed is True
type: str
sample: ansible_1479315771
"""
import re
from functools import partial
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.eos import get_config, load_config
from ansible.module_utils.six import iteritems
from ansible.module_utils.eos import eos_argument_spec, check_args
def validate_privilege(value, module):
    """Fail the module run unless *value* is a valid EOS privilege level.

    :param value: candidate privilege level (int)
    :param module: AnsibleModule instance used to report the failure
    """
    in_range = 1 <= value <= 15
    if not in_range:
        module.fail_json(msg='privilege must be between 1 and 15, got %s' % value)
def map_obj_to_commands(updates, module):
    """Build the EOS config commands that move each user from its current
    state (``have``) to the desired state (``want``).

    :param updates: iterable of (want, have) dict pairs, one per username
    :param module: AnsibleModule; supplies the ``update_password`` policy
    :return: list of EOS configuration command strings

    Note: the original assigned lambdas to names inside the loop (PEP 8
    E731) and read an unused ``state`` local; both replaced/removed here.
    """
    commands = list()
    update_password = module.params['update_password']

    for want, have in updates:
        def needs_update(field):
            # True when a value is requested and differs from the device.
            return want.get(field) and (want.get(field) != have.get(field))

        def add(cmd):
            commands.append('username %s %s' % (want['username'], cmd))

        if want['state'] == 'absent':
            commands.append('no username %s' % want['username'])
            continue

        if needs_update('role'):
            add('role %s' % want['role'])

        if needs_update('privilege'):
            add('privilege %s' % want['privilege'])

        if needs_update('password'):
            # Passwords are hashed in the running config, so only push the
            # secret when policy allows it or the user is being created.
            if update_password == 'always' or not have:
                add('secret %s' % want['password'])

        if needs_update('sshkey'):
            add('sshkey %s' % want['sshkey'])

        if needs_update('nopassword'):
            if want['nopassword']:
                add('nopassword')
            else:
                add('no username %s nopassword' % want['username'])

    return commands
def parse_role(data):
    """Return the role name from a username config block, or None."""
    found = re.search(r'role (\S+)', data, re.M)
    return found.group(1) if found else None
def parse_sshkey(data):
    """Return the ssh public key string from a username config block, or None."""
    found = re.search(r'sshkey (.+)$', data, re.M)
    return found.group(1) if found else None
def parse_privilege(data):
    """Return the privilege level as an int from a config block, or None."""
    found = re.search(r'privilege (\S+)', data, re.M)
    return int(found.group(1)) if found else None
def map_config_to_obj(module):
    """Parse the device's ``section username`` running config into a list of
    user dicts matching the module's desired-state schema.

    :param module: AnsibleModule used to fetch the running configuration
    :return: list of dicts (one per username); empty list when none found

    Note: removed the unused ``regex`` local that duplicated the findall
    pattern in the original.
    """
    data = get_config(module, flags=['section username'])

    match = re.findall(r'^username (\S+)', data, re.M)
    if not match:
        return list()

    instances = list()

    for user in set(match):
        # Gather every config line belonging to this user into one block.
        cfg = re.findall(r'username %s .+$' % user, data, re.M)
        cfg = '\n'.join(cfg)

        obj = {
            'username': user,
            'state': 'present',
            'nopassword': 'nopassword' in cfg,
            # Hashed secrets cannot be read back, so password stays None.
            'password': None,
            'sshkey': parse_sshkey(cfg),
            'privilege': parse_privilege(cfg),
            'role': parse_role(cfg)
        }
        instances.append(obj)

    return instances
def get_param_value(key, item, module):
# if key doesn't exist in the item, get it from module.params
if not item.get(key):
value = module.params[key]
# if key does exist, do a type check on it to validate it
else:
value_type = module.argument_spec[key].get('type', 'str')
type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
type_checker(item[key])
value = item[key]
# validate the param value (if validator func exists)
validat |
pcostell/google-cloud-datastore | python/googledatastore/local_cloud_datastore.py | Python | apache-2.0 | 7,713 | 0.004278 | #
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python wrapper for gcd.sh."""
__author__ = 'eddavisson@google.com (Ed Davisson)'
import logging
import os
import shutil
import socket
import subprocess
import tempfile
import time
import urllib
import zipfile
import httplib2
import portpicker
from googledatastore import connection
_DEFAULT_GCD_OPTIONS = ['--allow_remote_shutdown', '--testing']
class LocalCloudDatastoreFactory(object):
"""A factory for constructing LocalCloudDatastore objects."""
def __init__(self, working_directory, gcd_zip, java=None):
"""Constructs a factory for building local datastore instances.
Args:
working_directory: path to a directory where temporary files will be
stored
g | cd_zip: path to the gcd zip file
java: path to a java executable
Raises:
ValueError: if gcd.sh cannot be located in the gcd zip file
"""
self._working_directory = working_directory
self._remote_datastores = {}
# Extract GCD.
zipped_file = zipfile.ZipFile(g | cd_zip)
self._gcd_dir = os.path.join(self._working_directory, 'gcd')
os.mkdir(self._gcd_dir)
zipped_file.extractall(self._gcd_dir)
# Locate gcd.sh in the unzipped directory (it may be in a directory which
# contains a version string).
gcd_dirs = [d for d in os.listdir(self._gcd_dir)
if os.path.isdir(os.path.join(self._gcd_dir, d))]
for d in gcd_dirs:
if d.startswith('gcd'):
self._gcd_sh = os.path.join(self._gcd_dir, d, 'gcd.sh')
break
else:
raise ValueError('could not find gcd.sh in zip file')
os.chmod(self._gcd_sh, 0700) # executable
# Make GCD use our copy of Java.
if java:
os.environ['JAVA'] = java
def Get(self, project_id):
"""Returns an existing local datastore instance for the provided project_id.
If a local datastore instance doesn't yet exist, it creates one.
"""
if project_id in self._remote_datastores:
return self._remote_datastores[project_id]
datastore = self.Create(project_id)
self._remote_datastores[project_id] = datastore
return datastore
def Create(self, project_id, start_options=None, deadline=10):
"""Creates a local datastore instance.
This method will wait for up to 'deadline' seconds for the datastore to
start.
Args:
project_id: project ID
start_options: a list of additional command-line options to pass to the
gcd.sh start command
deadline: number of seconds to wait for the datastore to respond
Returns:
a LocalCloudDatastore
Raises:
IOError: if the local datastore could not be started within the deadline
"""
return LocalCloudDatastore(self._gcd_sh, self._working_directory,
project_id, deadline, start_options)
def __del__(self):
# Delete temp files.
shutil.rmtree(self._gcd_dir)
class LocalCloudDatastore(object):
  """A local datastore (based on gcd.sh).

  Spawns the gcd tool as a subprocess, waits until it answers HTTP
  requests, and exposes a googledatastore connection to it.
  """

  def __init__(self, gcd_sh, working_directory, project_id, deadline,
               start_options):
    """Constructs a local datastore.

    Clients should use LocalCloudDatastoreFactory to construct
    LocalCloudDatastore instances.

    Args:
      gcd_sh: path to gcd.sh
      working_directory: directory file where temporary files will be stored
      project_id: project ID
      deadline: number of seconds to wait for the datastore to start
      start_options: a list of additional command-line options to pass to the
          gcd.sh start command

    Raises:
      IOError: if the datastore failed to start within the deadline
    """
    self._project_id = project_id
    self._gcd_sh = gcd_sh
    self._http = httplib2.Http()
    self.__running = False
    self._tmp_dir = tempfile.mkdtemp(dir=working_directory)
    self._project_directory = os.path.join(self._tmp_dir, self._project_id)
    p = subprocess.Popen([gcd_sh,
                          'create',
                          '--project_id=%s' % self._project_id,
                          self._project_directory])
    if p.wait() != 0:
      raise IOError('could not create project in directory: %s'
                    % self._project_directory)
    # Start GCD and wait for it to start responding to requests.
    port = portpicker.PickUnusedPort()
    self._host = 'http://localhost:%d' % port
    cmd = [self._gcd_sh, 'start', '--port=%d' % port]
    cmd.extend(_DEFAULT_GCD_OPTIONS)
    if start_options:
      cmd.extend(start_options)
    cmd.append(self._project_directory)
    subprocess.Popen(cmd)
    if not self._WaitForStartup(deadline):
      raise IOError('datastore did not respond within %ds' % deadline)
    endpoint = '%s/datastore/v1beta3/projects/%s' % (self._host,
                                                     self._project_id)
    self.__datastore = connection.Datastore(project_endpoint=endpoint)
    self.__running = True

  def GetDatastore(self):
    """Returns a googledatastore.Datastore that is connected to the gcd tool."""
    return self.__datastore

  def _WaitForStartup(self, deadline):
    """Waits for the datastore to start.

    Args:
      deadline: deadline in seconds

    Returns:
      True if the instance responds within the deadline, False otherwise.
    """
    start = time.time()
    sleep = 0.05

    def Elapsed():
      return time.time() - start

    # Poll with exponential backoff until the server answers or time is up.
    while True:
      try:
        response, _ = self._http.request(self._host)
        if response.status == 200:
          logging.info('local server responded after %f seconds', Elapsed())
          return True
      except socket.error:
        pass
      if Elapsed() >= deadline:
        # Out of time; give up.
        return False
      else:
        time.sleep(sleep)
        sleep *= 2

  def Clear(self):
    """Clears all data from the local datastore instance.

    Returns:
      True if the data was successfully cleared, False otherwise.
    """
    body = urllib.urlencode({'action': 'Clear Datastore'})
    headers = {'Content-type': 'application/x-www-form-urlencoded',
               'Content-length': str(len(body))}
    response, _ = self._http.request('%s/_ah/admin/datastore' % self._host,
                                     method='POST', headers=headers, body=body)
    if response.status == 200:
      return True
    else:
      logging.warning('failed to clear datastore; response was: %s', response)
      # Bug fix: the original fell through and returned None here, despite
      # the documented False-on-failure contract (None is still falsy, so
      # truthiness-based callers are unaffected).
      return False

  def Stop(self):
    """Shuts down the datastore process and removes its temporary files."""
    if not self.__running:
      return
    logging.info('shutting down the datastore running at %s', self._host)
    # Shut down the datastore.
    headers = {'Content-length': '0'}
    response, _ = self._http.request('%s/_ah/admin/quit' % self._host,
                                     method='POST', headers=headers)
    if response.status != 200:
      logging.warning('failed to shut down datastore; response: %s', response)
    self.__running = False
    # Delete temp files.
    shutil.rmtree(self._tmp_dir)

  def __del__(self):
    # Only warn when the user actually forgot to call Stop(); the original
    # logged this warning unconditionally, even after a clean shutdown.
    if self.__running:
      logging.warning('datastore shutting down due to '
                      'LocalCloudDatastore object deletion')
    self.Stop()
|
pyrapt/rapt | rapt/transformers/sql/sql_translator.py | Python | mit | 8,668 | 0.000923 | from rapt.treebrd.attributes import AttributeList
from ...treebrd.node import Operator
from ..base_translator import BaseTranslator
class SQLQuery:
    """
    Structure defining the building blocks of a SQL query.

    Holds the SELECT, FROM and WHERE fragments plus an optional prefix
    (used e.g. for CREATE TEMPORARY TABLE ... AS) and assembles them into
    a query string on demand.
    """

    def __init__(self, select_block, from_block, where_block=''):
        # prefix is filled in later by translators (e.g. assign()).
        self.prefix = ''
        self.select_block = select_block
        self.from_block = from_block
        self.where_block = where_block

    @property
    def _basic_query(self):
        # With no select block the query degenerates to the bare relation.
        if self.select_block:
            return '{prefix}SELECT {select} FROM {relation}'
        else:
            return '{prefix}{relation}'

    @property
    def _sql_query_skeleton(self):
        sql = self._basic_query
        if self.where_block:
            sql += ' WHERE {conditions}'
        return sql

    def to_sql(self):
        """
        Construct a SQL query based on the stored blocks.
        :return: a SQL query
        """
        return self._sql_query_skeleton.format(
            prefix=self.prefix, select=self.select_block,
            relation=self.from_block, conditions=self.where_block)
class SQLSetQuery(SQLQuery):
    """
    Structure defining the building blocks of a SQL query with set semantics.
    """

    @property
    def _basic_query(self):
        # Set semantics: duplicate rows are eliminated via SELECT DISTINCT.
        return '{prefix}SELECT DISTINCT {select} FROM {relation}'
class Translator(BaseTranslator):
    """
    A Translator defining the operations for translating a relational algebra
    statement into a SQL statement using bag semantics.
    """
    # Query class used to assemble output; SetTranslator overrides this with
    # the DISTINCT-producing variant.
    query = SQLQuery

    @classmethod
    def _get_temp_name(cls, node):
        # Anonymous subqueries still need a SQL alias; derive a unique one
        # from the node's object id when it has no name.
        return node.name or '_{}'.format(id(node))

    @classmethod
    def _get_sql_operator(cls, node):
        # Map relational algebra operators onto their SQL keywords.
        operators = {
            Operator.union: 'UNION',
            Operator.difference: 'EXCEPT',
            Operator.intersect: 'INTERSECT',
            Operator.cross_join: 'CROSS JOIN',
            Operator.theta_join: 'JOIN',
            Operator.natural_join: 'NATURAL JOIN',
        }
        return operators[node.operator]

    def relation(self, node):
        """
        Translate a relation node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self.query(select_block=str(node.attributes),
                          from_block=node.name)

    def select(self, node):
        """
        Translate a select node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        child_object = self.translate(node.child)
        where_block = node.conditions
        # Merge with any WHERE clause the child already produced so both
        # sets of conditions apply.
        if child_object.where_block:
            where_block = '({0}) AND ({1})'\
                .format(child_object.where_block, node.conditions)
        child_object.where_block = where_block
        if not child_object.select_block:
            child_object.select_block = str(node.attributes)
        return child_object

    def project(self, node):
        """
        Translate a project node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        child_object = self.translate(node.child)
        # Projection simply narrows the child's SELECT list.
        child_object.select_block = str(node.attributes)
        return child_object

    def rename(self, node):
        """
        Translate a rename node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        child_object = self.translate(node.child)
        # Rename is realised as a derived table with an alias and an
        # explicit column list.
        from_block = '({child}) AS {name}({attributes})'.format(
            child=child_object.to_sql(), name=node.name,
            attributes=', '.join(node.attributes.names))
        return self.query(str(node.attributes), from_block=from_block)

    def assign(self, node):
        """
        Translate an assign node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        child_object = self.translate(node.child)
        # Assignment materialises the child query into a temporary table.
        child_object.prefix = 'CREATE TEMPORARY TABLE {name}({attributes}) AS '\
            .format(name=node.name, attributes=', '.join(node.attributes.names))
        return child_object

    def natural_join(self, node):
        """
        Translate an assign node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self._join(node)

    def theta_join(self, node):
        """
        Translate an assign node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self._join(node)

    def cross_join(self, node):
        """
        Translate a cross join node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self._join(node)

    def union(self, node):
        """
        Translate a union node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self._set_op(node)

    def intersect(self, node):
        """
        Translate an intersection node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self._set_op(node)

    def difference(self, node):
        """
        Translate an difference node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        return self._set_op(node)

    def _join_helper(self, node):
        # Render one operand of a join. Nested joins are inlined directly
        # into the FROM clause; anything else becomes an aliased subquery.
        sobject = self.translate(node)
        if node.operator in {
            Operator.cross_join, Operator.natural_join, Operator.theta_join
        }:
            return sobject.from_block
        else:
            return '({subquery}) AS {name}'.format(
                subquery=sobject.to_sql(), name=self._get_temp_name(node))

    def _join(self, node):
        """
        Translate a join node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        select_block = str(node.attributes)
        from_block = '{left} {operator} {right}'.format(
            left=self._join_helper(node.left),
            right=self._join_helper(node.right),
            operator=self._get_sql_operator(node))
        # Theta joins carry an explicit join condition.
        if node.operator == Operator.theta_join:
            from_block = '{from_block} ON {conditions}'.format(
                from_block=from_block,
                conditions=node.conditions)
        return self.query(select_block, from_block, '')

    def _set_op(self, node):
        """
        Translate a set operator node into SQLQuery.
        :param node: a treebrd node
        :return: a SQLQuery object for the tree rooted at node
        """
        select_block = str(node.attributes)
        # Bag semantics: keep duplicates with UNION/EXCEPT/INTERSECT ALL.
        from_block = '({left} {operator} ALL {right}) AS {name}'.format(
            left=self.translate(node.left).to_sql(),
            right=self.translate(node.right).to_sql(),
            operator=self._get_sql_operator(node), name=self._get_temp_name(node))
        return self.query(select_block=select_block, from_block=from_block)
class SetTranslator(Translator):
"""
A Translator defining the operations for translating a relational algebra
statement into a SQL statement using set semantics.
"""
query = SQLSetQuery
def _set_op(self, node):
"""
Translate a set operator node into SQLQuery, using set semantics.
:param node: a treebrd node
:return: a SQLSetQuery object for the tree rooted at node
"""
select_block = str(node.attributes)
from_block = '({left} {operator} {right}) AS {name}'.format(
left=self.translate(node.left).to_sql(),
right=self.translate(node.right).to_sql(),
operator=self._get_sql_operator(node), name=self._get_temp_name(node))
return self.query(selec |
Featuretools/featuretools | featuretools/tests/primitive_tests/test_primitive_base.py | Python | bsd-3-clause | 2,871 | 0 | from datetime import datetime
import numpy as np
import pandas as pd
from pytest import raises
from featuretools.primitives import Haversine, IsIn, IsNull, Max, TimeSinceLast
from featuretools.primitives.base import TransformPrimitive
def test_call_agg():
    """An aggregation primitive can be called directly on an iterable."""
    agg = Max()
    # the assert is run twice on purpose
    assert agg(range(6)) == 5
    assert agg(range(6)) == 5
def test_call_trans():
    """A transform primitive returns the expected series on repeated calls."""
    trans = IsNull()
    expected = pd.Series([False] * 6)
    for _ in range(2):
        assert expected.equals(trans(range(6)))
def test_uses_calc_time():
    # TimeSinceLast must honor the calculation-time keyword and the unit
    # argument: one day equals 86400 seconds or 24 hours.  (Repaired a
    # corrupted "datetime(2015, | 6, 7)" literal from the original.)
    primitive = TimeSinceLast()
    primitive_h = TimeSinceLast(unit="hours")
    datetimes = pd.Series([datetime(2015, 6, 6), datetime(2015, 6, 7)])
    answer = 86400.0
    answer_h = 24.0
    assert answer == primitive(datetimes, time=datetime(2015, 6, 8))
    assert answer_h == primitive_h(datetimes, time=datetime(2015, 6, 8))
def test_call_multiple_args():
    # Haversine takes two lat/lon inputs; repeated calls must give the same
    # distances.  (Repaired the corrupted "test_call_multiple_arg | s" name.)
    primitive = Haversine()
    data1 = [(42.4, -71.1), (40.0, -122.4)]
    data2 = [(40.0, -122.4), (41.2, -96.75)]
    answer = [2631.231, 1343.289]
    for _ in range(2):
        assert np.round(primitive(data1, data2), 3).tolist() == answer
def test_get_function_called_once():
class TestPrimitive(TransformPrimitive):
def __init__(self):
self.get_function_call_count = 0
def get_function(self):
self.get_function_call_count += 1
def test(x):
return x
return test
primitive = TestPrimitive()
for _ in range(2):
primitive(range(6))
assert primitive.get_function_call_count == 1
def test_multiple_arg_string():
class Primitive(TransformPrimitive):
def __init__(self, bool=True, int=0, float=None):
self.bool = bool
self.int = int
self.float = float
primitive = Primitive(bool=True, int=4, float=.1)
string = primitive.get_args_string()
assert string == ', int=4, float=0.1'
def test_single_args_string():
    # A single non-default argument is rendered after a leading comma.
    rendered = IsIn([1, 2, 3]).get_args_string()
    assert rendered == ', list_of_outputs=[1, 2, 3]'
def test_args_string_default():
    # When every argument keeps its default, nothing is rendered.
    assert IsIn().get_args_string() == ''
def test_args_string_mixed():
    """Arguments equal to their defaults are omitted from the string."""

    class Primitive(TransformPrimitive):
        # Parameter names deliberately shadow builtins to match real usage.
        def __init__(self, bool=True, int=0, float=None):
            self.bool = bool
            self.int = int
            self.float = float

    rendered = Primitive(bool=False, int=0).get_args_string()
    # int=0 matches its default and must not appear.
    assert rendered == ', bool=False'
def test_args_string_undefined():
    # A primitive whose __init__ takes no arguments renders an empty string.
    assert Max().get_args_string() == ''
def test_args_string_error():
    """Init args not stored as same-named attributes raise AssertionError."""

    class Primitive(TransformPrimitive):
        def __init__(self, bool=True, int=0, float=None):
            pass  # intentionally does not store its arguments

    with raises(AssertionError, match='must be attribute'):
        Primitive(bool=True, int=4, float=.1).get_args_string()
|
ioam/scipy-2017-holoviews-tutorial | solutions/apps/periodic_app.py | Python | bsd-3-clause | 1,047 | 0.00382 | import holoviews as hv
import geoviews as gv
import dask.dataframe as dd
from holoviews.operation.datashader import datashade, aggregate, shade
from bokeh.models import WMTSTileSource
re | nderer = hv.renderer('bokeh')
# Load data
usecols = ['tpep_pickup_datetime', 'dropoff_x', 'dropoff_y']
ddf = dd.read_csv | ('../data/nyc_taxi.csv', parse_dates=['tpep_pickup_datetime'], usecols=usecols)
ddf['hour'] = ddf.tpep_pickup_datetime.dt.hour
ddf = ddf.persist()
# Declare objects
stream = hv.streams.Counter()
points = hv.Points(ddf, kdims=['dropoff_x', 'dropoff_y'])
dmap = hv.DynamicMap(lambda counter: points.select(hour=counter%24).relabel('Hour: %s' % (counter % 24)),
streams=[stream])
shaded = datashade(dmap)
hv.opts('RGB [width=800, height=600, xaxis=None, yaxis=None]')
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{Z}/{Y}/{X}.jpg'
wmts = gv.WMTS(WMTSTileSource(url=url))
overlay = wmts * shaded
# Create server document
doc = renderer.server_doc(overlay)
dmap.periodic(1)
|
pedrofeijao/RINGO | src/ringo/LD_simulation.py | Python | mit | 5,857 | 0.005805 | #!/usr/bin/env python2
import ringo_config
cfg = ringo_config.RingoConfig()
import pyximport;pyximport.install(build_dir=cfg.pyximport_build())
import argparse
import random
import numpy as np
import model
from simulation import Simulation, SimParameters, EventType, RearrangementType
def run_L_D_simulation(self, L, D):
    """Run a duplication+DCJ simulation over self.sim_tree.

    L is the maximum segmental duplication length; D is the number of DCJ
    rearrangements applied on each branch. Pre-duplications (at the root)
    and post-duplications (on the branches) are sized to reach ~1.5 genes
    per family on average. (Fix: L5522 was corrupted by a stray dataset
    split marker inside the dict comprehension; reconstructed.)
    """
    # L = duplication length
    # D = number of DCJs in each branch.
    param = self.sim_parameters
    # pre_dups (at root) and post_dups (at branches) to achieve 1.5 genes/family in average.
    pre_duplications = int(0.43 * param.num_genes / L)
    post_duplications = int(0.07 * param.num_genes / L)
    post_duplications = [int(0.5 * post_duplications), int(1.5 * post_duplications)]
    # post_duplications = [int(1 * post_duplications), int(1 * post_duplications)]
    param.pre_duplications = pre_duplications
    current_copy_number = None  # will init at root
    deletion_length_range = xrange(1, param.indel_length + 1)
    duplication_length_range = xrange(1, L + 1)
    idx = 1
    ev_tree = self.sim_tree
    for ev_node in ev_tree.preorder_node_iter():
        if ev_node.parent_node is None:
            # identity genome:
            ev_node.value = current_genome = model.Genome.identity(param.num_genes, param.num_chr)
            ev_node.events = {ev: 0 for ev in EventType.all}
            # add copy number information to track orthologous/paralogous, when duplications are present:
            for chromosome in current_genome.chromosomes:
                chromosome.copy_number = [1] * len(chromosome.gene_order)
            current_copy_number = current_genome.gene_count()
            # pre-duplications:
            for i in range(pre_duplications):
                Simulation.apply_random_segmental_duplication(current_genome,
                                                              range(1, param.duplication_length + 1),
                                                              current_copy_number)
            ev_node.events[EventType.DUPLICATION] = pre_duplications
            # ev_node.edge.length = pre_duplications
            if ev_node.label is None:
                ev_node.label = "Root"
        else:
            # evolve genome:
            if ev_node.is_internal():
                if ev_node.label is None:
                    ev_node.label = "M%02d" % idx
                    idx += 1
            else:  # complete labelling for leaves
                ev_node.label = ev_node.taxon.label
            current_genome = ev_node.parent_node.value.clone(ev_node.label)
            ev_node.value = current_genome
            pd = post_duplications.pop()
            ev_node.edge.length = D + pd
            # events: a shuffled mix of duplications and rearrangements.
            events = [EventType.DUPLICATION] * pd + [EventType.REARRANGEMENT] * D
            ev_node.edge.events = {ev: 0 for ev in EventType.all}
            random.shuffle(events)
            for event in events:
                if event == EventType.DUPLICATION:
                    Simulation.apply_random_segmental_duplication(current_genome, duplication_length_range, current_copy_number)
                    ev_node.edge.events[event] += 1
                elif event == EventType.REARRANGEMENT:
                    # here, I can also have deletions:
                    ev = np.random.choice([RearrangementType.REVERSAL, EventType.DELETION], 1,
                                          p=[param.rearrangement_p, param.deletion_p])[0]
                    if ev == RearrangementType.REVERSAL:
                        Simulation.apply_random_reversal(current_genome)
                        ev_node.edge.events[event] += 1
                    else:
                        Simulation.apply_random_deletion(current_genome, deletion_length_range)
                        ev_node.edge.events[EventType.DELETION] += 1
            # cumulative event counts: parent totals plus this edge's counts.
            ev_node.events = {ev: ev_node.parent_node.events[ev] + count for ev, count in
                              ev_node.edge.events.iteritems()}
if __name__ == '__main__':
    # Fix: the description string on L5526 was corrupted by a stray dataset
    # split marker ("on | a given"); reconstructed.
    parser = argparse.ArgumentParser(
        description="Simulates rearrangement evolution on a given newick tree")
    parser.add_argument("-s", "--sim", type=int, help="Simulate a new birth_death with SIM species")
    parser.add_argument("-n", "--num_genes", type=int, default=100, help="Number of genes in the root genome.")
    parser.add_argument("-c", "--num_chr", type=int, default=5, help="Number of chromosomes in the root genome.")
    parser.add_argument("-L", "-dl", "--duplication_length", type=int, default=5, help="Maximum length of duplication event.")
    parser.add_argument("-D", "--rearrangements", type=int, default=5, help="Number of rearrangements.")
    parser.add_argument("-o", "--output", type=str, default="sim", help="Name of the output folder.")
    parser.add_argument("-dp", "--deletion_p", type=float, default=0.0, help="Percentage of deletions, from 0 to 1.0")
    parser.add_argument("-ip", "--insertion_p", type=float, default=0.0, help="Percentage of insertions, from 0 to 1.0")
    parser.add_argument("-il", "--indel_length", type=int, default=5, help="Maximum size of indel event in genes.")
    parser.add_argument("-d", "--disturb", type=float, default=0,
                        help="Disturb branch lengths multiplying each by e^r, where r in [-d,+d]. ")
    param = parser.parse_args()

    # Simulation parameters:
    sim_par = SimParameters(num_genes=param.num_genes, num_chr=param.num_chr,
                            del_p=param.deletion_p, ins_p=param.insertion_p, indel_length=param.indel_length,
                            duplication_length=param.duplication_length)
    # start sim object; simulate the tree, run the L/D model, save results.
    sim = Simulation(param.output, sim_par)
    sim.simulate_tree(param.sim)
    run_L_D_simulation(sim, param.duplication_length, param.rearrangements)
    sim.save_simulation(save_copies=True)
|
allenlavoie/tensorflow | tensorflow/contrib/boosted_trees/estimator_batch/custom_export_strategy.py | Python | apache-2.0 | 11,678 | 0.007536 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Strategy to export custom proto formats."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
from tensorflow.contrib.boosted_trees.proto import tree_config_pb2
from tensorflow.contrib.boosted_trees.python.training.functions import gbdt_batch
from tensorflow.contrib.decision_trees.proto import generic_tree_model_extensions_pb2
from tensorflow.contrib.decision_trees.proto import generic_tree_model_pb2
from tensorflow.contrib.learn.python.learn import export_strategy
from tensorflow.contrib.learn.python.learn.utils import saved_model_export_utils
from tensorflow.python.client import session as tf_session
from tensorflow.python.framework import ops
from tensorflow.python.platform import gfile
from tensorflow.python.saved_model import loader as saved_model_loader
from tensorflow.python.saved_model import tag_constants
_SPARSE_FLOAT_FEATURE_NAME_TEMPLATE = "%s_%d"
def make_custom_export_strategy(name,
                                convert_fn,
                                feature_columns,
                                export_input_fn):
  """Makes custom exporter of GTFlow tree format.

  Args:
    name: A string, for the name of the export strategy.
    convert_fn: A function that converts the tree proto to desired format and
      saves it to the desired location. Can be None to skip conversion.
    feature_columns: A list of feature columns.
    export_input_fn: A function that takes no arguments and returns an
      `InputFnOps`.

  Returns:
    An `ExportStrategy`.
  """
  # Delegate the actual SavedModel export to the standard strategy; this
  # wrapper only post-processes the exported model.
  base_strategy = saved_model_export_utils.make_export_strategy(
      serving_input_fn=export_input_fn, strip_default_attrs=True)
  input_fn = export_input_fn()
  # Extract the sorted feature names and per-kind feature counts once, so
  # the converted trees can reference features by name.
  (sorted_feature_names, dense_floats, sparse_float_indices, _, _,
   sparse_int_indices, _, _) = gbdt_batch.extract_features(
       input_fn.features, feature_columns)
  def export_fn(estimator, export_dir, checkpoint_path=None, eval_result=None):
    """A wrapper to export to SavedModel, and convert it to other formats."""
    result_dir = base_strategy.export(estimator, export_dir,
                                      checkpoint_path,
                                      eval_result)
    # Reload the just-exported SavedModel to serialize the tree ensemble.
    with ops.Graph().as_default() as graph:
      with tf_session.Session(graph=graph) as sess:
        saved_model_loader.load(
            sess, [tag_constants.SERVING], result_dir)
        # Note: This is GTFlow internal API and might change.
        ensemble_model = graph.get_operation_by_name(
            "ensemble_model/TreeEnsembleSerialize")
        _, dfec_str = sess.run(ensemble_model.outputs)
        dtec = tree_config_pb2.DecisionTreeEnsembleConfig()
        dtec.ParseFromString(dfec_str)
        # Export the result in the same folder as the saved model.
        if convert_fn:
          convert_fn(dtec, sorted_feature_names,
                     len(dense_floats),
                     len(sparse_float_indices),
                     len(sparse_int_indices), result_dir, eval_result)
        # Write per-feature importances, most important first, into the
        # SavedModel's assets.extra directory.
        feature_importances = _get_feature_importances(
            dtec, sorted_feature_names,
            len(dense_floats),
            len(sparse_float_indices), len(sparse_int_indices))
        sorted_by_importance = sorted(
            feature_importances.items(), key=lambda x: -x[1])
        assets_dir = os.path.join(result_dir, "assets.extra")
        gfile.MakeDirs(assets_dir)
        with gfile.GFile(os.path.join(assets_dir, "feature_importances"),
                         "w") as f:
          f.write("\n".join("%s, %f" % (k, v) for k, v in sorted_by_importance))
    return result_dir
  return export_strategy.ExportStrategy(
      name, export_fn, strip_default_attrs=True)
def convert_to_universal_format(dtec, sorted_feature_names,
num_dense, num_sparse_float,
num_sparse_int,
feature_name_to_proto=None):
"""Convert GTFlow trees to universal format."""
del num_sparse_int # unused.
model_and_features = generic_tree_model_pb2.ModelAndFeatures()
# TODO(jonasz): Feature descriptions should contain information about how each
# feature is processed before it's fed to the model (e.g. bucketing
# information). As of now, this serves as a list of features the model uses.
for feature_name in sorted_feature_names:
if not feature_name_to_proto:
model_and_features.features[feature_name].SetInParent()
else:
model_and_features.features[feature_name].CopyFrom(
feature_name_to_proto[feature_name])
model = model_and_features.model
model.ensemble.summation_combination_technique.SetInParent()
for tree_idx in range(len(dtec.trees)):
gtflow_tree = dtec.trees[tree_idx]
tree_weight = dtec.tree_weights[tree_idx]
member = model.ensemble.members.add()
member.submodel_id.value = tree_idx
tree = member.submodel.decision_tree
for node_idx in range(len(gtflow_tree.nodes)):
gtflow_node = gtflow_tree.nodes[node_idx]
node = tree.nodes.add()
node_type = gtflow_node.WhichOneof("node")
node.node_id.value = node_idx
if node_type == "leaf":
leaf = gtflow_node.leaf
if leaf.HasField("vector"):
for weight in leaf.vector.value:
new_value = node.leaf.vector.value.add()
new_value.float_value = weight * tree_weight
else:
for index, weight in zip(
leaf.sparse_vector.index, leaf.sparse_vector.value):
new_value = node.leaf.sparse_vector.sparse_value[index]
new_value.float_value = weight * tree_weight
else:
node = node.binary_node
# Binary nodes here.
if node_type == "dense_float_binary_split":
split = gtflow_node.dense_float_binary_split
feature_id = split.feature_column
inequality_test = node.inequality_left_child_test
inequality_test.feature_id.id.value = sorted_feature_names[feature_id]
inequality_test.type = (
generic_tree_model_pb2.InequalityTest.LESS_OR_EQUAL)
inequality_test.threshold.float_value = split.threshold
elif node_type == "sparse_float_binary_split_default_left":
split = gtflow_node.sparse_float_binary_split_default_left.split
node.default_direction = (generic_tree_model_pb2.BinaryNode.LEFT)
feature_id = split.feature_column + num_dense
inequality_test = node.inequality_left_child_test
inequality_test.feature_id.id.value = (
_SPARSE_FLOAT_FEATURE_NAME_TEMPLATE %
(sorted_feature_names[feature_id], split.dimension_id))
model_and_features.features.pop(sorted_feature_names[feature_id])
(model_and_features.features[inequality_test.feature_id.id.value]
.SetInParent())
inequality_test.type = (
generic_tree_model_pb2.InequalityTest.LESS_OR_EQUAL)
inequality_test.threshold.float_value = split.threshold
elif node_type == "sparse_float_binary_split_default_right":
split = gtflow_node.sparse_float_binary_split_default_right.split
node.default_direction = (
generic_tree_model_pb2.BinaryNode.RIGHT)
        # TODO(nponomareva): adjust this id assignment when we allow multi-
        # column sparse tensors.
feature_id = split.feature_column + num_dense
inequality_test |
moreati/trac-gitsvn | sample-plugins/HelloWorld.py | Python | bsd-3-clause | 2,080 | 0.003365 | """Example macro."""
revision = "$Rev$"
url = "$URL$"
#
# The following shows the code for macro, old-style.
#
# The `execute` function serves no purpose other than to illustrate
# the example, it will not be used anymore.
#
# ---- (ignore in your own macro) ----
# --
from trac.util import escape
def execute(hdf, txt, env):
    """Old-style macro entry point: return the 'Hello World' greeting.

    Fix: L5726 and L5730 were corrupted by stray dataset split markers;
    reconstructed.
    """
    # Currently hdf is set only when the macro is called
    # From a wiki page
    if hdf:
        hdf['wiki.macro.greeting'] = 'Hello World'
    # args will be `None` if the macro is called without parenthesis.
    args = txt or 'No arguments'
    # then, as `txt` comes from the user, it's important to guard against
    # the possibility to inject malicious HTML/Javascript, by using `escape()`:
    return 'Hello World, args = ' + escape(args)
# --
# ---- (ignore in your own macro) ----
#
# The following is the converted new-style macro
#
# ---- (reuse for your own macro) ----
# --
from trac.wiki.macros import WikiMacroBase
class HelloWorldMacro(WikiMacroBase):
    # NOTE(review): cleandoc_ is not defined in this excerpt; presumably it
    # is imported from trac.util elsewhere in the file -- confirm.
    _description = cleandoc_(
    """Simple HelloWorld macro.
    Note that the name of the class is meaningful:
     - it must end with "Macro"
     - what comes before "Macro" ends up being the macro name
    The documentation of the class (i.e. what you're reading)
    will become the documentation of the macro, as shown by
    the !MacroList macro (usually used in the TracWikiMacros page).
    """)
    def expand_macro(self, formatter, name, args):
        """Return some output that will be displayed in the Wiki content.
        `name` is the actual name of the macro (no surprise, here it'll be
        `'HelloWorld'`),
        `args` is the text enclosed in parenthesis at the call of the macro.
        Note that if there are ''no'' parenthesis (like in, e.g.
        [[HelloWorld]]), then `args` is `None`.
        """
        return 'Hello World, args = ' + unicode(args)
# Note that there's no need to HTML escape the returned data,
# as the template engine (Genshi) will do it for us.
# --
# ---- (reuse for your own macro) ----
|
brython-dev/brython | www/src/Lib/test/test_largefile.py | Python | bsd-3-clause | 10,216 | 0.000098 | """Test largefile support on system where this makes sense.
"""
import os
import stat
import sys
import unittest
import socket
import shutil
import threading
from test.support import requires, bigmemtest
from test.support import SHORT_TIMEOUT
from test.support import socket_helper
from test.support.os_helper import TESTFN, unlink
import io # C implementation of io
import _pyio as pyio # Python implementation of io
# size of file to create (>2 GiB; 2 GiB == 2,147,483,648 bytes)
size = 2_500_000_000
TESTFN2 = TESTFN + '2'
class LargeFileTest:
    """Base fixture: ensure TESTFN is a sparse file of exactly size+1 bytes.

    Subclasses provide ``self.open`` (io.open or _pyio.open). The file holds
    b'z' at offset 0 and b'a' at offset ``size``.
    """
    def setUp(self):
        if os.path.exists(TESTFN):
            mode = 'r+b'
        else:
            mode = 'w+b'
        with self.open(TESTFN, mode) as f:
            current_size = os.fstat(f.fileno())[stat.ST_SIZE]
            # Already created by an earlier test class: nothing to do.
            if current_size == size+1:
                return
            if current_size == 0:
                f.write(b'z')
            f.seek(0)
            f.seek(size)
            f.write(b'a')
            f.flush()
            self.assertEqual(os.fstat(f.fileno())[stat.ST_SIZE], size+1)
    @classmethod
    def tearDownClass(cls):
        # Opening with 'wb' must truncate the huge file, releasing the space.
        with cls.open(TESTFN, 'wb'):
            pass
        if not os.stat(TESTFN)[stat.ST_SIZE] == 0:
            raise cls.failureException('File was not truncated by opening '
                                       'with mode "wb"')
        unlink(TESTFN2)
class TestFileMethods(LargeFileTest):
    """Test that each file function works as expected for large
    (i.e. > 2 GiB) files.
    """
    # _pyio.FileIO.readall() uses a temporary bytearray then casted to bytes,
    # so memuse=2 is needed
    @bigmemtest(size=size, memuse=2, dry_run=False)
    def test_large_read(self, _size):
        # bpo-24658: Test that a read greater than 2GB does not fail.
        with self.open(TESTFN, "rb") as f:
            self.assertEqual(len(f.read()), size + 1)
            self.assertEqual(f.tell(), size + 1)
    def test_osstat(self):
        # os.stat must report the full >2 GiB size.
        self.assertEqual(os.stat(TESTFN)[stat.ST_SIZE], size+1)
    def test_seek_read(self):
        # Exercise seek() with all three whence values around the 2 GiB mark.
        with self.open(TESTFN, 'rb') as f:
            self.assertEqual(f.tell(), 0)
            self.assertEqual(f.read(1), b'z')
            self.assertEqual(f.tell(), 1)
            f.seek(0)
            self.assertEqual(f.tell(), 0)
            f.seek(0, 0)
            self.assertEqual(f.tell(), 0)
            f.seek(42)
            self.assertEqual(f.tell(), 42)
            f.seek(42, 0)
            self.assertEqual(f.tell(), 42)
            f.seek(42, 1)
            self.assertEqual(f.tell(), 84)
            f.seek(0, 1)
            self.assertEqual(f.tell(), 84)
            f.seek(0, 2)  # seek from the end
            self.assertEqual(f.tell(), size + 1 + 0)
            f.seek(-10, 2)
            self.assertEqual(f.tell(), size + 1 - 10)
            f.seek(-size-1, 2)
            self.assertEqual(f.tell(), 0)
            f.seek(size)
            self.assertEqual(f.tell(), size)
            # the 'a' that was written at the end of file above
            self.assertEqual(f.read(1), b'a')
            f.seek(-size-1, 1)
            self.assertEqual(f.read(1), b'z')
            self.assertEqual(f.tell(), 1)
    def test_lseek(self):
        # Same whence coverage as test_seek_read, through the raw fd API.
        with self.open(TESTFN, 'rb') as f:
            self.assertEqual(os.lseek(f.fileno(), 0, 0), 0)
            self.assertEqual(os.lseek(f.fileno(), 42, 0), 42)
            self.assertEqual(os.lseek(f.fileno(), 42, 1), 84)
            self.assertEqual(os.lseek(f.fileno(), 0, 1), 84)
            self.assertEqual(os.lseek(f.fileno(), 0, 2), size+1+0)
            self.assertEqual(os.lseek(f.fileno(), -10, 2), size+1-10)
            self.assertEqual(os.lseek(f.fileno(), -size-1, 2), 0)
            self.assertEqual(os.lseek(f.fileno(), size, 0), size)
            # the 'a' that was written at the end of file above
            self.assertEqual(f.read(1), b'a')
    def test_truncate(self):
        with self.open(TESTFN, 'r+b') as f:
            if not hasattr(f, 'truncate'):
                raise unittest.SkipTest("open().truncate() not available "
                                        "on this system")
            f.seek(0, 2)
            # else we've lost track of the true size
            self.assertEqual(f.tell(), size+1)
            # Cut it back via seek + truncate with no argument.
            newsize = size - 10
            f.seek(newsize)
            f.truncate()
            self.assertEqual(f.tell(), newsize)  # else pointer moved
            f.seek(0, 2)
            self.assertEqual(f.tell(), newsize)  # else wasn't truncated
            # Ensure that truncate(smaller than true size) shrinks
            # the file.
            newsize -= 1
            f.seek(42)
            f.truncate(newsize)
            self.assertEqual(f.tell(), 42)
            f.seek(0, 2)
            self.assertEqual(f.tell(), newsize)
            # XXX truncate(larger than true size) is ill-defined
            # across platform; cut it waaaaay back
            f.seek(0)
            f.truncate(1)
            self.assertEqual(f.tell(), 0)  # else pointer moved
            f.seek(0)
            self.assertEqual(len(f.read()), 1)  # else wasn't truncated
    def test_seekable(self):
        # Issue #5016; seekable() can return False when the current position
        # is negative when truncated to an int.
        for pos in (2**31-1, 2**31, 2**31+1):
            with self.open(TESTFN, 'rb') as f:
                f.seek(pos)
                self.assertTrue(f.seekable())
def skip_no_disk_space(path, required):
    """Decorator factory: skip the wrapped test unless the filesystem holding
    *path* has at least *required* bytes free at call time.

    Fix: the wrapper now uses functools.wraps so the decorated test keeps its
    original __name__/__doc__ (important for unittest reporting).
    """
    import functools

    def decorator(fun):
        @functools.wraps(fun)
        def wrapper(*args, **kwargs):
            # Checked at call time, not decoration time, so free space is
            # measured when the test actually runs.
            if shutil.disk_usage(os.path.realpath(path)).free < required:
                hsize = int(required / 1024 / 1024)
                raise unittest.SkipTest(
                    f"required {hsize} MiB of free disk space")
            return fun(*args, **kwargs)
        return wrapper
    return decorator
class TestCopyfile(LargeFileTest, unittest.TestCase):
    """shutil.copyfile on a >2 GiB sparse file must preserve size and data."""
    open = staticmethod(io.open)
    # Exact required disk space would be (size * 2), but let's give it a
    # bit more tolerance.
    @skip_no_disk_space(TESTFN, size * 2.5)
    def test_it(self):
        # Internally shutil.copyfile() can use "fast copy" methods like
        # os.sendfile().
        size = os.path.getsize(TESTFN)
        shutil.copyfile(TESTFN, TESTFN2)
        self.assertEqual(os.path.getsize(TESTFN2), size)
        # Spot-check the leading 'z', trailing 'a', and zero padding.
        with open(TESTFN2, 'rb') as f:
            self.assertEqual(f.read(5), b'z\x00\x00\x00\x00')
            f.seek(size - 5)
            self.assertEqual(f.read(), b'\x00\x00\x00\x00a')
@unittest.skipIf(not hasattr(os, 'sendfile'), 'sendfile not supported')
class TestSocketSendfile(LargeFileTest, unittest.TestCase):
open = staticmethod(io.open)
timeout = SHORT_TIMEOUT
def setUp(self):
super().setUp()
self.thread = None
def tearDown(self):
super().tearDown()
if self.thread is not None:
self.thread.join(self.timeout)
self.thread = None
def tcp_server(self, sock):
def run(sock):
with sock:
conn, _ = sock.accept()
conn.settimeout(self.timeout)
with conn, ope | n(TESTFN2, 'wb') as f:
event.wait(self.timeout)
while True:
chunk = conn.recv(65536)
if not chunk:
return
f.write(chunk)
event = threading.Event()
sock.settimeout(self.timeout)
self. | thread = threading.Thread(target=run, args=(sock, ))
self.thread.start()
event.set()
# Exact required disk space would be (size * 2), but let's give it a
# bit more tolerance.
@skip_no_disk_space(TESTFN, size * 2.5)
def test_it(self):
port = socket_helper.find_unused_port()
with socket.create_server(("", port)) as sock:
self.tcp_server(sock)
with socket.create_connection(("127.0.0.1", port)) as client:
with open(TESTFN, 'rb') as f:
client.sendfile(f)
self.tearDown()
size |
scwhitehouse/rose | lib/python/rose/config_editor/valuewidget/combobox.py | Python | gpl-3.0 | 3,038 | 0.000658 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-7 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
import pygtk
pygtk.require('2.0')
import gtk
import rose.config_editor
class ComboBoxValueWidget(gtk.HBox):
    """This is a class to add a combo box for a set of variable values.

    It needs to have some allowed values set in the variable metadata
    (rose.META_PROP_VALUES); optional per-value titles come from
    rose.META_PROP_VALUE_TITLES.
    """
    # Right-align the rendered text within the combo cell.
    FRAC_X_ALIGN = 0.9
    def __init__(self, value, metadata, set_value, hook, arg_str=None):
        super(ComboBoxValueWidget, self).__init__(homogeneous=False,
                                                  spacing=0)
        self.value = value
        self.metadata = metadata
        self.set_value = set_value
        self.hook = hook
        comboboxentry = gtk.ComboBox()
        liststore = gtk.ListStore(str)
        cell = gtk.CellRendererText()
        cell.xalign = self.FRAC_X_ALIGN
        comboboxentry.pack_start(cell)
        comboboxentry.add_attribute(cell, 'text', 0)
        var_values = self.metadata[rose.META_PROP_VALUES]
        var_titles = self.metadata.get(rose.META_PROP_VALUE_TITLES)
        # Display "Title (value)" when a non-empty title exists, else the
        # raw value.
        for k, entry in enumerate(var_values):
            if var_titles is not None and var_titles[k]:
                liststore.append([var_titles[k] + " (" + entry + ")"])
            else:
                liststore.append([entry])
        comboboxentry.set_model(liststore)
        # Pre-select the current value if it is one of the allowed values.
        if self.value in var_values:
            index = self.metadata['values'].index(self.value)
            comboboxentry.set_active(index)
        comboboxentry.connect('changed', self.setter)
        comboboxentry.connect('button-press-event',
                              lambda b: comboboxentry.grab_focus())
        comboboxentry.show()
        self.pack_start(comboboxentry, False, False, 0)
        self.grab_focus = lambda: self.hook.get_focus(comboboxentry)
        # NOTE(review): self.bad_colour is not assigned in this class --
        # presumably set by the owning widget framework; confirm.
        self.set_contains_error = (lambda e:
                                   comboboxentry.modify_bg(gtk.STATE_NORMAL,
                                                           self.bad_colour))
    def setter(self, widget):
        # Map the selected row index back to the underlying metadata value.
        index = widget.get_active()
        self.value = self.metadata[rose.META_PROP_VALUES][index]
        self.set_value(self.value)
        return False
|
albatros69/Divers | sudoku.py | Python | gpl-3.0 | 11,551 | 0.013602 | # -*- coding: utf-8 -*-
from __future__ import (unicode_literals, absolute_import, print_function, division)
from copy import deepcopy
from itertools import combinations
class Cell:
    """One sudoku cell.

    ``row``, ``col`` and ``sq`` are set objects SHARED with the other cells
    of the same row/column/square; they hold the values already placed in
    that unit. ``rm_values`` holds candidates eliminated for this cell only.

    Fix: L6057 and L6064 were corrupted by stray dataset split markers;
    reconstructed.
    """
    def __init__(self):
        self.value = 0            # 0 means "not set yet"
        self.row = set()
        self.col = set()
        self.sq = set()
        self.rm_values = set()

    def isSet(self):
        """Return True once a value has been placed in this cell."""
        return self.value > 0

    @property
    def values(self):
        """Remaining candidate values for this cell (empty once set)."""
        if self.value:
            return set()
        else:
            return set(range(1, 10)) - self.row - self.col - self.sq - self.rm_values

    def set(self, val):
        """Place ``val`` in the cell and record it in the shared units.

        Raises ValueError if ``val`` already appears in the cell's row,
        column or square. ``val <= 0`` is silently ignored.
        """
        if val > 0:
            if val not in self.row and val not in self.col and val not in self.sq:
                self.value = val
                self.row.add(val)
                self.col.add(val)
                self.sq.add(val)
            else:
                raise ValueError

    def rm_value(self, val):
        """Eliminate a candidate (int) or several candidates (set)."""
        if isinstance(val, int):
            self.rm_values.add(val)
        elif isinstance(val, set):
            self.rm_values |= val
def carre(i, j):
    """Return the 0-8 index of the 3x3 square containing cell (i, j)."""
    square_col = i // 3
    square_row = j // 3
    return square_col + 3 * square_row
def are_neigh(i, j, k, l):
    """Count the units (row, column, 3x3 square) shared by (i,j) and (k,l).

    Returns 0-3; a cell compared with itself yields 3.
    """
    same_row = i == k
    same_col = j == l
    # Same formula as carre(), inlined.
    same_square = (i // 3 + 3 * (j // 3)) == (k // 3 + 3 * (l // 3))
    return same_row + same_col + same_square
def coord(dim, i, k):
    """Map (unit index i, position k) to grid coordinates for a given scan.

    dim 0: i is the column, k the row; dim 1: i is the row, k the column;
    dim 2: i is the 3x3 square, k the position inside it. Any other dim
    falls through and returns None.
    """
    if dim == 0:
        return i, k
    if dim == 1:
        return k, i
    if dim == 2:
        return 3 * (i % 3) + k % 3, 3 * (i // 3) + k // 3
class Sudoku:
    def __init__(self, start=None):
        """Build an empty 9x9 grid, optionally filled from ``start``.

        ``start`` is an iterable of 9 columns of 9 values (0 = empty).
        The grid is keyed by (i, j); each row/column/square shares one set
        object among its nine cells so placed values propagate instantly.
        """
        self.grid = { }
        self.turns = 0
        # Cells initialisation
        for i in range(9):
            for j in range(9):
                self.grid[i,j] = Cell()
        # Rows initialisation: one shared set per row
        for j in range(9):
            row = set()
            for i in range(9):
                self.grid[i,j].row = row
        # Columns initialisation: one shared set per column
        for i in range(9):
            col = set()
            for j in range(9):
                self.grid[i,j].col = col
        # Squares initialisation: one shared set per 3x3 square
        for c in range(9):
            sq = set()
            for i in range(3):
                for j in range(3):
                    self.grid[i+3*(c%3),j+3*(c//3)].sq = sq
        if start:
            for j, c in enumerate(start):
                for i, v in enumerate(c):
                    try:
                        self.set(i, j, v)
                    except:
                        # Report the offending cell before re-raising.
                        print('###', i, j, v)
                        raise
    def __repr__(self):
        """Render the grid as ASCII art, row 8 at the top, with 3x3 rules."""
        result = '-'*25 + "\n"
        for j in range(8, -1, -1):
            line = ''
            for i in range(0, 9, 3):
                line += "| %r %r %r " % (tuple( self.grid[k,j] for k in range(i, i+3) ))
            result += "%s|\n" % line
            # Horizontal separator after every block of three rows.
            if not j%3:
                result += '-'*25 + "\n"
        return result.rstrip()
    @property
    def solved(self):
        """True when every cell of the grid has a value."""
        return all( [ self.grid[i,j].isSet() for i in range(9) for j in range(9) ] )
    def set(self, i, j, val):
        """Place ``val`` at (i, j); delegates validation to Cell.set."""
        self.grid[i,j].set(val)
    def rm_value(self, i, j, val):
        """Eliminate candidate(s) ``val`` from cell (i, j)."""
        self.grid[i,j].rm_value(val)
    def neigh_values(self, x, y, coord=False):
        """Collect info about the neighbours of (x, y) in its three units.

        With coord=False (default), return a 3-tuple of sets holding the
        candidate values of the other cells in the row, column and square.
        With coord=True, return a single set of the neighbours' (i, j)
        coordinates instead. (The parameter shadows the module-level
        coord() function inside this method.)
        """
        row_result = set()
        for i in range(9):
            if i != x:
                if coord:
                    row_result.add((i,y))
                else:
                    row_result |= self.grid[i,y].values
        col_result = set()
        for j in range(9):
            if j != y:
                if coord:
                    col_result.add((x,j))
                else:
                    col_result |= self.grid[x,j].values
        sq_result = set()
        for i in range(3):
            for j in range(3):
                if i != x%3 or j != y%3:
                    if coord:
                        sq_result.add((i+3*(x//3),j+3*(y//3)))
                    else:
                        sq_result |= self.grid[i+3*(x//3),j+3*(y//3)].values
        if coord:
            return row_result | col_result | sq_result
        else:
            return (row_result, col_result, sq_result)
    def rech_solitaire_nu(self):
        """Naked singles: fix every cell that has exactly one candidate.

        Returns True if at least one value was placed.
        """
        chgt = False
        # "Solitaire nu" = naked single
        for i in range(9):
            for j in range(9):
                l = self.grid[i,j].values
                if len(l) == 1:
                    v = l.pop()
                    print("%d,%d -> %d |" % (i, j, v), end=' ')
                    self.set(i, j, v)
                    chgt = True
                    self.turns += 1
        return chgt
    def rech_solitaire_camoufle(self):
        """Hidden singles: fix a cell whose candidate appears nowhere else
        in one of its units.

        Returns True if at least one value was placed.
        """
        chgt = False
        # "Solitaire camouflé" = hidden single
        for i in range(9):
            for j in range(9):
                l = self.grid[i,j].values
                # Subtract the candidates of each unit's other cells; a
                # singleton remainder is unique to this cell in that unit.
                for a in ( l - x for x in self.neigh_values(i, j) ):
                    if len(a) == 1:
                        v = a.pop()
                        print("%d,%d => %d |" % (i, j, v), end=' ')
                        self.set(i, j, v)
                        chgt = True
                        self.turns += 1
                        break
        return chgt
    def rech_gpes_dominants(self):
        """Box/line interactions: when all of a value's candidates in a line
        fall inside one square (or vice versa), eliminate that value from
        the rest of the other unit. Returns True if anything was eliminated.
        """
        chgt = False
        for v in range(1, 10):
            candidates = [ (i,j) for i in range(9) for j in range(9) if v in self.grid[i,j].values ]
            for candidat in candidates:
                for dim in (0, 1):  # column/row
                    # copains: candidates sharing both this line and square
                    copains = [ a for a in candidates if a[dim]==candidat[dim] and are_neigh(*candidat,*a) >= 2 ]
                    # remaining candidates in the same line / same square
                    candid_mince = [ a for a in candidates if a[dim]==candidat[dim] and a not in copains ]
                    candid_sq = [ a for a in candidates if carre(*a)==carre(*candidat) and a not in copains ]
                    if not candid_mince:
                        # v confined to this square within the line: drop it
                        # from the square's other cells.
                        for cell in candid_sq:
                            print("%d,%d -> -%d |" % (*cell, v), end=' ')
                            self.rm_value(*cell, v)
                            chgt = True
                            self.turns += 1
                    elif not candid_sq:
                        # v confined to this line within the square: drop it
                        # from the line's other cells.
                        for cell in candid_mince:
                            print("%d,%d -> -%d |" % (*cell, v), end=' ')
                            self.rm_value(*cell, v)
                            chgt = True
                            self.turns += 1
        return chgt
    def rech_gpes_nus(self):
        """Naked groups: when n mutually-neighbouring cells share at most n
        candidates, eliminate those candidates from their other neighbours.

        Returns True if anything was eliminated.
        """
        chgt = False
        candidates = [ (i,j,self.grid[i,j].values) for i in range(9) for j in range(9) if self.grid[i,j].values ]
        for (i,j,v) in candidates:
            current_gpe = [(i,j)]
            # Grow a group of neighbours whose candidates are subsets of v.
            for (k,l,m) in candidates:
                if all([ 1 <= are_neigh(*g,k,l) <= 2 for g in current_gpe ]) and m <= v:
                    current_gpe.append((k,l))
            # Group size matching the candidate count makes the group naked.
            if len(current_gpe) == len(v):
                for (k,l,m) in candidates:
                    intersect = m&v
                    if all([ 1 <= are_neigh(*g,k,l) <= 2 for g in current_gpe ]) and intersect:
                        print("%d,%d => -%s |" % (k,l,intersect), end=' ')
                        self.rm_value(k,l,intersect)
                        chgt = True
                        self.turns += 1
        return chgt
def rech_gpes_camoufles(self):
chgt = False
candidates = [ (i,j,self.grid[i,j].values) for i in range(9) for j in range(9) ]
values_count = ( # col, lig, sq
{ i: {j: set() for j in range(1, 10)} for i in range(9)},
{ i: {j: set() for j in range(1, 10)} for i in range(9)},
{ i: {j: set() for j in range(1, 10)} for i in range(9)},
)
for (i, j, values) in candidates:
for v in values:
values_count[0][i][v].add((i,j))
values_count[1][j][v].add((i,j))
values_count[2][carre(i,j)][v].add((i,j))
for dim in (0, 1, 2): # colonne/ligne/carré
for k in range(9):
count_values = [ {'vals': set((v, )), 'cells': c} for (v,c) in values_count[dim][k].items() if len(c) > 1 ]
                # len(c) == 0 corresponds to fixed values, and 1 to a naked single...
all_combinations = []
|
paulray/NICERsoft | scripts/fitharms.py | Python | mit | 11,515 | 0.022058 | #!/usr/bin/env python
from __future__ import print_function,division
from astropy.io import fits
import matplotlib.pyplot as plt
import numpy as np
import matplotlib
from pint.templates import lctemplate,lcprimitives,lcfitters
from pint.eventstats import z2m,sf_z2m, hm, sf_hm, sig2sigma
import sys
from astropy import log
import scipy.stats
def compute_fourier(phases, nh=10, pow_phase=False):
    '''Compute Fourier amplitudes from an array of pulse phases.

    phases should be [0,1.0); nh is the number of harmonics
    (1 = fundamental only).

    Returns (n, cos, sin) component arrays, unless pow_phase is True,
    in which case returns (n, Leahy-normalized powers, phases).
    The DC bin is not computed or returned.
    '''
    angles = 2.0 * np.pi * phases  # pulse phases in radians
    n = len(angles)
    harmonics = range(1, nh + 1)
    cos_comp = 2.0 * (np.asarray([np.cos(k * angles).sum() for k in harmonics]) / n)
    sin_comp = 2.0 * (np.asarray([np.sin(k * angles).sum() for k in harmonics]) / n)
    if not pow_phase:
        return n, cos_comp, sin_comp
    # CHECK! There could be errors here!
    # These should be Leahy normalized powers.
    fourier_pow = (n / 2) * (cos_comp ** 2 + sin_comp ** 2)
    fourier_phases = np.arctan2(sin_comp, cos_comp)
    return n, fourier_pow, fourier_phases
def evaluate_fourier(n, c, s, nbins, k=None):
    """Evaluate the Fourier-series model folded profile in nbins bins.

    n : total number of events (sets the DC level n/nbins)
    c, s : cosine/sine amplitudes as returned by compute_fourier
    k : if given, include only harmonic index k (0-based); otherwise sum all.

    Returns an array of length nbins with model counts per bin.

    Fix: np.float was removed in NumPy 1.24; use the builtin float (the two
    were aliases), so this no longer raises AttributeError on modern NumPy.
    """
    # This should be updated to do a little integral over each bin.
    # Currently evaluates the model at the center of each bin
    model = np.zeros(nbins) + n / nbins
    theta = 2.0 * np.pi * np.arange(nbins, dtype=float) / nbins
    theta += theta[1] / 2.0  # shift to bin centers
    if k is not None:
        model += (n / nbins) * (c[k] * np.cos((k + 1) * theta) + s[k] * np.sin((k + 1) * theta))
    else:
        for k in range(len(c)):
            model += (n / nbins) * (c[k] * np.cos((k + 1) * theta) + s[k] * np.sin((k + 1) * theta))
    return model
def evaluate_chi2(hist,model):
# Question here is whether error should be sqrt(data) or sqrt(model)
return ((hist-model)**2/model).sum()
def compute_phist(phases,nbins=200):
h, edges = np.histogram(phases,bins=np.linspace(0.0,1.0,nbins+1,endpoint=True))
return edges[:-1], h
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description = "Fit a set of pulse phases to harmonics")
parser.add_argument("evname", help="Input event file (must have PULSE_PHASE column)")
parser.add_argument("--white",help = "Replace phases with white random numbers, for testing", action="store_true")
parser.add_argument("--txt",help = "Assume input file is .txt instead of FITS", action="store_true")
parser.add_argument("--showcomps",help = "Show individual components of harmonic fit on plot", action="store_true")
parser.add_argument("--noplot",help = "Don't show any plots", action="store_true")
parser.add_argument("--output",help = "Save figures with basename", default=None)
parser.add_argument("--numharm",help="Max harmonic to use in analysis (1=Fundamental only)",default=4,type=int)
parser.add_argument("--numbins",help="Number of bins for histograms",default=200,type=int)
parser.add_argument("--emin",help="Minimum energy to include (keV)",default=0.25,type=float)
parser.add_argument("--emax",help="Maximum energy to include (keV)",default=12.0,type=float)
args = parser.parse_args()
if args.txt:
exposure = None
ph,en = np.loadtxt(args.evname,unpack=True,usecols=(1,2),skiprows=3)
log.info("Read {0} phases from .txt file".format(len(ph)))
tstart = 0.0
else:
f = fits.open(args.evname)
en = f['events'].data.field('pi')
ph = f['events'].data.field('pulse_phase')
log.info("Read {0} phases from FITS file".format(len(ph)))
exposure = float(f['events'].header['EXPOSURE'])
tstart = float(f['events'].header['TSTART'])
log.info("Exposure = {0} s".format(exposure))
if args.white:
# Random phases uni | form over [0,1)
ph = np.random.random_sample(len(en))
log.info("Replaced with {0} random phases".format(len(en)))
matplotlib.rcParams['font.family'] = "serif"
matplotlib.rcParams.update({'font.size': 13})
matplotlib.rc('axes', linewidth=1.5)
if args.output:
resultsfile = open("{0}_results.txt".format(args.output),"w")
print("{0:.6f}".format(tstart),file=resultsfile)
# | Filter on energy
idx = np.where(np.logical_and(en > int(args.emin*100), en < int(args.emax*100) ))[0]
ph = ph[idx]
en = en[idx]
# Hack to manually split out a segment
#q = 3 # Use 0, 1, 2, 3
#qn = len(ph)//4
#ph = ph[q*qn:(q+1)*qn]
#en = en[q*qn:(q+1)*qn]
nbins = args.numbins
bins,phist = compute_phist(ph,nbins=nbins)
fig,axs = plt.subplots(nrows=2,ncols=1)
plt.subplots_adjust(left=0.15, bottom=0.1, right=0.97, top=0.94,hspace=0.001)
ax=axs[0]
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True, labelbottom=False)
# ax.text(.5,.8,'PSR J0030+0451', horizontalalignment='center', transform=ax.transAxes)
# ax.text(.5,.8,'PSR J0437-4715', horizontalalignment='center', transform=ax.transAxes)
# ax.text(.2,.8,'PSR J1231-1411', horizontalalignment='center', transform=ax.transAxes)
# ax.text(.8,.8,'PSR J2124-3358', horizontalalignment='center', transform=ax.transAxes)
ax.step(np.concatenate((bins,np.ones(1))),np.concatenate((phist,phist[-1:])),color='k',where='post')
ax.set_xlim(0.0,1.0)
ax.set_ylabel('Counts per bin')
n,c,s = compute_fourier(ph,nh=args.numharm)
model = evaluate_fourier(n,c,s,nbins)
ax.plot(bins+bins[1]/2.0,model,color='r',lw=2)
if args.showcomps:
for k in range(len(c)):
ax.plot(np.linspace(0.0,1.0,nbins),evaluate_fourier(n,c,s,nbins,k=k),ls='--')
fn,fpow,fphase = compute_fourier(ph,nh=args.numharm,pow_phase=True)
i=1
log.info("Harm LeahyPower Phase(deg)")
for fp, fph in zip(fpow,fphase):
log.info("{0:2d} {1:12.3f} {2:9.3f} deg".format(i,fp,np.rad2deg(fph)))
if args.output:
print("{0:2d} {1:12.3f} {2:9.3f}".format(i,fp,np.rad2deg(fph)),file=resultsfile)
i+=1
pcounts = (model-model.min()).sum()
pcounts_err = np.sqrt(model.sum() + model.min()*len(model))
if exposure:
log.info("Pulsed counts = {0:.3f}, count rate = {1:.3f}+/-{2:.4f} c/s".format(pcounts, pcounts/exposure, pcounts_err/exposure))
log.info("Total rate = {0:.3f} c/s, Unpulsed rate = {1:.3f} c/s".format(n/exposure, n/exposure-pcounts/exposure))
ax = axs[1]
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
ax.errorbar(np.linspace(0.0,1.0,nbins),phist-model,yerr=np.sqrt(phist),fmt='.',ecolor='k')
chisq = evaluate_chi2(phist,model)
nparams = 1 + 2*args.numharm # 1 for DC + 2 for each sinusoidal component
ax.set_xlim(0.0,1.0)
ax.set_xlabel('Pulse Phase')
ax.set_ylabel('Residuals (counts)')
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True)
ndof = len(phist)-nparams
axs[0].set_title("NumHarm = {0}, Chisq = {1:.2f}, DOF = {2}".format(args.numharm,chisq,ndof))
ax.grid(1)
# ax.set_label("{0} Harmonic Fit to Profile".format(args.numharm))
plt.tight_layout()
if args.output:
fig.savefig("{0}_harmfit.pdf".format(args.output))
# Plot distribution of residuals to compare to a gaussian
fig,ax = plt.subplots()
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
chi = (phist-model)/np.sqrt(model)
#x, y = np.histogram(chi,bins=np.linspace(-2.0,2.0,0.1))
x = np.linspace(-3.0,3.0,32,endpoint=True)
ax.hist(chi,bins=x,density=True)
ax.set_title('Histogram of residuals')
ax.plot(x,scipy.stats.norm.pdf(x))
plt.tight_layout()
# Plot histogram of phase differences to see if they are Poisson
fig,ax = plt.subplots()
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
ph.sort()
pdiffs = (ph[1:]-ph[:-1])*1.0
x = np.linspace(0.0,50.0e-6,200,endpoint=True)
histn, histbins, histpatches = ax.hist(pdiffs,bins=x,density=True,log=True)
ax.set_title('H |
mahak/neutron | neutron/db/models/provisioning_block.py | Python | apache-2.0 | 1,178 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance | with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under | the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.db import model_base
from neutron_lib.db import standard_attr
import sqlalchemy as sa
class ProvisioningBlock(model_base.BASEV2):
# the standard attr id of the thing we want to block
standard_attr_id = (
sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'),
sa.ForeignKey(standard_attr.StandardAttribute.id,
ondelete="CASCADE"),
primary_key=True))
# the entity that wants to block the status change (e.g. L2 Agent)
entity = sa.Column(sa.String(255), nullable=False, primary_key=True)
|
crmccreary/openerp_server | openerp/addons/sale_margin/sale_margin.py | Python | agpl-3.0 | 4,000 | 0.00675 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at you | r option) any later version.
#
# This program is distributed in the hope that it will be usef | ul,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class sale_order_line(osv.osv):
_inherit = "sale.order.line"
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
res = super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty=qty,
uom=uom, qty_uos=qty_uos, uos=uos, name=name, partner_id=partner_id,
lang=lang, update_tax=update_tax, date_order=date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
if not pricelist:
return res
frm_cur = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id
to_cur = self.pool.get('product.pricelist').browse(cr, uid, [pricelist])[0].currency_id.id
if product:
purchase_price = self.pool.get('product.product').browse(cr, uid, product).standard_price
price = self.pool.get('res.currency').compute(cr, uid, frm_cur, to_cur, purchase_price, round=False)
res['value'].update({'purchase_price': price})
return res
def _product_margin(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = 0
if line.product_id:
if line.purchase_price:
res[line.id] = round((line.price_unit*line.product_uos_qty*(100.0-line.discount)/100.0) -(line.purchase_price*line.product_uos_qty), 2)
else:
res[line.id] = round((line.price_unit*line.product_uos_qty*(100.0-line.discount)/100.0) -(line.product_id.standard_price*line.product_uos_qty), 2)
return res
_columns = {
'margin': fields.function(_product_margin, string='Margin',
store = True),
'purchase_price': fields.float('Cost Price', digits=(16,2))
}
sale_order_line()
class sale_order(osv.osv):
_inherit = "sale.order"
def _product_margin(self, cr, uid, ids, field_name, arg, context=None):
result = {}
for sale in self.browse(cr, uid, ids, context=context):
result[sale.id] = 0.0
for line in sale.order_line:
result[sale.id] += line.margin or 0.0
return result
def _get_order(self, cr, uid, ids, context=None):
return super(self,sale_order)._get_order(cr, uid, ids, context=context)
_columns = {
'margin': fields.function(_product_margin, string='Margin', help="It gives profitability by calculating the difference between the Unit Price and Cost Price.", store={
'sale.order.line': (_get_order, ['margin'], 20),
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 20),
}),
}
sale_order()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
huguesv/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/conda/models/dist.py | Python | apache-2.0 | 11,208 | 0.002141 | # -*- coding: utf-8 -*-
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import namedtuple
from logging import getLogger
import re
from .channel import Channel
from .package_info import PackageInfo
from .records import PackageRecord
from .. import CondaError
from .._vendor.auxlib.entity import Entity, EntityType, IntegerField, StringField
from ..base.constants import CONDA_PACKAGE_EXTENSIONS, DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL
from ..base.context import context
from ..common.compat import ensure_text_type, text_type, with_metaclass
from ..common.constants import NULL
from ..common.url import has_platform, is_url, join_url
log = getLogger(__name__)
DistDetails = namedtuple('DistDetails', ('name', 'version', 'build_string', 'build_number',
'dist_name', 'fmt'))
IndexRecord = PackageRecord # for conda-build backward compat
class DistType(EntityType):
def __call__(cls, *args, **kwargs):
if len(args) == 1 and not kwargs:
value = args[0]
if value in Dist._cache_:
return Dist._cache_[value]
elif isinstance(value, Dist):
dist = value
elif isinstance(value, PackageRecord):
dist = Dist.from_string(value.fn, channel_override=value.channel.canonical_name)
elif hasattr(value, 'dist') and isinstance(value.dist, Dist):
dist = value.dist
elif isinstance(value, PackageInfo):
dist = Dist.from_string(value.repodata_record.fn,
channel_override=value.channel.canonical_name)
elif isinstance(value, Channel):
dist = Dist.from_url(value.url())
else:
dist | = Dist.from_string(value)
Di | st._cache_[value] = dist
return dist
else:
return super(DistType, cls).__call__(*args, **kwargs)
def strip_extension(original_dist):
for ext in CONDA_PACKAGE_EXTENSIONS:
if original_dist.endswith(ext):
original_dist = original_dist[:-len(ext)]
return original_dist
def split_extension(original_dist):
stripped = strip_extension(original_dist)
return stripped, original_dist[len(stripped):]
@with_metaclass(DistType)
class Dist(Entity):
_cache_ = {}
_lazy_validate = True
channel = StringField(required=False, nullable=True, immutable=True)
dist_name = StringField(immutable=True)
name = StringField(immutable=True)
fmt = StringField(immutable=True)
version = StringField(immutable=True)
build_string = StringField(immutable=True)
build_number = IntegerField(immutable=True)
base_url = StringField(required=False, nullable=True, immutable=True)
platform = StringField(required=False, nullable=True, immutable=True)
def __init__(self, channel, dist_name=None, name=None, version=None, build_string=None,
build_number=None, base_url=None, platform=None, fmt='.tar.bz2'):
super(Dist, self).__init__(channel=channel,
dist_name=dist_name,
name=name,
version=version,
build_string=build_string,
build_number=build_number,
base_url=base_url,
platform=platform,
fmt=fmt)
def to_package_ref(self):
return PackageRecord(
channel=self.channel,
subdir=self.platform,
name=self.name,
version=self.version,
build=self.build_string,
build_number=self.build_number,
)
@property
def full_name(self):
return self.__str__()
@property
def build(self):
return self.build_string
@property
def subdir(self):
return self.platform
@property
def pair(self):
return self.channel or DEFAULTS_CHANNEL_NAME, self.dist_name
@property
def quad(self):
# returns: name, version, build_string, channel
parts = self.dist_name.rsplit('-', 2) + ['', '']
return parts[0], parts[1], parts[2], self.channel or DEFAULTS_CHANNEL_NAME
def __str__(self):
return "%s::%s" % (self.channel, self.dist_name) if self.channel else self.dist_name
@property
def is_feature_package(self):
return self.dist_name.endswith('@')
@property
def is_channel(self):
return bool(self.base_url and self.platform)
def to_filename(self, extension=None):
if self.is_feature_package:
return self.dist_name
else:
return self.dist_name + self.fmt
def to_matchspec(self):
return ' '.join(self.quad[:3])
def to_match_spec(self):
from .match_spec import MatchSpec
base = '='.join(self.quad[:3])
return MatchSpec("%s::%s" % (self.channel, base) if self.channel else base)
@classmethod
def from_string(cls, string, channel_override=NULL):
string = text_type(string)
if is_url(string) and channel_override == NULL:
return cls.from_url(string)
if string.endswith('@'):
return cls(channel='@',
name=string,
version="",
build_string="",
build_number=0,
dist_name=string)
REGEX_STR = (r'(?:([^\s\[\]]+)::)?' # optional channel
r'([^\s\[\]]+)' # 3.x dist
r'(?:\[([a-zA-Z0-9_-]+)\])?' # with_features_depends
)
channel, original_dist, w_f_d = re.search(REGEX_STR, string).groups()
original_dist, fmt = split_extension(original_dist)
if channel_override != NULL:
channel = channel_override
if not channel:
channel = UNKNOWN_CHANNEL
# enforce dist format
dist_details = cls.parse_dist_name(original_dist)
return cls(channel=channel,
name=dist_details.name,
version=dist_details.version,
build_string=dist_details.build_string,
build_number=dist_details.build_number,
dist_name=original_dist,
fmt=fmt)
@staticmethod
def parse_dist_name(string):
original_string = string
try:
string = ensure_text_type(string)
no_fmt_string, fmt = split_extension(string)
# remove any directory or channel information
if '::' in no_fmt_string:
dist_name = no_fmt_string.rsplit('::', 1)[-1]
else:
dist_name = no_fmt_string.rsplit('/', 1)[-1]
parts = dist_name.rsplit('-', 2)
name = parts[0]
version = parts[1]
build_string = parts[2] if len(parts) >= 3 else ''
build_number_as_string = ''.join(filter(lambda x: x.isdigit(),
(build_string.rsplit('_')[-1]
if build_string else '0')))
build_number = int(build_number_as_string) if build_number_as_string else 0
return DistDetails(name, version, build_string, build_number, dist_name, fmt)
except:
raise CondaError("dist_name is not a valid conda package: %s" % original_string)
@classmethod
def from_url(cls, url):
assert is_url(url), url
if not any(url.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) and '::' not in url:
raise CondaError("url '%s' is not a conda package" % url)
dist_details = cls.parse_dist_name(url)
if '::' in url:
url_no_tarball = url.rsplit('::', 1)[0]
platform = context.subdir
base_url = url_no_tarball.split('::')[0]
ch |
7ws/django-setmagic | setmagic/backend.py | Python | mit | 1,189 | 0 | from setmagic.models import Setting
from .exceptions import NoSuchSettingError
class SettingsBackend(object):
'''
A structure to organize the settings scheme and provide a simple API for
easier access to all registered settings.
'''
settings = property(lambda self: self._settings)
def __init__(self, settings_defs):
'''
Sync settings schema to both the backend and database
'''
self.defs = settings_defs
for setting_def in settings_defs.values():
try:
setting = Setting. | objects.get(name=setting_def['name'])
except Setting.DoesNotExist:
name = setting_def['name']
setting = Setting(
name=name,
current_value=self.defs[name | ].get('default'))
setting.__dict__.update(**setting_def)
setting.save()
def get(self, name):
try:
return Setting.objects.get(name=name).current_value
except Setting.DoesNotExist:
raise NoSuchSettingError(name)
def set(self, name, value):
Setting.objects.filter(name=name).update(current_value=value)
|
SekoiaLab/Fastir_Collector | memory/windows2012ServerMemory.py | Python | gpl-3.0 | 662 | 0.001511 | from __future__ import unicode_literals
from memory.mem import _Memory
class Windows2012ServerMemory(_Memory):
def __init__(self, params):
super(Windows2012ServerMemory, self).__init__(params)
def csv_all_modules_dll(self):
super(Windows2012ServerMemory, self)._csv_all_modules_dll()
def csv_all_modules_opened_files(self):
super(Windows2012ServerMemory, self)._csv_al | l_modules_opened_files()
def json_all_modules_dll(self):
super(Windows2 | 012ServerMemory, self)._json_all_modules_dll()
def json_all_modules_opened_files(self):
super(Windows2012ServerMemory, self)._json_all_modules_opened_files() |
cjaymes/expatriate | src/expatriate/xpath/NodeTest.py | Python | lgpl-3.0 | 877 | 0.00114 | # Copyright 2016 Casey Jaymes
# This file is part of Expatriate.
#
# Expatriate is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at | your option) any later version.
#
# Expatriate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more deta | ils.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Expatriate. If not, see <http://www.gnu.org/licenses/>.
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class NodeTest(object):
def __init__(self):
self.children = []
|
Kaggle/docker-python | tests/test_pytorch_ignite.py | Python | apache-2.0 | 245 | 0 | import unittest
|
from ignite.engine import Engine
class TestPytorchIgnite(unittest.TestCase):
def test_engine(self):
def update_fn(engine, batch):
| pass
engine = Engine(update_fn)
engine.run([0, 1, 2])
|
joowani/quadriga | quadriga/rest.py | Python | mit | 4,123 | 0 | from __future__ import absolute_import, unicode_literals, division
import hashlib
import hmac
import time
from quadriga.exceptions import RequestError
class RestClient(object):
"""REST client using HMAC SHA256 authentication.
:param url: QuadrigaCX URL.
:type url: str | unicode
:param api_key: QuadrigaCX API key.
:type api_key: str | unicode
:param api_secret: QuadrigaCX API secret.
:type api_secret: str | unicode
:param client_id: QuadrigaCX client ID (number used for user login).
:type client_id: str | unicode | int
:param timeout: Number of seconds to wait for QuadrigaCX to respond to an
API request.
:type timeout: int | float
:param session: User-defined requests.Session object.
:type session: requests.Session
"""
http_success_status_codes = {200, 201, 202}
def __init__(self, url, api_key, api_secret, client_id, timeout, session):
self._url = url
self._api_key = str(api_key)
self._hmac_key = str(api_secret).encode('utf-8')
self._client_id = str(client_id)
self._timeout = timeout
self._session = session
def _handle_response(self, resp):
"""Handle the response from QuadrigaCX.
:param resp: Response from QuadrigaCX.
:type resp: requests.models.Response
:return: Response body.
:rtype: dict
:raise quadriga.exceptions.RequestError: If HTTP OK was not returned.
"""
http_code = resp.status_code
if http_code not i | n self.http_success_status_codes:
raise RequestError(
response=resp,
message='[HTTP {}] {}'.format(http_code, resp.reason)
)
try:
body = resp.json()
| except ValueError:
raise RequestError(
response=resp,
message='[HTTP {}] response body: {}'.format(
http_code,
resp.text
)
)
else:
if 'error' in body:
error_code = body['error'].get('code', '?')
raise RequestError(
response=resp,
message='[HTTP {}][ERR {}] {}'.format(
resp.status_code,
error_code,
body['error'].get('message', 'no error message')
),
error_code=error_code
)
return body
def get(self, endpoint, params=None):
"""Send an HTTP GET request to QuadrigaCX.
:param endpoint: API endpoint.
:type endpoint: str | unicode
:param params: URL parameters.
:type params: dict
:return: Response body from QuadrigaCX.
:rtype: dict
:raise quadriga.exceptions.RequestError: If HTTP OK was not returned.
"""
response = self._session.get(
url=self._url + endpoint,
params=params,
timeout=self._timeout
)
return self._handle_response(response)
def post(self, endpoint, payload=None):
"""Send an HTTP POST request to QuadrigaCX.
:param endpoint: API endpoint.
:type endpoint: str | unicode
:param payload: Request payload.
:type payload: dict
:return: Response body from QuadrigaCX.
:rtype: dict
:raise quadriga.exceptions.RequestError: If HTTP OK was not returned.
"""
nonce = int(time.time() * 10000)
hmac_msg = str(nonce) + self._client_id + self._api_key
signature = hmac.new(
key=self._hmac_key,
msg=hmac_msg.encode('utf-8'),
digestmod=hashlib.sha256
).hexdigest()
if payload is None:
payload = {}
payload['key'] = self._api_key
payload['nonce'] = nonce
payload['signature'] = signature
response = self._session.post(
url=self._url + endpoint,
json=payload,
timeout=self._timeout
)
return self._handle_response(response)
|
devartis/passbook | passbook/models.py | Python | mit | 15,345 | 0.001499 | # -*- coding: utf-8 -*-
import decimal
import hashlib
import json
import zipfile
from io import BytesIO
from M2Crypto import SMIME
from M2Crypto import X509
from M2Crypto.X509 import X509_Stack
class Alignment:
LEFT = 'PKTextAlignmentLeft'
CENTER = 'PKTextAlignmentCenter'
RIGHT = 'PKTextAlignmentRight'
JUSTIFIED = 'PKTextAlignmentJustified'
NATURAL = 'PKTextAlignmentNatural'
class BarcodeFormat:
PDF417 = 'PKBarcodeFormatPDF417'
QR = 'PKBarcodeFormatQR'
AZTEC = 'PKBarcodeFormatAztec'
CODE128 = 'PKBarcodeFormatCode128'
class TransitType:
AIR = 'PKTransitTypeAir'
TRAIN = 'PKTransitTypeTrain'
BUS = 'PKTransitTypeBus'
BOAT = 'PKTransitTypeBoat'
GENERIC = 'PKTransitTypeGeneric'
class DateStyle:
NONE = 'PKDateStyleNone'
SHORT = 'PKDateStyleShort'
MEDIUM = 'PKDateStyleMedium'
LONG = 'PKDateStyleLong'
FULL = 'PKDateStyleFull'
class NumberStyle:
DECIMAL = 'PKNumberStyleDecimal'
PERCENT = 'PKNumberStylePercent'
SCIENTIFIC = 'PKNumberStyleScientific'
SPELLOUT = 'PKNumberStyleSpellOut'
class Field(object):
def __init__(self, key, value, label=''):
self.key = key # Required. The key must be unique within the scope
self.value = value # Required. Value of the field. For example, 42
self.label = label # Optional. Label text for the field.
self.changeMessage = '' # Optional. Format string for the alert text that is displayed when the pass is updated
self.textAlignment = Alignment.LEFT
def json_dict(self):
return self.__dict__
class DateField(Field):
def __init__(self, key, value, label='', dateStyle=DateStyle.SHORT,
timeStyle=DateStyle.SHORT, ignoresTimeZone=False):
super().__init__(key, value, label)
self.dateStyle = dateStyle # Style of date to display
self.timeStyle = timeStyle # Style of time to display
self.isRelative = False # If true, the labels value is displayed as a relative date
if ignoresTimeZone:
self.ignoresTimeZone = ignoresTimeZone
def json_dict(self):
return self.__dict__
class NumberField(Field):
def __init__(self, key, value, label=''):
super().__init__(key, value, label)
self.numberStyle = NumberStyle.DECIMAL # Style of date to display
def json_dict(self):
return self.__dict__
class CurrencyField(Field):
def __init__(self, key, value, label='', currencyCode=''):
super().__init__(key, value, label)
self.currencyCode = currencyCode # ISO 4217 currency code
def json_dict(self):
return self.__dict__
class Barcode(object):
def __init__(self, message, format=BarcodeFormat.PDF417, altText='', messageEncoding='iso-8859-1'):
self.format = format
self.message = message # Required. Message or payload to be displayed as a barcode
self.messageEncoding = messageEncoding # Required. Text encoding that is used to convert the message
if altText:
self.altText = altText # Optional. Text displayed near the barcode
def json_dict(self):
return self.__dict__
class Location(object):
def __init__(self, latitude, longitude, altitude=0.0):
# Required. Latitude, in degrees, of the location.
try:
self.latitude = float(latitude)
except (ValueError, TypeError):
self.latitude = 0.0
# Required. Longitude, in degrees, of the location.
try:
self.longitude = float(longitude)
except (ValueError, TypeError):
self.longitude = 0.0
# Optional. Altitude, in meters, of the location.
try:
self.altitude = float(altitude)
except (ValueError, TypeError):
sel | f.altitude = 0.0
# Optional. Notification distance
self.distance = None
# Optional. Text displayed on the lock screen when
# the pass is currently near the location
self.relevantText = ''
def json_dict(self):
return self.__dict__
class IBeacon(object):
def __init__(self, proximityuuid, major, minor):
| # IBeacon data
self.proximityUUID = proximityuuid
self.major = major
self.minor = minor
# Optional. Text message where near the ibeacon
self.relevantText = ''
def json_dict(self):
return self.__dict__
class PassInformation(object):
def __init__(self):
self.headerFields = []
self.primaryFields = []
self.secondaryFields = []
self.backFields = []
self.auxiliaryFields = []
def addHeaderField(self, key, value, label):
self.headerFields.append(Field(key, value, label))
def addPrimaryField(self, key, value, label):
self.primaryFields.append(Field(key, value, label))
def addSecondaryField(self, key, value, label):
self.secondaryFields.append(Field(key, value, label))
def addBackField(self, key, value, label):
self.backFields.append(Field(key, value, label))
def addAuxiliaryField(self, key, value, label):
self.auxiliaryFields.append(Field(key, value, label))
def json_dict(self):
d = {}
if self.headerFields:
d.update({'headerFields': [f.json_dict() for f in self.headerFields]})
if self.primaryFields:
d.update({'primaryFields': [f.json_dict() for f in self.primaryFields]})
if self.secondaryFields:
d.update({'secondaryFields': [f.json_dict() for f in self.secondaryFields]})
if self.backFields:
d.update({'backFields': [f.json_dict() for f in self.backFields]})
if self.auxiliaryFields:
d.update({'auxiliaryFields': [f.json_dict() for f in self.auxiliaryFields]})
return d
class BoardingPass(PassInformation):
def __init__(self, transitType=TransitType.AIR):
super().__init__()
self.transitType = transitType
self.jsonname = 'boardingPass'
def json_dict(self):
d = super().json_dict()
d.update({'transitType': self.transitType})
return d
class Coupon(PassInformation):
def __init__(self):
super().__init__()
self.jsonname = 'coupon'
class EventTicket(PassInformation):
def __init__(self):
super().__init__()
self.jsonname = 'eventTicket'
class Generic(PassInformation):
def __init__(self):
super().__init__()
self.jsonname = 'generic'
class StoreCard(PassInformation):
def __init__(self):
super().__init__()
self.jsonname = 'storeCard'
class Pass(object):
def __init__(self, passInformation, json='', passTypeIdentifier='',
organizationName='', teamIdentifier=''):
self._files = {} # Holds the files to include in the .pkpass
self._hashes = {} # Holds the SHAs of the files array
# Standard Keys
# Required. Team identifier of the organization that originated and
# signed the pass, as issued by Apple.
self.teamIdentifier = teamIdentifier
# Required. Pass type identifier, as issued by Apple. The value must
# correspond with your signing certificate. Used for grouping.
self.passTypeIdentifier = passTypeIdentifier
# Required. Display name of the organization that originated and
# signed the pass.
self.organizationName = organizationName
# Required. Serial number that uniquely identifies the pass.
self.serialNumber = ''
# Required. Brief description of the pass, used by the iOS
# accessibility technologies.
self.description = ''
# Required. Version of the file format. The value must be 1.
self.formatVersion = 1
# Visual Appearance Keys
self.backgroundColor = None # Opti |
davidcox/freetype-py | freetype/__init__.py | Python | bsd-3-clause | 71,996 | 0.012584 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
#
# FreeType high-level python API - Copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
#
# -----------------------------------------------------------------------------
'''
FreeType high-level python API
This the bindings for the high-level API of FreeType (that must be installed
somewhere on your system).
Note:
-----
C Library will be searched using the ctypes.util.find_library. However, this
search might fail. In such a case (or for other reasons), you can specify the
FT_library_filename before importing the freetype library and freetype will use
the specified one.
'''
from ctypes import *
from ft_types import *
from ft_enums import *
from ft_errors import *
from ft_structs import *
import ctypes.util
import os
PACKAGE_FOLDER = os.path.abspath(os.path.dirname(__file__))
__dll__ = None
__handle__ = None
FT_Library_filename = ctypes.util.find_library('freetype')
if not FT_Library_filename:
paths_to_try = [ os.path.join(PACKAGE_FOLDER, 'libfreetype.dll'), # Windows
'libfreetype.so.6', # Linux
'/usr/X11/lib/libfreetype.dylib', # MacOS X
]
for p in paths_to_try:
try:
__dll__ = CDLL(p)
except OSError:
pass
if __dll__ is not None:
break
if not FT_Library_filename and not __dll__:
raise RuntimeError, 'Freetype library not found'
if not __dll__:
__dll__ = CDLL(FT_Library_filename)
# -----------------------------------------------------------------------------
# High-level API of FreeType 2
# -----------------------------------------------------------------------------
FT_Init_FreeType = __dll__.FT_Init_FreeType
FT_Done_FreeType = __dll__.FT_Done_FreeType
FT_Library_Version = __dll__.FT_Library_Version
def __del_library__(self):
global __handle__
if __handle__:
try:
FT_Done_FreeType(byref(self))
__handle__ = None
except:
pass
FT_Library.__del__ = __del_library__
def get_handle():
'''
Get unique FT_Library handle
'''
global __handle__
if not __handle__:
__handle__ = FT_Library( )
error = FT_Init_FreeType( byref(__handle__) )
| if error: raise FT_Exception(error)
try | :
set_lcd_filter( FT_LCD_FILTER_DEFAULT )
except:
pass
if error: raise FT_Exception(error)
return __handle__
def version():
    '''
    Return the version of the linked FreeType library as a tuple
    (major, minor, patch).
    '''
    major, minor, patch = FT_Int(), FT_Int(), FT_Int()
    FT_Library_Version(get_handle(), byref(major), byref(minor), byref(patch))
    return (major.value, minor.value, patch.value)
# Remaining C entry points, bound by name from the shared library.
# Functions that only exist from a given FreeType release onwards are
# guarded by runtime version checks.
FT_Get_X11_Font_Format = __dll__.FT_Get_X11_Font_Format
FT_Get_X11_Font_Format.restype = c_char_p
FT_Library_SetLcdFilter= __dll__.FT_Library_SetLcdFilter
if version()>=(2,4,0):
    FT_Library_SetLcdFilterWeights = __dll__.FT_Library_SetLcdFilterWeights
# Face creation / destruction.
FT_New_Face            = __dll__.FT_New_Face
FT_New_Memory_Face     = __dll__.FT_New_Memory_Face
FT_Open_Face           = __dll__.FT_Open_Face
FT_Attach_File         = __dll__.FT_Attach_File
FT_Attach_Stream       = __dll__.FT_Attach_Stream
if version()>=(2,4,2):
    FT_Reference_Face  = __dll__.FT_Reference_Face
FT_Done_Face           = __dll__.FT_Done_Face
FT_Done_Glyph          = __dll__.FT_Done_Glyph
# Sizing and glyph loading / rendering.
FT_Select_Size         = __dll__.FT_Select_Size
FT_Request_Size        = __dll__.FT_Request_Size
FT_Set_Char_Size       = __dll__.FT_Set_Char_Size
FT_Set_Pixel_Sizes     = __dll__.FT_Set_Pixel_Sizes
FT_Load_Glyph          = __dll__.FT_Load_Glyph
FT_Load_Char           = __dll__.FT_Load_Char
FT_Set_Transform       = __dll__.FT_Set_Transform
FT_Render_Glyph        = __dll__.FT_Render_Glyph
FT_Get_Kerning         = __dll__.FT_Get_Kerning
FT_Get_Track_Kerning   = __dll__.FT_Get_Track_Kerning
FT_Get_Glyph_Name      = __dll__.FT_Get_Glyph_Name
FT_Get_Glyph           = __dll__.FT_Get_Glyph
FT_Glyph_Get_CBox      = __dll__.FT_Glyph_Get_CBox
FT_Get_Postscript_Name = __dll__.FT_Get_Postscript_Name
FT_Get_Postscript_Name.restype = c_char_p
# Charmap handling.
FT_Select_Charmap      = __dll__.FT_Select_Charmap
FT_Set_Charmap         = __dll__.FT_Set_Charmap
FT_Get_Charmap_Index   = __dll__.FT_Get_Charmap_Index
FT_Get_CMap_Language_ID= __dll__.FT_Get_CMap_Language_ID
FT_Get_CMap_Format     = __dll__.FT_Get_CMap_Format
FT_Get_Char_Index      = __dll__.FT_Get_Char_Index
FT_Get_First_Char      = __dll__.FT_Get_First_Char
FT_Get_Next_Char       = __dll__.FT_Get_Next_Char
FT_Get_Name_Index      = __dll__.FT_Get_Name_Index
FT_Get_SubGlyph_Info   = __dll__.FT_Get_SubGlyph_Info
if version()>=(2,3,8):
    FT_Get_FSType_Flags    = __dll__.FT_Get_FSType_Flags
    FT_Get_FSType_Flags.restype  = c_ushort
FT_Get_Sfnt_Name_Count = __dll__.FT_Get_Sfnt_Name_Count
FT_Get_Sfnt_Name       = __dll__.FT_Get_Sfnt_Name
# Outline and stroker API.
FT_Outline_GetInsideBorder  = __dll__.FT_Outline_GetInsideBorder
FT_Outline_GetOutsideBorder = __dll__.FT_Outline_GetOutsideBorder
FT_Outline_Get_BBox         = __dll__.FT_Outline_Get_BBox
FT_Outline_Get_CBox         = __dll__.FT_Outline_Get_CBox
FT_Stroker_New              = __dll__.FT_Stroker_New
FT_Stroker_Set              = __dll__.FT_Stroker_Set
FT_Stroker_Rewind           = __dll__.FT_Stroker_Rewind
FT_Stroker_ParseOutline     = __dll__.FT_Stroker_ParseOutline
FT_Stroker_BeginSubPath     = __dll__.FT_Stroker_BeginSubPath
FT_Stroker_EndSubPath       = __dll__.FT_Stroker_EndSubPath
FT_Stroker_LineTo           = __dll__.FT_Stroker_LineTo
FT_Stroker_ConicTo          = __dll__.FT_Stroker_ConicTo
FT_Stroker_CubicTo          = __dll__.FT_Stroker_CubicTo
FT_Stroker_GetBorderCounts  = __dll__.FT_Stroker_GetBorderCounts
FT_Stroker_ExportBorder     = __dll__.FT_Stroker_ExportBorder
FT_Stroker_GetCounts        = __dll__.FT_Stroker_GetCounts
FT_Stroker_Export           = __dll__.FT_Stroker_Export
FT_Stroker_Done             = __dll__.FT_Stroker_Done
FT_Glyph_Stroke             = __dll__.FT_Glyph_Stroke
FT_Glyph_StrokeBorder       = __dll__.FT_Glyph_StrokeBorder
FT_Glyph_To_Bitmap          = __dll__.FT_Glyph_To_Bitmap
def set_lcd_filter(filt):
    '''
    Select the colour filter applied to LCD-decimated bitmaps, i.e. glyphs
    rendered through FT_Render_Glyph / FT_Load_Glyph / FT_Load_Char /
    FT_Outline_Get_Bitmap in FT_RENDER_MODE_LCD or FT_RENDER_MODE_LCD_V.

    Note:
    -----
    Filtering is always disabled by default; pass a 'filter' value other
    than FT_LCD_FILTER_NONE to enable it explicitly.

    Due to PATENTS covering subpixel rendering, this call only returns
    'FT_Err_Unimplemented_Feature' unless the library was built with
    FT_CONFIG_OPTION_SUBPIXEL_RENDERING defined (not the case for default
    builds of FreeType).

    It does not affect the output of FT_Outline_Render or
    FT_Outline_Get_Bitmap.

    When active, LCD glyph bitmaps may be wider or taller than the plain
    outline would suggest (e.g. up to 3 extra pixels on each side for
    FT_RENDER_MODE_LCD); bitmap offsets are adjusted automatically, so
    client layout code needs no changes.
    '''
    status = FT_Library_SetLcdFilter(get_handle(), filt)
    if status:
        raise FT_Exception(status)
def set_lcd_filter_weights(a,b,c,d,e):
''' |
nonZero/demos-python | src/examples/short/datetime/date_to_mysql.py | Python | gpl-3.0 | 511 | 0.001957 | #!/usr/bin/python3
'''
example of how to convert the output of date(1) on the command line
to mysql t | ype dates.
'''
import subprocess # for check_output
import datetime # for strptime
def date_to_mysql(output):
    """Parse the default output of date(1) into a datetime.

    Expects strings such as 'Mon Jan 01 12:30:45 UTC 2018'. The resulting
    datetime prints in a MySQL-compatible 'YYYY-MM-DD HH:MM:SS' form.
    """
    # 'fmt' rather than 'format' to avoid shadowing the builtin.
    fmt = '%a %b %d %H:%M:%S %Z %Y'
    return datetime.datetime.strptime(output, fmt)
output = subprocess.check_output('date').decode().strip()
print('output is [{0}]'.format(output))
d = date_to_mys | ql(output)
print('d is [{0}]'.format(d))
|
lbeckman314/dotfiles | i3/musicwatcher.py | Python | mit | 1,595 | 0.025078 | #!/usr/bin/env python2
import i3ipc
import subprocess
# process all windows on this workspace. hide when leaving and show when entering
# because chrome/ium doesnt consider itself hidden when on an invisible workspace
# this script drops my cpu usage when listening to google music from ~10% to ~3%
# I'm just putting any workspaces that have chromium apps running on them
WATCHED_WORKSPACES = [5, 6]
# EWMH window-state atoms toggled via xprop below.
HIDDEN = '_NET_WM_STATE_HIDDEN'
SHOWN = '_NET_WM_STATE_SHOWN'
def showWindow(windowId):
    """Mark an X window as shown by rewriting its _NET_WM_STATE atoms."""
    # print() with a single argument behaves identically on Python 2 and 3.
    print("SHOWING")
    wid = str(windowId)
    subprocess.call(["xprop", "-id", wid, "-f",
                     "_NET_WM_STATE", "32a", "-remove", HIDDEN])
    subprocess.call(["xprop", "-id", wid, "-f",
                     "_NET_WM_STATE", "32a", "-set", "_NET_WM_STATE", SHOWN])
def hideWindow(windowId):
    """Mark an X window as hidden by rewriting its _NET_WM_STATE atoms."""
    # print() with a single argument behaves identically on Python 2 and 3.
    print("HIDING")
    wid = str(windowId)
    subprocess.call(["xprop", "-id", wid, "-f",
                     "_NET_WM_STATE", "32a", "-remove", SHOWN])
    subprocess.call(["xprop", "-id", wid, "-f",
                     "_NET_WM_STATE", "32a", "-set", "_NET_WM_STATE", HIDDEN])
def process_window(window, ws_event):
    """Show the window when a watched workspace gains focus; hide it when
    focus leaves a watched workspace for an unwatched one."""
    # Parenthesized single-argument print works on both Python 2 and 3.
    print("Processing window: %s (%d)" % (window.name, window.window))
    if ws_event.current.num in WATCHED_WORKSPACES:
        # music workspace has been focused
        showWindow(window.window)
    elif ws_event.old.num in WATCHED_WORKSPACES:
        # music workspace has been unfocused
        hideWindow(window.window)
def onWorkspace(i3, event):
    """i3 'workspace' event handler: on focus changes, (re)process every
    window living on one of the watched workspaces."""
    if event.change not in ['focus']:
        return
    for leaf in i3.get_tree().leaves():
        if leaf.workspace().num in WATCHED_WORKSPACES:
            process_window(leaf, event)
# Connect to i3, subscribe to workspace events and block in the event loop.
i3 = i3ipc.Connection()
i3.on('workspace', onWorkspace)
i3.main()
|
editorsnotes/editorsnotes | editorsnotes/main/migrations/0019_auto_20160229_0921.py | Python | agpl-3.0 | 10,543 | 0.003889 | # -*- coding: utf-8 -*-
from django.db import migrations, models
from django.conf import settings
import editorsnotes.main.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0018_auto_20151019_1331'),
]
operations = [
migrations.AlterField(
model_name='document',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'),
),
migrations.AlterField(
model_name='document',
name='creator',
field=models.ForeignKey(related_name='created_document_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'),
),
migrations.AlterField(
model_name='document',
name='last_updated',
field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'),
),
migrations.AlterField(
model_name='document',
name='last_updater',
field=models.ForeignKey(related_name='last_to_update_document_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last us | er to update this item.'),
),
migrations.AlterField(
model_name='featureditem',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_na | me='The time this item was created.'),
),
migrations.AlterField(
model_name='featureditem',
name='creator',
field=models.ForeignKey(related_name='created_featureditem_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'),
),
migrations.AlterField(
model_name='note',
name='assigned_users',
field=models.ManyToManyField(help_text='Users who have been assigned to this note.', to=settings.AUTH_USER_MODEL, blank=True),
),
migrations.AlterField(
model_name='note',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'),
),
migrations.AlterField(
model_name='note',
name='creator',
field=models.ForeignKey(related_name='created_note_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'),
),
migrations.AlterField(
model_name='note',
name='is_private',
field=models.BooleanField(default=False, help_text=b"If true, will only be be viewable to users who belong to the note's project."),
),
migrations.AlterField(
model_name='note',
name='last_updated',
field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'),
),
migrations.AlterField(
model_name='note',
name='last_updater',
field=models.ForeignKey(related_name='last_to_update_note_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last user to update this item.'),
),
migrations.AlterField(
model_name='note',
name='license',
field=models.ForeignKey(blank=True, to='licensing.License', help_text='The license under which this note is available.', null=True),
),
migrations.AlterField(
model_name='note',
name='markup',
field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True),
),
migrations.AlterField(
model_name='note',
name='markup_html',
field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for this item rendered into HTML.', null=True, editable=False, blank=True),
),
migrations.AlterField(
model_name='note',
name='project',
field=models.ForeignKey(related_name='notes', to='main.Project', help_text='The project to which this note belongs.'),
),
migrations.AlterField(
model_name='note',
name='status',
field=models.CharField(default='1', help_text='The status of the note. "Open" for outstanding, "Closed" for finished, or "Hibernating" for somewhere in between.', max_length=1, choices=[('0', 'closed'), ('1', 'open'), ('2', 'hibernating')]),
),
migrations.AlterField(
model_name='note',
name='title',
field=models.CharField(help_text='The title of the note.', max_length='80'),
),
migrations.AlterField(
model_name='project',
name='image',
field=models.ImageField(upload_to='project_images', null=True, verbose_name='An image representing this project.', blank=True),
),
migrations.AlterField(
model_name='project',
name='markup',
field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True),
),
migrations.AlterField(
model_name='project',
name='markup_html',
field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for this item rendered into HTML.', null=True, editable=False, blank=True),
),
migrations.AlterField(
model_name='project',
name='name',
field=models.CharField(help_text='The name of the project.', max_length='80'),
),
migrations.AlterField(
model_name='projectinvitation',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'),
),
migrations.AlterField(
model_name='projectinvitation',
name='creator',
field=models.ForeignKey(related_name='created_projectinvitation_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'),
),
migrations.AlterField(
model_name='scan',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'),
),
migrations.AlterField(
model_name='scan',
name='creator',
field=models.ForeignKey(related_name='created_scan_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'),
),
migrations.AlterField(
model_name='topic',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'),
),
migrations.AlterField(
model_name='topic',
name='creator',
field=models.ForeignKey(related_name='created_topic_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'),
),
migrations.AlterField(
model_name='topic',
name='last_updated',
field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'),
),
migrations.AlterField(
model_name='topic',
name='last_updater',
field=models.ForeignKey(related_name='last_to_update_topic_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last user to update this item.'),
),
migrations.AlterField(
model_name='topic',
name='markup',
field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True),
),
migrations.AlterField(
model_name='topic',
name='markup_html',
field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for thi |
mysociety/polipop | polipop/popit/migrations/0005_auto__add_codetype__chg_field_personcode_type__chg_field_organisationc.py | Python | agpl-3.0 | 13,851 | 0.007725 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CodeType'
db.create_table('popit_codetype', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('type', self.gf('django.db.models.fields.CharField')(max_length=100)),
('desc', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal('popit', ['CodeType'])
# Removing old type column (loses anything there currently)
db.delete_column('popit_personcode', 'type')
# Adding new type column
db.add_column('popit_personcode', 'type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['popit.CodeType']))
# Removing old type column (loses anything there currently)
db.delete_column('popit_organisationcode', 'type')
# Adding new type column
db.add_column('popit_organisationcode', 'type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['popit.CodeType']))
def backwards(self, orm):
# Removing index on 'OrganisationCode', fields ['type']
db.delete_index('popit_organisationcode', ['type_id'])
# Removing index on 'PersonCode', fields ['type']
db.delete_index('popit_personcode', ['type_id'])
# Deleting model 'CodeType'
db.delete_table('popit_codetype')
db.delete_column('popit_personcode', 'type_id')
db.add_column('popit_personcode', 'type', self.gf('django.db.models.fields.CharField')(max_length=100))
db.delete_column('popit_organisationcode', 'type_id')
db.add_column('popit_organisationcode', 'type', self.gf('django.db.models.fields.CharField')(max_length=100))
models = {
'popit.codetype': {
'Meta': {'object_name': 'CodeType'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'desc': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'popit.organisation': {
'Meta': {'ordering': "['name']", 'object_name': 'Organisation'},
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'ended': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'started': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'summary': ('markitup.fields. | MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'popit.organisationcode': {
| 'Meta': {'object_name': 'OrganisationCode'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'codes'", 'to': "orm['popit.Organisation']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['popit.CodeType']"})
},
'popit.organisationdata': {
'Meta': {'object_name': 'OrganisationData'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'values'", 'to': "orm['popit.OrganisationDataKey']"}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data'", 'to': "orm['popit.Organisation']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'popit.organisationdatakey': {
'Meta': {'object_name': 'OrganisationDataKey'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'popit.person': {
'Meta': {'ordering': "['slug']", 'object_name': 'Person'},
'_description_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_of_birth': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'date_of_death': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'description': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'popit.personcode': {
'Meta': {'object_name': 'PersonCode'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'codes'", 'to': "orm['popit.Person']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['popit.CodeType']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'popit.persondata': {
'Meta': {'object_name': 'PersonData'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'values'", 'to': "orm['popit.PersonDataKey']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data'", 'to': "orm['popit.Person']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'popit.persondatakey': {
'Meta': {'object_name': 'PersonDataKey'},
'created': ('django.db.models.fields.DateTimeFi |
sarleon/HackGame | app/advancetool/views.py | Python | gpl-3.0 | 307 | 0.019544 | from . import advancetools
from flask import render_ | template,redirect,Response,url_for
@advancetools.route('/')
def index():
    """Redirect the tool's root URL to the base-decoder page.

    NOTE(review): assuming 'advancetools' is a Flask Blueprint, endpoints
    are namespaced, so the relative '.auto_decode_base' form is required;
    the bare name would raise a BuildError at request time.
    """
    return redirect(url_for('.auto_decode_base'))
@advancetools.route('/auto_decode_base')
def auto_decode_base():
    """Render the automatic base-decoding page."""
    template_name = 'advancetool/auto_decode_base.html'
    return render_template(template_name)
|
emacsattic/haddoc | lib/python/haddoc/lookup.py | Python | gpl-2.0 | 885 | 0.00226 | """
Lookup some search terms in the index.
"""
import sys
import haddoc
from collections import defaultdict
def main():
import optparse
parser = optparse.OptionParser(__doc__.strip())
haddoc.add_options(parser)
opts, args = parser.parse_args()
haddoc.validate_options(opts)
if not args:
parser.error("You need to specify some search terms.")
conn = haddoc.getconnection()
curs = conn.cursor()
rsets = []
curs.execute("""
SELECT term, source, fullname, url FROM terms
WHERE term ~* %s
""", ('.*'.join(args),))
res = defaultdict(list)
for term, source, fullname, url in curs:
res[url].append((source, fullname))
for | url, rlist in res.iteritems():
print url
for source, fullname in rlist:
print " %s: %s" % (source, fullname)
print
conn.close( | )
|
lilleswing/deepchem | contrib/pubchem_dataset/download_pubchem_ftp.py | Python | mit | 1,644 | 0.01399 | import ftplib
import os
import time
import deepchem
def _download_dir(ftp, remote_dir, local_dir_name, label):
  """Mirror all files of an FTP directory into <data_dir>/<local_dir_name>.

  Files already present locally and README files are skipped. Progress is
  reported after each file.
  """
  ftp.cwd(remote_dir)
  data_dir = deepchem.utils.get_data_dir()
  local_dir = os.path.join(data_dir, local_dir_name)
  if not os.path.exists(local_dir):
    os.mkdir(local_dir)
  filelist = ftp.nlst()
  # Set membership check instead of a linear scan per file.
  existingfiles = set(os.listdir(local_dir))
  print("Downloading: {0} {1} files".format(len(filelist), label))
  for i, filename in enumerate(filelist, start=1):
    if filename in existingfiles or "README" in filename:
      continue
    local_filename = os.path.join(local_dir, filename)
    # 'outfile' rather than 'file' to avoid shadowing the builtin.
    with open(local_filename, 'wb') as outfile:
      ftp.retrbinary('RETR ' + filename, outfile.write)
    print("Processed file {0} of {1}".format(i, len(filelist)))
def main():
  """Download the PubChem SDF and Bioassay archives from the NCBI FTP site."""
  ftp = ftplib.FTP("ftp.ncbi.nih.gov")
  ftp.login("anonymous", "anonymous")
  # First download all SDF files. We need these to get smiles.
  _download_dir(ftp, "/pubchem/Compound/CURRENT-Full/SDF", "SDF", "SDF")
  # Next download all Bioassays.
  _download_dir(ftp, "/pubchem/Bioassay/CSV/Data", "Data", "Bioassay")
  ftp.quit()
# Allow running this module as a script.
if __name__ == "__main__" :
  main()
|
SPlanzer/AIMS | ElectoralAddress/Gui/Config.py | Python | bsd-3-clause | 1,936 | 0.024793 |
# Configuration settings for electoral address GUI
from PyQt4.QtCore import *
import ElectoralAddress.Database as Database
# QSettings identity: determines where settings are stored on disk/registry.
organisationName='Land Information New Zealand'
applicationName='Electoral Address Loader'
# Lazily-created QSettings singleton (see settings()).
_settings=None
def settings():
    """Return the shared QSettings instance, creating it on first use."""
    global _settings
    if _settings is None:
        _settings = QSettings(organisationName, applicationName)
    return _settings
def set( item, value ):
    # Persist one setting value. (Name shadows the builtin 'set'; kept
    # unchanged for backward compatibility with existing callers.)
    settings().setValue(item,value)
def get( item, default='' ):
    # Read one setting as a string, returning 'default' when it is unset.
    value = str(settings().value(item,default))
    return value
def configureDatabase(dbmodule=Database):
    # Push every stored Database/* setting into the database module,
    # using the module's current values as defaults for unset keys.
    dbmodule.setHost( str(get('Database/host',dbmodule.host())))
    dbmodule.setPort( str(get('Database/port',dbmodule.port())))
    dbmodule.setDatabase( str(get('Database/database',dbmodule.database())))
    dbmodule.setUser( str(get('Database/user',dbmodule.user())))
    dbmodule.setPassword( str(get('Database/password',dbmodule.password())))
    dbmodule.setBdeSchema( str(get('Database/bdeSchema',dbmodule.bdeSchema())))
    dbmodule.setAddressSchema( str(get('Database/addressSchema',dbmodule.addressSchema())))
def | setDatabaseConfiguration( host=None, port=None, database=None, user=None, password=None, bdeSchema=None, addressSchema=None, db | module=Database ):
if not host: host = dbmodule.host()
if not port: host = dbmodule.port()
if not database: database = dbmodule.database()
if not user: user = dbmodule.user()
if not password: password = dbmodule.password()
if not addressSchema: addressSchema = dbmodule.addressSchema()
if not bdeSchema: bdeSchema = dbmodule.bdeSchema()
set('Database/host',host)
set('Database/port',port)
set('Database/database',database)
set('Database/user',user)
set('Database/password',password)
set('Database/addressSchema',addressSchema)
set('Database/bdeSchema',bdeSchema)
configureDatabase(dbmodule)
|
birsoyo/conan | conans/client/userio.py | Python | mit | 4,473 | 0.000894 | import os
import sys
from conans.client.output import ConanOutput
from conans.model.username import Username
from conans.errors import InvalidNameException, ConanException
import getpass
from six.moves import input as raw_input
class UserIO(object):
    """Class to interact with the user, used to show messages and ask for information"""
    def __init__(self, ins=sys.stdin, out=None):
        """
        Params:
            ins: input stream
            out: ConanOutput, should have "write" method
        """
        self._ins = ins
        if not out:
            out = ConanOutput(sys.stdout)
        self.out = out
        # Interactive by default; disable_input() makes any prompt an error.
        self._interactive = True
    def disable_input(self):
        # Switch to non-interactive mode: later prompts raise ConanException.
        self._interactive = False
    def _raise_if_non_interactive(self):
        # Guard used by every method that would block waiting for user input.
        if not self._interactive:
            raise ConanException("Conan interactive mode disabled")
    def raw_input(self):
        # Read one line of user input (Py2/Py3 portable via six.moves).
        self._raise_if_non_interactive()
        return raw_input()
    def get_pass(self):
        # Read a password without echoing it.
        self._raise_if_non_interactive()
        return getpass.getpass("")
    def request_login(self, remote_name, username=None):
        """Request user to input their name and password
        :param username If username is specified it only request password"""
        user_input = ''
        # Keep prompting until a syntactically valid username is supplied.
        while not username:
            try:
                if self._interactive:
                    self.out.write("Remote '%s' username: " % remote_name)
                user_input = self.get_username(remote_name)
                username = Username(user_input)
            except InvalidNameException:
                self.out.error('%s is not a valid username' % user_input)
        if self._interactive:
            self.out.write('Please enter a password for "%s" account: ' % username)
        try:
            pwd = self.get_password(remote_name)
        except ConanException:
            raise
        except Exception as e:
            # Wrap any other failure (e.g. EOF/interrupt) as a ConanException.
            raise ConanException('Cancelled pass %s' % e)
        return username, pwd
    def get_username(self, remote_name):
        """Overridable for testing purpose"""
        # Environment variable wins over the interactive prompt.
        return self._get_env_username(remote_name) or self.raw_input()
    def get_password(self, remote_name):
        """Overridable for testing purpose"""
        # Environment variable wins over the interactive prompt.
        return self._get_env_password(remote_name) or self.get_pass()
    def request_string(self, msg, default_value=None):
        """Request user to input a msg
        :param msg Name of the msg
        """
        self._raise_if_non_interactive()
        if default_value:
            self.out.input_text('%s (%s): ' % (msg, default_value))
        else:
            self.out.input_text('%s: ' % msg)
        s = self._ins.readline().replace("\n", "")
        # Empty answer selects the default, when one was provided.
        if default_value is not None and s == '':
            return default_value
        return s
    def request_boolean(self, msg, default_option=None):
        """Request user to input a boolean"""
        ret = None
        # Re-prompt until we get yes/no (or an empty answer with a default).
        while ret is None:
            if default_option is True:
                s = self.request_string("%s (YES/no)" % msg)
            elif default_option is False:
                s = self.request_string("%s (NO/yes)" % msg)
            else:
                s = self.request_string("%s (yes/no)" % msg)
            if default_option is not None and s == '':
                return default_option
            if s.lower() in ['yes', 'y']:
                ret = True
            elif s.lower() in ['no', 'n']:
                ret = False
            else:
                self.out.error("%s is not a valid answer" % s)
        return ret
    def _get_env_password(self, remote_name):
        """
        Try CONAN_PASSWORD_REMOTE_NAME or CONAN_PASSWORD or return None
        """
        remote_name = remote_name.replace("-", "_").upper()
        var_name = "CONAN_PASSWORD_%s" % remote_name
        ret = os.getenv(var_name, None) or os.getenv("CONAN_PASSWORD", None)
        if ret:
            self.out.info("Got password '******' from environment")
        return ret
    def _get_env_username(self, remote_name):
        """
        Try CONAN_LOGIN_USERNAME_REMOTE_NAME or CONAN_LOGIN_USERNAME or return None
        """
        remote_name = remote_name.replace("-", "_").upper()
        var_name = "CONAN_LOGIN_USERNAME_%s" % remote_name
        ret = os.getenv(var_name, None) or os.getenv("CONAN_LOGIN_USERNAME", None)
        if ret:
            self.out.info("Got username '%s' from environment" % ret)
        return ret
|
manz/python-mapnik | test/python_tests/feature_test.py | Python | lgpl-2.1 | 3,439 | 0.012511 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import eq_,raises
from .utilities import run_all
import mapnik
from binascii import unhexlify
def test_default_constructor():
    # A Feature can be built from just a Context and an integer id.
    f = mapnik.Feature(mapnik.Context(),1)
    eq_(f is not None,True)
def test_feature_geo_interface():
    # __geo_interface__ must expose the geometry as a GeoJSON-like mapping.
    ctx = mapnik.Context()
    feat = mapnik.Feature(ctx,1)
    feat.geometry = mapnik.Geometry.from_wkt('Point (0 0)')
    eq_(feat.__geo_interface__['geometry'],{u'type': u'Point', u'coordinates': [0, 0]})
def test_python_extended_constructor():
    # Attribute assignment round-trips strings (incl. unicode), floats and
    # booleans, and the envelope reflects the assigned WKT geometry.
    context = mapnik.Context()
    context.push('foo')
    context.push('foo')
    f = mapnik.Feature(context,1)
    wkt = 'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))'
    f.geometry = mapnik.Geometry.from_wkt(wkt)
    f['foo'] = 'bar'
    eq_(f['foo'], 'bar')
    eq_(f.envelope(),mapnik.Box2d(10.0,10.0,45.0,45.0))
    # reset
    f['foo'] = u"avión"
    eq_(f['foo'], u"avión")
    f['foo'] = 1.4
    eq_(f['foo'], 1.4)
    f['foo'] = True
    eq_(f['foo'], True)
def test_add_geom_wkb():
    # A polygon parsed from hex-encoded WKB is valid, simple, and keeps
    # being valid after correct() fixes ring orientation.
    # POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))
    wkb = '010300000001000000050000000000000000003e4000000000000024400000000000002440000000000000344000000000000034400000000000004440000000000000444000000000000044400000000000003e400000000000002440'
    geometry = mapnik.Geometry.from_wkb(unhexlify(wkb))
    eq_(geometry.is_valid(), True)
    eq_(geometry.is_simple(), True)
    eq_(geometry.envelope(), mapnik.Box2d(10.0,10.0,40.0,40.0))
    geometry.correct()
    # valid after calling correct
    eq_(geometry.is_valid(), True)
def test_feature_expression_evaluation():
    # An attribute expression evaluates against a feature's attributes.
    context = mapnik.Context()
    context.push('name')
    f = mapnik.Feature(context,1)
    f['name'] = 'a'
    eq_(f['name'],u'a')
    expr = mapnik.Expression("[name]='a'")
    evaluated = expr.evaluate(f)
    eq_(evaluated,True)
    num_attributes = len(f)
    eq_(num_attributes,1)
    eq_(f.id(),1)
# https://github.com/mapnik/mapnik/issues/933
def test_feature_expression_evaluation_missing_attr():
    # Evaluating an expression that references a non-existent attribute
    # must raise with a "Key does not exist" message.
    context = mapnik.Context()
    context.push('name')
    f = mapnik.Feature(context,1)
    f['name'] = u'a'
    eq_(f['name'],u'a')
    expr = mapnik.Expression("[fielddoesnotexist]='a'")
    eq_(f.has_key('fielddoesnotexist'),False)
    try:
        expr.evaluate(f)
    except Exception as e:
        eq_("Key does not exist" in str(e),True)
    num_attributes = len(f)
    eq_(num_attributes,1)
    eq_(f.id(),1)
# https://github.com/mapnik/mapnik/issues/934
def test_feature_expression_evaluation_attr_with_spaces():
    # Attribute names containing spaces are usable in expressions.
    context = mapnik.Context()
    context.push('name with space')
    f = mapnik.Feature(context,1)
    f['name with space'] = u'a'
    eq_(f['name with space'],u'a')
    expr = mapnik.Expression("[name with space]='a'")
    eq_(str(expr),"([name with space]='a')")
    eq_(expr.evaluate(f),True)
# https://github.com/mapnik/mapnik/issues/2390
@raises(RuntimeError)
def test_feature_from_geojson():
ctx = mapnik.Context()
inline_string = """
{
"geometry" : {
"coordinates" : [ 0,0 ]
"type" : "Point"
},
"type" : "Feature",
"properties" : {
"this":"that"
"known":"nope because missing comma"
}
}
"""
mapnik.Feature.from_geojson(inline_string,ctx)
if __name__ == "__main__":
exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
|
makerblueprint/retrospecification | modules/host/beaglebone-black/beaglebone-black.py | Python | cc0-1.0 | 599 | 0.013356 | #
# Get the pin which correlates with a given purpose.
#
# @param char array purpos | e
# The purpose to search by.
# @return int
# A pin which can be used for the given purpose.
#
def getPin(purpose):
purpose_collection = {
"i2c-data": 20
"i2c-clock": 19
"adc": 39
"adc0": 39
"adc-0": 39
"one-wire-data": 40
"adc1": 40
"adc-1": 40
"spi-slave-select": 28
"spi-master-out-slave-in": 30
| "spi-master-in-slave-out": 29
"spi-clock": 31
}
if purpose in purpose_collection:
return purpose_collection[purpose]
else
return -1
|
munificent/magpie-optionally-typed | doc/site/magpie/setup.py | Python | mit | 305 | 0.003279 | """
A Pygments lex | er for Magpie.
"""
from setuptools import setup
__author__ = 'Robert Nystrom'
setup(
name='Magpie',
version='1.0',
description=__doc__,
author=__author__,
packages=['magpie'],
entry_points='''
[pygments.lexers]
magpielexer = magpie: | MagpieLexer
'''
) |
conorkcorbin/tractometry | call_interpolate.py | Python | mit | 1,231 | 0.029245 | from bundleprocessing import interpolateMetric
import pandas as pd
import nibabel as nib
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-templateBundle', '--templateBundle', required = True)
parser.add_argument('-subjectBundle', '--subjectBundle', required = True)
parser.add_argument('-metric', '--metric', required = True)
par | ser.add_argument('-numPoints', '--numPoints', type=int, required = True)
parser.add_argument('-flip', '--flip', type=bool, required = True)
parser.add_argument('-outTracks', '--outTracks', required = True)
parser.add_argument('-outMetrics', '--outMetrics', required = True)
args = parser.parse_args()
tracks, hdr = nib.tra | ckvis.read(args.templateBundle)
templateBundle = [trk[0] for trk in tracks]
tracks, hdr = nib.trackvis.read(args.subjectBundle)
subjectBundle = [trk[0] for trk in tracks]
image = nib.load(args.metric)
metric = image.get_data()
subjectTracks,scalars = interpolateMetric(templateBundle,
subjectBundle,
metric,
hdr['voxel_size'],
args.numPoints,
args.flip)
nib.trackvis.write(args.outTracks,subjectTracks,hdr)
data = pd.DataFrame(scalars,columns=["Metrics"])
data.to_csv(args.outMetrics,index=False)
|
oppia/oppia | core/domain/suggestion_registry_test.py | Python | apache-2.0 | 148,457 | 0.00029 | # Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for suggestion registry classes."""
from __future__ import annotations
import datetime
import os
from core import feconf
from core import utils
from core.domain import config_services
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import fs_domain
from core.domain import fs_services
from core.domain import html_validation_service
from core.domain imp | ort question_domain
from core.domain import question_services
from core.domain import skill_services
from core.domain import state_domain
from core.d | omain import suggestion_registry
from core.domain import suggestion_services
from core.platform import models
from core.tests import test_utils
(suggestion_models,) = models.Registry.import_models([models.NAMES.suggestion])
class MockInvalidSuggestion(suggestion_registry.BaseSuggestion):
def __init__(self): # pylint: disable=super-init-not-called
pass
class BaseSuggestionUnitTests(test_utils.GenericTestBase):
"""Tests for the BaseSuggestion class."""
def setUp(self):
super(BaseSuggestionUnitTests, self).setUp()
self.base_suggestion = MockInvalidSuggestion()
def test_base_class_accept_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement accept.'):
self.base_suggestion.accept()
def test_base_class_get_change_list_for_accepting_suggestion_raises_error(
self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement '
'get_change_list_for_accepting_suggestion.'):
self.base_suggestion.get_change_list_for_accepting_suggestion()
def test_base_class_pre_accept_validate_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' pre_accept_validate.'):
self.base_suggestion.pre_accept_validate()
def test_base_class_populate_old_value_of_change_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' populate_old_value_of_change.'):
self.base_suggestion.populate_old_value_of_change()
def test_base_class_pre_update_validate_raises_error(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' pre_update_validate.'):
self.base_suggestion.pre_update_validate({})
def test_base_class_get_all_html_content_strings(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' get_all_html_content_strings.'):
self.base_suggestion.get_all_html_content_strings()
def test_base_class_get_target_entity_html_strings(self):
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' get_target_entity_html_strings.'):
self.base_suggestion.get_target_entity_html_strings()
def test_base_class_convert_html_in_suggestion_change(self):
def conversion_fn():
"""Temporary function."""
pass
with self.assertRaisesRegex(
NotImplementedError,
'Subclasses of BaseSuggestion should implement'
' convert_html_in_suggestion_change.'):
self.base_suggestion.convert_html_in_suggestion_change(
conversion_fn)
class SuggestionEditStateContentUnitTests(test_utils.GenericTestBase):
"""Tests for the SuggestionEditStateContent class."""
AUTHOR_EMAIL = 'author@example.com'
REVIEWER_EMAIL = 'reviewer@example.com'
ASSIGNED_REVIEWER_EMAIL = 'assigned_reviewer@example.com'
fake_date = datetime.datetime(2016, 4, 10, 0, 0, 0, 0)
def setUp(self):
super(SuggestionEditStateContentUnitTests, self).setUp()
self.signup(self.AUTHOR_EMAIL, 'author')
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.suggestion_dict = {
'suggestion_id': 'exploration.exp1.thread1',
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': 'exp1',
'target_version_at_submission': 1,
'status': suggestion_models.STATUS_ACCEPTED,
'author_name': 'author',
'final_reviewer_id': self.reviewer_id,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'state_1',
'new_value': 'new suggestion content',
'old_value': None
},
'score_category': 'content.Algebra',
'language_code': None,
'last_updated': utils.get_time_in_millisecs(self.fake_date),
'edited_by_reviewer': False
}
def test_create_suggestion_edit_state_content(self):
expected_suggestion_dict = self.suggestion_dict
observed_suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertDictEqual(
observed_suggestion.to_dict(), expected_suggestion_dict)
def test_validate_suggestion_edit_state_content(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
suggestion.validate()
def test_get_score_part_helper_methods(self):
expected_suggestion_dict = self.suggestion_dict
suggestion = suggestion_registry.SuggestionEditStateContent(
expected_suggestion_dict['suggestion_id'],
expected_suggestion_dict['target_id'],
expected_suggestion_dict['target_version_at_submission'],
expected_suggestion_dict['status'], self.author_id,
self.reviewer_id, expected_suggestion_dict['change'],
expected_suggestion_dict['score_category'],
expected_suggestion_dict['language_code'], False, self.fake_date)
self.assertEqual(suggestion.get_score_type(), 'content')
self.assertEqual(suggestion.get_score_sub_type(), 'Algebra')
def test_validate_suggestion_type(self):
ex |
mgerstner/backintime | common/test/test_restore.py | Python | gpl-2.0 | 7,692 | 0.00429 | # Back In Time
# Copyright (C) 2008-2017 Oprea Dan, Bart de Koning, Richard Bailey, Germar Reitze
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import sys
import unittest
import pwd
import grp
import stat
from tempfile import TemporaryDirectory
from test import generic
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import config
import snapshots
import mount
CURRENTUID = os.geteuid()
CURRENTUSER = pwd.getpwuid(CURRENTUID).pw_name
CURRENTGID = os.getegid()
CURRENTGROUP = grp.getgrgid(CURRENTGID).gr_name
class RestoreTestCase(generic.SnapshotsWithSidTestCase):
def setUp(self):
super(RestoreTestCase, self).setUp()
self.include = TemporaryDirectory()
generic.create_test_files(self.sid.pathBackup(self.include.name))
def tearDown(self):
super(RestoreTestCase, self).tearDown()
self.include.cleanup()
def prepairFileInfo(self, restoreFile, mode = 33260):
d = self.sid.fileInfo
d[restoreFile.encode('utf-8', 'replace')] = (mode,
CURRENTUSER.encode('utf-8', 'replace'),
CURRENTGROUP.encode('utf-8', 'replace'))
self.sid.fileInfo = d
class TestRestore(RestoreTestCase):
def test_restore_multiple_files(self):
restoreFile1 = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile1)
restoreFile2 = os.path.join(self.include.name, 'foo', 'bar', 'baz')
self.prepairFileInfo(restoreFile2)
self.sn.restore(self.sid, (restoreFile1, restoreFile2))
self.assertIsFile(restoreFile1)
with open(restoreFile1, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(restoreFile1).st_mode)
self.assertIsFile(restoreFile2)
with open(restoreFile2, 'rt') as f:
self.assertEqual(f.read(), 'foo')
self.assertEqual(33260, os.stat(restoreFile2).st_mode)
def test_restore_to_different_destination(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with TemporaryDirectory() as dest:
destRestoreFile = os.path.join(dest, 'test')
self.sn.restore(self.sid, restoreFile, restore_to = dest)
self.assertIsFile(destRestoreFile)
with open(destRestoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(destRestoreFile).st_mode)
def test_restore_folder_to_different_destination(self):
restoreFolder = self.include.name
self.prepairFileInfo(restoreFolder)
self.prepairFileInfo(os.path.join(restoreFolder, 'test'))
self.prepairFileInfo(os.path.join(restoreFolder, 'file with spaces'))
with TemporaryDirectory() as dest:
destRestoreFile = os.path.join(dest, os.path.basename(restoreFolder), 'test')
self.sn.restore(self.sid, restoreFolder, restore_to = dest)
self.assertIsFile(destRestoreFile)
with open(destRestoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
self.assertEqual(33260, os.stat(destRestoreFile).st_mode)
def test_delete(self):
restoreFolder = self.include.name
junkFolder = os.path.join(self.include.name, 'junk')
os.makedirs(junkFolder)
self.assertExists(junkFolder)
self.prepairFileInfo(restoreFolder)
self.sn.restore(self.sid, restoreFolder, delete = True)
self.assertIsFile(restoreFolder, 'test')
self.assertNotExists(junkFolder)
def test_backup(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with open(restoreFile, 'wt') as f:
f.write('fooooooooooooooooooo')
self.sn.restore(self.sid, restoreFile, backup = True)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
backupFile = restoreFile + self.sn.backupSuffix()
self.assertIsFile(backupFile)
with open(backupFile, 'rt') as f:
self.assertEqual(f.read(), 'fooooooooooooooooooo')
def test_no_backup(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with open(restoreFile, 'wt') as f:
f.write('fooooooooooooooooooo')
self.sn.restore(self.sid, restoreFile, backup = False)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
backupFile = restoreFile + self.sn.backupSuffix()
self.assertIsNoFile(backupFile)
def test_only_new(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
with open(restoreFile, 'wt') as f:
f.write('fooooooooooooooooooo')
# change mtime to be newer than the one in snapshot
st = os.stat(restoreFile)
atime = st[stat.ST_ATIME]
mtime = st[stat.ST_MTIME]
new_mtime = mtime + 3600
os.utime(restoreFile, (atime, new_mtime))
self.sn.restore(self.sid, restoreFile, only_new = True)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'fooooooooooooooooooo')
class TestRestoreLocal(RestoreTestCase):
"""
Tests which should run on local and ssh profile
"""
def test_restore(self):
restoreFile = os.path.join(self.include.name, 'test')
self.prepairFileInfo(restoreFile)
self.sn.restore(self.sid, restoreFile)
self.assertIsFile(restoreFile)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'bar')
| self.assertEqual(33260, os.stat(restoreFile).st_mode)
def test_restore_file_with_spaces(self):
restoreFile = os.path.join(self.include.name, 'file with spaces')
self.prepairFileInfo(restoreFile)
self.sn.restore(self.sid, restoreFile)
self.assertIsFile(restore | File)
with open(restoreFile, 'rt') as f:
self.assertEqual(f.read(), 'asdf')
self.assertEqual(33260, os.stat(restoreFile).st_mode)
@unittest.skipIf(not generic.LOCAL_SSH, 'Skip as this test requires a local ssh server, public and private keys installed')
class TestRestoreSSH(generic.SSHSnapshotsWithSidTestCase, TestRestoreLocal):
def setUp(self):
super(TestRestoreSSH, self).setUp()
self.include = TemporaryDirectory()
generic.create_test_files(os.path.join(self.remoteSIDBackupPath, self.include.name[1:]))
#mount
self.cfg.setCurrentHashId(mount.Mount(cfg = self.cfg).mount())
def tearDown(self):
#unmount
mount.Mount(cfg = self.cfg).umount(self.cfg.current_hash_id)
super(TestRestoreSSH, self).tearDown()
self.include.cleanup()
|
barisser/perceptron | btcanalysis.py | Python | mit | 2,614 | 0.034047 | import requests
import json
import math
import matplotlib.pyplot as plt
data=[]
price=[] #weighted price per day, reverse order
volume=[] #volume USD per day
onedaychange=[]
Xmovingaverage=[]
Xinterval=15
Ymovingaverage=[]
Yinterval=50
Xdifference=[] # PERCENTAGE DIFFERENCE between X moving average and todays price
Ydifference=[]
def get_data():
f='http://www.quandl.com/api/v1/datasets/BITCOIN/BITSTAMPUSD'
a=requests.get(f)
if a.status_code==200:
b=a.content
global c
c=json.loads(b)
print c['column_names']
#return c['data']
global data
datas=c['data']
data=[]
for x in datas:
if x[7]>10000000:
g=0
else:
data.append(x)
def prep():
global volume, price, onedaychange,Xmovingaverage, Ymovingaverage
global Xdifference, Ydifference
for x in data:
volume.append(x[6])
price.append(x[7])
price=price[::-1]
volume=volume[::-1]
a=0
onedaychange.append(1)
while a<len(price)-1:
onedaychange.append(price[a+1]/price[a]-1)
a=a+1
for i in range(0,Xinterval):
Xmovingaverage.append(0)
for i in range(0,Yinterval):
Ymovingaverage.append(0)
a=0
while a<len(price)-Xinterval:
b | =0
r=0
while b<Xinterval:
r=r+float(price[a+b])/float(Xinterval)
b=b+1
Xmovingaverage.append(r)
a=a+1
a=0
while a<len(pric | e)-Yinterval:
c=0
r=0
while c<Yinterval:
r=r+float(price[a+c])/float(Yinterval)
c=c+1
Ymovingaverage.append(r)
a=a+1
for i in range(0,Xinterval):
Xdifference.append(1)
for i in range(0,Yinterval):
Ydifference.append(1)
g=Xinterval
while g<len(Xmovingaverage):
Xdifference.append(price[g]/Xmovingaverage[g]-1)
g=g+1
g=Yinterval
while g<len(Ymovingaverage):
Ydifference.append(price[g]/Ymovingaverage[g]-1)
g=g+1
def average(x):
b=0
n=0
for a in x:
if a<999999999 and a>0:
b=b+a
n=n+1.0
b=b/n
return b
def standard_deviation(x):
b=0
g=average(x)
for y in x:
b=b+math.pow((y-g),2)
b=b/len(x)
b=math.pow(b,0.5)
return b
def correlation(x,y):
a=0
b=0
ax=average(x)
ay=average(y)
while a<len(x):
c=x[a]-ax
d=y[a]-ay
g=c*d
b=b+g
a=a+1
b=b/(standard_deviation(x)*standard_deviation(y))
b=b/len(x)
return b
def init():
get_data()
prep()
|
uogbuji/versa | tools/py/iriref.py | Python | apache-2.0 | 1,234 | 0.004862 | #versa.iriref
'''
Versa distinguishes declared IRI references from strings which coincidentally look like IRI references
'''
from amara3 import iri
import logging
from versa import init_localization
init_localization()
#VERSA_BASEIRI = 'http://bibfra.me/purl/versa/'
class iriref(str):
'''
IRI references object, just a sli | ghtly decorated unicode
>>> from | versa.iriref import iriref
>>> iriref('spam')
'spam'
>>> iriref('spam eggs')
[raises ValueError]
>>> from versa import I
>>> I('spam')
'spam'
'''
def __new__(cls, value):
if not iri.matches_uri_ref_syntax(value):
raise ValueError(_('Invalid IRI reference: "{0}"'.format(value)))
self = super(iriref, cls).__new__(cls, value)
#self = unicode, cls).__new__(cls, value)
# optionally do stuff to self here
return self
def __repr__(self):
return u'I(' + str(self) + ')'
def __call__(self, tail):
'''
>>> from versa import I
>>> base = I('https://example.org/')
>>> a = base('a')
>>> a
I(https://example.org/a)
'''
# Just dumb concatenation for now
return iriref(str(self) + str(tail))
|
comick/barduino | app/weblib/logging.py | Python | mit | 1,603 | 0.000624 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
flask.logging
~~~~~~~~~~~~~
Implements the logging support for Flask.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import web
from logging import getLogger
from logging import getLoggerClass
from logging import DEBUG
from logging import Formatter
from logging import INFO
from logging import StreamHandler
__all__ = ['create_logger']
def create_logger():
"""Creates a logger for the given application.
This logger works similar to a regular Python logger but changes the
effective logging level based on the application's debug fla | g. Furthermore
this function also removes all attached handlers in case there was a logger
with the log name before.
| """
Logger = getLoggerClass()
class DebugLogger(Logger):
def getEffectiveLevel(self):
if self.level == 0:
return DEBUG if web.config.DEBUG else INFO
return super(DebugLogger, self).getEffectiveLevel()
class DebugHandler(StreamHandler):
def emit(x, record):
StreamHandler.emit(x, record)
handler = DebugHandler()
handler.setLevel(DEBUG)
handler.setFormatter(Formatter(web.config.LOG_FORMAT))
logger = getLogger(web.config.LOGGER_NAME)
# just in case that was not a new logger, get rid of all the handlers
# already attached to it.
del logger.handlers[:]
logger.__class__ = DebugLogger
if web.config.LOG_ENABLE:
logger.addHandler(handler)
return logger
|
cliffano/swaggy-jenkins | clients/python/generated/test/test_favorite_impllinks.py | Python | mit | 871 | 0.001148 | """
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: blah@cliffano.com
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import swaggyjenkins
from swaggyjenkins.model.link import Link
globals()['Link'] = Link
from swaggyjenki | ns.model.favorite_impllinks import FavoriteImpllinks
class TestFavoriteImpllinks(unittest.TestCase):
"""FavoriteImpllinks unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testFavoriteImpllinks(self):
"""Test FavoriteImpllinks"""
# FIXME: construct object with mandatory attributes with example values
# model = FavoriteImpllinks() # noqa: E501
pass
if __name__ | == '__main__':
unittest.main()
|
sobhe/baaz | baaz/LearningBasedInformationExtractor.py | Python | mit | 598 | 0.033445 | from hazm import IOBTagger
from nltk.t | ag import TaggerI
class LearningBasedInformationExtractor(TaggerI):
def train(self, sentences, model_file='resources/extractor.model'):
self.tagger = IOBTagger(patterns=[
'*',
'*:tl1=%X[-1,1]',
'*:t=%X[0,1]',
'*:tr1=%X[1,1]',
'*:cp=%m[0,2,"..$"]',
'*:c=%X[0,2]',
'*:c0l1=%X[-1,2]/%X[0,2]',
'*:c0r1=%X[0,2]/%X[1,2]',
'*:cl1=%X[-1,2]',
'*:cl2=%X[-2,2]',
'*:cr1=%X[1,2]',
'*:cr2=%X[2, | 2]',
])
self.tagger.train(sentences)
self.tagger.save_model(model_file)
def tag(self, sent):
return self.tagger.tag(sent)
|
zhangjiajie/PTP | nexus/bin/nexus_binary2multistate.py | Python | gpl-3.0 | 1,202 | 0.008319 | #!/usr/bin/env python
import sys
from nexus import NexusReader, VERSION
from nexus.tools import multistatise, combine_nexuses
__author__ = 'Simon Greenhill <simon@simon.net.nz>'
__doc__ = """nexus_binary2multistate - python-nexus tools v%(version)s
Converts binary nexuses to a multistate nexus.
""" % {'version': VERSION,}
if __name__ == '__main__':
from optparse imp | ort OptionParser
parser = OptionParser(usage="usage: %prog [-o output.nex] nex1.nex nex2.nex ... nexN.nex")
parser.add_option("-o", "--output", dest="output",
action="store", default=None, type="string",
help="output nexus file")
options, nexuslist = parser.parse_args()
if len(nexuslist) < 1: |
print(__doc__)
parser.print_help()
sys.exit()
if options.output is not None:
outfile = options.output
else:
outfile = 'multistate.nex'
nexuslist2 = []
for nfile in nexuslist:
n = NexusReader(nfile)
n = multistatise(n)
nexuslist2.append(n)
out = combine_nexuses(nexuslist2)
out.write_to_file(outfile, charblock=True, interleave=False)
print("Written to %s" % outfile)
|
brohrer/becca | becca/preprocessor.py | Python | mit | 3,736 | 0 | import numpy as np
from becca.discretizer import Discretizer
import becca.tools as tools
import os
import logging
logging.basicConfig(filename='log/log.log', level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(name)s %(message)s')
logger = logging.getLogger(os.path.basename(__file_ | _))
class Preprocessor(object):
"""
The Preprocessor takes the raw sensor signals in and creates a set of
inputs for the Brain to learn from.
"""
def __init__(self, n_sensors=None):
"""
Parameters
----------
| n_sensors: int
The number of sensors that the world will be providing. These
are the only pieces of information Becca needs about the
world to get started.
"""
if not n_sensors:
logger.error('You have to give a number for n_sensors.')
return
else:
self.n_sensors = n_sensors
# n_inputs: int
# The total number of inputs that the preprocessor passes on.
self.n_inputs = 0 # self.n_commands
# input_energies: array of floats
# The reservoirs of energy associated with each of the inputs.
# Each input is subject to fatigue.
# It has to be quiet for a while
# before it can be strongly active again.
# Initialize it to handle many more inputs than will be needed
# at first.
# n_init = 10 * (self.n_commands + self.n_sensors)
n_init = 10 * (self.n_sensors)
self.input_energies = np.ones(n_init)
# input_activities: array of floats
# The most recent set of input activities, generated from
# discretized sensors.
self.input_activities = np.zeros(n_init)
# Initialize the Discretizers that will take in the
# (possibly continuous) sensor inputs and turn each one into
# a set of discrete values.
self.discretizers = []
for i in range(self.n_sensors):
new_discretizer = Discretizer(
base_position=float(i) + .5,
n_inputs=self.n_inputs,
name='sensor_' + str(i))
self.discretizers.append(new_discretizer)
self.n_inputs += 2
def convert_to_inputs(self, sensors):
"""
Build a set of discretized inputs for the featurizer.
Parameters
----------
sensors: list of floats, strings and/or stringifiable objects
The sensor values from the current time step.
Returns
-------
input_activities: list of floats
The activity levels associated with each of the inputs
that the preprocessor is providing to the featurizer.
"""
raw_input_activities = np.zeros(self.input_energies.size)
# This assumes that n_commands is constant.
# raw_input_activities[:self.n_commands] = commands
for i_sensor, discretizer in enumerate(self.discretizers):
raw_input_activities, self.n_inputs = discretizer.step(
input_activities=raw_input_activities,
n_inputs=self.n_inputs,
raw_val=sensors[i_sensor],
)
self.input_activities = tools.fatigue(
raw_input_activities, self.input_energies)
# Grow input_activities and input_energies as necessary.
if self.n_inputs > self.input_energies.size / 2:
new_input_energies = np.ones(2 * self.n_inputs)
new_input_energies[:self.input_energies.size] = (
self.input_energies)
self.input_energies = new_input_energies
return self.input_activities[:self.n_inputs]
|
Pexego/PXGO_00049_2013_PCG | project-addons/maintenance/maintenance_type.py | Python | agpl-3.0 | 6,303 | 0.006665 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2014 Pexego Sistemas Informáticos All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
from openerp.osv import orm, fields
from datetime import *
from dateutil.relativedelta import relativedelta
from dateutil.rrule import *
class maintenance_type(orm.Model):
_name = 'maintenance.type'
_columns = {
'name':fields.char('Name', size=64, required=True),
'descripcion': fields.text('Description'),
'type':fields.selection([
('reform', 'Reforms'),
('corrective', 'Corrective'),
('predictive', 'Predictive'),
('preventive', 'Preventive'),
('legal', 'Legal')
], 'Type', select=True, required=True),
'survey_id':fields.many2one('survey.survey', 'Associated survey'),
'planificado':fields.boolean('Planned'),
'intervalo':fields.selection([
('3', 'Daily'),
('1', 'Monthly'),
('2', 'Weekly')
], 'Interval', select=True),
'interval_count': fields.integer('Repeat with interval', help="Repeat with interval (Days/Week/Month)", required=True),
'inicio': fields.date('Initial date'),
'ultima_ejecucion': fields.date('last execution'),
'lunes':fields.boolean('Monday'),
'martes':fields.boolean('Tuesday'),
'miercoles':fields.boolean('Wednesday'),
'jueves':fields.boolean('Thursday'),
'viernes':fields.boolean('Friday'),
'sabado':fields.boolean('Saturday'),
'domingo':fields.boolean('Sunday'),
'element_ids':fields.many2many('maintenance.element', 'maintenanceelement_maintenancetype_rel', 'type_id', 'element_id', 'Maintenance elements'),
'department_id': fields.many2one('hr.department', 'Department'),
'on_stop': fields.boolean('On stop')
}
_defaults = {
'interval_count': 1
}
def run_scheduler(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
if not context:
context = {}
maintenance_type_obj = self.pool.get('maintenance.type')
type_ids = maintenance_type_obj.search(cr, uid, [('planificado', '=', True),('on_stop', '=', False)])
type_objs = maintenance_type_obj.browse(cr, uid, type_ids, context)
dias = {
'lunes':MO,
'martes':TU,
'miercoles':WE,
'jueves':TH,
'viernes':FR,
'sabado':SA,
'domingo':SU,
}
for type_obj in type_objs:
if type_obj.planificado:
ultima_ej = datetime.strptime(type_obj.ultima_ejecucion or type_obj.inicio, "%Y-%m-%d")
fin = (datetime.now() + relativedelta(months=+1)) - relativedelta(days=-1)
fechas_excluidas = []
for dia in dias.keys():
if type_obj[dia] :
fechas_excluidas += rrule(int(type_obj.intervalo), byweekday=dias[dia], dtstart=ultima_ej).between(ultima_ej, fin, inc=True)
fechas = rrule(int(type_obj.intervalo), dtstart=ultima_ej, interval=type_obj.interval_count).between(ultima_ej, fin, inc=True)
if fechas:
ultima_creacion = ultima_ej
for fecha in fechas:
crear_solicitud = True
if fecha in fechas_excluidas:
fecha_cambiada = False
nueva_fecha = fecha
while not fecha_cambiada:
nueva_fecha = nueva_fecha + relativedelta(days=+1)
# si el intervalo es diario
if nueva_fecha in fechas or nueva_fecha > fin:
crear_solicitud = False
break
if nueva_fecha not in fechas_excluidas:
fecha = nueva_fecha
fecha_cambiada = True
if crear_solicitud:
ultima_creacion = fecha
element_ids = []
for obj in type_obj.element_ids:
element_ids.append(obj.id)
args_request = {
'maintenance_type_id':type_obj.id,
'element_ids':[(6, 0, element_ids)],
'fecha_solicitud':fecha,
'department_id': type_obj.departm | ent_id and type_obj.department_id.id or False,
'executor_department_id': type_obj.department_id and type_obj.department_id.id or False,
}
self.pool | .get('intervention.request').create(cr, uid, args_request, context)
maintenance_type_obj.write(cr, uid, type_obj.id, {'ultima_ejecucion': ultima_creacion}, context)
return True
maintenance_type()
|
qtile/qtile | test/test_popup.py | Python | mit | 1,850 | 0 | # Copyright (c) 2020-1 Matt Colligan
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import textwrap
def test_popup_focus(manager):
manager.test_window(" | one")
start_wins = len(manager.backend.get_all_windows())
success, msg = manager.c.eval(
textwrap.dedent(
"""
from libqtile.popup import Popup
popup = Popup(self,
x=0,
y=0,
width=self.current_screen.width,
height=self.current_screen.height,
)
popup.place()
popup.unhide()
"""
)
)
assert success, msg
end_wins = len(manag | er.backend.get_all_windows())
assert end_wins == start_wins + 1
assert manager.c.group.info()["focus"] == "one"
assert manager.c.group.info()["windows"] == ["one"]
assert len(manager.c.windows()) == 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.