| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
| Chunfang/defmod-swpc | example/F3Dp/F3D_syn.py | Python | mit | 4,626 | 0.043234 |
#!/usr/bin/env python
import numpy as np
import os,sys
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
import argparse
ap=argparse.ArgumentParser()
ap.add_argument('-vis') # 1 plot cropped point cloud
ap.add_argument('-refine') # 1 refine mesh
ap.add_argument('-clean') # 1 remove tmp files
if ap.parse_args().vis==None:
vis=0
else:
vis=int(ap.parse_args().vis)
if ap.parse_args().refine==None:
refine=0
else:
refine=int(ap.parse_args().refine)
if ap.parse_args().clean==None:
clean=0
else:
clean=int(ap.parse_args().clean)
# Synthetic fault pixels
z=np.linspace(.2, -.8, num=100)
y=np.linspace(-.625,.625, num=120)
grid=np.meshgrid(y,z)
x=np.zeros((len(z)*len(y),1),dtype=np.float)
dat_vert=np.hstack((x,grid[0].reshape(x.shape),grid[1].reshape(x.shape)))
# weak
wl=np.linspace(.12,.18,num=8); amp=.03125*np.sqrt(wl)
e=1.025; r=-.2
dip=70.; zcnt=-.35
omg=[ 0.82976173, 0.89624834, 0.03829284, -0.50016345, -1.06606012, 1.40505898, -1.24256034, 1.28623393]
#omg=(np.random.rand(wl.shape[0])-.5)*np.pi
L=dat_vert[1,:].max()-dat_vert[1,:].min()
zmax=z.max(); zmin=z.min()
for i in range(len(wl)):
phs=dat_vert[:,1]/wl[i]*np.pi+omg[i]
dat_vert[:,0]=dat_vert[:,0]+amp[i]*np.cos(phs)*(e*zmax-dat_vert[:,2])/(e*zmax-zmin)*np.exp(r*abs(phs)/np.pi)
dat_vert[:,0]=dat_vert[:,0]+(zcnt-dat_vert[:,2])*np.tan((90.-dip)/180.*np.pi)
# ridge patch
def flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup):
b1=-slope1*trunc1-.7
b2=-slope2*trunc2-.7
in_id=np.where(np.logical_and(dat_vert[:,2]-slope1*dat_vert[:,1]<b1, dat_vert[:,2]-slope2*dat_vert[:,1]<b2))[0]
out_id=np.setdiff1d(np.array(range(len(dat_vert)),dtype=np.int32),in_id)
x_shift=dat_vert[in_id,0]
# ridge patch
k=0
zup=dat_vert[:,2].max()
zlw=dat_vert[:,2].min()
for i in in_id:
r=abs(dat_vert[i,1]-.5*(trunc1+trunc2))
R=.5*((dat_vert[i,2]-b2)/slope2-(dat_vert[i,2]-b1)/slope1)
h=hlw+(dat_vert[i,2]-zlw)/(zup-zlw)*(hup-hlw)
x_shift[k]=x_shift[k]+np.cos(r/R*np.pi/2.)*h
k+=1
dat_vert=np.vstack((dat_vert[out_id,:],
np.hstack((x_shift.reshape(len(in_id),1),
dat_vert[in_id,1].reshape(len(in_id),1),
dat_vert[in_id,2].reshape(len(in_id),1)))))
return dat_vert
slope1=10.;slope2=-10.
trunc1=.1;trunc2=.6
hup=0.;hlw=.08
#dat_vert=flt_patch(dat_vert,slope1,slope2,trunc1,trunc2,hlw,hup)
print omg
fout='F3D_syn.xyz'
f=open(fout,'w+')
np.savetxt(f,dat_vert,delimiter=' ', fmt='%.6f '*3)
f.close()
from subprocess import call
fin=fout
fout=fout.rsplit('.')[0]+'.stl'
mxl='xyz2stl.mlx'
call(['meshlabserver', '-i',fin,'-o',fout,'-s',mxl])
if clean==1: os.remove(fin)
# Mesh
fin=fout
if refine==1:
fout=fout.rsplit('.')[0]+'_dns.exo'
else:
fout=fout.rsplit('.')[0]+'.exo'
jou='F3D_tet.jou'
txt_jou=open(jou,'r')
txt_jou_tmp=open('tmp.jou','w+')
hf=0.0025 # fault grid length (0.0025 for ~100 m tet model, 0.003 for ~40 m)
hm=0.0075 # matrix grid length (0.0075 for ~100 m tet model, 0.010 for ~40 m)
for line in txt_jou:
line=line.strip('\r\n')
if 'import' in line.lower():
line='import stl "'+fin+'"'
if 'export' in line.lower():
line='export mesh "'+fout+'" dimension 3 overwrite'
if 'surface 46 94 95 97 size' in line.lower():
line='surface 46 94 95 97 size %0.6f' %(2*hf)
if 'volume all size' in line.lower():
line='volume all size %0.6f' %(2*hm)
txt_jou_tmp.write(line+'\n')
if 'mesh volume all' in line.lower() and refine==1:
txt_jou_tmp.write('refine volume all\n')
txt_jou.close();txt_jou_tmp.close()
call(['trelis','-nojournal','-nographics','tmp.jou'])
if clean==1: os.remove('tmp.jou')
# Preprocessing msh=>inp
dt_dyn=2E-5 #1E-5 for dns 100 m tet model, 8E-5 for 40 m tet, 8E-4 for ~1 m tet
import F3D_msh2inp
_=F3D_msh2inp.msh2inp(fout,dt_dyn)
# Fault plot
if vis==1:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(dat_vert[:,0], dat_vert[:,1], dat_vert[:,2], c='b', marker='.')
# Create cubic bounding box to simulate equal aspect ratio
max_range = np.array([np.max(dat_vert[:,0])-np.min(dat_vert[:,0]),np.max(dat_vert[:,1])\
-np.min(dat_vert[:,1]), np.max(dat_vert[:,2])-np.min(dat_vert[:,2])]).max()
Xb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][0].flatten()
Yb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][1].flatten()
Zb = 0.5*max_range*np.mgrid[-1:2:2,-1:2:2,-1:2:2][2].flatten()
for xb, yb, zb in zip(Xb, Yb, Zb):
ax.plot([xb], [yb], [zb], 'w',)
plt.title('fault [km]')
plt.grid()
plt.show()
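The loop over the cube corners above is a common matplotlib workaround for the lack of reliable equal-aspect support on 3D axes: plotting one invisible ('w') point at each corner of a cube whose edge equals the largest data range forces all three axes to span the same extent. A self-contained sketch of the same trick (illustrative only, not part of the original script):

```python
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401  (registers the 3d projection)

pts = np.random.rand(200, 3) * [4.0, 1.0, 0.5]   # deliberately anisotropic cloud
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(pts[:, 0], pts[:, 1], pts[:, 2], marker='.')

# Plot an invisible point at each corner of a cube so every axis spans the same range.
half = (pts.max(axis=0) - pts.min(axis=0)).max() / 2.0
mid = (pts.max(axis=0) + pts.min(axis=0)) / 2.0
corners = np.array(np.meshgrid([-half, half], [-half, half], [-half, half])).T.reshape(-1, 3)
for cx, cy, cz in corners + mid:
    ax.plot([cx], [cy], [cz], 'w')
plt.show()
```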
| cloudify-cosmo/cloudify-plugins-common | setup.py | Python | apache-2.0 | 2,007 | 0 |
########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
from setuptools import setup
install_requires = [
'cloudify-rest-client==4.4.dev1',
'pika==0.9.14',
'networkx==1.9.1',
'proxy_tools==0.1.0',
'bottle==0.12.7',
'jinja2==2.7.2'
]
try:
import importlib # noqa
except ImportError:
install_requires.append('importlib')
try:
import argparse # NOQA
except ImportError as e:
install_requires.append('argparse==1.2.2')
try:
from collections import OrderedDict # noqa
except ImportError:
install_requires.append('ordereddict==1.1')
setup(
name='cloudify-plugins-common',
version='4.4.dev1',
author='cosmo-admin',
author_email='cosmo-admin@gigaspaces.com',
packages=['cloudify',
'cloudify.compute',
'cloudify.workflows',
'cloudify.plugins',
'cloudify.celery',
'cloudify.proxy',
'cloudify.test_utils',
'cloudify.ctx_wrappers'],
license='LICENSE',
description='Contains necessary decorators and utility methods for '
'writing Cloudify plugins',
zip_safe=False,
install_requires=install_requires,
entry_points={
'console_scripts': [
'ctx = cloudify.proxy.client:main',
]
},
package_data={'cloudify.ctx_wrappers': ['ctx.py']},
scripts=[
'ctx_wrappers/ctx-sh'
]
)
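The try/except blocks above probe the running interpreter and add a backport to install_requires only when the corresponding stdlib module is missing, i.e. on very old Pythons. A minimal sketch of the same probing pattern, using the enum34 backport as the example (illustrative only, not part of this package):

```python
from setuptools import setup

install_requires = ['requests']
try:
    import enum  # noqa  -- in the stdlib on Python 3.4+
except ImportError:
    # Only very old interpreters reach this branch, so add the backport for them.
    install_requires.append('enum34')

setup(name='example-pkg', version='0.1', install_requires=install_requires)
```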
| tonikelope/python-passport-trace-attack | pypassport/iso7816.py | Python | gpl-2.0 | 7,834 | 0.016977 |
# Copyright 2009 Jean-Francois Houzard, Olivier Roger
#
# This file is part of pypassport.
#
# pypassport is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# pypassport is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with pyPassport.
# If not, see <http://www.gnu.org/licenses/>.
from pypassport.reader import ReaderException
from pypassport.hexfunctions import *
from pypassport import apdu
from pypassport.logger import Logger
class Iso7816Exception(Exception):
def __init__(self, *params):
Exception.__init__(self, *params)
class Iso7816(Logger):
Errors = {
0x61:'SW2 indicates the number of response bytes still available',
0x62:{0x00:'No information given',\
0x81:'Part of returned data may be corrupted',\
0x82:'End of file/record reached before reading Le bytes',\
0x83:'Selected file invalidated',\
0x84:'FCI not formatted according to ISO7816-4 section 5.1.5'},
0x63:{0x00:'No information given',\
0x81:'File filled up by the last write',\
0x82:'Card Key not supported',\
0x83:'Reader Key not supported',\
0x84:'Plain transmission not supported',\
0x85:'Secured Transmission not supported',\
0x86:'Volatile memory not available',\
0x87:'Non Volatile memory not available',\
0x88:'Key number not valid',\
0x89:'Key length is not correct',\
0xC:'Counter provided by X (valued from 0 to 15) (exact meaning depending on the command)'},
0x64:'State of non-volatile memory unchanged (SW2=00, other values are RFU)',
0x65:{0x00:'No information given',\
0x81:'Memory failure'},
0x66:'Reserved for security-related issues (not defined in this part of ISO/IEC 7816)',
0x67:{0x00:'Wrong length'},
0x68:{0x00:'No information given',\
0x81:'Logical channel not supported',\
0x82:'Secure messaging not supported'},
0x69:{0x00:'No information given',\
0x81:'Command incompatible with file structure',\
0x82:'Security status not satisfied',\
0x83:'Authentication method blocked',\
0x84:'Referenced data invalidated',\
0x85:'Conditions of use not satisfied',\
0x86:'Command not allowed (no current EF)',\
0x87:'Expected SM data objects missing',\
0x88:'SM data objects incorrect'},
0x6A:{0x00:'No information given',\
0x80:'Incorrect parameters in the data field',\
0x81:'Function not supported',\
0x82:'File not found',\
0x83:'Record not found',\
0x84:'Not enough memory space in the file',\
0x85:'Lc inconsistent with TLV structure',\
0x86:'Incorrect parameters P1-P2',\
0x87:'Lc inconsistent with P1-P2',\
0x88:'Referenced data not found'},
0x6B:{0x00:'Wrong parameter(s) P1-P2'},
0x6C:'Wrong length Le: SW2 indicates the exact length',
0x6D:{0x00:'Instruction code not supported or invalid'},
0x6E:{0x00:'Class not supported'},
0x6F:{0x00:'No precise diagnosis'},
0x90:{0x00:'Success'} #No further qualification
}
def __init__(self, reader):
Logger.__init__(self, "ISO7816")
self._reader = reader
self._ciphering = False
def transmit(self, toSend, logMsg):
"""
@param toSend: The command to transmit.
@type toSend: A commandAPDU object.
@param logMsg: A log message associated to the transmit.
@type logMsg: A string.
@return: The result field of the responseAPDU object
The P1 and P2 fields are checked after each transmit.
If they don't mean success, the appropriate error string is retrieved
from the Error dictionary and an APDUException is raised.
The Iso7816Exception is composed of three fields: ('error message', p1, p2)
To access these fields when the exception is raised,
access the APDUException object like a list::
try:
x.apduTransmit(commandAPDU(..))
except Iso7816Exception, exc:
print "error: " + exc[0]
print "(pw1, pw2) + str( (exc[1], exc[2]) )
"""
try:
self.log(logMsg)
self.log(str(toSend))
if self._ciphering:
toSend = self._ciphering.protect(toSend)
self.log("[SM] " + str(toSend))
res = self._reader.transmit(toSend)
if self._ciphering:
self.log("[SM] " + str(res))
res = self._ciphering.unprotect(res)
msg = Iso7816.Errors[res.sw1][res.sw2]
self.log(str(res)+" //" + msg)
if msg == "Success":
return res.res
else:
raise Iso7816Exception(msg, res.sw1, res.sw2)
except KeyError, k:
raise Iso7816Exception("Unknown error", res.sw1, res.sw2)
def setCiphering(self, c=False):
self._ciphering = c
def selectFile(self, p1, p2, file="", cla="00", ins="A4"):
lc = hexToHexRep(len(file)/2)
toSend = apdu.CommandAPDU(cla, ins, p1, p2, lc, file, "")
return self.transmit(toSend, "Select File")
def readBinary(self, offset, nbOfByte):
os = "%04x" % int(offset)
toSend = apdu.CommandAPDU("00", "B0", os[0:2], os[2:4], "", "", hexToHexRep(nbOfByte))
return self.transmit(toSend, "Read Binary")
def updateBinary(self, offset, data, cla="00", ins="D6"):
os = "%04x" % int(offset)
data = binToHexRep(data)
lc = hexToHexRep(len(data)/2)
toSend = apdu.CommandAPDU(cla, ins, os[0:2], os[2:4], lc, data, "")
return self.transmit(toSend, "Update Binary")
def getChallenge(self):
toSend = apdu.CommandAPDU("00", "84", "00", "00", "", "", "08")
return self.transmit(toSend, "Get Challenge")
def internalAuthentication(self, rnd_ifd):
data = binToHexRep(rnd_ifd)
lc = hexToHexRep(len(data)/2)
toSend = apdu.CommandAPDU("00", "88", "00",
|
"00", lc, data, "00")
res = self.transmit(toSend, "Internal Authentication")
return res
# def mutualAuthentication(self, data):
# data = binToHexRep(data)
# lc = hexToHexRep(len(data)/2)
# toSend = apdu.CommandAPDU("00", "82", "00", "00", lc, data, "28")
# return self.transmit(toSend, "Mutual Authentication")
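For context, `Errors` is a two-level lookup: `sw1` selects either a plain string or a nested dict indexed by `sw2`, which is why `transmit` falls back to "Unknown error" on `KeyError`. A rough usage sketch, assuming a compatible `reader` object is available (illustrative only; the original module is Python 2, the sketch uses Python 3 exception syntax):

```python
# Nested status-word lookup: sw1=0x6A, sw2=0x82 -> 'File not found'
print(Iso7816.Errors[0x6A][0x82])

iso = Iso7816(reader)                     # `reader` assumed to exist
try:
    data = iso.selectFile("02", "0C", "011E")   # hypothetical file identifier
except Iso7816Exception as exc:
    msg, sw1, sw2 = exc.args              # ('error message', sw1, sw2)
    print("APDU failed: %s (sw1=%02X sw2=%02X)" % (msg, sw1, sw2))
```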
| geoscript/geoscript-py | geoscript/plot/curve.py | Python | mit | 991 | 0.0222 |
from org.jfree.data.xy import XYSeries, XYSeriesCollection
from org.jfree.chart.plot import PlotOrientation
from org.jfree.chart import ChartFactory
from geoscript.plot.chart import Chart
from org.jfree.chart.renderer.xy import XYSplineRenderer, XYLine3DRenderer
def curve(data, name="", smooth=True, trid=True):
"""
Creates a curve based on a list of (x,y) tuples.
Setting *smooth* to ``True`` causes a spline renderer to be used.
Setting *trid* to ``True`` results in a 3D plot. In this case the ``smooth``
argument is ignored.
"""
dataset = XYSeriesCollection()
xy = XYSeries(name);
for d in data:
xy.add(d[0], d[1])
dataset.addSeries(xy);
chart = ChartFactory.createXYLineChart(None, None, None, dataset,
PlotOrientation.VERTICAL, True, True, False)
if smooth:
chart.getXYPlot().setRenderer(XYSplineRenderer())
if trid:
chart.getXYPlot().setRenderer(XYLine3DRenderer())
return Chart(chart)
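A rough usage sketch, under the assumption that the geoscript `Chart` wrapper returned here exposes a `show()` method (illustrative only):

```python
from geoscript.plot.curve import curve

data = [(0, 0.0), (1, 1.2), (2, 0.8), (3, 2.1), (4, 1.5)]
c = curve(data, name="demo", smooth=True, trid=False)   # spline-rendered 2D curve
c.show()   # assumed Chart API; adapt to however charts are displayed locally
```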
| WarrenWeckesser/scipy | scipy/stats/_hypotests.py | Python | bsd-3-clause | 45,046 | 0 |
from collections import namedtuple
from dataclasses import make_dataclass
import numpy as np
import warnings
from itertools import combinations
import scipy.stats
from scipy.optimize import shgo
from . import distributions
from ._continuous_distns import chi2, norm
from scipy.special import gamma, kv, gammaln
from . import _wilcoxon_data
__all__ = ['epps_singleton_2samp', 'cramervonmises', 'somersd',
'barnard_exact', 'cramervonmises_2samp']
Epps_Singleton_2sampResult = namedtuple('Epps_Singleton_2sampResult',
('statistic', 'pvalue'))
def epps_singleton_2samp(x, y, t=(0.4, 0.8)):
"""Compute the Epps-Singleton (ES) test statistic.
Test the null hypothesis that two samples have the same underlying
probability distribution.
Parameters
----------
x, y : array-like
The two samples of observations to be tested. Input must not have more
than one dimension. Samples can have different lengths.
t : array-like, optional
The points (t1, ..., tn) where the empirical characteristic function is
to be evaluated. They should be distinct positive numbers. The default
value (0.4, 0.8) is proposed in [1]_. Input must not have more than
one dimension.
Returns
-------
statistic : float
The test statistic.
pvalue : float
The associated p-value based on the asymptotic chi2-distribution.
See Also
--------
ks_2samp, anderson_ksamp
Notes
-----
Testing whether two samples are generated by the same underlying
distribution is a classical question in statistics. A widely used test is
the Kolmogorov-Smirnov (KS) test which relies on the empirical
distribution function. Epps and Singleton introduce a test based on the
empirical characteristic function in [1]_.
One advantage of the ES test compared to the KS test is that it does
not assume a continuous distribution. In [1]_, the authors conclude
that the test also has a higher power than the KS test in many
examples. They recommend the use of the ES test for discrete samples as
well as continuous samples with at least 25 observations each, whereas
`anderson_ksamp` is recommended for smaller sample sizes in the
continuous case.
The p-value is computed from the asymptotic distribution of the test
statistic which follows a `chi2` distribution. If the sample size of both
`x` and `y` is below 25, the small sample correction proposed in [1]_ is
applied to the test statistic.
The default values of `t` are determined in [1]_ by considering
various distributions and finding good values that lead to a high power
of the test in general. Table III in [1]_ gives the optimal values for
the distributions tested in that study. The values of `t` are scaled by
the semi-interquartile range in the implementation, see [1]_.
References
----------
.. [1] T. W. Epps and K. J. Singleton, "An omnibus test for the two-sample
problem using the empirical characteristic function", Journal of
Statistical Computation and Simulation 26, p. 177--203, 1986.
.. [2] S. J. Goerg and J. Kaiser, "Nonparametric testing of distributions
- the Epps-Singleton two-sample test using the empirical characteristic
function", The Stata Journal 9(3), p. 454--465, 2009.
"""
x, y, t = np.asarray(x), np.asarray(y), np.asarray(t)
# check if x and y are valid inputs
if x.ndim > 1:
raise ValueError('x must be 1d, but x.ndim equals {}.'.format(x.ndim))
if y.ndim > 1:
raise ValueError('y must be 1d, but y.ndim equals {}.'.format(y.ndim))
nx, ny = len(x), len(y)
if (nx < 5) or (ny < 5):
raise ValueError('x and y should have at least 5 elements, but len(x) '
'= {} and len(y) = {}.'.format(nx, ny))
if not np.isfinite(x).all():
raise ValueError('x must not contain nonfinite values.')
if not np.isfinite(y).all():
raise ValueError('y must not contain nonfinite values.')
n = nx + ny
# check if t is valid
if t.ndim > 1:
raise ValueError('t must be 1d, but t.ndim equals {}.'.format(t.ndim))
if np.less_equal(t, 0).any():
raise ValueError('t must contain positive elements only.')
# rescale t with semi-iqr as proposed in [1]; import iqr here to avoid
# circular import
from scipy.stats import iqr
sigma = iqr(np.hstack((x, y))) / 2
ts = np.reshape(t, (-1, 1)) / sigma
# covariance estimation of ES test
gx = np.vstack((np.cos(ts*x), np.sin(ts*x))).T # shape = (nx, 2*len(t))
gy = np.vstack((np.cos(ts*y), np.sin(ts*y))).T
cov_x = np.cov(gx.T, bias=True) # the test uses biased cov-estimate
cov_y = np.cov(gy.T, bias=True)
est_cov = (n/nx)*cov_x + (n/ny)*cov_y
est_cov_inv = np.linalg.pinv(est_cov)
r = np.linalg.matrix_rank(est_cov_inv)
if r < 2*len(t):
warnings.warn('Estimated covariance matrix does not have full rank. '
'This indicates a bad choice of the input t and the '
'test might not be consistent.') # see p. 183 in [1]_
# compute test statistic w distributed asympt. as chisquare with df=r
g_diff = np.mean(gx, axis=0) - np.mean(gy, axis=0)
w = n*np.dot(g_diff.T, np.dot(est_cov_inv, g_diff))
# apply small-sample correction
if (max(nx, ny) < 25):
corr = 1.0/(1.0 + n**(-0.45) + 10.1*(nx**(-1.7) + ny**(-1.7)))
w = corr * w
p = chi2.sf(w, r)
return Epps_Singleton_2sampResult(w, p)
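A minimal usage sketch of this function as it is exposed through `scipy.stats` (illustrative only; the numbers are not precomputed results):

```python
import numpy as np
from scipy import stats

rng = np.random.default_rng(12345)
x = rng.normal(loc=0.0, scale=1.0, size=40)
y = rng.normal(loc=0.5, scale=1.0, size=40)
statistic, pvalue = stats.epps_singleton_2samp(x, y)
print(statistic, pvalue)   # small p-value -> reject H0 of identical distributions
```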
class CramerVonMisesResult:
def __init__(self, statistic, pvalue):
self.statistic = statistic
self.pvalue = pvalue
def __repr__(self):
return (f"{self.__class__.__name__}(statistic={self.statistic}, "
f"pvalue={self.pvalue})")
def _psi1_mod(x):
"""
psi1 is defined in equation 1.10 in Csorgo, S. and Faraway, J. (1996).
This implements a modified version by excluding the term V(x) / 12
(here: _cdf_cvm_inf(x) / 12) to avoid evaluating _cdf_cvm_inf(x)
twice in _cdf_cvm.
Implementation based on MAPLE code of Julian Faraway and R code of the
function pCvM in the package goftest (v1.1.1), permission granted
by Adrian Baddeley. Main difference in the implementation: the code
here keeps adding terms of the series until the terms are small enough.
"""
def _ed2(y):
z = y**2 / 4
b = kv(1/4, z) + kv(3/4, z)
return np.exp(-z) * (y/2)**(3/2) * b / np.sqrt(np.pi)
def _ed3(y):
z = y**2 / 4
c = np.exp(-z) / np.sqrt(np.pi)
return c * (y/2)**(5/2) * (2*kv(1/4, z) + 3*kv(3/4, z) - kv(5/4, z))
def _Ak(k, x):
m = 2*k + 1
sx = 2 * np.sqrt(x)
y1 = x**(3/4)
y2 = x**(5/4)
e1 = m * gamma(k + 1/2) * _ed2((4 * k + 3)/sx) / (9 * y1)
e2 = gamma(k + 1/2) * _ed3((4 * k + 1) / sx) / (72 * y2)
e3 = 2 * (m + 2) * gamma(k + 3/2) * _ed3((4 * k + 5) / sx) / (12 * y2)
e4 = 7 * m * gamma(k + 1/2) * _ed2((4 * k + 1) / sx) / (144 * y1)
e5 = 7 * m * gamma(k + 1/2) * _ed2((4 * k + 5) / sx) / (144 * y1)
return e1 + e2 + e3 + e4 + e5
x = np.asarray(x)
tot = np.zeros_like(x, dtype='float')
cond = np.ones_like(x, dtype='bool')
k = 0
while np.any(cond):
z = -_Ak(k, x[cond]) / (np.pi * gamma(k + 1))
tot[cond] = tot[cond] + z
cond[cond] = np.abs(z) >= 1e-7
k += 1
return tot
def _cdf_cvm_inf(x):
"""
Calculate the cdf of the Cramér-von Mises statistic (infinite sample size).
See equation 1.2 in Csorgo, S. and Faraway, J. (1996).
Implementation based on MAPLE code of Julian Faraway and R code of the
function pCvM in the package goftest (v1.1.1), permission granted
by Adrian Baddeley. Main difference in the implementation: the code
here keeps adding terms of the series until the terms are small enough.
The function is not expected to be accurate for large values of x, say
x > 4, when the cdf is very close to 1.
"""
x = np.a
| hasibi/TAGME-Reproducibility | nordlys/wikipedia/utils.py | Python | mit | 966 | 0.006211 |
"""
Wikipedia utils.
@author: Faegheh Hasibi (faegheh.hasibi@idi.ntnu.no)
"""
from urllib import quote
class WikipediaUtils(object):
mongo = None
@staticmethod
def wiki_title_to_uri(title):
"""
Converts wiki page title to wiki_uri
based on https://en.wikipedia.org/wiki/Wikipedia:Page_name#Spaces.2C_underscores_and_character_coding
encoding based on http://dbpedia.org/services-resources/uri-encoding
"""
if title:
wiki_uri = "<wikipedia:" + quote(title, ' !$&\'()*+,-./:;=@_~').replace(' ', '_') + ">"
return wiki_uri
else:
return None
@staticmethod
def wiki_uri_to_dbp_uri(wiki_uri):
"""Converts Wikipedia uri to DBpedia URI."""
return wiki_uri.replace("<wikipedia:", "<dbpedia:")
def main():
# example usage
print WikipediaUtils.wiki_title_to_uri("Tango (genre musical)")
if __name__ == "__main__":
main()
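For reference, the conversion keeps characters in the safe set, percent-encodes the rest, and replaces spaces with underscores, so the call in `main()` resolves as shown below (the module relies on Python 2's `urllib.quote`):

```python
# WikipediaUtils.wiki_title_to_uri("Tango (genre musical)")
#   -> '<wikipedia:Tango_(genre_musical)>'
# WikipediaUtils.wiki_uri_to_dbp_uri('<wikipedia:Tango_(genre_musical)>')
#   -> '<dbpedia:Tango_(genre_musical)>'
```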
| Chandra-MARX/marxs | marxs/tests/test_analysis.py | Python | gpl-3.0 | 4,162 | 0.002162 |
# Licensed under GPL version 3 - see LICENSE.rst
import numpy as np
import transforms3d
from astropy.table import Table
from astropy.coordinates import SkyCoord
import astropy.units as u
from ..analysis import (find_best_detector_position,
resolvingpower_per_order)
from ..math.utils import e2h
from ..source import PointSource, FixedPointing
from ..simulator import Sequence
from ..optics import (CATGrating,
CircleAperture, PerfectLens, RadialMirrorScatter,
FlatDetector)
from ..design import RowlandTorus, GratingArrayStructure
def test_detector_position():
'''Check that the optimal detector position is found at the convergent point.'''
n = 1000
convergent_point = np.array([3., 5., 7.])
pos = np.random.rand(n, 3) * 100. + 10.
dir = - pos + convergent_point[np.newaxis, :]
photons = Table({'pos': e2h(pos, 1), 'dir': e2h(dir, 0),
'energy': np.ones(n), 'polarization': np.ones(n), 'probability': np.ones(n)})
opt = find_best_detector_position(photons)
assert np.abs(opt.x - 3.) < 0.1
def test_resolvingpower_consistency():
'''Compare different methods to measure the resolving power.
This test only ensures consistency, not correctness. However, most of the
underlying statistical functions are implemented in other packages and
tested there.
This test requires a full pipeline to set up the input photons correctly
and it thus also serves as an integration test.
'''
entrance = np.array([12000., 0., 0.])
aper = CircleAperture(position=entrance, zoom=100)
lens = PerfectLens(focallength=12000., position=entrance, zoom=100)
rms = RadialMirrorScatter(inplanescatter=1e-4 * u.rad,
perpplanescatter=1e-5 * u.rad,
position=entrance, zoom=100)
uptomirror = Sequence(elements=[aper, lens, rms])
# CAT grating with blaze angle ensures that positive and negative orders
# are defined the same way for all gratings in the GAS.
blazeang = 1.91
rowland = RowlandTorus(6000., 6000.)
blazemat = transforms3d.axangles.axangle2mat(np.array([0, 0, 1]), np.deg2rad(blazeang))
gas = GratingArrayStructure(rowland=rowland, d_element=30.,
x_range=[1e4, 1.4e4],
radius=[50, 100],
elem_class=CATGrating,
elem_args={'d': 1e-4, 'zoom': [1., 10., 10.],
'orientation': blazemat,
'order_selector': None},
)
star = PointSource(coords=SkyCoord(23., 45., unit="degree"), flux=5. / u.s / u.cm**2)
pointing = FixedPointing(coords=SkyCoord(23., 45., unit='deg'))
photons = star.generate_photons(exposuretime=200 * u.s)
p = pointing(photons)
p = uptomirror(p)
o = np.array([0, -3, -6])
res1 = resolvingpower_per_order(gas, p.copy(), orders=o, detector=None)
res2 = resolvingpower_per_order(gas, p.copy(), orders=o, detector=rowland)
res3 = resolvingpower_per_order(gas, p.copy(), orders=o, detector=FlatDetector(zoom=1000))
# FWHM is similar
assert np.isclose(res1[1][0], res2[1][0], atol=0.1)
assert np.isclose(res1[1][1], res2[1][1], atol=0.2) # differs stronger here if fit not good
assert np.isclose(res2[1][0], 1.8, rtol=0.1, atol=0.1)
# Resolution of 0th order is essentially 0
assert np.isclose(res1[0][0], 0, atol=0.5)
assert np.isclose(res2[0][0], 0, atol=0.5)
assert np.isclose(res3[0][0], 0, atol=0.5)
# Resolution of higher orders is consistent and higher
assert np.isclose(res1[0][1], res2[0][1], rtol=0.1)
assert np.isclose(res1[0][2], res2[0][2], rtol=0.2)
assert np.isclose(res1[0][1], res3[0][1], rtol=0.1)
# Resolution is higher at higher orders (approximately linear for small angles)
assert np.isclose(res1[0][2], 2 * res1[0][1], rtol=0.2)
assert np.isclose(res2[0][2], 2 * res2[0][1], rtol=0.2)
# No test for res3 here, since that does not follow Rowland circle.
| egassem/python_study | src/com/xiaobei/util/CalenderUtils.py | Python | apache-2.0 | 763 | 0.0192 |
'''
Created on 2016-09-16
@author: Administrator
'''
import calendar
# Return the calendar for the given year
def getYear(year):
return calendar.calendar(year)
# Return the calendar for the given year and month
def getMonth(year, month):
return calendar.month(year, month)
# Return which weekday the first day of the given year-month falls on (0-based: 0 is Monday, 6 is Sunday) together with the number of days in that month
def getMonthRange(year, month):
return calendar.monthrange(year, month)
# Return the given month as a sequence of weeks
def getMonthYear(year, month):
return calendar.monthcalendar(year, month)
# Check whether year is a leap year
def isLeap(year):
return calendar.isleap(year)
print(getYear(2016))
print(getMonth(2016, 10))
print(getMonthYear(2016, 10))
print(getMonthRange(2016, 5))
print(isLeap(2016))
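For orientation, the wrapped `calendar` helpers return plain Python values; a few illustrative calls that are easy to check against a 2016 calendar:

```python
import calendar

print(calendar.monthrange(2016, 5))      # (6, 31): 1 May 2016 is a Sunday, May has 31 days
print(calendar.isleap(2016))             # True
print(calendar.monthcalendar(2016, 10)[0])  # first (zero-padded) week of October 2016
```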
| Arkapravo/morse-0.6 | src/morse/sensors/jido_posture.py | Python | bsd-3-clause | 5,867 | 0.004091 |
import logging; logger = logging.getLogger("morse." + __name__)
import morse.core.sensor
class JidoPostureClass(morse.core.sensor.MorseSensorClass):
""" Jido posture sensor. Currently working with PTU and KUKA arm """
def __init__(self, obj, parent=None):
""" Constructor method.
Receives the reference to the Blender object.
The second parameter should be the name of the object's parent.
"""
logger.info('%s initialization' % obj.name)
# Call the constructor of the parent class
super(self.__class__,self).__init__(obj, parent)
# Object position (maybe delete later)
self.local_data['x'] = 0.0
self.local_data['y'] = 0.0
self.local_data['z'] = 0.0
self.local_data['yaw'] = 0.0
self.local_data['pitch'] = 0.0
self.local_data['roll'] = 0.0
# joints of kuka-arm
self.local_data['seg0'] = 0.0
self.local_data['seg1'] = 0.0
self.local_data['seg2'] = 0.0
self.local_data['seg3'] = 0.0
self.local_data['seg4'] = 0.0
self.local_data['seg5'] = 0.0
self.local_data['seg6'] = 0.0
# joints of PTU-unit
self.local_data['pan'] = 0.0
self.local_data['tilt'] = 0.0
logger.info('Component initialized')
##################### PTU joints ##################
# Check if robot parent has a child named "PTUname"
for child in self.robot_parent.blender_obj.children:
if str(child) == self.blender_obj['PTUname']:
self._ptu_obj = child
# Get the references to the children objects and
# store a transformation3d structure for their position
for child in self._ptu_obj.childrenRecursive:
if 'PanBase' in child.name:
self._pan_base = child
self._pan_position_3d = morse.helpers.transformation.Transformation3d(child)
elif 'TiltBase' in child.name:
self._tilt_base = child
self._tilt_position_3d = morse.helpers.transformation.Transformation3d(child)
# Check the bases were found, or exit with a message
try:
logger.info("Using PTU: '%s'" % self._ptu_obj.name)
logger.info("Using pan base: '%s'" % self._pan_base.name)
logger.info("Using tilt base: '%s'" % self._tilt_base.name)
except AttributeError as detail:
logger.error("Platine is missing the pan and tilt bases. Module will not work!")
###################### KUKA joints ##################
# Gather information about all segments of the kuka-arm
self._segments = []
self.kuka_obj = 0
# Check if robot parent has a child named "kuka_base"
for child in self.robot_parent.blender_obj.children:
if str(child) == self.blender_obj['KUKAname']:
self.kuka_obj = child
try:
logger.info("Using KUKA arm: '%s'" % self.kuka_obj.name)
except AttributeError as detail:
logger.error("Kuka arm is missing. Module will not work!")
# The axes along which the different segments of the kuka arm rotate
# Considering the rotation of the arm as installed in Jido
self._dofs = ['y', 'z', 'y', 'z', 'y', 'z', 'y']
def default_action(self):
""" Get the x, y, z, yaw, pitch and roll of the blender object. """
x = self.position_3d.x
y = self.position_3d.y
z = self.position_3d.z
yaw = self.position_3d.yaw
pitch = self.position_3d.pitch
roll = self.position_3d.roll
############################# PTU joints ##############################
# Reset movement variables
rx, ry, rz = 0.0, 0.0, 0.0
# Update the position of the base platforms
try:
self._pan_position_3d.update(self._pan_base)
self._tilt_position_3d.update(self._tilt_base)
except AttributeError as detail:
logger.error("Platine is missing the pan and tilt bases. Platine does not work!")
return
current_pan = self._pan_position_3d.yaw
current_tilt = self._tilt_position_3d.pitch
logger.debug("Platine: pan=%.4f, tilt=%.4f" % (current_pan, current_tilt))
############################# KUKA joints ##############################
armature = self.kuka_obj
self._angles = []
i = 0
for channel in armature.channels:
self._segments.append(channel)
# Extract the angles
segment_angle = channel.joint_rotation
if self._dofs[i] == 'y':
self._angles.append(segment_angle[1])
elif self._dofs[i] == '-y':
self._angles.append(-segment_angle[1])
elif self._dofs[i] == 'z':
self._angles.append(segment_angle[2])
i = i + 1
############################# Hand data over to middleware ##############################
self.local_data['x'] = float(x)
self.local_data['y'] = float(y)
self.local_data['z'] = float(z)
self.local_data['yaw'] = float(yaw)
self.local_data['pitch'] = float(pitch)
self.local_data['roll'] = float(roll)
# KUKA arm
self.local_data['seg0'] = self._angles[0]
self.local_data['seg1'] = self._angles[1]
self.local_data['seg2'] = self._angles[2]
self.local_data['seg3'] = self._angles[3]
self.local_data['seg4'] = self._angles[4]
self.local_data['seg5'] = self._angles[5]
self.local_data['seg6'] = self._angles[6]
# PTU
self.local_data['pan'] = float(current_pan)
self.local_data['tilt'] = float(current_tilt)
| lyy289065406/expcodes | python/99-project/django-web/ExpPH/ExpPH/utils/BaseUtils.py | Python | gpl-3.0 | 1,328 | 0.009632 |
# -*- coding: utf8 -*-
'''
Basic utilities
Created on 2014-05-14
@author: Exp
'''
''' Get the current system time '''
def getSysTime(format = "%Y-%m-%d %H:%M:%S"):
import time
return time.strftime(format)
# End Fun getSysTime()
''' Check whether running in the local environment (otherwise the SAE environment is assumed) '''
def isLocalEnvironment():
from os import environ
return not environ.get("APP_NAME", "")
# End Fun isLocalEnvironment()
''' Encrypt a string '''
def encrypt(plaintext):
import base64
return base64.encodestring(plaintext)
# End Fun encrypt()
''' Decrypt a string '''
def decrypt(ciphertext):
import base64
return base64.decodestring(ciphertext)
# End Fun decrypt()
''' Simple transcoding: convert orgStr, whose encoding is unknown, to aimCharset; the source encoding of orgStr is detected automatically '''
def simpleTranscoding(orgStr, aimCharset):
import chardet
orgCharset = chardet.detect(orgStr)['encoding'] # detect the source encoding automatically
return transcoding(orgStr, orgCharset, aimCharset)
# End Fun simpleTranscoding()
''' Transcoding: convert orgStr from its source encoding orgCharset to aimCharset '''
def transcoding(orgStr, orgCharset, aimCharset):
unicodeStr = orgStr.decode(orgCharset)
return unicodeStr.encode(aimCharset)
# End Fun transcoding()
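Note that `base64.encodestring`/`decodestring` used above are Python 2 APIs (deprecated in Python 3 and removed in 3.9). A minimal Python 3 sketch of equivalent helpers (illustrative only; base64 is an encoding, not real encryption):

```python
import base64

def encrypt(plaintext):
    return base64.b64encode(plaintext.encode('utf-8')).decode('ascii')

def decrypt(ciphertext):
    return base64.b64decode(ciphertext).decode('utf-8')

assert decrypt(encrypt(u'基本工具')) == u'基本工具'
```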
| V8Wookiee/Python_Learning | Resources/sysmon.py | Python | gpl-3.0 | 3,922 | 0.024987 |
# Copyright 2015 Matt Hawkins
#
# Update : July 2016
# added CPU and disk monitoring to script
# johnty.wang@mail.mcgill.ca
#
# additional requirement: psutil
#
#--------------------------------------
from subprocess import PIPE, Popen
import smbus
import psutil
import os
import time
# Define some device parameters
I2C_ADDR = 0x27 # I2C device address
LCD_WIDTH = 16 # Maximum characters per line
# Define some device constants
LCD_CHR = 1 # Mode - Sending data
LCD_CMD = 0 # Mode - Sending command
LCD_LINE_1 = 0x80 # LCD RAM address for the 1st line
LCD_LINE_2 = 0xC0 # LCD RAM address for the 2nd line
#LCD_LINE_3 = 0x94 # LCD RAM address for the 3rd line
#LCD_LINE_4 = 0xD4 # LCD RAM address for the 4th line
LCD_BACKLIGHT = 0x08 # On
#LCD_BACKLIGHT = 0x00 # Off
ENABLE = 0b00000100 # Enable bit
# Timing constants
E_PULSE = 0.0005
E_DELAY = 0.0005
#Open I2C interface
#bus = smbus.SMBus(0) # Rev 1 Pi uses 0
bus = smbus.SMBus(1) # Rev 2 Pi uses 1
def lcd_init():
# Initialise display
lcd_byte(0x33,LCD_CMD) # 110011 Initialise
lcd_byte(0x32,LCD_CMD) # 110010 Initialise
lcd_byte(0x06,LCD_CMD) # 000110 Cursor move direction
lcd_byte(0x0C,LCD_CMD) # 001100 Display On,Cursor Off, Blink Off
lcd_byte(0x28,LCD_CMD) # 101000 Data length, number of lines, font size
lcd_byte(0x01,LCD_CMD) # 000001 Clear display
time.sleep(E_DELAY)
def get_cpu_temperature():
process = Popen(['vcgencmd', 'measure_temp'], stdout=PIPE)
output, _error = process.communicate()
return float(output[output.index('=') + 1:output.rindex("'")])
#Scott tried this, but didn't work
#def memory_usage_resource():
# import resource
# rusage_denom = 1024.
# if sys.platform == 'darwin':
# # ... it seems that in OSX the output is different units ...
# rusage_denom = rusage_denom * rusage_denom
# mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom
# return mem
def lcd_byte(bits, mode):
# Send byte to data pins
# bits = the data
# mode = 1 for data
# 0 for command
bits_high = mode | (bits & 0xF0) | LCD_BACKLIGHT
bits_low = mode | ((bits<<4) & 0xF0) | LCD_BACKLIGHT
# High bits
bus.write_byte(I2C_ADDR, bits_high)
lcd_toggle_enable(bits_high)
# Low bits
bus.write_byte(I2C_ADDR, bits_low)
lcd_toggle_enable(bits_low)
def lcd_toggle_enable(bits):
# Toggle enable
time.sleep(E_DELAY)
bus.write_byte(I2C_ADDR, (bits | ENABLE))
time.sleep(E_PULSE)
bus.write_byte(I2C_ADDR,(bits & ~ENABLE))
time.sleep(E_DELAY)
def lcd_string(message,line):
# Send string to display
message = message.ljust(LCD_WIDTH," ")
lcd_byte(line, LCD_CMD)
for i in range(LCD_WIDTH):
lcd_byte(ord(message[i]),LCD_CHR)
def main():
# Main program block
# Initialise display
lcd_init()
while True:
cpu_temp = get_cpu_temperature()
cpu_usage = psutil.cpu_percent()
LINE1 = "CPU TMP = " + str(cpu_temp)+" C"
LINE2 = "CPU USE = " + str(cpu_usage)+" %"
#print "cpu temp = ", cpu_temp
#print "cpu usage = ", cpu_usage
lcd_string(LINE1,LCD_LINE_1)
lcd_string(LINE2,LCD_LINE_2)
time.sleep(5)
st = os.statvfs("/")
free = st.f_bavail * st.f_frsize
total = st.f_blocks * st.f_frsize
used = total - free
LINE1 = "DISK USAGE:"
LINE2 = " "+ str(used/1024/1024) + "/" + str(total/1024/1024)+" MB"
lcd_string(LINE1, LCD_LINE_1)
lcd_string(LINE2, LCD_LINE_2)
#print "disk use =", LINE2
time.sleep(5)
#mem = psutil.virtual_memory()
#LINE1 = "Mem Use = " + str(mem)+" C"
#LINE2 = " "+ str(used/1024/1024) + "/" + str(total/1024/1024)+" MB"
#lcd_string(LINE1, LCD_LINE_1)
#lcd_string(LINE2, LCD_LINE_2)
#time.sleep(5)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
finally:
LCD_BACKLIGHT = 0x00 #turn off backlight when exiting!
lcd_byte(0x01, LCD_CMD)
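The statvfs arithmetic above (blocks times fragment size) is roughly what `shutil.disk_usage` computes on Python 3.3+ (it uses f_bfree rather than f_bavail, so the numbers can differ slightly); a small sketch of the equivalent readout (illustrative only, not part of the original script):

```python
import shutil

usage = shutil.disk_usage("/")
line2 = " %d/%d MB" % (usage.used // 1024 // 1024, usage.total // 1024 // 1024)
print(line2)
```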
| etherkit/OpenBeacon2 | client/win/venv/Lib/site-packages/PyInstaller/hooks/hook-gi.repository.xlib.py | Python | gpl-3.0 | 593 | 0.003373 |
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2019, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Import hook for PyGObject https://wiki.gnome.org/PyGObject
"""
from PyInstaller.utils.hooks import get_gi_typelibs
binaries, datas, hiddenimports = get_gi_typelibs('xlib', '2.0')
| buddyli/android_intership | controller/restaurant_oper.py | Python | apache-2.0 | 2,323 | 0.037517 |
#!/usr/bin/env python
#-*- encoding:utf-8 -*-
import json
from datetime import datetime
from bottle import route, mako_template as template, redirect, request, response, get, post
from bottle import static_file, view # imported so that static files such as html and css can be served directly without going through a controller
from model.documents import *
from setting import *
DATE_FORMAT = '%Y-%m-%d %H:%M:%S' # timestamp format used when saving to the database
@route('/to_add_item')
def to_add_item():
return template('views/system/item/add', site_opt = site_opt)
@route('/add_item', method = 'POST')
def add_item():
DATE_FORMAT = '%Y%m%d%H%M%S'
innerName = 'attr_%s' % datetime.now().strftime(DATE_FORMAT)
#request.params retrieves parameters passed by either GET or POST
name = request.params.get('name')
address = request.params.get('address')
telno = request.params.get('telno')
lat = request.params.get('lat')
lon = request.params.get('lon')
item = Restaurant(name=unicode(name, 'utf8'), address=unicode(address, 'utf8'), telno=telno, lat = lat, lon = lon)
item.save()
redirect('list_item')
@route('/list_item')
def list_item():
start = request.params.get('start') or '0'
size = request.params.get('size') or '1000'
items = Restaurant.objects[int(start):(int(start) + int(size))]
data = {
'items': items
}
return template('views/system/item/list', data = data, site_opt = site_opt)
@route('/del_item')
def del_item():
id = request.params.get('id')
Restaurant.objects(id=id).delete()
# cascade delete menus of the restaurant
Menu.objects(restaurant=id).delete()
redirect('/list_item')
@route('/modify_item', method = 'POST')
def modify_item():
id = request.params.get('id')
name = request.params.get('name')
address = request.params.get('address')
telno = request.params.get('telno')
lat = request.params.get('lat')
lon = request.params.get('lon')
print 'modify item=====%s, %s, %s, %s' % (id, name, address, telno)
Restaurant.objects(id=id).update(set__name = unicode(name, 'utf8'), set__address = address, set__telno = unicode(telno, 'utf-8'), set__lat = lat, set__lon = lon)
redirect('/list_item')
@route('/to_modify_item')
def to_modify_item():
id = request.params.get('id')
item = Restaurant.objects(id = id)[0]
data = {
'item': item
}
return template('views/system/item/edit', data = data, site_opt = site_opt)
| coreycb/horizon | openstack_dashboard/dashboards/project/networks/tables.py | Python | apache-2.0 | 7,635 | 0 |
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django import template
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.usage import quotas
LOG = logging.getLogger(__name__)
class CheckNetworkEditable(object):
"""Mixin class to determine the specified network is editable."""
def allowed(self, request, datum=None):
# Only administrator is allowed to create and manage shared networks.
if datum and datum.shared:
return False
return True
class DeleteNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Network",
u"Delete Ne
|
tworks",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted Network",
u"Deleted Networks",
count
)
policy_rules = (("network", "delete_network"),)
def delete(self, request, network_id):
network_name = network_id
try:
# Retrieve the network list.
network = api.neutron.network_get(request, network_id,
expand_subnet=False)
network_name = network.name
LOG.debug('Network %(network_id)s has subnets: %(subnets)s',
{'network_id': network_id, 'subnets': network.subnets})
for subnet_id in network.subnets:
api.neutron.subnet_delete(request, subnet_id)
LOG.debug('Deleted subnet %s', subnet_id)
api.neutron.network_delete(request, network_id)
LOG.debug('Deleted network %s successfully', network_id)
except Exception:
msg = _('Failed to delete network %s')
LOG.info(msg, network_id)
redirect = reverse("horizon:project:networks:index")
exceptions.handle(request, msg % network_name, redirect=redirect)
class CreateNetwork(tables.LinkAction):
name = "create"
verbose_name = _("Create Network")
url = "horizon:project:networks:create"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_network"),)
def allowed(self, request, datum=None):
usages = quotas.tenant_quota_usages(request)
# when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
# usages["networks"] is empty
if usages.get('networks', {}).get('available', 1) <= 0:
if "disabled" not in self.classes:
self.classes = [c for c in self.classes] + ["disabled"]
self.verbose_name = _("Create Network (Quota exceeded)")
else:
self.verbose_name = _("Create Network")
self.classes = [c for c in self.classes if c != "disabled"]
return True
class EditNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
tables.LinkAction):
name = "update"
verbose_name = _("Edit Network")
url = "horizon:project:networks:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("network", "update_network"),)
class CreateSubnet(policy.PolicyTargetMixin, CheckNetworkEditable,
tables.LinkAction):
name = "subnet"
verbose_name = _("Add Subnet")
url = "horizon:project:networks:addsubnet"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_subnet"),)
# neutron has used both in their policy files, supporting both
policy_target_attrs = (("network:tenant_id", "tenant_id"),
("network:project_id", "tenant_id"),)
def allowed(self, request, datum=None):
usages = quotas.tenant_quota_usages(request)
# when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
# usages["subnets'] is empty
if usages.get('subnets', {}).get('available', 1) <= 0:
if 'disabled' not in self.classes:
self.classes = [c for c in self.classes] + ['disabled']
self.verbose_name = _('Add Subnet (Quota exceeded)')
else:
self.verbose_name = _('Add Subnet')
self.classes = [c for c in self.classes if c != 'disabled']
return True
def get_subnets(network):
template_name = 'project/networks/_network_ips.html'
context = {"subnets": network.subnets}
return template.loader.render_to_string(template_name, context)
DISPLAY_CHOICES = (
("up", pgettext_lazy("Admin state of a Network", u"UP")),
("down", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
STATUS_DISPLAY_CHOICES = (
("active", pgettext_lazy("Current status of a Network", u"Active")),
("build", pgettext_lazy("Current status of a Network", u"Build")),
("down", pgettext_lazy("Current status of a Network", u"Down")),
("error", pgettext_lazy("Current status of a Network", u"Error")),
)
class ProjectNetworksFilterAction(tables.FilterAction):
name = "filter_project_networks"
filter_type = "server"
filter_choices = (('name', _("Name ="), True),
('shared', _("Shared ="), True,
_("e.g. Yes / No")),
('router:external', _("External ="), True,
_("e.g. Yes / No")),
('status', _("Status ="), True),
('admin_state_up', _("Admin State ="), True,
_("e.g. UP / DOWN")))
class NetworksTable(tables.DataTable):
name = tables.WrappingColumn("name_or_id",
verbose_name=_("Name"),
link='horizon:project:networks:detail')
subnets = tables.Column(get_subnets,
verbose_name=_("Subnets Associated"),)
shared = tables.Column("shared", verbose_name=_("Shared"),
filters=(filters.yesno, filters.capfirst))
external = tables.Column("router:external", verbose_name=_("External"),
filters=(filters.yesno, filters.capfirst))
status = tables.Column("status", verbose_name=_("Status"),
display_choices=STATUS_DISPLAY_CHOICES)
admin_state = tables.Column("admin_state",
verbose_name=_("Admin State"),
display_choices=DISPLAY_CHOICES)
class Meta(object):
name = "networks"
verbose_name = _("Networks")
table_actions = (CreateNetwork, DeleteNetwork,
ProjectNetworksFilterAction)
row_actions = (EditNetwork, CreateSubnet, DeleteNetwork)
| eduNEXT/edx-platform | common/djangoapps/track/backends/tests/test_mongodb.py | Python | agpl-3.0 | 1,162 | 0.001721 |
# lint-amnesty, pylint: disable=missing-module-docstring
from unittest.mock import patch
from django.test import TestCase
from common.djangoapps.track.backends.mongodb import MongoBackend
class TestMongoBackend(TestCase): # lint-amnesty, pylint: disable=missing-class-docstring
def setUp(self):
super().setUp()
self.mongo_patcher = patch('common.djangoapps.track.backends.mongodb.MongoClient')
self.mongo_patcher.start()
self.addCleanup(self.mongo_patcher.stop)
self.backend = MongoBackend()
def test_mongo_backend(self):
events = [{'test': 1}, {'test': 2}]
self.backend.send(events[0])
self.backend.send(events[1])
# Check if we inserted events into the database
calls = self.backend.collection.insert.mock_calls
assert len(calls) == 2
# Unpack the arguments and check if the events were used
# as the first argument to collection.insert
def first_argument(call):
_, args, _ = call
return args[0]
assert events[0] == first_argument(calls[0])
assert events[1] == first_argument(calls[1])
| georgestarcher/TA-SyncKVStore | bin/ta_synckvstore/solnlib/packages/schematics/validate.py | Python | mit | 3,988 | 0.001254 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import functools
import inspect
from .common import * # pylint: disable=redefined-builtin
from .datastructures import Context
from .exceptions import FieldError, DataError
from .transforms import import_loop, validation_converter
from .undefined import Undefined
def validate(cls, instance_or_dict, trusted_data=None, partial=False, strict=False,
convert=True, context=None, **kwargs):
"""
Validate some untrusted data using a model. Trusted data can be passed in
the `trusted_data` parameter.
:param cls:
The model class to use as source for validation. If given an instance,
will also run instance-level validators on the data.
:param instance_or_dict:
A ``dict`` or ``dict``-like structure for incoming data.
:param partial:
Allow partial data to validate; useful for PATCH requests.
Essentially drops the ``required=True`` arguments from field
definitions. Default: False
:param strict:
Complain about unrecognized keys. Default: False
:param trusted_data:
A ``dict``-like structure that may contain already validated data.
:param convert:
Controls whether to perform import conversion before validating.
Can be turned off to skip an unnecessary conversion step if all values
are known to have the right datatypes (e.g., when validating immediately
after the initial import). Default: True
:returns: data
``dict`` containing the valid raw_data plus ``trusted_data``.
If errors are found, they are raised as a ValidationError with a list
of errors attached.
"""
context = context or get_validation_context(partial=partial, strict=strict, convert=convert)
errors = {}
try:
data = import_loop(cls, instance_or_dict, trusted_data=trusted_data,
context=context, **kwargs)
except DataError as exc:
errors = exc.messages
data = exc.partial_data
errors.update(_validate_model(cls, data, context))
if errors:
raise DataError(errors, data)
return data
def _validate_model(cls, data, context):
"""
Validate data using model level methods.
:param cls:
The Model class to validate ``data`` against.
:param data:
A dict with data to validate. Invalid items are removed from it.
:returns:
Errors of the fields that did not pass validation.
"""
errors = {}
invalid_fields = []
for field_name, field in iteritems(cls._fields):
if field_name in cls._validator_functions and field_name in data:
value = data[field_name]
try:
cls._validator_functions[field_name](cls, data, value, context)
except FieldError as exc:
field = cls._fields[field_name]
serialized_field_name = field.serialized_name or field_name
errors[serialized_field_name] = exc.messages
invalid_fields.append(field_name)
for field_name in invalid_fields:
data.pop(field_name)
return errors
def get_validation_context(**options):
validation_options = {
'field_converter': validation_converter,
'partial': False,
'strict': False,
'convert': True,
'validate': True,
'new': False,
}
validation_options.update(options)
return Context(**validation_options)
def prepare_validator(func, argcount):
if isinstance(func, classmethod):
func = func.__get__(object).__func__
if len(inspect.getargspec(func).args) < argcount:
@functools.wraps(func)
def newfunc(*args, **kwargs):
if not kwargs or kwargs.pop('context', 0) is 0:
args = args[:-1]
return func(*args, **kwargs)
return newfunc
return func
__all__ = module_exports(__name__)
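A rough usage sketch of how this `validate` entry point is normally reached through a schematics `Model` (field names are made up; assumes the standalone `schematics` package rather than this vendored copy):

```python
from schematics.models import Model
from schematics.types import StringType, IntType
from schematics.exceptions import DataError

class User(Model):
    name = StringType(required=True)
    age = IntType()

User({'name': 'Ada', 'age': 36}).validate()   # passes

try:
    User({'age': 30}).validate()              # 'name' is required but missing
except DataError as exc:
    print(exc.messages)                       # per-field validation errors
```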
| NeCTAR-RC/karaage-user | kguser/conf/settings.py | Python | gpl-3.0 | 1,269 | 0.003152 |
# Django settings for kguser project.
from os import path
from karaage.conf.defaults import *
TEMPLATE_DIRS += (
'/usr/share/kguser/templates',
)
ROOT_URLCONF = 'kguser.conf.urls'
SITE_ID = 2
STATIC_ROOT = '/var/lib/karaage-user/static'
STATIC_URL = '/kguser_media/'
LOGIN_URL = 'kgauth_login_select'
ALLOW_REGISTRATIONS = False
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
BOOTSTRAP3 = {
'jquery_url': '//code.jquery.com/jquery.min.js',
'base_url': '//netdna.bootstrapcdn.com/bootstrap/3.1.1/',
'css_url': '//netdna.bootstrapcdn.com/bootswatch/3.1.1/simplex/bootstrap.min.css',
'theme_url': None,
'javascript_url': None,
'horizontal_label_class': 'col-md-2',
'horizontal_field_class': 'col-md-4',
'set_required': True,
}
INSTALLED_APPS = INSTALLED_APPS + ('kgauth', 'kgkeystone', 'kguser', 'bootstrap3', 'django_gravatar',)
import sys
if 'test' in sys.argv:
execfile(path.join(path.dirname(__file__), "test_settings.py"))
else:
execfile("/etc/karaage/user_settings.py")
DEFAULT_FROM_EMAIL = ACCOUNTS_EMAIL
if DEBUG:
TEMPLATE_DIRS = (
path.abspath(path.join(path.dirname(__file__), '..', '..', 'templates')),
path.abspath(path.join(path.dirname(__file__), '..', 'templates')),
) + TEMPLATE_DIRS
| DominoTree/servo | tests/wpt/web-platform-tests/tools/wpt/tests/test_revlist.py | Python | mpl-2.0 | 6,239 | 0.000641 |
import mock
from tools.wpt import revlist
def test_calculate_cutoff_date():
assert revlist.calculate_cutoff_date(3601, 3600, 0) == 3600
assert revlist.calculate_cutoff_date(3600, 3600, 0) == 3600
assert revlist.calculate_cutoff_date(3599, 3600, 0) == 0
assert revlist.calculate_cutoff_date(3600, 3600, 1) == 1
assert revlist.calculate_cutoff_date(3600, 3600, -1) == 3599
def test_parse_epoch():
assert revlist.parse_epoch(b"10h") == 36000
assert revlist.parse_epoch(b"10d") == 864000
assert revlist.parse_epoch(b"10w") == 6048000
@mock.patch('subprocess.check_output')
def test_get_epoch_revisions(mocked_check_output):
# check:
#
# * Several revisions in the same epoch offset (BC, DEF, HIJ, and LM)
# * Revision with a timestamp exactly equal to the epoch boundary (H)
# * Revision in non closed interval (O)
#
# mon tue wed thu fri sat sun mon thu wed
# | | | | | | | | |
# -A---B-C---DEF---G---H--IJ----------K-----L-M----N--O--
# ^
# until
# max_count: 5; epoch: 1d
# Expected result: N,M,K,J,G,F,C,A
epoch = 86400
until = 1188000 # Wednesday, 14 January 1970 18:00:00 UTC
mocked_check_output.return_value = b'''
merge_pr_O O 1166400 _wed_
merge_pr_N N 1080000 _tue_
merge_pr_M M 1015200 _mon_
merge_pr_L L 993600 _mon_
merge_pr_K K 907200 _sun_
merge_pr_J J 734400 _fri_
merge_pr_I I 712800 _fri_
merge_pr_H H 691200 _fri_
merge_pr_G G 648000 _thu_
merge_pr_F F 583200 _wed_
merge_pr_E E 561600 _wed_
merge_pr_D D 540000 _wed_
merge_pr_C C 475200 _tue_
merge_pr_B B 453600 _tue_
merge_pr_A A 388800 _mon_
'''
tagged_revisons = revlist.get_epoch_revisions(epoch, until, 8)
assert tagged_revisons.next() == 'N'
assert tagged_revisons.next() == 'M'
assert tagged_revisons.next() == 'K'
assert tagged_revisons.next() == 'J'
assert tagged_revisons.next() == 'G'
assert tagged_revisons.next() == 'F'
assert tagged_revisons.next() == 'C'
assert tagged_revisons.next() == 'A'
assert len(list(tagged_revisons)) == 0 # generator exhausted
# check: max_count with enough candidate items in the revision list
#
# mon tue wed thu fri sat sun mon
# | | | | | | |
# ------B-----C-----D----E-----F-----G------H---
# ^
# until
# max_count: 5; epoch: 1d
# Expected result: G,F,E,D,C
epoch = 86400
until = 1015200 # Monday, 12 January 1970 18:00:00 UTC
mocked_check_output.return_value = b'''
merge_pr_H H 993600 _mon_
merge_pr_G G 907200 _sun_
merge_pr_F F 820800 _sat_
merge_pr_E E 734400 _fri_
merge_pr_D D 648000 _thu_
merge_pr_C C 561600 _wed_
merge_pr_B B 475200 _thu_
'''
tagged_revisons = revlist.get_epoch_revisions(epoch, until, 5)
assert tagged_revisons.next() == 'G'
assert tagged_revisons.next() == 'F'
assert tagged_revisons.next() == 'E'
assert tagged_revisons.next() == 'D'
assert tagged_revisons.next() == 'C'
assert len(list(tagged_revisons)) == 0 # generator exhausted
# check: max_count with less returned candidates items than the needed
#
# mon tue wed thu fri sat sun mon
# | | | | | | |
# -----------------------------F-----G------H---
# ^
# until
# max_count: 5; epoch: 1d
# Expected result: G,F
epoch = 86400
until = 1015200 # Monday, 12 January 1970 18:00:00 UTC
mocked_check_output.return_value = b'''
merge_pr_H H 993600 _mon_
merge_pr_G G 907200 _sun_
merge_pr_F F 820800 _sat_
'''
tagged_revisons = revlist.get_epoch_revisions(epoch, until, 5)
assert tagged_revisons.next() == 'G'
assert tagged_revisons.next() == 'F'
assert len(list(tagged_revisons)) == 0 # generator exhausted
# check: initial until value is on an epoch boundary
#
# sud mon tue wed thu
# | | | |
# -F-G-----------------H
# ^
# until
# max_count: 3; epoch: 1d
# Expected result: G,F
# * H is skipped because the epoch
# interval is defined as a right-open interval
# * G is included but in the Monday's interval
# * F is included because it is the unique candidate
# included in the Sunday's interval
epoch = 86400
until = 1296000 # Thursday, 15 January 1970 0:00:00 UTC
mocked_check_output.return_value = b'''
merge_pr_H H 1296000 _wed_
merge_pr_G G 950400 _mon_
merge_pr_F F 921600 _sud_
'''
tagged_revisons = revlist.get_epoch_revisions(epoch, until, 3)
assert tagged_revisons.next() == 'G'
assert tagged_revisons.next() == 'F'
assert len(list(tagged_revisons)) == 0 # generator exhausted
# check: until aligned with Monday, 5 January 1970 0:00:00 (345600)
# not with Thursday, 1 January 1970 0:00:00 (0)
#
# sud mon tue wed thu
# | | | |
# -F-G--------------H---
# ^
# until
# max_count: 1; epoch: 1w
# Expected result: F
epoch = 604800
monday = 950400 # Monday, 12 January 1970 00:00:00 UTC
until = monday + 345600 # 1296000. Thursday, 15 January 1970 0:00:00 UTC
mocked_check_output.return_value = b'''
merge_pr_H H 1180800 _wed_
merge_pr_G G 950400 _mon_
merge_pr_F F 921600 _sud_
'''
tagged_revisons = revlist.get_epoch_revisions(epoch, until, 1)
assert tagged_revisons.next() == 'F'
assert len(list(tagged_revisons)) == 0 # generator exhausted
| pravsripad/mne-python | mne/io/ctf/info.py | Python | bsd-3-clause | 19,623 | 0 |
"""Populate measurement info."""
# Author: Eric Larson <larson.eric.d<gmail.com>
#
# License: BSD-3-Clause
from time import strptime
from calendar import timegm
import os.path as op
import numpy as np
from ...utils import logger, warn, _clean_names
from ...transforms import (apply_trans, _coord_frame_name, invert_transform,
combine_transforms)
from ...annotations import Annotations
from ..meas_info import _empty_info
from ..write import get_new_file_id
from ..ctf_comp import _add_kind, _calibrate_comp
from ..constants import FIFF
from .constants import CTF
_ctf_to_fiff = {CTF.CTFV_COIL_LPA: FIFF.FIFFV_POINT_LPA,
CTF.CTFV_COIL_RPA: FIFF.FIFFV_POINT_RPA,
CTF.CTFV_COIL_NAS: FIFF.FIFFV_POINT_NASION}
def _pick_isotrak_and_hpi_coils(res4, coils, t):
"""Pick the HPI coil locations given in device coordinates."""
if coils is None:
return list(), list()
dig = list()
hpi_result = dict(dig_points=list())
n_coil_dev = 0
n_coil_head = 0
for p in coils:
if p['valid']:
if p['kind'] in [CTF.CTFV_COIL_LPA, CTF.CTFV_COIL_RPA,
CTF.CTFV_COIL_NAS]:
kind = FIFF.FIFFV_POINT_CARDINAL
ident = _ctf_to_fiff[p['kind']]
else: # CTF.CTFV_COIL_SPARE
kind = FIFF.FIFFV_POINT_HPI
ident = p['kind']
if p['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_DEVICE:
if t is None or t['t_ctf_dev_dev'] is None:
raise RuntimeError('No coordinate transformation '
'available for HPI coil locations')
d = dict(kind=kind, ident=ident,
r=apply_trans(t['t_ctf_dev_dev'], p['r']),
coord_frame=FIFF.FIFFV_COORD_UNKNOWN)
hpi_result['dig_points'].append(d)
n_coil_dev += 1
elif p['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_HEAD:
if t is None or t['t_ctf_head_head'] is None:
raise RuntimeError('No coordinate transformation '
'available for (virtual) Polhemus data')
d = dict(kind=kind, ident=ident,
r=apply_trans(t['t_ctf_head_head'], p['r']),
coord_frame=FIFF.FIFFV_COORD_HEAD)
dig.append(d)
n_coil_head += 1
if n_coil_head > 0:
logger.info(' Polhemus data for %d HPI coils added' % n_coil_head)
if n_coil_dev > 0:
        logger.info(' Device coordinate locations for %d HPI coils added'
                    % n_coil_dev)
return dig, [hpi_result]
def _convert_time(date_str, time_str):
"""Convert date and time strings to float time."""
for fmt in ("%d/%m/%Y", "%d-%b-%Y", "%a, %b %d, %Y"):
try:
date = strptime(date_str.strip(), fmt)
except ValueError:
pass
else:
break
else:
raise RuntimeError(
'Illegal date: %s.\nIf the language of the date does not '
'correspond to your local machine\'s language try to set the '
'locale to the language of the date string:\n'
'locale.setlocale(locale.LC_ALL, "en_US")' % date_str)
for fmt in ('%H:%M:%S', '%H:%M'):
try:
time = strptime(time_str, fmt)
except ValueError:
pass
else:
break
else:
raise RuntimeError('Illegal time: %s' % time_str)
    # MNE-C uses mktime, which uses local time, but here we decouple the
    # conversion location from the process and instead assume that the
    # acquisition was in GMT. This will be wrong for most sites, but at least
    # the value we obtain here won't depend on the geographical location
    # where the file was converted.
res = timegm((date.tm_year, date.tm_mon, date.tm_mday,
time.tm_hour, time.tm_min, time.tm_sec,
date.tm_wday, date.tm_yday, date.tm_isdst))
return res
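# Hedged usage sketch (added for clarity, not part of the original module): because the
# conversion deliberately assumes GMT, the epoch start and the following day map to
# 0 and 86400 respectively:
#     _convert_time('01/01/1970', '00:00')  -> 0
#     _convert_time('02/01/1970', '00:00')  -> 86400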
def _get_plane_vectors(ez):
"""Get two orthogonal vectors orthogonal to ez (ez will be modified)."""
assert ez.shape == (3,)
ez_len = np.sqrt(np.sum(ez * ez))
if ez_len == 0:
raise RuntimeError('Zero length normal. Cannot proceed.')
if np.abs(ez_len - np.abs(ez[2])) < 1e-5: # ez already in z-direction
ex = np.array([1., 0., 0.])
else:
ex = np.zeros(3)
if ez[1] < ez[2]:
ex[0 if ez[0] < ez[1] else 1] = 1.
else:
ex[0 if ez[0] < ez[2] else 2] = 1.
ez /= ez_len
ex -= np.dot(ez, ex) * ez
ex /= np.sqrt(np.sum(ex * ex))
ey = np.cross(ez, ex)
return ex, ey
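# Illustrative example (assumed values, added for clarity): for a normal that already
# points along z the fast path is taken, ez is normalised in place and (ex, ey)
# complete a right-handed frame:
#     ez = np.array([0., 0., 2.])
#     ex, ey = _get_plane_vectors(ez)  # ez becomes [0., 0., 1.]
#     # ex == [1., 0., 0.], ey == [0., 1., 0.]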
def _at_origin(x):
"""Determine if a vector is at the origin."""
return (np.sum(x * x) < 1e-8)
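# Quick sanity examples (added comment): _at_origin(np.zeros(3)) is True, while
# _at_origin(np.array([1e-3, 0., 0.])) is False because 1e-6 is not below 1e-8.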
def _check_comp_ch(cch, kind, desired=None):
if desired is None:
desired = cch['grad_order_no']
if cch['grad_order_no'] != desired:
raise RuntimeError('%s channel with inconsistent compensation '
'grade %s, should be %s'
% (kind, cch['grad_order_no'], desired))
return desired
def _convert_channel_info(res4, t, use_eeg_pos):
"""Convert CTF channel information to fif format."""
nmeg = neeg = nstim = nmisc = nref = 0
chs = list()
this_comp = None
for k, cch in enumerate(res4['chs']):
cal = float(1. / (cch['proper_gain'] * cch['qgain']))
ch = dict(scanno=k + 1, range=1., cal=cal, loc=np.full(12, np.nan),
unit_mul=FIFF.FIFF_UNITM_NONE, ch_name=cch['ch_name'][:15],
coil_type=FIFF.FIFFV_COIL_NONE)
del k
chs.append(ch)
# Create the channel position information
if cch['sensor_type_index'] in (CTF.CTFV_REF_MAG_CH,
CTF.CTFV_REF_GRAD_CH,
CTF.CTFV_MEG_CH):
# Extra check for a valid MEG channel
if np.sum(cch['coil']['pos'][0] ** 2) < 1e-6 or \
np.sum(cch['coil']['norm'][0] ** 2) < 1e-6:
nmisc += 1
ch.update(logno=nmisc, coord_frame=FIFF.FIFFV_COORD_UNKNOWN,
kind=FIFF.FIFFV_MISC_CH, unit=FIFF.FIFF_UNIT_V)
text = 'MEG'
if cch['sensor_type_index'] != CTF.CTFV_MEG_CH:
text += ' ref'
warn('%s channel %s did not have position assigned, so '
'it was changed to a MISC channel'
% (text, ch['ch_name']))
continue
ch['unit'] = FIFF.FIFF_UNIT_T
# Set up the local coordinate frame
r0 = cch['coil']['pos'][0].copy()
ez = cch['coil']['norm'][0].copy()
# It turns out that positive proper_gain requires swapping
# of the normal direction
if cch['proper_gain'] > 0.0:
ez *= -1
# Check how the other vectors should be defined
off_diag = False
# Default: ex and ey are arbitrary in the plane normal to ez
if cch['sensor_type_index'] == CTF.CTFV_REF_GRAD_CH:
# The off-diagonal gradiometers are an exception:
#
# We use the same convention for ex as for Neuromag planar
# gradiometers: ex pointing in the positive gradient direction
diff = cch['coil']['pos'][0] - cch['coil']['pos'][1]
size = np.sqrt(np.sum(diff * diff))
if size > 0.:
diff /= size
# Is ez normal to the line joining the coils?
if np.abs(np.dot(diff, ez)) < 1e-3:
off_diag = True
# Handle the off-diagonal gradiometer coordinate system
r0 -= size * diff / 2.0
ex = diff
ey = np.cross(ez, ex)
else:
ex, ey = _get_plane_vectors(ez)
else:
ex, ey = _get_plane_vectors(ez)
|
maleficarium/youtube-dl
|
test/test_http.py
|
Python
|
unlicense
| 6,282
| 0.001595
|
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from youtube_dl import YoutubeDL
from youtube_dl.compat import compat_http_server, compat_urllib_request
import ssl
import threading
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
def http_server_port(httpd):
if os.name == 'java' and isinstance(httpd.socket, ssl.SSLSocket):
# In Jython SSLSocket is not a subclass of socket.socket
sock = httpd.socket.sock
else:
sock = httpd.socket
return sock.getsockname()[1]
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
def log_message(self, format, *args):
pass
def do_GET(self):
if self.path == '/video.html':
self.send_response(200)
self.send_header('Content-Type', 'text/html; charset=utf-8')
self.end_headers()
self.wfile.write(b'<html><video src="/vid.mp4" /></html>')
elif self.path == '/vid.mp4':
self.send_response(200)
self.send_header('Content-Type', 'video/mp4')
self.end_headers()
self.wfile.write(b'\x00\x00\x00\x00\x20\x66\x74[video]')
elif self.path == '/302':
            if sys.version_info[0] == 3:
                # XXX: Python 3 http server does not allow non-ASCII header values
self.send_response(404)
self.end_headers()
return
new_url = 'http://localhost:%d/中文.html' % http_server_port(self.server)
self.send_response(302)
self.send_header(b'Location', new_url.encode('utf-8'))
self.end_headers()
elif self.path == '/%E4%B8%AD%E6%96%87.html':
self.send_response(200)
self.send_header('Content-Type', 'text/html; charset=utf-8')
self.end_headers()
self.wfile.write(b'<html><video src="/vid.mp4" /></html>')
else:
assert False
class FakeLogger(object):
def debug(self, msg):
pass
def warning(self, msg):
pass
def error(self, msg):
pass
class TestHTTP(unittest.TestCase):
def setUp(self):
self.httpd = compat_http_server.HTTPServer(
('localhost', 0), HTTPTestRequestHandler)
self.port = http_server_port(self.httpd)
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
self.server_thread.daemon = True
self.server_thread.start()
def test_unicode_path_redirection(self):
# XXX: Python 3 http server does not allow non-ASCII header values
if sys.version_info[0] == 3:
return
ydl = YoutubeDL({'logger': FakeLogger()})
r = ydl.extract_info('http://localhost:%d/302' % self.port)
self.assertEqual(r['url'], 'http://localhost:%d/vid.mp4' % self.port)
class TestHTTPS(unittest.TestCase):
def setUp(self):
certfn = os.path.join(TEST_DIR, 'testcert.pem')
self.httpd = compat_http_server.HTTPServer(
('localhost', 0), HTTPTestRequestHandler)
self.httpd.socket = ssl.wrap_socket(
self.httpd.socket, certfile=certfn, server_side=True)
self.port = http_server_port(self.httpd)
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
self.server_thread.daemon = True
self.server_thread.start()
def test_nocheckcertificate(self):
if sys.version_info >= (2, 7, 9): # No certificate checking anyways
ydl = YoutubeDL({'logger': FakeLogger()})
self.assertRaises(
Exception,
ydl.extract_info, 'https://localhost:%d/video.html' % self.port)
ydl = YoutubeDL({'logger': FakeLogger(), 'nocheckcertificate': True})
r = ydl.extract_info('https://localhost:%d/video.html' % self.port)
self.assertEqual(r['url'], 'https://localhost:%d/vid.mp4' % self.port)
def _build_proxy_handler(name):
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
proxy_name = name
def log_message(self, format, *args):
pass
def do_GET(self):
self.send_response(200)
self.send_header('Content-Type', 'text/plain; charset=utf-8')
self.end_headers()
self.wfile.write('{self.proxy_name}: {self.path}'.format(self=self).encode('utf-8'))
return HTTPTestRequestHandler
class TestProxy(unittest.TestCase):
def setUp(self):
self.proxy = compat_http_server.HTTPServer(
('localhost', 0), _build_proxy_handler('normal'))
self.port = http_server_port(self.proxy)
self.proxy_thread = threading.Thread(target=self.proxy.serve_forever)
self.proxy_thread.daemon = True
self.proxy_thread.start()
self.geo_proxy = compat_http_server.HTTPServer(
('localhost', 0), _build_proxy_handler('geo'))
self.geo_port = http_server_port(self.geo_proxy)
self.geo_proxy_thread = threading.Thread(target=self.geo_proxy.serve_forever)
self.geo_proxy_thread.daemon = True
self.geo_proxy_thread.start()
def test_proxy(self):
geo_proxy = 'localhost:{0}'.format(self.geo_port)
ydl = YoutubeDL({
'proxy': 'localhost:{0}'.format(self.port),
'geo_verification_proxy': geo_proxy,
})
url = 'http://foo.com/bar'
response = ydl.urlopen(url).read().decode('utf-8')
self.assertEqual(response, 'normal: {0}'.format(url))
req = compat_urllib_request.Request(url)
req.add_header('Ytdl-request-proxy', geo_proxy)
response = ydl.urlopen(req).read().decode('utf-8')
self.assertEqual(response, 'geo: {0}'.format(url))
def test_proxy_with_idn(self):
ydl = YoutubeDL({
'proxy': 'localhost:{0}'.format(self.port),
})
url = 'http://中文.tw/'
response = ydl.urlopen(url).read().decode('utf-8')
# b'xn--fiq228c' is '中文'.encode('idna')
self.assertEqual(response, 'normal: http://xn--fiq228c.tw/')
if __name__ == '__main__':
unittest.main()
|
fairbird/OpenPLI-BlackHole
|
lib/python/Tools/Transponder.py
|
Python
|
gpl-2.0
| 11,703
| 0.033923
|
from enigma import eDVBFrontendParametersSatellite, eDVBFrontendParametersCable, eDVBFrontendParametersTerrestrial, eDVBFrontendParametersATSC
from Components.NimManager import nimmanager
def orbpos(pos):
return pos > 3600 and "N/A" or "%d.%d\xc2\xb0%s" % (pos > 1800 and ((3600 - pos) / 10, (3600 - pos) % 10, "W") or (pos / 10, pos % 10, "E"))
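# Worked examples (illustrative comment, not in the original file): positions are given
# in tenths of a degree, east for pos <= 1800 and west above that:
#     orbpos(192)  -> "19.2\xc2\xb0E" (19.2 degrees east)
#     orbpos(3592) -> "0.8\xc2\xb0W"  (0.8 degrees west)
#     orbpos(3700) -> "N/A"           (out of range)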
def getTunerDescription(nim):
try:
return nimmanager.getTerrestrialDescription(nim)
except:
print "[ChannelNumber] nimmanager.getTerrestrialDescription(nim) failed, nim:", nim
return ""
def getMHz(frequency):
return (frequency+50000)/100000/10.
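# Worked example (added comment, Python 2 integer division assumed):
# getMHz(474000000) -> (474000000 + 50000) / 100000 / 10. -> 4740 / 10. -> 474.0 MHz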
def getChannelNumber(frequency, nim):
if nim == "DVB-T":
for n in nimmanager.nim_slots:
if n.isCompatible("DVB-T"):
nim = n.slot
break
f = getMHz(frequency)
descr = getTunerDescription(nim)
if "DVB-T" in descr:
if "Europe" in descr:
if 174 < f < 230: # III
d = (f + 1) % 7
return str(int(f - 174)/7 + 5) + (d < 3 and "-" or d > 4 and "+" or "")
elif 470 <= f < 863: # IV,V
d = (f + 2) % 8
return str(int(f - 470) / 8 + 21) + (d < 3.5 and "-" or d > 4.5 and "+" or "")
elif "Australia" in descr:
d = (f + 1) % 7
ds = (d < 3 and "-" or d > 4 and "+" or "")
if 174 < f < 202: # CH6-CH9
return str(int(f - 174)/7 + 6) + ds
elif 202 <= f < 209: # CH9A
return "9A" + ds
elif 209 <= f < 230: # CH10-CH12
return str(int(f - 209)/7 + 10) + ds
elif 526 < f < 820: # CH28-CH69
d = (f - 1) % 7
return str(int(f - 526)/7 + 28) + (d < 3 and "-" or d > 4 and "+" or "")
return ""
def supportedChannels(nim):
descr = getTunerDescription(nim)
return "Europe" in descr and "DVB-T" in descr
def channel2frequency(channel, nim):
descr = getTunerDescription(nim)
if "Europe" in descr and "DVB-T" in descr:
if 5 <= channel <= 12:
return (177500 + 7000*(channel- 5))*1000
elif 21 <= channel <= 69:
return (474000 + 8000*(channel-21))*1000
return 474000000
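# Round-trip sketch (illustrative comment, assuming a European DVB-T tuner profile):
# channel2frequency(21, nim) -> (474000 + 8000 * 0) * 1000 = 474000000 Hz, and feeding
# that back through getChannelNumber() gives f = 474.0 MHz, d = (474 + 2) % 8 = 4,
# hence str(int(474 - 470) / 8 + 21) = "21" with no "+"/"-" offset suffix.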
def ConvertToHumanReadable(tp, tunertype = None):
ret = { }
if tunertype is None:
tunertype = tp.get("tuner_type", "None")
if tunertype == "DVB-S":
ret["tuner_type"] = _("Satellite")
ret["inversion"] = {
eDVBFrontendParametersSatellite.Inversion_Unknown : _("Auto"),
eDVBFrontendParametersSatellite.Inversion_On : _("On"),
eDVBFrontendParametersSatellite.Inversion_Off : _("Off")}.get(tp.get("inversion"))
ret["fec_inner"] = {
eDVBFrontendParametersSatellite.FEC_None : _("None"),
eDVBFrontendParametersSatellite.FEC_Auto : _("Auto"),
eDVBFrontendParametersSatellite.FEC_1_2 : "1/2",
eDVBFrontendParametersSatellite.FEC_2_3 : "2/3",
eDVBFrontendParametersSatellite.FEC_3_4 : "3/4",
eDVBFrontendParametersSatellite.FEC_5_6 : "5/6",
eDVBFrontendParametersSatellite.FEC_6_7 : "6/7",
eDVBFrontendParametersSatellite.FEC_7_8 : "7/8",
eDVBFrontendParametersSatellite.FEC_3_5 : "3/5",
eDVBFrontendParametersSatellite.FEC_4_5 : "4/5",
eDVBFrontendParametersSatellite.FEC_8_9 : "8/9",
eDVBFrontendParametersSatellite.FEC_9_10 : "9/10"}.get(tp.get("fec_inner"))
ret["modulation"] = {
eDVBFrontendParametersSatellite.Modulation_Auto : _("Auto"),
eDVBFrontendParametersSatellite.Modulation_QPSK : "QPSK",
eDVBFrontendParametersSatellite.Modulation_QAM16 : "QAM16",
eDVBFrontendParametersSatellite.Modulation_8PSK : "8PSK",
eDVBFrontendParametersSatellite.Modulation_16APSK : "16APSK",
eDVBFrontendParametersSatellite.Modulation_32APSK : "32APSK"}.get(tp.get("modulation"))
ret["orbital_position"] = nimmanager.getSatName(int(tp.get("orbital_position")))
ret["orb_pos"] = orbpos(int(tp.get("orbital_position")))
ret["polarization"] = {
eDVBFrontendParametersSatellite.Polarisation_Horizontal : _("Horizontal"),
eDVBFrontendParametersSatellite.Polarisation_Vertical : _("Vertical"),
eDVBFrontendParametersSatellite.Polarisation_CircularLeft : _("Circular left"),
eDVBFrontendParametersSatellite.Polarisation_CircularRight : _("Circular right")}.get(tp.get("polarization"))
ret["polarization_abbreviation"] = {
eDVBFrontendParametersSatellite.Polarisation_Horizontal : "H",
eDVBFrontendParametersSatellite.Polarisation_Vertical : "V",
eDVBFrontendParametersSatellite.Polarisation_CircularLeft : "L",
eDVBFrontendParametersSatellite.Polarisation_CircularRight : "R"}.get(tp.get("polarization"))
ret["system"] = {
eDVBFrontendParametersSatellite.System_DVB_S : "DVB-S",
eDVBFrontendParametersSatellite.System_DVB_S2 : "DVB-S2"}.get(tp.get("system"))
if ret["system"] == "DVB-S2":
ret["rolloff"] = {
eDVBFrontendParametersSatellite.RollOff_alpha_0_35 : "0.35",
eDVBFrontendParametersSatellite.RollOff_alpha_0_25 : "0.25",
eDVBFrontendParametersSatellite.RollOff_alpha_0_20 : "0.20",
eDVBFrontendParametersSatellite.RollOff_auto : _("Auto")}.get(tp.get("rolloff"))
ret["pilot"] = {
eDVBFrontendParametersSatellite.Pilot_Unknown : _("Auto"),
eDVBFrontendParametersSatellite.Pilot_On : _("On"),
eDVBFrontendParametersSatellite.Pilot_Off : _("Off")}.get(tp.get("pilot"))
ret["pls_mode"] = {
eDVBFrontendParametersSatellite.PLS_Root : _("Root"),
eDVBFrontendParametersSatellite.PLS_Gold : _("Gold"),
eDVBFrontendParametersSatellite.PLS_Combo : _("Combo"),
eDVBFrontendParametersSatellite.PLS_Unknown : _("Unknown")}.get(tp.get("pls_mode"))
else:
ret["pls_mode"] = None
ret["is_id"] = None
ret["pls_code"] = None
elif tunertype == "DVB-C":
ret["tuner_type"] = _("Cable")
ret["modulation"] = {
eDVBFrontendParametersCable.Modulation_Auto: _("Auto"),
eDVBFrontendParametersCable.Modulation_QAM16 : "QAM16",
eDVBFrontendParametersCable.Modulation_QAM32 : "QAM32",
eDVBFrontendParametersCable.Modulation_QAM64 : "QAM64",
eDVBFrontendParametersCable.Modulation_QAM128 : "QAM128",
eDVBFrontendParametersCable.Modulation_QAM256 : "QAM256"}.get(tp.get("modulation"))
ret["inversion"] = {
eDVBFrontendParametersCable.Inversion_Unknown : _("Auto"),
eDVBFrontendParametersCable.Inversion_On : _("On"),
eDVBFrontendParametersCable.Inversion_Off : _("Off")}.get(tp.get("inversion"))
ret["fec_inner"] = {
eDVBFrontendParametersCable.FEC_None : _("None"),
eDVBFrontendParametersCable.FEC_Auto : _("Auto"),
eDVBFrontendParametersCable.FEC_1_2 : "1/2",
eDVBFrontendParametersCable.FEC_2_3 : "2/3",
eDVBFrontendParametersCable.FEC_3_4 : "3/4",
eDVBFrontendParametersCable.FEC_5_6 : "5/6",
eDVBFrontendParametersCable.FEC_7_8 : "7/8",
eDVBFrontendParametersCable.FEC_8_9 : "8/9",
eDVBFrontendParametersCable.FEC_3_5 : "3/5",
eDVBFrontendParametersCable.FEC_4_5 : "4/5",
eDVBFrontendParametersCable.FEC_9_10 : "9/10"}.get(tp.get("fec_inner"))
ret["system"] = {
eDVBFrontendParametersCable.System_DVB_C_ANNEX_A : "DVB-C",
eDVBFrontendParametersCable.System_DVB_C_ANNEX_C : "DVB-C ANNEX C"}.get(tp.get("system"))
elif tunertype == "DVB-T":
ret["tuner_ty
|
pe"] = _("Terrestrial")
ret["bandwidth"] =
|
{
0 : _("Auto"),
10000000 : "10 MHz",
8000000 : "8 MHz",
7000000 : "7 MHz",
6000000 : "6 MHz",
5000000 : "5 MHz",
1712000 : "1.712 MHz"}.get(tp.get("bandwidth"))
ret["code_rate_lp"] = {
eDVBFrontendParametersTerrestrial.FEC_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.FEC_1_2 : "1/2",
eDVBFrontendParametersTerrestrial.FEC_2_3 : "2/3",
eDVBFrontendParametersTerrestrial.FEC_3_4 : "3/4",
eDVBFrontendParametersTerrestrial.FEC_5_6 : "5/6",
eDVBFrontendParametersTerrestrial.FEC_6_7 : "6/7",
eDVBFrontendParametersTerrestrial.FEC_7_8 : "7/8",
eDVBFrontendParametersTerrestrial.FEC_8_9 : "8/9"}.get(tp.get("code_rate_lp"))
ret["code_rate_hp"] = {
eDVBFrontendParametersTerrestrial.FEC_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.FEC_1_2 : "1/2",
eDVBFrontendParametersTerrestrial.FEC_2_3 : "2/3",
eDVBFrontendParametersTerrestrial.FEC_3_4 : "3/4",
eDVBFrontendParametersTerrestrial.FEC_5_6 : "5/6",
eDVBFrontendParametersTerrestrial.FEC_6_7 : "6/7",
eDVBFrontendParametersTerrestrial.FEC_7_8 : "7/8",
eDVBFrontendParametersTerrestrial.FEC_8_9 : "8/9"}.get(tp.get("code_rate_hp"))
ret["constellation"] = {
eDVBFrontendParametersTerres
|
mjoblin/netdumplings
|
tests/console/test_sniff.py
|
Python
|
mit
| 20,005
| 0
|
import asyncio
import builtins
import importlib.util
import json
import logging
import types
import asynctest
import click.testing
import pytest
from netdumplings.console.sniff import (
sniff_cli, get_valid_chefs, network_sniffer, dumpling_emitter,
send_dumplings_from_queue_to_hub,
)
class TestSniffCLI:
"""
Test the sniff_cli() function.
"""
def test_default_case(self, mocker):
# We exit the infinite loop by faking the death of the sniffer process.
mock_sniffer_process = mocker.Mock()
mock_sniffer_process.is_alive.side_effect = [True, True, False, False]
mock_dumpling_emitter_process = mocker.Mock()
mock_dumpling_emitter_process.is_alive.return_value = True
mock_queue = mocker.patch('multiprocessing.Queue')
mock_configure_logging = mocker.patch(
'netdumplings.console.sniff.configure_logging'
)
mocker.patch('netdumplings.console.sniff.sleep')
mock_process = mocker.patch(
'multiprocessing.Process',
side_effect=[mock_sniffer_process, mock_dumpling_emitter_process],
)
runner = click.testing.CliRunner()
result = runner.invoke(
sniff_cli,
[
'--kitchen-name', 'test_kitchen',
'--hub', 'test_hub:5000',
'--interface', 'test_interface',
'--filter', 'test_filter',
'--chef-module', 'netdumplings.dumplingchefs',
'--chef', 'ARPChef',
'--poke-interval', 10,
],
)
mock_configure_logging.assert_called_once()
# Check that the sniffer & dumpling emitter processes were created and
# started.
assert mock_process.call_count == 2
mock_process.assert_any_call(
target=network_sniffer,
args=(
'test_kitchen',
'test_interface',
('ARPChef',),
('netdumplings.dumplingchefs',),
{
'netdumplings.dumplingchefs': ['ARPChef'],
},
'test_filter',
10.0,
mock_queue.return_value,
)
)
mock_process.assert_any_call(
target=dumpling_emitter,
args=(
'test_kitchen',
'test_hub:5000',
mock_queue.return_value,
{
'kitchen_name': 'test_kitchen',
'interface': 'test_interface',
'filter': 'test_filter',
'chefs': ['netdumplings.dumplingchefs.ARPChef'],
'poke_interval': 10.0,
},
),
)
# We exited the infinite loop by faking the end of the sniffer process.
# This means we should have called terminate() on the emitter process.
mock_sniffer_process.start.assert_called_once()
assert mock_sniffer_process.terminate.call_count == 0
mock_dumpling_emitter_process.start.assert_called_once()
mock_dumpling_emitter_process.terminate.assert_called_once()
assert result.exit_code == 0
def test_no_valid_chefs(self, mocker):
"""
Test that no valid chefs results in an error log and an exit code of 1.
"""
mocker.patch(
'netdumplings.console.sniff.get_valid_chefs',
return_value={},
)
logger = logging.getLogger('netdumplings.console.sniff')
        mock_error = mocker.patch.object(logger, 'error')
runner = click.testing.CliRunner()
result = runner.invoke(
sniff_cli,
[
'--kitchen-name', 'test_kitchen',
],
)
mock_error.assert_called_once_with(
            'test_kitchen: No valid chefs found. Not starting sniffer.'
)
assert result.exit_code == 1
class TestSniffChefList:
"""
Test the chef_list() function.
"""
def test_chef_list(self, mocker):
"""
Test requesting a chef list.
"""
mock_list_chefs = mocker.patch(
'netdumplings.console.sniff.list_chefs'
)
runner = click.testing.CliRunner()
result = runner.invoke(
sniff_cli,
[
'--chef-list',
'--chef-module', 'testchefs.one',
'--chef-module', 'morechefs',
],
)
assert result.exit_code == 0
mock_list_chefs.assert_called_once_with(('testchefs.one', 'morechefs'))
class TestSniffNetworkSniffer:
"""
Test the network_sniffer() function.
"""
def test_network_sniffer(self, mocker):
"""
Test calling network_sniffer(). We pass in a single valid chef and
perform the following checks:
- The kitchen gets instantiated.
- The chef is instantiated, assigned to the kitchen, and given the
dumpling queue.
- The kitchen's run() method is called.
"""
# network_sniffer() uses the __import__ builtin to import chefs, so we
# need to patch that.
builtin_import = builtins.__import__
chef_class_callable = mocker.Mock()
def import_side_effect(*args, **kwargs):
if args[0] == 'chefmodule':
return types.SimpleNamespace(ChefName=chef_class_callable)
return builtin_import(*args, **kwargs)
mocker.patch.object(
builtins, '__import__', side_effect=import_side_effect
)
mock_dumpling_kitchen = mocker.patch('netdumplings.DumplingKitchen')
kitchen_name = 'test_kitchen'
interface = 'test_interface'
chefs = ''
chef_modules = ''
valid_chefs = {'chefmodule': ['ChefName']}
sniffer_filter = 'test_filter'
chef_poke_interval = 10
dumpling_queue = mocker.Mock()
network_sniffer(
kitchen_name, interface, chefs, chef_modules, valid_chefs,
sniffer_filter, chef_poke_interval, dumpling_queue,
)
# Check that the DumplingKitchen was instantiated and run() was called.
mock_dumpling_kitchen.assert_called_once_with(
name=kitchen_name,
interface=interface,
sniffer_filter=sniffer_filter,
chef_poke_interval=chef_poke_interval,
dumpling_queue=dumpling_queue,
)
mock_dumpling_kitchen.return_value.run.assert_called_once()
chef_class_callable.assert_called_once_with(
kitchen=mock_dumpling_kitchen.return_value,
)
def test_network_sniffer_with_module_and_file_chefs(self, mocker):
"""
Test calling network_sniffer() with one valid chef from a module and
another valid chef from a file. We just check that both __import__
and importlib.util.spec_from_file_location get called once each.
"""
# network_sniffer() uses the __import__ builtin to import chefs, so we
# need to patch that.
builtin_import = builtins.__import__
module_chef_callable = mocker.Mock()
file_chef_callable = mocker.Mock()
def import_side_effect(*args, **kwargs):
if args[0] == 'chefmodule':
return types.SimpleNamespace(
ChefNameFromModule=module_chef_callable
)
return builtin_import(*args, **kwargs)
mocker.patch.object(
builtins, '__import__', side_effect=import_side_effect
)
mocker.patch.object(
importlib.util,
'module_from_spec',
return_value=types.SimpleNamespace(
ChefNameFromFile=file_chef_callable
)
)
mocker.patch.object(importlib.util, 'spec_from_file_location')
kitchen_name = 'test_kitchen'
interface = 'test_interface'
chefs = ''
chef_modules = ''
valid_chefs = {
'chefmodule': ['ChefNameFromModule'],
'tests/data/chefs_in_a_file.py': ['ChefNameFromFile'],
}
|
architecture-building-systems/CEAforArcGIS
|
cea/technologies/chiller_absorption.py
|
Python
|
mit
| 20,692
| 0.004253
|
"""
Absorption chillers
"""
import cea.config
import cea.inputlocator
import pandas as pd
import numpy as np
from math import log, ceil
import sympy
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
from cea.analysis.costs.equations import calc_capex_annualized, calc_opex_annualized
__author__ = "Shanshan Hsieh"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Shanshan Hsieh"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
# technical model
def calc_chiller_main(mdot_chw_kgpers, T_chw_sup_K, T_chw_re_K, T_hw_in_C, T_ground_K, absorption_chiller):
"""
    This model calculates the operating conditions of the absorption chiller given the chilled water load in the
    evaporator and the hot water inlet temperature in the generator (desorber).
    This is an empirical model using the characteristic equation method developed by _[Kuhn A. & Ziegler F., 2005].
    The parameters of each absorption chiller can be derived from experiments or from performance curves in the
    manufacturer's catalog; more details are described in _[Puig-Arnavat M. et al, 2010].
Assumptions: constant external flow rates (chilled water at the evaporator, cooling water at the condenser and
absorber, hot water at the generator).
:param mdot_chw_kgpers: required chilled water flow rate
:type mdot_chw_kgpers: float
:param T_chw_sup_K: required chilled water supply temperature (outlet from the evaporator)
:type T_chw_sup_K: float
:param T_chw_re_K: required chilled water return temperature (inlet to the evaporator)
:type T_chw_re_K: float
:param T_hw_in_C: hot water inlet temperature to the generator
:type T_hw_in_C: float
:param T_ground_K: ground temperature
:type T_ground_K: float
:param locator: locator class
:return:
..[Kuhn A. & Ziegler F., 2005] Operational results of a 10kW absorption chiller and adaptation of the characteristic
    equation. In: Proceedings of the international conference solar air conditioning. Bad Staffelstein, Germany: 2005.
..[Puig-Arnavat M. et al, 2010] Analysis and parameter identification for characteristic equations of single- and
double-effect absorption chillers by means of multivariable regression. Int J Refrig: 2010.
"""
chiller_prop = absorption_chiller.chiller_prop # get data from the class
# create a dict of input operating conditions
input_conditions = {'T_chw_sup_K': T_chw_sup_K,
'T_chw_re_K': T_chw_re_K,
'T_hw_in_C': T_hw_in_C,
'T_ground_K': T_ground_K}
mcp_chw_WperK = mdot_chw_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK
q_chw_total_W = mcp_chw_WperK * (T_chw_re_K - T_chw_sup_K)
if np.isclose(q_chw_total_W, 0.0):
wdot_W = 0.0
q_cw_W = 0.0
q_hw_W = 0.0
T_hw_out_C = np.nan
EER = 0.0
input_conditions['q_chw_W'] = 0.0
else:
min_chiller_size_W = min(chiller_prop['cap_min'].values)
max_chiller_size_W = max(chiller_prop['cap_max'].values)
# get chiller properties and input conditions according to load
if q_chw_total_W < min_chiller_size_W:
# get chiller property according to load
chiller_prop = chiller_prop[chiller_prop['cap_min'] == min_chiller_size_W]
# operate at minimum load
number_of_chillers_activated = 1.0 # only activate one chiller
input_conditions['q_chw_W'] = chiller_prop['cap_min'].values # minimum load
elif q_chw_total_W <= max_chiller_size_W:
# get chiller property according to load
chiller_prop = chiller_prop[(chiller_prop['cap_min'] <= q_chw_total_W) &
(chiller_prop['cap_max'] >= q_chw_total_W)]
# operate one chiller at the cooling load
number_of_chillers_activated = 1.0 # only activate one chiller
input_conditions['q_chw_W'] = q_chw_total_W # operate at the chilled water load
else:
# get chiller property according to load
chiller_prop = chiller_prop[chiller_prop['cap_max'] == max_chiller_size_W]
# distribute loads to multiple chillers
number_of_chillers_activated = q_chw_total_W / max_chiller_size_W
# operate at maximum load
input_conditions['q_chw_W'] = max(chiller_prop['cap_max'].values)
absorption_chiller.update_data(chiller_prop)
operating_conditions = calc_operating_conditions(absorption_chiller, input_conditions)
# calculate chiller outputs
wdot_W = calc_power_demand(input_conditions['q_chw_W'], chiller_prop) * number_of_chillers_activated
q_cw_W = operating_conditions['q_cw_W'] * number_of_chillers_activated
q_hw_W = operating_conditions['q_hw_W'] * number_of_chillers_activated
T_hw_out_C = operating_conditions['T_hw_out_C']
EER = q_chw_total_W / (q_hw_W + wdot_W)
if T_hw_out_C < 0.0 :
print ('T_hw_out_C = ', T_hw_out_C, ' incorrect condition, check absorption chiller script.')
chiller_operation = {'wdot_W': wdot_W, 'q_cw_W': q_cw_W, 'q_hw_W': q_hw_W, 'T_hw_out_C': T_hw_out_C,
'q_chw_W': q_chw_total_W, 'EER': EER}
return chiller_operation
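# Numerical sanity check of the EER definition above (illustrative figures only, not
# validated plant data): for a chilled water load q_chw_total_W of 100 kW, generator
# heat q_hw_W of 120 kW and electrical demand wdot_W of 5 kW,
# EER = 100e3 / (120e3 + 5e3) = 0.8, a typical value for a single-effect machine.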
def calc_operating_conditions(absorption_chiller, input_conditions):
"""
Calculates chiller operating conditions at given input conditions by solving the characteristic equations and the
energy balance equations. This method is adapted from _[Kuhn A. & Ziegler F., 2005].
The heat rejection to cooling tower is approximated with the energy balance:
Q(condenser) + Q(absorber) = Q(generator) + Q(evaporator)
:param AbsorptionChiller chiller_prop: parameters in the characteristic equations and the external flow rates.
:param input_conditions:
:type input_conditions: dict
    :return: a dict with operating conditions of the chilled water, cooling water and hot water loops in an absorption
chiller.
To improve speed, the system of equations was solved using sympy for the output variable ``q_hw_kW`` which is
    then used to compute the remaining output variables. The following code was used to create the expression to
    calculate ``q_hw_kW`` with::
        # use symbolic computation to derive a formula for q_hw_kW:
        # first, make sure all the variables are sympy symbols:
T_chw_in_C, T_chw_out_C, T_cw_in_C, T_hw_in_C, mcp_cw_kWperK, mcp_hw_kWperK, q_chw_kW = sympy.symbols(
"T_chw_in_C, T_chw_out_C, T_cw_in_C, T_hw_in_C, mcp_cw_kWperK, mcp_hw_kWperK, q_chw_kW")
T_hw_out_C, T_cw_out_C, q_hw_kW = sympy.symbols('T_hw_out_C, T_cw_out_C, q_hw_kW')
a_e, a_g, e_e, e_g, r_e, r_g, s_e, s_g = sympy.symbols("a_e, a_g, e_e, e_g, r_e, r_g, s_e, s_g")
ddt_e, ddt_g = sympy.symbols("ddt_e, ddt_g")
# the system of equations:
eq_e = s_e * ddt_e + r_e - q_chw_kW
eq_ddt_e = ((T_hw_in_C + T_hw_out_C) / 2.0
+ a_e * (T_cw_in_C + T_cw_out_C) / 2.0
+ e_e * (T_chw_in_C + T_chw_out_C) / 2.0
- ddt_e)
eq_g = s_g * ddt_g + r_g - q_hw_kW
eq_ddt_g = ((T_hw_in_C + T_hw_out_C) / 2.0
+ a_g * (T_cw_in_C
+ T_cw_out_C) / 2.0
+ e_g * (T_chw_in_C + T_chw_out_C) / 2.0
- ddt_g)
eq_bal_g = (T_hw_in_C - T_hw_out_C) - q_hw_kW / mcp_hw_kWperK
# solve the system of equations with sympy
eq_sys = [eq_e, eq_g, eq_bal_g, eq_ddt_e, eq_ddt_g]
unknown_variables = (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g)
a, b = sympy.linear_eq_to_matrix(eq_sys, unknown_variables)
T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g = tuple(*sympy.linsolve(eq_sys, unknown_variables))
q_hw_kW.simplify()
..[Kuhn A. & Ziegler F., 2005] Operational results of a 10kW absorption chiller and adaptation of the characteristic
equation. In: Proceedings of the interan
|
TomBaxter/waterbutler
|
tests/tasks/test_move.py
|
Python
|
apache-2.0
| 6,692
| 0.001494
|
import sys
import copy
import time
import asyncio
import hashlib
from unittest import mock
import celery
import pytest
from waterbutler import tasks # noqa
from waterbutler.core import remote_logging
from waterbutler.core import utils as core_utils
from waterbutler.core.path import WaterButlerPath
import tests.utils as test_utils
# Hack to get the module, not the function
move = sys.modules['waterbutler.tasks.move']
FAKE_TIME = 1454684930.0
@pytest.fixture(autouse=True)
def patch_backend(monkeypatch):
monkeypatch.setattr(move.core.app, 'backend', None)
@pytest.fixture(autouse=True)
def callback(monkeypatch):
mock_request = test_utils.MockCoroutine(
return_value=mock.Mock(
status=200,
read=test_utils.MockCoroutine(
return_value=b'meowmeowmeow'
)
)
)
monkeypatch.setattr(core_utils, 'send_signed_request', mock_request)
return mock_request
@pytest.fixture
def mock_time(monkeypatch):
mock_time = mock.Mock(return_value=FAKE_TIME)
monkeypatch.setattr(time, 'time', mock_time)
@pytest.fixture
def src_path():
return WaterButlerPath('/user/bin/python')
@pytest.fixture
def dest_path():
return WaterButlerPath('/usr/bin/golang')
@pytest.fixture(scope='function')
def src_provider():
p = test_utils.MockProvider()
p.move.return_value = (test_utils.MockFileMetadata(), True)
p.auth['callback_url'] = 'src_callback'
return p
@pytest.fixture(scope='function')
def dest_provider():
p = test_utils.MockProvider()
p.move.return_value = (test_utils.MockFileMetadata(), True)
p.auth['callback_url'] = 'dest_callback'
return p
@pytest.fixture(scope='function')
def providers(monkeypatch, src_provider, dest_provider):
def make_provider(name=None, **kwargs):
if name == 'src':
return src_provider
if name == 'dest':
return dest_provider
        raise ValueError('Unexpected provider')
monkeypatch.setattr(move.utils, 'make_provider', make_provider)
return src_provider, dest_provider
@pytest.fixture(autouse=True)
def log_to_keen(monkeypatch):
mock_log_to_keen = test_utils.MockCoroutine()
monkeypatch.setattr(remote_logging, 'log_to_keen', mock_log_to_keen)
return mock_log_to_keen
@pytest.fixture
def src_bundle(src_path):
return {
'nid': 'mst3k',
'path': src_path,
'provider': {
'name': 'src',
'auth': {
'callback_url': '',
},
'settings': {},
'credentials': {},
}
}
@pytest.fixture
def dest_bundle(dest_path):
return {
'nid': 'fbi4u',
'path': dest_path,
'provider': {
'name': 'dest',
'auth': {
'callback_url': '',
},
'settings': {},
'credentials': {},
}
}
@pytest.fixture
def bundles(src_bundle, dest_bundle):
return src_bundle, dest_bundle
class TestMoveTask:
def test_move_calls_move(self, event_loop, providers, bundles):
src, dest = providers
src_bundle, dest_bundle = bundles
move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle))
assert src.move.called
src.move.assert_called_once_with(dest, src_bundle['path'], dest_bundle['path'])
def test_is_task(self):
assert callable(move.move)
assert isinstance(move.move, celery.Task)
assert not asyncio.iscoroutine(move.move)
assert asyncio.iscoroutinefunction(move.move.adelay)
def test_imputes_exceptions(self, event_loop, providers, bundles, callback):
src, dest = providers
src_bundle, dest_bundle = bundles
src.move.side_effect = Exception('This is a string')
with pytest.raises(Exception):
move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle))
(method, url, data), _ = callback.call_args_list[0]
assert src.move.called
src.move.assert_called_once_with(dest, src_bundle['path'], dest_bundle['path'])
assert method == 'PUT'
assert data['errors'] == ["Exception('This is a string',)"]
assert url == 'dest_callback'
def test_return_values(self, event_loop, providers, bundles, callback, src_path, dest_path, mock_time):
src, dest = providers
src_bundle, dest_bundle = bundles
metadata = test_utils.MockFileMetadata()
src.move.return_value = (metadata, False)
ret1, ret2 = move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle))
assert (ret1, ret2) == (metadata, False)
(method, url, data), _ = callback.call_args_list[0]
assert method == 'PUT'
assert url == 'dest_callback'
assert data['action'] == 'move'
assert data['auth'] == {'callback_url': 'dest_callback'}
assert data['email'] == False
assert data['errors'] == []
assert data['time'] == FAKE_TIME + 60
assert data['source'] == {
'nid': 'mst3k',
'resource': 'mst3k',
'path': '/' + src_path.raw_path,
'name': src_path.name,
'materialized': str(src_path),
'provider': src.NAME,
'kind': 'file',
'extra': {},
}
assert data['destination'] == {
'nid': 'fbi4u',
'resource': 'fbi4u',
'path': metadata.path,
'name': metadata.name,
'materialized': metadata.path,
'provider': dest.NAME,
'kind': 'file',
'contentType': metadata.content_type,
'etag': hashlib.sha256(
'{}::{}'.format(metadata.provider, metadata.etag)
.encode('utf-8')
).hexdigest(),
'extra': metadata.extra,
'modified': metadata.modified,
'modified_utc': metadata.modified_utc,
'created_utc': metadata.created_utc,
'size': metadata.size,
}
def test_starttime_override(self, event_loop, providers, bundles, callback, mock_time):
src, dest = providers
src_bundle, dest_bundle = bundles
stamp = FAKE_TIME
move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle), start_time=stamp-100)
move.move(copy.deepcopy(src_bundle), copy.deepcopy(dest_bundle), start_time=stamp+100)
(_, _, data), _ = callback.call_args_list[0]
assert data['email'] is True
assert data['time'] == 60 + stamp
(_, _, data), _ = callback.call_args_list[1]
assert data['email'] is False
assert data['time'] == 60 + stamp
|
kdheepak/psst
|
psst/case/matpower/reader.py
|
Python
|
mit
| 1,747
| 0.006869
|
from __future__ import print_function, absolute_import
import re
import logging
import numpy as np
from ...utils import int_else_float_except_string
logging.basicConfig()
logger = logging.getLogger(__file__)
def find_name(string):
return re.search('function\s*mpc\s*=\s*(?P<data>.*?)\n', string).groupdict()['data']
def find_attributes(string):
pattern = 'mpc\.(?P<attribute>.*?)\s*=\s*'
return re.findall(pattern, string, re.DOTALL)
def parse_file(attribute, string):
match = search_file(attribute, string)
if match is not None:
match = match.strip("'").strip('"')
_list = list()
for line in match.splitlines():
line = line.split('%')[0]
line = line.replace(';', '')
if line.strip():
if attribute == 'bus_name':
_list.append([line.strip().strip("'")])
else:
_list.append([int_else_float_except_string(s) for s in line.strip().split()])
return _list
else:
return match
def search_file(attribute, string):
if attribute in ['gen', 'gencost', 'bus', 'branch']:
pattern = r'mpc\.{}\s*=\s*\[[\n]?(?P<data>.*?)[\n]?\];'.format(attribute)
elif attribute in ['version', 'baseMVA']:
        pattern = r'mpc\.{}\s*=\s*(?P<data>.*?);'.format(attribute)
elif attribute == 'bus_name':
pattern = r'mpc\.{}\s*=\s*\{{[\n]?(?P<data>.*?)[\n]?\}};'.format('bus_name')
else:
logger.warning('Unable to parse mpc.%s. Please contact the developer.', attribute)
return None
match = re.search(pattern, string, re.DOTALL)
if match is not None:
return match.groupdict().get('data', None)
else:
return match
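# Minimal illustration of the parsing helpers above (hypothetical MATPOWER snippet,
# added for clarity and not part of the original module):
#     s = "function mpc = case3\nmpc.baseMVA = 100;\nmpc.bus = [\n1 3 0 0;\n];"
#     find_name(s)              -> 'case3'
#     find_attributes(s)        -> ['baseMVA', 'bus']
#     search_file('baseMVA', s) -> '100'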
|
zlalanne/msp430-webcontrol
|
msp430webcontrol/tcp_comm/views.py
|
Python
|
bsd-3-clause
| 2,798
| 0.006076
|
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from msp430.models import *
from tcp_comm.client import push_config
from msp430.models import MSP430
import json
@csrf_exempt
def register(request):
"""When an MSP430 connects to the TCP server this is called"""
if request.method == 'POST':
try:
jreq = json.loads(request.body.decode('UTF-8'))['json']
except:
return HttpResponseBadRequest('Unable to parse post json key',
mimetype='application/json')
# Verify fields exist
if 'mac' not in jreq or 'ip' not in jreq or 'iface' not in jreq:
return HttpResponseBadRequest('Does not have required fields',
mimetype='application/json')
# Update MSP430 model
default_name = "{0} = {1}".format(jreq['mac'], jreq['ip'])
msp430_db, created = MSP430.objects.get_or_create(mac_address=jreq['mac'],
defaults={'current_ip':jreq['ip'],
'name':default_name})
msp430_db.current_ip = jreq['ip']
msp430_db.online = True
msp430_db.save()
def update_iface(model_cls, index_name):
if index_name in jreq['iface']:
for iface in jreq['iface'][index_name]:
iface_db, created = model_cls.objects.get_or_create(msp430=msp430_db, name=iface['name'],
defaults={'io_type':iface['io_type']})
iface_db.description = iface['desc']
iface_db.possible_choices = json.dumps(iface['choices'])
iface_db.save()
# Update referring interface models
update_iface(MSP430ReadInterface, 'read')
update_iface(MSP430WriteInterface, 'write')
# Send configs to the MSP430
push_config(msp430_db)
else:
return HttpResponse('Not a POST', mimetype='application/json')
    return HttpResponse('ok', mimetype='application/json')
@csrf_exempt
def disconnect(request):
if request.method == 'POST':
try:
            jreq = json.loads(request.body.decode('UTF-8'))['json']
except:
return HttpResponseBadRequest('Unable to parse post json key', mimetype='application/json')
# verify fields exist
if 'mac' not in jreq:
return HttpResponseBadRequest('Does not have required fields - mac', mimetype='application/json')
msp430 = MSP430.objects.get(mac_address=jreq['mac'])
msp430.online = False
msp430.save()
return HttpResponse('ok', mimetype='application/json')
|
cboling/SDNdbg
|
docs/old-stuff/pydzcvr/pydzcvr/tests/alltests.py
|
Python
|
apache-2.0
| 2,190
| 0.00137
|
#!/usr/bin/env python
'''
alltests.py - This module runs the automated tests in all the components.
To run specific test cases, pass one or more names of package/module names
on the command line which contain the test cases to be run.
Usage:
python AllTests.py - Runs all the unittests
    python AllTests.py mypackage.MyFile - Runs the tests in 'mypackage/MyFile'
@author: Chip Boling
@copyright: 2015 Boling Consulting Solutions. All rights reserved.
@license: Artistic License 2.0, http://opensource.org/licenses/Artistic-2.0
@contact: support@bcsw.net
@deffield updated: Updated
'''
import unittest as uTest
#import site
import sys
import logging
alltestnames = [
'mypackage.myTestModule',
]
if __name__ == '__main__':
# Configure logging
logging.basicConfig() # default level is WARN
print
print
    # If no arguments are given, all of the test cases are run.
if len(sys.argv) == 1:
testnames = alltestnames
verbosity = 2
logging.getLogger().setLevel(logging.INFO)
print 'Loading all Webware Tests...'
else:
testnames = sys.argv[1:]
# Turn up verbosity and logging level
verbosity = 3
logging.getLogger().setLevel(logging.DEBUG)
print 'Loading tests %s...' % testnames
tests = uTest.TestSuite()
# We could just use defaultTestLoader.loadTestsFromNames(),
# but it doesn't give a good error message when it cannot load a test.
# So we load all tests individually and raise appropriate exceptions.
for test in testnames:
try:
tests.addTest(uTest.defaultTestLoader.loadTestsFromName(test))
except Exception:
print 'ERROR: Skipping tests from "%s".' % test
            try: # just try to import the test after loading failed
__import__(test)
except ImportError:
print 'Could not import the test module.'
else:
print 'Could not load the test suite.'
from traceback import print_exc
print_exc()
print
print 'Running the tests...'
uTest.TextTestRunner(verbosity=verbosity).run(tests)
|
rockychen-dpaw/oim-cms
|
registers/utils.py
|
Python
|
apache-2.0
| 806
| 0
|
from django.contrib.admin import ModelAdmin
from django.utils.encoding import smart_text
class OimModelAdmin(ModelAdmin):
""" OimModelAdmin"""
    def has_module_permission(self, request):
        user = request.user
if user.is_superuser:
return True
if user.is_staff:
if user.groups.filter(name="OIM Staff").exists():
return True
return False
def smart_truncate(content, length=100, suffix='....(more)'):
"""Small function to truncate a string in a sensible way, sourced from:
http://stackoverflow.com/questions/250357/smart-truncate-in-python
"""
content = smart_text(content)
if len(content) <= length:
return content
else:
return ' '.join(content[:length + 1].split(' ')[0:-1]) + suffix
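# Example of the truncation behaviour (hypothetical input, added for illustration):
# smart_truncate('the quick brown fox jumps', length=15) drops the partial word and
# returns 'the quick brown....(more)'.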
|
burnpanck/traits
|
traits/tests/test_property_delete.py
|
Python
|
bsd-3-clause
| 730
| 0
|
"""
Unit tests to ensure that we can call reset_traits/delete on a
property trait (regression tests for Github issue #67).
"""
from traits import _py2to3
from traits.api import Any, HasTraits, Int, Property, TraitError
from traits.testing.unittest_tools import unittest
class E(HasTraits):
a = Property(Any)
b = Property(Int)
class TestPropertyDelete(unittest.TestCase):
def test_property_delete(self):
e = E()
with self.assertRaises(TraitError):
del e.a
with self.assertRaises(TraitError):
del e.b
def test_property_reset_traits(self):
e = E()
        unresetable = e.reset_traits()
        _py2to3.assertCountEqual(self, unresetable, ['a', 'b'])
|
Saevon/DMP-Career-Share
|
server.py
|
Python
|
mit
| 3,413
| 0.002344
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from functools import wraps
import bottle
import datetime
import json
import os
from error import DMPException
from profile import ProfileHandler
app = bottle.Bottle()
def json_return(func):
@wraps(func)
def wrapper(*args, **kwargs):
data = json.dumps(func(*args, **kwargs), **app.config['json'])
data = data.replace(' ', ' ').replace('\n', '<br/>')
        return data
return wrapper
@app.route('/favicon.ico')
def favicon():
bottle.response.status = 404
@app.route('/')
@json_return
def root():
return {
'status': 200,
'data': "Welcome, go to '/profile_name' to sync you
|
r profile>"
}
@app.route('/<name>')
@json_return
def sync(name):
'''
Shows the data in the root folder
'''
try:
profile_handler.merge_profile(name)
except KeyError as err:
return {
'status': 404,
'error': 'Profile not Found',
}
except DMPException as err:
trace = getattr(err, 'trace', 'NO TRACE')
print trace
return {
'status': 500,
'error': 'Server Error',
}
print "%s: user requested merge" % name
return {
'status': 200,
'data': "Profile %s updated" % name,
'time_server': datetime.datetime.now().strftime('%b %d %Y at %H:%M'),
'time_utc': datetime.datetime.utcnow().strftime('%b %d %Y at %H:%M UTC'),
}
##################################################
# Settings & Startup
##################################################
app.config.update({
'debug': False,
'host': '0.0.0.0',
'port': 7070,
'quiet': True,
'json': {
'sort_keys': True,
'indent': 4,
},
})
from optparse import OptionParser
app_parser = OptionParser(usage="usage: %prog profile_path initial_path [options]")
app_parser.add_option(
"-p", "--port",
dest="port",
)
app_parser.add_option(
"-v", "--debug", "--verbose",
dest="debug",
action="store_true",
)
app_parser.add_option(
"-r", "--root",
dest="static_root",
action="store",
)
app_parser.add_option(
"-q", "--quiet",
dest="debug",
action="store_false",
)
app_parser.add_option(
"--host",
dest="host",
action="store",
)
app_parser.add_option(
"--open",
dest="host",
action="store_const",
const="0.0.0.0",
)
def parse_options():
'''
Reads any commandline options, returning a final dict of options
'''
(options, args) = app_parser.parse_args()
if len(args) != 2:
app_parser.error("Both profile_path and initial_path are required")
# Remove any unset options, using the defaults defined earlier instead
options = vars(options)
options = dict((key, options[key]) for key in options if options[key] is not None)
options['path'] = os.path.abspath(args[0])
options['initial_path'] = os.path.abspath(args[1])
return options
if __name__ == '__main__':
options = parse_options()
app.config.update(options)
profile_handler = ProfileHandler(options['path'], options['initial_path'], enable_timer=False)
# Debug only settings go here
if app.config["debug"]:
bottle.debug(True)
app.config.update({
'reloader': True,
'quiet': False,
})
print 'starting Server'
app.run(**app.config)
|
jessrosenfield/pants
|
tests/python/pants_test/option/test_option_value_container.py
|
Python
|
apache-2.0
| 3,394
| 0.004714
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import copy
import unittest
from pants.option.option_value_container import OptionValueContainer
from pants.option.ranked_value import RankedValue
class OptionValueContainerTest(unittest.TestCase):
def test_standard_values(self):
o = OptionValueContainer()
o.foo = 1
self.assertEqual(1, o.foo)
with self.assertRaises(AttributeError):
o.bar
def test_value_ranking(self):
o = OptionValueContainer()
o.foo = RankedValue(RankedValue.CONFIG, 11)
self.assertEqual(11, o.foo)
self.assertEqual(RankedValue.CONFIG, o.get_rank('foo'))
o.foo = RankedValue(RankedValue.HARDCODED, 22)
self.assertEqual(11, o.foo)
self.assertEqual(RankedValue.CONFIG, o.get_rank('foo'))
o.foo = RankedValue(RankedValue.ENVIRONMENT, 33)
self.assertEqual(33, o.foo)
self.assertEqual(RankedValue.ENVIRONMENT, o.get_rank('foo'))
o.foo = 44 # No explicit rank is assumed to be a FLAG.
self.assertEqual(44, o.foo)
self.assertEqual(RankedValue.FLAG, o.get_rank('foo'))
def test_is_flagged(self):
o = OptionValueContainer()
o.foo = RankedValue(RankedValue.NONE, 11)
self.assertFalse(o.is_flagged('foo'))
o.foo = RankedValue(RankedValue.CONFIG, 11)
self.assertFalse(o.is_flagged('foo'))
o.foo = RankedValue(RankedValue.ENVIRONMENT, 11)
self.assertFalse(o.is_flagged('foo'))
o.foo = RankedValue(RankedValue.FLAG, 11)
self.assertTrue(o.is_flagged('foo'))
def test_indexing(self):
o = OptionValueContainer()
o.foo = 1
self.assertEqual(1, o['foo'])
self.assertEqual(1, o.get('foo'))
self.assertEqual(1, o.get('foo', 2))
self.assertIsNone(o.get('unknown'))
self.assertEqual(2, o.get('unknown', 2))
with self.assertRaises(AttributeError):
o['bar']
def test_iterator(self):
o = OptionValueContainer()
o.a = 3
o.b = 2
o.c = 1
names = list(iter(o))
self.assertListEqual(['a', 'b', 'c'], names)
def test_copy(self):
    # copy semantics can get hairy when overriding __setattr__/__getattr__, so we test them.
    o = OptionValueContainer()
    o.foo = 1
    o.bar = {'a': 111}
p = copy.copy(o)
# Verify that the result is in fact a copy.
self.assertEqual(1, p.foo) # Has original attribute.
o.baz = 42
self.assertFalse(hasattr(p, 'baz')) # Does not have attribute added after the copy.
# Verify that it's a shallow copy by modifying a referent in o and reading it in p.
o.bar['b'] = 222
self.assertEqual({'a': 111, 'b': 222}, p.bar)
def test_deepcopy(self):
# copy semantics can get hairy when overriding __setattr__/__getattr__, so we test them.
o = OptionValueContainer()
o.foo = 1
o.bar = {'a': 111}
p = copy.deepcopy(o)
# Verify that the result is in fact a copy.
self.assertEqual(1, p.foo) # Has original attribute.
o.baz = 42
self.assertFalse(hasattr(p, 'baz')) # Does not have attribute added after the copy.
# Verify that it's a deep copy by modifying a referent in o and reading it in p.
o.bar['b'] = 222
self.assertEqual({'a': 111}, p.bar)
|
kiall/designate-py3
|
designate/objects/recordset.py
|
Python
|
apache-2.0
| 8,399
| 0
|
# Copyright (c) 2014 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from copy import deepcopy
from designate import exceptions
from designate import utils
from designate.objects import base
from designate.objects.validation_error import ValidationError
from designate.objects.validation_error import ValidationErrorList
LOG = logging.getLogger(__name__)
class RecordSet(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
@property
def action(self):
# Return action as UPDATE if present. CREATE and DELETE are returned
# if they are the only ones.
action = 'NONE'
actions = {'CREATE': 0, 'DELETE': 0, 'UPDATE': 0, 'NONE': 0}
for record in self.records:
actions[record.action] += 1
if actions['CREATE'] != 0 and actions['UPDATE'] == 0 and \
actions['DELETE'] == 0 and actions['NONE'] == 0:
action = 'CREATE'
elif actions['DELETE'] != 0 and actions['UPDATE'] == 0 and \
actions['CREATE'] == 0 and actions['NONE'] == 0:
action = 'DELETE'
elif actions['UPDATE'] != 0 or actions['CREATE'] != 0 or \
actions['DELETE'] != 0:
action = 'UPDATE'
return action
@property
def managed(self):
managed = False
for record in self.records:
if record.managed:
return True
return managed
@property
def status(self):
# Return the worst status in order of ERROR, PENDING, ACTIVE
status = 'ACTIVE'
for record in self.records:
if (record.status == 'ERROR') or \
(record.status == 'PENDING' and status != 'ERROR') or \
(status != 'PENDING'):
status = record.status
return status
FIELDS = {
'shard': {
'schema': {
'type': 'integer',
'minimum': 0,
'maximum': 4095
}
},
'tenant_id': {
'schema': {
'type': 'string',
},
'read_only': True
},
'domain_id': {
'schema': {
'type': 'string',
'description': 'Zone identifier',
'format': 'uuid'
},
},
'name': {
'schema': {
'type': 'string',
'description': 'Zone name',
'format': 'domainname',
'maxLength': 255,
},
'immutable': True,
'required': True
},
'type': {
'schema': {
'type': 'string',
'description': 'RecordSet type (TODO: Make types extensible)',
'enum': ['A', 'AAAA', 'CNAME', 'MX', 'SRV', 'TXT', 'SPF', 'NS',
'PTR', 'SSHFP', 'SOA']
},
'required': True,
'immutable': True
},
'ttl': {
'schema': {
'type': ['integer', 'null'],
'description': 'Default time to live',
'minimum': 0,
'maximum': 2147483647
},
},
'description': {
'schema': {
'type': ['string', 'null'],
'maxLength': 160
},
},
'records': {
'relation': True,
'relation_cls': 'RecordList'
},
# TODO(graham): implement the polymorphic class relations
# 'records': {
# 'polymorphic': 'type',
# 'relation': True,
# 'relation_cls': lambda type_: '%sList' % type_
# },
}
def validate(self):
errors = ValidationErrorList()
# Get the right classes (e.g. A for Recordsets with type: 'A')
try:
record_list_cls = self.obj_cls_from_name('%sList' % self.type)
record_cls = self.obj_cls_from_name(self.type)
except KeyError as e:
e = ValidationError()
e.path = ['recordset', 'type']
e.validator = 'value'
e.validator_value = [self.type]
e.message = ("'%(type)s' is not a supported Record type"
% {'type': self.type})
# Add it to the list for later
errors.append(e)
raise exceptions.InvalidObject(
"Provided object does not match "
"schema", errors=errors, object=self)
# Get any rules that the record type imposes on the record
changes = record_cls.get_recordset_schema_changes()
old_fields = {}
if changes:
LOG.debug("Record %s is overriding the RecordSet schema with: %s" %
(record_cls.obj_name(), changes))
old_fields = deepcopy(self.FIELDS)
self.FIELDS = utils.deep_dict_merge(self.FIELDS, changes)
error_indexes = []
# Copy these for safekeeping
old_records = deepcopy(self.records)
# Blank the records for this object with the right list type
self.records = record_list_cls()
i = 0
for record in old_records:
record_obj = record_cls()
try:
record_obj._from_string(record.data)
                # The _from_string() method will throw a ValueError if there are not
                # enough data blobs
except ValueError as e:
# Something broke in the _from_string() method
# Fake a correct looking ValidationError() object
e = ValidationError()
e.path = ['records', i]
e.validator = 'format'
e.validator_value = [self.type]
e.message = ("'%(data)s' is not a '%(type)s' Record"
% {'data': record.data, 'type': self.type})
# Add it to the list for later
errors.append(e)
error_indexes.append(i)
else:
# Seems to have loaded right - add it to be validated by
# JSONSchema
self.records.append(record_obj)
i += 1
try:
# Run the actual validate code
super(RecordSet, self).validate()
except exceptions.InvalidObject as e:
# Something is wrong according to JSONSchema - append our errors
increment = 0
# This code below is to make sure we have the index for the record
# list correct. JSONSchema may be missing some of the objects due
            # to validation above, so this re-inserts them, and makes sure
# the index is right
for error in e.errors:
if len(error.path) > 1 and isinstance(error.path[1], int):
error.path[1] += increment
while error.path[1] in error_indexes:
increment += 1
error.path[1] += 1
# Add the list from above
e.errors.extend(errors)
# Raise the exception
raise e
else:
# If JSONSchema passes, but we found parsing errors,
# raise an exception
if len(errors) > 0:
raise exceptions.InvalidObject(
"Provided object does not match "
"schema", errors=errors, object=self)
finally:
if old_fields:
self.FIELDS = old_fields
# Send in the tradition
|
luchasei/http-log-app
|
tests/logEntryTests.py
|
Python
|
mit
| 2,432
| 0.00699
|
import unittest
from logEntry import LogEntry
class TestLogEntry(unittest.TestCase):
def test_parse_log_1(self):
        line = '188.45.108.168 - - [12/Dec/2015:19:44:09 +0100] "GET /images/stories/raith/almhuette_raith.jpg HTTP/1.1" 200 43300 "http://www.almhuette-raith.at/" "Mozilla/5.0 (Linux; Android 4.4.2; de-at; SAMSUNG GT-I9301I Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/1.5 Chrome/28.0.1500.94 Mobile Safari/537.36" "-"'
entry = LogEntry(line)
self.assertEqual(entry.clientIp,'188.45.108.168')
self.assertEqual(entry.clientId, '-')
self.assertEqual(entry.userName, '-')
self.assertEqual(entry.requestLine, 'GET /images/stories/raith/almhuette_raith.jpg HTTP/1.1')
self.assertEqual(entry.requestUrl, '/images/stories/raith/almhuette_raith.jpg')
self.assertEqual(entry.urlSection, '/images/')
self.assertEqual(entry.statusCode, 200)
self.assertEqual(entry.sizeBytes, 43300)
def test_parse_log_2(self):
line = 'hmu4.cs.auckland.ac.nz - - [09/Feb/2016:02:50:20 -0500] "GET /docs/GCDOAR/EnergyStar.html HTTP/1.0" 200 6829'
entry = LogEntry(line)
self.assertEqual(entry.clientIp, 'hmu4.cs.auckland.ac.nz')
self.assertEqual(entry.clientId, '-')
self.assertEqual(entry.userName, '-')
self.assertEqual(entry.requestLine, 'GET /docs/GCDOAR/EnergyStar.html HTTP/1.0')
self.assertEqual(entry.requestUrl, '/docs/GCDOAR/EnergyStar.html')
self.assertEqual(entry.urlSection, '/docs/')
self.assertEqual(entry.statusCode, 200)
self.assertEqual(entry.sizeBytes, 6829)
def test_parse_log_3(self):
line = '2607:f0d0:1002:0051:0000:0000:0000:0004 - - [23/Jan/2016:15:41:52 +0100] "POST /administrator/index.php HTTP/1.1" 200 "-" "-" "-" "-"'
entry = LogEntry(line)
self.assertEqual(entry.clientIp, '2607:f0d0:1002:0051:0000:0000:0000:0004')
self.assertEqual(entry.clientId, '-')
self.assertEqual(entry.userName, '-')
self.assertEqual(entry.requestLine, 'POST /administrator/index.php HTTP/1.1')
self.assertEqual(entry.requestUrl, '/administrator/index.php')
self.assertEqual(entry.urlSection, '/administrator/')
self.assertEqual(entry.statusCode, 200)
self.assertEqual(entry.sizeBytes, 0)
if __name__ == '__main__':
unittest.main()
|
hb9kns/PyBitmessage
|
src/bitmessageqt/safehtmlparser.py
|
Python
|
mit
| 5,318
| 0.006205
|
from HTMLParser import HTMLParser
import inspect
import re
from urllib import quote, quote_plus
from urlparse import urlparse
class SafeHTMLParser(HTMLParser):
# from html5lib.sanitiser
acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
'article', 'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button',
'canvas', 'caption', 'center', 'cite', 'code', 'col', 'colgroup',
'command', 'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn',
'dialog', 'dir', 'div', 'dl', 'dt', 'em', 'event-source', 'fieldset',
'figcaption', 'figure', 'footer', 'font', 'header', 'h1',
'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'ins',
'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map', 'menu', 'meter',
'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup', 'option',
'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong',
'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot',
'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video']
replaces_pre = [["&", "&"], ["\"", """], ["<", "<"], [">", ">"]]
replaces_post = [["\n", "<br/>"], ["\t", " "], [" ", " "], [" ", " "], ["<br/> ", "<br/> "]]
src_schemes = [ "data" ]
#uriregex1 = re.compile(r'(?i)\b((?:(https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?]))')
uriregex1 = re.compile(r'((https?|ftp|bitcoin):(?:/{1,3}|[a-z0-9%])(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
uriregex2 = re.compile(r'<a href="([^"]+)&')
emailregex = re.compile(r'\b([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,})\b')
@staticmethod
def replace_pre(text):
for a in SafeHTMLParser.replaces_pre:
text = text.replace(a[0], a[1])
return text
@staticmethod
def replace_post(text):
for a in SafeHTMLParser.replaces_post:
text = text.replace(a[0], a[1])
if len(text) > 1 and text[0] == " ":
text = " " + text[1:]
return text
def __init__(self, *args, **kwargs):
HTMLParser.__init__(self, *args, **kwargs)
self.reset_safe()
def reset_safe(self):
self.elements = set()
self.raw = u""
self.sanitised = u""
self.has_html = False
self.allow_picture = False
self.allow_external_src = False
def add_if_acceptable(self, tag, attrs = None):
if tag not in SafeHTMLParser.acceptable_elements:
return
self.sanitised += "<"
if inspect.stack()[1][3] == "handle_endtag":
self.sanitised += "/"
self.sanitised += tag
if attrs is not None:
for attr, val in attrs:
if tag == "img" and attr == "src" and not self.allow_picture:
val = ""
elif attr == "src" and not self.allow_external_src:
url = urlparse(val)
if url.scheme not in SafeHTMLParser.src_schemes:
val = ""
self.sanitised += " " + quote_plus(attr)
if not (val is None):
self.sanitised += "=\"" + val + "\""
if inspect.stack()[1][3] == "handle_startendtag":
self.sanitised += "/"
self.sanitised += ">"
def handle_starttag(self, tag, attrs):
if tag in SafeHTMLParser.acceptable_elements:
self.has_html = True
self.add_if_acceptable(tag, attrs)
def handle_endtag(self, tag):
self.add_if_acceptable(tag)
    def handle_startendtag(self, tag, attrs):
if tag in SafeHTMLParser.acceptable_elements:
self.has_html = True
self.add_if_acceptable(tag, attrs)
def handle_data(self, data):
self.sanitised += data
def handle_charref(self, name):
self.sanitised += "&#" + name + ";"
def handle_entityref(self, name):
self.sanitised += "&" + name + ";"
    def feed(self, data):
try:
data = unicode(data, 'utf-8')
except UnicodeDecodeError:
data = unicode(data, 'utf-8', errors='replace')
HTMLParser.feed(self, data)
tmp = SafeHTMLParser.replace_pre(data)
tmp = SafeHTMLParser.uriregex1.sub(
r'<a href="\1">\1</a>',
tmp)
tmp = SafeHTMLParser.uriregex2.sub(r'<a href="\1&', tmp)
tmp = SafeHTMLParser.emailregex.sub(r'<a href="mailto:\1">\1</a>', tmp)
tmp = SafeHTMLParser.replace_post(tmp)
self.raw += tmp
def is_html(self, text = None, allow_picture = False):
if text:
self.reset()
self.reset_safe()
self.allow_picture = allow_picture
self.feed(text)
self.close()
return self.has_html
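# Hedged usage sketch (not part of the original module): if this file is run
# directly, the parser above can be fed raw message text and then queried for
# the linkified plain text (.raw) and the sanitised HTML (.sanitised). The
# sample message below is an illustrative assumption.
if __name__ == "__main__":
    parser = SafeHTMLParser()
    parser.feed('Hello <b>world</b>, see http://example.com and <script>alert(1)</script>')
    parser.close()
    print(parser.is_html())   # True: <b> is an acceptable element
    print(parser.sanitised)   # the <script> tag itself is dropped, <b> is kept
    print(parser.raw)         # escaped copy with the URL wrapped in an <a href=...> link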
|
ImWalkinHere/Calculator
|
tests/test_operations.py
|
Python
|
lgpl-2.1
| 1,523
| 0.006566
|
"""Brief description of what this file should test"""
import pytest
from Calculator import operations
def test_addition():
assert operations.add(1, 2) == 3
def test_subtraction():
assert operations.subtract(1 ,2) == -1
def test_multiplication():
assert operations.multiply(2, -1) == -2
def test_divide():
# test for floating point division returns floating point
assert isinstance(operations.divide(3, 2), float)
assert operations.divide(8, 4) == 2
    # Check that DivisionByZeroError is raised
with pytest.raises(Exception):
operations.divide(1, 0)
@pytest.mark.parametrize("given, expected", [
(0, 0),
(-0.76, 0.76),
(1, 1),
])
def test_abs_val(given, expected):
assert operations.abs_val(given) == expected
@pytest.mark.parametrize("given, expected", [
(0, 0),
(.5, 0),
(-0.75, -1),
])
def test_floor(given, expected):
assert operations.floor(given) == expected
@pytest.mark.parametrize("given, expected", [
(0, 0),
(-1.5, -1),
(2.3, 3),
])
def test_ceiling(given, expected):
assert operations.ceiling(given) == expected
@pytest.mark.parametrize("given_a, given_b, expected", [
(0, 1, 0),
(3, 3, 27),
(-3, 0, 1),
(0, 0, 1),
])
def test_power(given_a, given_b, expected):
assert operations.power(given_a, given_b) == expected
@pytest.mark.parametrize("given, expected", [
(0.45, 0),
(-3.6, -3),
(3, 3),
])
def test_rounding(given, expected):
assert operations.rounding(given) == expected
|
DDT-INMEGEN/opendata
|
webservice.py
|
Python
|
agpl-3.0
| 1,056
| 0.012311
|
from flask import Flask, session, redirect, url_for, escape, request, jsonify
from flask.ext.pymongo import PyMongo
from pprint import pformat
import sys
import os
import re
app = Flask("download")
mongo = PyMongo(app)
local_repo = '/home/rgarcia/opendata'
server_repo = '/home/rgarcia/public_html'
############
# anuncios #
############
|
@app.route('/register', methods=['POST'])
def anuncio_save():
downloader = request.get_json()
try:
downloader.pop('email_repeat')
path = downloader['path']
oid = mongo.db.downloaders.save(downloader)
local_path = os.path.join( local_repo, path )
server_path = os.path.join( server_repo, str(oid) )
app.logger.debug(local_path)
app.logger.debug(server_path)
os.symlink(local_path, server_path)
return jsonify({ "oid": str(oid)} )
except:
raise
# return jsonify({ "status": "error",
# "message": pformat(sys.exc_info()[0]) })
if __name__ == '__main__':
app.run(debug=True)
|
avenet/hackerrank
|
find_the_robot_iterator.py
|
Python
|
mit
| 377
| 0.018568
|
def spiral_iterator():
    x, y = 0, 0
direction = [(1, 0), (0, 1), (-1, 0), (0, -1)]
i = 1
print x,y
while True:
dir_index = (i - 1) % 4
vector = i * direction[dir_index][0], i * direction[dir_index][1]
x += vector[0]
        y += vector[1]
print x,y
i += 1
raw_input()
result = spiral_iterator()
|
arshvin/scripts
|
zabbix/T_hdfs_space_checker/hdfs_space_metric_server.py
|
Python
|
apache-2.0
| 3,517
| 0.033551
|
import asyncore, socket, logging, time, asynchat, os
from hdfs_space_common import get_tree_from_cache, get_child_node, TreeNode
FORMAT = '%(asctime)-15s: %(levelname)s %(module)s - %(funcName)s: %(message)s'
logging.basicConfig(format=FORMAT, level=logging.WARNING)
class ChatHandler(asynchat.async_chat):
def __init__(self, sock):
asynchat.async_chat.__init__(self, sock = sock)
self.ibuffer = []
self.obuffer = ''
self.set_terminator("\n")
def collect_incoming_data(self, data):
self.ibuffer.append(data)
logging.info('Received data "%s"' % data)
def found_terminator(self):
self.handle_request()
def handle_request(self):
data = self.ibuffer.pop(0)
#Data should be like:
#metric:path|user|size
# OR
#db:new_path
command = data.split(":")[0]
if command == 'metric':
metric_args = data.split(":")[1].split('|')
hdfs_path = metric_args[0] if len(metric_args) > 0 else "/"
user_name = metric_args[1] if len(metric_args) > 1 else "ALL"
metric = metric_args[2] if len(metric_args) > 2 else "size"
logging.debug('metric_args: %s' % metric_args)
logging.debug('hdfs_path: %s' % hdfs_path)
logging.debug('user_name: %s' % user_name)
logging.debug('metric: %s' % metric)
result = 0
if user_name == "ALL" and metric == 'size':
                logging.warning('Rather than using this script, try the command "hdfs dfs -du /"')
elif user_name == "ALL" and metric == 'amount':
logging.info('Calculating the metric')
result = get_child_node(file_tree, hdfs_path).get_amount_for_all()
else:
if metric == "size":
logging.info('Calculating the metric')
result = get_child_node(file_tree, hdfs_path).get_size_by_user(user_name)
elif metric == "amount":
logging.info('Calculating the metric')
result = get_child_node(file_tree, hdfs_path).get_amount_by_user(user_name)
else:
logging.warning("The metric %s not implemented yet" % metric)
logging.info('The result is ready: %s. Pushing it to back' % result)
self.push(str(result))
return
elif command == 'db':
file_path = data.split(":")[1]
if os.path.exists(file_path):
global file_tree
file_tree = get_tree_from_cache(file_path)
os.rename(file_path,MetricServer.db_path)
                logging.info('File %s renamed to %s' % (file_path, MetricServer.db_path))
self.push('OK')
else:
logging.warning('File %s could not be found. Doing nothing' % file_path)
self.push('FAIL')
else:
            logging.warning("The command %s not implemented yet" % command)
self.push('FAIL')
class MetricServer(asyncore.dispatcher):
sock_path = '/tmp/hdfs_space.sock'
db_path = '/tmp/hdfs_space.data'
def __init__(self):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind(self.sock_path)
logging.info('Starting metric-server')
self.listen(5)
global file_tree
try:
file_tree = get_tree_from_cache(self.db_path)
except KeyError as e:
file_tree = TreeNode('')
def handle_accept(self):
pair = self.accept()
if pair is not None:
sock, addr = pair
logging.info('Incoming connection')
handler = ChatHandler(sock)
def handle_close(self):
self.close()
logging.info('The socket is closed')
def handle_expt(self):
logging.info("OOB detected for %s" % self)
if __name__ == '__main__':
file_tree = None
server = MetricServer()
try:
asyncore.loop()
finally:
if os.path.exists(server.sock_path):
os.unlink(server.sock_path)
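# Hedged client sketch (not part of the original script): the handler above
# expects newline-terminated requests of the form "metric:<path>|<user>|<metric>"
# or "db:<new_cache_path>" on the unix socket. The path and user below are
# illustrative assumptions.
import socket
def query_metric(hdfs_path="/user/example", user_name="example", metric="size",
                 sock_path="/tmp/hdfs_space.sock"):
    """Send one metric request and return the server's reply as a string."""
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(sock_path)
    client.sendall("metric:%s|%s|%s\n" % (hdfs_path, user_name, metric))
    reply = client.recv(4096)
    client.close()
    return reply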
|
bjornwallner/proq2-server
|
apps/modeller9v8/examples/python/steepest_descent.py
|
Python
|
gpl-3.0
| 1,980
| 0.000505
|
from modeller.optimizers import state_optimizer
class SteepestDescent(state_optimizer):
"""Very simple steepest descent optimizer, in Python"""
# Add options for our optimizer
_ok_keys = state_optimizer._ok_keys + ('min_atom_shift', 'min_e_diff',
'step_size', 'max_iterations')
def __init__(self, step_size=0.0001, min_atom_shift=0.01, min_e_diff=1.0,
max_iterations=None, **vars):
|
state_optimizer.__init__(self, step_size=step_size,
min_atom_shift=min_atom_shift,
min_e_diff=min_e_diff,
max_iterations=max_iterations, **vars)
def optimize(self, atmsel, **vars):
# Do normal optimization startup
        state_optimizer.optimize(self, atmsel, **vars)
# Get all parameters
alpha = self.get_parameter('step_size')
minshift = self.get_parameter('min_atom_shift')
min_ediff = self.get_parameter('min_e_diff')
maxit = self.get_parameter('max_iterations')
# Main optimization loop
state = self.get_state()
(olde, dstate) = self.energy(state)
while True:
for i in range(len(state)):
state[i] -= alpha * dstate[i]
(newe, dstate) = self.energy(state)
if abs(newe - olde) < min_ediff:
print "Finished at step %d due to energy criterion" % self.step
break
elif self.shiftmax < minshift:
print "Finished at step %d due to shift criterion" % self.step
break
elif maxit is not None and self.step >= maxit:
print "Finished at step %d due to step criterion" % self.step
break
if newe < olde:
alpha *= 2
else:
alpha /= 2
olde = newe
self.next_step()
self.finish()
|
goal/uwsgi
|
plugins/gevent/uwsgiplugin.py
|
Python
|
gpl-2.0
| 213
| 0
|
from distutils import sysconfig
NAME = 'gevent'
CFLAGS = [
    '-I' + sysconfig.get_python_inc(),
'-I' + sysconfig.get_python_inc(plat_specific=True)
]
LDFLAGS = []
LIBS = []
GCC_LIST = ['gevent', 'hooks']
|
jasonehines/mycroft-core
|
mycroft/client/enclosure/api.py
|
Python
|
gpl-3.0
| 6,761
| 0
|
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
from mycroft.messagebus.message import Message
from mycroft.util.log import getLogger
__author__ = 'jdorleans'
LOGGER = getLogger(__name__)
class EnclosureAPI:
"""
This API is intended to be used to interface with the hardware
that is running Mycroft. It exposes all possible commands which
can be sent to a Mycroft enclosure implementation.
Different enclosure implementations may implement this differently
and/or may ignore certain API calls completely. For example,
    the eyes_color() API might be ignored on a Mycroft that uses simple
LEDs which only turn on/off, or not at all on an implementation
where there is no face at all.
"""
def __init__(self, ws):
self.ws = ws
def reset(self):
"""The enclosure should restore itself to a started state.
Typically this would be represented by the eyes being 'open'
and the mouth reset to its default (smile or blank).
"""
        self.ws.emit(Message("enclosure.reset"))
    def system_reset(self):
        """The enclosure hardware should reset any CPUs, etc."""
self.ws.emit(Message("enclosure.system.reset"))
def system_mute(self):
"""Turn off the system microphone (not listening for wakeword)."""
self.ws.emit(Message("enclosure.system.mute"))
def system_unmute(self):
"""Turn the system microphone on (listening for wakeword)."""
self.ws.emit(Message("enclosure.system.unmute"))
def system_blink(self, times):
"""The 'eyes' should blink the given number of times.
Args:
times (int): number of times to blink
"""
self.ws.emit(Message("enclosure.system.blink", {'times': times}))
def eyes_on(self):
"""Illuminate or show the eyes."""
self.ws.emit(Message("enclosure.eyes.on"))
def eyes_off(self):
"""Turn off or hide the eyes."""
self.ws.emit(Message("enclosure.eyes.off"))
def eyes_blink(self, side):
"""Make the eyes blink
Args:
side (str): 'r', 'l', or 'b' for 'right', 'left' or 'both'
"""
self.ws.emit(Message("enclosure.eyes.blink", {'side': side}))
def eyes_narrow(self):
"""Make the eyes look narrow, like a squint"""
self.ws.emit(Message("enclosure.eyes.narrow"))
def eyes_look(self, side):
"""Make the eyes look to the given side
Args:
side (str): 'r' for right
'l' for left
'u' for up
'd' for down
'c' for crossed
"""
self.ws.emit(Message("enclosure.eyes.look", {'side': side}))
def eyes_color(self, r=255, g=255, b=255):
"""Change the eye color to the given RGB color
Args:
r (int): 0-255, red value
g (int): 0-255, green value
b (int): 0-255, blue value
"""
self.ws.emit(Message("enclosure.eyes.color",
{'r': r, 'g': g, 'b': b}))
def eyes_brightness(self, level=30):
"""Set the brightness of the eyes in the display.
Args:
level (int): 1-30, bigger numbers being brighter
"""
self.ws.emit(Message("enclosure.eyes.level", {'level': level}))
def eyes_reset(self):
"""Restore the eyes to their default (ready) state."""
self.ws.emit(Message("enclosure.eyes.reset"))
def eyes_timed_spin(self, length):
"""Make the eyes 'roll' for the given time.
Args:
length (int): duration in milliseconds of roll, None = forever
"""
self.ws.emit(Message("enclosure.eyes.timedspin",
{'length': length}))
def eyes_volume(self, volume):
"""Indicate the volume using the eyes
Args:
volume (int): 0 to 11
"""
self.ws.emit(Message("enclosure.eyes.volume", {'volume': volume}))
def mouth_reset(self):
"""Restore the mouth display to normal (blank)"""
self.ws.emit(Message("enclosure.mouth.reset"))
def mouth_talk(self):
"""Show a generic 'talking' animation for non-synched speech"""
self.ws.emit(Message("enclosure.mouth.talk"))
def mouth_think(self):
"""Show a 'thinking' image or animation"""
self.ws.emit(Message("enclosure.mouth.think"))
def mouth_listen(self):
"""Show a 'thinking' image or animation"""
self.ws.emit(Message("enclosure.mouth.listen"))
def mouth_smile(self):
"""Show a 'smile' image or animation"""
self.ws.emit(Message("enclosure.mouth.smile"))
def mouth_viseme(self, code):
"""Display a viseme mouth shape for synched speech
Args:
code (int): 0 = shape for sounds like 'y' or 'aa'
1 = shape for sounds like 'aw'
2 = shape for sounds like 'uh' or 'r'
3 = shape for sounds like 'th' or 'sh'
4 = neutral shape for no sound
5 = shape for sounds like 'f' or 'v'
6 = shape for sounds like 'oy' or 'ao'
"""
self.ws.emit(Message("enclosure.mouth.viseme", {'code': code}))
def mouth_text(self, text=""):
"""Display text (scrolling as needed)
Args:
text (str): text string to display
"""
self.ws.emit(Message("enclosure.mouth.text", {'text': text}))
def weather_display(self, img_code, temp):
"""Show a weather icon (deprecated)"""
self.ws.emit(Message("enclosure.weather.display",
{'img_code': img_code, 'temp': temp}))
def activate_mouth_events(self):
"""Enable movement of the mouth with speech"""
self.ws.emit(Message('enclosure.mouth.events.activate'))
def deactivate_mouth_events(self):
"""Disable movement of the mouth with speech"""
self.ws.emit(Message('enclosure.mouth.events.deactivate'))
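# Hedged usage sketch (not part of the original module): EnclosureAPI only
# needs an object exposing emit(), normally the Mycroft websocket client.
# FakeBus below is an illustrative stand-in so the calls can be exercised
# without a running message bus.
class FakeBus(object):
    def emit(self, message):
        print(message)
if __name__ == "__main__":
    enclosure = EnclosureAPI(FakeBus())
    enclosure.eyes_color(r=0, g=128, b=255)     # emits "enclosure.eyes.color"
    enclosure.mouth_text("Hello from Mycroft")  # emits "enclosure.mouth.text"
    enclosure.system_blink(times=2)             # emits "enclosure.system.blink"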
|
SciLifeLab/genologics
|
examples/get_samples2.py
|
Python
|
mit
| 756
| 0
|
"""Python interface to GenoLogics LIMS via its REST API.
Usage examples: Get some samples, and sample info.
Per Kraulis, Science for Life Laboratory, Stockholm, Sweden.
"""
from genologics.lims import *
from genologics.config import BASEURI, USERNAME, PASSWORD
lims = Lims(BASEURI, USERNAME, PASSWORD)
lims.check_version()
project = Project(lims, id='KRA61')
samples = lims.get_samples(projectlimsid=project.id)
print(len(samples), 'samples in', project)
for sample in samples:
print(sample, sample.name, sample.date_received, sample.artifact)
name = 'spruce_a'
artifacts = lims.get_artifacts(sample_name=name)
print(len(artifacts), 'artifacts for sample', name)
for artifact in artifacts:
print(artifact, artifact.name, artifact.qc_flag)
|
jawilson/home-assistant
|
tests/components/fritzbox/test_switch.py
|
Python
|
apache-2.0
| 5,502
| 0.000727
|
"""Tests for AVM Fritz!Box switch component."""
from datetime import timedelta
from unittest.mock import Mock
from requests.exceptions import HTTPError
from homeassistant.components.fritzbox.const import (
ATTR_STATE_DEVICE_LOCKED,
ATTR_STATE_LOCKED,
DOMAIN as FB_DOMAIN,
)
from homeassistant.components.sensor import (
ATTR_STATE_CLASS,
DOMAIN as SENSOR_DOMAIN,
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
)
from homeassistant.components.switch import DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_UNIT_OF_MEASUREMENT,
CONF_DEVICES,
ENERGY_KILO_WATT_HOUR,
POWER_WATT,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
import homeassistant.util.dt as dt_util
from . import FritzDeviceSwitchMock, setup_config_entry
from .const import CONF_FAKE_NAME, MOCK_CONFIG
from tests.common import async_fire_time_changed
ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}"
async def test_setup(hass: HomeAssistant, fritz: Mock):
"""Test setup of platform."""
device = FritzDeviceSwitchMock()
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
state = hass.states.get(ENTITY_ID)
assert state
assert state.state == STATE_ON
assert state.attributes[ATTR_FRIENDLY_NAME] == CONF_FAKE_NAME
assert state.attributes[ATTR_STATE_DEVICE_LOCKED] == "fake_locked_device"
assert state.attributes[ATTR_STATE_LOCKED] == "fake_locked"
assert ATTR_STATE_CLASS not in state.attributes
state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_temperature")
assert state
assert state.state == "1.23"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Temperature"
assert state.attributes[ATTR_STATE_DEVICE_LOCKED] == "fake_locked_device"
assert state.attributes[ATTR_STATE_LOCKED] == "fake_locked"
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
assert state.attributes[ATTR_STATE_CLASS] == STATE_CLASS_MEASUREMENT
state = hass.states.get(f"{ENTITY_ID}_humidity")
assert state is None
state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_power_consumption")
    assert state
assert state.state == "5.678"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Power Consumption"
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == POWER_WATT
assert state.attributes[ATTR_STATE_CLASS] == STATE_CLASS_MEASUREMENT
    state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_total_energy")
assert state
assert state.state == "1.234"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Total Energy"
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == ENERGY_KILO_WATT_HOUR
assert state.attributes[ATTR_STATE_CLASS] == STATE_CLASS_TOTAL_INCREASING
async def test_turn_on(hass: HomeAssistant, fritz: Mock):
"""Test turn device on."""
device = FritzDeviceSwitchMock()
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
assert await hass.services.async_call(
DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True
)
assert device.set_switch_state_on.call_count == 1
async def test_turn_off(hass: HomeAssistant, fritz: Mock):
"""Test turn device off."""
device = FritzDeviceSwitchMock()
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
assert await hass.services.async_call(
DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True
)
assert device.set_switch_state_off.call_count == 1
async def test_update(hass: HomeAssistant, fritz: Mock):
"""Test update without error."""
device = FritzDeviceSwitchMock()
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
assert fritz().update_devices.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert fritz().update_devices.call_count == 2
assert fritz().login.call_count == 1
async def test_update_error(hass: HomeAssistant, fritz: Mock):
"""Test update with error."""
device = FritzDeviceSwitchMock()
fritz().update_devices.side_effect = HTTPError("Boom")
assert not await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
assert fritz().update_devices.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert fritz().update_devices.call_count == 2
assert fritz().login.call_count == 2
async def test_assume_device_unavailable(hass: HomeAssistant, fritz: Mock):
"""Test assume device as unavailable."""
device = FritzDeviceSwitchMock()
device.voltage = 0
device.energy = 0
device.power = 0
assert await setup_config_entry(
hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
)
state = hass.states.get(ENTITY_ID)
assert state
assert state.state == STATE_UNAVAILABLE
|
wazari972/WebAlbums
|
WebAlbums-FS/WebAlbums-Utils/Photowall/photowall.py
|
Python
|
gpl-3.0
| 15,846
| 0.023224
|
#!/usr/bin/env python
import os
import tempfile
import pipes
import subprocess
import time
import random
import shutil
try:
from wand.image import Image
from wand.display import display
except ImportError as e:
# cd /usr/lib/
# ln -s libMagickWand-6.Q16.so libMagickWand.so
print("Couldn't import Wand package.")
print("Please refer to #http://dahlia.kr/wand/ to install it.")
import traceback; traceback.print_exc()
raise e
try:
import magic
mime = magic.Magic()
except ImportError:
mime = None
#https://github.com/ahupp/python-magic
try:
from docopt import docopt
except ImportError:
print("Couldn't import Docopt package.")
    print("Please refer to #https://github.com/docopt/docopt to install it.")
print("/!\\ Option parsing not possible, defaulting to hardcoded values/!\\")
def to_bool(val):
if val is None:
            return False
return val == 1
def to_int(val):
return int(val)
def to_str(val):
return val
def to_path(val):
return val
OPT_TO_KEY = {
'--do-wrap' : ("DO_WRAP", to_bool),
'--line-height': ("LINE_HEIGHT", to_int),
'--nb-lines' : ('LINES', to_int),
'--no-caption' : ("WANT_NO_CAPTION", to_bool),
    '--force-no-vfs': ("FORCE_NO_VFS", to_bool),
    '--force-vfs' : ("FORCE_VFS", to_bool),
'--pick-random': ("PICK_RANDOM", to_bool),
'--put-random' : ("PUT_RANDOM", to_bool),
'--resize' : ("DO_RESIZE", to_bool),
'--sleep' : ('SLEEP_TIME', to_int),
'--width' : ('WIDTH', to_int),
'--no-switch-to-mini': ("NO_SWITCH_TO_MINI", to_bool),
'<path>' : ('PATH', to_path),
'<target>' : ('TARGET', to_path),
'--polaroid' : ("DO_POLAROID", to_bool),
'--format' : ("IMG_FORMAT_SUFFIX", to_str),
'--crop-size' : ("CROP_SIZE", to_int),
'~~use-vfs' : ("USE_VFS", to_bool),
'--help' : ("HELP", to_bool)
}
KEY_TO_OPT = dict([(key, (opt, ttype)) for opt, (key, ttype) in OPT_TO_KEY.items()])
PARAMS = {
"PATH" : "/home/kevin/mount/first",
"TARGET" : "/tmp/final.png",
#define the size of the picture
"WIDTH" : 2000,
#define how many lines do we want
"LINES": 2,
"LINE_HEIGHT": 200,
#minimum width of cropped image. Below that, we black it out
#only for POLAROID
"CROP_SIZE": 1000,
"IMG_FORMAT_SUFFIX": ".png",
# False if PATH is a normal directory, True if it is WebAlbums-FS
"USE_VFS": False,
"FORCE_VFS": False,
"FORCE_NO_VFS": False,
# True if end-of-line photos are wrapped to the next line
"DO_WRAP": False,
# True if we want a black background and white frame, plus details
"DO_POLAROID": True,
"WANT_NO_CAPTION": True,
# False if we want to add pictures randomly
"PUT_RANDOM": False,
"DO_RESIZE": False,
### VFS options ###
"NO_SWITCH_TO_MINI": False,
### Directory options ###
    # False if we pick directory images sequentially, True if we take them randomly
"PICK_RANDOM": False, #not implemented yet
    ## Random wall options ##
"SLEEP_TIME": 0,
"HELP": False
}
DEFAULTS = dict([(key, value) for key, value in PARAMS.items()])
DEFAULTS_docstr = dict([(KEY_TO_OPT[key][0], value) for key, value in PARAMS.items()])
usage = """Photo Wall for WebAlbums 3.
Usage:
photowall.py <path> <target> [options]
Arguments:
  <path>        The path where photos are picked up from. [default: %(<path>)s]
  <target>      The path where the target photo is written. Except in POLAROID+RANDOM mode, the image will be blanked out first. [default: %(<target>)s]
Options:
--polaroid Use polaroid-like images for the wall
--width <width> Set final image width. [default: %(--width)d]
--nb-lines <nb> Number on lines of the target image. [default: %(--nb-lines)d]
--resize Resize images before putting in the wall. [default: %(--resize)s]
--line-height <height> Set the height of a single image. [default: %(--line-height)d]
--do-wrap If not POLAROID, finish images on the next line. [default: %(--do-wrap)s]
--help Display this message
Polaroid mode options:
--crop-size <crop> Minimum size to allow cropping an image. [default: %(--crop-size)s]
--no-caption Disable caption. [default: %(--no-caption)s]
--put-random Put images randomly instead of linearily. [default: %(--put-random)s]
--sleep <time> If --put-random, time (in seconds) to go asleep before adding a new image. [default: %(--sleep)d]
Filesystem options:
--force-vfs Treat <path> as a VFS filesystem. [default: %(--force-vfs)s]
--force-no-vfs Treat <path> as a normal filesystem. [default: %(--force-no-vfs)s]
--no-switch-to-mini If VFS, don't switch from the normal image to the miniature. [default: %(--no-switch-to-mini)s]
--pick-random If not VFS, pick images randomly in the <path> folder. [default: %(--pick-random)s]
""" % DEFAULTS_docstr
class UpdateCallback:
def newExec(self):
pass
def newImage(self, row=0, col=0, filename=""):
print("%d.%d > %s" % (row, col, filename))
def updLine(self, row, tmpLine):
#print("--- %d ---" % row)
pass
def newFinal(self, name):
pass
def finished(self, name):
print("==========")
def stopRequested(self):
return False
def checkPause(self):
pass
updateCB = UpdateCallback()
if __name__ == "__main__":
arguments = docopt(usage, version="3.5-dev")
if arguments["--help"]:
print(usage)
exit()
param_args = dict([(OPT_TO_KEY[opt][0], OPT_TO_KEY[opt][1](value)) for opt, value in arguments.items()])
PARAMS = dict(PARAMS, **param_args)
###########################################
###########################################
previous = None
def get_next_file_vfs():
global previous
if previous is not None:
try:
os.unlink(previous)
        except OSError:
pass
files = os.listdir(PARAMS["PATH"])
for filename in files:
if not "By Years" in filename:
previous = PARAMS["PATH"]+filename
if "gpx" in previous:
return get_next_file()
to_return = previous
try:
to_return = os.readlink(to_return)
except OSError:
pass
if not PARAMS["NO_SWITCH_TO_MINI"]:
to_return = to_return.replace("/images/", "/miniatures/") + ".png"
return to_return
def get_file_details(filename):
try:
link = filename
try:
link = os.readlink(filename)
except OSError:
pass
link = pipes.quote(link)
names = link[link.index("/miniatures/" if not PARAMS["NO_SWITCH_TO_MINI"] else "/images"):].split("/")[2:]
theme, year, album, fname = names
return "%s (%s)" % (album, theme)
except Exception as e:
#print("Cannot get details from {}: {}".format(filename, e))
fname = get_file_details_dir(filename)
fname = fname.rpartition(".")[0]
fname = fname.replace("_", "\n")
return fname
###########################################
class GetFileDir:
def __init__(self, randomize):
self.idx = 0
self.files = os.listdir(PARAMS["PATH"])
if len(self.files) == 0:
raise EnvironmentError("No file available")
self.files.sort()
if randomize:
print("RANDOMIZE")
random.shuffle(self.files)
def get_next_file(self):
to_return = self.files[self.idx]
self.idx += 1
self.idx %= len(self.files)
return PARAMS["PATH"]+to_return
def get_file_details_dir(filename):
return filename[filename.rindex("/")+1:]
###########################################
###########################################
def do_append(first, second, underneath=False):
sign = "-" if underneath else "+"
background = "-background black" if PARAMS["DO_POLAROID"] else ""
command = "convert -gravity center %s %sappend %s %s %s" % (background, sign, first, second, first)
ret = subprocess.call(command, shell=True)
if ret != 0:
raise Exception("Command failed: ", command)
def do_polaroid (image, filename=None, background="black", suffix=None):
if suffix is None:
suffix = PARAMS["IMG_FORMAT_SUFFIX"]
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=suffix)
tmp.close()
image.save(filename=tmp.name)
|
symbooglix/boogie-runner
|
BoogieRunner/Runners/GPUVerify.py
|
Python
|
bsd-3-clause
| 2,173
| 0.010584
|
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
from . RunnerBase import RunnerBaseClass
from .. Analysers.GPUVerify import GPUVerifyAnalyser
import logging
import os
import psutil
import re
import sys
import yaml
_logger = logging.getLogger(__name__)
class GPUVerifyRunnerException(Exception):
def __init__(self, msg):
self.msg = msg
class GPUVerifyRunner(RunnerBaseClass):
softTimeoutDiff = 5
def __init__(self, boogieProgram, workingDirectory, rc):
_logger.debug('Initialising {}'.format(boogieProgram))
super(GPUVerifyRunner, self).__init__(boogieProgram, workingDirectory, rc)
# Sanity checks
# TODO
self.softTimeout = self.maxTimeInSeconds
if self.maxTimeInSeconds > 0:
      # We use GPUVerify's timeout function to enforce the
      # requested timeout and enforce a hard timeout slightly later
self.maxTimeInSeconds = self.maxTimeInSeconds + self.softTimeoutDiff
if not self.toolPath.endswith('.py'):
raise GPUVerifyRunnerException(
'toolPath needs to be the GPUVerify python script')
@property
def name(self):
    return "gpuverify"
def _buildResultDict(self):
results = super(GPUVerifyRunner, self)._buildResultDict()
# TODO: Remove this. It's now redundant
results['hit_hard_timeout'] = results['backend_timeout']
return results
def GetNewAnalyser(self, resultDict):
return GPUVerifyAnalyser(resultDict)
def run(self):
# Run using python interpreter
cmdLine = [ sys.executable, self.toolPath ]
cmdLine.append('--timeout={}'.format(self.softTimeout))
# Note we ignore self.entryPoint
_logger.info('Ignoring entry point {}'.format(self.entryPoint))
# GPUVerify needs PATH environment variable set
env = {}
path = os.getenv('PATH')
if path == None:
path = ""
env['PATH'] = path
cmdLine.extend(self.additionalArgs)
# Add the boogie source file as last arg
cmdLine.append(self.programPathArgument)
backendResult = self.runTool(cmdLine,
isDotNet=False,
envExtra=env)
if backendResult.outOfTime:
_logger.warning('GPUVerify hit hard timeout')
def get():
return GPUVerifyRunner
|
andela-bmwenda/cp2-bucketlist-api
|
app/models.py
|
Python
|
mit
| 2,762
| 0
|
from datetime import datetime
from flask_login import UserMixin
from marshmallow import Schema, fields
from werkzeug.security import generate_password_hash, check_password_hash
from app import db
class User(db.Model, UserMixin):
    __tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(20), unique=True)
password = db.Column(db.String(50))
def __init__(self, username, password):
self.username = username
self.set_password(password=bytes(str(password), 'utf-8'))
self.password = self.pwd_hash
    def set_password(self, password):
self.pwd_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password, password)
def __repr__(self):
return '<User %r>' % self.username
class BucketList(db.Model):
__tablename__ = 'bucketlists'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey(User.id))
user = db.relationship('User',
backref=db.backref('bucketlists', lazy='dynamic'))
name = db.Column(db.String(100))
date_created = db.Column(db.DateTime)
date_modified = db.Column(db.DateTime)
created_by = db.Column(db.String(20))
def __init__(self, name, user_id, created_by):
self.name = name
self.user_id = user_id
self.created_by = created_by
self.date_created = datetime.utcnow()
self.date_modified = datetime.utcnow()
def __repr__(self):
return '<BucketList %r>' % self.name
class BucketlistSchema(Schema):
id = fields.Int()
name = fields.Str()
date_created = fields.DateTime()
date_modified = fields.DateTime()
created_by = fields.Str()
class BucketListItem(db.Model):
__tablename__ = 'items'
id = db.Column(db.Integer, primary_key=True)
bucketlist_id = db.Column(db.Integer, db.ForeignKey(BucketList.id))
bucketlist = db.relationship('BucketList',
backref=db.backref('items', lazy='dynamic'))
name = db.Column(db.String(100))
date_created = db.Column(db.DateTime)
date_modified = db.Column(db.DateTime)
done = db.Column(db.Boolean, default=False)
def __init__(self, name, bucketlist_id):
self.name = name
self.bucketlist_id = bucketlist_id
self.date_created = datetime.utcnow()
self.date_modified = datetime.utcnow()
self.done = False
def __repr__(self):
return '<BucketListItem %r>' % self.name
class BucketlistItemSchema(Schema):
id = fields.Int()
name = fields.Str()
date_created = fields.DateTime()
date_modified = fields.DateTime()
done = fields.Bool()
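# Hedged usage sketch (not part of the original module): shows how the
# marshmallow schemas above serialise the model instances. The objects are
# only built in memory here; a real application would add and commit them
# through db.session.
if __name__ == "__main__":
    user = User(username="demo", password="secret")
    bucketlist = BucketList(name="2024 goals", user_id=1, created_by=user.username)
    item = BucketListItem(name="Learn Flask", bucketlist_id=1)
    print(BucketlistSchema().dump(bucketlist))
    print(BucketlistItemSchema().dump(item))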
|
dhamaniasad/flask-elasticsearch
|
app.py
|
Python
|
unlicense
| 188
| 0
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
db = SQLAlchemy(app)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.debug = True
|
jtakayama/makahiki-draft
|
install/run_initialize_instance.py
|
Python
|
mit
| 7,542
| 0.004243
|
import os
import sys
import subprocess
import shlex
import StringIO
import datetime
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki" + os.sep)
from apps.utils import script_utils
def termination_string():
"""
Gets the current system time and appends it to a termination notice.
"""
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M:%S")
end_time = "Script exiting at %s\n" % time
return end_time
# Modified from manage_py_dir() in script_utils.py
def local_manage_py_dir():
"""Returns the directory holding the manage.py file as a string."""
return os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + os.sep + os.pardir + os.sep + "makahiki")
# Modified from local_reset_db(heroku_app) in script_utils.py
def local_reset_db(logfile):
"""reset db.
Returns a tuple result_tuple. result_tuple[0] has the logfile.
result_tuple[1] is True if the reset was aborted, and False if was not.
"""
local_reset_db_cancel = False
logfile.write("WARNING: This command will reset the database. " \
"All existing data will be deleted. This process is irreversible.\n")
print "WARNING: This command will reset the database. " \
"All existing data will be deleted. This process is irreversible.\n"
value = raw_input("Do you wish to continue (Y/n)? ")
while value != "Y" and value != "n":
logfile.write("Invalid option %s\n" % value)
print "Invalid option %s\n" % value
value = raw_input("Do you wish to continue (Y/n)? ")
if value == "n":
logfile.write("Do you wish to continue (Y/n)? %s\n" % value)
logfile.write("Operation cancelled.")
print "Operation cancelled.\n"
local_reset_db_cancel = True
result_tuple = [logfile, local_reset_db_cancel]
return result_tuple
elif value =="Y":
logfile.write("Do you wish to continue (Y/n)? %s\n" % value)
print "resetting the db..."
os.system("cd " + local_manage_py_dir() + "; python scripts/initialize_postgres.py")
result_tuple = [logfile, local_reset_db_cancel]
return result_tuple
def run(logfile):
"""
Initializes the Makahiki database with default options and logs the
output to a file. This should only be used to initialize local
installations.
"""
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M:%S")
start_time = "Makahiki instance initialization script started at %s\n" % time
logfile.write(start_time)
print start_time
try:
# Retrieve the user's home directory
USER_HOME = subprocess.check_output(["echo $HOME"], stderr=subprocess.STDOUT, shell=True)
# Remove newline from expected "/home/<username>\n"
USER_HOME = USER_HOME[:-1]
USER_PROJECT_HOME = USER_HOME + os.sep + "makahiki"
# cd to makahiki directory
os.chdir(USER_PROJECT_HOME)
# Capture console output from script_utils functions:
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Runs the initialization scripts in same order as
# makahiki/makahiki/scripts/initialize_instance.py
instance_type = None
heroku_app = None
manage_py = script_utils.manage_py_command()
manage_command = "python " + manage_py
fixture_path = "makahiki" + os.sep + "fixtures"
# Install requirements
script_utils.install_requirements()
# Switch back to standard I/O
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Reset the database
reset_db_result = local_reset_db(logfile)
# If successful, write the output of local_reset_db to a logfile
logfile = reset_db_result[0]
local_reset_db_cancel = reset_db_result[1]
if local_reset_db_cancel:
logfile.write("Makahiki instance initialization was cancelled by the user.")
print "Makahiki instance initialization was cancelled by the user."
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
else:
# Resume capturing I/O
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Sync the database
script_utils.syncdb(manage_command)
# Switch I/O back, write output to logfile
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Resume capturing I/O
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Copy static files
script_utils.copy_static_media(heroku_app)
# Switch I/O back, write output to logfile
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Resume capturing I/O
normal_stdout = sys.stdout
output_capturer = StringIO.StringIO()
sys.stdout = output_capturer
# Load data
script_utils.load_data(manage_command, instance_type, fixture_path)
            # Switch I/O back, write output to logfile
sys.stdout = normal_stdout
output = output_capturer.getvalue()
logfile.write(output)
print(output)
# Clear the logfile buffer.
logfile.flush()
os.fsync(logfile)
# Print a closing message
closing = "\nMakahiki initialization script has completed.\n"
logfile.write(closing)
print closing
            end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
except subprocess.CalledProcessError as cpe:
logfile.write("CalledProcessError: ")
print "CalledProcessError: "
logfile.write(cpe.output)
print cpe.output
logfile.write("Warning: Makahiki initialization did not complete successfully.")
print "Warning: Makahiki initialization did not complete successfully."
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
except OSError as ose:
logfile.write("OSError: ")
print "OSError: "
oserror_output = " errno: %s\n filename: %s\n strerror: %s\n" % (ose.errno, ose.filename, ose.strerror)
logfile.write(oserror_output)
print oserror_output
logfile.write("Warning: Makahiki initialization did not complete successfully.")
print "Warning: Makahiki initialization did not complete successfully."
end_time = termination_string()
logfile.write(end_time)
print end_time
return logfile
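# Hedged usage sketch (not part of the original script): run() expects an
# already-open, writable log file object and returns it when finished. The
# log file name below is an illustrative assumption.
if __name__ == "__main__":
    with open("initialize_instance.log", "w") as log:
        run(log)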
|
BBN-Q/pyqgl2
|
test/code/bugs/84.py
|
Python
|
apache-2.0
| 1,119
| 0.006256
|
from qgl2.qgl2 import qgl2decl, qgl2main, qreg
from qgl2.qgl2 import QRegister
from qgl2.qgl1 import X, Y, Z, Id, Utheta
from itertools import product
@qgl2decl
def cond_helper(q: qreg, cond):
if cond:
X(q)
@qgl2decl
def t1():
"""
Correct result is [ X(q1) ]
"""
q1 = QRegister('q1')
cond_helper(q1, False)
X(q1)
@qgl2decl
def t2():
"""
Correct result is [ X(q1) ]
"""
q1 = QRegister('q1')
q2 = QRegister('q2')
# We're not going to reference q2 anywhere,
    # just to make sure that the compiler doesn't
# freak out
X(q1)
@qgl2decl
def t3():
"""
Like t2, but with a function call
"""
q1 = QRegister('q1')
q2 = QRegister('q2')
cond_helper(q1, True)
@qgl2decl
def t4():
"""
Like t3, but the function call does nothing
"""
    q1 = QRegister('q1')
q2 = QRegister('q2')
cond_helper(q1, False)
X(q1) # need to do something
@qgl2decl
def t5():
"""
Like t3, but the function call does nothing
"""
q1 = QRegister('q1')
q2 = QRegister('q2')
# don't do anything at all
|
digistump/gerbmerge3
|
gerbmerge/aptable.py
|
Python
|
gpl-3.0
| 11,947
| 0.003264
|
#!/usr/bin/env python
"""
Manage apertures, read aperture table, etc.
--------------------------------------------------------------------
This program is licensed under the GNU General Public License (GPL)
Version 3. See http://www.fsf.org for details of the license.
Rugged Circuits LLC
http://ruggedcircuits.com/gerbmerge
"""
# Include standard modules
import sys
import re
# Include gerbmerge modules
import amacro
import util
# Recognized apertures and re pattern that matches its definition Thermals and
# annuli are generated using macros (see the eagle.def file) but only on inner
# layers. Octagons are also generated as macros (%AMOC8) but we handle these
# specially as the Eagle macro uses a replaceable macro parameter ($1) and
# GerbMerge doesn't handle these yet...only fixed macros (no parameters) are
# currently supported.
Apertures = (
('Rectangle', re.compile(r"^%AD(D\d+)R,([^X]+)X([^*]+)\*%$"), "%AD{:s}R,{:.5f}X{:.5f}*%\n"),
('Circle', re.compile(r"^%AD(D\d+)C,([^*]+)\*%$"), "%AD{:s}C,{:.5f}*%\n"),
('Oval', re.compile(r"^%AD(D\d+)O,([^X]+)X([^*]+)\*%$"), "%AD{:s}O,{:.5f}X{:.5f}*%\n"),
('Octagon', re.compile(r"^%AD(D\d+)OC8,([^*]+)\*%$"), "%AD{:s}OC8,{:.5f}*%\n"), # Specific to Eagle
('Macro', re.compile(r"^%AD(D\d+)([^*]+)\*%$"), "%AD{:s}{:s}*%\n")
)
# This loop defines names in this module like 'Rectangle',
# which are element 0 of the Apertures list above. So code
# will be like:
# import aptable
# A = aptable.Aperture(aptable.Rectangle, ......)
for ap in Apertures:
globals()[ap[0]] = ap
class Aperture:
def __init__(self, aptype, code, dimx, dimy=None):
assert aptype in Apertures
self.apname, self.pat, self.format = aptype
self.code = code
self.dimx = dimx # Macro name for Macro apertures
self.dimy = dimy # None for Macro apertures
if self.apname in ('Circle', 'Octagon', 'Macro'):
assert (dimy is None)
def isRectangle(self):
return self.apname == 'Rectangle'
def rectangleAsRect(self, X, Y):
"""Return a 4-tuple (minx,miny,maxx,maxy) describing the area covered by
this Rectangle aperture when flashed at center co-ordinates (X,Y)"""
dx = util.in2gerb(self.dimx)
dy = util.in2gerb(self.dimy)
if dx & 1: # Odd-sized: X extents are (dx+1)/2 on the left and (dx-1)/2 on the right
xm = (dx + 1) / 2
xp = xm - 1
else: # Even-sized: X extents are X-dx/2 and X+dx/2
xm = xp = dx / 2
if dy & 1: # Odd-sized: Y extents are (dy+1)/2 below and (dy-1)/2 above
ym = (dy + 1) / 2
yp = ym - 1
else: # Even-sized: Y extents are Y-dy/2 and Y+dy/2
ym = yp = dy / 2
return (X - xm, Y - ym, X + xp, Y + yp)
def getAdjusted(self, minimum):
"""
Adjust aperture properties to conform to minimum feature dimensions
Return new aperture if required, else return False
"""
dimx = dimy = None
# Check for X and Y dimensions less than minimum
if self.dimx is not None and self.dimx < minimum:
dimx = minimum
        if self.dimy is not None and self.dimy < minimum:
dimy = minimum
# Return new aperture if needed
if dimx is not None or dimy is not None:
if dimx is None:
dimx = self.dimx
if dimy is None:
dimy = self.dimy
return Aperture((self.apname, self.pat, self.format), self.code, dimx, dimy)
else:
return False # no new aperture needs to be created
    def rotate(self, GAMT, RevGAMT):
if self.apname in ('Macro',):
# Construct a rotated macro, see if it's in the GAMT, and set self.dimx
# to its name if so. If not, add the rotated macro to the GAMT and set
# self.dimx to the new name. Recall that GAMT maps name to macro
# (e.g., GAMT['M9'] = ApertureMacro(...)) while RevGAMT maps hash to
|
# macro name (e.g., RevGAMT[hash] = 'M9')
AMR = GAMT[self.dimx].rotated()
hash = AMR.hash()
try:
self.dimx = RevGAMT[hash]
except KeyError:
AMR = amacro.addToApertureMacroTable(GAMT, AMR) # adds to GAMT and modifies name to global name
self.dimx = RevGAMT[hash] = AMR.name
elif self.dimy is not None: # Rectangles and Ovals have a dimy setting and need to be rotated
t = self.dimx
self.dimx = self.dimy
self.dimy = t
def rotated(self, GAMT, RevGAMT):
# deepcopy doesn't work on re patterns for some reason so we copy ourselves manually
APR = Aperture((self.apname, self.pat, self.format), self.code, self.dimx, self.dimy)
APR.rotate(GAMT, RevGAMT)
return APR
def dump(self, fid=sys.stdout):
fid.write(str(self))
def __str__(self):
return "{:s}: {:s}".format(self.code, self.hash())
def hash(self):
if self.dimy:
return ("{:s} ({:.5f} x {:.5f})".format(self.apname, self.dimx, self.dimy))
else:
if self.apname in ('Macro',):
return ("{:s} ({:s})".format(self.apname, self.dimx))
else:
return ("{:s} ({:.5f})".format(self.apname, self.dimx))
def writeDef(self, fid):
if self.dimy:
fid.write(self.format.format(self.code, self.dimx, self.dimy))
else:
fid.write(self.format.format(self.code, self.dimx))
# Parse the aperture definition in line 's'. macroNames is an aperture macro dictionary
# that translates macro names local to this file to global names in the GAMT. We make
# the translation right away so that the return value from this function is an aperture
# definition with a global macro name, e.g., 'ADD10M5'
def parseAperture(s, knownMacroNames):
for ap in Apertures:
match = ap[1].match(s)
if match:
dimy = None
if ap[0] in ('Circle', 'Octagon', 'Macro'):
code, dimx = match.groups()
else:
code, dimx, dimy = match.groups()
if ap[0] in ('Macro',):
if dimx in knownMacroNames:
dimx = knownMacroNames[dimx] # dimx is now GLOBAL, permanent macro name (e.g., 'M2')
else:
raise RuntimeError("Aperture Macro name \"{:s}\" not defined".format(dimx))
else:
try:
dimx = float(dimx)
if dimy:
dimy = float(dimy)
except:
raise RuntimeError("Illegal floating point aperture size")
return Aperture(ap, code, dimx, dimy)
return None
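# Hedged example (not part of the original module): parseAperture() turns one
# Gerber aperture definition string into an Aperture object; the rectangle
# below matches the "%ADD12R,..." example used in the comments further down.
# The helper name is hypothetical.
def _demo_parse_aperture():
    ap = parseAperture("%ADD12R,0.0630X0.0630*%", knownMacroNames={})
    print(ap.isRectangle())  # True
    print(ap.hash())         # Rectangle (0.06300 x 0.06300)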
# This function returns a dictionary where each key is an
# aperture code string (e.g., "D11") and the value is the
# Aperture object that represents it. For example:
#
# %ADD12R,0.0630X0.0630*%
#
# from a Gerber file would result in the dictionary entry:
#
# "D12": Aperture(ap, 'D10', 0.063, 0.063)
#
# The input fileList is a list of pathnames which will be read to construct the
# aperture table for a job. All the files in the given list will be so
# examined, and a global aperture table will be constructed as a dictionary.
# Same goes for the global aperture macro table.
tool_pat = re.compile(r"^(?:G54)?D\d+\*$")
def constructApertureTable(fileList, GAT, GAMT):
# First we construct a dictionary where each key is the
# string representation of the aperture. Then we go back and assign
# numbers. For aperture macros, we construct their final version
# (i.e., 'M1', 'M2', etc.) right away, as they are parsed. Thus,
# we translate from 'THX10N' or whatever to 'M2' right away.
GAT.clear() # Clear Global Aperture Table
GAMT.clear() # Clear Global Aperture Macro Table
RevGAMT = {} # Dictionary keyed by aperture macro hash and returning macro name
AT = {} # Aperture Table for this file
for fname in fileL
|
julienaubert/clothstream
|
clothstream/collection/signals.py
|
Python
|
mit
| 404
| 0
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from clothstream.user_profile.models import UserProfile
@receiver(post_save, sender=UserProfile)
def create_initial_collection(sender, created, instance, **kwargs):
from clothstream.collection.models import Collection
if created:
        Collection.objects.create(owner=instance, title=u'My first collection')
|
toystori/v2
|
app/toy/views.py
|
Python
|
mit
| 396
| 0
|
from django.urls import reverse_lazy
from django.views.generic import ListView
from django.views.generic.edit import UpdateView
from .models import Toy
class ToyEditView(UpdateView):
model = Toy
fields = '__all__'
template_name_suffix = '_edit'
    success_url = reverse_lazy('toy:list')
class ToyListView(ListView):
def get_queryset(self):
return Toy.objects.all()
|
plajjan/pybgp
|
pybgp/nlri.py
|
Python
|
mit
| 5,029
| 0.003977
|
import array
import struct
import socket
from odict import OrderedDict as OD
class NLRI:
def __init__(self, afi, safi, val):
self.afi = afi
self.safi = safi
self.val = val
def encode(self):
return self.val
class vpnv4(NLRI):
def __init__(self, labels, rd, prefix):
self.labels = labels
self.rd = rd
self.prefix = prefix
def __repr__(self):
if self.labels:
l = ','.join([str(l) for l in self.labels])
else:
l = 'none'
return '<vpnv4 label %s rd %s prefix %s>' % (l, self.rd, self.prefix)
def __str__(self):
return '%s:%s' % (self.rd, self.prefix)
def __cmp__(self, other):
if isinstance(other, vpnv4):
return cmp(
(self.labels, self.rd, self.prefix),
(other.labels, other.rd, other.prefix),
)
return -1
def encode(self):
plen = 0
v = ''
labels = self.labels[:]
if not labels:
return '\0'
labels = [l<<4 for l in labels]
labels[-1] |= 1
for l in labels:
lo = l & 0xff
hi = (l & 0xffff00) >> 8
v += struct.pack('>HB', hi, lo)
|
plen += 24
l, r = self.rd.split(':')
if '.' in l:
ip = socket.inet_aton(l)
rd = struct.pack('!H4sH', 1, ip, int(r))
else:
rd = struct.pack('!HHI', 0, int(l), int(r))
v += rd
plen += 64
ip, masklen = self.prefix.split('/')
ip = socket.inet_aton(ip)
masklen = int(masklen)
plen += masklen
if masklen > 24:
v += ip
elif masklen > 16:
v += ip[:3]
elif masklen > 8:
v += ip[:2]
elif masklen > 0:
v += ip[:1]
else:
pass
return struct.pack('B', plen) + v
@classmethod
def from_bytes(cls, plen, val):
if plen==0:
# what the hell?
return cls([], '0:0', '0.0.0.0/0')
idx = 0
# plen is the length, in bits, of all the MPLS labels, plus the 8-byte RD, plus the IP prefix
labels = []
while True:
ls, = struct.unpack_from('3s', val, idx)
idx += 3
plen -= 24
if ls=='\x80\x00\x00':
# special null label for vpnv4 withdraws
labels = None
break
label, = struct.unpack_from('!I', '\x00'+ls)
bottom = label & 1
labels.append(label >> 4)
if bottom:
break
rdtype, rd = struct.unpack_from('!H6s', val, idx)
if rdtype==1:
rdip, num = struct.unpack('!4sH', rd)
rdip = socket.inet_ntoa(rdip)
rd = '%s:%s' % (rdip, num)
else:
num1, num2 = struct.unpack('!HI', rd)
rd = '%s:%s' % (num1, num2)
idx += 8
plen -= 64
ipl = pb(plen)
ip = val[idx:idx+ipl]
idx += ipl
prefix = pip(ip, plen)
return cls(labels, rd, prefix)
class ipv4(NLRI):
def __init__(self, prefix):
self.prefix = prefix
def __cmp__(self, other):
if isinstance(other, ipv4):
aip, alen = self.prefix.split('/')
alen = int(alen)
aip = socket.inet_aton(aip)
bip, blen = other.prefix.split('/')
blen = int(blen)
bip = socket.inet_aton(bip)
return cmp((aip,alen),(bip,blen))
return -1
def encode(self):
plen = 0
v = ''
ip, masklen = self.prefix.split('/')
ip = socket.inet_aton(ip)
masklen = int(masklen)
plen += masklen
if masklen > 24:
v += ip
elif masklen > 16:
v += ip[:3]
elif masklen > 8:
v += ip[:2]
elif masklen > 0:
v += ip[:1]
else:
pass
return struct.pack('B', plen) + v
def __repr__(self):
return '<ipv4 %s>' % (self.prefix,)
def __str__(self):
return self.prefix
@classmethod
def from_bytes(cls, plen, val):
return cls(pip(val, plen))
def pb(masklen):
if masklen > 24:
return 4
elif masklen > 16:
return 3
elif masklen > 8:
return 2
elif masklen > 0:
return 1
return 0
def pip(pi, masklen):
pi += '\x00\x00\x00\x00'
return '%s/%s' % (socket.inet_ntoa(pi[:4]), masklen)
def parse(bytes, afi=1, safi=0):
rv = []
if afi==1 and safi==128:
klass = vpnv4
else:
klass = ipv4
idx = 0
while idx < len(bytes):
plen, = struct.unpack_from('B', bytes, idx)
idx += 1
nbytes, rest = divmod(plen, 8)
if rest:
nbytes += 1
val = bytes[idx:idx+nbytes]
idx += nbytes
rv.append(klass.from_bytes(plen, val))
return rv
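# --- Usage sketch (added example, not part of the original module) ---
# Round-trips a single vpnv4 route through encode() and parse() above. The
# label, route distinguisher and prefix values are illustrative only;
# afi=1/safi=128 is what makes parse() select the vpnv4 class. Python 2
# byte-string semantics are assumed, matching the code above.
if __name__ == '__main__':
    route = vpnv4(labels=[100], rd='65000:1', prefix='10.0.0.0/24')
    wire = route.encode()                   # length octet + label stack + RD + truncated prefix
    decoded = parse(wire, afi=1, safi=128)
    assert str(decoded[0]) == '65000:1:10.0.0.0/24'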
|
RuiNascimento/krepo | plugin.video.sparkle/resources/lib/modules/subreddits.py | Python | gpl-2.0 | 3,534 | 0.004527
|
import os, sys, re, json
from praw2 import Reddit
reload(sys)
try:
from xbmc import log
except:
def log(msg):
print(msg)
sys.setdefaultencoding("utf-8")
CLIENT_ID = 'J_0zNv7dXM1n3Q'
CLIENT_SECRET = 'sfiPkzKDd8LZl3Ie1WLAvpCICH4'
USER_AGENT = 'sparkle streams 1.0'
class SubRedditEvents(object):
as_regex_str = r'(acestream://[^$\s]+)'
def __init__(self, username=None, password=None, client=None):
self.client = client or Reddit(client_id=CLIENT_ID,
client_secret=CLIENT_SECRET,
user_agent=USER_AGENT,
username=username,
password=password,
)
self.as_regex = re.compile(self.as_regex_str, re.IGNORECASE)
@staticmethod
def get_as_links(body):
"""
For each acestream link, return a tuple of acestream link,
and link quality
"""
links = []
for entry in body.split('\n'):
res = re.findall('(.*)(acestream://[a-z0-9]+)\s*(.*)', entry)
if res:
pre, acelink, post = res[0]
if len(pre.strip()) > len(post.strip()):
links.append((acelink.strip(), pre.strip()))
else:
links.append((acelink.strip(), post.strip()))
return links
@staticmethod
def priority(entry):
"""
For cases where we have multiple entries for the same acestream link,
prioritize based on the quality text to get the best text possible
"""
if not entry[0]:
return (entry, 3)
elif re.search('.*\[.*\].*', entry[0]):
return (entry, 1)
else:
return (entry, 2)
@staticmethod
def collapse(entries):
"""
        Collapse our list of acestream entries to pick only the one with the best
quality text
"""
results = []
prev = None
# Sort the entries by our priority logic, then iterate
        for entry in sorted(entries, key=lambda entry: SubRedditEvents.priority(entry), reverse=True):
if prev != entry[0]:
results.append(entry)
prev = entry[0]
return results
def get_events(self, subreddit, filtering=False):
subs = []
path = '/r/{}'.format(subreddit)
for submission in self.client.get(path):
sub_id = submission.id
            score = submission.score
            title = submission.title
            title = title.encode('utf-8')
            subs.append({'submission_id': sub_id, 'title': title, 'score': score})
return sorted(subs, key=lambda d: d['score'], reverse=True)
def get_event_links(self, submission_id):
submission = self.client.submission(id=submission_id)
links = []
scores = {}
# Add the extracted links and details tuple
for c in submission.comments.list():
if hasattr(c, 'body'):
links.extend(self.get_as_links(c.body.encode('utf-8')))
# Add entry to our scores table taking the largest score for a given
# acestream link
score = c.score if hasattr(c, 'score') else 0
for entry in links:
scores[entry[0]] = max(scores.get(entry[0], 0), score)
if len(links) > 0:
return [(s, q, a) for ((a, q), s) in
zip(links, map(lambda x: scores[x[0]], links))]
else:
return links
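# --- Usage sketch (added example, not part of the original module) ---
# get_as_links() extracts (acestream URI, quality text) pairs from a comment
# body; the sample text below is illustrative only and not taken from reddit.
if __name__ == '__main__':
    sample = 'HD 1080p acestream://abc123\nacestream://def456 [SD]'
    print(SubRedditEvents.get_as_links(sample))
    # -> [('acestream://abc123', 'HD 1080p'), ('acestream://def456', '[SD]')]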
|
iamdork/compose | dork_compose/__init__.py | Python | mit | 92 | 0
|
__version__ = '1.13.0.0.0.1'
if __name__ == "__main__":
from main import run
run()
|
lasr/orbital_mechanics | orbit/mee2coe.py | Python | mit | 1,025 | 0
|
"""Created on Sat Oct 01 2015 16:14.
@author: Nathan Budd
"""
import numpy as np
def mee2coe(MEE, mu=1.):
"""
Convert modified equinoctial elements to classical orbital elements.
Parameters
----------
MEE : ndarray
mx6 array of elements ordered as [p f g h k L].
mu : float
Standard gravitational parameter. Defaults to canonical units.
Returns
-------
COE : ndarray
mx6 array of elements ordered as [p e i W w f].
"""
p = MEE[0:, 0:1]
f = MEE[0:, 1:2]
g = MEE[0:, 2:3]
h = MEE[0:, 3:4]
k = MEE[0:, 4:5]
L = MEE[0:, 5:6]
# inclination
i = 2. * np.arctan((h**2 + k**2)**.5)
# right ascension of the ascending node
W = np.mod(np.arctan2(k, h), 2*np.pi)
    # eccentricity
e = (f**2 + g**2)**.5
# argument of periapsis
w_bar = np.mod(np.arctan2(g, f), 2*np.pi)
w = np.mod(w_bar - W, 2*np.pi)
# true anomaly
    f = np.mod(L - w_bar, 2*np.pi)
return np.concatenate((p, e, i, W, w, f), 1)
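# --- Usage sketch (added example, not part of the original module) ---
# Converts one modified-equinoctial state to classical elements. The row
# below describes an illustrative near-circular, equatorial orbit in
# canonical units and is not taken from the original repository.
if __name__ == '__main__':
    mee = np.array([[1.0, 0.0, 0.0, 0.0, 0.0, 0.5]])  # [p f g h k L]
    coe = mee2coe(mee)                                 # -> [p e i W w f]
    print(coe)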
|
tmthydvnprt/pfcompute | pf/report.py | Python | mit | 345 | 0
|
"""
report.py
Functions to create various reports.
project : pf
version : 0.0.0
status : development
modifydate :
createdate :
website : https://github.com/tmthydvnprt/pf
author      : tmthydvnprt
email       : tim@tmthydvnprt.com
maintainer : tmthydvnprt
license : MIT
copyright : Copyright 2016, tmthydvnprt
credits :
"""
|
dwrpayne/zulip | zerver/tests/test_decorators.py | Python | apache-2.0 | 5,333 | 0.001313
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from zerver.decorator import \
REQ, has_request_variables, RequestVariableMissingError, \
RequestVariableConversionError, JsonableError
from zerver.lib.validator import (
check_string, check_dict, check_bool, check_int, check_list
)
import ujson
class DecoratorTestCase(TestCase):
def test_REQ_converter(self):
def my_converter(data):
lst = ujson.loads(data)
if not isinstance(lst, list):
raise ValueError('not a list')
if 13 in lst:
raise JsonableError('13 is an unlucky number!')
return lst
@has_request_variables
def get_total(request, numbers=REQ(converter=my_converter)):
return sum(numbers)
class Request(object):
REQUEST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_total(request)
request.REQUEST['numbers'] = 'bad_value'
with self.assertRaises(RequestVariableConversionError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), "Bad value for 'numbers': bad_value")
request.REQUEST['numbers'] = ujson.dumps([2, 3, 5, 8, 13, 21])
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), "13 is an unlucky number!")
request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
result = get_total(request)
self.assertEqual(result, 21)
def test_REQ_validator(self):
@has_request_variables
def get_total(request, numbers=REQ(validator=check_list(check_int))):
return sum(numbers)
class Request(object):
REQUEST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_total(request)
request.REQUEST['numbers'] = 'bad_value'
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'argument "numbers" is not valid json.')
request.REQUEST['numbers'] = ujson.dumps([1, 2, "what?", 4, 5, 6])
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'numbers[2] is not an integer')
request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
result = get_total(request)
self.assertEqual(result, 21)
class ValidatorTestCase(TestCase):
def test_check_string(self):
x = "hello"
self.assertEqual(check_string('x', x), None)
x = 4
self.assertEqual(check_string('x', x), 'x is not a string')
def test_check_bool(self):
x = True
self.assertEqual(check_bool('x', x), None)
x = 4
self.assertEqual(check_bool('x', x), 'x is not a boolean')
def test_check_int(self):
x = 5
self.assertEqual(check_int('x', x), None)
x = [{}]
self.assertEqual(check_int('x', x), 'x is not an integer')
def test_check_list(self):
x = 999
error = check_list(check_string)('x', x)
self.assertEqual(error, 'x is not a list')
x = ["hello", 5]
error = check_list(check_string)('x', x)
self.assertEqual(error, 'x[1] is not a string')
x = [["yo"], ["hello", "goodbye", 5]]
error = check_list(check_list(check_string))('x', x)
self.assertEqual(error, 'x[1][2] is not a string')
x = ["hello", "goodbye", "hello again"]
error = check_list(check_string, length=2)('x', x)
self.assertEqual(error, 'x should have exactly 2 items')
def test_check_dict(self):
keys = [
('names', check_list(check_string)),
('city', check_string),
]
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
}
error = check_dict(keys)('x', x)
self.assertEqual(error, None)
x = 999
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x is not a dict')
x = {}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'names key is missing from x')
x = {
'names': ['alice', 'bob', {}]
}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x["names"][2] is not a string')
x = {
'names': ['alice', 'bob'],
'city': 5
}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x["city"] is not a string')
def test_encapsulation(self):
# There might be situations where we want deep
        # validation, but the error message should be customized.
        # This is an example.
def check_person(val):
error = check_dict([
['name', check_string],
['age', check_int],
])('_', val)
if error:
return 'This is not a valid person'
person = {'name': 'King Lear', 'age': 42}
self.assertEqual(check_person(person), None)
person = 'misconfigured data'
self.assertEqual(check_person(person), 'This is not a valid person')
|
samyk/proxmark3 | tools/findbits_test.py | Python | gpl-2.0 | 1,837 | 0.003266
|
#!/usr/bin/env python3
import unittest, sys, findbits
class TestFindBits(unittest.TestCase):
def setUp(self):
self.old_stdout = sys.stdout
sys.stdout = OutputBuffer()
def tearDown(self):
sys.stdout = self.old_stdout
INVERT_CASES = [
('10', '01'),
('', ''),
]
def test_invert(self):
self.commutative_test(findbits.invert, self.INVERT_CASES)
SEARCH_CASES = [
('1111', '10111101', ['Match at bit 2', '0<1111>0']),
('00', '10111101', ['Not found']),
]
def test_search(self):
for target, data, expected_fragments in self.SEARCH_CASES:
sys.stdout.clear_buffer()
findbits.search(target, data)
for fragment in expected_fragments:
self.assertIn(fragment, sys.stdout.content)
BINSTRING_CASES = [
(42, '101010'),
(1, '1'),
(0, ''),
]
def test_binstring(self):
self.unary_operation_test(findbits.binstring, self.BINSTRING_CASES)
REVERSE_CASES = [
('abc', 'cba'),
('', ''),
]
def test_stringreverse(self):
self.commutative_test(findbits.stringreverse, self.REVERSE_CASES)
    def commutative_test(self, operation, cases):
self.unary_operation_test(operation, cases)
self.unary_operation_test(operation, map(reversed, cases))
def unary_operation_test(self, operation, cases):
for case_in, case_out in cases:
            self.assertEqual(operation(case_in), case_out)
class OutputBuffer(object):
def __init__(self):
self.clear_buffer()
def clear_buffer(self):
self.content = ''
def write(self, data):
self.content += data
if __name__ == '__main__':
unittest.main()
|
OJFord/IARAI | IARAI/iarai.py | Python | mit | 4,896 | 0.045761
|
#!/usr/bin/env python3
from sys import argv, exit
from cmd import Cmd
from copy import deepcopy
from tabulate import tabulate
import json
import shlex
__author__ = 'OJFord'
__version__ = '1.0dev'
class Interpreter(Cmd):
"""IARAI: A Relational Algebra Interpreter."""
def __init__(self, relfile):
super().__init__()
self.prompt = 'RA> '
self.intro = '^D to exit. help[ cmd] for more info. Tab completion.'
        self.doc_header = 'Relation may be given as `(jsonfile.relation)`.'
        self.doc_header += ' Alternatively, `$` refers to the working relation.'
with open(relfile) as f:
self.file = json.loads(f.read())
self.fname = self.file['relation']
self.working = None
self.chain = '' # working command chain
def write(self):
print( self.chain + ' (' + self.working['relation'] + ')' )
print( tabulate(self.working['tuples'], headers=self.working['attributes']) )
print()
def debugLine(self, line):
before = deepcopy(self.working)
self.postcmd( self.onecmd( self.precmd(line) ), line)
after = self.working
        return before, after
@staticmethod
def chainable(cmd, args):
return cmd + ('_' + args[1:] if args[1:] else '') + ' '
def cmdloop(self):
try:
return super().cmdloop()
except KeyboardInterrupt:
            # cancel command without crashing out of interpreter
            self.intro = None
return self.cmdloop()
def precmd(self, line):
if not line or line == 'EOF' or line.find('help') == 0:
return line
argsend = line.find('(')
if argsend == -1:
argsend = line.find('$')
rel = line[argsend:]
cmd = line[0]
args= shlex.split( line[1:argsend] )
if len(args) >= 2 or len(args) >= 1 and args[0][0] not in ['_','(','$']:
if args[0][0] == '_':
rrecurse= ' '.join(args[1:])
largs = args[0]
else:
rrecurse= ' '.join(args)
largs = ''
# execute end of line
self.postcmd( self.onecmd( self.precmd(rrecurse+rel) ), rrecurse+rel )
# 'restart' to finish up left of line
return self.precmd(cmd + largs + ' $')
elif rel == '$':
if not self.working:
print('Error: no current working relation, use file first.')
raise KeyboardInterrupt # hacky af
else:
# continue with working relation
pass
elif rel == '(' + self.fname + ')':
self.chain = ''
self.working = deepcopy(self.file)
else:
print('Error: last argument must be a valid relation.')
raise KeyboardInterrupt # hacky af
if args: # single string args, just remove leading '_'
args = ' ' + args[0][1:]
else:
args = ''
self.chain = self.chainable(cmd, args) + self.chain
return cmd+args
def default(self, line):
# undo add command to chain.. unfortunately precmd() executes even on invalid
cmd, args = line[0], shlex.split(line[1:])
self.chain = self.chain[ len( self.chainable(cmd, args) ):]
super().default(line)
def emptyline(self):
# overrides super's repeat last line, which would make little sense
pass
def do_EOF(self, line):
"""Exits."""
return True
def do_p(self, args):
""" 'p' for pi - project.
Projects the given attributes of a relation, or all if none specified.
usage: p [ATTR,...] (REL)
"""
if args:
allAtts = self.working['attributes']
# put in same order
prjAtts = [ att for att in allAtts if att in args.split(',') ]
prjAtts+= [ att for att in args.split(',') if att not in prjAtts ]
# project
for i,tup in enumerate(self.working['tuples']):
self.working['tuples'][i] = [ o for j,o in enumerate(tup) if allAtts[j] in prjAtts ]
self.working['tuples'][i]+= [ None for o in prjAtts if o not in allAtts ]
self.working['attributes'] = prjAtts
self.write()
def do_s(self, args):
""" 's' for sigma - select.
Selects from a relation that which satisfies the given proposition.
usage: s [PROP] (REL)
"""
if '/\\' in args or '\\/' in args:
raise Exception('Error: not implemented, use e.g. `s_prop2 s_prop1 $` to AND for now')
if args:
if args[0] in ['¬', '~', '!']:
neg = True
args= args[1:]
else:
neg = False
(att, val) = tuple(args.split('='))
else:
att = val = None
if att:
tups = self.working['tuples']
atts = self.working['attributes']
if neg:
self.working['tuples'] = [ t for t in tups if t[ atts.index(att) ] != val ]
else:
self.working['tuples'] = [ t for t in tups if t[ atts.index(att) ] == val ]
def do_r(self, args):
""" 'r' for rho - rename.
Renames a given attribute of a relation.
usage: r NEW_NAME/OLD_NAME (REL)
"""
pairs = [ tuple(p.split('/')) for p in args.split(',') ]
atts = self.working['attributes']
for (new, old) in pairs:
if old in atts:
self.working['attributes'][ atts.index(old) ] = new
if __name__ == '__main__':
if len(argv) != 2:
print('Error: Single argument - JSON relation file - required.')
print('usage: python iarai.py relation.json')
exit(1)
else:
Interpreter(argv[1]).cmdloop()
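# --- Illustrative example (added, not part of the original file) ---
# The interpreter reads a JSON file with "relation", "attributes" and
# "tuples" keys (inferred from the attribute accesses above). A minimal
# relation file and session might look like this; names and values are
# illustrative only:
#
#   people.json:
#     {"relation": "people",
#      "attributes": ["name", "age"],
#      "tuples": [["alice", "30"], ["bob", "25"]]}
#
#   $ python iarai.py people.json
#   RA> p_name (people)       # pi: project the name attribute
#   RA> s_age=30 (people)     # sigma: select tuples where age = 30
#   RA> r_years/age (people)  # rho: rename age to years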
|
pcapriotti/pledger | pledger/rule.py | Python | mit | 1,570 | 0.000637
|
import itertools
class RuleCollection(object):
    def __init__(self):
self.rules = {}
def add_rule(self, rule, level=0):
self.rules.setdefault(level, [])
        self.rules[level].append(rule)
def apply(self, transaction, entry):
entries = [entry]
levels = list(self.rules.keys())
levels.sort()
for level in levels:
rules = self.rules[level]
new_entries = []
for entry in entries:
new_entries.append(entry)
for rule in rules:
new_entries += list(rule.apply(transaction, entry))
entries = new_entries
return entries
class Generator(object):
def __init__(self, generator):
self.generator = generator
def __call__(self, *args):
return self.generator(*args)
def __add__(self, other):
@Generator
def result(*args):
return itertools.chain(self(*args), other(*args))
return result
Generator.null = Generator(lambda *args: [])
class Rule(object):
def __init__(self, filter, generator):
self.filter = filter
self.generator = generator
def apply(self, transaction, entry):
if self.filter(transaction, entry):
return self.generator(entry.info(transaction))
else:
return []
class TransactionRule(Rule):
def apply(self, transaction):
if self.filter(transaction, None):
return self.generator(transaction)
else:
return transaction
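# --- Usage sketch (added example, not part of the original module) ---
# Generators compose with `+`: the combined generator chains the output of
# both operands, and Generator.null contributes nothing. The lambdas below
# are illustrative stand-ins for real entry generators.
if __name__ == '__main__':
    double = Generator(lambda x: [x * 2])
    triple = Generator(lambda x: [x * 3])
    combined = double + triple + Generator.null
    assert list(combined(5)) == [10, 15]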
|
Maple0/Algorithm | leetcode/merge-overlapping-intervals.py | Python | mit | 1,961 | 0.042325
|
#Author: Maple0
#Github:https://github.com/Maple0
#4th Sep 2016
#Given a collection of intervals, merge all overlapping intervals.
#For example,
#Given [1,3],[2,6],[8,10],[15,18],
#return [1,6],[8,10],[15,18].
class Interval(object):
def __init__(self, s=0, e=0):
self.start = s
self.end = e
class Merge_ResultSet(object):
    def __init__(self, is_modified, merged_numbers):
self.is_modified = is_modified
self.merged_numbers = merged_numbers
class Solution(object):
def inner_merge(self,numbers):
is_modified=False
length=len(numbers)
merged_numbers=[numbers[0]]
for i in range(1,length):
c_start=numbers[i].start
c_end=numbers[i].end
check_status=0
for merged_num in merged_numbers:
m_start=merged_num.start
m_end=merged_num.end
if c_start >= m_start and c_end <=m_end:
check_status=1
if c_start < m_start and c_end>=m_start and c_end <= m_end:
merged_num.start=c_start
check_status=2
elif c_start >= m_start and c_start<=m_end and c_end > m_end:
merged_num.end=c_end
check_status=2
elif c_start<= m_start and c_end>=m_end:
if merged_num.start!=c_start:
merged_num.start=c_start
check_status=2
if merged_num.end!=c_end:
merged_num.end=c_end
check_status=2
if check_status==0:
merged_numbers.append(numbers[i])
if check_status==2:
is_modified=True
return Merge_ResultSet(is_modified,merged_numbers)
def merge(self, numbers):
length=len(numbers)
if length < 2:
return numbers
result=self.inner_merge(numbers)
while result.is_modified==True:
result=self.inner_merge(numbers)
return result.merged_numbers
num3=[Interval(1,3),Interval(0,6),Interval(7,7),Interval(8,9),Interval(0,10)]
results=Solution().merge(num3)
for x in results:
print(x.start,x.end)
|
shenfei/oj_codes | leetcode/python/n219_Contains_Duplicate_II.py | Python | mit | 659 | 0.003035
|
from collections import defaultdict
class Solution:
    def containsNearbyDuplicate(self, nums, k):
        """
        :type nums: List[int]
:type k: int
:rtype: bool
"""
indices = defaultdict(list)
for i, x in enumerate(nums):
indices[x].append(i)
for _, v in indices.items():
if len(v) <= 1:
continue
v.sort()
for i in range(len(v) - 1):
if v[i + 1] - v[i] <= k:
return True
return False
if __name__ == "__main__":
sol = Solution()
x = [-1, -1]
print(sol.containsNearbyDuplicate(x, 1))
|
mjn19172/Savu | savu/test/simple_tomo_test.py | Python | apache-2.0 | 1,360 | 0.001471
|
# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: tomo_recon
:platform: Unix
:synopsis: runner for tests using the MPI framework
.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>
"""
import unittest
import tempfile
from savu.test import test_utils as tu
from savu.test.plugin_runner_test import run_protected_plugin_runner
class SimpleTomoTest(unittest.TestCase):
def test_process(self):
options = {
"transport": "hdf5",
"process_names": "CPU0",
"data_file": tu.get_test_data_path('24737.nxs'),
"process_file": tu.get_test_data_path('simple_recon_test_process.nxs'),
"out_path": tempfile.mkdtemp()
}
        run_protected_plugin_runner(options)
if __name__ == "__main__":
unittest.main()
|
susanctu/Crazyfish-Public | webgen/webgen.py | Python | mit | 8,518 | 0.008922
|
import sys
import re
import argparse
import os.path
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
tag_re = (re.compile('(%s.*?%s)' %
(re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END))))
name_extract_re = (re.compile('%s\W*block\W+([^\W]+)\W*?%s' %
(re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END))))
endblock_re = (re.compile('%s\W*endblock\W*?%s' %
(re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END))))
# for string formatting
JS_FORMAT = '$(\"%s\" ).on(\"%s\", function() { window.location = \"%s\"});'
MALFORMED_CONFIG_FORMAT = 'Mandatory definition of %s missing from config file.\n'
class TagError(Exception):
pass
class OverwriteError(Exception):
pass
def tokenize(template_string):
"""
Return a list of tokens from a given template_string.
Taken from the Django template code.
"""
in_tag = False
result = []
for bit in tag_re.split(template_string):
if bit:
result.append((bit, in_tag))
in_tag = not in_tag
return result
def is_start_tag(token):
"""
Args:
        token: a single template token (string)
"""
match = name_extract_re.search(token)
if match:
return True
else:
return False
def get_block_name(token):
"""
Assumes that token is a start tag.
Args:
        token: a single template token (string)
"""
match = name_extract_re.search(token)
if match:
return match.group(1)
else:
raise TagError('Failed to extract block name from %s' % token)
def is_end_tag(token):
"""
Args:
        token: a single template token (string)
"""
match = endblock_re.search(token)
if match:
return True
else:
return False
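# --- Illustrative example (added, not part of the original file) ---
# For a template string such as
#   'before {% block central %}body{% endblock %} after'
# tokenize() returns alternating (text, in_tag) pairs:
#   [('before ', False), ('{% block central %}', True),
#    ('body', False), ('{% endblock %}', True), (' after', False)]
# is_start_tag()/get_block_name() then recognise the opening tag and extract
# the block name 'central'.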
def generate_page(template, new_page, block_content, clobber=False, flow_info=None):
"""
Takes in the name of the template, the name of the page to be generated,
a dictionary mapping block names to content they should be replaced with,
and optional flow information (a map of classes/id's mapping to tuples
(event, page to redirect to)).
Raises:
TagError and Exception.
"""
if not clobber and os.path.isfile(new_page):
raise OverwriteError('%s already exists. (use --clobber to overwrite)' % new_page)
else:
output = open(new_page, 'w')
# open the template and tokenize it
src = open(template, 'r')
tokens = tokenize(src.read())
src.close()
tag_depth = 0
repl_tag_depth = 0 # start counting whenever we enter a block that is supposed to be replaced
# repl_block is the name of the block to replace, None means we're not in one
repl_block = None
for token, is_tag in tokens:
if not is_tag and not repl_block:
output.write(token)
elif not is_tag: # but in a block that should be replaced
pass
elif is_tag and repl_block:
# so this could be an unreferenced start tag
if is_start_tag(token):
if get_block_name(token) in block_content:
raise TagError('Cannot replace 2 blocks when one nested inside other, here %s is inside %s.'
% (get_block_name(token), repl_block))
else:
repl_tag_depth += 1
tag_depth += 1
else: # or an endtag
repl_tag_depth -= 1
tag_depth -=1
if repl_tag_depth == 0:
# write the replacement text
output.write(block_content[repl_block])
repl_block = None
else: # is_tag and not repl_block
if is_start_tag(token):
tag_depth += 1
if get_block_name(token) in block_content:
repl_block = get_block_name(token)
repl_tag_depth += 1
else: # endblock
tag_depth -= 1
if tag_depth < 0:
raise TagError('Found more endtags than start tags.')
output.write('<script src=\"https://code.jquery.com/jquery.js\"></script>')
output.write('<script>')
if flow_info: # TODO (susanctu): this works but SHOULD go before the last html tag
for class_or_id in flow_info.keys():
output.write(JS_FORMAT % (class_or_id,
flow_info[class_or_id][0],
flow_info[class_or_id][1]))
output.write('</script>')
output.close()
def load_config(config_file):
"""
Opens config_file, which is executed as a python script.
(Not exactly safe, but since the user is running this on his/her own
computer, we don't bother to do anything more secure.) Checks that the
config file defines PAGES and FLOWS.
PAGES should be defined as follows:
PAGES = {'index1.html': ('index_template.html', {}),
'index2.html': ('index_template.html',
{'central': 'replacement text'})}
    Each value in the PAGES dictionary is a 2-tuple containing the template to
    generate the page from and a dictionary mapping from block names to the text
    to replace the current block contents with.
Blocks should be specified in templates as
{% block central %}
contents of block, blah, blah, blah
{% endblock %}
    where the block can be given any name without whitespace (here, the block
is called 'central')
Note that in the above example, index.html is just index_template with all
block tags removed but their contents preserved (i.e., if you don't specify
a block by name in PAGES but it exists in the template, the page will be
generated with just the tags stripped.)
FLOWS should be defined as follows:
FLOWS = {'index1.html': {'.central': ('click', 'index2.html')},
'index2.html': {'.central': ('click', 'index3.html')}}
where each value in FLOWS is a dictionary mapping from classes/ids to
2-tuples of jquery events and the page that we should navigate to
when the event happens on an element with that class/id.
It is ok for FLOWS to be empty.
Args:
config_file: the path to the configuration file (string)
Returns:
Tuple containing PAGES and FLOWS.
"""
f = open(config_file, 'r')
    exec(f.read())
try:
PAGES
except NameError:
sys.stderr.write(MALFORMED_CONFIG_FORMAT % 'PAGES')
try:
FLOWS
except NameError:
sys.stderr.write(MALFORMED_CONFIG_FORMAT % 'FLOWS')
return (PAGES, FLOWS)
def main():
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument(
'config_file', help='a configfile to tell this tool what to generate')
parser.add_argument('--clobber', '-c',
action='store_true',
help='ok to overwrite files')
args = parser.parse_args()
# load config
PAGES, FLOWS = load_config(args.config_file)
# generate each page specified in the config
# with appropriate navigation between them
for new_page, src_info in PAGES.items():
if len(src_info) != 2:
sys.stderr.write(
'Template and fill-in info pair %s contains too many elements.\n' % str(src_info))
exit(1)
if type(src_info[1]) is not dict:
sys.stderr.write(
'Did not get expected block / replacement pairs: %s.\n' % str(src_info))
exit(1)
try:
if new_page in FLOWS:
generate_page(src_info[0],
new_page,
src_info[1],
args.clobber,
FLOWS[new_page])
else:
sys.stderr.write(
'WARNING: No FLOWS found for navigation away from %s\n' % new_page)
generate_page(src_info[0], new_page, src_info[1], args.clobber)
except (TagError, Overw
|
pylbert/upm | examples/python/lsm303d.py | Python | mit | 2,423 | 0.001651
|
#!/usr/bin/env python
# Author: Jon Trulson <jtrulson@ics.com>
# Copyright (c) 2017 Intel Corporation.
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_lsm303d as sensorObj
def main():
    # Instantiate an LSM303D instance using the default I2C bus and address
    sensor = sensorObj.LSM303D()
## Exit handlers ##
# This function stops python from printing a stacktrace when you
# hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
    # Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
# now output data every 250 milliseconds
while (1):
sensor.update()
data = sensor.getAccelerometer()
print("Accelerometer x:", data[0], end=' ')
print(" y:", data[1], end=' ')
print(" z:", data[2], end=' ')
print(" g")
data = sensor.getMagnetometer()
print("Magnetometer x:", data[0], end=' ')
print(" y:", data[1], end=' ')
print(" z:", data[2], end=' ')
print(" uT")
print("Temperature: ", sensor.getTemperature())
print()
time.sleep(.250)
if __name__ == '__main__':
main()
|
samihuc/PolyglotDB | polyglotdb/query/lexicon/query.py | Python | mit | 1,736 | 0.004608
|
from ..base import BaseQuery
class LexiconQuery(BaseQuery):
def __init__(self, corpus, to_find):
super(LexiconQuery, self).__init__(corpus, to_find)
    def create_subset(self, label):
        """
        Create a subset with the given label for the type node being queried.
        """
labels_to_add = []
if self.to_find.node_type not in self.corpus.hierarchy.subset_types or \
label not in self.corpus.hierarchy.subset_types[self.to_find.node_type]:
labels_to_add.append(label)
super(LexiconQuery, self).create_subset(label)
if labels_to_add:
            self.corpus.hierarchy.add_type_labels(self.corpus, self.to_find.node_type, labels_to_add)
self.corpus.encode_hierarchy()
def remove_subset(self, label):
""" removes all token labels"""
super(LexiconQuery, self).remove_subset(label)
self.corpus.hierarchy.remove_type_labels(self.corpus, self.to_find.node_type, [label])
def set_properties(self, **kwargs):
"""
Set properties of the returned tokens.
"""
props_to_remove = []
props_to_add = []
for k, v in kwargs.items():
if v is None:
props_to_remove.append(k)
else:
if not self.corpus.hierarchy.has_type_property(self.to_find.node_type, k):
props_to_add.append((k, type(kwargs[k])))
super(LexiconQuery, self).set_properties(**kwargs)
if props_to_add:
self.corpus.hierarchy.add_type_properties(self.corpus, self.to_find.node_type, props_to_add)
if props_to_remove:
self.corpus.hierarchy.remove_type_properties(self.corpus, self.to_find.node_type, props_to_remove)
|
codinuum/cca | python/src/cca/factutil/rdf.py | Python | apache-2.0 | 9,601 | 0
|
#!/usr/bin/env python3
'''
Factutil: helper scripts for source code entities
Copyright 2012-2021 Codinuum Software Lab <https://codinuum.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import os
import gzip
import tempfile
from functools import reduce
import rdflib
from rdflib.namespace import XSD
import logging
logger = logging.getLogger()
def uri_split(uri):
lname = uri.split('/')[-1].split('#')[-1]
ns = uri[:len(uri)-len(lname)]
return ns, lname
class RDFNode(object):
def __init__(self, nd):
self._valid = True
self._node = nd
def __eq__(self, other):
res = False
if isinstance(other, RDFNode):
res = self._node == other._node
return res
def is_valid(self):
return self._valid
def as_node(self):
return self._node
class Resource(RDFNode):
def __init__(self, uri=None, **args):
nd = args.get('node', None)
if nd is not None:
RDFNode.__init__(self, nd)
else:
if uri is not None:
try:
RDFNode.__init__(self, rdflib.term.URIRef(uri))
except Exception:
logger.warning('uri="%s"(%s)' % (uri, str(type(uri))))
raise
else:
RDFNode.__init__(self, rdflib.term.BNode()) # blank node
def __eq__(self, other):
res = False
if isinstance(other, Resource):
if isinstance(self._node, rdflib.term.URIRef) \
and isinstance(other._node, rdflib.term.URIRef):
res = self.get_uri() == other.get_uri()
else:
res = self._node == other._node
return res
def __lt__(self, other):
return str(self.get_uri()) < str(other.get_uri())
def __gt__(self, other):
return str(self.get_uri()) > str(other.get_uri())
    def __le__(self, other):
        return self.__eq__(other) or self.__lt__(other)
    def __ge__(self, other):
        return self.__eq__(other) or self.__gt__(other)
def __hash__(self):
return str(self.get_uri()).__hash__()
def __str__(self):
return '<%s>' % self.get_uri()
def get_uri(self):
return str(str(self.as_node()))
    def get_namespace(self):
ns, ln = uri_split(self.get_uri())
return ns
def get_local_name(self):
ns, ln = uri_split(self.get_uri())
return ln
class Literal(RDFNode):
def __init__(self, literal="", **args):
nd = args.get('node', None)
if nd is not None:
RDFNode.__init__(self, nd)
else:
RDFNode.__init__(self, rdflib.Literal(literal, **args))
def __eq__(self, other):
res = False
if isinstance(other, Literal):
res = self._node.eq(other._node)
return res
def __str__(self):
return '"%s"' % self.get_content()
def get_content(self):
return self._node.value
def make_literal(x):
lit = None
if isinstance(x, bool):
lit = Literal(literal=str(x).lower(), datatype=XSD.boolean)
elif isinstance(x, int):
if x >= 0:
lit = Literal(literal=str(x), datatype=XSD.nonNegativeInteger)
else:
lit = Literal(literal=str(x), datatype=XSD.integer)
elif isinstance(x, float):
lit = Literal(literal=str(x), datatype=XSD.double)
# elif isinstance(x, str):
# lit = Literal(literal=x.encode('utf-8'))
else:
lit = Literal(literal=str(x))
return lit
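# --- Illustrative example (added, not part of the original file) ---
# make_literal() maps plain Python values onto typed RDF literals; the
# values below are illustrative only:
#   make_literal(True)   -> "true"  typed xsd:boolean
#   make_literal(42)     -> "42"    typed xsd:nonNegativeInteger
#   make_literal(-7)     -> "-7"    typed xsd:integer
#   make_literal(3.14)   -> "3.14"  typed xsd:double
#   make_literal('text') -> "text"  as an untyped literal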
class Predicate(Resource):
def __init__(self, ns=None, lname=None, **args):
self._lname = None
self._ns = None
uri = None
node = args.get('node', None)
if ns is None or lname is None:
uri = args.get('uri', None)
if uri is None:
if node is not None:
if isinstance(node, rdflib.term.URIRef):
uri = str(str(node))
if uri is not None:
self._ns, self._lname = uri_split(uri)
else:
self._ns = ns
self._lname = lname
uri = ns + lname
Resource.__init__(self, uri, **args)
def __str__(self):
return '<%s>' % self.get_uri()
def get_namespace(self):
return self._ns
def get_local_name(self):
return self._lname
class Statement(object):
def __init__(self, subject=None, predicate=None, object=None, **args):
try:
stmt = args['statement']
self.subject = stmt.subject
self.predicate = stmt.predicate
self.object = stmt.object
self._stmt = stmt._stmt
except KeyError:
            self.subject = subject
self.predicate = predicate
self.object = object
s = None
p = None
o = None
if isinstance(subject, Resource):
s = subject.as_node()
if isinstance(predicate, Predicate):
p = predicate.as_node()
            if isinstance(object, RDFNode):
o = object.as_node()
self._stmt = (s, p, o)
def __eq__(self, other):
res = False
if isinstance(other, Statement):
res = reduce(lambda x, y: x and y,
[self.subject == other.subject,
self.predicate == other.predicate,
self.object == other.object])
return res
class Graph(object):
def __init__(self, ns_tbl, large=False):
if large:
self._model = rdflib.graph.Graph('BerkeleyDB')
else:
self._model = rdflib.graph.Graph('Memory')
self._g_pred_map = {}
self._pred_tbl = {}
self.l_true = Literal('true')
self.l_false = Literal('false')
self.namespace_tbl = ns_tbl
def set_namespace(self, prefix, uri):
self.namespace_tbl[prefix] = uri
def contains(self, s, p, o):
stmt = self._create_statement(s, p, o)
return (stmt in self._model)
def find_statements(self, t):
return self._model.triples(t)
def size(self):
return len(self._model)
def _add(self, subj, pred, obj):
self._model.add((subj.as_node(), pred.as_node(), obj.as_node()))
def _create_statement(self, subj, pred, obj):
s = None
p = None
o = None
if subj:
s = subj.as_node()
if pred:
p = pred.as_node()
if obj:
o = obj.as_node()
return (s, p, o)
def _guess_fmt(self, path):
fmt = ''
if path.endswith('.nt'):
fmt = 'nt'
elif path.endswith('.ttl'):
fmt = 'turtle'
elif path.endswith('.rdf'):
fmt = 'xml'
if path.endswith('.nt.gz'):
fmt = 'nt'
elif path.endswith('.ttl.gz'):
fmt = 'turtle'
elif path.endswith('.rdf.gz'):
fmt = 'xml'
return fmt
def _mktemp(self):
(fd, path) = tempfile.mkstemp()
os.close(fd)
return path
def _gzipped(self, path):
return path.endswith('.gz')
def _gzip(self, from_file, to_file):
f_from = open(from_file, 'rb')
f_to = gzip.open(to_file, 'wb')
f_to.writelines(f_from)
f_to.close()
f_from.close()
def _gunzip(self, from_file, to_file):
f_from = gzip.open(from_file, 'rb')
f_to = open(to_file, 'wb')
f_to.writelines(f_from)
f_to.close()
f_from.close()
def write(self, path, fmt='', base_uri=None
|
Yellowen/Owrang | accounts/doctype/account/test_account.py | Python | agpl-3.0 | 1,859 | 0.024744
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
def make_test_records(verbose):
    from webnotes.test_runner import make_test_objects
accounts = [
# [account_name, parent_account, group_or_ledger]
["_Test Account Bank Account", "Bank Accounts", "Ledger"],
["_Test Account Stock Expenses", "Direct Expenses", "Group"],
["_Test Account Shipping Charges", "_Test Account Stock Expenses", "Ledger"],
["_Test Account Customs Duty", "_Test Account Stock Expenses", "Ledger"],
["_Test Account Tax Assets", "Current Assets", "Group"],
["_Test Account VAT", "_Test Account Tax Assets", "Ledger"],
["_Test
|
Account Service Tax", "_Test Account Tax Assets", "Ledger"],
["_Test Account Reserves and Surplus", "Current Liabilities", "Ledger"],
["_Test Account Cost for Goods Sold", "Expenses", "Ledger"],
["_Test Account Excise Duty", "_Test Account Tax Assets", "Ledger"],
["_Test Account Education Cess", "_Test Account Tax Assets", "Ledger"],
["_Test Account S&H Education Cess", "_Test Account Tax Assets", "Ledger"],
["_Test Account CST", "Direct Expenses", "Ledger"],
["_Test Account Discount", "Direct Expenses", "Ledger"],
# related to Account Inventory Integration
["_Test Account Stock In Hand", "Current Assets", "Ledger"],
["_Test Account Fixed Assets", "Current Assets", "Ledger"],
]
for company, abbr in [["_Test Company", "_TC"], ["_Test Company 1", "_TC1"]]:
test_objects = make_test_objects("Account", [[{
"doctype": "Account",
"account_name": account_name,
"parent_account": parent_account + " - " + abbr,
"company": company,
"group_or_ledger": group_or_ledger
}] for account_name, parent_account, group_or_ledger in accounts])
return test_objects
|
raildo/nova | nova/network/neutronv2/api.py | Python | apache-2.0 | 88,749 | 0.000237
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved
# Copyright (c) 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import time
import uuid
from keystoneclient import auth
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth import token_endpoint
from keystoneclient import session
from neutronclient.common import exceptions as neutron_client_exc
from neutronclient.v2_0 import client as clientv20
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
import six
from nova.api.openstack import extensions
from nova.compute import utils as compute_utils
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
from nova.network import base_api
from nova.network import model as network_model
from nova.network.neutronv2 import constants
from nova import objects
from nova.pci import manager as pci_manager
from nova.pci import request as pci_request
from nova.pci import whitelist as pci_whitelist
neutron_opts = [
cfg.StrOpt('url',
default='http://127.0.0.1:9696',
help='URL for connecting to neutron'),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('admin_user_id',
deprecated_for_removal=True,
help='User id for connecting to neutron in admin context. '
'DEPRECATED: specify an auth_plugin and appropriate '
'credentials instead.'),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('admin_username',
deprecated_for_removal=True,
help='Username for connecting to neutron in admin context '
'DEPRECATED: specify an auth_plugin and appropriate '
'credentials instead.'),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('admin_password',
deprecated_for_removal=True,
help='Password for connecting to neutron in admin context '
'DEPRECATED: specify an auth_plugin and appropriate '
'credentials instead.',
secret=True),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('admin_tenant_id',
deprecated_for_removal=True,
help='Tenant id for connecting to neutron in admin context '
'DEPRECATED: specify an auth_plugin and appropriate '
'credentials instead.'),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('admin_tenant_name',
deprecated_for_removal=True,
help='Tenant name for connecting to neutron in admin context. '
'This option will be ignored if neutron_admin_tenant_id '
'is set. Note that with Keystone V3 tenant names are '
'only unique within a domain. '
'DEPRECATED: specify an auth_plugin and appropriate '
'credentials instead.'),
cfg.StrOpt('region_name',
help='Region name for connecting to neutron in admin context'),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('admin_auth_url',
default='http://localhost:5000/v2.0',
deprecated_for_removal=True,
help='Authorization URL for connecting to neutron in admin '
'context. DEPRECATED: specify an auth_plugin and '
'appropriate credentials instead.'),
# deprecated in Kilo, may be removed in Mitaka
# NOTE(mikal): we could have removed in Liberty, but we forgot to set
# deprecated_for_removal for this flag so no warnings were emitted.
cfg.StrOpt('auth_strategy',
default='keystone',
deprecated_for_removal=True,
help='Authorization strategy for connecting to neutron in '
'admin context. DEPRECATED: specify an auth_plugin and '
'appropriate credentials instead. If an auth_plugin is '
'specified strategy will be ignored.'),
# TODO(berrange) temporary hack until Neutron can pass over the
# name of the OVS bridge it is configured with
cfg.StrOpt('ovs_bridge',
default='br-int',
help='Name of Integration Bridge used by Open vSwitch'),
cfg.IntOpt('extension_sync_interval',
default=600,
help='Number of seconds before querying neutron for'
' extensions'),
]
NEUTRON_GROUP = 'neutron'
CONF = cfg.CONF
CONF.register_opts(neutron_opts, NEUTRON_GROUP)
deprecations = {'cafile': [cfg.DeprecatedOpt('ca_certificates_file',
group=NEUTRON_GROUP)],
'insecure': [cfg.DeprecatedOpt('api_insecure',
group=NEUTRON_GROUP)],
'timeout': [cfg.DeprecatedOpt('url_timeout',
group=NEUTRON_GROUP)]}
_neutron_options = session.Session.register_conf_options(
CONF, NEUTRON_GROUP, deprecated_opts=deprecations)
auth.register_conf_options(CONF, NEUTRON_GROUP)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
soft_external_network_attach_authorize = extensions.soft_core_authorizer(
'network', 'attach_external_network')
_SESSION = None
_ADMIN_AUTH = None
def list_opts():
list = copy.deepcopy(_neutron_options)
list.insert(0, auth.get_common_conf_options()[0])
# NOTE(dims): There are a lot of auth plugins, we just generate
# the config options for a few common ones
plugins = ['password', 'v2password', 'v3password']
for name in plugins:
for plugin_option in auth.get_plugin_class(name).get_options():
found = False
for option in list:
if option.name == plugin_option.name:
found = True
break
if not found:
list.append(plugin_option)
list.sort(key=lambda x: x.name)
return [(NEUTRON_GROUP, list)]
def reset_state():
global _ADMIN_AUTH
global _SESSION
_ADMIN_AUTH = None
_SESSION = None
def _load_auth_plugin(conf):
auth_plugin = auth.load_from_conf_options(conf, NEUTRON_GROUP)
if auth_plugin:
return auth_plugin
if conf.neutron.auth_strategy == 'noauth':
if not conf.neutron.url:
message = _('For "noauth" authentication strategy, the '
'endpoint must be specified conf.neutron.url')
raise neutro
|
jldaniel/Gaia | Algorithms/algorithm_base.py | Python | mit | 29,920 | 0.000836
|
__author__ = 'jdaniel'
import copy
import random
import itertools
import operator
import math
import struct
import os
import sys
import json
from collections import defaultdict
class AlgorithmBase(object):
def __init__(self, objective_function):
"""
Base Algorithm class which contains utility functionality
common to all other algorithms and acts as the standalone
API for Algorithm usage.
:param objective_function: <function> The model function to be used
def my_objective(x):
f = list_of_objective_values
h = list_of_equality_constraint_values
g = list_of_inequality_constraint_values
return [f,h,g]
:return: None
"""
self._objective_function = objective_function
self._variables = []
self._equality_constraints = []
self._inequality_constraints = []
self._objectives = []
# Algorithm Options
self._pop_size = None
self._generations = None
self._conv_tol = None
self._eqcon_tol = None
self._seed = None
self._eta_c = None
self._eta_m = None
self._p_cross = None
self._p_mut = None
self._islands = None
self._epoch = None
self._migrants = None
self._spheres = None
# Problem information
self._ndim = None
self._neqcon = None
self._nneqcon = None
self._lower_bound = []
self._upper_bound = []
# Data objects
self._history = History()
self._archive = Archive()
self._metadata = Metadata()
# Random number generator
self._rnd = random.Random()
def register_variable(self, name, lower, upper):
"""
Register a decision variable with the algorithm
:param name: <string> Reference name of the decision variable
:param lower: <float> Lower bound for the variable
:param upper: <float> Upper bound for the variable
:return: None
"""
var = Variable(name, lower, upper)
self._variables.append(var)
def register_constraint(self, name, ctype):
"""
Register a constraint variable with the algorithm
:param name: <string> Reference name of the constraint variable
:param ctype: <string> Set constraint type, 'e': equality constraint; 'i': inequality constraint
:return: None
"""
con = Constraint(name)
if ctype == 'e':
self._equality_constraints.append(con)
elif ctype == 'i':
self._inequality_constraints.append(con)
else:
err_msg = 'Unrecognized constraint type ' + repr(ctype)
raise AlgorithmException(err_msg)
def register_objective(self, name):
"""
Register an objective variable with the algorithm
:param name: <string> Reference name of the objective variable
:return: None
"""
obj = Objective(name)
self._objectives.append(obj)
def set_options(self, option, value):
"""
Set an algorithm option value
:param option: <string> Name of the option to set
:param value: <int, float> Value of the option to set
:return: None
"""
if option == 'population_size':
self.check_population_size(value)
self._pop_size = value
elif option == 'generations':
self.check_generations(value)
self._generations = value
elif option == 'conv_tol':
self.check_conv_tol(value)
self._conv_tol = value
elif option == 'eqcon_tol':
self.check_eqcon_tol(value)
self._eqcon_tol = value
elif option == 'eta_c':
self.check_eta_c(value)
self._eta_c = value
elif option == 'eta_m':
self.check_eta_m(value)
self._eta_m = value
elif option == 'p_cross':
self.check_p_cross(value)
self._p_cross = value
elif option == 'p_mut':
self.check_p_mut(value)
self._p_mut = value
elif option == 'islands':
self.check_islands(value)
self._islands = value
elif option == 'epoch':
self.check_epoch(value)
self._epoch = value
elif option == 'migrants':
self.check_migrants(value)
self._migrants = value
elif option == 'spheres':
self.check_spheres(value)
self._spheres = value
elif option == 'seed':
self.set_seed(value)
else:
err_msg = 'Unrecognized option ' + repr(option)
raise AlgorithmException(err_msg)
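    # --- Illustrative example (added, not part of the original class) ---
    # A typical setup, assuming an objective function with the signature
    # documented in __init__; names, bounds and option values are
    # illustrative only:
    #   algo = AlgorithmBase(my_objective)
    #   algo.register_variable('x1', lower=0.0, upper=1.0)
    #   algo.register_constraint('g1', ctype='i')   # inequality constraint
    #   algo.register_objective('f1')
    #   algo.set_options('population_size', 40)     # must be divisible by 4
    #   algo.set_options('seed', 0)                 # 0 -> draw a random seed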
def set_seed(self, value):
"""
Set the seed value for the optimisation
:param value: Value to set
:return: None
"""
if value == 0:
self._seed = struct.unpack("<L", os.urandom(4))[0]
else:
self._seed = value
self._rnd.seed(self._seed)
@staticmethod
def check_population_size(value):
"""
Check the population value
:param value: Value to set
:return:
"""
# Check if integer
if not isinstance(value, (int, long)):
err_msg = 'Population is not an integer'
raise AlgorithmException(err_msg)
# Check if greater than zero
if value <= 0:
err_msg = 'Population size must be greater than zero'
raise AlgorithmException(err_msg)
# Check if divisible by 4
if value % 4 != 0:
err_msg = 'Population size must be evenly divisible by four'
raise AlgorithmException(err_msg)
@staticmethod
def check_generations(value):
"""
Check the generations value
:param value: Value to set
:return: None
"""
if value <= 0:
            err_msg = 'The generations value must be an integer greater than 0'
raise AlgorithmException(err_msg)
@staticmethod
def check_conv_tol(value):
"""
Check the convergence tolerance value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value >= 1.0 or value <= 0.0:
err_msg = 'The convergence tolerance value conv_tol must be between (0.0, 1.0)'
raise AlgorithmException(err_msg)
@staticmethod
def check_eqcon_tol(value):
"""
Check the equality constraint tolerance value
:param value: Value to set
:return: None
"""
# Check if greater than 0
if value <= 0.0:
err_msg = 'The equality constraint tolerance value eqcon_tol must be greater than 0'
raise AlgorithmException(err_msg)
@staticmethod
def check_eta_c(value):
"""
Check the crossover distribution index value
:param value: Value to set
:return: None
"""
# Check if greater than zero
if value <= 0:
err_msg = 'The crossover distribution index eta_c must be greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_eta_m(value):
"""
Check the mutation distribution index value
:param value: Value to set
:return: None
"""
# Check if greater than zero
if value <= 0:
err_msg = 'The mutation distribution index eta_m must be greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_p_cross(value):
"""
Check the crossover probability value
        :param value: Value to set
        :return: None
        """
        # Check if between (0.0, 1.0)
        if value < 0.0 or value > 1.0:
err_msg = 'The crossover probability p_cross must be between 0.0 and 1.0'
raise AlgorithmException(err_msg)
@staticmethod
def check_p_mut(value):
"""
Check the mutation probability value
:p
|
kramwens/order_bot | venv/lib/python2.7/site-packages/tests/pricing/test_messaging_countries.py | Python | mit | 3,318 | 0
|
import unittest
from mock import patch
from nose.tools import assert_equal
from tests.tools import create_mock_json
from twilio.rest.resources.pricing.messaging_countries import (
MessagingCountries
)
AUTH = ("AC123", "token")
BASE_URI = "https://pricing.twilio.com/v1"
class MessagingCountriesTest(unittest.TestCase):
@patch('twilio.rest.resources.base.make_twilio_request')
def test_messaging_countries(self, request):
resp = create_mock_json(
'tests/resources/pricing/messaging_countries_list.json')
resp.status_code = 200
request.return_value = resp
countries = MessagingCountries(BASE_URI + "/Messaging", AUTH)
result = countries.list()
assert_equal(result[0].iso_country, "AT")
assert_equal(len(result), 2)
request.assert_called_with(
"GET",
"{0}/Messaging/Countries".format(BASE_URI),
auth=AUTH,
use_json_extension=False,
params={}
)
@patch('twilio.rest.resources.base.make_twilio_request')
def test_messaging_country(self, request):
resp = create_mock_json(
'tests/resources/pricing/messaging_countries_instance.json')
resp.status_code = 200
request.return_value = resp
countries = MessagingCountries(BASE_URI + "/Messaging", AUTH)
result = countries.get('US')
assert_equal(result.iso_country, "US")
assert_equal(result.price_unit, "usd")
assert_equal(result.outbound_sms_prices[0]['mcc'], "311")
assert_equal(result.outbound_sms_prices[0]['mnc'], "484")
assert_equal(result.outbound_sms_prices[0]['carrier'], "Verizon")
prices = result.outbound_sms_prices[0]['prices']
assert_equal(prices[0]['number_type'], "mobile")
assert_equal(prices[0]['base_price'], "0.0075")
assert_equal(prices[0]['current_price'], "0.0070")
assert_equal(prices[1]['number_type'], "local")
assert_equal(prices[1]['base_price'], "0.0075")
assert_equal(prices[1]['current_price'], "0.0070")
assert_equal(prices[2]['number_type'], "shortcode")
assert_equal(prices[2]['base_price'], "0.01")
assert_equal(prices[2]['current_price'], "0.01")
assert_equal(prices[3]['number_type'], "toll-free")
assert_equal(prices[3]['base_price'], "0.0075")
assert_equal(prices[3]['current_price'], "0.0075")
inbound_sms_prices = result.inbound_sms_prices
assert_equal(inbound_sms_prices[0]['number_type'], "local")
assert_equal(inbound_sms_prices[0]['base_price'], "0.0075")
assert_equal(inbound_sms_prices[0]['current_price'], "0.0075")
        assert_equal(inbound_sms_prices[1]['number_type'], "shortcode")
        assert_equal(inbound_sms_prices[1]['base_price'], "0.0075")
        assert_equal(inbound_sms_prices[1]['current_price'], "0.005")
assert_equal(inbound_sms_prices[2]['number_type'], "toll-free")
assert_equal(inbound_sms_prices[2]['base_price'], "0.0075")
assert_equal(inbound_sms_prices[2]['current_price'], "0.0075")
request.assert_called_with(
"GET",
"{0}/Messaging/Countries/US".format(BASE_URI),
auth=AUTH,
use_json_extension=False,
)
|
mdmintz/seleniumspot | seleniumbase/plugins/selenium_plugin.py | Python | mit | 7,978 | 0
|
"""
This plugin gives the power of Selenium to nosetests
by providing a WebDriver object for the tests to use.
"""
from nose.plugins import Plugin
from pyvirtualdisplay import Display
from seleniumbase.core import proxy_helper
from seleniumbase.fixtures import constants
class SeleniumBrowser(Plugin):
"""
The plugin for Selenium tests. Takes in key arguments and then
creates a WebDriver object. All arguments are passed to the tests.
The following command line options are available to the tests:
self.options.browser -- the browser to use (--browser)
self.options.server -- the server used by the test (--server)
self.options.port -- the port used by the test (--port)
self.options.proxy -- designates the proxy server:port to use. (--proxy)
self.options.headless -- the option to run headlessly (--headless)
self.options.demo_mode -- the option to slow down Selenium (--demo_mode)
self.options.demo_sleep -- Selenium action delay in DemoMode (--demo_sleep)
self.options.highlights -- # of highlight animations shown (--highlights)
self.options.message_duration -- Messenger alert time (--message_duration)
self.options.js_checking_on -- option to check for js errors (--check_js)
self.options.ad_block -- the option to block some display ads (--ad_block)
self.options.verify_delay -- delay before MasterQA checks (--verify_delay)
self.options.timeout_multiplier -- increase defaults (--timeout_multiplier)
"""
name = 'selenium' # Usage: --with-selenium
def options(self, parser, env):
super(SeleniumBrowser, self).options(parser, env=env)
parser.add_option(
'--browser', action='store',
dest='browser',
choices=constants.ValidBrowsers.valid_browsers,
default=constants.Browser.GOOGLE_CHROME,
help="""Specifies the web browser to use. Default: Chrome.
If you want to use Firefox, explicitly indicate that.
Example: (--browser=firefox)""")
parser.add_option(
'--browser_version', action='store',
dest='browser_version',
default="latest",
help="""The browser version to use. Explicitly select
a version number or use "latest".""")
parser.add_option(
'--cap_file', action='store',
dest='cap_file',
default=None,
help="""The file that stores browser desired capabilities
for BrowserStack or Sauce Labs web drivers.""")
parser.add_option(
'--server', action='store', dest='servername',
default='localhost',
help="""Designates the Selenium Grid server to use.
Default: localhost.""")
parser.add_option(
'--port', action='store', dest='port',
default='4444',
help="""Designates the Selenium Grid port to use.
Default: 4444.""")
parser.add_option(
'--proxy', action='store',
dest='proxy_string',
default=None,
help="""Designates the proxy server:port to use.
Format: servername:port. OR
username:password@servername:port OR
A dict key from proxy_list.PROXY_LIST
Default: None.""")
parser.add_option(
'--headless', action="store_true",
dest='headless',
default=False,
help="""Using this makes Webdriver run headlessly,
which is required on headless machines.""")
parser.add_option(
'--demo_mode', action="store_true",
dest='demo_mode',
default=False,
help="""Using this slows down the automation so that
you can see what it's actually doing.""")
parser.add_option(
'--demo_sleep', action='store', dest='demo_sleep',
default=None,
help="""Setting this overrides the Demo Mode sleep
time that happens after browser actions.""")
parser.add_option(
'--highlights', action='store',
dest='highlights', default=None,
help="""Setting this overrides the default number of
highlight animation loops to have per call.""")
parser.add_option(
'--message_duration', action="store",
dest='message_duration', default=None,
help="""Setting this overrides the default time that
messenger notifications remain visible when reaching
assert statements during Demo Mode.""")
parser.add_option(
'--check_js', action="store_true",
dest='js_checking_on',
default=False,
help="""The option to check for JavaScript errors after
every page load.""")
parser.add_option(
'--ad_block', action="store_true",
dest='ad_block_on',
default=False,
help="""Using this makes WebDriver block display ads
that are defined in ad_block_list.AD_BLOCK_LIST.""")
parser.add_option(
'--verify_delay', action='store',
dest='verify_delay', default=None,
help="""Setting this overrides the default wait time
before each MasterQA verification pop-up.""")
parser.add_option(
'--timeout_multiplier', action='store',
dest='timeout_multiplier',
default=None,
help="""Setting this overrides the default timeout
by the multiplier when waiting for page elements.
Unused when tests override the default value.""")
def configure(self, options, conf):
super(SeleniumBrowser, self).configure(options, conf)
self.enabled = True # Used if test class inherits BaseCase
self.options = options
self.headless_active = False # Default setting
proxy_helper.remove_proxy_zip_if_present()
def beforeTest(self, test):
test.test.browser = self.options.browser
test.test.cap_file = self.options.cap_file
test.test.headless = self.options.headless
test.test.servername = self.options.servername
test.test.port = self.options.port
test.test.proxy_string = self.options.proxy_string
test.test.demo_mode = self.options.demo_mode
test.test.demo_sleep = self.options.demo_sleep
test.test.highlights = self.options.highlights
test.test.message_duration = self.options.message_duration
test.test.js_checking_on = self.options.js_checking_on
test.test.ad_block_on = self.options.ad_block_on
test.test.verify_delay = self.options.verify_delay # MasterQA
test.test.timeout_multiplier = self.options.timeout_multiplier
test.test.use_grid = False
if test.test.servername != "localhost":
# Use Selenium Grid (Use --server=127.0.0.1 for localhost Grid)
test.test.use_grid = True
if self.options.headless:
self.display = Display(visible=0, size=(1920, 1200))
self.display.start()
self.headless_active = True
# The driver will be received later
self.driver = None
test.test.driver = self.driver
def finalize(self, result):
""" This runs after all tests have completed with nosetests. """
proxy_helper.remove_proxy_zip_if_present()
def afterTest(self, test):
try:
# If the browser window is still open, close it now.
self.driver.quit()
except AttributeError:
pass
except Exception:
pass
if self.options.headless:
if self.headless_active:
self.display.stop()
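# A hedged usage sketch: the test module name below is an illustrative assumption,
# while the flags map directly to the options this plugin registers above.
#
#   nosetests my_test_suite.py --with-selenium --browser=firefox --headless --demo_mode
#
# For a test run this way, beforeTest() has already copied the parsed options onto the
# test instance, so they are reachable as self.browser, self.servername, self.port, etc.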
|
nsavch/xanmel
|
xanmel/logcfg.py
|
Python
|
gpl-3.0
| 723
| 0
|
def logging_config(level):
return {
'version': 1,
'propagate': True,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': level,
'formatter': 'simple'
}
},
# 'loggers': {
# 'asyncio': {
# 'level': 'DEBUG',
# 'handlers': ['console']
# },
# },
'root': {
'level': 'DEBUG',
'handlers': ['console']
}
}
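# A minimal usage sketch (assumption: applied once at program startup using the
# standard library's logging.config):
#
#   import logging
#   import logging.config
#   logging.config.dictConfig(logging_config('INFO'))
#   logging.getLogger(__name__).info('console logging configured')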
|
Justyer/KuaikanSpider
|
KuaikanSpider/KuaikanSpider/pipelines.py
|
Python
|
mit
| 1,794
| 0.002787
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
import codecs
import scrapy
from collections import OrderedDict
from scrapy.pipelines.images import ImagesPipeline
from scrapy.exceptions import DropItem
from KuaikanSpider.items import *
class KuaikanspiderPipeline(object):
def process_item(self, item, spider):
return item
class JsonPipeline(object):
def __init__(self):
self.file = codecs.open('newimg.json', 'wb', encoding='utf-8')
def process_item(self, item, spider):
line = ''
picindex = 1
for image_title, image_url, image_path in zip(item['image_titles'], item['image_urls'], item['image_paths']):
new_item = ImgSingleItem()
new_item['image_character'] = item['image_character']
new_item['image_character'] = picindex
new_item['image_title'] = image_title
new_item['image_url'] = image_url
new_item['image_path'] = image_path
line += json.dumps(OrderedDict(new_item), ensure_ascii=False, sort_keys=False) + '\n'
picindex = picindex + 1
self.file.write(line)
return item
class ImgDownloadPipeline(ImagesPipeline):
def get_media_requests(self, item, info):
if item['image_urls'] is not None:
for image_url in item['image_urls']:
yield scrapy.Request(image_url)
def item_completed(self, results, item, info):
image_paths = [x['path'] for ok, x in results if ok]
if not image_paths:
raise DropItem('Item contains no images')
item['image_paths'] = image_paths
return item
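# A hedged wiring sketch for the project settings; the priority numbers and the
# IMAGES_STORE path are illustrative assumptions, while the dotted paths follow this module:
#
#   ITEM_PIPELINES = {
#       'KuaikanSpider.pipelines.ImgDownloadPipeline': 300,
#       'KuaikanSpider.pipelines.JsonPipeline': 800,
#   }
#   IMAGES_STORE = 'images'  # required by ImagesPipeline to store downloaded files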
|
wwj718/edx-video
|
lms/djangoapps/instructor/tests/test_hint_manager.py
|
Python
|
agpl-3.0
| 7,620
| 0.001969
|
import json
from django.test.client import Client, RequestFactory
from django.test.utils import override_settings
from courseware.models import XModuleContentField
from courseware.tests.factories import ContentFactory
from courseware.tests.tests import TEST_DATA_MONGO_MODULESTORE
import instructor.hint_manager as view
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
class HintManagerTest(ModuleStoreTestCase):
def setUp(self):
"""
Makes a course, which will be the same for all tests.
Set up mako middleware, which is necessary for template rendering to happen.
"""
self.course = CourseFactory.create(org='Me', number='19.002', display_name='test_course')
self.url = '/courses/Me/19.002/test_course/hint_manager'
self.user = UserFactory.create(username='robot', email='robot@edx.org', password='test', is_staff=True)
self.c = Client()
self.c.login(username='robot', password='test')
self.problem_id = 'i4x://Me/19.002/crowdsource_hinter/crowdsource_hinter_001'
self.course_id = 'Me/19.002/test_course'
ContentFactory.create(field_name='hints',
definition_id=self.problem_id,
value=json.dumps({'1.0': {'1': ['Hint 1', 2],
'3': ['Hint 3', 12]},
'2.0': {'4': ['Hint 4', 3]}
}))
ContentFactory.create(field_name='mod_queue',
definition_id=self.problem_id,
value=json.dumps({'2.0': {'2': ['Hint 2', 1]}}))
ContentFactory.create(field_name='hint_pk',
definition_id=self.problem_id,
value=5)
# Mock out location_to_problem_name, which ordinarily accesses the modulestore.
# (I can't figure out how to get fake structures into the modulestore.)
view.location_to_problem_name = lambda course_id, loc: "Test problem"
def test_student_block(self):
"""
Makes sure that students cannot see the hint management view.
"""
c = Client()
UserFactory.create(username='student', email='student@edx.org', password='test')
c.login(username='student', password='test')
out = c.get(self.url)
print out
self.assertTrue('Sorry, but students are not allowed to access the hint manager!' in out.content)
def test_staff_access(self):
"""
Makes sure that staff can access the hint management view.
"""
out = self.c.get('/courses/Me/19.002/test_course/hint_manager')
print out
self.assertTrue('Hints Awaiting Moderation' in out.content)
def test_invalid_field_access(self):
"""
Makes sure that field names other than 'mod_queue' and 'hints' are
rejected.
"""
out = self.c.post(self.url, {'op': 'delete hints', 'field': 'all your private data'})
print out
self.assertTrue('an invalid field was accessed' in out.content)
def test_switchfields(self):
"""
Checks that the op: 'switch fields' POST request works.
"""
out = self.c.post(self.url, {'op': 'switch fields', 'field': 'mod_queue'})
print out
self.assertTrue('Hint 2' in out.content)
def test_gethints(self):
"""
Checks that gethints returns the right data.
"""
request = RequestFactory()
post = request.post(self.url, {'field': 'mod_queue'})
out = view.get_hints(post, self.course_id, 'mod_queue')
print out
self.assertTrue(out['other_field'] == 'hints')
expected = {self.problem_id: [(u'2.0', {u'2': [u'Hint 2', 1]})]}
self.assertTrue(out['all_hints'] == expected)
def test_gethints_other(self):
"""
Same as above, with hints instead of mod_queue
"""
request = RequestFactory()
post = request.post(self.url, {'field': 'hints'})
out = view.get_hints(post, self.course_id, 'hints')
print out
self.assertTrue(out['other_field'] == 'mod_queue')
expected = {self.problem_id: [('1.0', {'1': ['Hint 1', 2],
'3': ['Hint 3', 12]}),
('2.0', {'4': ['Hint 4', 3]})
]}
self.assertTrue(out['all_hints'] == expected)
def test_deletehints(self):
"""
Checks that delete_hints deletes the right stuff.
"""
request = RequestFactory()
post = request.post(self.url, {'field': 'hints',
'op': 'delete hints',
1: [self.problem_id, '1.0', '1']})
view.delete_hints(post, self.course_id, 'hints')
problem_hints = XModuleContentField.objects.get(field_name='hints', definition_id=self.problem_id).value
self.assertTrue('1' not in json.loads(problem_hints)['1.0'])
def test_changevotes(self):
"""
Checks that vote changing works.
"""
request = RequestFactory()
post = request.post(self.url, {'field': 'hints',
'op': 'change votes',
1: [self.problem_id, '1.0', '1', 5]})
view.change_votes(post, self.course_id, 'hints')
problem_hints = XModuleContentField.objects.get(field_name='hints', definition_id=self.problem_id).value
# hints[answer][hint_pk (string)] = [hint text, vote count]
print json.loads(problem_hints)['1.0']['1']
self.assertTrue(json.loads(problem_hints)['1.0']['1'][1] == 5)
def test_addhint(self):
"""
Check that instructors can add new hints.
"""
request = RequestFactory()
post = request.post(self.url, {'field': 'mod_queue',
'op': 'add hint',
'problem': self.problem_id,
'answer': '3.14',
'hint': 'This is a new hint.'})
view.add_hint(post, self.course_id, 'mod_queue')
problem_hints = XModuleContentField.objects.get(field_name='mod_queue', definition_id=self.problem_id).value
self.assertTrue('3.14' in json.loads(problem_hints))
def test_approve(self):
"""
Check that instructors can approve hints. (Move them
from the mod_queue to the hints.)
"""
request = RequestFactory()
post = request.post(self.url, {'field': 'mod_queue',
'op': 'approve',
1: [self.problem_id, '2.0', '2']})
view.approve(post, self.course_id, 'mod_queue')
problem_hints = XModuleContentField.objects.get(field_name='mod_queue', definition_id=self.problem_id).value
self.assertTrue('2.0' not in json.loads(problem_hints) or len(json.loads(problem_hints)['2.0']) == 0)
problem_hints = XModuleContentField.objects.get(field_name='hints', definition_id=self.problem_id).value
self.assertTrue(json.loads(problem_hints)['2.0']['2'] == ['Hint 2', 1])
self.assertTrue(len(json.loads(problem_hints)['2.0']) == 2)
|
omprakasha/odoo
|
openerp/osv/orm.py
|
Python
|
agpl-3.0
| 6,222
| 0.002572
|
import simplejson
from lxml import etree
from ..exceptions import except_orm
from ..models import (
MetaModel,
BaseModel,
Model, TransientModel, AbstractModel,
MAGIC_COLUMNS,
LOG_ACCESS_COLUMNS,
)
from openerp.tools.safe_eval import safe_eval as eval
# extra definitions for backward compatibility
browse_record_list = BaseModel
class browse_record(object):
""" Pseudo-class for testing record instances """
class __metaclass__(type):
def __instancecheck__(self, inst):
return isinstance(inst, BaseModel) and len(inst) <= 1
class browse_null(object):
""" Pseudo-class for testing null instances """
class __metaclass__(type):
def __instancecheck__(self, inst):
return isinstance(inst, BaseModel) and not inst
def transfer_field_to_modifiers(field, modifiers):
default_values = {}
state_exceptions = {}
for attr in ('invisible', 'readonly', 'required'):
state_exceptions[attr] = []
default_values[attr] = bool(field.get(attr))
for state, modifs in (field.get("states",{})).items():
for modif in modifs:
if default_values[modif[0]] != modif[1]:
state_exceptions[modif[0]].append(state)
for attr, default_value in default_values.items():
if state_exceptions[attr]:
modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
else:
modifiers[attr] = default_value
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
if node.get('attrs'):
modifiers.update(eval(node.get('attrs')))
if node.get('states'):
if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
# TODO combine with AND or OR, use implicit AND for now.
modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
else:
modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]
for a in ('invisible', 'readonly', 'required'):
if node.get(a):
v = bool(eval(node.get(a), {'context': context or {}}))
if in_tree_view and a == 'invisible':
# Invisible in a tree view has a specific meaning, make it a
# new key in the modifiers attribute.
modifiers['tree_invisible'] = v
elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
# Don't set the attribute to False if a dynamic value was
# provided (i.e. a domain from attrs or states).
modifiers[a] = v
def simplify_modifiers(modifiers):
for a in ('invisible', 'readonly', 'required'):
if a in modifiers and not modifiers[a]:
del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
if modifiers:
simplify_modifiers(modifiers)
node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
""" Processes node attributes and field descriptors to generate
the ``modifiers`` node attribute and set it on the provided node.
Alters its first argument in-place.
:param node: ``field`` node from an OpenERP view
:type node: lxml.etree._Element
:param dict field: field descriptor corresponding to the provided node
:param dict context: execution context used to evaluate node attributes
:param bool in_tree_view: triggers the ``tree_invisible`` code
path (separate from ``invisible``): in
tree view there are two levels of
invisibility, cell content (a column is
present but the cell itself is not
displayed) with ``invisible`` and column
invisibility (the whole column is
hidden) with ``tree_invisible``.
:returns: nothing
"""
modifiers = {}
if field is not None:
transfer_field_to_modifiers(field, modifiers)
transfer_node_to_modifiers(
node, modifiers, context=context, in_tree_view=in_tree_view)
transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
modifiers = {}
if isinstance(what, basestring):
node = etree.fromstring(what)
transfer_node_to_modifiers(node, modifiers)
simplify_modifiers(modifiers)
json = simplejson.dumps(modifiers)
assert json == expected, "%s != %s" % (json, expected)
elif isinstance(what, dict):
transfer_field_to_modifiers(what, modifiers)
simplify_modifiers(modifiers)
json = simplejson.dumps(modifiers)
assert json == expected, "%s != %s" % (json, expected)
# To use this test:
# import openerp
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
test_modifiers('<field name="a"/>', '{}')
test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
test_modifiers('<field name="a" required="1"/>', '{"required": true}')
test_modifiers('<field name="a" invisible="0"/>', '{}')
test_modifiers('<field name="a" readonly="0"/>', '{}')
test_modifiers('<field name="a" required="0"/>', '{}')
test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')
# The dictionary is supposed to be the result of fields_get().
test_modifiers({}, '{}')
test_modifiers({"invisible": True}, '{"invisible": true}')
test_modifiers({"invisible": False}, '{}')
|
nylas/sync-engine
|
tests/imap/network/test_drafts_syncback.py
|
Python
|
agpl-3.0
| 3,701
| 0
|
import uuid
from datetime import datetime
import pytest
from tests.util.crispin import crispin_client
ACCOUNT_ID = 1
NAMESPACE_ID = 1
THREAD_ID = 2
# These tests use a real Gmail test account and idempotently put the account
# back to the state it started in when the test is done.
@pytest.fixture(scope='function')
def message(db, config):
from inbox.models.backends.imap import ImapAccount
account = db.session.query(ImapAccount).get(ACCOUNT_ID)
to = [{'name': u'"\u2605The red-haired mermaid\u2605"',
'email': account.email_address}]
subject = 'Draft test: ' + str(uuid.uuid4().hex)
body = '<html><body><h2>Sea, birds, yoga and sand.</h2></body></html>'
return (to, subject, body)
def test_remote_save_draft(db, config, message):
""" Tests the save_draft function, which saves the draft to the remote. """
from inbox.actions.backends.gmail import remote_save_draft
from inbox.sendmail.base import _parse_recipients
from inbox.sendmail.message import create_email, Recipients
from inbox.models import Account
account = db.session.query(Account).get(ACCOUNT_ID)
to, subject, body = message
to_addr = _parse_recipients(to)
recipients = Recipients(to_addr, [], [])
email = create_email(account.sender_name, account.email_address, None,
recipients, subject, body, None)
date = datetime.utcnow()
remote_save_draft(account, account.drafts_folder.name, email.to_string(),
db.session, date)
with crispin_client(account.id, account.provider) as c:
criteria = ['NOT DELETED', 'SUBJECT "{0}"'.format(subject)]
c.conn.select_folder(account.drafts_folder.name, readonly=False)
draft_uids = c.conn.search(criteria)
assert draft_uids, 'Message missing from Drafts folder'
flags = c.conn.get_flags(draft_uids)
for uid in draft_uids:
f = flags.get(uid)
assert f and '\\Draft' in f, "Message missing '\\Draft' flag"
c.conn.delete_messages(draft_uids)
c.conn.expunge()
def test_remote_delete_draft(db, config, message):
"""
Tests the delete_draft function, which deletes the draft from the
remote.
"""
from inbox.actions.backends.gmail import (remote_save_draft,
remote_delete_draft)
from inbox.sendmail.base import _parse_recipients
from inbox.sendmail.message import create_email, Recipients
from inbox.models import Account
account = db.session.query(Account).get(ACCOUNT_ID)
to, subject, body = message
to_addr = _parse_recipients(to)
recipients = Recipients(to_addr, [], [])
email = create_email(account.sender_name, account.email_address, None,
recipients, subject, body, None)
date = datetime.utcnow()
# Save on remote
remote_save_draft(account, account.drafts_folder.name, email.to_string(),
db.session, date)
inbox_uid = email.headers['X-INBOX-ID']
with crispin_client(account.id, account.provider) as c:
criteria = ['DRAFT', 'NOT DELETED',
'HEADER X-INBOX-ID {0}'.format(inbox_uid)]
c.conn.select_folder(account.drafts_folder.name, readonly=False)
uids = c.conn.search(criteria)
assert uids, 'Message missing from Drafts folder'
# Delete on remote
remote_delete_draft(account, account.drafts_folder.name, inbox_uid,
db.session)
c.conn.select_folder(account.drafts_folder.name, readonly=False)
uids = c.conn.search(criteria)
assert not uids, 'Message still in Drafts folder'
|
kwss/keystone
|
keystone/credential/backends/sql.py
|
Python
|
apache-2.0
| 3,229
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.common import sql
from keystone.common.sql import migration
from keystone import credential
from keystone import exception
class CredentialModel(sql.ModelBase, sql.DictBase):
__tablename__ = 'credential'
attributes = ['id', 'user_id', 'project_id', 'blob', 'type']
id = sql.Column(sql.String(64), primary_key=True)
user_id = sql.Column(sql.String(64),
nullable=False)
project_id = sql.Column(sql.String(64))
blob = sql.Column(sql.JsonBlob(), nullable=False)
type = sql.Column(sql.String(255), nullable=False)
extra = sql.Column(sql.JsonBlob())
class Credential(sql.Base, credential.Driver):
# Internal interface to manage the database
def db_sync(self, version=None):
migration.db_sync(version=version)
# credential crud
@sql.handle_conflicts(type='credential')
def create_credential(self, credential_id, credential):
session = self.get_session()
with session.begin():
ref = CredentialModel.from_dict(credential)
session.add(ref)
session.flush()
return ref.to_dict()
def list_credentials(self):
session = self.get_session()
refs = session.query(CredentialModel).all()
return [ref.to_dict() for ref in refs]
def _get_credential(self, session, credential_id):
ref = session.query(CredentialModel).get(credential_id)
if ref is None:
raise exception.CredentialNotFound(credential_id=credential_id)
return ref
def get_credential(self, credential_id):
session = self.get_session()
return self._get_credential(session, credential_id).to_dict()
@sql.handle_conflicts(type='credential')
def update_credential(self, credential_id, credential):
session = self.get_session()
with session.begin():
ref = self._get_credential(session, credential_id)
old_dict = ref.to_dict()
for k in credential:
old_dict[k] = credential[k]
new_credential = CredentialModel.from_dict(old_dict)
for attr in CredentialModel.attributes:
if attr != 'id':
setattr(ref, attr, getattr(new_credential, attr))
ref.extra = new_credential.extra
session.flush()
return ref.to_dict()
def delete_credential(self, credential_id):
session = self.get_session()
with session.begin():
ref = self._get_credential(session, credential_id)
session.delete(ref)
session.flush()
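# A hedged usage sketch (assumes a configured Keystone SQL backend; the id, user
# and blob values are illustrative):
#
#   driver = Credential()
#   driver.create_credential('cred-1', {'id': 'cred-1', 'user_id': 'user-1',
#                                       'blob': '{"access": "k", "secret": "s"}',
#                                       'type': 'ec2'})
#   print driver.get_credential('cred-1')  # returns the stored dict, including blob and type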
|
rizar/actor-critic-public
|
bin/pack_to_hdf5.py
|
Python
|
mit
| 2,224
| 0.001349
|
#!/usr/bin/env python
import h5py
import numpy
import argparse
import cPickle
from fuel.datasets.hdf5 import H5PYDataset
def pack(f, name, dataset_pathes):
datasets = [cPickle.load(open(path)) for path in dataset_pathes]
data = sum(datasets, [])
dtype = h5py.special_dtype(vlen=numpy.dtype('int32'))
table = f.create_dataset(name, (len(data),), dtype=dtype)
for i, example in enumerate(data):
table[i] = example
return numpy.array([len(d) for d in datasets])
if __name__ == '__main__':
parser = argparse.ArgumentParser("Pack data to HDF5")
parser.add_argument('-s', dest='sources', nargs='*', help="Source datasets")
parser.add_argument('-t', dest='targets', nargs='*', help="Target datasets")
parser.add_argument('-n', dest='names', nargs='*', help="Dataset names")
parser.add_argument('-i', dest='add_ids',
action='store_true', default=False,
help="Add integer IDs")
parser.add_argument('dest', help="Destination")
args = parser.parse_args()
assert len(args.sources) == len(args.targets)
assert len(args.sources) == len(args.names)
with h5py.File(args.dest, mode='w') as f:
lengths = pack(f, "sources", args.sources)
assert numpy.all(lengths == pack(f, "targets", args.targets))
offsets = [0] + list(lengths.cumsum())
total_len = offsets[-1]
if args.add_ids:
id_table = f.create_dataset('ids',
data=numpy.arange(total_len,
dtype='int32'))
split_dict = {
args.names[i]:
{'sources': (offsets[i], offsets[i + 1]),
'targets': (offsets[i], offsets[i + 1]),
'ids': (offsets[i], offsets[i + 1])}
for i in range(len(args.names))}
else:
split_dict = {
args.names[i]:
{'sources': (offsets[i], offsets[i + 1]),
'targets': (offsets[i], offsets[i + 1])}
for i in range(len(args.names))}
f.attrs['split'] = H5PYDataset.create_split_array(split_dict)
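# A hedged read-back sketch (assumes fuel is installed and that 'train' was one of
# the -n names passed when packing; the output file name is illustrative):
#
#   from fuel.datasets.hdf5 import H5PYDataset
#   train = H5PYDataset('packed.hdf5', which_sets=('train',))
#   print train.num_examples, train.sources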
|
atykhonov/fget
|
fget/settings.py
|
Python
|
mit
| 1,884
| 0
|
import os
import pkg_resources
import yaml
from fget.utils import fgetprint
from fget.resource.root import Root
class CachedSettings(object):
def __init__(self, cache_dir):
self.cache_dir = cache_dir
def init(self):
settings_filename = 'fget.yaml'
cached_filename = 'fget.jobs'
cached_settings_file = os.path.join(self.cache_dir, cached_filename)
self.cached_settings = {}
if not os.path.isfile(cached_settings_file):
fgetprint('Initiating. Please wait...')
settings_file = \
pkg_resources.resource_filename('fget', settings_filename)
with open(settings_file) as f:
settings = yaml.load(f.read())
for url in settings.get('JENKINS_URLS', []):
url = url.strip('/')
fgetprint('Retrieving jobs from {0}'.format(url))
root_resource = Root(url)
for job in root_resource.get_jobs():
if url not in self.cached_settings:
self.cached_settings[url] = []
self.cached_settings[url].append(str(job['name']))
with open(cached_settings_file, 'w') as f:
for key in self.cached_settings.keys():
f.write(key + '\n')
for value in self.cached_settings[key]:
f.write(value + '\n')
fgetprint('Initiating. Finished.')
else:
with open(cached_settings_file) as f:
for line in f:
if line.startswith('http://'):
url = line.strip()
self.cached_settings[url] = []
continue
self.cached_settings[url].append(line.strip())
def get_settings(self):
return self.cached_settings
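# A hedged usage sketch; the cache directory is an illustrative assumption:
#
#   cached = CachedSettings(os.path.expanduser('~/.cache/fget'))
#   cached.init()  # builds the cache on the first run, reuses it afterwards
#   for url, jobs in cached.get_settings().items():
#       fgetprint('{0}: {1} jobs'.format(url, len(jobs)))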
|
mogoweb/chromium-crosswalk
|
tools/perf/measurements/rasterize_and_record.py
|
Python
|
bsd-3-clause
| 7,809
| 0.005506
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time
from metrics import smoothness
from telemetry.page import page_measurement
class StatsCollector(object):
def __init__(self, timeline):
"""
Utility class for collecting rendering stats from timeline model.
timeline -- The timeline model
"""
self.timeline = timeline
self.total_best_rasterize_time = 0
self.total_best_record_time = 0
self.total_pixels_rasterized = 0
self.total_pixels_recorded = 0
self.trigger_event = self.FindTriggerEvent()
self.renderer_process = self.trigger_event.start_thread.parent
def FindTriggerEvent(self):
events = [s for
s in self.timeline.GetAllEventsOfName(
'measureNextFrame')
if s.parent_slice == None]
if len(events) != 1:
raise LookupError, 'no measureNextFrame event found'
return events[0]
def FindFrameNumber(self, trigger_time):
start_event = None
for event in self.renderer_process.IterAllSlicesOfName(
"LayerTreeHost::UpdateLayers"):
if event.start > trigger_time:
if start_event == None:
start_event = event
elif event.start < start_event.start:
start_event = event
if start_event is None:
raise LookupError, \
'no LayerTreeHost::UpdateLayers after measureNextFrame found'
return start_event.args["source_frame_number"]
def GatherRasterizeStats(self, frame_number):
for event in self.renderer_process.IterAllSlicesOfName(
"RasterWorkerPoolTaskImpl::RunRasterOnThread"):
if event.args["data"]["source_frame_number"] == frame_number:
for raster_loop_event in event.GetAllSubSlicesOfName("RasterLoop"):
best_rasterize_time = float("inf")
for raster_event in raster_loop_event.GetAllSubSlicesOfName(
"Picture::Raster"):
if "num_pixels_rasterized" in raster_event.args:
best_rasterize_time = min(best_rasterize_time,
raster_event.duration)
self.total_pixels_rasterized += \
raster_event.args["num_pixels_rasterize
|
d"]
if best_rasterize_time == float('inf'):
best_rasterize_time = 0
self.total_best_rasterize_time += best_rasterize_time
def GatherRecordStats(self, frame_number):
for event in self.renderer_process.IterAllSlicesOfName(
"PictureLayer::Update"):
if event.args["source_frame_number"] == frame_number:
for record_loop_event in event.GetAllSubSlicesOfName("RecordLoop"):
best_record_time = float('inf')
for record_event in record_loop_event.GetAllSubSlicesOfName(
"Picture::Record"):
best_record_time = min(best_record_time, record_event.duration)
self.total_pixels_recorded += (
record_event.args["data"]["width"] *
record_event.args["data"]["height"])
if best_record_time == float('inf'):
best_record_time = 0
self.total_best_record_time += best_record_time
def GatherRenderingStats(self):
trigger_time = self.trigger_event.start
frame_number = self.FindFrameNumber(trigger_time)
self.GatherRasterizeStats(frame_number)
self.GatherRecordStats(frame_number)
def DivideIfPossibleOrZero(numerator, denominator):
if denominator == 0:
return 0
return numerator / denominator
class RasterizeAndRecord(page_measurement.PageMeasurement):
def __init__(self):
super(RasterizeAndRecord, self).__init__('', True)
self._metrics = None
def AddCommandLineOptions(self, parser):
parser.add_option('--report-all-results', dest='report_all_results',
action='store_true',
help='Reports all data collected')
parser.add_option('--raster-record-repeat', dest='raster_record_repeat',
default=20,
help='Repetitions in raster and record loops. ' +
'Higher values reduce variance, but can cause ' +
'instability (timeouts, event buffer overflows, etc.).')
parser.add_option('--start-wait-time', dest='start_wait_time',
default=2,
help='Wait time before the benchmark is started ' +
'(must be long enough to load all content)')
parser.add_option('--stop-wait-time', dest='stop_wait_time',
default=5,
help='Wait time before measurement is taken ' +
'(must be long enough to render one frame)')
def CustomizeBrowserOptions(self, options):
smoothness.SmoothnessMetrics.CustomizeBrowserOptions(options)
# Run each raster task N times. This allows us to report the time for the
# best run, effectively excluding cache effects and time when the thread is
# de-scheduled.
options.AppendExtraBrowserArgs([
'--slow-down-raster-scale-factor=%d' % options.raster_record_repeat,
# Enable impl-side-painting. Current version of benchmark only works for
# this mode.
'--enable-impl-side-painting',
'--force-compositing-mode',
'--enable-threaded-compositing'
])
def MeasurePage(self, page, tab, results):
self._metrics = smoothness.SmoothnessMetrics(tab)
# Rasterize only what's visible.
tab.ExecuteJavaScript(
'chrome.gpuBenchmarking.setRasterizeOnlyVisibleContent();')
# Wait until the page has loaded and come to a somewhat steady state.
# Needs to be adjusted for every device (~2 seconds for workstation).
time.sleep(float(self.options.start_wait_time))
# Render one frame before we start gathering a trace. On some pages, the
# first frame requested has more variance in the number of pixels
# rasterized.
tab.ExecuteJavaScript("""
window.__rafFired = false;
window.webkitRequestAnimationFrame(function() {
chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();
window.__rafFired = true;
});
""")
tab.browser.StartTracing('webkit.console,benchmark', 60)
self._metrics.Start()
tab.ExecuteJavaScript("""
console.time("measureNextFrame");
window.__rafFired = false;
window.webkitRequestAnimationFrame(function() {
chrome.gpuBenchmarking.setNeedsDisplayOnAllLayers();
window.__rafFired = true;
});
""")
# Wait until the frame was drawn.
# Needs to be adjusted for every device and for different
# raster_record_repeat counts.
# TODO(ernstm): replace by call-back.
time.sleep(float(self.options.stop_wait_time))
tab.ExecuteJavaScript('console.timeEnd("measureNextFrame")')
self._metrics.Stop()
timeline = tab.browser.StopTracing().AsTimelineModel()
collector = StatsCollector(timeline)
collector.GatherRenderingStats()
rendering_stats = self._metrics.end_values
results.Add('best_rasterize_time', 'seconds',
collector.total_best_rasterize_time / 1.e3,
data_type='unimportant')
results.Add('best_record_time', 'seconds',
collector.total_best_record_time / 1.e3,
data_type='unimportant')
results.Add('total_pixels_rasterized', 'pixels',
collector.total_pixels_rasterized,
data_type='unimportant')
results.Add('total_pixels_recorded', 'pixels',
collector.total_pixels_recorded,
data_type='unimportant')
if self.options.report_all_results:
for k, v in rendering_stats.iteritems():
results.Add(k, '', v)
|
vileopratama/vitech
|
src/openerp/report/printscreen/ps_list.py
|
Python
|
mit
| 11,008
| 0.007813
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import openerp
from openerp.report.interface import report_int
import openerp.tools as tools
from openerp.tools.safe_eval import safe_eval as eval
from lxml import etree
from openerp.report import render, report_sxw
import locale
import time, os
from operator import itemgetter
from datetime import datetime
class report_printscreen_list(report_int):
def __init__(self, name):
report_int.__init__(self, name)
self.context = {}
self.groupby = []
self.cr=''
def _parse_node(self, root_node):
result = []
for node in root_node:
field_name = node.get('name')
if not eval(str(node.attrib.get('invisible',False)),{'context':self.context}):
if node.tag == 'field':
if field_name in self.groupby:
continue
result.append(field_name)
else:
result.extend(self._parse_node(node))
return result
def _parse_string(self, view):
try:
dom = etree.XML(view.encode('utf-8'))
except Exception:
dom = etree.XML(view)
return self._parse_node(dom)
def create(self, cr, uid, ids, datas, context=None):
if not context:
context={}
self.cr=cr
self.context = context
self.groupby = context.get('group_by',[])
self.groupby_no_leaf = context.get('group_by_no_leaf',False)
registry = openerp.registry(cr.dbname)
model = registry[datas['model']]
model_id = registry['ir.model'].search(cr, uid, [('model','=',model._name)])
model_desc = model._description
if model_id:
model_desc = registry['ir.model'].browse(cr, uid, model_id[0], context).name
self.title = model_desc
datas['ids'] = ids
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
fields_order = self.groupby + self._parse_string(result['arch'])
if self.groupby:
rows = []
def get_groupby_data(groupby = [], domain = []):
records = model.read_group(cr, uid, domain, fields_order, groupby, 0, None, context)
for rec in records:
rec['__group'] = True
rec['__no_leaf'] = self.groupby_no_leaf
rec['__grouped_by'] = groupby[0] if (isinstance(groupby, list) and groupby) else groupby
for f in fields_order:
if f not in rec:
rec.update({f:False})
elif isinstance(rec[f], tuple):
rec[f] = rec[f][1]
rows.append(rec)
inner_groupby = (rec.get('__context', {})).get('group_by',[])
inner_domain = rec.get('__domain', [])
if inner_groupby:
get_groupby_data(inner_groupby, inner_domain)
else:
if self.groupby_no_leaf:
continue
child_ids = model.search(cr, uid, inner_domain)
res = model.read(cr, uid, child_ids, result['fields'].keys(), context)
res.sort(lambda x,y: cmp(ids.index(x['id']), ids.index(y['id'])))
rows.extend(res)
dom = [('id','in',ids)]
if self.groupby_no_leaf and len(ids) and not ids[0]:
dom = datas.get('_domain',[])
get_groupby_data(self.groupby, dom)
else:
rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context)
ids2 = map(itemgetter('id'), rows) # getting the ids from read result
if datas['ids'] != ids2: # sorted ids were not taken into consideration for print screen
rows_new = []
for id in datas['ids']:
rows_new += [elem for elem in rows if elem['id'] == id]
rows = rows_new
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
return self.obj.get(), 'pdf'
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
pageSize=[297.0, 210.0]
new_doc = etree.Element("report")
config = etree.SubElement(new_doc, 'config')
def _append_node(name, text):
n = etree.SubElement(config, name)
n.text = text
#_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
_append_node('report-header', title)
registry = openerp.registry(self.cr.dbname)
_append_node('company', registry['res.users'].browse(self.cr,uid,uid).company_id.name)
rpt_obj = registry['res.users']
rml_obj=report_sxw.rml_parse(self.cr, uid, rpt_obj._name,context)
_append_node('header-date', str(rml_obj.formatLang(time.strftime("%Y-%m-%d"),date=True))+' ' + str(time.strftime("%H:%M")))
l = []
t = 0
strmax = (pageSize[0]-40) * 2.8346
temp = []
tsum = []
for i in range(0, len(fields_order)):
temp.append(0)
tsum.append(0)
ince = -1
for f in fields_order:
s = 0
ince += 1
if fields[f]['type'] in ('date','time','datetime','float','integer'):
s = 60
strmax -= s
if fields[f]['type'] in ('float','integer'):
temp[ince] = 1
else:
t += fields[f].get('size', 80) / 28 + 1
l.append(s)
for pos in range(len(l)):
if not l[pos]:
s = fields[fields_order[pos]].get('size', 80) / 28 + 1
l[pos] = strmax * s / t
_append_node('tableSize', ','.join(map(str,l)) )
header = etree.SubElement(new_doc, 'header')
for f in fields_order:
field = etree.SubElement(header, 'field')
field.text = tools.ustr(fields[f]['string'] or '')
lines = etree.SubElement(new_doc, 'lines')
for line in results:
node_line = etree.SubElement(lines, 'row')
count = -1
for f in fields_order:
float_flag = 0
count += 1
if fields[f]['type']=='many2one' and line[f]:
if not line.get('__group'):
line[f] = line[f][1]
if fields[f]['type']=='selection' and line[f]:
for key, value in fields[f]['selection']:
if key == line[f]:
line[f] = value
break
if fields[f]['type'] in ('one2many','many2many') and line[f]:
line[f] = '( '+tools.ustr(len(line[f])) + ' )'
if fields[f]['type'] == 'float' and line[f]:
precision=(('digits' in fields[f]) and fields[f]['digits'][1]) or 2
prec ='%.' + str(precision) +'f'
line[f]=prec%(line[f])
float_flag = 1
if fields[f]['type'] == 'date' and line[f]:
new_d1 = line[f]
if not line.get('__group'):
format = str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))
d1 = datetime.strptime(line[f],'%Y-%m-%d')
new_d1 = d1.strftime(format)
line[f] = new_d1
if fields[f]['type'] == 'time' and line[f]:
new_d1 = line[f]
if not line.get('__group'):
format = str(locale.nl_langinfo(locale.T_FMT))
|
spyder-ide/qtawesome
|
setupbase.py
|
Python
|
mit
| 12,662
| 0.001422
|
# -*- coding: utf-8 -*-
import os
import re
import io
import sys
import csv
import json
import shutil
import hashlib
import zipfile
import tempfile
try:
from fontTools import ttLib
except ImportError:
ttLib = None
from urllib.request import urlopen
import distutils.cmd
import distutils.log
HERE = os.path.abspath(os.path.dirname(__file__))
ICONIC_FONT_PY_PATH = os.path.join(HERE, 'qtawesome', 'iconic_font.py')
def rename_font(font_path, font_name):
"""
Font renaming code originally from:
https://github.com/chrissimpkins/fontname.py/blob/master/fontname.py
"""
tt = ttLib.TTFont(font_path, recalcBBoxes=False, recalcTimestamp=False)
namerecord_list = tt["name"].names
variant = ""
# determine font variant for this file path from name record nameID 2
for record in namerecord_list:
if record.nameID == 2:
variant = str(record)
break
# test that a variant name was found in the OpenType tables of the font
if len(variant) == 0:
raise ValueError(
"Unable to detect the font variant from the OpenType name table in: %s" % font_path)
# Here are some sample name records to give you an idea of the name tables:
# ID 0: 'Copyright (c) Font Awesome'
# ID 1: 'Font Awesome 5 Free Regular'
# ID 2: 'Regular'
# ID 3: 'Font Awesome 5 Free Regular-5.14.0'
# ID 4: 'Font Awesome 5 Free Regular'
# ID 5: '331.264 (Font Awesome version: 5.14.0)'
# ID 6: 'FontAwesome5Free-Regular'
# ID 10: "The web's most popular icon set and toolkit."
# ID 11: 'https://fontawesome.com'
# ID 16: 'Font Awesome 5 Free'
# ID 17: 'Regular'
# ID 18: 'Font Awesome 5 Free Regular'
# ID 21: 'Font Awesome 5 Free'
# ID 22: 'Regular'
# modify the opentype table data in memory with updated values
for record in namerecord_list:
if record.nameID in (1, 4, 16, 21):
print(f"Renaming font name record at ID {record.nameID}: {record.string} --> {font_name}")
record.string = font_name
# write changes to the font file
try:
tt.save(font_path, reorderTables=False)
except:
raise RuntimeError(
f"ERROR: unable to write new name to OpenType tables for: {font_path}")
class UpdateFA5Command(distutils.cmd.Command):
"""A custom command to make updating FontAwesome 5.x easy!"""
description = 'Try to update the FontAwesome 5.x data in the project.'
user_options = [
('fa-version=', None, 'FA version.'),
('zip-path=', None, 'Read from local zip file path.'),
]
# Update these below if the FontAwesome changes their structure:
FA_STYLES = ('regular', 'solid', 'brands')
CHARMAP_PATH_TEMPLATE = os.path.join(HERE, 'qtawesome', 'fonts', 'fontawesome5-{style}-webfont-charmap.json')
TTF_PATH_TEMPLATE = os.path.join(HERE, 'qtawesome', 'fonts', 'fontawesome5-{style}-webfont.ttf')
URL_TEMPLATE = 'https://github.com/FortAwesome/Font-Awesome/' \
'releases/download/{version}/fontawesome-free-{version}-web.zip'
def initialize_options(self):
"""Set default values for the command options."""
self.fa_version = ''
self.zip_path = ''
def finalize_options(self):
"""Validate the command options."""
assert bool(self.fa_version), 'FA version is mandatory for this command.'
if self.zip_path:
assert os.path.exists(self.zip_path), (
'Local zipfile does not exist: %s' % self.zip_path)
def __print(self, msg):
"""Shortcut for printing with the distutils logger."""
self.announce(msg, level=distutils.log.INFO)
def __get_charmap_path(self, style):
"""Get the project FA charmap path for a given style."""
return self.CHARMAP_PATH_TEMPLATE.format(style=style)
def __get_ttf_path(self, style):
"""Get the project FA font path for a given style."""
return self.TTF_PATH_TEMPLATE.format(style=style)
@property
def __release_url(self):
"""Get the release URL."""
return self.URL_TEMPLATE.format(version=self.fa_version)
@property
def __zip_file(self):
"""Get a file object of the FA zip file."""
if self.zip_path:
# If using a local file, just open it:
self.__print('Opening local zipfile: %s' % self.zip_path)
return open(self.zip_path, 'rb')
# Otherwise, download it and make a file object in-memory:
url = self.__release_url
self.__print('Downloading from URL: %s' % url)
response = urlopen(url)
return io.BytesIO(response.read())
@property
def __zipped_files_data(self):
"""Get a dict of all files of interest from the FA release zipfile."""
files = {}
with zipfile.ZipFile(self.__zip_file) as thezip:
for zipinfo in thezip.infolist():
if zipinfo.filename.endswith('metadata/icons.json'):
with thezip.open(zipinfo) as compressed_file:
files['icons.json'] = compressed_file.read()
elif zipinfo.filename.endswith('.ttf'):
# For the record, the paths usually look like this:
# webfonts/fa-brands-400.ttf
# webfonts/fa-regular-400.ttf
# webfonts/fa-solid-900.ttf
name = os.path.basename(zipinfo.filename)
tokens = name.split('-')
style = tokens[1]
if style in self.FA_STYLES:
with thezip.open(zipinfo) as compressed_file:
files[style] = compressed_file.read()
# Safety checks:
assert all(style in files for style in self.FA_STYLES), \
'Not all FA styles found! Update code is broken.'
assert 'icons.json' in files, 'icons.json not found! Update code is broken.'
return files
def run(self):
"""Run command."""
files = self.__zipped_files_data
hashes = {}
icons = {}
# Read icons.json (from the webfont zip download)
data = json.loads(files['icons.json'])
# Group icons by style, since not all icons exist for all styles:
for icon, info in data.items():
for style in info['styles']:
icons.setdefault(str(style), {})
icons[str(style)][icon] = str(info['unicode'])
# For every FA "style":
for style, details in icons.items():
# Dump a .json charmap file:
charmapPath = self.__get_charmap_path(style)
self.__print('Dumping updated "%s" charmap: %s' % (style, charmapPath))
with open(charmapPath, 'w+') as f:
json.dump(details, f, indent=4, sort_keys=True)
# Dump a .ttf font file:
font_path = self.__get_ttf_path(style)
data = files[style]
self.__print('Dumping updated "%s" font: %s' % (style, font_path))
with open(font_path, 'wb+') as f:
f.write(data)
# Fix to prevent repeated font names:
if style in ('regular', 'solid'):
new_name = str("Font Awesome 5 Free %s") % style.title()
self.__print('Renaming font to "%s" in: %s' % (new_name, font_path))
if ttLib is not None:
rename_font(font_path, new_name)
else:
sys.exit(
"This special command requires the module 'fonttools': "
"https://github.com/fonttools/fonttools/")
# Reread the data since we just edited the font file:
with open(font_path, 'rb') as f:
data = f.read()
files[style] = data
# Store hashes for later:
hashes[style] = hashlib.md5(data).hexdigest()
# Now it's time to patch "iconic_font.py":
iconic_path = ICONIC_FONT_PY_PATH
self.__print('Patching new MD5 hashes in: %s' % iconic_path)
with open(iconic_path, 'r') as iconic
|
vivhou/Python-coursework-files
|
final project data analysis/webscraping scripts/public_school_tuition_data.py
|
Python
|
artistic-2.0
| 2,976
| 0.023522
|
import requests
import math
import re
from bs4 import BeautifulSoup
ROOT_URL = "http://nces.ed.gov/collegenavigator"
INDEX_URL = ROOT_URL + "?s=all&l=93&ct=1&ic=1&an=5&ax=50"
PAGINATION_DIV_ID = "ctl00_cphCollegeNavBody_ucResultsMain_divMsg"
def get_num_pages(pagination):
"""Returns the total number of pages given pagination string
:param pagination: Pagination string (i.e.: 1-20 of 100 Results)
"""
words = pagination.split()
per_page = int(words[0][2:])
total = int(words[2])
pages = total // per_page  # integer division; a partial page is added below
# Add one if pages doesn't divide evenly
if total % per_page != 0:
pages = pages + 1
return pages
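# Worked example of the arithmetic above (values are illustrative):
#   "1-20 of 100 Results" -> per_page=20, total=100 -> 100 // 20 = 5 pages
#   "1-20 of 45 Results"  -> 45 // 20 = 2, plus one for the remainder -> 3 pages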
def is_college_link(href):
"""Returns whether or not an anchor is a college link
:param href: hyperlink string
"""
return href and re.compile("id=").search(href)
def get_colleges():
response = requests.get(INDEX_URL)
soup = BeautifulSoup(response.text, "html.parser")
# Get the total number of pages in the result
pagination = soup.find("div", attrs={"id": PAGINATION_DIV_ID})
pages = get_num_pages(pagination.get_text())
# Store colleges in list of dictionaries
colleges = []
college = {}
# Iterate over all of the pages
for i in range(1, pages+1):
print("Parsing colleges page: " + str(i))
response = requests.get(INDEX_URL + "&pg=" + str(i))
soup = BeautifulSoup(response.text, "html.parser")
# There is only one "resultsTable" in the HTML that
# contains the list of college links and information
table = soup.find("table", attrs={"class": "resultsTable"})
for link in table.findChildren(href=is_college_link):
college['name'] = link.get_text()
college['url'] = link.get('href')
colleges.append(college.copy())
return colleges
def get_college_tuition_data(college):
"""Retrieves college tuition data and adds to college
:param college: college dictionary container
"""
response = requests.get(ROOT_URL + college['url'])
soup = BeautifulSoup(response.text, "html.parser")
expenses = soup.find("div", a
|
ttrs={"id": "expenses"})
table = expenses.find("tbody")
try:
# Get In-state Tuition Change
row = table.find(string="In-state").parent.parent
cols = row.find_all("td")
college['In-state'] = cols[5].get_text()
# Get Out-of-state Tuition Change
row = table.find(string="Out-of-state").parent.parent
cols = row.find_all("td")
college['Out-of-state'] = cols[5].get_text()
except (AttributeError, TypeError):
print('\033[93m' + college['name'] + "
|
has no tuition data, skipping!!" + '\033[0m')
college['In-state'] = "-"
college['Out-of-state'] = "-"
# Get initial list of colleges and links
print("Getting initial list of colleges")
colleges = get_colleges()
# Get additional tuition data for each college
for college in colleges:
print(college['name'] + ": Retrieving tuition data")
get_college_tuition_data(college)
for college in colleges:
print(repr(str(college['name'])) + ": In-state = " + college['In-state'] + ", Out-of-state = " + college['Out-of-state'])
|
inspirehep/invenio-records
|
invenio_records/signals.py
|
Python
|
gpl-2.0
| 2,474
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Record module signals."""
from blinker import Namespace
_signals = Namespace()
record_viewed = _signals.signal('record-viewed')
"""
This signal is sent when a detailed view of record is displayed.
Parameters:
recid - id of record
id_user - id of user or 0 for guest
request - flask request object
Example subscriber:
.. code-block:: python
def subscriber(sender, recid=0, id_user=0, request=None):
...
"""
before_record_insert = _signals.signal('before-record-insert')
"""Signal sent before a record is inserted.
Example subscriber
.. code-block:: python
def listener(sender, *args, **kwargs):
sender['key'] = sum(args)
from invenio_records.signals import before_record_insert
before_record_insert.connect(
listener
)
"""
after_record_insert = _signals.signal('after-record-insert')
"""Signal sent after a record is inserted.
.. note::
No modifications are allowed on the record object.
"""
before_record_update = _signals.signal('before-record-update')
"""Signal sent before a record is update."""
after_record_update = _signals.signal('after-record-update')
"""Signal sent after a record is updated."""
before_record_index = _signals.signal('before-record-index')
"""Signal sent before a record is indexed.
Example subscriber
.. code-block:: python
def listener(sender, **kwargs):
info = fetch_some_info_for_recid(sender)
kwargs['json']['more_info'] = info
from invenio_records.signals import before_record_index
before_record_index.connect(
listener
)
"""
after_record_index = _signals.signal('after-record-index')
"""Signal sent after a record is indexed."""
|
endlessm/chromium-browser
|
third_party/catapult/third_party/gsutil/gslib/vendored/boto/tests/integration/gs/test_basic.py
|
Python
|
bsd-3-clause
| 22,160
| 0.002211
|
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2011, Nexenta Systems, Inc.
# Copyright (c) 2012, Google, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some integration tests for the GSConnection
"""
import os
import re
import StringIO
import urllib
import xml.sax
from boto import handler
from boto import storage_uri
from boto.gs.acl import ACL
from boto.gs.cors import Cors
from boto.gs.lifecycle import LifecycleConfig
from tests.integration.gs.testcase import GSTestCase
CORS_EMPTY = '<CorsConfig></CorsConfig>'
CORS_DOC = ('<CorsConfig><Cors><Origins><Origin>origin1.example.com'
'</Origin><Origin>origin2.example.com</Origin></Origins>'
'<Methods><Method>GET</Method><Method>PUT</Method>'
'<Method>POST</Method></Methods><ResponseHeaders>'
'<ResponseHeader>foo</ResponseHeader>'
'<ResponseHeader>bar</ResponseHeader></ResponseHeaders>'
'</Cors></CorsConfig>')
ENCRYPTION_CONFIG_WITH_KEY = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<EncryptionConfiguration>'
'<DefaultKmsKeyName>%s</DefaultKmsKeyName>'
'</EncryptionConfiguration>')
LIFECYCLE_EMPTY = ('<?xml version="1.0" encoding="UTF-8"?>'
'<LifecycleConfiguration></LifecycleConfiguration>')
LIFECYCLE_DOC = ('<?xml version="1.0" encoding="UTF-8"?>'
'<LifecycleConfiguration><Rule>'
'<Action><Delete/></Action>'
'<Condition>''<IsLive>true</IsLive>'
'<MatchesStorageClass>STANDARD</MatchesStorageClass>'
'<Age>365</Age>'
'<CreatedBefore>2013-01-15</CreatedBefore>'
'<NumberOfNewerVersions>3</NumberOfNewerVersions>'
'</Condition></Rule><Rule>'
'<Action><SetStorageClass>NEARLINE</SetStorageClass></Action>'
'<Condition><Age>366</Age>'
'</Condition></Rule></LifecycleConfiguration>')
LIFECYCLE_CONDITIONS_FOR_DELETE_RULE = {
'Age': '365',
'CreatedBefore': '2013-01-15',
'NumberOfNewerVersions': '3',
'IsLive': 'true',
'MatchesStorageClass': ['STANDARD']}
LIFECYCLE_CONDITIONS_FOR_SET_STORAGE_CLASS_RULE = {'Age': '366'}
BILLING_EMPTY = {'BillingConfiguration': {}}
BILLING_ENABLED = {'BillingConfiguration': {'RequesterPays': 'Enabled'}}
BILLING_DISABLED = {'BillingConfiguration': {'RequesterPays': 'Disabled'}}
# Regexp for matching project-private default object ACL.
PROJECT_PRIVATE_RE = ('\s*<AccessControlList>\s*<Entries>\s*<Entry>'
'\s*<Scope type="GroupById">\s*<ID>[-a-zA-Z0-9]+</ID>'
'\s*(<Name>[^<]+</Name>)?\s*</Scope>'
'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
'\s*<Scope type="GroupById">\s*<ID>[-a-zA-Z0-9]+</ID>'
'\s*(<Name>[^<]+</Name>)?\s*</Scope>'
'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
'\s*<Scope type="GroupById">\s*<ID>[-a-zA-Z0-9]+</ID>'
'\s*(<Name>[^<]+</Name>)?\s*</Scope>'
'\s*<Permission>READ</Permission>\s*</Entry>\s*</Entries>'
'\s*</AccessControlList>\s*')
class GSBasicTest(GSTestCase):
"""Tests some basic GCS functionality."""
def test_read_write(self):
"""Tests basic read/write to keys."""
bucket = self._MakeBucket()
bucket_name = bucket.name
# now try a get_bucket call and see if it's really there
bucket = self._GetConnection().get_bucket(bucket_name)
key_name = 'foobar'
k = bucket.new_key(key_name)
s1 = 'This is a test of file upload and download'
k.set_contents_from_string(s1)
tmpdir = self._MakeTempDir()
fpath = os.path.join(tmpdir, key_name)
fp = open(fpath, 'wb')
# now get the contents from gcs to a local file
k.get_contents_to_file(fp)
fp.close()
fp = open(fpath)
# check to make sure content read from gcs is identical to original
self.assertEqual(s1, fp.read())
fp.close()
# Use generate_url to get the contents
url = self._conn.generate_url(900, 'GET', bucket=bucket.name, key=key_name)
f = urllib.urlopen(url)
self.assertEqual(s1, f.read())
f.close()
# check to make sure set_contents_from_file is working
sfp = StringIO.StringIO('foo')
k.set_contents_from_file(sfp)
self.assertEqual(k.get_contents_as_string(), 'foo')
sfp2 = StringIO.StringIO('foo2')
k.set_contents_from_file(sfp2)
self.assertEqual(k.get_contents_as_string(), 'foo2')
def test_get_all_keys(self):
"""Tests get_all_keys."""
phony_mimetype = 'application/x-boto-test'
headers = {'Content-Type': phony_mimetype}
tmpdir = self._MakeTempDir()
fpath = os.path.join(tmpdir, 'foobar1')
fpath2 = os.path.join(tmpdir, 'foobar')
with open(fpath2, 'w') as f:
f.write('test-data')
bucket = self._MakeBucket()
# First load some data for the first one, overriding content type.
k = bucket.new_key('foobar')
s1 = 'test-contents'
s2 = 'test-contents2'
k.name = 'foo/bar'
k.set_contents_from_string(s1, headers)
k.name = 'foo/bas'
k.set_contents_from_filename(fpath2)
k.name = 'foo/bat'
k.set_contents_from_string(s1)
k.name = 'fie/bar'
k.set_contents_from_string(s1)
k.name = 'fie/bas'
k.set_contents_from_string(s1)
k.name = 'fie/bat'
k.set_contents_from_string(s1)
# try resetting the contents to another value
md5 = k.md5
k.set_contents_from_string(s2)
self.assertNotEqual(k.md5, md5)
fp2 = open(fpath2, 'rb')
k.md5 = None
k.base64md5 = None
k.set_contents_from_stream(fp2)
fp = open(fpath, 'wb')
k.get_contents_to_file(fp)
fp.close()
fp2.seek(0, 0)
fp = open(fpath, 'rb')
self.assertEqual(fp2.read(), fp.read())
fp.close()
fp2.close()
all = bucket.get_all_keys()
self.assertEqual(len(all), 6)
rs = bucket.get_all_keys(prefix='foo')
self.assertEqual(len(rs), 3)
rs = bucket.get_all_keys(prefix='', delimiter='/')
self.assertEqual(len(rs), 2)
rs = bucket.get_all_keys(maxkeys=5)
self.assertEqual(len(rs), 5)
def test_bucket_lookup(self):
"""Test the bucket lookup method."""
bucket = self._MakeBucket()
k = bucket.new_key('foo/bar')
phony_mimetype = 'application/x-boto-test'
headers = {'Content-Type': phony_mimetype}
k.set_contents_from_string('testdata', headers)
k = bucket.lookup('foo/bar')
self.assertIsInstance(k, bucket.key_class)
self.assertEqual(k.content_type, phony_mimetype)
k = bucket.lookup('notthere')
self.assertIsNone(k)
def test_metadata(self):
"""Test key metadata operations."""
bucket = self._MakeBucket()
k = self._MakeKey(bucket=bucket)
key_name = k.name
s1 = 'This
|
martinkiefer/join-kde
|
code/JoinSampleCodeGenerator.py
|
Python
|
gpl-3.0
| 10,901
| 0.013026
|
#Code generator for join-sample based estimation
import Utils
local_size = 64
def generatePreamble(f):
print >>f, """
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
#ifndef M_SQRT2
#define M_SQRT2 1.41421356237309504880168872420969808
#endif
typedef double T;
"""
def rangeEstimateFunction(f):
print >>f, """
unsigned int range(unsigned int v, unsigned int u, unsigned int l){
if(v == 0){
return 0;
}
return v >= l && v <= u;
}
"""
def pointEstimateFunction(f):
print >>f, """
unsigned int point(unsigned int v, unsigned int p){
return v == p;
}
"""
def generateEstimateKernel(f,kname,qtypes):
print >>f, "__kernel void %s(" % kname
for i,k in enumerate(qtypes):
if k == "range":
print >>f, " __global unsigned int* c%s, unsigned int u%s, unsigned int l%s, " % (i,i,i)
elif k == "point":
print >>f, " __global unsigned int* c%s, unsigned int p%s, " % (i,i)
else:
raise Exception("Unsupported kernel.")
print >>f, " __global unsigned long* o, unsigned int ss){"
print >>f, " unsigned int counter = 0;"
print >>f, " for(unsigned int offset = 0; offset < ss; offset += get_global_size(0)){"
print >>f, " if (offset + get_global_id(0) < ss){"
for i,k in enumerate(qtypes):
if k == "point":
print >>f, " unsigned int ec%s = point(c%s[offset+get_global_id(0)], p%s);" % (i,i,i)
elif k == "range":
print >>f, " unsigned int ec%s = range(c%s[offset+get_global_id(0)], u%s, l%s);" % (i,i,i,i)
else:
raise Exception("Unsupported kernel.")
print >>f, " counter += 1 ",
for i,k in enumerate(qtypes):
print >>f, "&& ec%s" % i,
print >>f, ";"
print >>f, " }"
print >>f, " }"
print >>f, " o[get_global_id(0)] = counter;"
print >>f, "}"
print >>f
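# Illustrative sketch only (not part of the original generator): the emitters
# above can be chained to print a complete OpenCL source for a hypothetical
# two-column query with one range and one point predicate.
#
#   import sys
#   generatePreamble(sys.stdout)
#   rangeEstimateFunction(sys.stdout)
#   pointEstimateFunction(sys.stdout)
#   generateEstimateKernel(sys.stdout, "estimate", ["range", "point"])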
def generateCIncludes(f):
print >>f, """
#include <iostream>
#include <string>
#include <fstream>
#include <streambuf>
#include <nlopt.h>
#include <sstream>
#include <cmath>
#include <boost/compute/core.hpp>
#include <boost/compute/algorithm/transform.hpp>
#include <boost/compute/algorithm/reduce.hpp>
#include <boost/compute/container/vector.hpp>
#include <boost/compute/functional/math.hpp>
namespace compute = boost::compute;
"""
def generateGPUJoinSampleCode(i,query,estimator,stats,cu_factor):
ts, dv = stats
qtype = []
remap = []
#Generate Kernels
with open("./%s_kernels.cl" % i,'w') as cf:
generatePreamble(cf)
cols = Utils.generateInvariantColumns(query)
for j,indices in enumerate(cols):
qtype.extend([query.tables[j].columns[index].type for index in indices ])
            remap.extend([(j,index) for index in indices ])
rangeEstimateFunction(cf)
pointEstimateFunction(cf)
generateEstimateKernel(cf,"estimate",qtype)
with open("./%s_GPUJS.cpp" % i,'w') as cf:
generateCIncludes(cf)
generateGPUJoinSampleParameterArray(cf,query,estimator,qtype)
Utils.generateGPUVectorConverterFunction(cf)
Utils.generateUintFileReaderFunction(cf)
generateGPUJoinSampleEstimateFunction(cf,query,estimator,qtype)
generateGPUJoinSampleTestWrapper(cf,query,estimator,qtype)
print >>cf, """
int main( int argc, const char* argv[] ){
parameters p;
compute::device device = compute::system::default_device();
p.ctx = compute::context(device);
p.queue=compute::command_queue(p.ctx, device);
"""
print >>cf, """
std::ifstream t("./%s_kernels.cl");
t.exceptions ( std::ifstream::failbit | std::ifstream::badbit );
std::string str((std::istreambuf_iterator<char>(t)),
std::istreambuf_iterator<char>());
""" % i
    # Read table sizes, load the columns into memory, and transfer them to the GPU device
print >>cf, " std::stringstream iteration_stream;"
print >>cf, " p.iteration = (unsigned int) atoi(argv[2]);"
print >>cf, " iteration_stream << \"./iteration\" << std::setw(2) << std::setfill('0') << argv[2];"
print >>cf, " p.ss = atoi(argv[1]);"
print >> cf, " p.local = 64;"
print >> cf, " p.global = std::min((size_t) p.ctx.get_device().compute_units()*%s, ((p.ss-1)/p.local+1)*p.local);" % cu_factor
print >>cf, " p.ts = %s;" % (ts)
for cid,kernel in enumerate(qtype):
print >>cf, " std::stringstream s_c%s_stream ;" % (cid)
print >>cf, " s_c%s_stream << iteration_stream.str() << \"/jsample_\" << atoi(argv[1]) << \"_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid)
print >>cf, " std::string s_c%s_string = s_c%s_stream.str();" % (cid,cid)
print >>cf, " unsigned int* s_c%s = readUArrayFromFile(s_c%s_string.c_str());" % (cid,cid)
print >>cf, " p.s_c%s = toGPUVector(s_c%s, p.ss, p.ctx, p.queue);" % (cid,cid)
print >>cf
print >>cf, """
compute::program pr = compute::program::create_with_source(str,p.ctx);
try{
std::ostringstream oss;
pr.build(oss.str());
} catch(const std::exception& ex){
std::cout << pr.build_log() << std::endl;
}
"""
print >>cf, " p.out = compute::vector<unsigned long>(p.global, p.ctx);"
print >>cf, " p.estk = pr.create_kernel(\"estimate\");"
print >>cf, " std::string test_cardinality_string = iteration_stream.str() + \"/test_join_true.dump\";"
print >>cf, " p.test_cardinality = readUArrayFromFile(test_cardinality_string.c_str());"
for cid,ty in enumerate(qtype):
if ty == "range":
print >>cf, " std::string test_l_c%s_string = iteration_stream.str() + \"/test_join_l_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid)
print >>cf, " p.test_l_c%s= readUArrayFromFile(test_l_c%s_string.c_str());" % (cid,cid)
print >>cf, " std::string test_u_c%s_string = iteration_stream.str() + \"/test_join_u_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid)
print >>cf, " p.test_u_c%s = readUArrayFromFile(test_u_c%s_string.c_str());" % (cid,cid)
elif ty == "point":
print >>cf, " std::string test_p_c%s_string = iteration_stream.str() + \"/test_join_p_%s_%s.dump\";" % (cid,query.tables[remap[cid][0]].tid,query.tables[remap[cid][0]].columns[remap[cid][1]].cid)
print >>cf, " p.test_p_c%s = readUArrayFromFile(test_p_c%s_string.c_str());" % (cid,cid)
else:
raise Exception("I don't know this ctype.")
print >>cf
print >>cf, " join_test(&p);"
print >>cf, "}"
def generateGPUJoinSampleParameterArray(f,query,estimator,qtypes):
print >>f, """
typedef struct{
compute::command_queue queue;
compute::context ctx;
"""
print >>f, " unsigned int iteration;"
print >>f, " size_t ss;"
print >>f, " size_t global;"
print >>f, " size_t local;"
print >>f, " unsigned int ts;"
print >>f, " compute::kernel estk;"
for cid,kernel in enumerate(qtypes):
print >>f, " compute::vector<unsigned int> s_c%s;" % (cid)
for cid,kernel in enumerate(qtypes):
if kernel == "range":
print >>f, " unsigned int* test_l_c%s;" % (cid)
print >>f, " unsigned int* test_u_c%s;" % (cid)
else:
print >>f, " unsigned int* test_p_c%s;" % (cid)
print >>f, " compute::vector<unsigned long> out;"
print >>f, " unsigned int* test_cardinality;"
print >>f, """
} parameters;
"""
def generateGPUJoinSampleEstimateFunction(f, query, estimator, qtypes):
print >> f, "double join_estimate_instance(parameters* p"
for cid, qtype in enumerate(qtypes):
# Start with co
|
BambooHR/rapid
|
rapid/master/data/migrations/versions/ce9be6e8354c_test_history_date_created.py
|
Python
|
apache-2.0
| 1,560
| 0.005769
|
"""
Copyright (c) 2015 Michael Bright and Bamboo HR LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
test_history_date_created
Revision ID: ce9be6e8354c
Revises: bf363c3a9ef0
Create Date: 2018-04-30 18:44:54.258839
"""
# revision identifiers, used by Alembic.
import datetime
from sqlalchemy import func
revision = 'ce9be6e8354c'
down_revision = 'bf363c3a9ef0'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
if 'sqlite' == op.get_context().dialect.name:
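        # SQLite's ALTER TABLE cannot add a NOT NULL column with a non-constant
        # server default, so this branch only sets a Python-side default.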
        op.add_column('qa_test_histories', sa.Column('date_created', sa.DateTime(), default=datetime.datetime.utcnow()))
else:
        op.add_column('qa_test_histories', sa.Column('date_created', sa.DateTime(), nullable=False, server_default=func.now(), default=datetime.datetime.utcnow()))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('qa_test_histories', 'date_created')
### end Alembic commands ###
|
ludia/kinesis_producer
|
kinesis_producer/accumulator.py
|
Python
|
mit
| 1,297
| 0
|
import time
class RecordAccumulator(object):
def __init__(self, buffer_class, config):
self.config = config
        self.buffer_time_limit = config['buffer_time_limit']
self._buffer_class = buffer_class
self._reset_buffer()
def _reset_buffer(self):
self._buffer = self._buffer_class(config=self.config)
self._buffer_started_at = None
def try_append(self, record):
"""Attempt to accumulate a record. Return False if buffer is full."""
success = self._buffer.try_append(record)
if success:
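            # Note: the timestamp is refreshed on every successful append, so the
            # time-based flush in is_ready() counts from the most recent record.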
self._buffer_started_at = time.time()
return success
def is_ready(self):
"""Check whether the buffer is ready."""
if self._buffer_started_at is None:
return False
if self._buffer.is_ready():
return True
elapsed = time.time() - self._buffer_started_at
return elapsed >= self.buffer_time_limit
def has_records(self):
"""Check whether the buffer has records."""
return self._buffer_started_at is not None
def flush(self):
"""Close the buffer and return it."""
if self._buffer_started_at is None:
return
buf = self._buffer.flush()
self._reset_buffer()
return buf
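# Illustrative usage sketch (not part of the original module). It assumes a
# hypothetical buffer class exposing the try_append/is_ready/flush interface
# relied on above, plus a config dict carrying 'buffer_time_limit':
#
#   acc = RecordAccumulator(SomeRecordBuffer, {'buffer_time_limit': 0.5})
#   while acc.try_append(b'record'):
#       pass                      # append until the buffer reports it is full
#   closed_buffer = acc.flush()   # hand the closed buffer to the sender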
|
Taapat/enigma2-plugin-youtube
|
test/RcModel.py
|
Python
|
gpl-2.0
| 424
| 0.03066
|
class RcModel:
RcModels = {}
	def rcIsDefault(self):
return True
def getRcFile(self, ext=''):
return ext
def getRcFolder(self, GetDefault=True):
return 'enigma2/data/'
def getRcImg(self):
		return self.getRcFile('enigma2/data/rc.png')
def getRcPositions(self):
return self.getRcFile('enigma2/data/rcpositions.xml')
def getRcLocation(self):
return self.getRcFile('enigma2/data/')
rc_model = RcModel()
|
xbmc/atv2
|
xbmc/lib/libPython/Python/Lib/test/test_codecmaps_cn.py
|
Python
|
gpl-2.0
| 1,062
| 0.00565
|
#!/usr/bin/env python
#
# test_codecmaps_cn.py
# Codec mapping tests for PRC encodings
#
# $CJKCodecs: test_codecmaps_cn.py,v 1.3 2004/06/19 06:09:55 perky Exp $
from test import test_support
from test import test_multibytecodec_support
import unittest
class TestGB2312Map(test_multibytecodec_support.TestBase_Mapping,
                    unittest.TestCase):
encoding = 'gb2312'
mapfilename = 'EUC-CN.TXT'
mapfileurl = 'http://people.freebsd.org/~perky/i18n/EUC-CN.TXT'
class TestGBKMap(test_multibytecodec_support.TestBase_Mapping,
unittest.TestCase):
encoding = 'gbk'
mapfilename = 'CP936.TXT'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/VENDORS/' \
'MICSFT/WINDOWS/CP936.TXT'
def test_main():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestGB2312Map))
suite.addTest(unittest.makeSuite(TestGBKMap))
test_support.run_suite(suite)
test_multibytecodec_support.register_skip_expected(TestGB2312Map, TestGBKMap)
if __name__ == "__main__":
test_main()
|
ShaguptaS/python
|
bigml/tests/create_dataset_steps.py
|
Python
|
apache-2.0
| 5,400
| 0.005926
|
import time
import json
from datetime import datetime, timedelta
from world import world, res_filename
from bigml.api import HTTP_CREATED
from bigml.api import HTTP_OK
from bigml.api import HTTP_ACCEPTED
from bigml.api import FINISHED
from bigml.api import FAULTY
from bigml.api import get_status
import read_dataset_steps as read
#@step(r'I create a dataset$')
def i_create_a_dataset(step):
resource = world.api.create_dataset(world.source['resource'])
world.status = resource['code']
assert world.status == HTTP_CREATED
world.location = resource['location']
world.dataset = resource['object']
world.datasets.append(resource['resource'])
#@step(r'I download the dataset file to "(.*)"$')
def i_export_a_dataset(step, local_file):
world.api.download_dataset(world.dataset['resource'],
filename=res_filename(local_file))
#@step(r'file "(.*)" is like file "(.*)"$')
def files_equal(step, local_file, data):
contents_local_file = open(res_filename(local_file)).read()
contents_data = open(res_filename(data)).read()
assert contents_local_file == contents_data
#@step(r'I create a dataset with "(.*)"')
def i_create_a_dataset_with(step, data="{}"):
resource = world.api.create_dataset(world.source['resource'],
json.loads(data))
world.status = resource['code']
assert world.status == HTTP_CREATED
world.location = resource['location']
world.dataset = resource['object']
world.datasets.append(resource['resource'])
#@step(r'I wait until the dataset status code is either (\d) or (\d) less than (\d+)')
def wait_until_dataset_status_code_is(step, code1, code2, secs):
start = datetime.utcnow()
read.i_get_the_dataset(step, world.dataset['resource'])
status = get_status(world.dataset)
while (status['code'] != int(code1) and
status['code'] != int(code2)):
time.sleep(3)
assert datetime.utcnow() - start < timedelta(seconds=int(secs))
read.i_get_the_dataset(step, world.dataset['resource'])
status = get_status(world.dataset)
assert status['code'] == int(code1)
#@step(r'I wait until the dataset is ready less than (\d+)')
def the_dataset_is_finished_in_less_than(step, secs):
wait_until_dataset_status_code_is(step, FINISHED, FAULTY, secs)
#@step(r'I make the dataset public')
def make_the_dataset_public(step):
resource = world.api.update_dataset(world.dataset['resource'],
{'private': False})
world.status = resource['code']
assert world.status == HTTP_ACCEPTED
world.location = resource['location']
world.dataset = resource['object']
#@step(r'I get the dataset status using the dataset\'s public url')
def build_local_dataset_from_public_url(step):
world.dataset = world.api.get_dataset("public/%s" %
world.dataset['resource'])
#@step(r'the dataset\'s status is FINISHED')
def dataset_status_finished(step):
assert get_status(world.dataset)['code'] == FINISHED
#@step(r'I create a dataset extracting a (.*) sample$')
def i_create_a_split_dataset(step, rate):
world.origin_dataset = world.dataset
resource = world.api.create_dataset(world.dataset['resource'],
{'sample_rate': float(rate)})
world.status = resource['code']
assert world.status == HTTP_CREATED
world.location = resource['location']
world.dataset = resource['object']
world.datasets.append(resource['resource'])
#@step(r'I compare the datasets\' instances$')
def i_compare_datasets_instances(step):
world.datasets_instances = (world.dataset['rows'],
world.origin_dataset['rows'])
#@step(r'the proportion of instances between datasets is (.*)$')
def proportion_datasets_instances(step, rate):
if (int(world.datasets_instances[1] * float(rate)) == world.datasets_instances[0]):
assert True
else:
assert False, (
"Instances in split: %s, expected %s" % (
world.datasets_instances[0],
int(world.datasets_instances[1] * float(rate))))
#@step(r'I create a dataset associated to centroid "(.*)"')
def i_create_a_dataset_from_cluster(step, centroid_id):
resource = world.api.create_dataset(
world.cluster['resource'],
args={'centroid': centroid_id})
world.status = resource['code']
assert world.status == HTTP_CREATED
world.location = resource['location']
world.dataset = resource['object']
world.datasets.append(resource['resource'])
#@step(r'I create a dataset from the cluster and the centroid$')
def i_create_a_dataset_from_cluster_centroid(step):
i_create_a_dataset_from_cluster(step, world.centroid['centroid_id'])
#@step(r'the dataset is associated to the centroid "(.*)" of the cluster')
def is_associated_to_centroid_id(step, centroid_id):
cluster = world.api.get_cluster(world.cluster['resource'])
world.status = cluster['code']
assert world.status == HTTP_OK
assert "dataset/%s" % (
cluster['object']['cluster_datasets'][
centroid_id]) == world.dataset['resource']
#@step(r'I check that the dataset is created for the cluster and the centroid$')
def i_check_dataset_from_cluster_centroid(step):
is_associated_to_centroid_id(step, world.centroid['centroid_id'])
|
proteus-cpi/pyside-chaco-template
|
chaco-in-pyside/chaco-in-pyside/__main__.py
|
Python
|
lgpl-3.0
| 147
| 0.006803
|
from ChacoInPySideUi import *
import sys
if __name__ == '__main__':
print "Starting chaco_in_pysi
|
de app"
ChacoInPySideUi_main(sys.argv)
|
ritstudentgovernment/PawPrints
|
petitions/management/commands/renderfiles.py
|
Python
|
apache-2.0
| 3,203
| 0.000312
|
"""
Renders css/js files to use config data in config.yml
Peter Zujko
"""
from django.core.management.base import BaseCommand
from django.conf import settings
from django.template.loader import render_to_string
from os import listdir
from os.path import isfile, join
import os
import json
import base64
class Command(BaseCommand):
petitions_dir = os.path.join(settings.BASE_DIR, "petitions/static")
profile_dir = os.path.join(settings.BASE_DIR, "profile/static")
def handle(self, *args, **options):
CONFIG = settings.CONFIG
social = []
# Set icons to base64
for icon in CONFIG['social']['social_links']:
data = icon
file_loc = settings.BASE_DIR+icon['imgURL']
ext = file_loc.split('.')[1]
with open(file_loc, 'rb') as file:
data_str = ""
if ext == 'svg':
data_str = "data:image/svg+xml;utf8;base64,"
elif ext == 'png':
data_str = "data:image/png;base64,"
data['imgURL'] = data_str + \
base64.b64encode(file.read()).decode("utf-8")
social.append(data)
petition_file_names = [f for f in listdir(
self.petitions_dir) if isfile(join(self.petitions_dir, f))]
profile_file_names = [f for f in listdir(
self.profile_dir) if isfile(join(self.profile_dir, f))]
colors = settings.CONFIG["ui"]["colors"]
data_object = {
'name': CONFIG['name'],
'colors': colors,
'header_title': CONFIG['text']['header_title'],
'images': CONFIG['ui']['slideshow_images'],
'social': social,
'default_title': CONFIG['petitions']['default_title'],
'default_body': CONFIG['petitions']['default_body'],
'org': CONFIG['organization']
}
# Grab all file names in petitions/static
for file in petition_file_names:
path = self.petitions_dir + "/" + file
template = render_to_string(path, data_object)
static_dir = ""
# Check file extension
ext = file.split(".")[1]
if ext == "css":
static_dir = os.path.join(
settings.BASE_DIR, 'static/css/'+file)
elif ext == "js":
static_dir = os.path.join(settings.BASE_DIR, 'static/js/'+file)
with open(static_dir, 'w+') as f:
f.write(template)
for file in profile_file_names:
path = self.profile_dir + "/" + file
template = render_to_string(path, data_object)
static_dir = ""
# Check file extension
ext = file.split(".")[1]
if ext == "css":
                static_dir = os.path.join(
                    settings.BASE_DIR, 'static/css/'+file)
elif ext == "js":
static_dir = os.path.join(settings.BASE_DIR, 'static/js/'+file)
with open(static_dir, 'w+') as f:
                f.write(template)
print("Rendered the following " +
str(petition_file_names) + str(profile_file_names))
|
witten/borgmatic
|
tests/integration/config/test_validate.py
|
Python
|
gpl-3.0
| 6,931
| 0.001299
|
import io
import string
import sys
import pytest
from flexmock import flexmock
from borgmatic.config import validate as module
def test_schema_filename_returns_plausable_path():
schema_path = module.schema_filename()
assert schema_path.endswith('/schema.yaml')
def mock_config_and_schema(config_yaml, schema_yaml=None):
'''
    Set up mocks for the given config YAML string and the schema YAML string, or the default
    schema if no schema is provided. The idea is that the code under test consumes these mocks
when parsing the configuration.
'''
config_stream = io.StringIO(config_yaml)
if schema_yaml is None:
schema_stream = open(module.schema_filename())
else:
schema_stream = io.StringIO(schema_yaml)
builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('config.yaml').and_return(config_stream)
builtins.should_receive('open').with_args('schema.yaml').and_return(schema_stream)
def test_parse_configuration_transforms_file_into_mapping():
mock_config_and_schema(
'''
location:
source_directories:
- /home
- /etc
repositories:
- hostname.borg
retention:
keep_minutely: 60
keep_hourly: 24
keep_daily: 7
consistency:
checks:
- repository
- archives
'''
)
result = module.parse_configuration('config.yaml', 'schema.yaml')
assert result == {
'location': {'source_directories': ['/home', '/etc'], 'repositories': ['hostname.borg']},
'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60},
'consistency': {'checks': ['repository', 'archives']},
}
def test_parse_configuration_passes_through_quoted_punctuation():
escaped_punctuation = string.punctuation.replace('\\', r'\\').replace('"', r'\"')
mock_config_and_schema(
'''
location:
source_directories:
- /home
repositories:
- "{}.borg"
'''.format(
escaped_punctuation
)
)
result = module.parse_configuration('config.yaml', 'schema.yaml')
assert result == {
'location': {
'source_directories': ['/home'],
'repositories': ['{}.borg'.format(string.punctuation)],
}
}
def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
mock_config_and_schema(
'''
location:
source_directories:
- /home
repositories:
- hostname.borg
''',
'''
map:
location:
required: true
map:
source_directories:
required: true
seq:
- type: scalar
repositories:
required: true
seq:
- type: scalar
''',
)
module.parse_configuration('config.yaml', 'schema.yaml')
def test_parse_configuration_inlines_include():
mock_config_and_schema(
'''
location:
source_directories:
- /home
repositories:
- hostname.borg
retention:
!include include.yaml
'''
)
builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('include.yaml').and_return(
'''
keep_daily: 7
keep_hourly: 24
'''
)
result = module.parse_configuration('config.yaml', 'schema.yaml')
assert result == {
'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
'retention': {'keep_daily': 7, 'keep_hourly': 24},
}
def test_parse_configuration_merges_include():
mock_config_and_schema(
'''
location:
source_directories:
- /home
repositories:
- hostname.borg
retention:
keep_daily: 1
<<: !include include.yaml
'''
)
builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('include.yaml').and_return(
'''
keep_daily: 7
keep_hourly: 24
'''
)
result = module.parse_configuration('config.yaml', 'schema.yaml')
assert result == {
'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
'retention': {'keep_daily': 1, 'keep_hourly': 24},
}
def test_parse_configuration_raises_for_missing_config_file():
with pytest.raises(FileNotFoundError):
module.parse_configuration('config.yaml', 'schema.yaml')
def test_parse_configuration_raises_for_missing_schema_file():
mock_config_and_schema('')
builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('schema.yaml').and_raise(FileNotFoundError)
with pytest.raises(FileNotFoundError):
module.parse_configuration('config.yaml', 'schema.yaml')
def test_parse_configuration_raises_for_syntax_error():
mock_config_and_schema('foo:\nbar')
with pytest.raises(ValueError):
module.parse_configuration('config.yaml', 'schema.yaml')
def test_parse_configuration_raises_for_validation_error():
mock_config_and_schema(
'''
location:
source_directories: yes
repositories:
- hostname.borg
'''
)
with pytest.raises(module.Validation_error):
module.parse_configuration('config.yaml', 'schema.yaml')
def test_parse_configuration_applies_overrides():
mock_config_and_schema(
'''
location:
source_directories:
- /home
repositories:
- hostname.borg
local_path: borg1
'''
)
result = module.parse_configuration(
'config.yaml', 'schema.yaml', overrides=['location.local_path=borg2']
)
assert result == {
'location': {
'source_directories': ['/home'],
'repositories': ['hostname.borg'],
'local_path': 'borg2',
}
}
def test_parse_configuration_applies_normalization():
mock_config_and_schema(
'''
location:
source_directories:
- /home
repositories:
- hostname.borg
exclude_if_present: .nobackup
'''
)
result = module.parse_configuration('config.yaml', 'schema.yaml')
assert result == {
'location': {
'source_directories': ['/home'],
'repositories': ['hostname.borg'],
'exclude_if_present': ['.nobackup'],
}
}
|
benaustin2000/ShanghaiHousePrice
|
GetChengJiaoListV0.2.py
|
Python
|
apache-2.0
| 7,661
| 0.022519
|
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 27 23:40:50 2018
@author: austin
"""
import requests
import re
from bs4 import BeautifulSoup,SoupStrainer
#import matplotlib.pyplot as plt
from fake_useragent import UserAgent
import time,random,sys
import pandas  # pandas makes this much easier
#ua=UserAgent()  # use a random header to mimic a human visitor
#headers1={'User-Agent': 'ua.random'}  # use a random header to mimic a human visitor
TotalPrice=[] #Total price
InitialPrice=[]
UnitPrice=[] #price per meter
HouseArea=[]
HouseHeight=[]
HouseConfig=[]
HouseCommunit=[]
HouseLocMajor=[]
HouseLocMinor=[]
HouseBuildYear=[]
HouseDealDate=[]
HouseDealCycle=[]
LinkUrl=[]
StrainerPriceInfo = SoupStrainer('a',attrs={'class':'nostyle'})
StrainerChengJiaoList = SoupStrainer('ul',attrs={'class':'listContent'})
StrainerTotalPage = SoupStrainer('div',attrs={'class':'page-box house-lst-page-box'}) # get the current maximum page count
PianQuList= ['北蔡', '碧云', '曹路', '川沙', '大团镇', '合庆', '高行', '高东', '花木', '航头', '惠南', '金桥', '金杨', '康桥', '陆家嘴', '老港镇', '临港新城', '联洋', '泥城镇', '南码头', '三林', '世博', '书院镇', '塘桥', '唐镇', '外高桥', '万祥镇', '潍坊', '宣桥', '新场', '御桥', '杨东', '源深', '洋泾', '张江', '祝桥', '周浦']
PianQuLink= ['/chengjiao/beicai/', '/chengjiao/biyun/', '/chengjiao/caolu/', '/chengjiao/chuansha/', '/chengjiao/datuanzhen/', '/chengjiao/geqing/', '/chengjiao/gaohang/', '/chengjiao/gaodong/', '/chengjiao/huamu/', '/chengjiao/hangtou/', '/chengjiao/huinan/', '/chengjiao/jinqiao/', '/chengjiao/jinyang/', '/chengjiao/kangqiao/', '/chengjiao/lujiazui/', '/chengjiao/laogangzhen/', '/chengjiao/lingangxincheng/', '/chengjiao/lianyang/', '/chengjiao/nichengzhen/', '/chengjiao/nanmatou/', '/chengjiao/sanlin/', '/chengjiao/shibo/', '/chengjiao/shuyuanzhen/', '/chengjiao/tangqiao/', '/chengjiao/tangzhen/', '/chengjiao/waigaoqiao/', '/chengjiao/wanxiangzhen/', '/chengjiao/weifang/', '/chengjiao/xuanqiao/', '/chengjiao/xinchang/', '/chengjiao/yuqiao1/', '/chengjiao/yangdong/', '/chengjiao/yuanshen/', '/chengjiao/yangjing/', '/chengjiao/zhangjiang/', '/chengjiao/zhuqiao/', '/chengjiao/zhoupu/']
#PianQuList=[]
#PianQuList.index('唐镇') #24
#PianQuLink[PianQuList.index('唐镇')] #'/chengjiao/tangzhen/'
MaxGetPage=100
TotalPage=MaxGetPage
HouseLocMajorString='浦东'
def SaveList():
df=pandas.DataFrame({'总价':TotalPrice,'单价':UnitPrice,'房型':HouseConfig,'成交日期':HouseDealDate,
'成交周期':HouseDealCycle,'面积':HouseArea,'小区':HouseCommunit,'楼层':HouseHeight,
'区':HouseLocMajor,'板块':HouseLocMinor,'初始报价':InitialPrice,'楼龄':HouseBuildYear,
'网址':LinkUrl})
    datetimestr=time.strftime('%Y-%m-%d-%H-%M-%S',time.localtime(time.time()))
df.to_csv(datetimestr+'-'+HouseLocMajorString+'-LianJia.csv')
begin = time.time()
for PianQuGet in PianQuList:
    i=1
RetryTimes=0
PianQuNum=PianQuList.index(PianQuGet)
    while i<=TotalPage: # capped at 100 pages
#http://sh.lianjia.com/chengjiao/tangzhen/pg1/
domain = 'http://sh.lianjia.com'+PianQuLink[PianQuNum]+'pg'+str(i)
        headers1 = {'User-Agent': UserAgent().random, 'Accept-Language': 'zh-CN,zh;q=0.8'} # use a random header to mimic a human visitor
sleeptime=random.randint(10, 20)/10
time.sleep(sleeptime)
        res = requests.get(domain,headers=headers1) # fetch the assembled page URL
#<ul class="listContent">
PageNumHtml = BeautifulSoup(res.text,'html.parser',parse_only=StrainerTotalPage)
        # turn the page-data string into a dictionary
        if len (PageNumHtml) == 0: # hit an empty page
# PageNumDict = []
# if len(PageNumDict) == 0: #25
if RetryTimes>10:
sys.exit("Error to get Page: "+domain)
else:
RetryTimes+=1
sleeptime=random.randint(10, 20)/10+RetryTimes
time.sleep(sleeptime)
print('Retry after delay '+str(sleeptime)+' s :'+domain)
continue
RetryTimes=0
PageNumDict = eval(PageNumHtml.div['page-data']) # {'totalPage': 25, 'curPage': 1}
TotalPage = int(PageNumDict['totalPage'])
if TotalPage> MaxGetPage:
TotalPage=MaxGetPage
        # report scraping progress
print('已经抓取'+PianQuGet+' 第'+str(i)+'/'+str(TotalPage)+'页 ''耗时: %.1f 分' %((time.time()-begin)/60))
i+=1
ChengJiaoListHtml=BeautifulSoup(res.text,'html.parser',parse_only=StrainerChengJiaoList)
for ListItem in ChengJiaoListHtml.find_all('li'):
#<div class="title"><a href="https://sh.lianjia.com/chengjiao/107100614568.html" target="_blank">创新佳苑 1室1厅 61.67平米</a></div>
# try:
if ListItem.div.contents[1].find(class_='dealDate').string == '近30天内成交':
continue
else:
HouseString=[]
HouseString1=[]
HouseString2=[]
HouseString3=[]
LinkUrl.append(ListItem.div.contents[0].a['href']) # https://sh.lianjia.com/chengjiao/107100614568.html
HouseString = ListItem.div.contents[0].string.split() #['金唐公寓', '2室2厅', '89.06平米']
HouseArea.append(HouseString[2])
HouseConfig.append(HouseString[1])
HouseCommunit.append(HouseString[0])
HouseString1=ListItem.div.contents[1].div.text.split('|') #'['南 ', ' 精装\xa0', ' 无电梯']
HouseDealDate.append(ListItem.div.contents[1].find(class_='dealDate').string) #'2018.10.24' or '近30天内成交'
TotalPrice.append(float(ListItem.div.contents[1].find(class_='number').string)) #386
HouseString2=ListItem.div.contents[2].contents[0].text.split() #'中楼层(共6层) 2006年建板楼'
HouseHeight.append(HouseString2[0])
HouseBuildYear.append(HouseString2[1])
UnitPrice.append(int(ListItem.div.find(class_='unitPrice').span.string)) #unitPrice 43342
#HouseString3 = ListItem.div.find(class_='dealCycleTxt').contents
HouseLocMinor.append(PianQuList[PianQuNum])
HouseLocMajor.append(HouseLocMajorString)
HouseString=ListItem.div.find(text=re.compile('挂牌')) # '挂牌391万'
if (HouseString == None):
InitialPrice.append(' ')
else:
InitialPrice.append(int(re.findall(r'\d+',HouseString)[0]))
HouseString=ListItem.div.find(text=re.compile('成交周期')) # '成交周期119天' ->119
if (HouseString == None):
HouseDealCycle.append(' ')
else:
HouseDealCycle.append(int(re.findall(r'\d+',HouseString)[0]))
# except:
# info=sys.exc_info()
# print(info[0],":",info[1])
SaveList()
#df=pandas.DataFrame({'总价':TotalPrice,'单价':UnitPrice,'房型':HouseConfig,'成交日期':HouseDealDate,
# '成交周期':HouseDealCycle,'面积':HouseArea,'小区':HouseCommunit,'楼层':HouseHeight,
# '区':HouseLocMajor,'板块':HouseLocMinor,'初始报价':InitialPrice,'楼龄':HouseBuildYear,
# '网址':LinkUrl})
#
#datetimestr=time.strftime('%Y-%m-%d-%H-%M-%S',time.localtime(time.time()))
#df.to_csv(datetimestr+'-'+HouseLocMajorString+'-LianJia.csv')
|
ooici/coi-services
|
ion/agents/data/test/test_dsa_moas_dosta.py
|
Python
|
bsd-2-clause
| 2,513
| 0.004377
|
#!/usr/bin/env python
"""
@package ion.agents.data.test.test_moas_dosta
@file ion/agents/data/test_moas_dosta
@author Bill French
@brief End to end testing for moas dosta
"""
__author__ = 'Bill French'
import gevent
from pyon.public import log
from nose.plugins.attrib import attr
from ion.agents.data.test.dataset_test import DatasetAgentTestCase
from ion.services.dm.test.dm_test_case import breakpoint
import unittest
###############################################################################
# Global constants.
###############################################################################
@attr('INT', group='sa')
class HypmDOSTATest(DatasetAgentTestCase):
"""
    Verify the dataset agent can harvest data files, parse the data, publish,
    ingest and retrieve stored data.
"""
def setUp(self):
        self.test_config.initialize(
instrument_device_name = 'DOSTA-01',
preload_scenario= 'GENG,DOSTA',
stream_name= 'ggldr_dosta_delayed',
            # Uncomment this line to load the driver from a local repository
#mi_repo = '/Users/wfrench/Workspace/code/wfrench/marine-integrations'
)
super(HypmDOSTATest, self).setUp()
def test_parse(self):
"
|
""
Verify file import and connection ids
"""
self.assert_initialize()
self.create_sample_data("moas_dosta/file_1.mrg", "unit_363_2013_245_6_6.mrg")
self.create_sample_data("moas_dosta/file_2.mrg", "unit_363_2013_245_10_6.mrg")
granules = self.get_samples(self.test_config.stream_name, 4)
self.assert_data_values(granules, 'moas_dosta/merged.result.yml')
def test_large_file(self):
"""
Verify a large file import with no buffering
"""
self.assert_initialize()
self.create_sample_data("moas_dosta/unit_363_2013_199_0_0.mrg", "unit_363_2013_199_0_0.mrg")
gevent.sleep(10)
self.assert_sample_queue_size(self.test_config.stream_name, 1)
self.create_sample_data("moas_dosta/unit_363_2013_199_1_0.mrg", "unit_363_2013_199_1_0.mrg")
gevent.sleep(10)
self.assert_sample_queue_size(self.test_config.stream_name, 2)
self.create_sample_data("moas_dosta/unit_363_2013_245_6_6.mrg", "unit_363_2013_245_6_6.mrg")
self.get_samples(self.test_config.stream_name, 171, 180)
self.assert_sample_queue_size(self.test_config.stream_name, 0)
def test_capabilities(self):
self.assert_agent_capabilities()
|
Boussadia/weboob
|
modules/sachsen/pages.py
|
Python
|
agpl-3.0
| 4,869
| 0.004518
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2014 Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.browser2.page import HTMLPage, method, ListElement, ItemElement
from weboob.tools.browser2.filters import Env, CleanText, Regexp, Field, DateTime, Map, Attr
from weboob.capabilities.gauge import Gauge, GaugeMeasure, GaugeSensor
from weboob.capabilities.base import NotAvailable, NotLoaded
import re
__all__ = ['ListPage', 'HistoryPage']
class ListPage(HTMLPage):
@method
class get_rivers_list(ListElement):
item_xpath = ".//a[@onmouseout='pegelaus()']"
class item(ItemElement):
klass = Gauge
forecasts = {'pf_gerade.png': u'stable',
'pf_unten.png': u'Go down',
'pf_oben.png': u'Go up',
}
alarmlevel = {"as1.gif": u"Alarmstufe 1", "as2.gif": u"Alarmstufe 2",
"as3.gif": u"Alarmstufe 3", "as4.gig": u"Alarmstufe 4",
"qua_grau.gif": u"No alarm function", "p_gruen.gif": u"",
"qua_weiss.gif": u"no data", "as0.gif": u"",
"MNW.gif": u""}
obj_id = CleanText(Env('id'))
obj_name = CleanText(Env('name'), "'")
obj_city = Regexp(Field('name'), '^([^\s]+).*')
obj_object = Env('object')
def parse(self, el):
                div = el.getparent()
img = Regexp(Attr('.//img', 'src'), "(.*?)/(.*)", "\\2")(div)
data = unicode(el.attrib['onmouseover']) \
.strip('pegelein(').strip(')').replace(",'", ",").split("',")
self.env['id'] = data[7].strip()
self.env['name'] = data[0]
                self.env['object'] = data[1]
self.env['datetime'] = data[2]
self.env['levelvalue'] = data[3]
self.env['flowvalue'] = data[4]
self.env['forecast'] = data[5]
self.env['alarm'] = img
def add_sensor(self, sensors, name, unit, value, forecast, alarm, date):
sensor = GaugeSensor("%s-%s" % (self.obj.id, name.lower()))
sensor.name = name
sensor.unit = unit
sensor.forecast = forecast
lastvalue = GaugeMeasure()
lastvalue.alarm = alarm
try:
lastvalue.level = float(value)
except ValueError:
lastvalue.level = NotAvailable
lastvalue.date = date
sensor.lastvalue = lastvalue
sensor.history = NotLoaded
sensor.gaugeid = self.obj.id
sensors.append(sensor)
def obj_sensors(self):
sensors = []
lastdate = DateTime(Regexp(Env('datetime'), r'(\d+)\.(\d+)\.(\d+) (\d+):(\d+)', r'\3-\2-\1 \4:\5', default=NotAvailable))(self)
forecast = Map(Env('forecast'), self.forecasts, default=NotAvailable)(self)
alarm = Map(Env('alarm'), self.alarmlevel, default=u'')(self)
self.add_sensor(sensors, u"Level", u"cm", self.env['levelvalue'], forecast, alarm, lastdate)
self.add_sensor(sensors, u"Flow", u"m3/s", self.env['flowvalue'], forecast, alarm, lastdate)
return sensors
class HistoryPage(HTMLPage):
@method
class iter_history(ListElement):
item_xpath = '//table[@width="215"]/tr'
class item(ItemElement):
klass = GaugeMeasure
verif = re.compile("\d\d.\d\d.\d+ \d\d:\d\d")
def condition(self):
return self.verif.match(self.el[0].text_content())
obj_date = DateTime(Regexp(CleanText('.'), r'(\d+)\.(\d+)\.(\d+) (\d+):(\d+)', r'\3-\2-\1 \4:\5'))
sensor_types = [u'Level', u'Flow']
def obj_level(self):
index = self.sensor_types.index(self.env['sensor'].name) + 1
try:
return float(self.el[index].text_content())
except ValueError:
return NotAvailable
# TODO: history.alarm
|
DJones81/GTNewsDev
|
gtnewsdev/geonewsapi/migrations/0017_auto_20151201_1555.py
|
Python
|
gpl-2.0
| 420
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('geonewsapi', '0016_auto_20151201_1517'),
]
operations = [
migrations.AlterField(
model_name='article',
name='category',
field=models.CharField(max_length=20, blank=True),
),
]
|