repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
Laimiux/mydeatree | refs/heads/master | django/contrib/gis/gdal/tests/test_geom.py | 154 | from django.contrib.gis.gdal import OGRGeometry, OGRGeomType, \
OGRException, OGRIndexError, SpatialReference, CoordTransform, \
GDAL_VERSION
from django.utils import unittest
from django.contrib.gis.geometry.test_data import TestDataMixin
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."
def test00a_geomtype(self):
"Testing OGRGeomType object."
# OGRGeomType should initialize on all these inputs.
try:
g = OGRGeomType(1)
g = OGRGeomType(7)
g = OGRGeomType('point')
g = OGRGeomType('GeometrycollectioN')
g = OGRGeomType('LINearrING')
g = OGRGeomType('Unknown')
except:
self.fail('Could not create an OGRGeomType object!')
# Should throw TypeError on this input
self.assertRaises(OGRException, OGRGeomType, 23)
self.assertRaises(OGRException, OGRGeomType, 'fooD')
self.assertRaises(OGRException, OGRGeomType, 9)
# Equivalence can take strings, ints, and other OGRGeomTypes
self.assertEqual(True, OGRGeomType(1) == OGRGeomType(1))
self.assertEqual(True, OGRGeomType(7) == 'GeometryCollection')
self.assertEqual(True, OGRGeomType('point') == 'POINT')
self.assertEqual(False, OGRGeomType('point') == 2)
self.assertEqual(True, OGRGeomType('unknown') == 0)
self.assertEqual(True, OGRGeomType(6) == 'MULtiPolyGON')
self.assertEqual(False, OGRGeomType(1) != OGRGeomType('point'))
self.assertEqual(True, OGRGeomType('POINT') != OGRGeomType(6))
# Testing the Django field name equivalent property.
self.assertEqual('PointField', OGRGeomType('Point').django)
self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
self.assertEqual(None, OGRGeomType('none').django)
# 'Geometry' initialization implies an unknown geometry type.
gt = OGRGeomType('Geometry')
self.assertEqual(0, gt.num)
self.assertEqual('Unknown', gt.name)
def test00b_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertTrue(OGRGeomType(wkb25bit + 1) == 'Point25D')
self.assertTrue(OGRGeomType('MultiLineString25D') == (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
def test01a_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test01a_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
# First with ewkt output when no SRID in EWKT
self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
# No test consumption with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test01b_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
if GDAL_VERSION >= (1, 8):
# In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
# replaced with <gml:MultiGeometry>.
exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
self.assertEqual(exp_gml, geom.gml)
def test01c_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex, geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test01d_wkb(self):
"Testing WKB input/output."
from binascii import b2a_hex
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex)
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test01e_json(self):
"Testing GeoJSON input/output."
from django.contrib.gis.gdal.prototypes.geom import GEOJSON
if not GEOJSON: return
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
self.assertEqual(g.json, geom.json)
self.assertEqual(g.json, geom.geojson)
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
def test02_points(self):
"Testing Point objects."
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test03_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
self.assertEqual(mgeom1, mgeom2) # they should equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test04_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(True, linestr == OGRGeometry(ls.wkt))
self.assertEqual(True, linestr != prev)
self.assertRaises(OGRIndexError, linestr.__getitem__, len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test05_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(True, mlinestr == OGRGeometry(mls.wkt))
self.assertEqual(True, mlinestr != prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
self.assertRaises(OGRIndexError, mlinestr.__getitem__, len(mlinestr))
def test06_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
#self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(True, lr == OGRGeometry(rr.wkt))
self.assertEqual(True, lr != prev)
prev = lr
def test07a_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180,-90,180,90)
p = OGRGeometry.from_bbox( bbox )
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(True, poly == OGRGeometry(p.wkt))
self.assertEqual(True, poly != prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test07b_closepolygons(self):
"Testing closing Polygon objects."
# Both rings in this geometry are not closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
print "\nBEGIN - expecting IllegalArgumentException; safe to ignore.\n"
try:
c = poly.centroid
except OGRException:
# Should raise an OGR exception, rings are not closed
pass
else:
self.fail('Should have raised an OGRException!')
print "\nEND - expecting IllegalArgumentException; safe to ignore.\n"
# Closing the rings -- doesn't work on GDAL versions 1.4.1 and below:
# http://trac.osgeo.org/gdal/ticket/1673
if GDAL_VERSION <= (1, 4, 1): return
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test08_multipolygons(self):
"Testing MultiPolygon objects."
prev = OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(OGRIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
def test09a_srs(self):
"Testing OGR Geometries with Spatial Reference objects."
for mp in self.geometries.multipolygons:
# Creating a geometry w/spatial reference
sr = SpatialReference('WGS84')
mpoly = OGRGeometry(mp.wkt, sr)
self.assertEqual(sr.wkt, mpoly.srs.wkt)
# Ensuring that SRS is propagated to clones.
klone = mpoly.clone()
self.assertEqual(sr.wkt, klone.srs.wkt)
# Ensuring all children geometries (polygons and their rings) all
# return the assigned spatial reference as well.
for poly in mpoly:
self.assertEqual(sr.wkt, poly.srs.wkt)
for ring in poly:
self.assertEqual(sr.wkt, ring.srs.wkt)
# Ensuring SRS propagate in topological ops.
a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
diff = a.difference(b)
union = a.union(b)
self.assertEqual(sr.wkt, diff.srs.wkt)
self.assertEqual(sr.srid, union.srs.srid)
# Instantiating w/an integer SRID
mpoly = OGRGeometry(mp.wkt, 4326)
self.assertEqual(4326, mpoly.srid)
mpoly.srs = SpatialReference(4269)
self.assertEqual(4269, mpoly.srid)
self.assertEqual('NAD83', mpoly.srs.name)
# Incrementing through the multipolyogn after the spatial reference
# has been re-assigned.
for poly in mpoly:
self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
poly.srs = 32140
for ring in poly:
# Changing each ring in the polygon
self.assertEqual(32140, ring.srs.srid)
self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
ring.srs = str(SpatialReference(4326)) # back to WGS84
self.assertEqual(4326, ring.srs.srid)
# Using the `srid` property.
ring.srid = 4322
self.assertEqual('WGS 72', ring.srs.name)
self.assertEqual(4322, ring.srid)
def test09b_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using an srid, a SpatialReference object, and a CoordTransform object
# or transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test09c_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test10_difference(self):
"Testing difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test11_intersection(self):
"Testing intersects() and intersection()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertEqual(True, a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test12_symdifference(self):
"Testing sym_difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test13_union(self):
"Testing union()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test14_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
self.assertRaises(OGRException, mp.add, pnt)
# GeometryCollection.add may take an OGRGeometry (if another collection
# of the same type all child geoms will be added individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
for tmp in (mp1, mp2, mp3): self.assertEqual(mpoly, tmp)
def test15_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test16_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test17_pickle(self):
"Testing pickle support."
import cPickle
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = cPickle.loads(cPickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test18_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
def test19_equivalence_regression(self):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertNotEqual(None, OGRGeometry('POINT(0 0)'))
self.assertEqual(False, OGRGeometry('LINESTRING(0 0, 1 1)') == 3)
def suite():
    """Build and return a TestSuite containing all OGRGeomTest cases."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(OGRGeomTest))
    return tests
def run(verbosity=2):
    """Run the OGR geometry suite with a text runner at the given verbosity."""
    runner = unittest.TextTestRunner(verbosity=verbosity)
    runner.run(suite())
|
diversys/wubi | refs/heads/master | src/urlgrabber/progress.py | 26 | # This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
# This file is part of urlgrabber, a high-level cross-protocol url-grabber
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
# $Id: progress.py,v 1.7 2005/08/19 21:59:07 mstenner Exp $
import sys
import time
import math
import thread
class BaseMeter:
def __init__(self):
self.update_period = 0.3 # seconds
self.filename = None
self.url = None
self.basename = None
self.text = None
self.size = None
self.start_time = None
self.last_amount_read = 0
self.last_update_time = None
self.re = RateEstimator()
def start(self, filename=None, url=None, basename=None,
size=None, now=None, text=None):
self.filename = filename
self.url = url
self.basename = basename
self.text = text
#size = None ######### TESTING
self.size = size
if not size is None: self.fsize = format_number(size) + 'B'
if now is None: now = time.time()
self.start_time = now
self.re.start(size, now)
self.last_amount_read = 0
self.last_update_time = now
self._do_start(now)
def _do_start(self, now=None):
pass
def update(self, amount_read, now=None):
# for a real gui, you probably want to override and put a call
# to your mainloop iteration function here
if now is None: now = time.time()
if (now >= self.last_update_time + self.update_period) or \
not self.last_update_time:
self.re.update(amount_read, now)
self.last_amount_read = amount_read
self.last_update_time = now
self._do_update(amount_read, now)
def _do_update(self, amount_read, now=None):
pass
def end(self, amount_read, now=None):
if now is None: now = time.time()
self.re.update(amount_read, now)
self.last_amount_read = amount_read
self.last_update_time = now
self._do_end(amount_read, now)
def _do_end(self, amount_read, now=None):
pass
class TextMeter(BaseMeter):
def __init__(self, fo=sys.stderr):
BaseMeter.__init__(self)
self.fo = fo
def _do_update(self, amount_read, now=None):
etime = self.re.elapsed_time()
fetime = format_time(etime)
fread = format_number(amount_read)
#self.size = None
if self.text is not None:
text = self.text
else:
text = self.basename
if self.size is None:
out = '\r%-60.60s %5sB %s ' % \
(text, fread, fetime)
else:
rtime = self.re.remaining_time()
frtime = format_time(rtime)
frac = self.re.fraction_read()
bar = '='*int(25 * frac)
out = '\r%-25.25s %3i%% |%-25.25s| %5sB %8s ETA ' % \
(text, frac*100, bar, fread, frtime)
self.fo.write(out)
self.fo.flush()
def _do_end(self, amount_read, now=None):
total_time = format_time(self.re.elapsed_time())
total_size = format_number(amount_read)
if self.text is not None:
text = self.text
else:
text = self.basename
if self.size is None:
out = '\r%-60.60s %5sB %s ' % \
(text, total_size, total_time)
else:
bar = '='*25
out = '\r%-25.25s %3i%% |%-25.25s| %5sB %8s ' % \
(text, 100, bar, total_size, total_time)
self.fo.write(out + '\n')
self.fo.flush()
text_progress_meter = TextMeter
class MultiFileHelper(BaseMeter):
def __init__(self, master):
BaseMeter.__init__(self)
self.master = master
def _do_start(self, now):
self.master.start_meter(self, now)
def _do_update(self, amount_read, now):
# elapsed time since last update
self.master.update_meter(self, now)
def _do_end(self, amount_read, now):
self.ftotal_time = format_time(now - self.start_time)
self.ftotal_size = format_number(self.last_amount_read)
self.master.end_meter(self, now)
def failure(self, message, now=None):
self.master.failure_meter(self, message, now)
def message(self, message):
self.master.message_meter(self, message)
class MultiFileMeter:
helperclass = MultiFileHelper
def __init__(self):
self.meters = []
self.in_progress_meters = []
self._lock = thread.allocate_lock()
self.update_period = 0.3 # seconds
self.numfiles = None
self.finished_files = 0
self.failed_files = 0
self.open_files = 0
self.total_size = None
self.failed_size = 0
self.start_time = None
self.finished_file_size = 0
self.last_update_time = None
self.re = RateEstimator()
def start(self, numfiles=None, total_size=None, now=None):
if now is None: now = time.time()
self.numfiles = numfiles
self.finished_files = 0
self.failed_files = 0
self.open_files = 0
self.total_size = total_size
self.failed_size = 0
self.start_time = now
self.finished_file_size = 0
self.last_update_time = now
self.re.start(total_size, now)
self._do_start(now)
def _do_start(self, now):
pass
def end(self, now=None):
if now is None: now = time.time()
self._do_end(now)
def _do_end(self, now):
pass
def lock(self): self._lock.acquire()
def unlock(self): self._lock.release()
###########################################################
# child meter creation and destruction
def newMeter(self):
newmeter = self.helperclass(self)
self.meters.append(newmeter)
return newmeter
def removeMeter(self, meter):
self.meters.remove(meter)
###########################################################
# child functions - these should only be called by helpers
def start_meter(self, meter, now):
if not meter in self.meters:
raise ValueError('attempt to use orphaned meter')
self._lock.acquire()
try:
if not meter in self.in_progress_meters:
self.in_progress_meters.append(meter)
self.open_files += 1
finally:
self._lock.release()
self._do_start_meter(meter, now)
def _do_start_meter(self, meter, now):
pass
def update_meter(self, meter, now):
if not meter in self.meters:
raise ValueError('attempt to use orphaned meter')
if (now >= self.last_update_time + self.update_period) or \
not self.last_update_time:
self.re.update(self._amount_read(), now)
self.last_update_time = now
self._do_update_meter(meter, now)
def _do_update_meter(self, meter, now):
pass
def end_meter(self, meter, now):
if not meter in self.meters:
raise ValueError('attempt to use orphaned meter')
self._lock.acquire()
try:
try: self.in_progress_meters.remove(meter)
except ValueError: pass
self.open_files -= 1
self.finished_files += 1
self.finished_file_size += meter.last_amount_read
finally:
self._lock.release()
self._do_end_meter(meter, now)
def _do_end_meter(self, meter, now):
pass
def failure_meter(self, meter, message, now):
if not meter in self.meters:
raise ValueError('attempt to use orphaned meter')
self._lock.acquire()
try:
try: self.in_progress_meters.remove(meter)
except ValueError: pass
self.open_files -= 1
self.failed_files += 1
if meter.size and self.failed_size is not None:
self.failed_size += meter.size
else:
self.failed_size = None
finally:
self._lock.release()
self._do_failure_meter(meter, message, now)
def _do_failure_meter(self, meter, message, now):
pass
def message_meter(self, meter, message):
pass
########################################################
# internal functions
def _amount_read(self):
tot = self.finished_file_size
for m in self.in_progress_meters:
tot += m.last_amount_read
return tot
class TextMultiFileMeter(MultiFileMeter):
    """Multi-file progress meter that renders aggregate progress to a
    terminal-style stream (stderr by default)."""

    def __init__(self, fo=sys.stderr):
        # Assign the output stream before base-class init so any callbacks
        # fired during setup can already write to it.
        self.fo = fo
        MultiFileMeter.__init__(self)

    # files: ###/### ###% data: ######/###### ###% time: ##:##:##/##:##:##
    def _do_update_meter(self, meter, now):
        self._lock.acquire()
        try:
            format = "files: %3i/%-3i %3i%% data: %6.6s/%-6.6s %3i%% " \
                     "time: %8.8s/%8.8s"
            df = self.finished_files
            tf = self.numfiles or 1
            # +0.49 rounds to the nearest whole percent under %i truncation.
            pf = 100 * float(df)/tf + 0.49
            dd = self.re.last_amount_read
            td = self.total_size
            pd = 100 * (self.re.fraction_read() or 0) + 0.49
            dt = self.re.elapsed_time()
            rt = self.re.remaining_time()
            if rt is None: tt = None
            else: tt = dt + rt
            fdd = format_number(dd) + 'B'
            ftd = format_number(td) + 'B'
            fdt = format_time(dt, 1)
            ftt = format_time(tt, 1)
            out = '%-79.79s' % (format % (df, tf, pf, fdd, ftd, pd, fdt, ftt))
            self.fo.write('\r' + out)
            self.fo.flush()
        finally:
            self._lock.release()

    def _do_end_meter(self, meter, now):
        # Print a final one-line summary for the finished file, then repaint
        # the aggregate line (outside the lock -- the repaint re-acquires it).
        self._lock.acquire()
        try:
            format = "%-30.30s %6.6s %8.8s %9.9s"
            fn = meter.basename
            size = meter.last_amount_read
            fsize = format_number(size) + 'B'
            et = meter.re.elapsed_time()
            fet = format_time(et, 1)
            frate = format_number(size / et) + 'B/s'
            out = '%-79.79s' % (format % (fn, fsize, fet, frate))
            self.fo.write('\r' + out + '\n')
        finally:
            self._lock.release()
        self._do_update_meter(meter, now)

    def _do_failure_meter(self, meter, message, now):
        self._lock.acquire()
        try:
            format = "%-30.30s %6.6s %s"
            fn = meter.basename
            if type(message) in (type(''), type(u'')):
                message = message.splitlines()
            if not message: message = ['']
            out = '%-79s' % (format % (fn, 'FAILED', message[0] or ''))
            self.fo.write('\r' + out + '\n')
            for m in message[1:]: self.fo.write(' ' + m + '\n')
        finally:
            # BUG FIX: the original released the lock inside the ``try`` body
            # and put the repaint in ``finally``; if writing the failure line
            # raised, the lock was never released and every other meter
            # callback would deadlock.  The release belongs here.
            self._lock.release()
        self._do_update_meter(meter, now)

    def message_meter(self, meter, message):
        # Free-form messages are intentionally ignored by the text frontend.
        self._lock.acquire()
        try:
            pass
        finally:
            self._lock.release()

    def _do_end(self, now):
        # One last repaint, then terminate the progress line with a newline.
        self._do_update_meter(None, now)
        self._lock.acquire()
        try:
            self.fo.write('\n')
            self.fo.flush()
        finally:
            self._lock.release()
######################################################################
# support classes and functions
class RateEstimator:
def __init__(self, timescale=5.0):
self.timescale = timescale
def start(self, total=None, now=None):
if now is None: now = time.time()
self.total = total
self.start_time = now
self.last_update_time = now
self.last_amount_read = 0
self.ave_rate = None
def update(self, amount_read, now=None):
if now is None: now = time.time()
if amount_read == 0:
# if we just started this file, all bets are off
self.last_update_time = now
self.last_amount_read = 0
self.ave_rate = None
return
#print 'times', now, self.last_update_time
time_diff = now - self.last_update_time
read_diff = amount_read - self.last_amount_read
self.last_update_time = now
self.last_amount_read = amount_read
self.ave_rate = self._temporal_rolling_ave(\
time_diff, read_diff, self.ave_rate, self.timescale)
#print 'results', time_diff, read_diff, self.ave_rate
#####################################################################
# result methods
def average_rate(self):
"get the average transfer rate (in bytes/second)"
return self.ave_rate
def elapsed_time(self):
"the time between the start of the transfer and the most recent update"
return self.last_update_time - self.start_time
def remaining_time(self):
"estimated time remaining"
if not self.ave_rate or not self.total: return None
return (self.total - self.last_amount_read) / self.ave_rate
def fraction_read(self):
"""the fraction of the data that has been read
(can be None for unknown transfer size)"""
if self.total is None: return None
elif self.total == 0: return 1.0
else: return float(self.last_amount_read)/self.total
#########################################################################
# support methods
def _temporal_rolling_ave(self, time_diff, read_diff, last_ave, timescale):
"""a temporal rolling average performs smooth averaging even when
updates come at irregular intervals. This is performed by scaling
the "epsilon" according to the time since the last update.
Specifically, epsilon = time_diff / timescale
As a general rule, the average will take on a completely new value
after 'timescale' seconds."""
epsilon = time_diff / timescale
if epsilon > 1: epsilon = 1.0
return self._rolling_ave(time_diff, read_diff, last_ave, epsilon)
def _rolling_ave(self, time_diff, read_diff, last_ave, epsilon):
"""perform a "rolling average" iteration
a rolling average "folds" new data into an existing average with
some weight, epsilon. epsilon must be between 0.0 and 1.0 (inclusive)
a value of 0.0 means only the old value (initial value) counts,
and a value of 1.0 means only the newest value is considered."""
try:
recent_rate = read_diff / time_diff
except ZeroDivisionError:
recent_rate = None
if last_ave is None: return recent_rate
elif recent_rate is None: return last_ave
# at this point, both last_ave and recent_rate are numbers
return epsilon * recent_rate + (1 - epsilon) * last_ave
def _round_remaining_time(self, rt, start_time=15.0):
"""round the remaining time, depending on its size
If rt is between n*start_time and (n+1)*start_time round downward
to the nearest multiple of n (for any counting number n).
If rt < start_time, round down to the nearest 1.
For example (for start_time = 15.0):
2.7 -> 2.0
25.2 -> 25.0
26.4 -> 26.0
35.3 -> 34.0
63.6 -> 60.0
"""
if rt < 0: return 0.0
shift = int(math.log(rt/start_time)/math.log(2))
rt = int(rt)
if shift <= 0: return rt
return float(int(rt) >> shift << shift)
def format_time(seconds, use_hours=0):
    """Format a duration in seconds as 'MM:SS' (or 'HH:MM:SS' if use_hours).

    None or negative durations render as '--:--' / '--:--:--' placeholders,
    matching the "unknown ETA" display.
    """
    if seconds is None or seconds < 0:
        if use_hours: return '--:--:--'
        else: return '--:--'
    # divmod keeps this explicitly integral on both Python 2 and Python 3
    # (the original ``seconds / 60`` becomes float division under Python 3).
    minutes, seconds = divmod(int(seconds), 60)
    if use_hours:
        hours, minutes = divmod(minutes, 60)
        return '%02i:%02i:%02i' % (hours, minutes, seconds)
    else:
        return '%02i:%02i' % (minutes, seconds)
def format_number(number, SI=0, space=' '):
    """Turn a number into a human-readable metric-like string.

    number -- the value to format (typically a byte count)
    SI     -- if true, scale by powers of 1000; otherwise powers of 1024
    space  -- separator placed between the digits and the unit symbol
    """
    symbols = ['',   # (none)
               'k',  # kilo
               'M',  # mega
               'G',  # giga
               'T',  # tera
               'P',  # peta
               'E',  # exa
               'Z',  # zetta
               'Y']  # yotta
    if SI: step = 1000.0
    else: step = 1024.0
    thresh = 999
    depth = 0
    max_depth = len(symbols) - 1
    # we want numbers between 0 and thresh, but don't exceed the length
    # of our list.  In that event, the formatting will be screwed up,
    # but it'll still show the right number.
    while number > thresh and depth < max_depth:
        depth = depth + 1
        number = number / step
    if isinstance(number, int):
        # It's still an int, which means it never got divided and is
        # already short enough.  (The original also compared against the
        # ``1L`` long literal, which is invalid syntax on Python 3; any
        # division above produces a float, so the int check suffices.)
        format = '%i%s%s'
    elif number < 9.95:
        # must use 9.95 for proper sizing.  For example, 9.99 will be
        # rounded to 10.0 with the .1f format string (which is too long)
        format = '%.1f%s%s'
    else:
        format = '%.0f%s%s'
    return(format % (float(number or 0), space, symbols[depth]))
|
steveb/heat | refs/heads/master | heat/tests/convergence/scenarios/update_replace_rollback.py | 11 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(review): the names `test`, `reality`, `engine`, `verify`, `Template`,
# `RsrcDef`, `GetAtt` and `GetRes` are not defined in this file; presumably
# the convergence scenario runner executes this code with them injected into
# its namespace -- confirm against heat's scenario test framework.
def check_c_count(expected_count):
    # Count how many copies of resource 'C' exist in "reality"; during an
    # update-replace both the old and the replacement copy can be present.
    test.assertEqual(expected_count,
                     len(reality.resources_by_logical_name('C')))
# Initial template: C consumes A's attribute 'a' through a '!'-marked
# property, so changing that value forces replacement of C rather than an
# in-place update.  D and E in turn reference C.
example_template = Template({
    'A': RsrcDef({'a': 'initial'}, []),
    'B': RsrcDef({}, []),
    'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
    'D': RsrcDef({'c': GetRes('C')}, []),
    'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
# Let the create converge, then check reality matches the template.
engine.noop(5)
engine.call(verify, example_template)
# Same topology with A's 'a' changed -> triggers replacement of C.
example_template2 = Template({
    'A': RsrcDef({'a': 'updated'}, []),
    'B': RsrcDef({}, []),
    'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
    'D': RsrcDef({'c': GetRes('C')}, []),
    'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template2)
# Process only part of the update, then roll back mid-flight.
engine.noop(4)
engine.rollback_stack('foo')
# Mid-rollback two versions of C are expected to coexist (replacement
# created, old copy not yet cleaned up).
engine.call(check_c_count, 2)
engine.noop(11)
# Rollback must restore the original template's state exactly.
engine.call(verify, example_template)
engine.delete_stack('foo')
engine.noop(12)
engine.call(verify, Template({}))
|
DMLoy/ECommerceBasic | refs/heads/master | bin/bin/activate/lib/python2.7/site-packages/setuptools/command/install_lib.py | 454 | from distutils.command.install_lib import install_lib as _install_lib
import os
class install_lib(_install_lib):
    """Don't add compiled flags to filenames of non-Python files"""
    def _bytecode_filenames (self, py_filenames):
        # Map each .py filename to the bytecode files (.pyc/.pyo) the
        # current compile/optimize settings will produce.  Non-.py entries
        # (extension stubs, data files) are skipped so they never get a
        # spurious 'c'/'o' suffix appended -- the fix this subclass exists for.
        bytecode_files = []
        for py_file in py_filenames:
            if not py_file.endswith('.py'):
                continue
            if self.compile:
                bytecode_files.append(py_file + "c")
            if self.optimize > 0:
                bytecode_files.append(py_file + "o")
        return bytecode_files
    def run(self):
        """Build, install, then byte-compile whatever was installed."""
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)
    def get_exclusions(self):
        """Return a dict of namespace-package __init__ file paths to skip.

        Only populated for single-version, externally-managed installs,
        where the namespace packages' __init__ modules are provided by
        separate .pth machinery instead of being copied in.
        """
        exclude = {}
        nsp = self.distribution.namespace_packages
        if (nsp and self.get_finalized_command('install')
               .single_version_externally_managed
            ):
            for pkg in nsp:
                parts = pkg.split('.')
                while parts:
                    # Exclude __init__.py* at every level of the dotted
                    # package path (foo.bar excludes foo/bar/ and foo/).
                    pkgdir = os.path.join(self.install_dir, *parts)
                    for f in '__init__.py', '__init__.pyc', '__init__.pyo':
                        exclude[os.path.join(pkgdir,f)] = 1
                    parts.pop()
        return exclude
    def copy_tree(
        self, infile, outfile,
        preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        # This override only supports the default flag combination.
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()
        if not exclude:
            return _install_lib.copy_tree(self, infile, outfile)
        # Exclude namespace package __init__.py* files from the output
        from setuptools.archive_util import unpack_directory
        from distutils import log
        outfiles = []
        def pf(src, dst):
            # unpack_directory progress filter: returning False skips the
            # file; returning the destination path lets it be copied.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",dst)
                return False
            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst
        unpack_directory(infile, outfile, pf)
        return outfiles
    def get_outputs(self):
        # Report the same outputs as distutils minus the excluded
        # namespace-package __init__ files.
        outputs = _install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
|
shastikk/youtube-dl | refs/heads/master | devscripts/gh-pages/add-version.py | 174 | #!/usr/bin/env python3
from __future__ import unicode_literals
import json
import sys
import hashlib
import os.path
# Update the version manifest (update/versions.json) and LATEST_VERSION
# marker for a freshly built release.  Expects the version number as the
# sole command-line argument and the build artifacts under ../../build/.
if len(sys.argv) <= 1:
    print('Specify the version number as parameter')
    sys.exit()
version = sys.argv[1]

with open('update/LATEST_VERSION', 'w') as f:
    f.write(version)

# Load the existing manifest with a context manager (the original
# `json.load(open(...))` leaked the file handle).  Any stale signature is
# dropped so the updated manifest can be re-signed.
with open('update/versions.json') as jsonf:
    versions_info = json.load(jsonf)
if 'signature' in versions_info:
    del versions_info['signature']

new_version = {}
filenames = {
    'bin': 'youtube-dl',
    'exe': 'youtube-dl.exe',
    'tar': 'youtube-dl-%s.tar.gz' % version}
build_dir = os.path.join('..', '..', 'build', version)
for key, filename in filenames.items():
    url = 'https://yt-dl.org/downloads/%s/%s' % (version, filename)
    fn = os.path.join(build_dir, filename)
    with open(fn, 'rb') as f:
        data = f.read()
    if not data:
        raise ValueError('File %s is empty!' % fn)
    # Record the download URL and checksum so updaters can verify the blob.
    sha256sum = hashlib.sha256(data).hexdigest()
    new_version[key] = (url, sha256sum)

versions_info['versions'][version] = new_version
versions_info['latest'] = version

with open('update/versions.json', 'w') as jsonf:
    json.dump(versions_info, jsonf, indent=4, sort_keys=True)
|
SatoshiNXSimudrone/sl4a-damon-clone | refs/heads/master | python/src/Demo/pdist/cvslock.py | 47 | """CVS locking algorithm.
CVS locking strategy
====================
As reverse engineered from the CVS 1.3 sources (file lock.c):
- Locking is done on a per repository basis (but a process can hold
write locks for multiple directories); all lock files are placed in
the repository and have names beginning with "#cvs.".
- Before even attempting to lock, a file "#cvs.tfl.<pid>" is created
(and removed again), to test that we can write the repository. [The
algorithm can still be fooled (1) if the repository's mode is changed
while attempting to lock; (2) if this file exists and is writable but
the directory is not.]
- While creating the actual read/write lock files (which may exist for
a long time), a "meta-lock" is held. The meta-lock is a directory
named "#cvs.lock" in the repository. The meta-lock is also held while
a write lock is held.
- To set a read lock:
- acquire the meta-lock
- create the file "#cvs.rfl.<pid>"
- release the meta-lock
- To set a write lock:
- acquire the meta-lock
- check that there are no files called "#cvs.rfl.*"
- if there are, release the meta-lock, sleep, try again
- create the file "#cvs.wfl.<pid>"
- To release a write lock:
- remove the file "#cvs.wfl.<pid>"
- rmdir the meta-lock
- To release a read lock:
- remove the file "#cvs.rfl.<pid>"
Additional notes
----------------
- A process should read-lock at most one repository at a time.
- A process may write-lock as many repositories as it wishes (to avoid
deadlocks, I presume it should always lock them top-down in the
directory hierarchy).
- A process should make sure it removes all its lock files and
directories when it crashes.
- Limitation: one user id should not be committing files into the same
repository at the same time.
Turn this into Python code
--------------------------
rl = ReadLock(repository, waittime)
wl = WriteLock(repository, waittime)
list = MultipleWriteLock([repository1, repository2, ...], waittime)
"""
import os
import time
import stat
import pwd
# Default wait time (seconds) between retries while a lock is busy.
DELAY = 10
# XXX This should be the same on all Unix versions -- errno.EEXIST would
# avoid hard-coding the value 17 here.
EEXIST = 17
# Files used for locking (must match cvs.h in the CVS sources)
CVSLCK = "#cvs.lck"   # meta-lock directory name
CVSRFL = "#cvs.rfl."  # read-lock file prefix (pid is appended)
CVSWFL = "#cvs.wfl."  # write-lock file prefix (pid is appended)
class Error:
    """Base error carrier for lock failures.

    Wraps an arbitrary message object (a string, or e.g. an os.stat()
    result for Locked) and mirrors its str()/repr().
    """
    def __init__(self, msg):
        # Keep the payload untouched; formatting happens on demand.
        self.msg = msg
    def __str__(self):
        return str(self.msg)
    def __repr__(self):
        return repr(self.msg)
class Locked(Error):
    """Lock could not be acquired before the wait time ran out.

    The msg attribute carries the os.stat() result of the blocking lock.
    """
    pass
class Lock:
    """Holds the state of one lock attempt on a repository.

    Manages acquisition/release of the meta-lock directory ("#cvs.lck")
    and computes the pid-stamped read/write lock file paths; subclasses
    (ReadLock, WriteLock) create the actual lock files.
    """
    def __init__(self, repository = ".", delay = DELAY):
        self.repository = repository
        self.delay = delay
        self.lockdir = None     # path of the meta-lock dir we hold, if any
        self.lockfile = None    # path of the lock file we created, if any
        pid = repr(os.getpid())
        self.cvslck = self.join(CVSLCK)
        self.cvsrfl = self.join(CVSRFL + pid)
        self.cvswfl = self.join(CVSWFL + pid)
    def __del__(self):
        # Debug trace; releasing on garbage collection guards against
        # leaving stale lock files behind after a crash.
        print "__del__"
        self.unlock()
    def setlockdir(self):
        """Acquire the meta-lock by (atomically) creating its directory.

        mkdir either succeeds (we own the meta-lock) or fails with
        EEXIST (someone else owns it, so wait and retry).
        """
        while 1:
            try:
                self.lockdir = self.cvslck
                os.mkdir(self.cvslck, 0777)
                return
            except os.error, msg:
                self.lockdir = None
                if msg[0] == EEXIST:
                    try:
                        st = os.stat(self.cvslck)
                    except os.error:
                        # Lock disappeared between mkdir and stat; retry.
                        continue
                    self.sleep(st)
                    continue
                raise Error("failed to lock %s: %s" % (
                        self.repository, msg))
    def unlock(self):
        # Release everything we hold; safe to call multiple times.
        self.unlockfile()
        self.unlockdir()
    def unlockfile(self):
        # Remove our read/write lock file, if we created one.
        if self.lockfile:
            print "unlink", self.lockfile
            try:
                os.unlink(self.lockfile)
            except os.error:
                pass
            self.lockfile = None
    def unlockdir(self):
        # Release the meta-lock directory, if we hold it.
        if self.lockdir:
            print "rmdir", self.lockdir
            try:
                os.rmdir(self.lockdir)
            except os.error:
                pass
            self.lockdir = None
    def sleep(self, st):
        # Delegate to the module-level sleep(); raises Locked when the
        # configured delay is exhausted.
        sleep(st, self.repository, self.delay)
    def join(self, name):
        return os.path.join(self.repository, name)
def sleep(st, repository, delay):
    """Wait *delay* seconds for the lock owner described by stat *st*.

    Raises Locked(st) instead of sleeping when delay <= 0.  Prints a
    progress line naming the lock owner (looked up from the lock file's
    uid) so interactive users know whom they are waiting for.
    """
    if delay <= 0:
        raise Locked(st)
    uid = st[stat.ST_UID]
    try:
        pwent = pwd.getpwuid(uid)
        user = pwent[0]
    except KeyError:
        user = "uid %d" % uid
    print "[%s]" % time.ctime(time.time())[11:19],
    print "Waiting for %s's lock in" % user, repository
    time.sleep(delay)
class ReadLock(Lock):
    """Shared (read) lock on a repository.

    Per the algorithm in the module docstring: acquire the meta-lock,
    create "#cvs.rfl.<pid>", then release the meta-lock so that other
    readers (and, once all readers are gone, writers) can proceed.
    """
    def __init__(self, repository, delay = DELAY):
        Lock.__init__(self, repository, delay)
        ok = 0
        try:
            self.setlockdir()
            self.lockfile = self.cvsrfl
            fp = open(self.lockfile, 'w')
            fp.close()
            ok = 1
        finally:
            if not ok:
                # Acquisition failed part-way: drop any partial state.
                self.unlockfile()
            # Release the meta-lock in both cases.  The original code
            # kept it on success, which made read locks mutually
            # exclusive, contradicting the documented protocol
            # ("create the file ... release the meta-lock").
            self.unlockdir()
class WriteLock(Lock):
    """Exclusive (write) lock: '#cvs.wfl.<pid>' plus the held meta-lock.

    The meta-lock directory remains held for the lifetime of the write
    lock, as the protocol in the module docstring requires.
    """
    def __init__(self, repository, delay = DELAY):
        Lock.__init__(self, repository, delay)
        self.setlockdir()
        while 1:
            uid = self.readers_exist()
            if not uid:
                break
            # Readers present: give up the meta-lock while waiting, then
            # re-acquire it before re-checking.  (The original loop
            # skipped the re-acquire, so after one wait the write lock
            # file was created without the meta-lock being held.)
            self.unlockdir()
            self.sleep(uid)
            self.setlockdir()
        self.lockfile = self.cvswfl
        fp = open(self.lockfile, 'w')
        fp.close()
    def readers_exist(self):
        """Return the os.stat() of some '#cvs.rfl.*' file, or None."""
        for name in os.listdir(self.repository):
            if name.startswith(CVSRFL):
                try:
                    return os.stat(self.join(name))
                except os.error:
                    # Reader vanished between listdir and stat; keep looking.
                    continue
        return None
def MultipleWriteLock(repositories, delay = DELAY):
    """Acquire write locks on all *repositories*, retrying as a unit.

    Each lock is attempted with zero per-lock delay; if any repository
    is busy, all locks acquired so far are dropped (their __del__ frees
    them) and the whole set is retried after waiting on the busy one.

    Returns the list of acquired WriteLock objects.
    """
    while 1:
        locks = []
        for r in repositories:
            try:
                locks.append(WriteLock(r, 0))
            except Locked as instance:
                # Release everything we hold before waiting, to avoid
                # deadlocking against another multi-locker.
                del locks
                break
        else:
            break
        sleep(instance.msg, r, delay)
    # BUG FIX: the original returned the *builtin* `list` instead of the
    # acquired locks.
    return locks
def test():
    """Manual smoke test: take and release a write lock, then a read lock.

    The repository defaults to the current directory or is given as the
    first command-line argument.  The numbered prints in the finally
    clause are debug traces of the cleanup sequence.
    """
    import sys
    if sys.argv[1:]:
        repository = sys.argv[1]
    else:
        repository = "."
    rl = None
    wl = None
    try:
        print "attempting write lock ..."
        wl = WriteLock(repository)
        print "got it."
        wl.unlock()
        print "attempting read lock ..."
        rl = ReadLock(repository)
        print "got it."
        rl.unlock()
    finally:
        print [1]
        # Legacy idiom: drop the interpreter's traceback reference so the
        # lock objects are not kept alive and their cleanup can run.
        sys.exc_traceback = None
        print [2]
        if rl:
            rl.unlock()
        print [3]
        if wl:
            wl.unlock()
        print [4]
        rl = None
        print [5]
        wl = None
        print [6]
|
j2sol/ansible-modules-core | refs/heads/devel | database/mysql/mysql_variables.py | 48 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage mysql variables
(c) 2013, Balazs Pocze <banyek@gawker.com>
Certain parts are taken from Mark Theunissen's mysqldb module
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: mysql_variables
short_description: Manage MySQL global variables
description:
- Query / Set MySQL variables
version_added: 1.3
options:
variable:
description:
- Variable name to operate
required: True
value:
description:
- If set, then sets variable value to this
required: False
login_user:
description:
- username to connect mysql host, if defined login_password also needed.
required: False
login_password:
description:
- password to connect mysql host, if defined login_user also needed.
required: False
login_host:
description:
- mysql host to connect
required: False
login_unix_socket:
description:
- unix socket to connect mysql server
'''
EXAMPLES = '''
# Check for sync_binlog setting
- mysql_variables: variable=sync_binlog
# Set read_only variable to 1
- mysql_variables: variable=read_only value=1
'''
import ConfigParser
import os
import warnings
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
def typedvalue(value):
    """
    Convert value to number whenever possible, return same value
    otherwise.

    >>> typedvalue('3')
    3
    >>> typedvalue('3.0')
    3.0
    >>> typedvalue('foobar')
    'foobar'

    """
    # Try the narrowest numeric type first; fall through on failure.
    for convert in (int, float):
        try:
            return convert(value)
        except ValueError:
            pass
    return value
def getvariable(cursor, mysqlvar):
    """Return the SHOW VARIABLES rows matching *mysqlvar* (possibly empty)."""
    cursor.execute("SHOW VARIABLES LIKE %s", (mysqlvar,))
    return cursor.fetchall()
def setvariable(cursor, mysqlvar, value):
    """ Set a global mysql variable to a given value

    The DB driver will handle quoting of the given value based on its
    type, thus numeric strings like '3.0' or '8' are illegal, they
    should be passed as numeric literals.

    Returns True on success, or the error message as a string.
    """
    # Only the identifier is interpolated (after quoting); the value goes
    # through the driver's parameter binding.
    query = "SET GLOBAL " + mysql_quote_identifier(mysqlvar, 'vars') + "  = %s"
    try:
        cursor.execute(query, (value,))
        cursor.fetchall()
    except Exception as e:
        return str(e)
    return True
def strip_quotes(s):
    """ Remove surrounding single or double quotes

    Exactly one matching pair of quotes is removed.  (The original
    implementation used str.strip(), which also ate *repeated* quote
    characters: ''hello'' became hello instead of 'hello'.)

    >>> print strip_quotes('hello')
    hello
    >>> print strip_quotes('"hello"')
    hello
    >>> print strip_quotes("'hello'")
    hello
    >>> print strip_quotes("'hello")
    'hello
    """
    for quote in ("'", '"'):
        # len >= 2 ensures a lone quote character is not "surrounded".
        if len(s) >= 2 and s.startswith(quote) and s.endswith(quote):
            return s[1:-1]
    return s
def config_get(config, section, option):
    """ Calls ConfigParser.get and strips quotes

    See: http://dev.mysql.com/doc/refman/5.0/en/option-files.html
    """
    raw_value = config.get(section, option)
    return strip_quotes(raw_value)
def load_mycnf():
    """Read MySQL client credentials from ~/.my.cnf.

    Returns a dict with 'user' and 'passwd' keys, or False when the file
    is missing, unreadable, or contains no password.
    """
    config = ConfigParser.RawConfigParser()
    mycnf = os.path.expanduser('~/.my.cnf')
    if not os.path.exists(mycnf):
        return False
    try:
        config.readfp(open(mycnf))
    except (IOError):
        return False
    # We support two forms of passwords in .my.cnf, both pass= and password=,
    # as these are both supported by MySQL.
    try:
        passwd = config_get(config, 'client', 'password')
    except (ConfigParser.NoOptionError):
        try:
            passwd = config_get(config, 'client', 'pass')
        except (ConfigParser.NoOptionError):
            return False
    # If .my.cnf doesn't specify a user, default to user login name
    # NOTE(review): `getpass` is never imported in this file; presumably it
    # leaks in via the wildcard module_utils imports at the bottom --
    # confirm, otherwise this branch raises NameError.
    try:
        user = config_get(config, 'client', 'user')
    except (ConfigParser.NoOptionError):
        user = getpass.getuser()
    creds = dict(user=user, passwd=passwd)
    return creds
def main():
    """Module entry point: connect to MySQL, then get or set the variable."""
    module = AnsibleModule(
            argument_spec = dict(
            login_user=dict(default=None),
            login_password=dict(default=None),
            login_host=dict(default="localhost"),
            login_unix_socket=dict(default=None),
            variable=dict(default=None),
            value=dict(default=None)
        )
    )
    user = module.params["login_user"]
    password = module.params["login_password"]
    host = module.params["login_host"]
    mysqlvar = module.params["variable"]
    value = module.params["value"]
    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")
    else:
        # Promote MySQL warnings to exceptions so they surface as failures.
        warnings.filterwarnings('error', category=MySQLdb.Warning)
    # Either the caller passes both a username and password with which to connect to
    # mysql, or they pass neither and allow this module to read the credentials from
    # ~/.my.cnf.
    login_password = module.params["login_password"]
    login_user = module.params["login_user"]
    if login_user is None and login_password is None:
        mycnf_creds = load_mycnf()
        if mycnf_creds is False:
            # No usable ~/.my.cnf: fall back to passwordless root.
            login_user = "root"
            login_password = ""
        else:
            login_user = mycnf_creds["user"]
            login_password = mycnf_creds["passwd"]
    elif login_password is None or login_user is None:
        module.fail_json(msg="when supplying login arguments, both login_user and login_password must be provided")
    try:
        # A unix socket takes precedence over host-based connection.
        if module.params["login_unix_socket"]:
            db_connection = MySQLdb.connect(host=module.params["login_host"], unix_socket=module.params["login_unix_socket"], user=login_user, passwd=login_password, db="mysql")
        else:
            db_connection = MySQLdb.connect(host=module.params["login_host"], user=login_user, passwd=login_password, db="mysql")
        cursor = db_connection.cursor()
    except Exception, e:
        module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or ~/.my.cnf has the credentials")
    if mysqlvar is None:
        module.fail_json(msg="Cannot run without variable to operate with")
    mysqlvar_val = getvariable(cursor, mysqlvar)
    if value is None:
        # Read-only mode: just report the current value.
        module.exit_json(msg=mysqlvar_val)
    else:
        if len(mysqlvar_val) < 1:
            module.fail_json(msg="Variable not available", changed=False)
        # Type values before using them
        value_wanted = typedvalue(value)
        value_actual = typedvalue(mysqlvar_val[0][1])
        if value_wanted == value_actual:
            # Idempotence: nothing to do when already at the wanted value.
            module.exit_json(msg="Variable already set to requested value", changed=False)
        try:
            result = setvariable(cursor, mysqlvar, value_wanted)
        except SQLParseError, e:
            result = str(e)
        if result is True:
            module.exit_json(msg="Variable change succeeded prev_value=%s" % value_actual, changed=True)
        else:
            module.fail_json(msg=result, changed=False)
# import module snippets
# (the wildcard imports supply AnsibleModule, SQLParseError and the SQL
# helpers such as mysql_quote_identifier used above)
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
main()
|
nirmeshk/oh-mainline | refs/heads/master | vendor/packages/django-extensions/django_extensions/tests/utils.py | 47 | # -*- coding: utf-8 -*-
UTILS_TRUNCATE_LETTERS_TESTS = """
>>> from django_extensions.utils.text import truncate_letters
>>> truncate_letters("hello tests", 100)
u'hello tests'
>>> truncate_letters("hello tests", 5)
u'hello...'
>>> for i in range(10,-1,-1): truncate_letters("hello tests", i),i
(u'hello test...', 10)
(u'hello tes...', 9)
(u'hello te...', 8)
(u'hello t...', 7)
(u'hello ...', 6)
(u'hello...', 5)
(u'hell...', 4)
(u'hel...', 3)
(u'he...', 2)
(u'h...', 1)
(u'...', 0)
>>> truncate_letters("峠 (とうげ tōge - mountain pass)", 10)
u'\u5ce0 (\u3068\u3046\u3052 t\u014dg...'
"""
UTILS_UUID_TESTS = """
>>> from django_extensions.utils import uuid
# make a UUID using an MD5 hash of a namespace UUID and a name
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
# make a UUID using a SHA-1 hash of a namespace UUID and a name
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
# make a UUID from a string of hex digits (braces and hyphens ignored)
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
# convert a UUID to a string of hex digits in standard form
>>> str(x)
'00010203-0405-0607-0809-0a0b0c0d0e0f'
# get the raw 16 bytes of the UUID
>>> x.bytes
'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f'
# make a UUID from a 16-byte string
>>> uuid.UUID(bytes=x.bytes)
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
"""
|
shipci/boto | refs/heads/develop | tests/unit/ec2/test_address.py | 3 | from tests.compat import mock, unittest
from boto.ec2.address import Address
class AddressTest(unittest.TestCase):
    """Checks for an EC2 Address that has only a public IP populated."""

    def setUp(self):
        address = Address()
        address.connection = mock.Mock()
        address.public_ip = "192.168.1.1"
        self.address = address

    def check_that_attribute_has_been_set(self, name, value, attribute):
        # Feed one SAX end-element event and verify the mapped attribute.
        self.address.endElement(name, value, None)
        self.assertEqual(getattr(self.address, attribute), value)

    def test_endElement_sets_correct_attributes_with_values(self):
        cases = (
            ("publicIp", "192.168.1.1", "public_ip"),
            ("instanceId", 1, "instance_id"),
            ("domain", "some domain", "domain"),
            ("allocationId", 1, "allocation_id"),
            ("associationId", 1, "association_id"),
            ("somethingRandom", "somethingRandom", "somethingRandom"),
        )
        for name, value, attribute in cases:
            self.check_that_attribute_has_been_set(name, value, attribute)

    def test_release_calls_connection_release_address_with_correct_args(self):
        self.address.release()
        self.address.connection.release_address.assert_called_with(
            public_ip="192.168.1.1",
            dry_run=False
        )

    def test_associate_calls_connection_associate_address_with_correct_args(self):
        self.address.associate(1)
        self.address.connection.associate_address.assert_called_with(
            instance_id=1,
            public_ip="192.168.1.1",
            allow_reassociation=False,
            network_interface_id=None,
            private_ip_address=None,
            dry_run=False
        )

    def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
        self.address.disassociate()
        self.address.connection.disassociate_address.assert_called_with(
            public_ip="192.168.1.1",
            dry_run=False
        )
class AddressWithAllocationTest(unittest.TestCase):
    """Same checks as AddressTest but with allocation_id set (VPC address):
    release must then pass association_id and associate must pass
    allocation_id instead of relying on the public IP alone.
    """
    def setUp(self):
        self.address = Address()
        self.address.connection = mock.Mock()
        self.address.public_ip = "192.168.1.1"
        self.address.allocation_id = "aid1"
    def check_that_attribute_has_been_set(self, name, value, attribute):
        # Feed one SAX end-element event and verify the mapped attribute.
        self.address.endElement(name, value, None)
        self.assertEqual(getattr(self.address, attribute), value)
    def test_endElement_sets_correct_attributes_with_values(self):
        for arguments in [("publicIp", "192.168.1.1", "public_ip"),
                          ("instanceId", 1, "instance_id"),
                          ("domain", "some domain", "domain"),
                          ("allocationId", 1, "allocation_id"),
                          ("associationId", 1, "association_id"),
                          ("somethingRandom", "somethingRandom", "somethingRandom")]:
            self.check_that_attribute_has_been_set(arguments[0], arguments[1], arguments[2])
    def test_release_calls_connection_release_address_with_correct_args(self):
        self.address.release()
        self.address.connection.release_address.assert_called_with(
            association_id="aid1",
            dry_run=False
        )
    def test_associate_calls_connection_associate_address_with_correct_args(self):
        self.address.associate(1)
        self.address.connection.associate_address.assert_called_with(
            instance_id=1,
            public_ip="192.168.1.1",
            allocation_id="aid1",
            network_interface_id=None,
            private_ip_address=None,
            allow_reassociation=False,
            dry_run=False
        )
    def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
        self.address.disassociate()
        self.address.connection.disassociate_address.assert_called_with(
            public_ip="192.168.1.1",
            dry_run=False
        )
class AddressWithNetworkInterfaceTest(unittest.TestCase):
    """Like AddressWithAllocationTest, but associate() is driven by a
    network_interface_id rather than an instance id.
    """
    def setUp(self):
        self.address = Address()
        self.address.connection = mock.Mock()
        self.address.public_ip = "192.168.1.1"
        self.address.allocation_id = "aid1"
    def check_that_attribute_has_been_set(self, name, value, attribute):
        # Feed one SAX end-element event and verify the mapped attribute.
        self.address.endElement(name, value, None)
        self.assertEqual(getattr(self.address, attribute), value)
    def test_endElement_sets_correct_attributes_with_values(self):
        for arguments in [("publicIp", "192.168.1.1", "public_ip"),
                          ("instanceId", 1, "instance_id"),
                          ("domain", "some domain", "domain"),
                          ("allocationId", 1, "allocation_id"),
                          ("associationId", 1, "association_id"),
                          ("somethingRandom", "somethingRandom", "somethingRandom")]:
            self.check_that_attribute_has_been_set(arguments[0], arguments[1], arguments[2])
    def test_release_calls_connection_release_address_with_correct_args(self):
        self.address.release()
        self.address.connection.release_address.assert_called_with(
            association_id="aid1",
            dry_run=False
        )
    def test_associate_calls_connection_associate_address_with_correct_args(self):
        self.address.associate(network_interface_id=1)
        self.address.connection.associate_address.assert_called_with(
            instance_id=None,
            public_ip="192.168.1.1",
            network_interface_id=1,
            private_ip_address=None,
            allocation_id="aid1",
            allow_reassociation=False,
            dry_run=False
        )
    def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
        self.address.disassociate()
        self.address.connection.disassociate_address.assert_called_with(
            public_ip="192.168.1.1",
            dry_run=False
        )
if __name__ == "__main__":
    # Allow running this test file directly with `python test_address.py`.
    unittest.main()
|
msmolens/VTK | refs/heads/slicer-v6.3.0-2015-07-21-426987d | ThirdParty/Twisted/twisted/web/distrib.py | 63 | # -*- test-case-name: twisted.web.test.test_distrib -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Distributed web servers.
This is going to have to be refactored so that argument parsing is done
by each subprocess and not by the main web server (i.e. GET, POST etc.).
"""
# System Imports
import types, os, copy, cStringIO
try:
import pwd
except ImportError:
pwd = None
from xml.dom.minidom import Element, Text
# Twisted Imports
from twisted.spread import pb
from twisted.spread.banana import SIZE_LIMIT
from twisted.web import http, resource, server, html, static
from twisted.web.http_headers import Headers
from twisted.python import log
from twisted.persisted import styles
from twisted.internet import address, reactor
class _ReferenceableProducerWrapper(pb.Referenceable):
    """Expose a local producer's pause/resume/stop API to the remote side
    over PB by forwarding the remote_* calls to the wrapped producer.
    """
    def __init__(self, producer):
        self.producer = producer
    def remote_resumeProducing(self):
        self.producer.resumeProducing()
    def remote_pauseProducing(self):
        self.producer.pauseProducing()
    def remote_stopProducing(self):
        self.producer.stopProducing()
class Request(pb.RemoteCopy, server.Request):
    """
    A request which was received by a L{ResourceSubscription} and sent via
    PB to a distributed node.
    """
    def setCopyableState(self, state):
        """
        Initialize this L{twisted.web.distrib.Request} based on the copied
        state so that it closely resembles a L{twisted.web.server.Request}.
        """
        # Rebuild address objects from the serialized (type, ...) tuples.
        for k in 'host', 'client':
            tup = state[k]
            addrdesc = {'INET': 'TCP', 'UNIX': 'UNIX'}[tup[0]]
            addr = {'TCP': lambda: address.IPv4Address(addrdesc,
                                                       tup[1], tup[2]),
                    'UNIX': lambda: address.UNIXAddress(tup[1])}[addrdesc]()
            state[k] = addr
        state['requestHeaders'] = Headers(dict(state['requestHeaders']))
        pb.RemoteCopy.setCopyableState(self, state)
        # Emulate the local request interface --
        self.content = cStringIO.StringIO(self.content_data)
        self.finish = self.remote.remoteMethod('finish')
        self.setHeader = self.remote.remoteMethod('setHeader')
        self.addCookie = self.remote.remoteMethod('addCookie')
        self.setETag = self.remote.remoteMethod('setETag')
        self.setResponseCode = self.remote.remoteMethod('setResponseCode')
        self.setLastModified = self.remote.remoteMethod('setLastModified')
        # To avoid failing if a resource tries to write a very long string
        # all at once, this one will be handled slightly differently.
        self._write = self.remote.remoteMethod('write')
    def write(self, bytes):
        """
        Write the given bytes to the response body, splitting the data
        into chunks no larger than PB's banana SIZE_LIMIT so a single
        large write cannot exceed the wire protocol's message limit.
        """
        start = 0
        end = SIZE_LIMIT
        while True:
            self._write(bytes[start:end])
            start += SIZE_LIMIT
            end += SIZE_LIMIT
            if start >= len(bytes):
                break
    def registerProducer(self, producer, streaming):
        # Wrap the local producer so the remote request can control it.
        self.remote.callRemote("registerProducer",
                               _ReferenceableProducerWrapper(producer),
                               streaming).addErrback(self.fail)
    def unregisterProducer(self):
        self.remote.callRemote("unregisterProducer").addErrback(self.fail)
    def fail(self, failure):
        # Remote call failed; nothing to do locally but log it.
        log.err(failure)
# Teach PB to deserialize server.Request objects as our Request subclass.
pb.setUnjellyableForClass(server.Request, Request)
class Issue:
    """Tracks one in-flight remote render and completes the local request
    when the remote side's Deferred fires.
    """
    def __init__(self, request):
        self.request = request
    def finished(self, result):
        # Success callback: a plain string result is written out and the
        # request finished; NOT_DONE_YET means the remote side will keep
        # writing asynchronously.
        if result != server.NOT_DONE_YET:
            assert isinstance(result, types.StringType),\
                   "return value not a string"
            self.request.write(result)
            self.request.finish()
    def failed(self, failure):
        #XXX: Argh. FIXME.
        failure = str(failure)
        self.request.write(
            resource.ErrorPage(http.INTERNAL_SERVER_ERROR,
                               "Server Connection Lost",
                               "Connection to distributed server lost:" +
                               html.PRE(failure)).
            render(self.request))
        self.request.finish()
        log.msg(failure)
class ResourceSubscription(resource.Resource):
    """A local resource that proxies all rendering to a remote
    ResourcePublisher over PB, connecting lazily on first render and
    queueing requests until the connection is up.
    """
    isLeaf = 1   # never traverse children locally; the remote side decides
    waiting = 0  # truthy while a connection attempt is outstanding
    def __init__(self, host, port):
        resource.Resource.__init__(self)
        self.host = host
        self.port = port
        self.pending = []      # requests queued until connection succeeds
        self.publisher = None  # remote root object once connected
    def __getstate__(self):
        """Get persistent state for this ResourceSubscription.
        """
        # When I unserialize,
        state = copy.copy(self.__dict__)
        # Publisher won't be connected...
        state['publisher'] = None
        # I won't be making a connection
        state['waiting'] = 0
        # There will be no pending requests.
        state['pending'] = []
        return state
    def connected(self, publisher):
        """I've connected to a publisher; I'll now send all my requests.
        """
        log.msg('connected to publisher')
        publisher.broker.notifyOnDisconnect(self.booted)
        self.publisher = publisher
        self.waiting = 0
        for request in self.pending:
            self.render(request)
        self.pending = []
    def notConnected(self, msg):
        """I can't connect to a publisher; I'll now reply to all pending
        requests.
        """
        log.msg("could not connect to distributed web service: %s" % msg)
        self.waiting = 0
        self.publisher = None
        for request in self.pending:
            request.write("Unable to connect to distributed server.")
            request.finish()
        self.pending = []
    def booted(self):
        # Broker disconnected: treat exactly like a failed connection so
        # pending requests are answered and state is reset.
        self.notConnected("connection dropped")
    def render(self, request):
        """Render this request, from my server.

        This will always be asynchronous, and therefore return NOT_DONE_YET.
        It spins off a request to the pb client, and either adds it to the list
        of pending issues or requests it immediately, depending on if the
        client is already connected.
        """
        if not self.publisher:
            self.pending.append(request)
            if not self.waiting:
                self.waiting = 1
                bf = pb.PBClientFactory()
                timeout = 10
                # host == "unix" is a sentinel: port is then a socket path.
                if self.host == "unix":
                    reactor.connectUNIX(self.port, bf, timeout)
                else:
                    reactor.connectTCP(self.host, self.port, bf, timeout)
                d = bf.getRootObject()
                d.addCallbacks(self.connected, self.notConnected)
        else:
            i = Issue(request)
            self.publisher.callRemote('request', request).addCallbacks(i.finished, i.failed)
        return server.NOT_DONE_YET
class ResourcePublisher(pb.Root, styles.Versioned):
    """
    L{ResourcePublisher} exposes a remote API which can be used to respond
    to request.

    @ivar site: The site which will be used for resource lookup.
    @type site: L{twisted.web.server.Site}
    """
    def __init__(self, site):
        self.site = site
    # styles.Versioned persistence schema version; upgradeToVersion2 below
    # migrates instances pickled under the old application-based layout.
    persistenceVersion = 2
    def upgradeToVersion2(self):
        # Drop the obsolete twisted.internet.app-era attributes.
        self.application.authorizer.removeIdentity("web")
        del self.application.services[self.serviceName]
        del self.serviceName
        del self.application
        del self.perspectiveName
    def getPerspectiveNamed(self, name):
        # Every perspective name maps to this single publisher.
        return self
    def remote_request(self, request):
        """
        Look up the resource for the given request and render it.
        """
        res = self.site.getResourceFor(request)
        log.msg( request )
        result = res.render(request)
        if result is not server.NOT_DONE_YET:
            request.write(result)
            request.finish()
        # The caller's Issue.finished treats NOT_DONE_YET as "already
        # handled"; writing happened via the remote request methods above.
        return server.NOT_DONE_YET
class UserDirectory(resource.Resource):
    """
    A resource which lists available user resources and serves them as
    children.

    @ivar _pwd: An object like L{pwd} which is used to enumerate users and
        their home directories.
    """
    userDirName = 'public_html'       # per-user static content directory
    userSocketName = '.twistd-web-pb' # per-user distributed-server socket
    template = """
    <html>
    <head>
    <title>twisted.web.distrib.UserDirectory</title>
    <style>
    a
    {
        font-family: Lucida, Verdana, Helvetica, Arial, sans-serif;
        color: #369;
        text-decoration: none;
    }
    th
    {
        font-family: Lucida, Verdana, Helvetica, Arial, sans-serif;
        font-weight: bold;
        text-decoration: none;
        text-align: left;
    }
    pre, code
    {
        font-family: "Courier New", Courier, monospace;
    }
    p, body, td, ol, ul, menu, blockquote, div
    {
        font-family: Lucida, Verdana, Helvetica, Arial, sans-serif;
        color: #000;
    }
    </style>
    </head>
    <body>
    <h1>twisted.web.distrib.UserDirectory</h1>
    %(users)s
    </body>
    </html>
    """
    def __init__(self, userDatabase=None):
        resource.Resource.__init__(self)
        # Default to the system password database; tests can inject a fake.
        if userDatabase is None:
            userDatabase = pwd
        self._pwd = userDatabase
    def _users(self):
        """
        Return a list of two-tuples giving links to user resources and text to
        associate with those links.
        """
        users = []
        for user in self._pwd.getpwall():
            name, passwd, uid, gid, gecos, dir, shell = user
            # The first GECOS field is conventionally the real name.
            realname = gecos.split(',')[0]
            if not realname:
                realname = name
            if os.path.exists(os.path.join(dir, self.userDirName)):
                users.append((name, realname + ' (file)'))
            twistdsock = os.path.join(dir, self.userSocketName)
            if os.path.exists(twistdsock):
                linkName = name + '.twistd'
                users.append((linkName, realname + ' (twistd)'))
        return users
    def render_GET(self, request):
        """
        Render as HTML a listing of all known users with links to their
        personal resources.
        """
        listing = Element('ul')
        for link, text in self._users():
            linkElement = Element('a')
            linkElement.setAttribute('href', link + '/')
            textNode = Text()
            textNode.data = text
            linkElement.appendChild(textNode)
            item = Element('li')
            item.appendChild(linkElement)
            listing.appendChild(item)
        return self.template % {'users': listing.toxml()}
    def getChild(self, name, request):
        if name == '':
            return self
        # A trailing '.twistd' selects the user's distributed server
        # (PB socket); otherwise serve the user's public_html statically.
        td = '.twistd'
        if name[-len(td):] == td:
            username = name[:-len(td)]
            sub = 1
        else:
            username = name
            sub = 0
        try:
            pw_name, pw_passwd, pw_uid, pw_gid, pw_gecos, pw_dir, pw_shell \
                = self._pwd.getpwnam(username)
        except KeyError:
            return resource.NoResource()
        if sub:
            twistdsock = os.path.join(pw_dir, self.userSocketName)
            rs = ResourceSubscription('unix',twistdsock)
            # Cache the subscription so subsequent hits reuse it.
            self.putChild(name, rs)
            return rs
        else:
            path = os.path.join(pw_dir, self.userDirName)
            if not os.path.exists(path):
                return resource.NoResource()
            return static.File(path)
|
EE/bestja | refs/heads/master | addons/bestja_volunteer_fpbz/models.py | 2 | # -*- coding: utf-8 -*-
from openerp import models, fields
class DriversLicense(models.Model):
    """Dictionary model: a driver's license category a volunteer may hold."""
    _name = 'volunteer.drivers_license'
    # Field label is a user-facing Polish string; do not translate in code.
    name = fields.Char(required=True, string=u"nazwa")
class Volunteer(models.Model):
    # Extends the core user model with FPBZ-specific volunteer qualifications.
    _inherit = 'res.users'

    drivers_license = fields.Many2many(
        'volunteer.drivers_license',
        string=u"prawo jazdy",
        # NOTE(review): 'ondelete' is an option of relational *columns*
        # (Many2one); Odoo appears to ignore it on Many2many fields --
        # confirm the intended delete behaviour.
        ondelete='restrict',
    )
    sanepid = fields.Date(string=u"badania sanepidu")  # sanitary (Sanepid) examination date
    forklift = fields.Date(string=u"uprawnienia na wózek widłowy")  # forklift licence date

    def __init__(self, pool, cr):
        """Register the extra fields with the permission-level machinery."""
        super(Volunteer, self).__init__(pool, cr)
        # Both privileged viewers and the record owner may access the
        # qualification fields (mechanism presumably defined in the base
        # bestja volunteer module -- verify there).
        self._add_permitted_fields(
            level='privileged',
            fields={'drivers_license', 'sanepid', 'forklift'},
        )
        self._add_permitted_fields(
            level='owner',
            fields={'drivers_license', 'sanepid', 'forklift'},
        )
class Offer(models.Model):
    # Adds volunteer-qualification requirements to an offer.
    _inherit = 'offer'

    # NOTE(review): a Many2one allows only a single licence category per
    # offer, while volunteers hold a Many2many -- confirm this asymmetry
    # is intentional.
    drivers_license = fields.Many2one('volunteer.drivers_license', string=u"Prawa jazdy")
    sanepid = fields.Boolean(string=u"Badania sanepidu")  # requires Sanepid examination
    forklift = fields.Boolean(string=u"Uprawnienia na wózek widłowy")  # requires forklift licence
class Application(models.Model):
    # Mirrors the applicant's qualification fields onto the application;
    # related_sudo makes them readable regardless of record rules.
    _inherit = 'offers.application'

    drivers_license = fields.Many2many(related='user.drivers_license', related_sudo=True)
    sanepid = fields.Date(related='user.sanepid', related_sudo=True)
    forklift = fields.Date(related='user.forklift', related_sudo=True)
|
Fat-Zer/FreeCAD_sf_master | refs/heads/master | src/Mod/Path/PathScripts/post/jtech_post.py | 13 | # ***************************************************************************
# * Copyright (c) 2018 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * FreeCAD is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Lesser General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with FreeCAD; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
from __future__ import print_function
import FreeCAD
from FreeCAD import Units
import Path
import argparse
import datetime
import shlex
from PathScripts import PostUtils
TOOLTIP = '''
This is a postprocessor file for the Path workbench. It is used to
take a pseudo-gcode fragment outputted by a Path object, and output
real GCode suitable for a jtech photonics laser. This postprocessor, once placed
in the appropriate PathScripts folder, can be used directly from inside
FreeCAD, via the GUI importer or via python scripts with:
import jtech_post
jtech_post.export(object,"/path/to/file.ngc","")
'''
now = datetime.datetime.now()

# Command-line options understood by this postprocessor; the job's
# argstring is parsed against this in processArguments().
parser = argparse.ArgumentParser(prog='jtech', add_help=False)
parser.add_argument('--no-header', action='store_true', help='suppress header output')
parser.add_argument('--no-comments', action='store_true', help='suppress comment output')
parser.add_argument('--line-numbers', action='store_true', help='prefix with line numbers')
parser.add_argument('--no-show-editor', action='store_true', help='don\'t pop up editor before writing output')
parser.add_argument('--precision', default='3', help='number of digits of precision, default=3')
parser.add_argument('--preamble', help='set commands to be issued before the first command, default="M05 S0\nG90"')
parser.add_argument('--postamble', help='set commands to be issued after the last command, default="M05 S0\nM2"')
parser.add_argument('--inches', action='store_true', help='Convert output for US imperial mode (G20)')
parser.add_argument('--modal', action='store_true', help='Output the Same G-command Name USE NonModal Mode')
parser.add_argument('--axis-modal', action='store_true', help='Output the Same Axis Value Mode')
# BUGFIX: the help text used to claim "Default=0", contradicting the
# actual default of 255 milliseconds declared right here.
parser.add_argument('--power-on-delay', default='255', help='milliseconds - Add a delay after laser on before moving to pre-heat material. Default=255')
TOOLTIP_ARGS = parser.format_help()
# These globals set common customization preferences; they are
# overwritten by processArguments() from the job's argstring.
OUTPUT_COMMENTS = True
OUTPUT_HEADER = True
OUTPUT_LINE_NUMBERS = False
SHOW_EDITOR = True
MODAL = False  # if true commands are suppressed if the same as previous line.
OUTPUT_DOUBLES = True  # if false duplicate axis values are suppressed if the same as previous line.
COMMAND_SPACE = " "
LINENR = 100  # line number starting value

# These globals will be reflected in the Machine configuration of the project
UNITS = "G21"  # G21 for metric, G20 for us standard
UNIT_SPEED_FORMAT = 'mm/min'
UNIT_FORMAT = 'mm'
MACHINE_NAME = "JTECH Photonic Laser"
PRECISION = 3

# Preamble text will appear at the beginning of the GCODE output file.
PREAMBLE = '''M05 S0
G90
'''

# Postamble text will appear following the last operation.
POSTAMBLE = '''M05 S0
M2
'''

# Inserted before the first feed move following a rapid: laser on (M03)
# plus a dwell (G4 P{}) filled in from POWER_ON_DELAY to pre-heat the
# material.
PRE_FEED = '''M03
G4 P{}
'''

# Inserted before the first rapid following a feed move: laser off (M05).
POST_FEED = '''M05
'''

# Pre operation text will be inserted before every operation
PRE_OPERATION = ''''''

# Post operation text will be inserted after every operation
POST_OPERATION = ''''''

# Tool Change commands will be inserted before a tool change
TOOL_CHANGE = ''''''

# Seconds; processArguments() converts the --power-on-delay milliseconds.
POWER_ON_DELAY = 0
# Alias the builtin open so later code can still reach it even if a local
# 'open' is ever introduced.  The original guard compared
# open.__module__ to '__builtin__', which is the Python 2 name; on
# Python 3 the module is 'builtins', so 'pythonopen' was never defined
# and export() crashed with NameError.  An unconditional alias is
# correct on both versions.
pythonopen = open
def processArguments(argstring):
    """Parse *argstring* and overwrite the module-level options.

    Returns True on success, False when the arguments cannot be parsed.
    Note the mutations are order-dependent: --inches overrides the
    precision set from --precision.
    """
    # pylint: disable=global-statement
    global OUTPUT_HEADER
    global OUTPUT_COMMENTS
    global OUTPUT_LINE_NUMBERS
    global SHOW_EDITOR
    global PRECISION
    global PREAMBLE
    global POSTAMBLE
    global UNITS
    global UNIT_SPEED_FORMAT
    global UNIT_FORMAT
    global MODAL
    global OUTPUT_DOUBLES
    global POWER_ON_DELAY

    try:
        args = parser.parse_args(shlex.split(argstring))
        if args.no_header:
            OUTPUT_HEADER = False
        if args.no_comments:
            OUTPUT_COMMENTS = False
        if args.line_numbers:
            OUTPUT_LINE_NUMBERS = True
        if args.no_show_editor:
            SHOW_EDITOR = False
        print("Show editor = %d" % SHOW_EDITOR)
        # NOTE: argparse yields a *string* here; parse() later embeds it
        # in a format spec, so this works as-is.
        PRECISION = args.precision
        if args.preamble is not None:
            PREAMBLE = args.preamble
        if args.postamble is not None:
            POSTAMBLE = args.postamble
        if args.inches:
            UNITS = 'G20'
            UNIT_SPEED_FORMAT = 'in/min'
            UNIT_FORMAT = 'in'
            PRECISION = 4
        if args.modal:
            MODAL = True
        if args.axis_modal:
            OUTPUT_DOUBLES = False
        # Convert the CLI milliseconds into seconds for the G4 dwell.
        POWER_ON_DELAY = float(args.power_on_delay) / 1000  # milliseconds
    except Exception: # pylint: disable=broad-except
        return False

    return True
def export(objectslist, filename, argstring):
    """Post-process *objectslist* into g-code and write it to *filename*.

    Returns the final g-code string, or None when the argstring cannot
    be parsed or the selection contains a non-path object.  When
    *filename* is "-" nothing is written to disk.
    """
    if not processArguments(argstring):
        return None
    for obj in objectslist:
        if not hasattr(obj, "Path"):
            print("the object " + obj.Name + " is not a path. Please select only path and Compounds.")
            return None

    print("postprocessing...")
    gcode = ""

    # write header
    if OUTPUT_HEADER:
        gcode += linenumber() + "(Exported by FreeCAD)\n"
        gcode += linenumber() + "(Post Processor: " + __name__ + ")\n"
        gcode += linenumber() + "(Output Time:" + str(now) + ")\n"

    # Write the preamble and the unit-selection command
    if OUTPUT_COMMENTS:
        gcode += linenumber() + "(begin preamble)\n"
    for line in PREAMBLE.splitlines(False):
        gcode += linenumber() + line + "\n"
    gcode += linenumber() + UNITS + "\n"

    for obj in objectslist:
        # do the pre_op
        if OUTPUT_COMMENTS:
            gcode += linenumber() + "(begin operation: %s)\n" % obj.Label
        for line in PRE_OPERATION.splitlines(True):
            gcode += linenumber() + line

        gcode += parse(obj)

        # do the post_op
        if OUTPUT_COMMENTS:
            gcode += linenumber() + "(finish operation: %s)\n" % obj.Label
        for line in POST_OPERATION.splitlines(True):
            gcode += linenumber() + line

    # do the post_amble
    if OUTPUT_COMMENTS:
        gcode += "(begin postamble)\n"
    for line in POSTAMBLE.splitlines(True):
        gcode += linenumber() + line

    if FreeCAD.GuiUp and SHOW_EDITOR:
        # Let the user review (and optionally edit) the g-code first.
        dia = PostUtils.GCodeEditorDialog()
        dia.editor.setText(gcode)
        result = dia.exec_()
        if result:
            final = dia.editor.toPlainText()
        else:
            final = gcode
    else:
        final = gcode

    print("done postprocessing.")

    if not filename == '-':
        # BUGFIX: 'final' is a str, so the file must be opened in text
        # mode -- the original "wb" raised TypeError on Python 3.  The
        # context manager also guarantees the handle is closed on error.
        with open(filename, "w") as gfile:
            gfile.write(final)

    return final
def linenumber():
    """Return the next "N<n> " line-number prefix, or "" when disabled."""
    global LINENR  # pylint: disable=global-statement
    if OUTPUT_LINE_NUMBERS is not True:
        return ""
    LINENR += 10
    return "N" + str(LINENR) + " "
def parse(pathobj):
    """Translate one Path object (or compound) into g-code text.

    Recurses into compounds/projects via the object's Group.  The laser
    control is injected here: M03 plus a pre-heat dwell (PRE_FEED)
    before the first feed move after a rapid, and M05 (POST_FEED)
    before the first rapid after a feed move.
    """
    out = ""
    lastcommand = None
    precision_string = '.' + str(PRECISION) + 'f'
    currLocation = {}  # keep track for no doubles
    RAPID_MOVES = ["G0", "G00"]
    FEED_MOVES = ["G1", "G01", "G2", "G02", "G3", "G03"]

    # the order of parameters
    params = ['X', 'Y', 'Z', 'A', 'B', 'C', 'I', 'J', 'F', 'S', 'T', 'Q', 'R', 'L', 'H', 'D', 'P']
    firstmove = Path.Command("G0", {"X": -1, "Y": -1, "Z": -1, "F": 0.0})
    currLocation.update(firstmove.Parameters)  # set First location Parameters

    if hasattr(pathobj, "Group"):  # We have a compound or project.
        for p in pathobj.Group:
            out += parse(p)
        return out
    else:  # parsing simple path
        # groups might contain non-path things like stock.
        if not hasattr(pathobj, "Path"):
            return out

        for c in pathobj.Path.Commands:
            outstring = []
            command = c.Name

            # Laser on/off when the move type changes between rapid and feed.
            controlstring = ""
            if command in FEED_MOVES and lastcommand in RAPID_MOVES:
                controlstring = PRE_FEED.format(POWER_ON_DELAY)
            elif command in RAPID_MOVES and lastcommand in FEED_MOVES:
                controlstring = POST_FEED
            if len(controlstring) > 0:
                out += controlstring

            outstring.append(command)

            # if modal: suppress the command if it is the same as the last one
            if MODAL is True:
                if command == lastcommand:
                    outstring.pop(0)

            if c.Name[0] == '(' and not OUTPUT_COMMENTS:  # command is a comment
                continue

            # Now add the remaining parameters in order
            for param in params:
                if param in c.Parameters:
                    if param == 'F' and (currLocation[param] != c.Parameters[param] or OUTPUT_DOUBLES):
                        if c.Name not in RAPID_MOVES:  # linuxcnc doesn't use rapid speeds
                            speed = Units.Quantity(c.Parameters['F'], FreeCAD.Units.Velocity)
                            if speed.getValueAs(UNIT_SPEED_FORMAT) > 0.0:
                                outstring.append(param + format(float(speed.getValueAs(UNIT_SPEED_FORMAT)), precision_string))
                        else:
                            continue
                    elif param == 'T':
                        outstring.append(param + str(int(c.Parameters['T'])))
                    elif param == 'H':
                        outstring.append(param + str(int(c.Parameters['H'])))
                    elif param == 'D':
                        outstring.append(param + str(int(c.Parameters['D'])))
                    elif param == 'S':
                        outstring.append(param + str(int(c.Parameters['S'])))
                    else:
                        if (not OUTPUT_DOUBLES) and (param in currLocation) and (currLocation[param] == c.Parameters[param]):
                            continue
                        else:
                            pos = Units.Quantity(c.Parameters[param], FreeCAD.Units.Length)
                            outstring.append(
                                param + format(float(pos.getValueAs(UNIT_FORMAT)), precision_string))

            # store the latest command
            lastcommand = command
            currLocation.update(c.Parameters)

            # Check for Tool Change:
            if command == 'M6':
                continue

            if command == "message":
                if OUTPUT_COMMENTS is False:
                    # BUGFIX: drop only this message line.  The original
                    # assigned 'out = []' here, which discarded all the
                    # g-code accumulated so far and broke the later
                    # 'out += ...' / 'out.strip()' string operations.
                    outstring = []
                else:
                    outstring.pop(0)  # remove the command

            # prepend a line number and append a newline
            if len(outstring) >= 1:
                if OUTPUT_LINE_NUMBERS:
                    outstring.insert(0, (linenumber()))

                # append the line to the final output
                for w in outstring:
                    out += w + COMMAND_SPACE
                out = out.strip() + "\n"

        return out
# print(__name__ + " gcode postprocessor loaded.")
|
gpetukhov/pybb | refs/heads/master | pybb/signals.py | 1 | from django.db.models.signals import post_save
from django.contrib.auth.models import User
from pybb.subscription import notify_topic_subscribers
from pybb.models import Post, Topic, Profile, ReadTracking
def post_saved(instance, **kwargs):
    """Notify topic subscribers and refresh the author's cached post count."""
    notify_topic_subscribers(instance)

    author = instance.user
    profile = author.pybb_profile
    profile.post_count = author.pybb_posts.count()
    profile.save()
def topic_saved(instance, **kwargs):
    """Refresh the cached topic count of the saved topic's forum."""
    parent_forum = instance.forum
    parent_forum.topic_count = parent_forum.topics.count()
    parent_forum.save()
def user_saved(instance, created, **kwargs):
    """Create the pybb bookkeeping rows the first time a user is saved."""
    if not created:
        return
    Profile.objects.create(user=instance)
    ReadTracking.objects.create(user=instance)
# Keep the denormalized counters and per-user bookkeeping in sync by
# running the handlers above after every save of the relevant models.
post_save.connect(post_saved, sender=Post)
post_save.connect(topic_saved, sender=Topic)
post_save.connect(user_saved, sender=User)
|
enzochiau/tablib | refs/heads/develop | tablib/packages/xlwt/examples/row_styles_empty.py | 20 | #!/usr/bin/env python
# -*- coding: windows-1251 -*-
# Copyright (C) 2005 Kiseliov Roman
__rev_id__ = """$Id: row_styles_empty.py 3309 2008-03-14 11:04:30Z chris $"""
from pyExcelerator import *
workbook = Workbook()
sheet = workbook.add_sheet('Hey, Dude')

# Give each of rows 6..79 a default style whose font height grows with
# the row index (height is in 1/20 pt, so row i gets an i-point font).
for row_index in range(6, 80):
    font = Font()
    font.height = row_index * 20
    row_style = XFStyle()
    row_style.font = font
    sheet.row(row_index).set_style(row_style)

workbook.save('row_styles_empty.xls')
|
run2/citytour | refs/heads/master | 4symantec/Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py | 101 | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
# Names re-exported as this package's public metadata.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "15.1"

__author__ = "Donald Stufft"
__email__ = "donald@stufft.io"

__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2014 %s" % __author__
|
raiden-network/raiden | refs/heads/develop | raiden/tests/unit/test_raiden_event_handler.py | 1 | from typing import cast
from unittest.mock import Mock, call, patch
from uuid import UUID, uuid4
from raiden.constants import LOCKSROOT_OF_NO_LOCKS, RoutingMode
from raiden.network.proxies.token_network import ParticipantDetails, ParticipantsDetails
from raiden.raiden_event_handler import PFSFeedbackEventHandler, RaidenEventHandler
from raiden.raiden_service import RaidenService
from raiden.tests.utils.factories import (
make_address,
make_block_hash,
make_canonical_identifier,
make_channel_identifier,
make_locksroot,
make_payment_id,
make_secret,
make_secret_hash,
make_token_network_address,
make_token_network_registry_address,
)
from raiden.tests.utils.mocks import make_raiden_service_mock
from raiden.transfer.events import ContractSendChannelBatchUnlock, EventPaymentSentSuccess
from raiden.transfer.mediated_transfer.events import EventRouteFailed
from raiden.transfer.state import ChainState
from raiden.transfer.utils import hash_balance_data
from raiden.transfer.views import get_channelstate_by_token_network_and_partner, state_from_raiden
from raiden.utils.typing import (
Address,
ChannelID,
List,
LockedAmount,
Nonce,
Optional,
PaymentAmount,
TargetAddress,
TokenAmount,
TokenNetworkAddress,
TokenNetworkRegistryAddress,
Tuple,
WithdrawAmount,
)
def test_handle_contract_send_channelunlock_already_unlocked():
    """This is a test for the scenario where the onchain unlock has
    already happened when we get to handle our own send unlock
    transaction.
    Regression test for https://github.com/raiden-network/raiden/issues/3152
    """
    channel_identifier = ChannelID(1)
    token_network_registry_address = make_token_network_registry_address()
    token_network_address = make_token_network_address()
    participant = make_address()
    raiden = make_raiden_service_mock(
        token_network_registry_address=token_network_registry_address,
        token_network_address=token_network_address,
        channel_identifier=channel_identifier,
        partner=participant,
    )
    channel_state = get_channelstate_by_token_network_and_partner(
        chain_state=state_from_raiden(raiden),
        token_network_address=token_network_address,
        partner_address=participant,
    )
    assert channel_state
    # Locally record both locksroots as already reset to the
    # "no locks" value.
    channel_state.our_state.onchain_locksroot = LOCKSROOT_OF_NO_LOCKS
    channel_state.partner_state.onchain_locksroot = LOCKSROOT_OF_NO_LOCKS

    # Stub for TokenNetwork.detail_participants: reports the partner's
    # on-chain locksroot as already cleared (i.e. unlock already done).
    def detail_participants(_participant1, _participant2, _block_identifier, _channel_identifier):
        transferred_amount = TokenAmount(1)
        locked_amount = LockedAmount(1)
        locksroot = make_locksroot()
        balance_hash = hash_balance_data(transferred_amount, locked_amount, locksroot)
        our_details = ParticipantDetails(
            address=raiden.address,
            deposit=TokenAmount(5),
            withdrawn=WithdrawAmount(0),
            is_closer=False,
            balance_hash=balance_hash,
            nonce=Nonce(1),
            locksroot=locksroot,
            locked_amount=locked_amount,
        )
        transferred_amount = TokenAmount(1)
        locked_amount = LockedAmount(1)
        # Let's mock here that partner locksroot is 0x0
        balance_hash = hash_balance_data(transferred_amount, locked_amount, locksroot)
        partner_details = ParticipantDetails(
            address=participant,
            deposit=TokenAmount(5),
            withdrawn=WithdrawAmount(0),
            is_closer=True,
            balance_hash=balance_hash,
            nonce=Nonce(1),
            locksroot=LOCKSROOT_OF_NO_LOCKS,
            locked_amount=locked_amount,
        )
        return ParticipantsDetails(our_details, partner_details)

    # make sure detail_participants returns partner data with a locksroot of 0x0
    raiden.proxy_manager.token_network.detail_participants = detail_participants
    event = ContractSendChannelBatchUnlock(
        canonical_identifier=make_canonical_identifier(
            token_network_address=token_network_address, channel_identifier=channel_identifier
        ),
        sender=participant,
        triggered_by_block_hash=make_block_hash(),
    )
    # This should not throw an unrecoverable error
    RaidenEventHandler().on_raiden_events(
        raiden=raiden, chain_state=raiden.wal.get_current_state(), events=[event]
    )
def setup_pfs_handler_test(
    set_feedback_token: bool,
) -> Tuple[
    RaidenService,
    PFSFeedbackEventHandler,
    TokenNetworkRegistryAddress,
    TokenNetworkAddress,
    List[Address],
    Optional[UUID],
]:
    """Build a mocked RaidenService and a PFSFeedbackEventHandler.

    Returns the mocked service, the handler, the registry and token
    network addresses, a three-hop route, and the feedback token stored
    for that route (None when ``set_feedback_token`` is False).
    """
    channel_identifier = make_channel_identifier()
    token_network_registry_address = make_token_network_registry_address()
    token_network_address = make_token_network_address()
    participant = make_address()
    raiden = make_raiden_service_mock(
        token_network_registry_address=token_network_registry_address,
        token_network_address=token_network_address,
        channel_identifier=channel_identifier,
        partner=participant,
    )
    default_handler = RaidenEventHandler()
    pfs_handler = PFSFeedbackEventHandler(default_handler)
    route = [make_address(), make_address(), make_address()]
    # Set PFS config and feedback token
    pfs_config = True  # just a truthy value
    raiden.config.pfs_config = pfs_config
    feedback_uuid = None
    if set_feedback_token:
        feedback_uuid = uuid4()
        raiden.route_to_feedback_token[tuple(route)] = feedback_uuid
    return (
        raiden,
        pfs_handler,
        token_network_registry_address,
        token_network_address,
        route,
        feedback_uuid,
    )
def test_pfs_handler_handle_routefailed_with_feedback_token():
    """A route failure with a stored feedback token is reported to the PFS."""
    raiden, pfs_handler, _, token_network_address, route, feedback_uuid = setup_pfs_handler_test(
        set_feedback_token=True
    )

    failure_event = EventRouteFailed(
        secrethash=make_secret_hash(),
        route=route,
        token_network_address=token_network_address,
    )

    with patch("raiden.raiden_event_handler.post_pfs_feedback") as feedback_mock:
        pfs_handler.on_raiden_events(
            raiden=raiden,
            chain_state=cast(ChainState, raiden.wal.get_current_state()),  # type: ignore
            events=[failure_event],
        )

    assert feedback_mock.called
    assert feedback_mock.call_args == call(
        pfs_config=raiden.config.pfs_config,
        route=route,
        routing_mode=RoutingMode.PRIVATE,
        successful=False,
        token=feedback_uuid,
        token_network_address=token_network_address,
    )
def test_pfs_handler_handle_routefailed_without_feedback_token():
    """Without a stored feedback token, a route failure sends no PFS feedback."""
    raiden, pfs_handler, _, token_network_address, route, _ = setup_pfs_handler_test(
        set_feedback_token=False
    )

    failure_event = EventRouteFailed(
        secrethash=make_secret_hash(),
        route=route,
        token_network_address=token_network_address,
    )

    with patch("raiden.raiden_event_handler.post_pfs_feedback") as feedback_mock:
        pfs_handler.on_raiden_events(
            raiden=raiden,
            chain_state=cast(ChainState, raiden.wal.get_current_state()),  # type: ignore
            events=[failure_event],
        )

    assert not feedback_mock.called
def test_pfs_handler_handle_paymentsentsuccess_with_feedback_token():
    """A successful payment with a stored feedback token is reported to the PFS."""
    (
        raiden,
        pfs_handler,
        token_network_registry_address,
        token_network_address,
        route,
        feedback_uuid,
    ) = setup_pfs_handler_test(set_feedback_token=True)

    payment_id = make_payment_id()
    amount = PaymentAmount(123)
    target = TargetAddress(route[-1])

    raiden.targets_to_identifiers_to_statuses[target][payment_id] = Mock()

    success_event = EventPaymentSentSuccess(
        token_network_registry_address=token_network_registry_address,
        token_network_address=token_network_address,
        identifier=payment_id,
        amount=amount,
        target=TargetAddress(target),
        secret=make_secret(),
        route=route,
    )

    with patch("raiden.raiden_event_handler.post_pfs_feedback") as feedback_mock:
        pfs_handler.on_raiden_events(
            raiden=raiden,
            chain_state=cast(ChainState, raiden.wal.get_current_state()),  # type: ignore
            events=[success_event],
        )

    assert feedback_mock.called
    assert feedback_mock.call_args == call(
        pfs_config=raiden.config.pfs_config,
        route=route,
        routing_mode=RoutingMode.PRIVATE,
        successful=True,
        token=feedback_uuid,
        token_network_address=token_network_address,
    )
def test_pfs_handler_handle_paymentsentsuccess_without_feedback_token():
    """Without a stored feedback token, payment success sends no PFS feedback."""
    (
        raiden,
        pfs_handler,
        token_network_registry_address,
        token_network_address,
        route,
        _,
    ) = setup_pfs_handler_test(set_feedback_token=False)

    payment_id = make_payment_id()
    amount = PaymentAmount(123)
    target = TargetAddress(route[-1])

    raiden.targets_to_identifiers_to_statuses[target][payment_id] = Mock()

    success_event = EventPaymentSentSuccess(
        token_network_registry_address=token_network_registry_address,
        token_network_address=token_network_address,
        identifier=payment_id,
        amount=amount,
        target=TargetAddress(target),
        secret=make_secret(),
        route=route,
    )

    with patch("raiden.raiden_event_handler.post_pfs_feedback") as feedback_mock:
        pfs_handler.on_raiden_events(
            raiden=raiden,
            chain_state=cast(ChainState, raiden.wal.get_current_state()),  # type: ignore
            events=[success_event],
        )

    assert not feedback_mock.called
|
gunchleoc/django | refs/heads/master | tests/template_tests/filter_tests/test_dictsort.py | 342 | from django.template.defaultfilters import dictsort
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):

    def test_sort(self):
        """dictsort() orders a list of dicts by the value under the given key."""
        sorted_dicts = dictsort(
            [{'age': 23, 'name': 'Barbara-Ann'},
             {'age': 63, 'name': 'Ra Ra Rasputin'},
             {'name': 'Jonny B Goode', 'age': 18}],
            'age',
        )

        # Loop variable renamed from 'dict' to 'd' so the builtin is no
        # longer shadowed.
        self.assertEqual(
            [sorted(d.items()) for d in sorted_dicts],
            [[('age', 18), ('name', 'Jonny B Goode')],
             [('age', 23), ('name', 'Barbara-Ann')],
             [('age', 63), ('name', 'Ra Ra Rasputin')]],
        )

    def test_dictsort_complex_sorting_key(self):
        """
        Since dictsort uses template.Variable under the hood, it can sort
        on keys like 'foo.bar'.
        """
        data = [
            {'foo': {'bar': 1, 'baz': 'c'}},
            {'foo': {'bar': 2, 'baz': 'b'}},
            {'foo': {'bar': 3, 'baz': 'a'}},
        ]
        sorted_data = dictsort(data, 'foo.baz')

        self.assertEqual([d['foo']['bar'] for d in sorted_data], [3, 2, 1])

    def test_invalid_values(self):
        """
        If dictsort is passed something other than a list of dictionaries,
        fail silently.
        """
        self.assertEqual(dictsort([1, 2, 3], 'age'), '')
        self.assertEqual(dictsort('Hello!', 'age'), '')
        self.assertEqual(dictsort({'a': 1}, 'age'), '')
        self.assertEqual(dictsort(1, 'age'), '')
|
leo524/7mos-fourm | refs/heads/master | node_modules/nodebb-plugin-markdown/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/modeline.py | 292 | # -*- coding: utf-8 -*-
"""
pygments.modeline
~~~~~~~~~~~~~~~~~
A simple modeline parser (based on pymodeline).
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
__all__ = ['get_filetype_from_buffer']
# Matches vi/vim/ex modelines such as "vim: set ft=python:" and captures
# the declared filetype.
modeline_re = re.compile(r'''
    (?: vi | vim | ex ) (?: [<=>]? \d* )? :
    .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
''', re.VERBOSE)


def get_filetype_from_line(l):
    """Return the filetype named in a modeline on *l*, or None."""
    match = modeline_re.search(l)
    return match.group(1) if match else None
def get_filetype_from_buffer(buf, max_lines=5):
    """
    Scan the buffer for modelines and return filetype if one is found.

    Only the last and then the first ``max_lines`` lines are inspected,
    each scanned bottom-up.
    """
    lines = buf.splitlines()
    # Tail of the buffer first: the last max_lines lines, bottom-up.
    for candidate in reversed(lines[-max_lines:]):
        filetype = get_filetype_from_line(candidate)
        if filetype:
            return filetype
    # Then the head, bottom-up.  NOTE: the slice stops before index 0,
    # so the very first line is never inspected in this pass.
    for candidate in lines[max_lines:0:-1]:
        filetype = get_filetype_from_line(candidate)
        if filetype:
            return filetype

    return None
|
d1hotpep/openai_gym | refs/heads/master | gym/envs/doom/doom_take_cover.py | 2 | import logging
from gym.envs.doom import doom_env
logger = logging.getLogger(__name__)
class DoomTakeCoverEnv(doom_env.DoomEnv):
    """
    ------------ Training Mission 8 - Take Cover ------------
    This map is to train you on the damage of incoming missiles.
    It is a rectangular map with monsters firing missiles and fireballs
    at you. You need to survive as long as possible.
    Allowed actions:
        [10] - MOVE_RIGHT                       - Move to the right - Values 0 or 1
        [11] - MOVE_LEFT                        - Move to the left - Values 0 or 1
    Note: see controls.md for details
    Rewards:
        +  1    - 35 times per second - Survive as long as possible
    Goal: 750 points
        Survive for ~ 20 seconds
    Mode:
        - env.mode can be 'fast', 'normal' or 'human' (e.g. env.mode = 'fast')
        - 'fast' (default) will run as fast as possible (~75 fps) (best for simulation)
        - 'normal' will run at roughly 35 fps (easier for human to watch)
        - 'human' will let you play the game (keyboard only: Arrow Keys, '<', '>' and Ctrl)
    Ends when:
        - Player is dead (one or two fireballs should be enough to kill you)
        - Timeout (60 seconds - 2,100 frames)
    Actions:
        actions = [0] * 43
        actions[10] = 0      # MOVE_RIGHT
        actions[11] = 1      # MOVE_LEFT
    -----------------------------------------------------
    """
    def __init__(self):
        # Level index 7 presumably selects the "Take Cover" scenario in
        # the DoomEnv base class -- confirm against doom_env's level list.
        super(DoomTakeCoverEnv, self).__init__(7)
|
alash3al/rethinkdb | refs/heads/next | external/v8_3.30.33.16/build/gyp/test/hello/gyptest-disable-regyp.py | 501 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that Makefiles don't get rebuilt when a source gyp file changes and
the disable_regeneration generator flag is set.
"""
import TestGyp
test = TestGyp.TestGyp()

# First generation/build with regeneration disabled; produces the
# original "Hello, world!" binary.
test.run_gyp('hello.gyp', '-Gauto_regeneration=0')

test.build('hello.gyp', test.ALL)

test.run_built_executable('hello', stdout="Hello, world!\n")

# Sleep so that the changed gyp file will have a newer timestamp than the
# previously generated build files.
test.sleep()
test.write('hello.gyp', test.read('hello2.gyp'))

test.build('hello.gyp', test.ALL)

# Should still be the old executable, as regeneration was disabled.
test.run_built_executable('hello', stdout="Hello, world!\n")

test.pass_test()
|
VoIP-co-uk/sftf | refs/heads/master | UserAgentBasicTestSuite/case302bye.py | 2 | #
# Copyright (C) 2004 SIPfoundry Inc.
# Licensed by SIPfoundry under the GPL license.
#
# Copyright (C) 2004 SIP Forum
# Licensed to SIPfoundry under a Contributor Agreement.
#
#
# This file is part of SIP Forum User Agent Basic Test Suite which
# belongs to the SIP Forum Test Framework.
#
# SIP Forum User Agent Basic Test Suite is free software; you can
# redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# SIP Forum User Agent Basic Test Suite is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SIP Forum User Agent Basic Test Suite; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# $Id: case302bye.py,v 1.2 2004/05/02 18:57:35 lando Exp $
#
from TestCase import TestCase
import NetworkEventHandler as NEH
import Log
class case302bye (TestCase):
    """Acts as a SIP UAC: sends an INVITE, challenges the phone's BYE with a
    Digest 401 (no qop), and validates the re-sent BYE's Authorization
    header and credentials.
    """

    def config(self):
        # Test metadata consumed by the SFTF framework.
        self.name = "Case 302bye"
        self.description = "Digest Authentication of BYE without qop"
        self.isClient = True
        self.transport = "UDP"
        self.interactRequired = True  # a human must answer and hang up

    def run(self):
        self.neh = NEH.NetworkEventHandler(self.transport)

        #if not self.userInteraction("case302bye: proceed when ready to send a INVITE"):
        #	neh.closeSock()
        #	return
        inv = self.createRequest("INVITE")
        # 0 until we have replied 401 to the phone's first BYE.
        self.challenged = 0
        self.writeMessageToNetwork(self.neh, inv)

        # Event loop: onXXX handlers below fire per message; onBYE sets
        # self.end once the authenticated BYE has been answered.
        self.end = 0
        while self.end == 0:
            req = self.readMessageFromNetwork(self.neh, 10)
            if req is None:
                self.end = 1

        # req is the last message seen (None on timeout).
        if req is None:
            if self.challenged == 1:
                self.addResult(TestCase.TC_FAILED, "missing BYE after sending challenge (401)")
            else:
                self.addResult(TestCase.TC_ERROR, "missing reply on INVITE")
        else:
            if self.challenged == 1:
                if req.hasParsedHeaderField("Authorization"):
                    auth_p = req.getParsedHeaderValue("Authorization")
                    # verify() returns a (possibly empty) list of findings.
                    ret = auth_p.verify(req.getHeaderValue("Authorization"))
                    if ret:
                        Log.logDebug("case302bye: warnings or errors about the Authorization header, see test log", 1)
                        Log.logTest("case302bye: warnings or errors about the Authorization header, see WARNINGS above")
                        self.results.extend(ret)
                    # RFC 3261 requires a fresh (higher) CSeq when the
                    # request is re-submitted with credentials.
                    if self.first_bye.hasParsedHeaderField("CSeq") and self.bye.hasParsedHeaderField("CSeq"):
                        if (self.bye.getParsedHeaderValue("CSeq").number <= self.first_bye.getParsedHeaderValue("CSeq").number) and (self.bye.getParsedHeaderValue("CallID") == self.first_bye.getParsedHeaderValue("CallID")):
                            self.addResult(TestCase.TC_WARN, "CSeq number was not increased for authorization")
                else:
                    if req.hasHeaderField("Authorization"):
                        Log.logDebug("case302bye: failed to parse the given Authorization header", 1)
                        Log.logTest("case302bye: unable to parse the Authorization header")
                        self.addResult(TestCase.TC_ERROR, "failed to parse Authorization header")
                    else:
                        Log.logDebug("case302bye: missing Authorization header in request", 1)
                        Log.logTest("case302bye: missing Authorization header in request")
                        self.addResult(TestCase.TC_FAILED, "missing Authorization header in request")
                # Check the digest response value itself.
                if self.checkAuthResponse(req):
                    Log.logDebug("case302bye: authenticaton reply is valid", 2)
                    Log.logTest("case302bye: authenticaton reply is valid")
                    self.addResult(TestCase.TC_PASSED, "authentication reply is valid")
                else:
                    Log.logDebug("case302bye: authentication reply is NOT valid", 1)
                    Log.logTest("case302bye: authentication reply is NOT valid")
                    self.addResult(TestCase.TC_FAILED, "wrong authentication reply")
        self.neh.closeSock()

    def on180(self, message):
        # Provisional ringing: prompt the operator to pick up.
        print "  !!!! PLEASE ANSWER/PICKUP THE CALL !!!!"

    def on183(self, message):
        self.on180(message)

    def on200(self, message):
        # Call established: ACK it and ask the operator to hang up, which
        # triggers the BYE under test.
        ack = self.createRequest("ACK", trans=message.transaction)
        self.writeMessageToNetwork(self.neh, ack)
        print "  !!!! PLEASE TERMINATE/HANGUP THE CALL !!!!!!"

    def onBYE(self, message):
        if self.challenged == 0:
            # First BYE: reply with a Digest challenge (401) and wait for
            # the authenticated retry.
            self.first_bye = message
            repl = self.createChallenge(mes=message)
            self.writeMessageToNetwork(self.neh, repl)
            self.challenged = 1
        else:
            # Second (authenticated) BYE: accept it and end the loop.
            self.bye = message
            repl = self.createReply(200, "OK")
            self.end = 1
            self.writeMessageToNetwork(self.neh, repl)
|
wiki-ai/wikiclass | refs/heads/master | articlequality/utilities/weighted_sum.py | 3 | """
``$ articlequality weighted_sum -h``
::
Extracts probabilities assigned to each class from the output of
revscoring score utility and outputs the weighted sum of the article
quality predicted where each class is represented as a weight sorted
in a yaml config file.
Usage:
weighted_sum <weights> [--scores=<path>] [--output=<path>]
Options:
-h --help Show this documentation.
<weights> Path to a yaml file containing class weights
--scores=<path> Path to a file containting scores generated by
`revscoring score`. [default: <stdin>]
--output=<path> Path to a file to write new observations
(with "weighted_sum") out to.
[default: <stdout>]
"""
import sys
import json
from docopt import docopt
import yamlconf
def main(argv=None):
    """CLI entry point: load weights, read scores, write weighted sums."""
    args = docopt(__doc__, argv=argv)

    weights = yamlconf.load(open(args['<weights>']))

    if args['--scores'] == '<stdin>':
        revision_scores = read_revision_scores(sys.stdin)
    else:
        revision_scores = read_revision_scores(open(args['--scores']))

    if args['--output'] == '<stdout>':
        output = sys.stdout
    else:
        # BUGFIX: the output file must be opened for writing; the
        # original open(path) used the default read mode, so run()'s
        # first write() would fail.
        output = open(args['--output'], 'w')

    run(revision_scores, weights, output)
def read_revision_scores(f):
    """Yield ``(rev_id, score_doc)`` pairs from a TSV stream.

    Each line of *f* must contain an integer revision id and a JSON
    document separated by a single tab character.
    """
    for raw_line in f:
        rev_id_field, json_field = raw_line.split('\t', 1)
        yield int(rev_id_field), json.loads(json_field)
def run(revision_scores, weights, output):
    """Annotate each score with its weighted sum and write it out as TSV.

    For every ``(rev_id, score)`` pair, computes the sum of
    ``weight * probability`` over every class in *weights*, stores it
    under the ``"weighted_sum"`` key, and writes one tab-separated line
    per revision to *output*.
    """
    for rev_id, score in revision_scores:
        total = sum(weight * score['probability'][cls]
                    for cls, weight in weights.items())
        score['weighted_sum'] = total
        output.write("{0}\t{1}\n".format(rev_id, json.dumps(score)))
|
pawaranand/phr_frappe | refs/heads/develop | frappe/tests/test_geo_ip.py | 37 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
import unittest
class TestGeoIP(unittest.TestCase):
    """Smoke test for GeoIP-based country lookup (currently disabled)."""

    def test_geo_ip(self):
        # NOTE(review): this early return disables the whole test --
        # presumably because the GeoIP database/network is not available
        # in every environment (TODO confirm). Remove the return to
        # re-enable the assertions below.
        return
        from frappe.sessions import get_geo_ip_country
        self.assertEquals(get_geo_ip_country("223.29.223.255"), "India")
        self.assertEquals(get_geo_ip_country("4.18.32.80"), "United States")
        self.assertEquals(get_geo_ip_country("217.194.147.25"), "United States")
farooqsheikhpk/Aspose_Cells_Cloud | refs/heads/master | SDKs/Aspose.Cells-Cloud-SDK-for-Python/asposecellscloud/models/Font.py | 4 | #!/usr/bin/env python
class Font(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually."""

    def __init__(self):
        """Initialize every font attribute to None.

        Attributes:
          swaggerTypes (dict): attribute name -> attribute type.
          attributeMap (dict): attribute name -> json key in definition.
        """
        self.swaggerTypes = {
            'Color': 'Color',
            'DoubleSize': 'float',
            'IsBold': 'bool',
            'IsItalic': 'bool',
            'IsStrikeout': 'bool',
            'IsSubscript': 'bool',
            'IsSuperscript': 'bool',
            'Name': 'str',
            'Size': 'int',
            'Underline': 'str'
        }
        # Every JSON key is identical to its attribute name, so derive the
        # map instead of repeating the literal list.
        self.attributeMap = {name: name for name in self.swaggerTypes}
        # All attributes start out unset.
        for name in self.swaggerTypes:
            setattr(self, name, None)
Mlieou/lXXtcode | refs/heads/master | leetcode/python/ex_642.py | 3 | class TrieNode(object):
    def __init__(self):
        # Maps each character to the child TrieNode reached via that edge.
        self.child = dict()
        # True when the path from the root to this node spells an inserted word.
        self.is_word = False
class Trie(object):
    """Prefix tree supporting insert, exact search, and prefix listing."""

    def __init__(self):
        self.root = TrieNode()

    def insert(self, word):
        """Insert *word*, creating intermediate nodes as needed."""
        node = self.root
        for ch in word:
            node = node.child.setdefault(ch, TrieNode())
        node.is_word = True

    def search(self, word):
        """Return True iff *word* was previously inserted (exact match)."""
        node = self.root
        for ch in word:
            node = node.child.get(ch)
            if node is None:
                return False
        return node.is_word

    def startsWith(self, prefix):
        """Return every stored word beginning with *prefix* (or [])."""
        node = self.root
        for ch in prefix:
            if ch not in node.child:
                return []
            node = node.child[ch]
        words = []
        self.findWord(words, prefix, node)
        return words

    def findWord(self, res, prefix, node):
        """Depth-first: append to *res* every word in the subtree of *node*."""
        if node.is_word:
            res.append(prefix)
        for ch, nxt in node.child.items():
            self.findWord(res, prefix + ch, nxt)
class AutocompleteSystem(object):
    # Class-level default; each keystroke rebinds an instance attribute.
    prefix = ''

    def __init__(self, sentences, times):
        """
        :type sentences: List[str]
        :type times: List[int]
        """
        self.trie = Trie()
        self.count = {}
        # sentences and times are parallel lists of historical inputs.
        for sentence, freq in zip(sentences, times):
            self.trie.insert(sentence)
            self.count[sentence] = freq

    def input(self, c):
        """
        :type c: str
        :rtype: List[str]
        """
        if c == '#':
            # End of sentence: record it (inserting into the trie if new),
            # bump its frequency, and reset the typing buffer.
            if not self.trie.search(self.prefix):
                self.trie.insert(self.prefix)
            self.count[self.prefix] = self.count.get(self.prefix, 0) + 1
            self.prefix = ''
            return []
        # Accumulate the keystroke and rank matches by (frequency desc,
        # lexicographic asc), returning the top three.
        self.prefix += c
        matches = self.trie.startsWith(self.prefix)
        matches.sort(key=lambda s: (-self.count[s], s))
        return matches[:3]
# Your AutocompleteSystem object will be instantiated and called as such:
# obj = AutocompleteSystem(sentences, times)
# param_1 = obj.input(c) |
NetApp/manila | refs/heads/master | manila/hacking/checks.py | 1 | # Copyright (c) 2012, Cloudscaling
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import re
import six
import pep8
"""
Guidelines for writing new hacking checks
- Use only for Manila specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range M3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the M3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to manila/tests/test_hacking.py
"""
# Files already known to import the _ translation function; populated
# lazily by check_explicit_underscore_import() as files are scanned.
UNDERSCORE_IMPORT_FILES = []

# Log calls whose message is a bare (untranslated) string literal.
log_translation = re.compile(
    r"(.)*LOG\.(audit|error|info|critical|exception)\(\s*('|\")")
# Log calls that must use a level-specific translation marker
# (_LC/_LE/_LI/_LW respectively).
log_translation_LC = re.compile(
    r"(.)*LOG\.(critical)\(\s*(_\(|'|\")")
log_translation_LE = re.compile(
    r"(.)*LOG\.(error|exception)\(\s*(_\(|'|\")")
log_translation_LI = re.compile(
    r"(.)*LOG\.(info)\(\s*(_\(|'|\")")
log_translation_LW = re.compile(
    r"(.)*LOG\.(warning|warn)\(\s*(_\(|'|\")")
# Log calls already wrapped in the plain _() translation helper.
translated_log = re.compile(
    r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)"
    "\(\s*_\(\s*('|\")")
# Any _("...") translation call outside of logging.
string_translation = re.compile(r"[^_]*_\(\s*('|\")")
# Import styles that bring the _ translation function into scope.
underscore_import_check = re.compile(r"(.)*import _$")
underscore_import_check_multi = re.compile(r"(.)*import (.)*_, (.)*")
# We need this for cases where they have created their own _ function.
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
# Deprecated 'from oslo.x import ...' namespace imports.
oslo_namespace_imports = re.compile(r"from[\s]*oslo[.](.*)")
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
assert_no_xrange_re = re.compile(r"\s*xrange\s*\(")
assert_True = re.compile(r".*assertEqual\(True, .*\)")
assert_None = re.compile(r".*assertEqual\(None, .*\)")
class BaseASTChecker(ast.NodeVisitor):
    """Provides a simple framework for writing AST-based checks.

    Subclasses implement visit_* methods like any other AST visitor and
    call ``self.add_error(offending_node)`` when they detect a problem;
    the error location is taken from the node itself.

    Subclasses should also provide a class variable named CHECK_DESC to
    be used for the human readable error message.
    """

    CHECK_DESC = 'No check message specified'

    def __init__(self, tree, filename):
        """This object is created automatically by pep8.

        :param tree: an AST tree
        :param filename: name of the file being analyzed
                         (ignored by our checks)
        """
        self._tree = tree
        self._errors = []

    def run(self):
        """Called automatically by pep8; returns the collected errors."""
        self.visit(self._tree)
        return self._errors

    def add_error(self, node, message=None):
        """Record an error for *node* in pep8's (line, col, msg, cls) format."""
        self._errors.append((node.lineno, node.col_offset,
                             message or self.CHECK_DESC, self.__class__))

    def _check_call_names(self, call_node, names):
        # True only for a direct call to a bare name listed in *names*.
        if isinstance(call_node, ast.Call):
            if isinstance(call_node.func, ast.Name):
                if call_node.func.id in names:
                    return True
        return False
def no_translate_debug_logs(logical_line, filename):
    """M319: debug-level log messages must not be translated.

    As per our translation policy,
    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    we shouldn't translate debug level logs.

    * This check assumes that 'LOG' is a logger.
    * The filename argument allows enforcing this per-directory later.
    """
    if logical_line.startswith("LOG.debug(_("):
        yield (0, "M319 Don't translate debug level logs")
class CheckLoggingFormatArgs(BaseASTChecker):
    """Check for improper use of logging format arguments.

    LOG.debug("Volume %s caught fire and is at %d degrees C and climbing.",
              ('volume1', 500))

    The format arguments should not be a tuple as it is easy to miss.
    """

    CHECK_DESC = 'M310 Log method arguments should not be a tuple.'
    # Logger method names whose arguments are inspected.
    LOG_METHODS = [
        'debug', 'info',
        'warn', 'warning',
        'error', 'exception',
        'critical', 'fatal',
        'trace', 'log'
    ]

    def _find_name(self, node):
        """Return the fully qualified name or a Name or Attribute."""
        if isinstance(node, ast.Name):
            return node.id
        elif (isinstance(node, ast.Attribute)
                and isinstance(node.value, (ast.Name, ast.Attribute))):
            # Recursively build a dotted name like 'a.b.c'.
            method_name = node.attr
            obj_name = self._find_name(node.value)
            if obj_name is None:
                return None
            return obj_name + '.' + method_name
        elif isinstance(node, six.string_types):
            return node
        else:  # could be Subscript, Call or many more
            return None

    def visit_Call(self, node):
        """Look for the 'LOG.*' calls."""
        # extract the obj_name and method_name
        if isinstance(node.func, ast.Attribute):
            obj_name = self._find_name(node.func.value)
            if isinstance(node.func.value, ast.Name):
                method_name = node.func.attr
            elif isinstance(node.func.value, ast.Attribute):
                obj_name = self._find_name(node.func.value)
                method_name = node.func.attr
            else:  # could be Subscript, Call or many more
                return super(CheckLoggingFormatArgs, self).generic_visit(node)

            # obj must be a logger instance and method must be a log helper
            if (obj_name != 'LOG'
                    or method_name not in self.LOG_METHODS):
                return super(CheckLoggingFormatArgs, self).generic_visit(node)

            # the call must have arguments
            if not len(node.args):
                return super(CheckLoggingFormatArgs, self).generic_visit(node)

            # any argument should not be a tuple
            for arg in node.args:
                if isinstance(arg, ast.Tuple):
                    self.add_error(arg)

        return super(CheckLoggingFormatArgs, self).generic_visit(node)
def validate_log_translations(logical_line, physical_line, filename):
    """M327-M331: log messages must use the proper translation helpers."""
    # Translations are not required in the test and tempest directories.
    exempt_dirs = ("manila/tests", "manila_tempest_tests", "contrib/tempest")
    if any(d in filename for d in exempt_dirs):
        return
    if pep8.noqa(physical_line):
        return
    # (regex, message) pairs checked in priority order.
    checks = (
        (log_translation_LC,
         "M327: LOG.critical messages require translations `_LC()`!"),
        (log_translation_LE,
         "M328: LOG.error and LOG.exception messages require translations "
         "`_LE()`!"),
        (log_translation_LI,
         "M329: LOG.info messages require translations `_LI()`!"),
        (log_translation_LW,
         "M330: LOG.warning messages require translations `_LW()`!"),
        (log_translation,
         "M331: Log messages require translations!"),
    )
    for regex, msg in checks:
        if regex.match(logical_line):
            yield (0, msg)
def check_explicit_underscore_import(logical_line, filename):
    """Check for explicit import of the _ function

    We need to ensure that any files that are using the _() function
    to translate logs are explicitly importing the _ function. We
    can't trust unit test to catch whether the import has been
    added so we need to check for it here.
    """

    # Build a list of the files that have _ imported. No further
    # checking needed once it is found.
    # NOTE: UNDERSCORE_IMPORT_FILES is module-level mutable state shared
    # across every line of every file in one flake8 run.
    if filename in UNDERSCORE_IMPORT_FILES:
        pass
    elif (underscore_import_check.match(logical_line) or
          underscore_import_check_multi.match(logical_line) or
          custom_underscore_check.match(logical_line)):
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or
          string_translation.match(logical_line)):
        yield(0, "M323: Found use of _() without explicit import of _ !")
class CheckForStrUnicodeExc(BaseASTChecker):
    """Checks for the use of str() or unicode() on an exception.

    This currently only handles the case where str() or unicode()
    is used in the scope of an exception handler. If the exception
    is passed into a function, returned from an assertRaises, or
    used on an exception created in the same scope, this does not
    catch it.
    """

    CHECK_DESC = ('M325 str() and unicode() cannot be used on an '
                  'exception. Remove or use six.text_type()')

    def __init__(self, tree, filename):
        super(CheckForStrUnicodeExc, self).__init__(tree, filename)
        # Stack of names bound by enclosing 'except ... as name' handlers.
        self.name = []
        # Call nodes already reported, to avoid duplicate errors.
        self.already_checked = []

    # Python 2
    def visit_TryExcept(self, node):
        for handler in node.handlers:
            if handler.name:
                self.name.append(handler.name.id)
                super(CheckForStrUnicodeExc, self).generic_visit(node)
                # Pop the handler's name once its subtree has been visited.
                self.name = self.name[:-1]
            else:
                super(CheckForStrUnicodeExc, self).generic_visit(node)

    # Python 3
    def visit_ExceptHandler(self, node):
        if node.name:
            self.name.append(node.name)
            super(CheckForStrUnicodeExc, self).generic_visit(node)
            self.name = self.name[:-1]
        else:
            super(CheckForStrUnicodeExc, self).generic_visit(node)

    def visit_Call(self, node):
        if self._check_call_names(node, ['str', 'unicode']):
            if node not in self.already_checked:
                self.already_checked.append(node)
                # Only flag str()/unicode() applied to a name bound by an
                # enclosing exception handler.
                if isinstance(node.args[0], ast.Name):
                    if node.args[0].id in self.name:
                        self.add_error(node.args[0])
        super(CheckForStrUnicodeExc, self).generic_visit(node)
class CheckForTransAdd(BaseASTChecker):
    """Checks for the use of concatenation on a translated string.

    Translations should not be concatenated with other strings, but
    should instead include the string being added to the translated
    string to give the translators the most information.
    """

    CHECK_DESC = ('M326 Translated messages cannot be concatenated. '
                  'String should be included in translated message.')

    # Translation helper names whose results must not be concatenated.
    TRANS_FUNC = ['_', '_LI', '_LW', '_LE', '_LC']

    def visit_BinOp(self, node):
        if isinstance(node.op, ast.Add):
            # Flag the first operand that is a translation call.
            for operand in (node.left, node.right):
                if self._check_call_names(operand, self.TRANS_FUNC):
                    self.add_error(operand)
                    break
        super(CheckForTransAdd, self).generic_visit(node)
def check_oslo_namespace_imports(logical_line, physical_line, filename):
    """M333: enforce oslo_* imports over the deprecated oslo.* namespace."""
    if pep8.noqa(physical_line):
        return
    if re.match(oslo_namespace_imports, logical_line):
        corrected = logical_line.replace('oslo.', 'oslo_')
        yield (0, "M333: '%s' must be used instead of '%s'." % (corrected,
                                                                logical_line))
def dict_constructor_with_list_copy(logical_line):
    """M336: prefer a dict comprehension over dict() on key-value pairs."""
    if dict_constructor_with_list_copy_re.match(logical_line):
        yield (0, "M336: Must use a dict comprehension instead of a dict"
                  " constructor with a sequence of key-value pairs.")
def no_xrange(logical_line):
    """M337: xrange() is Python 2 only; use range() instead."""
    if assert_no_xrange_re.match(logical_line):
        yield (0, "M337: Do not use xrange().")
def validate_assertTrue(logical_line):
    """M313: prefer assertTrue(value) over assertEqual(True, value)."""
    if re.match(assert_True, logical_line):
        yield (0, "M313: Unit tests should use assertTrue(value) instead"
                  " of using assertEqual(True, value).")
def validate_assertIsNone(logical_line):
    """M312: prefer assertIsNone(value) over assertEqual(None, value)."""
    if re.match(assert_None, logical_line):
        yield (0, "M312: Unit tests should use assertIsNone(value) instead"
                  " of using assertEqual(None, value).")
def factory(register):
    """Entry point used by flake8/pep8 to register every Manila check."""
    # Order matches the M3xx numbering documented at the top of the file.
    for check in (
            validate_log_translations,
            check_explicit_underscore_import,
            no_translate_debug_logs,
            CheckForStrUnicodeExc,
            CheckLoggingFormatArgs,
            CheckForTransAdd,
            check_oslo_namespace_imports,
            dict_constructor_with_list_copy,
            no_xrange,
            validate_assertTrue,
            validate_assertIsNone,
    ):
        register(check)
|
mbernasocchi/QGIS | refs/heads/master | python/plugins/processing/algs/grass7/ext/r_category.py | 38 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_category.py
-------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
from processing.tools.system import getTempFilename
from processing.algs.grass7.Grass7Utils import Grass7Utils
def checkParameterValuesBeforeExecuting(alg, parameters, context):
    """Verify that mutually exclusive category sources are not combined.

    Returns a (bool, message) tuple: (True, None) when the parameter
    combination is valid, otherwise (False, translated error message).
    """
    rules = alg.parameterAsString(parameters, 'rules', context)
    txtrules = alg.parameterAsString(parameters, 'txtrules', context)
    raster = alg.parameterAsString(parameters, 'raster', context)

    # A rules file and inline rules cannot both be given.
    if rules and txtrules:
        return False, alg.tr("You need to set either a rules file or write directly the rules!")
    # Rules (either form) cannot be combined with a copy-from raster.
    if raster and (rules or txtrules):
        return False, alg.tr("You need to set either rules or a raster from which to copy categories!")
    return True, None
def processInputs(alg, parameters, context, feedback):
    """Load input layers, forcing a real import for the 'raster' source."""
    # A raster we copy categories from must be imported with r.in.gdal
    # rather than linked with r.external.
    if alg.parameterAsString(parameters, 'raster', context):
        alg.loadRasterLayerFromParameter(
            'raster', parameters, context, False, None)
    alg.loadRasterLayerFromParameter('map', parameters, context)
    alg.postInputs(context)
def processCommand(alg, parameters, context, feedback):
    """Spill inline rules to a temporary file before running r.category."""
    inline_rules = alg.parameterAsString(parameters, 'txtrules', context)
    if inline_rules:
        # r.category only reads rules from a file, so write the inline
        # text to a temp file and swap the parameters accordingly.
        rules_path = getTempFilename()
        with open(rules_path, "w") as rules_file:
            rules_file.write(inline_rules)
        alg.removeParameter('txtrules')
        parameters['rules'] = rules_path
    alg.processCommand(parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
    """Export the 'map' layer, keeping all bands and its color table."""
    create_opt = alg.parameterAsString(
        parameters, alg.GRASS_RASTER_FORMAT_OPT, context)
    meta_opt = alg.parameterAsString(
        parameters, alg.GRASS_RASTER_FORMAT_META, context)
    out_path = alg.parameterAsOutputLayer(parameters, 'output', context)
    out_format = Grass7Utils.getRasterFormatFromFilename(out_path)
    alg.exportRasterLayer(alg.exportedLayers['map'], out_path, True,
                          out_format, create_opt, meta_opt)
|
ryfeus/lambda-packs | refs/heads/master | Pandas_numpy/source/pandas/core/reshape/api.py | 14 | # flake8: noqa
from pandas.core.reshape.concat import concat
from pandas.core.reshape.reshape import melt
from pandas.core.reshape.merge import (
merge, ordered_merge, merge_ordered, merge_asof)
from pandas.core.reshape.pivot import pivot_table, crosstab
from pandas.core.reshape.tile import cut, qcut
|
CodeDJ/qt5-hidpi | refs/heads/master | qt/qtwebkit/Source/ThirdParty/gtest/test/run_tests_util.py | 228 | # Copyright 2008 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides facilities for running SCons-built Google Test/Mock tests."""
import optparse
import os
import re
import sets
import sys
try:
  # subprocess module is a preferable way to invoke subprocesses but it may
  # not be available on MacOS X 10.4.
# Suppresses the 'Import not at the top of the file' lint complaint.
# pylint: disable-msg=C6204
import subprocess
except ImportError:
subprocess = None
HELP_MSG = """Runs the specified tests for %(proj)s.
SYNOPSIS
run_tests.py [OPTION]... [BUILD_DIR]... [TEST]...
DESCRIPTION
Runs the specified tests (either binary or Python), and prints a
summary of the results. BUILD_DIRS will be used to search for the
binaries. If no TESTs are specified, all binary tests found in
BUILD_DIRs and all Python tests found in the directory test/ (in the
%(proj)s root) are run.
TEST is a name of either a binary or a Python test. A binary test is
an executable file named *_test or *_unittest (with the .exe
extension on Windows) A Python test is a script named *_test.py or
*_unittest.py.
OPTIONS
-h, --help
Print this help message.
-c CONFIGURATIONS
Specify build directories via build configurations.
CONFIGURATIONS is either a comma-separated list of build
configurations or 'all'. Each configuration is equivalent to
adding 'scons/build/<configuration>/%(proj)s/scons' to BUILD_DIRs.
Specifying -c=all is equivalent to providing all directories
listed in KNOWN BUILD DIRECTORIES section below.
-a
Equivalent to -c=all
-b
Equivalent to -c=all with the exception that the script will not
fail if some of the KNOWN BUILD DIRECTORIES do not exists; the
script will simply not run the tests there. 'b' stands for
'built directories'.
RETURN VALUE
Returns 0 if all tests are successful; otherwise returns 1.
EXAMPLES
run_tests.py
Runs all tests for the default build configuration.
run_tests.py -a
Runs all tests with binaries in KNOWN BUILD DIRECTORIES.
run_tests.py -b
Runs all tests in KNOWN BUILD DIRECTORIES that have been
built.
run_tests.py foo/
Runs all tests in the foo/ directory and all Python tests in
the directory test. The Python tests are instructed to look
for binaries in foo/.
run_tests.py bar_test.exe test/baz_test.exe foo/ bar/
Runs foo/bar_test.exe, bar/bar_test.exe, foo/baz_test.exe, and
bar/baz_test.exe.
run_tests.py foo bar test/foo_test.py
Runs test/foo_test.py twice instructing it to look for its
test binaries in the directories foo and bar,
correspondingly.
KNOWN BUILD DIRECTORIES
run_tests.py knows about directories where the SCons build script
deposits its products. These are the directories where run_tests.py
will be looking for its binaries. Currently, %(proj)s's SConstruct file
defines them as follows (the default build directory is the first one
listed in each group):
On Windows:
<%(proj)s root>/scons/build/win-dbg8/%(proj)s/scons/
<%(proj)s root>/scons/build/win-opt8/%(proj)s/scons/
On Mac:
<%(proj)s root>/scons/build/mac-dbg/%(proj)s/scons/
<%(proj)s root>/scons/build/mac-opt/%(proj)s/scons/
On other platforms:
<%(proj)s root>/scons/build/dbg/%(proj)s/scons/
<%(proj)s root>/scons/build/opt/%(proj)s/scons/"""
# Platform detection used to pick build configurations and test-name
# matching rules below.
IS_WINDOWS = os.name == 'nt'
IS_MAC = os.name == 'posix' and os.uname()[0] == 'Darwin'
IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]

# Definition of CONFIGS must match that of the build directory names in the
# SConstruct script. The first list item is the default build configuration.
if IS_WINDOWS:
  CONFIGS = ('win-dbg8', 'win-opt8')
elif IS_MAC:
  CONFIGS = ('mac-dbg', 'mac-opt')
else:
  CONFIGS = ('dbg', 'opt')

if IS_WINDOWS or IS_CYGWIN:
  # Case-insensitive file systems: match test names ignoring case and
  # accept an .exe suffix on binaries.
  PYTHON_TEST_REGEX = re.compile(r'_(unit)?test\.py$', re.IGNORECASE)
  BINARY_TEST_REGEX = re.compile(r'_(unit)?test(\.exe)?$', re.IGNORECASE)
  BINARY_TEST_SEARCH_REGEX = re.compile(r'_(unit)?test\.exe$', re.IGNORECASE)
else:
  PYTHON_TEST_REGEX = re.compile(r'_(unit)?test\.py$')
  BINARY_TEST_REGEX = re.compile(r'_(unit)?test$')
  BINARY_TEST_SEARCH_REGEX = BINARY_TEST_REGEX
def _GetGtestBuildDir(injected_os, script_dir, config):
"""Calculates path to the Google Test SCons build directory."""
return injected_os.path.normpath(injected_os.path.join(script_dir,
'scons/build',
config,
'gtest/scons'))
def _GetConfigFromBuildDir(build_dir):
  """Extracts the configuration name from the build directory."""

  # We don't want to depend on build_dir containing the correct path
  # separators.
  m = re.match(r'.*[\\/]([^\\/]+)[\\/][^\\/]+[\\/]scons[\\/]?$', build_dir)
  if m:
    return m.group(1)
  else:
    # Python 2 print-to-stderr: report the bad directory and return an
    # empty config name so the caller can keep going.
    print >>sys.stderr, ('%s is an invalid build directory that does not '
                         'correspond to any configuration.' % (build_dir,))
    return ''
# All paths in this script are either absolute or relative to the current
# working directory, unless otherwise specified.
class TestRunner(object):
  """Provides facilities for running Python and binary tests for Google Test."""

  def __init__(self,
               script_dir,
               build_dir_var_name='GTEST_BUILD_DIR',
               injected_os=os,
               injected_subprocess=subprocess,
               injected_build_dir_finder=_GetGtestBuildDir):
    """Initializes a TestRunner instance.

    Args:
      script_dir:                File path to the calling script.
      build_dir_var_name:        Name of the env variable used to pass
                                 the build directory path to the invoked
                                 tests.
      injected_os:               standard os module or a mock/stub for
                                 testing.
      injected_subprocess:       standard subprocess module or a mock/stub
                                 for testing.
      injected_build_dir_finder: function that determines the path to
                                 the build directory.
    """

    self.os = injected_os
    self.subprocess = injected_subprocess
    self.build_dir_finder = injected_build_dir_finder
    self.build_dir_var_name = build_dir_var_name
    self.script_dir = script_dir
  def _GetBuildDirForConfig(self, config):
    """Returns the build directory for a given configuration."""

    # Delegates to the injected finder so tests can stub the file system.
    return self.build_dir_finder(self.os, self.script_dir, config)
  def _Run(self, args):
    """Runs the executable with given args (args[0] is the executable name).

    Args:
      args: Command line arguments for the process.

    Returns:
      Process's exit code if it exits normally, or -signal if the process is
      killed by a signal.
    """

    if self.subprocess:
      return self.subprocess.Popen(args).wait()
    else:
      # Fallback for platforms without the subprocess module (see the
      # guarded import at the top of this file).
      return self.os.spawnv(self.os.P_WAIT, args[0], args)
  def _RunBinaryTest(self, test):
    """Runs the binary test given its path.

    Args:
      test: Path to the test binary.

    Returns:
      Process's exit code if it exits normally, or -signal if the process is
      killed by a signal.
    """

    # Binary tests are invoked directly with no extra arguments.
    return self._Run([test])
  def _RunPythonTest(self, test, build_dir):
    """Runs the Python test script with the specified build directory.

    Args:
      test: Path to the test's Python script.
      build_dir: Path to the directory where the test binary is to be found.

    Returns:
      Process's exit code if it exits normally, or -signal if the process is
      killed by a signal.
    """

    old_build_dir = self.os.environ.get(self.build_dir_var_name)
    try:
      self.os.environ[self.build_dir_var_name] = build_dir

      # If this script is run on a Windows machine that has no association
      # between the .py extension and a python interpreter, simply passing
      # the script name into subprocess.Popen/os.spawn will not work.
      print 'Running %s . . .' % (test,)
      return self._Run([sys.executable, test])

    finally:
      # Restore the environment variable so state does not leak into
      # subsequently run tests.
      if old_build_dir is None:
        del self.os.environ[self.build_dir_var_name]
      else:
        self.os.environ[self.build_dir_var_name] = old_build_dir
  def _FindFilesByRegex(self, directory, regex):
    """Returns files in a directory whose names match a regular expression.

    Args:
      directory: Path to the directory to search for files.
      regex: Regular expression to filter file names.

    Returns:
      The list of the paths to the files in the directory.
    """

    # Note: re.search matches anywhere in the name, not just at the start.
    return [self.os.path.join(directory, file_name)
            for file_name in self.os.listdir(directory)
            if re.search(regex, file_name)]
  # TODO(vladl@google.com): Implement parsing of scons/SConscript to run all
  # tests defined there when no tests are specified.
  # TODO(vladl@google.com): Update the docstring after the code is changed to
  # try to test all builds defined in scons/SConscript.
  def GetTestsToRun(self,
                    args,
                    named_configurations,
                    built_configurations,
                    available_configurations=CONFIGS,
                    python_tests_to_skip=None):
    """Determines what tests should be run.

    Args:
      args: The list of non-option arguments from the command line.
      named_configurations: The list of configurations specified via -c or -a.
      built_configurations: True if -b has been specified.
      available_configurations: a list of configurations available on the
                        current platform, injectable for testing.
      python_tests_to_skip: a collection of (configuration, python test name)s
                        that need to be skipped.

    Returns:
      A tuple with 2 elements: the list of Python tests to run and the list of
      binary tests to run.
    """

    if named_configurations == 'all':
      named_configurations = ','.join(available_configurations)

    normalized_args = [self.os.path.normpath(arg) for arg in args]

    # A final list of build directories which will be searched for the test
    # binaries. First, add directories specified directly on the command
    # line.
    build_dirs = filter(self.os.path.isdir, normalized_args)

    # Adds build directories specified via their build configurations using
    # the -c or -a options.
    if named_configurations:
      build_dirs += [self._GetBuildDirForConfig(config)
                     for config in named_configurations.split(',')]

    # Adds KNOWN BUILD DIRECTORIES if -b is specified.
    if built_configurations:
      build_dirs += [self._GetBuildDirForConfig(config)
                     for config in available_configurations
                     if self.os.path.isdir(self._GetBuildDirForConfig(config))]

    # If no directories were specified either via -a, -b, -c, or directly, use
    # the default configuration.
    elif not build_dirs:
      build_dirs = [self._GetBuildDirForConfig(available_configurations[0])]

    # Makes sure there are no duplications.
    # NOTE: the sets module (and the bare filter() above) reflect the
    # Python 2 heritage of this script.
    build_dirs = sets.Set(build_dirs)

    errors_found = False
    listed_python_tests = []  # All Python tests listed on the command line.
    listed_binary_tests = []  # All binary tests listed on the command line.

    test_dir = self.os.path.normpath(self.os.path.join(self.script_dir, 'test'))

    # Sifts through non-directory arguments fishing for any Python or binary
    # tests and detecting errors.
    for argument in sets.Set(normalized_args) - build_dirs:
      if re.search(PYTHON_TEST_REGEX, argument):
        python_path = self.os.path.join(test_dir,
                                        self.os.path.basename(argument))
        if self.os.path.isfile(python_path):
          listed_python_tests.append(python_path)
        else:
          sys.stderr.write('Unable to find Python test %s' % argument)
          errors_found = True
      elif re.search(BINARY_TEST_REGEX, argument):
        # This script also accepts binary test names prefixed with test/ for
        # the convenience of typing them (can use path completions in the
        # shell). Strips test/ prefix from the binary test names.
        listed_binary_tests.append(self.os.path.basename(argument))
      else:
        sys.stderr.write('%s is neither test nor build directory' % argument)
        errors_found = True

    if errors_found:
      return None

    user_has_listed_tests = listed_python_tests or listed_binary_tests

    if user_has_listed_tests:
      selected_python_tests = listed_python_tests
    else:
      selected_python_tests = self._FindFilesByRegex(test_dir,
                                                     PYTHON_TEST_REGEX)

    # TODO(vladl@google.com): skip unbuilt Python tests when -b is specified.
    python_test_pairs = []
    for directory in build_dirs:
      for test in selected_python_tests:
        config = _GetConfigFromBuildDir(directory)
        file_name = os.path.basename(test)
        if python_tests_to_skip and (config, file_name) in python_tests_to_skip:
          print ('NOTE: %s is skipped for configuration %s, as it does not '
                 'work there.' % (file_name, config))
        else:
          python_test_pairs.append((directory, test))

    binary_test_pairs = []
    for directory in build_dirs:
      if user_has_listed_tests:
        binary_test_pairs.extend(
            [(directory, self.os.path.join(directory, test))
             for test in listed_binary_tests])
      else:
        tests = self._FindFilesByRegex(directory, BINARY_TEST_SEARCH_REGEX)
        binary_test_pairs.extend([(directory, test) for test in tests])

    return (python_test_pairs, binary_test_pairs)
def RunTests(self, python_tests, binary_tests):
  """Runs Python and binary tests and reports results to the standard output.

  Args:
    python_tests: List of Python tests to run in the form of tuples
        (build directory, Python test script).
    binary_tests: List of binary tests to run in the form of tuples
        (build directory, binary file).

  Returns:
    The exit code the program should pass into sys.exit().
  """
  if python_tests or binary_tests:
    # Each entry is a (build directory, test name, succeeded?) triple.
    results = []
    for directory, test in python_tests:
      results.append((directory,
                      test,
                      self._RunPythonTest(test, directory) == 0))
    for directory, test in binary_tests:
      # NOTE(review): binary tests are recorded by basename only, while
      # Python tests keep their full path in the failure report.
      results.append((directory,
                      self.os.path.basename(test),
                      self._RunBinaryTest(test) == 0))

    failed = [(directory, test)
              for (directory, test, success) in results
              if not success]
    print
    print '%d tests run.' % len(results)
    if failed:
      print 'The following %d tests failed:' % len(failed)
      for (directory, test) in failed:
        print '%s in %s' % (test, directory)
      return 1
    else:
      print 'All tests passed!'
  else:  # No tests defined
    print 'Nothing to test - no tests specified!'

  return 0
def ParseArgs(project_name, argv=None, help_callback=None):
  """Parses the options run_tests.py uses.

  Args:
    project_name: Name substituted into the help message.
    argv: Argument list to parse; None means sys.argv[1:].
    help_callback: Optional replacement for the default -h/--help handler.

  Returns:
    The (options, args) pair produced by optparse.
  """

  # Suppresses lint warning on unused arguments.  These arguments are
  # required by optparse, even though they are unused.
  # pylint: disable-msg=W0613
  def PrintHelp(option, opt, value, parser):
    # Default -h handler: print the module help text and exit with code 1.
    print HELP_MSG % {'proj': project_name}
    sys.exit(1)

  parser = optparse.OptionParser()
  # -c <value>: stores the requested configurations string verbatim.
  parser.add_option('-c',
                    action='store',
                    dest='configurations',
                    default=None)
  # -a: equivalent to requesting configuration 'all'.
  parser.add_option('-a',
                    action='store_const',
                    dest='configurations',
                    default=None,
                    const='all')
  # -b: flag selecting only already-built configurations (name suggests
  # this; TODO confirm against HELP_MSG).
  parser.add_option('-b',
                    action='store_const',
                    dest='built_configurations',
                    default=False,
                    const=True)
  # Replaces the built-in help with ours.
  parser.remove_option('-h')
  parser.add_option('-h', '--help',
                    action='callback',
                    callback=help_callback or PrintHelp)
  return parser.parse_args(argv)
|
gdimitris/ChessPuzzlerBackend | refs/heads/master | Virtual_Environment/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py | 469 | import functools
from pip._vendor.requests.adapters import HTTPAdapter
from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper
class CacheControlAdapter(HTTPAdapter):
    """Transport adapter that adds HTTP caching to a requests session.

    GET responses are served from / stored into ``self.cache`` as decided
    by a CacheController instance.
    """

    # Methods whose successful completion invalidates the cached entry
    # for the request URL.
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None,
                 cache_etags=True,
                 controller_class=None,
                 serializer=None,
                 heuristic=None,
                 *args, **kw):
        # cache: storage backend; defaults to an in-memory DictCache.
        # heuristic: optional freshness heuristic applied before caching.
        # Remaining args are forwarded to requests' HTTPAdapter.
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic

        # The controller implements the actual caching decisions.
        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        if request.method == 'GET':
            cached_response = self.controller.cached_request(request)
            if cached_response:
                # Fresh cache hit: skip the network entirely.
                return self.build_response(request, cached_response,
                                           from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(
                self.controller.conditional_headers(request)
            )

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(self, request, response, from_cache=False):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response

        from_cache: True when *response* was served from the cache and
        must not be re-processed or re-cached here.
        """
        if not from_cache and request.method == 'GET':

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)

            else:
                # Check for any heuristics that might update headers
                # before trying to cache.
                if self.heuristic:
                    response = self.heuristic.apply(response)

                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response,
                        request,
                        response,
                    )
                )

        resp = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        """Release cache resources, then close the underlying adapter."""
        self.cache.close()
        super(CacheControlAdapter, self).close()
|
vnc-biz/openerp-server | refs/heads/master | openerp/report/pyPdf/xmp.py | 14 | import re
import datetime
import decimal
from generic import PdfObject
from xml.dom import getDOMImplementation
from xml.dom.minidom import parseString
RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
DC_NAMESPACE = "http://purl.org/dc/elements/1.1/"
XMP_NAMESPACE = "http://ns.adobe.com/xap/1.0/"
PDF_NAMESPACE = "http://ns.adobe.com/pdf/1.3/"
XMPMM_NAMESPACE = "http://ns.adobe.com/xap/1.0/mm/"
# What is the PDFX namespace, you might ask? I might ask that too. It's
# a completely undocumented namespace used to place "custom metadata"
# properties, which are arbitrary metadata properties with no semantic or
# documented meaning. Elements in the namespace are key/value-style storage,
# where the element name is the key and the content is the value. The keys
# are transformed into valid XML identifiers by substituting an invalid
# identifier character with \u2182 followed by the unicode hex ID of the
# original character. A key like "my car" is therefore "my\u21820020car".
#
# \u2182, in case you're wondering, is the unicode character
# \u{ROMAN NUMERAL TEN THOUSAND}, a straightforward and obvious choice for
# escaping characters.
#
# Intentional users of the pdfx namespace should be shot on sight. A
# custom data schema and sensical XML elements could be used instead, as is
# suggested by Adobe's own documentation on XMP (under "Extensibility of
# Schemas").
#
# Information presented here on the /pdfx/ schema is a result of limited
# reverse engineering, and does not constitute a full specification.
PDFX_NAMESPACE = "http://ns.adobe.com/pdfx/1.3/"
iso8601 = re.compile("""
(?P<year>[0-9]{4})
(-
(?P<month>[0-9]{2})
(-
(?P<day>[0-9]+)
(T
(?P<hour>[0-9]{2}):
(?P<minute>[0-9]{2})
(:(?P<second>[0-9]{2}(.[0-9]+)?))?
(?P<tzd>Z|[-+][0-9]{2}:[0-9]{2})
)?
)?
)?
""", re.VERBOSE)
##
# An object that represents Adobe XMP metadata.
class XmpInformation(PdfObject):
    """An object that represents Adobe XMP metadata.

    Property values are parsed lazily out of the RDF DOM and memoized in
    ``self.cache`` (a dict keyed by namespace, then by element name).
    """

    def __init__(self, stream):
        # stream: the PDF stream object holding the raw XMP packet.
        self.stream = stream
        docRoot = parseString(self.stream.getData())
        self.rdfRoot = docRoot.getElementsByTagNameNS(RDF_NAMESPACE, "RDF")[0]
        self.cache = {}

    def writeToStream(self, stream, encryption_key):
        """Delegate serialization to the underlying metadata stream."""
        self.stream.writeToStream(stream, encryption_key)

    def getElement(self, aboutUri, namespace, name):
        """Yield each attribute node, then each child element, matching
        (namespace, name) on rdf:Description nodes with the given about URI.

        Attribute nodes come first, which is what lets _getter_single
        prefer attribute form over element form.
        """
        for desc in self.rdfRoot.getElementsByTagNameNS(RDF_NAMESPACE, "Description"):
            if desc.getAttributeNS(RDF_NAMESPACE, "about") == aboutUri:
                attr = desc.getAttributeNodeNS(namespace, name)
                if attr != None:
                    yield attr
                for element in desc.getElementsByTagNameNS(namespace, name):
                    yield element

    def getNodesInNamespace(self, aboutUri, namespace):
        """Yield every attribute and direct child node in *namespace*."""
        for desc in self.rdfRoot.getElementsByTagNameNS(RDF_NAMESPACE, "Description"):
            if desc.getAttributeNS(RDF_NAMESPACE, "about") == aboutUri:
                for i in range(desc.attributes.length):
                    attr = desc.attributes.item(i)
                    if attr.namespaceURI == namespace:
                        yield attr
                for child in desc.childNodes:
                    if child.namespaceURI == namespace:
                        yield child

    def _getText(self, element):
        # Concatenate the direct text children of *element*.
        text = ""
        for child in element.childNodes:
            if child.nodeType == child.TEXT_NODE:
                text += child.data
        return text

    # The _converter_* and _getter_* functions below deliberately take no
    # `self`: they are plain functions called while the class body is being
    # evaluated, to build the property objects declared further down.

    def _converter_string(value):
        # Identity converter for plain text values.
        return value

    def _converter_date(value):
        # Parse a (possibly partial) ISO 8601 timestamp; missing date parts
        # default to 1 and missing time parts to 0.
        m = iso8601.match(value)
        year = int(m.group("year"))
        month = int(m.group("month") or "1")
        day = int(m.group("day") or "1")
        hour = int(m.group("hour") or "0")
        minute = int(m.group("minute") or "0")
        second = decimal.Decimal(m.group("second") or "0")
        seconds = second.to_integral(decimal.ROUND_FLOOR)
        # NOTE(review): despite its name this is a *microsecond* count, and
        # both it and `seconds` are Decimal instances when handed to the
        # datetime constructor below — confirm this is accepted on the
        # Python versions targeted here.
        milliseconds = (second - seconds) * 1000000
        tzd = m.group("tzd") or "Z"
        dt = datetime.datetime(year, month, day, hour, minute, seconds, milliseconds)
        if tzd != "Z":
            # Negate the offset so that *adding* the timedelta converts the
            # local timestamp into UTC.
            tzd_hours, tzd_minutes = [int(x) for x in tzd.split(":")]
            tzd_hours *= -1
            if tzd_hours < 0:
                tzd_minutes *= -1
            dt = dt + datetime.timedelta(hours=tzd_hours, minutes=tzd_minutes)
        return dt
    _test_converter_date = staticmethod(_converter_date)

    def _getter_bag(namespace, name, converter):
        # Build a property getter for rdf:Bag (unordered array) values.
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            # NOTE(review): a falsy cached value (e.g. an empty list) is
            # treated as a miss and re-parsed on every access; the same
            # pattern appears in all the getters below.
            if cached:
                return cached
            retval = []
            for element in self.getElement("", namespace, name):
                bags = element.getElementsByTagNameNS(RDF_NAMESPACE, "Bag")
                if len(bags):
                    for bag in bags:
                        for item in bag.getElementsByTagNameNS(RDF_NAMESPACE, "li"):
                            value = self._getText(item)
                            value = converter(value)
                            retval.append(value)
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = retval
            return retval
        return get

    def _getter_seq(namespace, name, converter):
        # Build a property getter for rdf:Seq (ordered array) values;
        # falls back to the element's own text when no Seq child exists.
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            retval = []
            for element in self.getElement("", namespace, name):
                seqs = element.getElementsByTagNameNS(RDF_NAMESPACE, "Seq")
                if len(seqs):
                    for seq in seqs:
                        for item in seq.getElementsByTagNameNS(RDF_NAMESPACE, "li"):
                            value = self._getText(item)
                            value = converter(value)
                            retval.append(value)
                else:
                    value = converter(self._getText(element))
                    retval.append(value)
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = retval
            return retval
        return get

    def _getter_langalt(namespace, name, converter):
        # Build a property getter for rdf:Alt values keyed by xml:lang;
        # bare element text is stored under the "x-default" key.
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            retval = {}
            for element in self.getElement("", namespace, name):
                alts = element.getElementsByTagNameNS(RDF_NAMESPACE, "Alt")
                if len(alts):
                    for alt in alts:
                        for item in alt.getElementsByTagNameNS(RDF_NAMESPACE, "li"):
                            value = self._getText(item)
                            value = converter(value)
                            retval[item.getAttribute("xml:lang")] = value
                else:
                    retval["x-default"] = converter(self._getText(element))
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = retval
            return retval
        return get

    def _getter_single(namespace, name, converter):
        # Build a property getter for single-valued properties; the first
        # attribute or element found wins (attributes are yielded first).
        def get(self):
            cached = self.cache.get(namespace, {}).get(name)
            if cached:
                return cached
            value = None
            for element in self.getElement("", namespace, name):
                if element.nodeType == element.ATTRIBUTE_NODE:
                    value = element.nodeValue
                else:
                    value = self._getText(element)
                break
            if value != None:
                value = converter(value)
            ns_cache = self.cache.setdefault(namespace, {})
            ns_cache[name] = value
            return value
        return get

    ##
    # Contributors to the resource (other than the authors).  An unsorted
    # array of names.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_contributor = property(_getter_bag(DC_NAMESPACE, "contributor", _converter_string))

    ##
    # Text describing the extent or scope of the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_coverage = property(_getter_single(DC_NAMESPACE, "coverage", _converter_string))

    ##
    # A sorted array of names of the authors of the resource, listed in order
    # of precedence.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_creator = property(_getter_seq(DC_NAMESPACE, "creator", _converter_string))

    ##
    # A sorted array of dates (datetime.datetime instances) of signifigance to
    # the resource.  The dates and times are in UTC.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_date = property(_getter_seq(DC_NAMESPACE, "date", _converter_date))

    ##
    # A language-keyed dictionary of textual descriptions of the content of the
    # resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_description = property(_getter_langalt(DC_NAMESPACE, "description", _converter_string))

    ##
    # The mime-type of the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_format = property(_getter_single(DC_NAMESPACE, "format", _converter_string))

    ##
    # Unique identifier of the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_identifier = property(_getter_single(DC_NAMESPACE, "identifier", _converter_string))

    ##
    # An unordered array specifying the languages used in the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_language = property(_getter_bag(DC_NAMESPACE, "language", _converter_string))

    ##
    # An unordered array of publisher names.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_publisher = property(_getter_bag(DC_NAMESPACE, "publisher", _converter_string))

    ##
    # An unordered array of text descriptions of relationships to other
    # documents.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_relation = property(_getter_bag(DC_NAMESPACE, "relation", _converter_string))

    ##
    # A language-keyed dictionary of textual descriptions of the rights the
    # user has to this resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_rights = property(_getter_langalt(DC_NAMESPACE, "rights", _converter_string))

    ##
    # Unique identifier of the work from which this resource was derived.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_source = property(_getter_single(DC_NAMESPACE, "source", _converter_string))

    ##
    # An unordered array of descriptive phrases or keywrods that specify the
    # topic of the content of the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_subject = property(_getter_bag(DC_NAMESPACE, "subject", _converter_string))

    ##
    # A language-keyed dictionary of the title of the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_title = property(_getter_langalt(DC_NAMESPACE, "title", _converter_string))

    ##
    # An unordered array of textual descriptions of the document type.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    dc_type = property(_getter_bag(DC_NAMESPACE, "type", _converter_string))

    ##
    # An unformatted text string representing document keywords.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    pdf_keywords = property(_getter_single(PDF_NAMESPACE, "Keywords", _converter_string))

    ##
    # The PDF file version, for example 1.0, 1.3.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    pdf_pdfversion = property(_getter_single(PDF_NAMESPACE, "PDFVersion", _converter_string))

    ##
    # The name of the tool that created the PDF document.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    pdf_producer = property(_getter_single(PDF_NAMESPACE, "Producer", _converter_string))

    ##
    # The date and time the resource was originally created.  The date and
    # time are returned as a UTC datetime.datetime object.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    xmp_createDate = property(_getter_single(XMP_NAMESPACE, "CreateDate", _converter_date))

    ##
    # The date and time the resource was last modified.  The date and time
    # are returned as a UTC datetime.datetime object.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    xmp_modifyDate = property(_getter_single(XMP_NAMESPACE, "ModifyDate", _converter_date))

    ##
    # The date and time that any metadata for this resource was last
    # changed.  The date and time are returned as a UTC datetime.datetime
    # object.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    xmp_metadataDate = property(_getter_single(XMP_NAMESPACE, "MetadataDate", _converter_date))

    ##
    # The name of the first known tool used to create the resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    xmp_creatorTool = property(_getter_single(XMP_NAMESPACE, "CreatorTool", _converter_string))

    ##
    # The common identifier for all versions and renditions of this resource.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    xmpmm_documentId = property(_getter_single(XMPMM_NAMESPACE, "DocumentID", _converter_string))

    ##
    # An identifier for a specific incarnation of a document, updated each
    # time a file is saved.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    xmpmm_instanceId = property(_getter_single(XMPMM_NAMESPACE, "InstanceID", _converter_string))

    def custom_properties(self):
        # Lazily build and memoize the pdfx custom-property dict.
        if not hasattr(self, "_custom_properties"):
            self._custom_properties = {}
            for node in self.getNodesInNamespace("", PDFX_NAMESPACE):
                key = node.localName
                while True:
                    # see documentation about PDFX_NAMESPACE earlier in file
                    idx = key.find(u"\u2182")
                    if idx == -1:
                        break
                    # NOTE(review): on Python 2, chr() only covers 0-255;
                    # escaped code points above U+00FF would need unichr —
                    # confirm against the supported interpreter versions.
                    key = key[:idx] + chr(int(key[idx+1:idx+5], base=16)) + key[idx+5:]
                if node.nodeType == node.ATTRIBUTE_NODE:
                    value = node.nodeValue
                else:
                    value = self._getText(node)
                self._custom_properties[key] = value
        return self._custom_properties

    ##
    # Retrieves custom metadata properties defined in the undocumented pdfx
    # metadata schema.
    # <p>Stability: Added in v1.12, will exist for all future v1.x releases.
    # @return Returns a dictionary of key/value items for custom metadata
    # properties.
    # (This rebinding turns the method defined above into a read-only
    # property of the same name.)
    custom_properties = property(custom_properties)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
hesam-setareh/nest-simulator | refs/heads/master | topology/pynest/tests/test_plotting.py | 9 | # -*- coding: utf-8 -*-
#
# test_plotting.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for basic topology hl_api functions.
NOTE: These tests only test whether the code runs, it does not check
whether the results produced are correct.
"""
import unittest
import nest
import nest.topology as topo
# Probe whether plotting is usable at all: matplotlib must import AND a
# figure must be creatable (DISPLAY may not be set on headless machines).
try:
    import matplotlib.pyplot as plt
    plt.figure()  # make sure we can open a window; DISPLAY may not be set
    PLOTTING_POSSIBLE = True
except:
    # Any failure simply disables the plotting tests below via skipIf.
    PLOTTING_POSSIBLE = False
@unittest.skipIf(not PLOTTING_POSSIBLE,
                 'Plotting impossible because matplotlib or display missing')
class PlottingTestCase(unittest.TestCase):
    """Smoke tests for the topology plotting helpers.

    These only verify that the plotting calls run without raising; the
    produced figures are not inspected (hence the trailing
    assertTrue(True) markers).
    """

    def test_PlotLayer(self):
        """Test plotting layer."""
        ldict = {'elements': 'iaf_psc_alpha', 'rows': 3, 'columns': 3,
                 'extent': [2., 2.], 'edge_wrap': True}
        nest.ResetKernel()
        l = topo.CreateLayer(ldict)
        topo.PlotLayer(l)

        self.assertTrue(True)

    def test_PlotTargets(self):
        """Test plotting targets."""
        ldict = {'elements': ['iaf_psc_alpha', 'iaf_psc_alpha'], 'rows': 3,
                 'columns': 3,
                 'extent': [2., 2.], 'edge_wrap': True}
        cdict = {'connection_type': 'divergent',
                 'mask': {'grid': {'rows': 2, 'columns': 2}}}
        nest.ResetKernel()
        l = topo.CreateLayer(ldict)
        # NOTE(review): ian and ipa filter on the same model name, so the
        # two lists are identical (and unused below) — confirm whether two
        # distinct neuron models were intended here.
        ian = [gid for gid in nest.GetLeaves(l)[0]
               if nest.GetStatus([gid], 'model')[0] == 'iaf_psc_alpha']
        ipa = [gid for gid in nest.GetLeaves(l)[0]
               if nest.GetStatus([gid], 'model')[0] == 'iaf_psc_alpha']

        # connect ian -> all using static_synapse
        cdict.update({'sources': {'model': 'iaf_psc_alpha'},
                      'synapse_model': 'static_synapse'})
        topo.ConnectLayers(l, l, cdict)
        # Remove the keys again so cdict can be reused for the next call.
        for k in ['sources', 'synapse_model']:
            cdict.pop(k)

        # connect ipa -> ipa using stdp_synapse
        cdict.update({'sources': {'model': 'iaf_psc_alpha'},
                      'targets': {'model': 'iaf_psc_alpha'},
                      'synapse_model': 'stdp_synapse'})
        topo.ConnectLayers(l, l, cdict)
        for k in ['sources', 'targets', 'synapse_model']:
            cdict.pop(k)

        ctr = topo.FindCenterElement(l)
        fig = topo.PlotTargets(ctr, l)
        fig.gca().set_title('Plain call')

        self.assertTrue(True)

    def test_PlotKernel(self):
        """Test plotting kernels."""
        ldict = {'elements': 'iaf_psc_alpha', 'rows': 3, 'columns': 3,
                 'extent': [2., 2.], 'edge_wrap': True}
        nest.ResetKernel()
        l = topo.CreateLayer(ldict)
        f = plt.figure()
        ctr = topo.FindCenterElement(l)

        # Circular mask with a Gaussian kernel.
        a1 = f.add_subplot(221)
        topo.PlotKernel(a1, ctr, {'circular': {'radius': 1.}},
                        {'gaussian': {'sigma': 0.2}})

        # Doughnut mask, no kernel.
        a2 = f.add_subplot(222)
        topo.PlotKernel(a2, ctr, {
            'doughnut': {'inner_radius': 0.5, 'outer_radius': 0.75}})

        # Rectangular mask, no kernel.
        a3 = f.add_subplot(223)
        topo.PlotKernel(a3, ctr, {'rectangular': {'lower_left': [-.5, -.5],
                                                  'upper_right': [0.5, 0.5]}})

        self.assertTrue(True)
def suite():
    """Build and return the suite of all PlottingTestCase tests."""
    return unittest.makeSuite(PlottingTestCase, 'test')
if __name__ == "__main__":
    # Run the suite directly, then display the figures the tests produced.
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite())
    import matplotlib.pyplot as plt
    plt.show()
|
jowinter/qemu-trustzone | refs/heads/for-mainline | tests/qemu-iotests/iotests.py | 4 | # Common utilities and Python wrappers for qemu-iotests
#
# Copyright (C) 2012 IBM Corp.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import re
import subprocess
import string
import unittest
import sys; sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'QMP'))
import qmp
import struct
# Public API of this helper module.
# Fix: a missing comma between 'test_dir' and 'qemu_img' previously fused
# them into the single string 'test_dirqemu_img' via implicit literal
# concatenation, silently dropping both names from `import *`.
__all__ = ['imgfmt', 'imgproto', 'test_dir', 'qemu_img', 'qemu_io',
           'VM', 'QMPTestCase', 'notrun', 'main']
# Command lines for the tools under test; overridable via the environment
# (presumably exported by the ./check harness — see comment below).
# This will not work if arguments or path contain spaces but is necessary if we
# want to support the override options that ./check supports.
qemu_img_args = os.environ.get('QEMU_IMG', 'qemu-img').strip().split(' ')
qemu_io_args = os.environ.get('QEMU_IO', 'qemu-io').strip().split(' ')
qemu_args = os.environ.get('QEMU', 'qemu').strip().split(' ')

# Image format/protocol under test and the scratch directory for sockets,
# logs and images.
imgfmt = os.environ.get('IMGFMT', 'raw')
imgproto = os.environ.get('IMGPROTO', 'file')
test_dir = os.environ.get('TEST_DIR', '/var/tmp')
def qemu_img(*args):
    '''Run qemu-img and return the exit code.

    Output is discarded; stdin is connected to the null device.
    '''
    # Fix: the original opened /dev/null and never closed it, leaking one
    # file descriptor per invocation.  The context manager closes it as
    # soon as the subprocess call returns.
    with open('/dev/null', 'r+') as devnull:
        return subprocess.call(qemu_img_args + list(args),
                               stdin=devnull, stdout=devnull)
def qemu_img_verbose(*args):
    '''Run qemu-img without suppressing its output and return the exit code'''
    command = qemu_img_args + list(args)
    return subprocess.call(command)
def qemu_io(*args):
    '''Run qemu-io and return the stdout data'''
    command = qemu_io_args + list(args)
    process = subprocess.Popen(command, stdout=subprocess.PIPE)
    stdout_data = process.communicate()[0]
    return stdout_data
def compare_images(img1, img2):
    '''Return True if two image files are identical'''
    exit_code = qemu_img('compare', '-f', imgfmt,
                         '-F', imgfmt, img1, img2)
    return exit_code == 0
def create_image(name, size):
    '''Create a fully-allocated raw image with sector markers.

    Every 512-byte sector begins and ends with its sector number packed as
    a big-endian 32-bit integer ('>l504xl'), so misplaced or reordered
    sectors are detectable.  *size* is the image size in bytes.
    '''
    # Fixes vs. the original: binary mode (text mode happened to work on
    # POSIX Python 2 only), floor division (plain '/' yields a float under
    # true division, which struct.pack rejects), and a context manager so
    # the file is closed even if a write fails.
    with open(name, 'wb') as image:
        i = 0
        while i < size:
            sector = struct.pack('>l504xl', i // 512, i // 512)
            image.write(sector)
            i = i + 512
class VM(object):
    '''A QEMU VM'''

    def __init__(self):
        # Per-process socket/log paths so concurrent tests do not collide.
        self._monitor_path = os.path.join(test_dir, 'qemu-mon.%d' % os.getpid())
        self._qemu_log_path = os.path.join(test_dir, 'qemu-log.%d' % os.getpid())
        # Base command line: QMP monitor over a unix socket, qtest protocol
        # on stdio, and no display or VGA output.
        self._args = qemu_args + ['-chardev',
                                  'socket,id=mon,path=' + self._monitor_path,
                                  '-mon', 'chardev=mon,mode=control',
                                  '-qtest', 'stdio', '-machine', 'accel=qtest',
                                  '-display', 'none', '-vga', 'none']
        self._num_drives = 0

    def add_drive(self, path, opts=''):
        '''Add a virtio-blk drive to the VM.

        opts: extra comma-free -drive suboption string, appended verbatim.
        Returns self so calls can be chained.
        '''
        options = ['if=virtio',
                   'format=%s' % imgfmt,
                   'cache=none',
                   'file=%s' % path,
                   'id=drive%d' % self._num_drives]
        if opts:
            options.append(opts)
        self._args.append('-drive')
        self._args.append(','.join(options))
        self._num_drives += 1
        return self

    def add_fd(self, fd, fdset, opaque, opts=''):
        '''Pass a file descriptor to the VM.  Returns self for chaining.'''
        options = ['fd=%d' % fd,
                   'set=%d' % fdset,
                   'opaque=%s' % opaque]
        if opts:
            options.append(opts)
        self._args.append('-add-fd')
        self._args.append(','.join(options))
        return self

    def launch(self):
        '''Launch the VM and establish a QMP connection'''
        devnull = open('/dev/null', 'rb')
        qemulog = open(self._qemu_log_path, 'wb')
        try:
            # Create the QMP server socket first so QEMU can connect to it.
            self._qmp = qmp.QEMUMonitorProtocol(self._monitor_path, server=True)
            self._popen = subprocess.Popen(self._args, stdin=devnull, stdout=qemulog,
                                           stderr=subprocess.STDOUT)
            self._qmp.accept()
        except:
            # Do not leave a stale monitor socket behind on failed startup.
            os.remove(self._monitor_path)
            raise

    def shutdown(self):
        '''Terminate the VM and clean up'''
        if not self._popen is None:
            self._qmp.cmd('quit')
            self._popen.wait()
            os.remove(self._monitor_path)
            os.remove(self._qemu_log_path)
            self._popen = None

    # QMP argument names use dashes where Python identifiers need
    # underscores; this table converts keyword names back.
    underscore_to_dash = string.maketrans('_', '-')

    def qmp(self, cmd, **args):
        '''Invoke a QMP command and return the result dict'''
        qmp_args = dict()
        for k in args.keys():
            qmp_args[k.translate(self.underscore_to_dash)] = args[k]

        return self._qmp.cmd(cmd, args=qmp_args)

    def get_qmp_event(self, wait=False):
        '''Poll for one queued QMP events and return it'''
        return self._qmp.pull_event(wait=wait)

    def get_qmp_events(self, wait=False):
        '''Poll for queued QMP events and return a list of dicts'''
        events = self._qmp.get_events(wait=wait)
        self._qmp.clear_events()
        return events
# Matches a path component of the form "name[index]" (see QMPTestCase.dictpath).
index_re = re.compile(r'([^\[]+)\[([^\]]+)\]')
class QMPTestCase(unittest.TestCase):
    '''Abstract base class for QMP test cases'''

    def dictpath(self, d, path):
        '''Traverse a path in a nested dict.

        Components are separated by '/'; a component may carry a list
        index suffix, e.g. "return[0]", matched by index_re.  Fails the
        test (via self.fail) on any missing key or invalid index.
        '''
        for component in path.split('/'):
            m = index_re.match(component)
            if m:
                component, idx = m.groups()
                idx = int(idx)

            if not isinstance(d, dict) or component not in d:
                self.fail('failed path traversal for "%s" in "%s"' % (path, str(d)))
            d = d[component]

            if m:
                if not isinstance(d, list):
                    self.fail('path component "%s" in "%s" is not a list in "%s"' % (component, path, str(d)))
                try:
                    d = d[idx]
                except IndexError:
                    self.fail('invalid index "%s" in path "%s" in "%s"' % (idx, path, str(d)))
        return d

    def assert_qmp_absent(self, d, path):
        # Passes only when *path* does NOT resolve in *d*; dictpath fails
        # the test with AssertionError on a missing path, which we catch.
        try:
            result = self.dictpath(d, path)
        except AssertionError:
            return
        self.fail('path "%s" has value "%s"' % (path, str(result)))

    def assert_qmp(self, d, path, value):
        '''Assert that the value for a specific path in a QMP dict matches'''
        result = self.dictpath(d, path)
        self.assertEqual(result, value, 'values not equal "%s" and "%s"' % (str(result), str(value)))

    def assert_no_active_block_jobs(self):
        # Requires self.vm to be set up by the concrete subclass.
        result = self.vm.qmp('query-block-jobs')
        self.assert_qmp(result, 'return', [])

    def cancel_and_wait(self, drive='drive0', force=False):
        '''Cancel a block job and wait for it to finish, returning the event'''
        result = self.vm.qmp('block-job-cancel', device=drive, force=force)
        self.assert_qmp(result, 'return', {})

        # Wait until the job for our drive reports either completion or
        # cancellation, then return that event.
        cancelled = False
        result = None
        while not cancelled:
            for event in self.vm.get_qmp_events(wait=True):
                if event['event'] == 'BLOCK_JOB_COMPLETED' or \
                   event['event'] == 'BLOCK_JOB_CANCELLED':
                    self.assert_qmp(event, 'data/device', drive)
                    result = event
                    cancelled = True

        self.assert_no_active_block_jobs()
        return result
def notrun(reason):
    '''Skip this test suite'''
    # Each test in qemu-iotests has a number ("seq")
    seq = os.path.basename(sys.argv[0])

    # The harness reads "<seq>.notrun" to report the test as skipped.
    # NOTE(review): 'wb' mode with a str payload is Python-2 specific, and
    # the file handle relies on refcounting to be closed.
    open('%s.notrun' % seq, 'wb').write(reason + '\n')
    print '%s not run: %s' % (seq, reason)
    sys.exit(0)
def main(supported_fmts=()):
    '''Run tests.

    supported_fmts: optional collection of image format names; when
    non-empty and the current imgfmt is not among them, the whole suite
    is skipped via notrun().  The default was changed from a mutable
    list literal to a tuple (it is only membership-tested, so callers
    are unaffected).
    '''
    if supported_fmts and (imgfmt not in supported_fmts):
        notrun('not suitable for this image format: %s' % imgfmt)

    # We need to filter out the time taken from the output so that qemu-iotest
    # can reliably diff the results against master output.
    import StringIO
    output = StringIO.StringIO()

    class MyTestRunner(unittest.TextTestRunner):
        # Same runner, but writing into our in-memory buffer by default.
        def __init__(self, stream=output, descriptions=True, verbosity=1):
            unittest.TextTestRunner.__init__(self, stream, descriptions, verbosity)

    # unittest.main() will use sys.exit() so expect a SystemExit exception
    try:
        unittest.main(testRunner=MyTestRunner)
    finally:
        # Strip the variable elapsed-time suffix before emitting results.
        sys.stderr.write(re.sub(r'Ran (\d+) tests? in [\d.]+s', r'Ran \1 tests', output.getvalue()))
|
dionysio/django-haystack | refs/heads/master | example_project/regular_app/search_indexes.py | 12 | from haystack import indexes
from regular_app.models import Dog
# More typical usage involves creating a subclassed `SearchIndex`. This will
# provide more control over how data is indexed, generally resulting in better
# search.
class DogIndex(indexes.SearchIndex, indexes.Indexable):
    """Search index for the Dog model; only public dogs are indexed."""

    # Primary document field, rendered from a search template.
    text = indexes.CharField(document=True, use_template=True)
    # We can pull data straight out of the model via `model_attr`.
    breed = indexes.CharField(model_attr='breed')
    # Note that callables are also OK to use.
    name = indexes.CharField(model_attr='full_name')
    # NOTE(review): 'bio' is populated from the model's 'name' attribute,
    # not a bio/description field — confirm this is intentional.
    bio = indexes.CharField(model_attr='name')
    birth_date = indexes.DateField(model_attr='birth_date')
    # Note that we can't assign an attribute here. We'll manually prepare it instead.
    toys = indexes.MultiValueField()

    def get_model(self):
        """Return the model class this index describes."""
        return Dog

    def index_queryset(self, using=None):
        """Restrict indexing to publicly visible dogs."""
        return self.get_model().objects.filter(public=True)

    def prepare_toys(self, obj):
        """Populate the multi-value 'toys' field for *obj*."""
        # Store a list of id's for filtering
        return [toy.id for toy in obj.toys.all()]

        # Alternatively, you could store the names if searching for toy names
        # is more useful.
        # return [toy.name for toy in obj.toys.all()]
|
crobby/sahara | refs/heads/master | sahara/tests/unit/utils/test_keymgr.py | 10 | # Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.tests.unit import base
from sahara.utils import keymgr
class TestKeymgrUtils(base.SaharaTestCase):
    """Unit tests for sahara.utils.keymgr, covering both the pass-through
    behavior (no external key manager) and the Barbican-backed behavior
    (use_external_key_manager=True), with the Barbican client mocked out.
    """

    def setUp(self):
        super(TestKeymgrUtils, self).setUp()

    @mock.patch('sahara.utils.openstack.barbican.client_for_admin')
    def test_keymgr_delete_with_external(self, client_for_admin):
        self.override_config('use_external_key_manager', True)
        keyref = 'test_key_reference'
        # Fake Barbican client whose secrets manager records the delete.
        secrets_manager = mock.Mock()
        secrets_manager.delete = mock.Mock()
        client = mock.Mock(secrets=secrets_manager)
        client_for_admin.return_value = client
        keymgr.delete(keyref)
        secrets_manager.delete.assert_called_with(keyref)

    def test_keymgr_get_no_external(self):
        actual_key = 'test_key_super_secret'
        # with no external key manager, get should return the argument
        keyref = keymgr.get(actual_key)
        self.assertEqual(actual_key, keyref)

    @mock.patch('sahara.utils.openstack.barbican.client_for_admin')
    def test_keymgr_get_with_external(self, client_for_admin):
        self.override_config('use_external_key_manager', True)
        actual_key = 'test_key_super_secret'
        keyref = 'test_key_reference'
        # The mocked secret carries the key material in its payload.
        secret = mock.Mock(payload=actual_key)
        secrets_manager = mock.Mock()
        secrets_manager.get = mock.Mock(return_value=secret)
        client = mock.Mock(secrets=secrets_manager)
        client_for_admin.return_value = client
        # with external key manager, get should return a key from a reference
        key = keymgr.get(keyref)
        secrets_manager.get.assert_called_with(keyref)
        self.assertEqual(actual_key, key)

    def test_keymgr_store_no_external(self):
        actual_key = 'test_key_super_secret'
        # with no external key manager, store should return the argument
        keyref = keymgr.store(actual_key)
        self.assertEqual(actual_key, keyref)

    @mock.patch('sahara.utils.openstack.barbican.client_for_admin')
    def test_keymgr_store_with_external(self, client_for_admin):
        self.override_config('use_external_key_manager', True)
        key = 'test_key_super_secret'
        actual_keyref = 'test_key_reference'
        # create() yields a secret object; store() on it returns the ref.
        secret = mock.Mock()
        secret.store = mock.Mock(return_value=actual_keyref)
        secrets_manager = mock.Mock()
        secrets_manager.create = mock.Mock(return_value=secret)
        client = mock.Mock(secrets=secrets_manager)
        client_for_admin.return_value = client
        # with external key manager, store should return a key reference
        keyref = keymgr.store(key)
        secrets_manager.create.assert_called_with(
            payload=key, payload_content_type='text/plain')
        secret.store.assert_called_once_with()
        self.assertEqual(actual_keyref, keyref)
|
bakhtout/odoo-educ | refs/heads/8.0 | addons/purchase_requisition/wizard/bid_line_qty.py | 374 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
class bid_line_qty(osv.osv_memory):
    """Wizard letting the buyer change the awarded quantity on the
    currently selected purchase order (bid) lines.
    """
    _name = "bid.line.qty"
    _description = "Change Bid line quantity"
    _columns = {
        'qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
    }
    def change_qty(self, cr, uid, ids, context=None):
        """Write the wizard's quantity onto the active purchase order lines.

        :param ids: ids of this wizard record (only the first is read)
        :return: action dict closing the wizard window
        """
        if context is None:
            context = {}
        # The previous `context and context.get(...)` expression produced
        # None (not a list) when no context was passed, which would then be
        # handed to write() as the ids argument. Normalize to [] instead.
        active_ids = context.get('active_ids', [])
        data = self.browse(cr, uid, ids, context=context)[0]
        self.pool.get('purchase.order.line').write(cr, uid, active_ids, {'quantity_bid': data.qty})
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
lmazuel/azure-sdk-for-python | refs/heads/master | azure-mgmt-billing/azure_bdist_wheel.py | 241 | """
"wheel" copyright (c) 2012-2017 Daniel Holth <dholth@fastmail.fm> and
contributors.
The MIT License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Create a Azure wheel (.whl) distribution (a wheel is a built archive format).
This file is a copy of the official bdist_wheel file from wheel 0.30.0a0, enhanced
of the bottom with some Microsoft extension for Azure SDK for Python
"""
import csv
import hashlib
import os
import subprocess
import warnings
import shutil
import json
import sys
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import pkg_resources
safe_name = pkg_resources.safe_name
safe_version = pkg_resources.safe_version
from shutil import rmtree
from email.generator import Generator
from distutils.core import Command
from distutils.sysconfig import get_python_version
from distutils import log as logger
from wheel.pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform
from wheel.util import native, open_for_csv
from wheel.archive import archive_wheelfile
from wheel.pkginfo import read_pkg_info, write_pkg_info
from wheel.metadata import pkginfo_to_dict
from wheel import pep425tags, metadata
from wheel import __version__ as wheel_version
def safer_name(name):
    """Like ``safe_name`` but with dashes folded to underscores."""
    sanitized = safe_name(name)
    return sanitized.replace('-', '_')
def safer_version(version):
    """Like ``safe_version`` but with dashes folded to underscores."""
    sanitized = safe_version(version)
    return sanitized.replace('-', '_')
class bdist_wheel(Command):
    """distutils command that builds a PEP 427 wheel (.whl) archive.

    Vendored copy of ``wheel.bdist_wheel`` from wheel 0.30.0a0 (see module
    docstring); kept intact so the Azure subclass below can extend it.
    """
    description = 'create a wheel distribution'
    user_options = [('bdist-dir=', 'b',
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('relative', None,
                     "build the archive using relative paths"
                     "(default: false)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                    ('universal', None,
                     "make a universal wheel"
                     " (default: false)"),
                    ('python-tag=', None,
                     "Python implementation compatibility tag"
                     " (default: py%s)" % get_impl_ver()[0]),
                    ]
    boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']
    def initialize_options(self):
        """Set every command option to its pre-`finalize` default."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = 'zip'
        self.keep_temp = False
        self.dist_dir = None
        self.distinfo_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.python_tag = 'py' + get_impl_ver()[0]
        self.plat_name_supplied = False
    def finalize_options(self):
        """Resolve unset options from the 'bdist' command and setup.cfg."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wheel')
        self.data_dir = self.wheel_dist_name + '.data'
        self.plat_name_supplied = self.plat_name is not None
        need_options = ('dist_dir', 'plat_name', 'skip_build')
        self.set_undefined_options('bdist',
                                   *zip(need_options, need_options))
        # A wheel is "pure" when the distribution ships no compiled code.
        self.root_is_pure = not (self.distribution.has_ext_modules()
                                 or self.distribution.has_c_libraries())
        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict('wheel')
        if 'universal' in wheel:
            # please don't define this in your global configs
            val = wheel['universal'][1].strip()
            if val.lower() in ('1', 'true', 'yes'):
                self.universal = True
    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        return '-'.join((safer_name(self.distribution.get_name()),
                         safer_version(self.distribution.get_version())))
    def get_tag(self):
        """Return the (implementation, abi, platform) wheel tag triple."""
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = 'any'
        else:
            plat_name = self.plat_name or get_platform()
            if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:
                plat_name = 'linux_i686'
        plat_name = plat_name.replace('-', '_').replace('.', '_')
        if self.root_is_pure:
            if self.universal:
                impl = 'py2.py3'
            else:
                impl = self.python_tag
            tag = (impl, 'none', plat_name)
        else:
            impl_name = get_abbr_impl()
            impl_ver = get_impl_ver()
            # PEP 3149
            abi_tag = str(get_abi_tag()).lower()
            tag = (impl_name + impl_ver, abi_tag, plat_name)
            supported_tags = pep425tags.get_supported(
                supplied_platform=plat_name if self.plat_name_supplied else None)
            # XXX switch to this alternate implementation for non-pure:
            assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0])
        return tag
    def get_archive_basename(self):
        """Return archive name without extension"""
        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = "%s-%s-%s-%s" % (
            self.wheel_dist_name,
            impl_tag,
            abi_tag,
            plat_tag)
        return archive_basename
    def run(self):
        """Build, pseudo-install, attach metadata, and archive the wheel."""
        build_scripts = self.reinitialize_command('build_scripts')
        build_scripts.executable = 'python'
        if not self.skip_build:
            self.run_command('build')
        install = self.reinitialize_command('install',
                                            reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False
        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command('install_scripts')
        install_scripts.no_ep = True
        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
            setattr(install,
                    'install_' + key,
                    os.path.join(self.data_dir, key))
        basedir_observed = ''
        if os.name == 'nt':
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
            self.install_libbase = self.install_lib = basedir_observed
        setattr(install,
                'install_purelib' if self.root_is_pure else 'install_platlib',
                basedir_observed)
        logger.info("installing to %s", self.bdist_dir)
        self.run_command('install')
        archive_basename = self.get_archive_basename()
        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir,
                self._ensure_relative(install.install_base))
        self.set_undefined_options(
            'install_egg_info', ('target', 'egginfo_dir'))
        self.distinfo_dir = os.path.join(self.bdist_dir,
                                         '%s.dist-info' % self.wheel_dist_name)
        self.egg2dist(self.egginfo_dir,
                      self.distinfo_dir)
        self.write_wheelfile(self.distinfo_dir)
        self.write_record(self.bdist_dir, self.distinfo_dir)
        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)
        wheel_name = archive_wheelfile(pseudoinstall_root, archive_root)
        # Sign the archive
        if 'WHEEL_TOOL' in os.environ:
            subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name])
        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_wheel', get_python_version(), wheel_name))
        if not self.keep_temp:
            if self.dry_run:
                logger.info('removing %s', self.bdist_dir)
            else:
                rmtree(self.bdist_dir)
    def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'):
        """Write the WHEEL metadata file (spec version, generator, tags)."""
        from email.message import Message
        msg = Message()
        msg['Wheel-Version'] = '1.0'  # of the spec
        msg['Generator'] = generator
        msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()
        # Doesn't work for bdist_wininst
        impl_tag, abi_tag, plat_tag = self.get_tag()
        # One Tag header per expanded (impl, abi, plat) combination.
        for impl in impl_tag.split('.'):
            for abi in abi_tag.split('.'):
                for plat in plat_tag.split('.'):
                    msg['Tag'] = '-'.join((impl, abi, plat))
        wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
        logger.info('creating %s', wheelfile_path)
        with open(wheelfile_path, 'w') as f:
            Generator(f, maxheaderlen=0).flatten(msg)
    def _ensure_relative(self, path):
        """Strip a leading separator (and keep the drive) from `path`."""
        # copied from dir_util, deleted
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path
    def _pkginfo_to_metadata(self, egg_info_path, pkginfo_path):
        """Delegate PKG-INFO conversion to ``wheel.metadata``."""
        return metadata.pkginfo_to_metadata(egg_info_path, pkginfo_path)
    def license_file(self):
        """Return license filename from a license-file key in setup.cfg, or None."""
        metadata = self.distribution.get_option_dict('metadata')
        if not 'license_file' in metadata:
            return None
        return metadata['license_file'][1]
    def setupcfg_requirements(self):
        """Generate requirements from setup.cfg as
        ('Requires-Dist', 'requirement; qualifier') tuples. From a metadata
        section in setup.cfg:

            [metadata]
            provides-extra = extra1
                extra2
            requires-dist = requirement; qualifier
                another; qualifier2
                unqualified

        Yields

            ('Provides-Extra', 'extra1'),
            ('Provides-Extra', 'extra2'),
            ('Requires-Dist', 'requirement; qualifier'),
            ('Requires-Dist', 'another; qualifier2'),
            ('Requires-Dist', 'unqualified')
        """
        metadata = self.distribution.get_option_dict('metadata')
        # our .ini parser folds - to _ in key names:
        for key, title in (('provides_extra', 'Provides-Extra'),
                           ('requires_dist', 'Requires-Dist')):
            if not key in metadata:
                continue
            field = metadata[key]
            for line in field[1].splitlines():
                line = line.strip()
                if not line:
                    continue
                yield (title, line)
    def add_requirements(self, metadata_path):
        """Add additional requirements from setup.cfg to file metadata_path"""
        additional = list(self.setupcfg_requirements())
        if not additional: return
        pkg_info = read_pkg_info(metadata_path)
        if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info:
            warnings.warn('setup.cfg requirements overwrite values from setup.py')
            del pkg_info['Provides-Extra']
            del pkg_info['Requires-Dist']
        for k, v in additional:
            pkg_info[k] = v
        write_pkg_info(metadata_path, pkg_info)
    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""
        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)
        adios(distinfo_path)
        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob
            pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
            possible = glob.glob(pat)
            err = "Egg metadata expected at %s but not found" % (egginfo_path,)
            if possible:
                alt = os.path.basename(possible[0])
                err += " (%s found - possible misnamed archive file?)" % (alt,)
            raise ValueError(err)
        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = self._pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
            pkg_info = self._pkginfo_to_metadata(egginfo_path, pkginfo_path)
            # ignore common egg metadata that is useless to wheel
            shutil.copytree(egginfo_path, distinfo_path,
                            ignore=lambda x, y: set(('PKG-INFO',
                                                     'requires.txt',
                                                     'SOURCES.txt',
                                                     'not-zip-safe',)))
            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
            with open(dependency_links_path, 'r') as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)
        write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
        # XXX deprecated. Still useful for current distribute/setuptools.
        metadata_path = os.path.join(distinfo_path, 'METADATA')
        self.add_requirements(metadata_path)
        # XXX intentionally a different path than the PEP.
        metadata_json_path = os.path.join(distinfo_path, 'metadata.json')
        pymeta = pkginfo_to_dict(metadata_path,
                                 distribution=self.distribution)
        if 'description' in pymeta:
            description_filename = 'DESCRIPTION.rst'
            description_text = pymeta.pop('description')
            description_path = os.path.join(distinfo_path,
                                            description_filename)
            with open(description_path, "wb") as description_file:
                description_file.write(description_text.encode('utf-8'))
            pymeta['extensions']['python.details']['document_names']['description'] = description_filename
        # XXX heuristically copy any LICENSE/LICENSE.txt?
        license = self.license_file()
        if license:
            license_filename = 'LICENSE.txt'
            shutil.copy(license, os.path.join(self.distinfo_dir, license_filename))
            pymeta['extensions']['python.details']['document_names']['license'] = license_filename
        with open(metadata_json_path, "w") as metadata_json:
            json.dump(pymeta, metadata_json, sort_keys=True)
        adios(egginfo_path)
    def write_record(self, bdist_dir, distinfo_dir):
        """Write the RECORD manifest: one (path, sha256, size) row per file."""
        from wheel.util import urlsafe_b64encode
        record_path = os.path.join(distinfo_dir, 'RECORD')
        record_relpath = os.path.relpath(record_path, bdist_dir)
        def walk():
            # Deterministic traversal so RECORD ordering is reproducible.
            for dir, dirs, files in os.walk(bdist_dir):
                dirs.sort()
                for f in sorted(files):
                    yield os.path.join(dir, f)
        def skip(path):
            """Wheel hashes every possible file."""
            return (path == record_relpath)
        with open_for_csv(record_path, 'w+') as record_file:
            writer = csv.writer(record_file)
            for path in walk():
                relpath = os.path.relpath(path, bdist_dir)
                if skip(relpath):
                    # RECORD itself gets empty hash/size fields.
                    hash = ''
                    size = ''
                else:
                    with open(path, 'rb') as f:
                        data = f.read()
                    digest = hashlib.sha256(data).digest()
                    hash = 'sha256=' + native(urlsafe_b64encode(digest))
                    size = len(data)
                record_path = os.path.relpath(
                    path, bdist_dir).replace(os.path.sep, '/')
                writer.writerow((record_path, hash, size))
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
from distutils import log as logger
import os.path
#from wheel.bdist_wheel import bdist_wheel
class azure_bdist_wheel(bdist_wheel):
    """bdist_wheel subclass for Azure SDK namespace packages.

    Adds an ``--azure-namespace-package`` option; when set, the matching
    ``*-nspkg`` distribution is added to install_requires and the namespace
    ``__init__.py`` files are stripped from the wheel so the nspkg owns them.
    """
    description = "Create an Azure wheel distribution"
    user_options = bdist_wheel.user_options + \
        [('azure-namespace-package=', None,
          "Name of the deepest nspkg used")]
    def initialize_options(self):
        """Add the Azure-specific option default on top of the base ones."""
        bdist_wheel.initialize_options(self)
        self.azure_namespace_package = None
    def finalize_options(self):
        """Validate that the namespace package follows the -nspkg convention."""
        bdist_wheel.finalize_options(self)
        if self.azure_namespace_package and not self.azure_namespace_package.endswith("-nspkg"):
            raise ValueError("azure_namespace_package must finish by -nspkg")
    def run(self):
        """Inject the nspkg requirement (if configured) and build the wheel."""
        # Bug fix: only append the requirement when a namespace package was
        # actually configured. The previous code appended unconditionally,
        # producing the bogus requirement "None>=2.0.0" when the option
        # was left unset.
        if self.azure_namespace_package:
            if not self.distribution.install_requires:
                self.distribution.install_requires = []
            self.distribution.install_requires.append(
                "{}>=2.0.0".format(self.azure_namespace_package))
        bdist_wheel.run(self)
    def write_record(self, bdist_dir, distinfo_dir):
        """Remove namespace ``__init__.py`` files before writing RECORD."""
        if self.azure_namespace_package:
            # Split and remove last part, assuming it's "nspkg"
            subparts = self.azure_namespace_package.split('-')[0:-1]
            # Build every ancestor folder path, e.g. azure, azure/mgmt, ...
            folder_with_init = [os.path.join(*subparts[0:i+1]) for i in range(len(subparts))]
            for azure_sub_package in folder_with_init:
                init_file = os.path.join(bdist_dir, azure_sub_package, '__init__.py')
                if os.path.isfile(init_file):
                    logger.info("manually remove {} while building the wheel".format(init_file))
                    os.remove(init_file)
                else:
                    raise ValueError("Unable to find {}. Are you sure of your namespace package?".format(init_file))
        bdist_wheel.write_record(self, bdist_dir, distinfo_dir)
# Map the distutils/setuptools command name to the Azure-aware implementation
# so that setup.py can pass this dict as `cmdclass=` and have
# `python setup.py bdist_wheel` pick up the namespace-package handling.
cmdclass = {
    'bdist_wheel': azure_bdist_wheel,
}
|
Freso/beets | refs/heads/master | beetsplug/convert.py | 1 | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Jakob Schnitzer.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Converts tracks or albums to external directory
"""
from __future__ import division, absolute_import, print_function
import os
import threading
import subprocess
import tempfile
import shlex
from string import Template
from beets import ui, util, plugins, config
from beets.plugins import BeetsPlugin
from beets.util.confit import ConfigTypeError
from beets import art
from beets.util.artresizer import ArtResizer
_fs_lock = threading.Lock()
_temp_files = [] # Keep track of temporary transcoded files for deletion.
# Some convenient alternate names for formats.
ALIASES = {
u'wma': u'windows media',
u'vorbis': u'ogg',
}
LOSSLESS_FORMATS = ['ape', 'flac', 'alac', 'wav', 'aiff']
def replace_ext(path, ext):
    """Return the path with its extension replaced by `ext`.
    The new extension must not contain a leading dot; `path` and `ext`
    are bytestrings.
    """
    root, _old_ext = os.path.splitext(path)
    return root + b'.' + ext
def get_format(fmt=None):
    """Return the command template and the extension from the config.

    Returns a ``(command, extension)`` pair of bytestrings for the format
    `fmt` (defaulting to the configured ``convert.format``). The legacy
    top-level ``command``/``opts``/``extension`` keys, when present,
    override the per-format entry.
    """
    if not fmt:
        fmt = config['convert']['format'].get(unicode).lower()
    fmt = ALIASES.get(fmt, fmt)
    try:
        format_info = config['convert']['formats'][fmt].get(dict)
        command = format_info['command']
        extension = format_info.get('extension', fmt)
    except KeyError:
        raise ui.UserError(
            u'convert: format {0} needs the "command" field'
            .format(fmt)
        )
    except ConfigTypeError:
        # The format entry is a plain string rather than a dict: treat it
        # as the command and fall back to the format name as the extension.
        command = config['convert']['formats'][fmt].get(bytes)
        extension = fmt
    # Convenience and backwards-compatibility shortcuts.
    keys = config['convert'].keys()
    if 'command' in keys:
        command = config['convert']['command'].get(unicode)
    elif 'opts' in keys:
        # Undocumented option for backwards compatibility with < 1.3.1.
        command = u'ffmpeg -i $source -y {0} $dest'.format(
            config['convert']['opts'].get(unicode)
        )
    if 'extension' in keys:
        extension = config['convert']['extension'].get(unicode)
    return (command.encode('utf8'), extension.encode('utf8'))
def should_transcode(item, fmt):
    """Determine whether the item should be transcoded as part of
    conversion (i.e., its bitrate is high or it has the wrong format).
    """
    lossy_source = item.format.lower() not in LOSSLESS_FORMATS
    if config['convert']['never_convert_lossy_files'] and lossy_source:
        # Lossy sources are never re-encoded when this option is set.
        return False
    maxbr = config['convert']['max_bitrate'].get(int)
    if fmt.lower() != item.format.lower():
        return True
    return item.bitrate >= 1000 * maxbr
class ConvertPlugin(BeetsPlugin):
    """Beets plugin providing the ``beet convert`` command and optional
    automatic transcoding of tracks on import.
    """
    def __init__(self):
        super(ConvertPlugin, self).__init__()
        # Default configuration; per-format entries map a format name to
        # either a command string or a {command, extension} dict.
        self.config.add({
            u'dest': None,
            u'pretend': False,
            u'threads': util.cpu_count(),
            u'format': u'mp3',
            u'formats': {
                u'aac': {
                    u'command': u'ffmpeg -i $source -y -vn -acodec libfaac '
                                u'-aq 100 $dest',
                    u'extension': u'm4a',
                },
                u'alac': {
                    u'command': u'ffmpeg -i $source -y -vn -acodec alac $dest',
                    u'extension': u'm4a',
                },
                u'flac': u'ffmpeg -i $source -y -vn -acodec flac $dest',
                u'mp3': u'ffmpeg -i $source -y -vn -aq 2 $dest',
                u'opus':
                    u'ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest',
                u'ogg':
                    u'ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest',
                u'wma':
                    u'ffmpeg -i $source -y -vn -acodec wmav2 -vn $dest',
            },
            u'max_bitrate': 500,
            u'auto': False,
            u'tmpdir': None,
            u'quiet': False,
            u'embed': True,
            u'paths': {},
            u'never_convert_lossy_files': False,
            u'copy_album_art': False,
            u'album_art_maxwidth': 0,
        })
        self.import_stages = [self.auto_convert]
        self.register_listener('import_task_files', self._cleanup)
    def commands(self):
        """Declare the ``convert`` CLI subcommand and its options."""
        cmd = ui.Subcommand('convert', help=u'convert to external location')
        cmd.parser.add_option('-p', '--pretend', action='store_true',
                              help=u'show actions but do nothing')
        cmd.parser.add_option('-t', '--threads', action='store', type='int',
                              help=u'change the number of threads, \
                              defaults to maximum available processors')
        cmd.parser.add_option('-k', '--keep-new', action='store_true',
                              dest='keep_new', help=u'keep only the converted \
                              and move the old files')
        cmd.parser.add_option('-d', '--dest', action='store',
                              help=u'set the destination directory')
        cmd.parser.add_option('-f', '--format', action='store', dest='format',
                              help=u'set the target format of the tracks')
        cmd.parser.add_option('-y', '--yes', action='store_true', dest='yes',
                              help=u'do not ask for confirmation')
        cmd.parser.add_album_option()
        cmd.func = self.convert_func
        return [cmd]
    def auto_convert(self, config, task):
        """Import-stage hook: transcode newly imported items when enabled."""
        if self.config['auto']:
            for item in task.imported_items():
                self.convert_on_import(config.lib, item)
    # Utilities converted from functions to methods on logging overhaul
    def encode(self, command, source, dest, pretend=False):
        """Encode `source` to `dest` using command template `command`.
        Raises `subprocess.CalledProcessError` if the command exited with a
        non-zero status code.
        """
        # The paths and arguments must be bytes.
        assert isinstance(command, bytes)
        assert isinstance(source, bytes)
        assert isinstance(dest, bytes)
        quiet = self.config['quiet'].get(bool)
        if not quiet and not pretend:
            self._log.info(u'Encoding {0}', util.displayable_path(source))
        # Substitute $source and $dest in the argument list.
        args = shlex.split(command)
        for i, arg in enumerate(args):
            args[i] = Template(arg).safe_substitute({
                b'source': source,
                b'dest': dest,
            })
        if pretend:
            self._log.info(u' '.join(ui.decargs(args)))
            return
        try:
            util.command_output(args)
        except subprocess.CalledProcessError as exc:
            # Something went wrong (probably Ctrl+C), remove temporary files
            self._log.info(u'Encoding {0} failed. Cleaning up...',
                           util.displayable_path(source))
            self._log.debug(u'Command {0} exited with status {1}',
                            exc.cmd.decode('utf8', 'ignore'),
                            exc.returncode)
            util.remove(dest)
            util.prune_dirs(os.path.dirname(dest))
            raise
        except OSError as exc:
            raise ui.UserError(
                u"convert: couldn't invoke '{0}': {1}".format(
                    u' '.join(ui.decargs(args)), exc
                )
            )
        if not quiet and not pretend:
            self._log.info(u'Finished encoding {0}',
                           util.displayable_path(source))
    def convert_item(self, dest_dir, keep_new, path_formats, fmt,
                     pretend=False):
        """Pipeline-stage coroutine: receives items via ``yield`` and copies
        or transcodes each one into `dest_dir`. One instance runs per thread.
        """
        command, ext = get_format(fmt)
        item, original, converted = None, None, None
        while True:
            item = yield (item, original, converted)
            dest = item.destination(basedir=dest_dir,
                                    path_formats=path_formats)
            # When keeping the new file in the library, we first move the
            # current (pristine) file to the destination. We'll then copy it
            # back to its old path or transcode it to a new path.
            if keep_new:
                original = dest
                converted = item.path
                if should_transcode(item, fmt):
                    converted = replace_ext(converted, ext)
            else:
                original = item.path
                if should_transcode(item, fmt):
                    dest = replace_ext(dest, ext)
                converted = dest
            # Ensure that only one thread tries to create directories at a
            # time. (The existence check is not atomic with the directory
            # creation inside this function.)
            if not pretend:
                with _fs_lock:
                    util.mkdirall(dest)
            if os.path.exists(util.syspath(dest)):
                self._log.info(u'Skipping {0} (target file exists)',
                               util.displayable_path(item.path))
                continue
            if keep_new:
                if pretend:
                    self._log.info(u'mv {0} {1}',
                                   util.displayable_path(item.path),
                                   util.displayable_path(original))
                else:
                    self._log.info(u'Moving to {0}',
                                   util.displayable_path(original))
                    util.move(item.path, original)
            if should_transcode(item, fmt):
                try:
                    self.encode(command, original, converted, pretend)
                except subprocess.CalledProcessError:
                    # encode() already cleaned up; skip this item.
                    continue
            else:
                if pretend:
                    self._log.info(u'cp {0} {1}',
                                   util.displayable_path(original),
                                   util.displayable_path(converted))
                else:
                    # No transcoding necessary.
                    self._log.info(u'Copying {0}',
                                   util.displayable_path(item.path))
                    util.copy(original, converted)
            if pretend:
                continue
            # Write tags from the database to the converted file.
            item.try_write(path=converted)
            if keep_new:
                # If we're keeping the transcoded file, read it again (after
                # writing) to get new bitrate, duration, etc.
                item.path = converted
                item.read()
                item.store()  # Store new path and audio data.
            if self.config['embed']:
                album = item.get_album()
                if album and album.artpath:
                    self._log.debug(u'embedding album art from {}',
                                    util.displayable_path(album.artpath))
                    art.embed_item(self._log, item, album.artpath,
                                   itempath=converted)
            if keep_new:
                plugins.send('after_convert', item=item,
                             dest=dest, keepnew=True)
            else:
                plugins.send('after_convert', item=item,
                             dest=converted, keepnew=False)
    def copy_album_art(self, album, dest_dir, path_formats, pretend=False):
        """Copies or converts the associated cover art of the album. Album must
        have at least one track.
        """
        if not album or not album.artpath:
            return
        album_item = album.items().get()
        # Album shouldn't be empty.
        if not album_item:
            return
        # Get the destination of the first item (track) of the album, we use
        # this function to format the path accordingly to path_formats.
        dest = album_item.destination(basedir=dest_dir,
                                      path_formats=path_formats)
        # Remove item from the path.
        dest = os.path.join(*util.components(dest)[:-1])
        dest = album.art_destination(album.artpath, item_dir=dest)
        if album.artpath == dest:
            return
        if not pretend:
            util.mkdirall(dest)
        if os.path.exists(util.syspath(dest)):
            self._log.info(u'Skipping {0} (target file exists)',
                           util.displayable_path(album.artpath))
            return
        # Decide whether we need to resize the cover-art image.
        resize = False
        maxwidth = None
        if self.config['album_art_maxwidth']:
            maxwidth = self.config['album_art_maxwidth'].get(int)
            size = ArtResizer.shared.get_size(album.artpath)
            self._log.debug('image size: {}', size)
            if size:
                resize = size[0] > maxwidth
            else:
                self._log.warning(u'Could not get size of image (please see '
                                  u'documentation for dependencies).')
        # Either copy or resize (while copying) the image.
        if resize:
            self._log.info(u'Resizing cover art from {0} to {1}',
                           util.displayable_path(album.artpath),
                           util.displayable_path(dest))
            if not pretend:
                ArtResizer.shared.resize(maxwidth, album.artpath, dest)
        else:
            if pretend:
                self._log.info(u'cp {0} {1}',
                               util.displayable_path(album.artpath),
                               util.displayable_path(dest))
            else:
                # NOTE(review): the message below has one placeholder but two
                # path arguments; the second is silently dropped by the lazy
                # logger — likely meant u'Copying cover art {0} to {1}'.
                self._log.info(u'Copying cover art to {0}',
                               util.displayable_path(album.artpath),
                               util.displayable_path(dest))
                util.copy(album.artpath, dest)
    def convert_func(self, lib, opts, args):
        """Implementation of the ``beet convert`` command: resolve options,
        optionally confirm, then feed matching items through a parallel
        pipeline of `convert_item` coroutines.
        """
        if not opts.dest:
            opts.dest = self.config['dest'].get()
        if not opts.dest:
            raise ui.UserError(u'no convert destination set')
        opts.dest = util.bytestring_path(opts.dest)
        if not opts.threads:
            opts.threads = self.config['threads'].get(int)
        if self.config['paths']:
            path_formats = ui.get_path_formats(self.config['paths'])
        else:
            path_formats = ui.get_path_formats()
        if not opts.format:
            opts.format = self.config['format'].get(unicode).lower()
        pretend = opts.pretend if opts.pretend is not None else \
            self.config['pretend'].get(bool)
        if not pretend:
            # Show what will be converted and ask for confirmation.
            ui.commands.list_items(lib, ui.decargs(args), opts.album)
            if not (opts.yes or ui.input_yn(u"Convert? (Y/n)")):
                return
        if opts.album:
            albums = lib.albums(ui.decargs(args))
            items = (i for a in albums for i in a.items())
            if self.config['copy_album_art']:
                for album in albums:
                    self.copy_album_art(album, opts.dest, path_formats,
                                        pretend)
        else:
            items = iter(lib.items(ui.decargs(args)))
        convert = [self.convert_item(opts.dest,
                                     opts.keep_new,
                                     path_formats,
                                     opts.format,
                                     pretend)
                   for _ in range(opts.threads)]
        pipe = util.pipeline.Pipeline([items, convert])
        pipe.run_parallel()
    def convert_on_import(self, lib, item):
        """Transcode a file automatically after it is imported into the
        library.
        """
        fmt = self.config['format'].get(unicode).lower()
        if should_transcode(item, fmt):
            command, ext = get_format()
            # Create a temporary file for the conversion.
            tmpdir = self.config['tmpdir'].get()
            fd, dest = tempfile.mkstemp('.' + ext, dir=tmpdir)
            os.close(fd)
            dest = util.bytestring_path(dest)
            _temp_files.append(dest)  # Delete the transcode later.
            # Convert.
            try:
                self.encode(command, item.path, dest)
            except subprocess.CalledProcessError:
                return
            # Change the newly-imported database entry to point to the
            # converted file.
            item.path = dest
            item.write()
            item.read()  # Load new audio information data.
            item.store()
    def _cleanup(self, task, session):
        """Remove leftover temporary transcodes after an import task."""
        for path in task.old_paths:
            if path in _temp_files:
                if os.path.isfile(path):
                    util.remove(path)
                _temp_files.remove(path)
|
divya-csekar/flask-microblog-server | refs/heads/master | flask/Lib/site-packages/flask/testsuite/helpers.py | 405 | # -*- coding: utf-8 -*-
"""
flask.testsuite.helpers
~~~~~~~~~~~~~~~~~~~~~~~
Various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import flask
import unittest
from logging import StreamHandler
from flask.testsuite import FlaskTestCase, catch_warnings, catch_stderr
from werkzeug.http import parse_cache_control_header, parse_options_header
from flask._compat import StringIO, text_type
def has_encoding(name):
    """Return True if this Python build knows the codec called *name*."""
    import codecs
    try:
        codecs.lookup(name)
    except LookupError:
        return False
    return True
class JSONTestCase(FlaskTestCase):
    """Tests for Flask's JSON request parsing, ``jsonify``, JSON encoder
    customization, and the ``tojson`` template filter."""

    def test_json_bad_requests(self):
        """A malformed JSON body must produce a 400 Bad Request."""
        app = flask.Flask(__name__)
        @app.route('/json', methods=['POST'])
        def return_json():
            return flask.jsonify(foo=text_type(flask.request.get_json()))
        c = app.test_client()
        rv = c.post('/json', data='malformed', content_type='application/json')
        self.assert_equal(rv.status_code, 400)

    def test_json_body_encoding(self):
        """The charset from the Content-Type header is honoured when
        decoding the request body before JSON parsing."""
        app = flask.Flask(__name__)
        app.testing = True
        @app.route('/')
        def index():
            return flask.request.get_json()
        c = app.test_client()
        resp = c.get('/', data=u'"Hällo Wörld"'.encode('iso-8859-15'),
                     content_type='application/json; charset=iso-8859-15')
        self.assert_equal(resp.data, u'Hällo Wörld'.encode('utf-8'))

    def test_jsonify(self):
        """``jsonify`` accepts keyword arguments and a plain dict alike."""
        d = dict(a=23, b=42, c=[1, 2, 3])
        app = flask.Flask(__name__)
        @app.route('/kw')
        def return_kwargs():
            return flask.jsonify(**d)
        @app.route('/dict')
        def return_dict():
            return flask.jsonify(d)
        c = app.test_client()
        for url in '/kw', '/dict':
            rv = c.get(url)
            self.assert_equal(rv.mimetype, 'application/json')
            self.assert_equal(flask.json.loads(rv.data), d)

    def test_json_as_unicode(self):
        """JSON_AS_ASCII toggles backslash-u escaping of non-ASCII output."""
        app = flask.Flask(__name__)

        app.config['JSON_AS_ASCII'] = True
        with app.app_context():
            rv = flask.json.dumps(u'\N{SNOWMAN}')
            self.assert_equal(rv, '"\\u2603"')

        app.config['JSON_AS_ASCII'] = False
        with app.app_context():
            rv = flask.json.dumps(u'\N{SNOWMAN}')
            self.assert_equal(rv, u'"\u2603"')

    def test_json_attr(self):
        """Values of a parsed JSON object are accessible by key."""
        app = flask.Flask(__name__)
        @app.route('/add', methods=['POST'])
        def add():
            json = flask.request.get_json()
            return text_type(json['a'] + json['b'])
        c = app.test_client()
        rv = c.post('/add', data=flask.json.dumps({'a': 1, 'b': 2}),
                    content_type='application/json')
        self.assert_equal(rv.data, b'3')

    def test_template_escaping(self):
        """``htmlsafe_dumps`` / ``|tojson`` escape characters that are
        dangerous inside <script> blocks and single-quoted attributes."""
        app = flask.Flask(__name__)
        render = flask.render_template_string
        with app.test_request_context():
            rv = flask.json.htmlsafe_dumps('</script>')
            self.assert_equal(rv, u'"\\u003c/script\\u003e"')
            self.assert_equal(type(rv), text_type)
            rv = render('{{ "</script>"|tojson }}')
            self.assert_equal(rv, '"\\u003c/script\\u003e"')
            rv = render('{{ "<\0/script>"|tojson }}')
            self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"')
            rv = render('{{ "<!--<script>"|tojson }}')
            self.assert_equal(rv, '"\\u003c!--\\u003cscript\\u003e"')
            rv = render('{{ "&"|tojson }}')
            self.assert_equal(rv, '"\\u0026"')
            rv = render('{{ "\'"|tojson }}')
            self.assert_equal(rv, '"\\u0027"')
            rv = render("<a ng-data='{{ data|tojson }}'></a>",
                        data={'x': ["foo", "bar", "baz'"]})
            self.assert_equal(rv,
                '<a ng-data=\'{"x": ["foo", "bar", "baz\\u0027"]}\'></a>')

    def test_json_customization(self):
        """Apps can install custom JSON encoder/decoder classes which are
        used for both request parsing and response dumping."""
        class X(object):
            def __init__(self, val):
                self.val = val
        class MyEncoder(flask.json.JSONEncoder):
            def default(self, o):
                if isinstance(o, X):
                    return '<%d>' % o.val
                return flask.json.JSONEncoder.default(self, o)
        class MyDecoder(flask.json.JSONDecoder):
            def __init__(self, *args, **kwargs):
                kwargs.setdefault('object_hook', self.object_hook)
                flask.json.JSONDecoder.__init__(self, *args, **kwargs)
            def object_hook(self, obj):
                if len(obj) == 1 and '_foo' in obj:
                    return X(obj['_foo'])
                return obj
        app = flask.Flask(__name__)
        app.testing = True
        app.json_encoder = MyEncoder
        app.json_decoder = MyDecoder
        @app.route('/', methods=['POST'])
        def index():
            return flask.json.dumps(flask.request.get_json()['x'])
        c = app.test_client()
        rv = c.post('/', data=flask.json.dumps({
            'x': {'_foo': 42}
        }), content_type='application/json')
        self.assertEqual(rv.data, b'"<42>"')

    def test_modified_url_encoding(self):
        """A custom Request class can override the URL charset used to
        decode query-string arguments."""
        class ModifiedRequest(flask.Request):
            url_charset = 'euc-kr'
        app = flask.Flask(__name__)
        app.testing = True
        app.request_class = ModifiedRequest
        app.url_map.charset = 'euc-kr'

        @app.route('/')
        def index():
            return flask.request.args['foo']

        rv = app.test_client().get(u'/?foo=정상처리'.encode('euc-kr'))
        self.assert_equal(rv.status_code, 200)
        self.assert_equal(rv.data, u'정상처리'.encode('utf-8'))

    # Disable the test above entirely when this Python build lacks the
    # euc-kr codec (setting the attribute to None skips it).
    if not has_encoding('euc-kr'):
        test_modified_url_encoding = None

    def test_json_key_sorting(self):
        """With JSON_SORT_KEYS (on by default) jsonify emits object keys
        in sorted order -- here the integer keys 0..19."""
        app = flask.Flask(__name__)
        app.testing = True
        self.assert_equal(app.config['JSON_SORT_KEYS'], True)
        d = dict.fromkeys(range(20), 'foo')
        @app.route('/')
        def index():
            return flask.jsonify(values=d)
        c = app.test_client()
        rv = c.get('/')
        lines = [x.strip() for x in rv.data.strip().decode('utf-8').splitlines()]
        self.assert_equal(lines, [
            '{',
            '"values": {',
            '"0": "foo",',
            '"1": "foo",',
            '"2": "foo",',
            '"3": "foo",',
            '"4": "foo",',
            '"5": "foo",',
            '"6": "foo",',
            '"7": "foo",',
            '"8": "foo",',
            '"9": "foo",',
            '"10": "foo",',
            '"11": "foo",',
            '"12": "foo",',
            '"13": "foo",',
            '"14": "foo",',
            '"15": "foo",',
            '"16": "foo",',
            '"17": "foo",',
            '"18": "foo",',
            '"19": "foo"',
            '}',
            '}'
        ])
class SendfileTestCase(FlaskTestCase):
    """Tests for ``send_file``/``send_static_file``: passthrough mode,
    X-Sendfile support, attachments, and cache-control timeouts."""

    def test_send_file_regular(self):
        """Sending by filename guesses the mimetype and streams the file
        through untouched (direct_passthrough)."""
        app = flask.Flask(__name__)
        with app.test_request_context():
            rv = flask.send_file('static/index.html')
            self.assert_true(rv.direct_passthrough)
            self.assert_equal(rv.mimetype, 'text/html')
            with app.open_resource('static/index.html') as f:
                rv.direct_passthrough = False
                self.assert_equal(rv.data, f.read())
            rv.close()

    def test_send_file_xsendfile(self):
        """With use_x_sendfile the response carries an X-Sendfile header
        pointing at the file's path under the app root."""
        app = flask.Flask(__name__)
        app.use_x_sendfile = True
        with app.test_request_context():
            rv = flask.send_file('static/index.html')
            self.assert_true(rv.direct_passthrough)
            self.assert_in('x-sendfile', rv.headers)
            self.assert_equal(rv.headers['x-sendfile'],
                os.path.join(app.root_path, 'static/index.html'))
            self.assert_equal(rv.mimetype, 'text/html')
            rv.close()

    def test_send_file_object(self):
        """Sending open file objects / StringIO: the exact number of
        emitted warnings (mimetype and/or etag) is asserted, and
        X-Sendfile is only applied when a real filesystem name is known."""
        app = flask.Flask(__name__)
        with catch_warnings() as captured:
            with app.test_request_context():
                f = open(os.path.join(app.root_path, 'static/index.html'))
                rv = flask.send_file(f)
                rv.direct_passthrough = False
                with app.open_resource('static/index.html') as f:
                    self.assert_equal(rv.data, f.read())
                self.assert_equal(rv.mimetype, 'text/html')
                rv.close()
            # mimetypes + etag
            self.assert_equal(len(captured), 2)

        app.use_x_sendfile = True
        with catch_warnings() as captured:
            with app.test_request_context():
                f = open(os.path.join(app.root_path, 'static/index.html'))
                rv = flask.send_file(f)
                self.assert_equal(rv.mimetype, 'text/html')
                self.assert_in('x-sendfile', rv.headers)
                self.assert_equal(rv.headers['x-sendfile'],
                    os.path.join(app.root_path, 'static/index.html'))
                rv.close()
            # mimetypes + etag
            self.assert_equal(len(captured), 2)

        app.use_x_sendfile = False
        with app.test_request_context():
            with catch_warnings() as captured:
                f = StringIO('Test')
                rv = flask.send_file(f)
                rv.direct_passthrough = False
                self.assert_equal(rv.data, b'Test')
                self.assert_equal(rv.mimetype, 'application/octet-stream')
                rv.close()
                # etags
                self.assert_equal(len(captured), 1)
            with catch_warnings() as captured:
                f = StringIO('Test')
                rv = flask.send_file(f, mimetype='text/plain')
                rv.direct_passthrough = False
                self.assert_equal(rv.data, b'Test')
                self.assert_equal(rv.mimetype, 'text/plain')
                rv.close()
                # etags
                self.assert_equal(len(captured), 1)

        app.use_x_sendfile = True
        with catch_warnings() as captured:
            with app.test_request_context():
                f = StringIO('Test')
                rv = flask.send_file(f)
                self.assert_not_in('x-sendfile', rv.headers)
                rv.close()
            # etags
            self.assert_equal(len(captured), 1)

    def test_attachment(self):
        """as_attachment sets a Content-Disposition header; the filename
        comes from the file name or attachment_filename."""
        app = flask.Flask(__name__)
        with catch_warnings() as captured:
            with app.test_request_context():
                f = open(os.path.join(app.root_path, 'static/index.html'))
                rv = flask.send_file(f, as_attachment=True)
                value, options = parse_options_header(rv.headers['Content-Disposition'])
                self.assert_equal(value, 'attachment')
                rv.close()
            # mimetypes + etag
            self.assert_equal(len(captured), 2)

        with app.test_request_context():
            self.assert_equal(options['filename'], 'index.html')
            rv = flask.send_file('static/index.html', as_attachment=True)
            value, options = parse_options_header(rv.headers['Content-Disposition'])
            self.assert_equal(value, 'attachment')
            self.assert_equal(options['filename'], 'index.html')
            rv.close()

        with app.test_request_context():
            rv = flask.send_file(StringIO('Test'), as_attachment=True,
                                 attachment_filename='index.txt',
                                 add_etags=False)
            self.assert_equal(rv.mimetype, 'text/plain')
            value, options = parse_options_header(rv.headers['Content-Disposition'])
            self.assert_equal(value, 'attachment')
            self.assert_equal(options['filename'], 'index.txt')
            rv.close()

    def test_static_file(self):
        """SEND_FILE_MAX_AGE_DEFAULT and get_send_file_max_age() control
        the Cache-Control max-age of served files."""
        app = flask.Flask(__name__)
        # default cache timeout is 12 hours
        with app.test_request_context():
            # Test with static file handler.
            rv = app.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 12 * 60 * 60)
            rv.close()
            # Test again with direct use of send_file utility.
            rv = flask.send_file('static/index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 12 * 60 * 60)
            rv.close()
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 3600
        with app.test_request_context():
            # Test with static file handler.
            rv = app.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 3600)
            rv.close()
            # Test again with direct use of send_file utility.
            rv = flask.send_file('static/index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 3600)
            rv.close()
        class StaticFileApp(flask.Flask):
            # Per-file override hook takes precedence over the config key.
            def get_send_file_max_age(self, filename):
                return 10
        app = StaticFileApp(__name__)
        with app.test_request_context():
            # Test with static file handler.
            rv = app.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 10)
            rv.close()
            # Test again with direct use of send_file utility.
            rv = flask.send_file('static/index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 10)
            rv.close()
class LoggingTestCase(FlaskTestCase):
    """Tests for ``app.logger`` behavior plus assorted ``url_for``
    keyword arguments (_anchor, _scheme, _method)."""

    def test_logger_cache(self):
        """The logger object is cached until logger_name is changed."""
        app = flask.Flask(__name__)
        logger1 = app.logger
        self.assert_true(app.logger is logger1)
        self.assert_equal(logger1.name, __name__)
        app.logger_name = __name__ + '/test_logger_cache'
        self.assert_true(app.logger is not logger1)

    def test_debug_log(self):
        """In debug mode log output goes to stderr and view exceptions
        propagate to the test client instead of being swallowed."""
        app = flask.Flask(__name__)
        app.debug = True

        @app.route('/')
        def index():
            app.logger.warning('the standard library is dead')
            app.logger.debug('this is a debug statement')
            return ''

        @app.route('/exc')
        def exc():
            1 // 0

        with app.test_client() as c:
            with catch_stderr() as err:
                c.get('/')
                out = err.getvalue()
                self.assert_in('WARNING in helpers [', out)
                self.assert_in(os.path.basename(__file__.rsplit('.', 1)[0] + '.py'), out)
                self.assert_in('the standard library is dead', out)
                self.assert_in('this is a debug statement', out)

            with catch_stderr() as err:
                try:
                    c.get('/exc')
                except ZeroDivisionError:
                    pass
                else:
                    self.assert_true(False, 'debug log ate the exception')

    def test_debug_log_override(self):
        """An explicitly set log level survives enabling debug mode."""
        app = flask.Flask(__name__)
        app.debug = True
        app.logger_name = 'flask_tests/test_debug_log_override'
        app.logger.level = 10
        self.assert_equal(app.logger.level, 10)

    def test_exception_logging(self):
        """An unhandled view exception is logged with a traceback and a
        500 page is returned to the client."""
        out = StringIO()
        app = flask.Flask(__name__)
        app.logger_name = 'flask_tests/test_exception_logging'
        app.logger.addHandler(StreamHandler(out))

        @app.route('/')
        def index():
            1 // 0

        rv = app.test_client().get('/')
        self.assert_equal(rv.status_code, 500)
        self.assert_in(b'Internal Server Error', rv.data)

        err = out.getvalue()
        self.assert_in('Exception on / [GET]', err)
        self.assert_in('Traceback (most recent call last):', err)
        self.assert_in('1 // 0', err)
        self.assert_in('ZeroDivisionError:', err)

    def test_processor_exceptions(self):
        """Errors raised in before/after-request processors invoke the
        registered 500 error handler."""
        app = flask.Flask(__name__)

        # ``trigger`` is read late by these closures, so each loop
        # iteration below selects which processor raises.
        @app.before_request
        def before_request():
            if trigger == 'before':
                1 // 0

        @app.after_request
        def after_request(response):
            if trigger == 'after':
                1 // 0
            return response

        @app.route('/')
        def index():
            return 'Foo'

        @app.errorhandler(500)
        def internal_server_error(e):
            return 'Hello Server Error', 500

        for trigger in 'before', 'after':
            rv = app.test_client().get('/')
            self.assert_equal(rv.status_code, 500)
            self.assert_equal(rv.data, b'Hello Server Error')

    def test_url_for_with_anchor(self):
        """_anchor is URL-quoted and appended as a fragment."""
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return '42'

        with app.test_request_context():
            self.assert_equal(flask.url_for('index', _anchor='x y'),
                              '/#x%20y')

    def test_url_for_with_scheme(self):
        """_scheme together with _external=True overrides the scheme."""
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return '42'

        with app.test_request_context():
            self.assert_equal(flask.url_for('index',
                                            _external=True,
                                            _scheme='https'),
                              'https://localhost/')

    def test_url_for_with_scheme_not_external(self):
        """_scheme without _external=True raises ValueError."""
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return '42'

        with app.test_request_context():
            self.assert_raises(ValueError,
                               flask.url_for,
                               'index',
                               _scheme='https')

    def test_url_with_method(self):
        """_method disambiguates rules that share one MethodView
        endpoint but differ in HTTP method."""
        from flask.views import MethodView
        app = flask.Flask(__name__)
        class MyView(MethodView):
            def get(self, id=None):
                if id is None:
                    return 'List'
                return 'Get %d' % id
            def post(self):
                return 'Create'
        myview = MyView.as_view('myview')
        app.add_url_rule('/myview/', methods=['GET'],
                         view_func=myview)
        app.add_url_rule('/myview/<int:id>', methods=['GET'],
                         view_func=myview)
        app.add_url_rule('/myview/create', methods=['POST'],
                         view_func=myview)

        with app.test_request_context():
            self.assert_equal(flask.url_for('myview', _method='GET'),
                              '/myview/')
            self.assert_equal(flask.url_for('myview', id=42, _method='GET'),
                              '/myview/42')
            self.assert_equal(flask.url_for('myview', _method='POST'),
                              '/myview/create')
class NoImportsTestCase(FlaskTestCase):
    """Test Flasks are created without import.

    Avoiding ``__import__`` helps create Flask instances where there are errors
    at import time. Those runtime errors will be apparent to the user soon
    enough, but tools which build Flask instances meta-programmatically benefit
    from a Flask which does not ``__import__``. Instead of importing to
    retrieve file paths or metadata on a module or package, use the pkgutil and
    imp modules in the Python standard library.
    """

    def test_name_with_import_error(self):
        """Constructing Flask with a module name must not import it."""
        try:
            # NOTE(review): 'importerror' presumably names a fixture module
            # whose import raises -- confirm against the test package.
            flask.Flask('importerror')
        except NotImplementedError:
            self.fail('Flask(import_name) is importing import_name.')
class StreamingTestCase(FlaskTestCase):
    """Tests for ``stream_with_context`` keeping the request context
    alive while a response generator is consumed."""

    def test_streaming_with_context(self):
        """A wrapped generator can read request data while streaming."""
        app = flask.Flask(__name__)
        app.testing = True
        @app.route('/')
        def index():
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(flask.stream_with_context(generate()))
        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')

    def test_streaming_with_context_as_decorator(self):
        """stream_with_context also works as a generator decorator."""
        app = flask.Flask(__name__)
        app.testing = True
        @app.route('/')
        def index():
            @flask.stream_with_context
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(generate())
        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')

    def test_streaming_with_context_and_custom_close(self):
        """A custom close() on the wrapped iterable is still invoked
        when the response is exhausted."""
        app = flask.Flask(__name__)
        app.testing = True
        called = []
        class Wrapper(object):
            def __init__(self, gen):
                self._gen = gen
            def __iter__(self):
                return self
            def close(self):
                called.append(42)
            def __next__(self):
                return next(self._gen)
            next = __next__  # Python 2 iterator protocol alias.
        @app.route('/')
        def index():
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(flask.stream_with_context(
                Wrapper(generate())))
        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')
        self.assertEqual(called, [42])
def suite():
    """Assemble the helper test suite.

    JSONTestCase is only included when Flask reports JSON support.
    """
    cases = [SendfileTestCase, LoggingTestCase, NoImportsTestCase,
             StreamingTestCase]
    if flask.json_available:
        cases.insert(0, JSONTestCase)
    result = unittest.TestSuite()
    for case in cases:
        result.addTest(unittest.makeSuite(case))
    return result
|
rfuentess/RIOT | refs/heads/master | tests/build_system_cflags_spaces/tests/01-run.py | 22 | #!/usr/bin/env python3
# Copyright (C) 2019 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
"""
Test for passing `CFLAGS` with spaces to the application.
It also tests that even if a `CFLAGS` is set after including Makefile.include,
changing its value will trigger a rebuild.
There is also a way to test passing additional values with spaces to docker
documented in the `README.md`.
"""
import os
import sys
from testrunner import run
# Verify the macro matches the configuration value.
# Read from the environment; a KeyError here means the test was not
# launched with CONFIGURATION_VALUE exported (see the README/Makefile).
CONFIGURATION_VALUE = os.environ['CONFIGURATION_VALUE']
def testfunc(child):
    """Check the exact lines the application prints for each CFLAGS value."""
    expected_lines = [
        'The output of the configuration variables:',
        'SUPER_STRING: I love sentences with spaces',
        'DEFINED_AFTER_MAKEFILE_INCLUDE: %s' % CONFIGURATION_VALUE,
    ]
    for line in expected_lines:
        child.expect_exact(line)
    # This one is not tested here, see the output in 'riotbuild.h'
    child.expect(r'CFLAGS_STRING_FROM_DOCKER: .*')
# Entry point when invoked directly by the RIOT test runner.
if __name__ == "__main__":
    sys.exit(run(testfunc))
|
mathspace/django | refs/heads/master | tests/string_lookup/models.py | 281 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Foo(models.Model):
    """Simple model used as the target of string-based lookup tests."""
    name = models.CharField(max_length=50)
    friend = models.CharField(max_length=50, blank=True)

    def __str__(self):
        return "Foo %s" % self.name
@python_2_unicode_compatible
class Bar(models.Model):
    """Model with three foreign keys, exercising direct, lazy-string
    forward ('Whiz') and lazy-string backward ('Foo') references."""
    name = models.CharField(max_length=50)
    normal = models.ForeignKey(Foo, models.CASCADE, related_name='normal_foo')
    fwd = models.ForeignKey("Whiz", models.CASCADE)
    back = models.ForeignKey("Foo", models.CASCADE)

    def __str__(self):
        # Bug fix: the model has no ``place`` field, so the previous
        # ``self.place.name`` raised AttributeError whenever __str__ was
        # invoked.  Use the model's own name, matching the sibling models.
        return "Bar %s" % self.name
@python_2_unicode_compatible
class Whiz(models.Model):
    """Target of Bar.fwd, referenced by lazy string before definition."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Whiz %s" % self.name
@python_2_unicode_compatible
class Child(models.Model):
    """One-to-one child of Base; 'Base' is a lazy string reference
    because Base is defined after this class."""
    parent = models.OneToOneField('Base', models.CASCADE)
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Child %s" % self.name
@python_2_unicode_compatible
class Base(models.Model):
    """Parent side of the Child one-to-one relation above."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Base %s" % self.name
@python_2_unicode_compatible
class Article(models.Model):
    """Model with text and optional IP-address fields for lookup tests."""
    name = models.CharField(max_length=50)
    text = models.TextField()
    # Optional: both blank (form-level) and null (database-level).
    submitted_from = models.GenericIPAddressField(blank=True, null=True)

    def __str__(self):
        return "Article %s" % self.name
|
ademmers/ansible | refs/heads/devel | test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/connection/persistent.py | 47 | # 2017 Red Hat Inc.
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """author: Ansible Core Team
connection: persistent
short_description: Use a persistent unix socket for connection
description:
- This is a helper plugin to allow making other connections persistent.
options:
persistent_command_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait for a command to return from
the remote device. If this timer is exceeded before the command returns, the
connection plugin will raise an exception and close
default: 10
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
vars:
- name: ansible_command_timeout
"""
from ansible.executor.task_executor import start_connection
from ansible.plugins.connection import ConnectionBase
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection as SocketConnection
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
    """Connection plugin that proxies commands over a persistent local
    unix-domain socket, which is started on demand by :meth:`run`."""

    transport = "ansible.netcommon.persistent"
    has_pipelining = False

    def __init__(self, play_context, new_stdin, *args, **kwargs):
        super(Connection, self).__init__(
            play_context, new_stdin, *args, **kwargs
        )
        # Task UUID forwarded to the background connection process for
        # correlation; empty string when not supplied.
        self._task_uuid = to_text(kwargs.get("task_uuid", ""))

    def _connect(self):
        # No real connection is established here; the persistent socket
        # is created lazily by run().  Just mark ourselves connected.
        self._connected = True
        return self

    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Forward *cmd* over the persistent unix socket.

        Returns a ``(returncode, stdout, stderr)`` tuple; the return code
        is always 0 and stderr is always empty at this layer.
        """
        display.vvvv(
            "exec_command(), socket_path=%s" % self.socket_path,
            host=self._play_context.remote_addr,
        )
        connection = SocketConnection(self.socket_path)
        out = connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
        return 0, out, ""

    def put_file(self, in_path, out_path):
        # Intentionally a no-op for this transport.
        pass

    def fetch_file(self, in_path, out_path):
        # Intentionally a no-op for this transport.
        pass

    def close(self):
        # Only flips the flag; the background socket process is managed
        # elsewhere.
        self._connected = False

    def run(self):
        """Returns the path of the persistent connection socket.

        Attempts to ensure (within playcontext.timeout seconds) that the
        socket path exists. If the path exists (or the timeout has expired),
        returns the socket path.
        """
        display.vvvv(
            "starting connection from persistent connection plugin",
            host=self._play_context.remote_addr,
        )
        variables = {
            "ansible_command_timeout": self.get_option(
                "persistent_command_timeout"
            )
        }
        socket_path = start_connection(
            self._play_context, variables, self._task_uuid
        )
        display.vvvv(
            "local domain socket path is %s" % socket_path,
            host=self._play_context.remote_addr,
        )
        setattr(self, "_socket_path", socket_path)
        return socket_path
|
liangwang/m5 | refs/heads/master | src/mem/slicc/ast/TypeDeclAST.py | 1 | # Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.DeclAST import DeclAST
from slicc.symbols.Type import Type
class TypeDeclAST(DeclAST):
    """AST node for a SLICC type declaration with member fields."""

    def __init__(self, slicc, type_ast, pairs, field_asts):
        super(TypeDeclAST, self).__init__(slicc, pairs)
        self.type_ast = type_ast
        self.field_asts = field_asts

    def __repr__(self):
        return "[TypeDecl: %r]" % self.type_ast

    def files(self, parent=None):
        """Return the set of file names this declaration generates."""
        if "external" in self:
            # External types are declared elsewhere; nothing is emitted.
            return set()

        ident = self.type_ast.ident
        if parent:
            ident = "%s_%s" % (parent, ident)
        return {"%s.hh" % ident, "%s.cc" % ident}

    def generate(self):
        """Create and register the Type symbol, then generate its fields."""
        type_name = str(self.type_ast)

        new_type = Type(self.symtab, type_name, self.location, self.pairs,
                        self.state_machine)
        self.symtab.newSymbol(new_type)

        # Attach every declared field to the freshly created type.
        for field_ast in self.field_asts:
            field_ast.generate(new_type)
|
s40523144/2016fallcp_hw | refs/heads/gh-pages | course/task2.py | 19 | # 我們能夠利用 Python 程式進行簡單的 2D 繪圖嗎?
# 機械設計工程師在那些情況下, 會用到 2D 繪圖?
# 我們需要 2D 靜態繪圖? 還是 2D 動態繪圖?
# 2D 繪圖之於機械設計工程師, 是設? 還是計?
# 我們能用 Python 程式繪出日本國旗嗎? 英國國旗? 美國國旗? 中華民國國旗? 中國國旗? 畫這些國旗要幹什?
# 我們能用 Python 程式畫出平面四連桿機構的動態模擬嗎? 曲柄滑塊呢? 其他的平面多連桿的動態模擬? 模擬機構運動要幹什?
# 可以算出這些平面連桿機構上某一特定點, 在機構運動過程中所通過的座標或掃掠過的輪廓嗎?
|
godfather1103/WeiboRobot | refs/heads/master | python27/1.0/lib/ctypes/test/test_delattr.py | 337 | import unittest
from ctypes import *
# Minimal Structure with one int field; used as the delattr target in
# TestCase.test_struct below.
class X(Structure):
    _fields_ = [("foo", c_int)]
class TestCase(unittest.TestCase):
    """Deleting attributes of ctypes instances must raise TypeError."""

    def test_simple(self):
        with self.assertRaises(TypeError):
            delattr(c_int(42), "value")

    def test_chararray(self):
        with self.assertRaises(TypeError):
            delattr((c_char * 5)(), "value")

    def test_struct(self):
        with self.assertRaises(TypeError):
            delattr(X(), "foo")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
bjornlevi/5thpower | refs/heads/master | nefndaralit/env/lib/python3.6/site-packages/certifi/core.py | 105 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
certifi.py
~~~~~~~~~~
This module returns the installation location of cacert.pem.
"""
import os
import warnings
# Warning category used by old_where() below.
class DeprecatedBundleWarning(DeprecationWarning):
    """
    The weak security bundle is being deprecated. Please bother your service
    provider to get them to stop using cross-signed roots.
    """
def where():
    """Return the path of the ``cacert.pem`` shipped next to this module."""
    return os.path.join(os.path.dirname(__file__), 'cacert.pem')
def old_where():
    """Deprecated alias of :func:`where`; warns, then delegates."""
    message = (
        "The weak security bundle has been removed. certifi.old_where() is now an alias "
        "of certifi.where(). Please update your code to use certifi.where() instead. "
        "certifi.old_where() will be removed in 2018."
    )
    warnings.warn(message, DeprecatedBundleWarning)
    return where()
# Print the bundle location when executed directly.
if __name__ == '__main__':
    print(where())
|
jellysheep/pyload | refs/heads/stable | module/plugins/hoster/MegaFilesSe.py | 15 | # -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster, create_getInfo
class MegaFilesSe(DeadHoster):
    # NOTE(review): inherits DeadHoster, which presumably marks the plugin
    # as permanently offline -- behavior defined in the base class.
    __name__ = "MegaFilesSe"
    __type__ = "hoster"
    __version__ = "0.03"
    __status__ = "testing"

    # Matches a megafiles.se link with a 12-character file id.
    __pattern__ = r'http://(?:www\.)?megafiles\.se/\w{12}'
    __config__ = []  #@TODO: Remove in 0.4.10

    __description__ = """MegaFiles.se hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("t4skforce", "t4skforce1337[AT]gmail[DOT]com")]


# Module-level info function derived from the plugin class.
getInfo = create_getInfo(MegaFilesSe)
|
jayceyxc/hue | refs/heads/master | apps/jobsub/src/jobsub/forms.py | 38 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django import forms
from desktop.lib.django_forms import MultiForm
from jobsub import models
from django.utils.translation import ugettext as _
LOG = logging.getLogger(__name__)
# This aligns with what Oozie accepts as a workflow name
_OOZIE_WORKFLOW_NAME_REGEX = '^([a-zA-Z_]([\-_a-zA-Z0-9])*){1,39}$'
class WorkflowDesignForm(forms.ModelForm):
    """Used for specifying a design (name/description of a workflow)."""

    class Meta:
        model = models.OozieDesign
        # root_action is bound separately via MultiForm; owner comes from
        # the request user.
        exclude = ('root_action', 'owner')

    # Name is restricted to what Oozie accepts as a workflow name.
    name = forms.RegexField(
        label=_('Name'),
        max_length=39,
        regex=_OOZIE_WORKFLOW_NAME_REGEX,
        help_text="Name of the design.",
        error_messages={'invalid': _("Allows letters, digits, '_', and '-'. "
                                     "The first character must be a letter or '_'.")})
class JavaActionForm(forms.ModelForm):
    """Used for specifying a java action"""

    class Meta:
        model = models.OozieJavaAction
        exclude = ('action_type',)
        # NOTE(review): these structured values are presumably serialized
        # client-side and posted back through hidden inputs -- confirm
        # against the template.
        widgets = {
            'job_properties': forms.widgets.HiddenInput(),
            'files': forms.HiddenInput(),
            'archives': forms.HiddenInput()
        }

    name = forms.CharField(label=_('Name'))
    description = forms.CharField(label=_('Description'))
    main_class = forms.CharField(label=_('Main class'))
    jar_path = forms.CharField(label=_('Jar path'), widget=forms.TextInput(attrs={'class': 'pathChooser'}))
    args = forms.CharField(label=_('Args'))
    java_opts = forms.CharField(label=_('Java opts'))
class MapreduceActionForm(forms.ModelForm):
    """Used for specifying a mapreduce action"""

    class Meta:
        model = models.OozieMapreduceAction
        exclude = ('action_type',)
        widgets = {
            'job_properties': forms.widgets.HiddenInput(),
            'files': forms.HiddenInput(),
            'archives': forms.HiddenInput()
        }

    name = forms.CharField(label=_('Name'))
    description = forms.CharField(label=_('Description'))
    jar_path = forms.CharField(label=_('Jar path'), widget=forms.TextInput(attrs={'class': 'pathChooser'}))
    mapper = forms.CharField(label=_('Mapper'))
    reducer = forms.CharField(label=_('Reducer'))
class StreamingActionForm(forms.ModelForm):
    """Used for specifying a streaming action"""

    class Meta:
        model = models.OozieStreamingAction
        exclude = ('action_type',)
        widgets = {
            'job_properties': forms.widgets.HiddenInput(),
            'files': forms.widgets.HiddenInput(),
            'archives': forms.widgets.HiddenInput(),
        }

    name = forms.CharField(label=_('Name'))
    description = forms.CharField(label=_('Description'))
# Maps a root-action type string (as stored on the action model) to the
# form class used to edit that kind of action.
_ACTION_TYPE_TO_FORM_CLS = {
    models.OozieMapreduceAction.ACTION_TYPE: MapreduceActionForm,
    models.OozieStreamingAction.ACTION_TYPE: StreamingActionForm,
    models.OozieJavaAction.ACTION_TYPE: JavaActionForm,
}
def design_form_by_type(action_type):
    """Return an unbound MultiForm for a design with the given root
    action type.

    Raises KeyError for unknown action types.
    """
    action_form_cls = _ACTION_TYPE_TO_FORM_CLS[action_type]
    return MultiForm(wf=WorkflowDesignForm, action=action_form_cls)
def design_form_by_instance(design_obj, data=None):
    """Return a MultiForm bound to an existing design and its root action.

    *data* is the optional POST data to bind; when None the form is
    populated from the instances only.
    """
    root_action = design_obj.get_root_action()
    action_form_cls = _ACTION_TYPE_TO_FORM_CLS[root_action.action_type]

    form = MultiForm(wf=WorkflowDesignForm, action=action_form_cls)
    form.bind(data=data, instances=dict(wf=design_obj, action=root_action))
    return form
|
clobrano/personfinder | refs/heads/master | app/pytz/zoneinfo/Canada/Atlantic.py | 9 | '''tzinfo timezone information for Canada/Atlantic.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Atlantic(DstTzInfo):
    '''Canada/Atlantic timezone definition. See datetime.tzinfo for details'''

    # Olson/IANA database key for this zone.
    zone = 'Canada/Atlantic'

    # UTC instants at which this zone's offset or abbreviation changes.
    # Auto-generated from the Olson tz database -- do not edit by hand.
    _utc_transition_times = [
d(1,1,1,0,0,0),
d(1902,6,15,4,14,24),
d(1916,4,1,4,0,0),
d(1916,10,1,3,0,0),
d(1918,4,14,6,0,0),
d(1918,10,31,5,0,0),
d(1920,5,9,4,0,0),
d(1920,8,29,3,0,0),
d(1921,5,6,4,0,0),
d(1921,9,5,3,0,0),
d(1922,4,30,4,0,0),
d(1922,9,5,3,0,0),
d(1923,5,6,4,0,0),
d(1923,9,4,3,0,0),
d(1924,5,4,4,0,0),
d(1924,9,15,3,0,0),
d(1925,5,3,4,0,0),
d(1925,9,28,3,0,0),
d(1926,5,16,4,0,0),
d(1926,9,13,3,0,0),
d(1927,5,1,4,0,0),
d(1927,9,26,3,0,0),
d(1928,5,13,4,0,0),
d(1928,9,9,3,0,0),
d(1929,5,12,4,0,0),
d(1929,9,3,3,0,0),
d(1930,5,11,4,0,0),
d(1930,9,15,3,0,0),
d(1931,5,10,4,0,0),
d(1931,9,28,3,0,0),
d(1932,5,1,4,0,0),
d(1932,9,26,3,0,0),
d(1933,4,30,4,0,0),
d(1933,10,2,3,0,0),
d(1934,5,20,4,0,0),
d(1934,9,16,3,0,0),
d(1935,6,2,4,0,0),
d(1935,9,30,3,0,0),
d(1936,6,1,4,0,0),
d(1936,9,14,3,0,0),
d(1937,5,2,4,0,0),
d(1937,9,27,3,0,0),
d(1938,5,1,4,0,0),
d(1938,9,26,3,0,0),
d(1939,5,28,4,0,0),
d(1939,9,25,3,0,0),
d(1940,5,5,4,0,0),
d(1940,9,30,3,0,0),
d(1941,5,4,4,0,0),
d(1941,9,29,3,0,0),
d(1942,2,9,6,0,0),
d(1945,8,14,23,0,0),
d(1945,9,30,5,0,0),
d(1946,4,28,6,0,0),
d(1946,9,29,5,0,0),
d(1947,4,27,6,0,0),
d(1947,9,28,5,0,0),
d(1948,4,25,6,0,0),
d(1948,9,26,5,0,0),
d(1949,4,24,6,0,0),
d(1949,9,25,5,0,0),
d(1951,4,29,6,0,0),
d(1951,9,30,5,0,0),
d(1952,4,27,6,0,0),
d(1952,9,28,5,0,0),
d(1953,4,26,6,0,0),
d(1953,9,27,5,0,0),
d(1954,4,25,6,0,0),
d(1954,9,26,5,0,0),
d(1956,4,29,6,0,0),
d(1956,9,30,5,0,0),
d(1957,4,28,6,0,0),
d(1957,9,29,5,0,0),
d(1958,4,27,6,0,0),
d(1958,9,28,5,0,0),
d(1959,4,26,6,0,0),
d(1959,9,27,5,0,0),
d(1962,4,29,6,0,0),
d(1962,10,28,5,0,0),
d(1963,4,28,6,0,0),
d(1963,10,27,5,0,0),
d(1964,4,26,6,0,0),
d(1964,10,25,5,0,0),
d(1965,4,25,6,0,0),
d(1965,10,31,5,0,0),
d(1966,4,24,6,0,0),
d(1966,10,30,5,0,0),
d(1967,4,30,6,0,0),
d(1967,10,29,5,0,0),
d(1968,4,28,6,0,0),
d(1968,10,27,5,0,0),
d(1969,4,27,6,0,0),
d(1969,10,26,5,0,0),
d(1970,4,26,6,0,0),
d(1970,10,25,5,0,0),
d(1971,4,25,6,0,0),
d(1971,10,31,5,0,0),
d(1972,4,30,6,0,0),
d(1972,10,29,5,0,0),
d(1973,4,29,6,0,0),
d(1973,10,28,5,0,0),
d(1974,4,28,6,0,0),
d(1974,10,27,5,0,0),
d(1975,4,27,6,0,0),
d(1975,10,26,5,0,0),
d(1976,4,25,6,0,0),
d(1976,10,31,5,0,0),
d(1977,4,24,6,0,0),
d(1977,10,30,5,0,0),
d(1978,4,30,6,0,0),
d(1978,10,29,5,0,0),
d(1979,4,29,6,0,0),
d(1979,10,28,5,0,0),
d(1980,4,27,6,0,0),
d(1980,10,26,5,0,0),
d(1981,4,26,6,0,0),
d(1981,10,25,5,0,0),
d(1982,4,25,6,0,0),
d(1982,10,31,5,0,0),
d(1983,4,24,6,0,0),
d(1983,10,30,5,0,0),
d(1984,4,29,6,0,0),
d(1984,10,28,5,0,0),
d(1985,4,28,6,0,0),
d(1985,10,27,5,0,0),
d(1986,4,27,6,0,0),
d(1986,10,26,5,0,0),
d(1987,4,5,6,0,0),
d(1987,10,25,5,0,0),
d(1988,4,3,6,0,0),
d(1988,10,30,5,0,0),
d(1989,4,2,6,0,0),
d(1989,10,29,5,0,0),
d(1990,4,1,6,0,0),
d(1990,10,28,5,0,0),
d(1991,4,7,6,0,0),
d(1991,10,27,5,0,0),
d(1992,4,5,6,0,0),
d(1992,10,25,5,0,0),
d(1993,4,4,6,0,0),
d(1993,10,31,5,0,0),
d(1994,4,3,6,0,0),
d(1994,10,30,5,0,0),
d(1995,4,2,6,0,0),
d(1995,10,29,5,0,0),
d(1996,4,7,6,0,0),
d(1996,10,27,5,0,0),
d(1997,4,6,6,0,0),
d(1997,10,26,5,0,0),
d(1998,4,5,6,0,0),
d(1998,10,25,5,0,0),
d(1999,4,4,6,0,0),
d(1999,10,31,5,0,0),
d(2000,4,2,6,0,0),
d(2000,10,29,5,0,0),
d(2001,4,1,6,0,0),
d(2001,10,28,5,0,0),
d(2002,4,7,6,0,0),
d(2002,10,27,5,0,0),
d(2003,4,6,6,0,0),
d(2003,10,26,5,0,0),
d(2004,4,4,6,0,0),
d(2004,10,31,5,0,0),
d(2005,4,3,6,0,0),
d(2005,10,30,5,0,0),
d(2006,4,2,6,0,0),
d(2006,10,29,5,0,0),
d(2007,3,11,6,0,0),
d(2007,11,4,5,0,0),
d(2008,3,9,6,0,0),
d(2008,11,2,5,0,0),
d(2009,3,8,6,0,0),
d(2009,11,1,5,0,0),
d(2010,3,14,6,0,0),
d(2010,11,7,5,0,0),
d(2011,3,13,6,0,0),
d(2011,11,6,5,0,0),
d(2012,3,11,6,0,0),
d(2012,11,4,5,0,0),
d(2013,3,10,6,0,0),
d(2013,11,3,5,0,0),
d(2014,3,9,6,0,0),
d(2014,11,2,5,0,0),
d(2015,3,8,6,0,0),
d(2015,11,1,5,0,0),
d(2016,3,13,6,0,0),
d(2016,11,6,5,0,0),
d(2017,3,12,6,0,0),
d(2017,11,5,5,0,0),
d(2018,3,11,6,0,0),
d(2018,11,4,5,0,0),
d(2019,3,10,6,0,0),
d(2019,11,3,5,0,0),
d(2020,3,8,6,0,0),
d(2020,11,1,5,0,0),
d(2021,3,14,6,0,0),
d(2021,11,7,5,0,0),
d(2022,3,13,6,0,0),
d(2022,11,6,5,0,0),
d(2023,3,12,6,0,0),
d(2023,11,5,5,0,0),
d(2024,3,10,6,0,0),
d(2024,11,3,5,0,0),
d(2025,3,9,6,0,0),
d(2025,11,2,5,0,0),
d(2026,3,8,6,0,0),
d(2026,11,1,5,0,0),
d(2027,3,14,6,0,0),
d(2027,11,7,5,0,0),
d(2028,3,12,6,0,0),
d(2028,11,5,5,0,0),
d(2029,3,11,6,0,0),
d(2029,11,4,5,0,0),
d(2030,3,10,6,0,0),
d(2030,11,3,5,0,0),
d(2031,3,9,6,0,0),
d(2031,11,2,5,0,0),
d(2032,3,14,6,0,0),
d(2032,11,7,5,0,0),
d(2033,3,13,6,0,0),
d(2033,11,6,5,0,0),
d(2034,3,12,6,0,0),
d(2034,11,5,5,0,0),
d(2035,3,11,6,0,0),
d(2035,11,4,5,0,0),
d(2036,3,9,6,0,0),
d(2036,11,2,5,0,0),
d(2037,3,8,6,0,0),
d(2037,11,1,5,0,0),
        ]

    # One (utcoffset seconds, dst seconds, tzname) entry per transition
    # time above, in the same order.
    _transition_info = [
i(-15240,0,'LMT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
# The two entries below pair with the 1942 and 1945 transitions above.
i(-10800,3600,'AWT'),
i(-10800,3600,'APT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
i(-10800,3600,'ADT'),
i(-14400,0,'AST'),
        ]

# Replace the class by its singleton instance, as pytz's zoneinfo
# modules conventionally do.
Atlantic = Atlantic()
|
OliverEvans96/rte_matrix | refs/heads/master | factorizations/SVD_vs_SVDsparse.py | 1 | # File Name: SVD_vs_SVDsparse.py
# Description:
# Author: Christopher Parker
# Created: Sun Apr 30, 2017 | 07:10P EDT
# Last Modified: Sun Apr 30, 2017 | 08:46P EDT
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
# GNU GPL LICENSE #
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
# #
# Copyright Christopher Parker 2017 <cjp65@case.edu> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-
import matplotlib.pyplot as plt

from SVD import SVD
from SVDsparse import SVDsparse

# Matrix files to benchmark.  The larger grids are disabled because the
# dense factorization takes too long (mirrors the commented-out entries
# of the original hand-written lists).
_MAT_DIR = '/Users/christopher/Documents/anaII/rte_matrix/mat/'
_MAT_FILES = [
    'kelp1_10x10x16_012.mat',
    'kelp1_20x20x12_012.mat',
    'kelp1_20x20x24_012.mat',
    'kelp1_20x20x32_012.mat',
    # 'kelp1_50x50x16_012.mat',
    # 'kelp1_50x50x24_012.mat',
    # 'kelp1_50x50x32_012.mat',
]

# Each SVD/SVDsparse call returns a pair whose [0] element is the matrix
# (its length gives the size) and whose [1] element is the elapsed time.
SVD_answers = [SVD(_MAT_DIR + name) for name in _MAT_FILES]
SVD_times = [answer[1] for answer in SVD_answers]

SVDsparse_answers = [SVDsparse(_MAT_DIR + name) for name in _MAT_FILES]
SVDsparse_times = [answer[1] for answer in SVDsparse_answers]

matrix_sizes = [len(answer[0]) for answer in SVDsparse_answers]

# Dense SVD in black, sparse SVD in blue, on a log time axis.
plt.semilogy(matrix_sizes, SVD_times, 'k')
plt.semilogy(matrix_sizes, SVDsparse_times, 'b')
plt.xlabel('Matrix Size')
plt.ylabel('Computation Time (s)')
plt.savefig('SVD_vs_SVDsparse.png')
plt.show()
|
XiaosongWei/chromium-crosswalk | refs/heads/master | build/write_buildflag_header.py | 38 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This writes headers for build flags. See buildflag_header.gni for usage of
# this system as a whole.
#
# The parameters are passed in a response file so we don't have to worry
# about command line lengths. The name of the response file is passed on the
# command line.
#
# The format of the response file is:
# [--flags <list of one or more flag values>]
import optparse
import os
import shlex
import sys
class Options:
    """Plain value object holding the parsed command-line configuration."""

    def __init__(self, output, rulename, header_guard, flags):
        # Path of the header file to generate.
        self.output = output
        # Build rule name; used only in the generated file's comment.
        self.rulename = rulename
        # Include-guard macro name.
        self.header_guard = header_guard
        # List of (flag_name, value_string) pairs.
        self.flags = flags
def GetOptions():
    """Parse the command line and the response file into an Options object."""
    parser = optparse.OptionParser()
    parser.add_option('--output', help="Output header name inside --gen-dir.")
    parser.add_option('--rulename',
                      help="Helpful name of build rule for including in the " +
                           "comment at the top of the file.")
    parser.add_option('--gen-dir',
                      help="Path to root of generated file directory tree.")
    parser.add_option('--definitions',
                      help="Name of the response file containing the flags.")
    cmdline_options, _cmdline_flags = parser.parse_args()

    # The include guard is the upper-cased output path with path
    # separators and dots mapped to underscores, plus a trailing '_'.
    header_guard = cmdline_options.output.upper()
    for char in ('/', '\\', '.'):
        header_guard = header_guard.replace(char, '_')
    header_guard += '_'

    # The actual output file lives inside the gen dir.
    output = os.path.join(cmdline_options.gen_dir, cmdline_options.output)

    # The definitions file is newline-separated under GYP and
    # shell-formatted under GN; shlex parses both.
    with open(cmdline_options.definitions, 'r') as def_file:
        defs = shlex.split(def_file.read())
    flags_index = defs.index('--flags')

    # Everything after --flags is a flag; true/false are remapped to 1/0,
    # anything else passes through untouched.
    flags = []
    for flag in defs[flags_index + 1:]:
        eq = flag.index('=')
        key, value = flag[:eq], flag[eq + 1:]
        if value == 'true':
            value = '1'
        elif value == 'false':
            value = '0'
        flags.append((key, value))

    return Options(output=output,
                   rulename=cmdline_options.rulename,
                   header_guard=header_guard,
                   flags=flags)
def WriteHeader(options):
    """Write the buildflag header described by |options| to disk."""
    with open(options.output, 'w') as out:
        out.write("// Generated by build/write_buildflag_header.py\n")
        # The rule name is optional; include it only when provided.
        if options.rulename:
            out.write('// From "' + options.rulename + '"\n')

        out.write('\n#ifndef %s\n' % options.header_guard)
        out.write('#define %s\n\n' % options.header_guard)
        out.write('#include "build/buildflag.h"\n\n')

        # One BUILDFLAG_INTERNAL_<name>() macro per (name, value) pair.
        for name_value in options.flags:
            out.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % name_value)

        out.write('\n#endif // %s\n' % options.header_guard)
# Entry point: parse the command line / response file and emit the header.
# Guarded so that importing this module (e.g. from tests) has no side
# effects; behavior when run as a script is unchanged.
if __name__ == '__main__':
    options = GetOptions()
    WriteHeader(options)
|
woodbunny/JIT-ASLR-kernel | refs/heads/master | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Usage string, printed when too many arguments are supplied.
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";

# Optional filter: restrict the report to one command name or one pid.
for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        # A numeric argument selects a pid ...
        for_pid = int(sys.argv[1])
    except:
        # ... anything non-numeric is treated as a command name.
        for_comm = sys.argv[1]

# Nested counters: syscalls[comm][pid][syscall_id][return_value] -> count.
# autodict (from perf's Core helpers) creates nested dicts on demand.
syscalls = autodict()
def trace_begin():
    # Called by perf once before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf after the last event; print the accumulated totals.
    print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, ret):
    # Handler for the raw_syscalls:sys_exit tracepoint; the parameter list
    # is dictated by perf's scripting interface.
    # Skip events that do not match the requested comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return

    # Only count failed syscalls (negative return value).
    if ret < 0:
        try:
            syscalls[common_comm][common_pid][id][ret] += 1
        except TypeError:
            # First failure seen for this (comm, pid, id, ret) tuple.
            syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
    # Print the per-comm, per-pid breakdown of failed syscall counts.
    # (Trailing commas on the print statements suppress the extra newline.)
    if for_comm is not None:
        print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall errors:\n\n",

    # Column headers.
    print "%-30s %10s\n" % ("comm [pid]", "count"),
    print "%-30s %10s\n" % ("------------------------------", \
        "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            for id in id_keys:
                print " syscall: %-16s\n" % syscall_name(id),
                ret_keys = syscalls[comm][pid][id].keys()
                # Sort error codes by descending failure count.
                for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
                    print " err = %-20s %10d\n" % (strerror(ret), val),
|
percyfal/bokeh | refs/heads/master | bokeh/util/tests/test_deprecation.py | 14 | import pytest
from mock import patch
import bokeh.util.deprecation as dep
def foo(): pass
def test_bad_arg_type():
    """deprecated() rejects anything that is not a str or version tuple."""
    bad_values = (10, True, foo, [], (), {})
    for bad in bad_values:
        with pytest.raises(ValueError):
            dep.deprecated(bad)
@patch('warnings.warn')
def test_message(mock_warn):
    """A plain string message is forwarded verbatim to warnings.warn."""
    dep.deprecated('test')
    assert mock_warn.called
    args, kwargs = mock_warn.call_args
    assert args == ("test", dep.BokehDeprecationWarning)
    assert kwargs == {'stacklevel': 2}
def test_message_no_extra_args():
    """With a string message, no positional or keyword extras are allowed."""
    with pytest.raises(ValueError):
        dep.deprecated('test', 'foo')
    for kwarg in ('old', 'new', 'extra'):
        with pytest.raises(ValueError):
            dep.deprecated('test', **{kwarg: 'foo'})
def test_since_missing_extra_args():
    """A version tuple requires both 'old' and 'new' to be supplied."""
    incomplete_kwargs = ({}, {'old': 'foo'}, {'new': 'foo'})
    for kwargs in incomplete_kwargs:
        with pytest.raises(ValueError):
            dep.deprecated((1, 2, 3), **kwargs)
def test_since_bad_tuple():
    """The version must be exactly three non-negative integers."""
    bad_versions = ((1,), (1, 2), (1, 2, 3, 4), (1, 2, -4), (1, 2, "3"))
    for version in bad_versions:
        with pytest.raises(ValueError):
            dep.deprecated(version, old="foo", new="bar")
@patch('warnings.warn')
def test_since(mock_warn):
    """A version tuple produces the standard deprecation message."""
    dep.deprecated((1, 2, 3), old="foo", new="bar")
    assert mock_warn.called
    expected = "foo was deprecated in Bokeh 1.2.3 and will be removed, use bar instead."
    assert mock_warn.call_args[0] == (expected, dep.BokehDeprecationWarning)
    assert mock_warn.call_args[1] == {'stacklevel': 2}
@patch('warnings.warn')
def test_since_with_extra(mock_warn):
    """The 'extra' text is appended after the standard message."""
    dep.deprecated((1, 2, 3), old="foo", new="bar", extra="baz")
    assert mock_warn.called
    expected = "foo was deprecated in Bokeh 1.2.3 and will be removed, use bar instead. baz"
    assert mock_warn.call_args[0] == (expected, dep.BokehDeprecationWarning)
    assert mock_warn.call_args[1] == {'stacklevel': 2}
|
LiveTex/Livetex-Tools | refs/heads/master | tools/externs-extractor/extractors/recordsExtractor.py | 2 | import re
from utils import *
from entities.record import Record
def __extractTag(text):
    """
    Extracts the leading jsdoc tag (e.g. '@param') from a record.

    @param {string} text.
    @return {string} tag.
    """
    tag_match = re.match(r'@\w+', text)
    return tag_match.group(0)
def __extractType(text, tag):
    """
    Extracts a '{...}' type expression from a record.

    The tag argument is unused here but kept so all extractors share the
    same (text, tag) call signature.

    @param {string} text.
    @param {string} tag.
    @return {string} Type expression.
    """
    return extractTextBetweenTokens(text, '{')
def __extractName(text, tag):
    """
    Extracts the variable name from a record.

    Tags that never carry a name ('@return', '@inheritDoc') yield None.

    @param {string} text.
    @param {string} tag.
    @return {string} Name.
    """
    if tag in {'@return', '@inheritDoc'}:
        return None
    return text.split(' ')[0]
def __extractDescription(text, tag):
    """
    Extracts the description from a record, with newlines flattened to spaces.

    @param {string} text.
    @param {string} tag.
    @return {string} Description.
    """
    return ' '.join(text.split('\n'))
def extractRecord(text):
    """
    Extracts from code a record object, which contains such information as
    tag, type, name of variable and its description.

    @param {string} text.
    @return {jsCodeParser.record.Record} Record
    """
    tag = __extractTag(text)
    # Advance past the tag itself before extracting the remaining parts.
    position = text.find(tag) + len(tag)
    text = text[position:]
    # Each part pairs an extractor with a slot for the extracted value;
    # extraction order matters: type, then name, then description.
    recordMap = {
        'type': {
            'extractor': __extractType,
            'value': ''
        },
        'name': {
            'extractor': __extractName,
            'value': ''
        },
        'description': {
            'extractor': __extractDescription,
            'value': ''
        }
    }
    while text:
        for key in ['type', 'name', 'description']:
            extractor = recordMap[key]['extractor']
            value = extractor(text, tag)
            if value:
                recordMap[key]['value'] = value
                # Consume the extracted fragment from the remaining input.
                position = text.find(value) + len(value)
                text = text[position:]
        # Strip separating dots/spaces before the next pass.
        text = text.strip('. ')
    typeExpression = recordMap['type']['value']
    name = recordMap['name']['value']
    description = recordMap['description']['value']
    return Record(tag, typeExpression, name, description)
|
Varentsov/servo | refs/heads/master | tests/wpt/web-platform-tests/xhr/resources/inspect-headers.py | 20 | def get_response(raw_headers, filter_value, filter_name):
result = ""
for line in raw_headers.headers:
if line[-2:] != '\r\n':
return "Syntax error: missing CRLF: " + line
line = line[:-2]
if ':' not in line:
return "Syntax error: no colon found: " + line
name, value = line.split(':', 1)
if len(value) > 1 and value[0] == ' ':
value = value[1:]
if filter_value:
if value == filter_value:
result += name + ","
elif name.lower() == filter_name:
result += name + ": " + value + "\n";
return result
def main(request, response):
    """wptserve handler: echo request headers filtered by name or value."""
    headers = []
    if "cors" in request.GET:
        # Permissive CORS headers so cross-origin XHR tests can read the body.
        headers.extend([
            ("Access-Control-Allow-Origin", "*"),
            ("Access-Control-Allow-Credentials", "true"),
            ("Access-Control-Allow-Methods", "GET, POST, PUT, FOO"),
            ("Access-Control-Allow-Headers", "x-test, x-foo"),
            ("Access-Control-Expose-Headers", "x-request-method, x-request-content-type, x-request-query, x-request-content-length"),
        ])
    headers.append(("content-type", "text/plain"))

    filter_value = request.GET.first("filter_value", "")
    filter_name = request.GET.first("filter_name", "").lower()
    body = get_response(request.raw_headers, filter_value, filter_name)
    return headers, body
|
rolandmansilla/microblog | refs/heads/master | flask/lib/python2.7/site-packages/blinker/__init__.py | 57 | from blinker.base import (
ANY,
NamedSignal,
Namespace,
Signal,
WeakNamespace,
receiver_connected,
signal,
)
# Names exported by `from blinker import *` -- the package's public API.
__all__ = [
    'ANY',
    'NamedSignal',
    'Namespace',
    'Signal',
    'WeakNamespace',
    'receiver_connected',
    'signal',
    ]

# Package release version.
__version__ = '1.4'
fabioz/PyDev.Debugger | refs/heads/master | third_party/pep8/lib2to3/lib2to3/fixer_base.py | 305 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Base class for fixers (optional, but recommended)."""
# Python imports
import logging
import itertools
# Local imports
from .patcomp import PatternCompiler
from . import pygram
from .fixer_util import does_tree_import
class BaseFix(object):
    """Optional base class for fixers.

    The subclass name must be FixFooBar where FooBar is the result of
    removing underscores and capitalizing the words of the fix name.
    For example, the class name for a fixer named 'has_key' should be
    FixHasKey.
    """

    PATTERN = None  # Most subclasses should override with a string literal
    pattern = None  # Compiled pattern, set by compile_pattern()
    pattern_tree = None  # Tree representation of the pattern
    options = None  # Options object passed to initializer
    filename = None  # The filename (set by set_filename)
    logger = None  # A logger (set by set_filename)
    numbers = itertools.count(1)  # For new_name()
    used_names = set()  # A set of all used NAMEs
    order = "post"  # Does the fixer prefer pre- or post-order traversal
    explicit = False  # Is this ignored by refactor.py -f all?
    run_order = 5  # Fixers will be sorted by run order before execution
                   # Lower numbers will be run first.
    _accept_type = None  # [Advanced and not public] This tells RefactoringTool
                         # which node type to accept when there's not a pattern.

    keep_line_order = False  # For the bottom matcher: match with the
                             # original line order
    BM_compatible = False  # Compatibility with the bottom matching
                           # module; every fixer should set this
                           # manually

    # Shortcut for access to Python grammar symbols
    syms = pygram.python_symbols

    def __init__(self, options, log):
        """Initializer.  Subclass may override.

        Args:
            options: a dict containing the options passed to RefactoringTool
                that could be used to customize the fixer through the
                command line.
            log: a list to append warnings and other messages to.
        """
        self.options = options
        self.log = log
        self.compile_pattern()

    def compile_pattern(self):
        """Compiles self.PATTERN into self.pattern.

        Subclass may override if it doesn't want to use
        self.{pattern,PATTERN} in .match().
        """
        if self.PATTERN is not None:
            PC = PatternCompiler()
            self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
                with_tree=True)

    def set_filename(self, filename):
        """Set the filename, and a logger derived from it.

        The main refactoring tool should call this.
        """
        self.filename = filename
        self.logger = logging.getLogger(filename)

    def match(self, node):
        """Returns match for a given parse tree node.

        Should return a true or false object (not necessarily a bool).
        It may return a non-empty dict of matching sub-nodes as
        returned by a matching pattern.

        Subclass may override.
        """
        results = {"node": node}
        return self.pattern.match(node, results) and results

    def transform(self, node, results):
        """Returns the transformation for a given parse tree node.

        Args:
            node: the root of the parse tree that matched the fixer.
            results: a dict mapping symbolic names to part of the match.

        Returns:
            None, or a node that is a modified copy of the
            argument node.  The node argument may also be modified in-place
            to effect the same change.

        Subclass *must* override.
        """
        raise NotImplementedError()

    def new_name(self, template=u"xxx_todo_changeme"):
        """Return a string suitable for use as an identifier.

        The new name is guaranteed not to conflict with other identifiers.
        """
        name = template
        # Append an increasing counter until the name is unique.
        while name in self.used_names:
            name = template + unicode(self.numbers.next())
        self.used_names.add(name)
        return name

    def log_message(self, message):
        # Emit a per-file banner before the first message for each file.
        if self.first_log:
            self.first_log = False
            self.log.append("### In file %s ###" % self.filename)
        self.log.append(message)

    def cannot_convert(self, node, reason=None):
        """Warn the user that a given chunk of code is not valid Python 3,
        but that it cannot be converted automatically.

        First argument is the top-level node for the code in question.
        Optional second argument is why it can't be converted.
        """
        lineno = node.get_lineno()
        for_output = node.clone()
        # Clear the prefix (leading whitespace/comments) for clean output.
        for_output.prefix = u""
        msg = "Line %d: could not convert: %s"
        self.log_message(msg % (lineno, for_output))
        if reason:
            self.log_message(reason)

    def warning(self, node, reason):
        """Used for warning the user about possible uncertainty in the
        translation.

        First argument is the top-level node for the code in question.
        Optional second argument is why it can't be converted.
        """
        lineno = node.get_lineno()
        self.log_message("Line %d: %s" % (lineno, reason))

    def start_tree(self, tree, filename):
        """Some fixers need to maintain tree-wide state.
        This method is called once, at the start of tree fix-up.

        tree - the root node of the tree to be processed.
        filename - the name of the file the tree came from.
        """
        self.used_names = tree.used_names
        self.set_filename(filename)
        self.numbers = itertools.count(1)
        self.first_log = True

    def finish_tree(self, tree, filename):
        """Some fixers need to maintain tree-wide state.
        This method is called once, at the conclusion of tree fix-up.

        tree - the root node of the tree to be processed.
        filename - the name of the file the tree came from.
        """
        pass
class ConditionalFix(BaseFix):
    """ Base class for fixers which not execute if an import is found. """

    # Dotted name of the import which, if found, causes this fixer to skip.
    skip_on = None

    def start_tree(self, *args):
        super(ConditionalFix, self).start_tree(*args)
        # Lazily computed per-tree; None means "not checked yet".
        self._should_skip = None

    def should_skip(self, node):
        # Compute the answer once per tree, then reuse the cached value.
        if self._should_skip is None:
            package, _, name = self.skip_on.rpartition(".")
            self._should_skip = does_tree_import(package, name, node)
        return self._should_skip
|
asedunov/intellij-community | refs/heads/master | python/testData/formatter/unaryMinus_after.py | 3989 | -1
|
steven-albanese/FAHMunge | refs/heads/master | setup.py | 2 | """Some tools for munging FAH trajectories
"""
from __future__ import print_function
DOCLINES = __doc__.split("\n")
import os
import sys
import shutil
import tempfile
import subprocess
from distutils.ccompiler import new_compiler
try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension
##########################
VERSION = "0.1.0"
ISRELEASED = False
__version__ = VERSION
##########################
CLASSIFIERS = """\
Development Status :: 3 - Alpha
Intended Audience :: Science/Research
Intended Audience :: Developers
License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)
Programming Language :: C
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Scientific/Engineering :: Bio-Informatics
Topic :: Scientific/Engineering :: Chemistry
Operating System :: Microsoft :: Windows
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS
"""
def find_packages():
    """Find all of fahmunge's python packages.

    Walks the ``fahmunge`` source tree and returns the dotted names of
    every directory that contains an ``__init__.py``.

    Adapted from IPython's setupbase.py. Copyright IPython
    contributors, licensed under the BSD license.
    """
    packages = []
    # 'directory' instead of 'dir' avoids shadowing the builtin; the
    # original no-op .replace('fahmunge', 'fahmunge') has been dropped.
    for directory, _subdirs, files in os.walk('fahmunge'):
        if '__init__.py' not in files:
            # not a package
            continue
        packages.append(directory.replace(os.path.sep, '.'))
    return packages
################################################################################
# Writing version control information to the module
################################################################################
def git_version():
    """Return the current git revision as a string, or 'Unknown' on failure."""
    # copied from numpy setup.py
    def _minimal_ext_cmd(cmd):
        # Run cmd with a minimal, locale-neutral environment so the
        # output is deterministic.
        env = {}
        for var in ['SYSTEMROOT', 'PATH']:
            value = os.environ.get(var)
            if value is not None:
                env[var] = value
        # LANGUAGE is used on win32
        env['LANGUAGE'] = 'C'
        env['LANG'] = 'C'
        env['LC_ALL'] = 'C'
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
        return proc.communicate()[0]

    try:
        raw = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
        return raw.strip().decode('ascii')
    except OSError:
        # git not installed / not runnable.
        return 'Unknown'
def write_version_py(filename='FAHMunge/version.py'):
    """Generate *filename* with version/VCS information baked in.

    The generated module exposes short_version, version, full_version,
    git_revision and release (numpy's versioning scheme), so the installed
    package can report exactly which revision it was built from.

    Relies on the module-level VERSION / ISRELEASED constants and
    git_version() defined above.
    """
    cnt = """
# THIS FILE IS GENERATED FROM fahmunge SETUP.PY
short_version = '%(version)s'
version = '%(version)s'
full_version = '%(full_version)s'
git_revision = '%(git_revision)s'
release = %(isrelease)s
if not release:
    version = full_version
"""
    # Adding the git rev number needs to be done inside write_version_py(),
    # otherwise the import of numpy.version messes up the build under Python 3.
    FULLVERSION = VERSION
    if os.path.exists('.git'):
        GIT_REVISION = git_version()
    else:
        # Not a git checkout (e.g. building from an sdist tarball).
        GIT_REVISION = 'Unknown'

    if not ISRELEASED:
        # Development builds carry the abbreviated commit hash.
        FULLVERSION += '.dev-' + GIT_REVISION[:7]

    # `with` guarantees the handle is closed even if formatting fails
    # (idiomatic replacement for the original try/finally pair).
    with open(filename, 'w') as fh:
        fh.write(cnt % {'version': VERSION,
                        'full_version': FULLVERSION,
                        'git_revision': GIT_REVISION,
                        'isrelease': str(ISRELEASED)})
# Extra keyword arguments for setup(); kept for symmetry with numpy-style
# build scripts even though nothing is added here.
setup_kwargs = {}

# Regenerate FAHMunge/version.py first so the metadata below and the
# installed package agree on the version string.
write_version_py()

setup(name='fahmunge',
      author='Kyle A. Beauchamp',
      author_email='kyleabeauchamp@gmail.com',
      zip_safe=False,
      # NOTE(review): DOCLINES is not defined anywhere in this part of the
      # file -- presumably the module docstring split into lines further
      # up; confirm it exists or setup() will raise NameError.
      description=DOCLINES[0],
      long_description="\n".join(DOCLINES[2:]),
      version=__version__,
      license='LGPLv2.1+',
      download_url = "https://github.com/FoldingAtHome/FAHMunge/releases/latest",
      platforms=['Linux'],
      # Note the mixed-case checkout directory: sources live in FAHMunge/
      # but install as the lowercase 'fahmunge' package.
      classifiers=CLASSIFIERS.splitlines(),
      packages=["fahmunge"],
      package_dir={'fahmunge': 'FAHMunge'},
      **setup_kwargs)
|
ktan2020/legacy-automation | refs/heads/master | win/Lib/distutils/emxccompiler.py | 59 | """distutils.emxccompiler
Provides the EMXCCompiler class, a subclass of UnixCCompiler that
handles the EMX port of the GNU C compiler to OS/2.
"""
# issues:
#
# * OS/2 insists that DLLs can have names no longer than 8 characters
# We put export_symbols in a def-file, as though the DLL can have
# an arbitrary length name, but truncate the output filename.
#
# * only use OMF objects and use LINK386 as the linker (-Zomf)
#
# * always build for multithreading (-Zmt) as the accompanying OS/2 port
# of Python is only distributed with threads enabled.
#
# tested configurations:
#
# * EMX gcc 2.81/EMX 0.9d fix03
__revision__ = "$Id$"
import os,sys,copy
from distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.unixccompiler import UnixCCompiler
from distutils.file_util import write_file
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
from distutils import log
class EMXCCompiler (UnixCCompiler):
    """CCompiler subclass driving the EMX port of GCC on OS/2.

    Differences from plain UnixCCompiler: OMF object format (-Zomf),
    multithreaded runtime (-Zmt), export symbols routed through a .def
    file, and support for compiling '.rc' resource files.
    (Python 2 source: the ``except X, msg`` / ``raise X, msg`` statement
    forms below are intentional and must not be modernised in isolation.)
    """

    compiler_type = 'emx'
    obj_extension = ".obj"
    static_lib_extension = ".lib"
    shared_lib_extension = ".dll"
    static_lib_format = "%s%s"
    shared_lib_format = "%s%s"
    res_extension = ".res"      # compiled resource file
    exe_extension = ".exe"

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):

        UnixCCompiler.__init__ (self, verbose, dry_run, force)

        # Warn (but continue) when pyconfig.h does not look GCC-compatible;
        # extension builds may then fail on undefined macros.
        (status, details) = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. " +
                ("Reason: %s." % details) +
                "Compiling may fail because of undefined preprocessor macros.")

        (self.gcc_version, self.ld_version) = \
            get_versions()
        self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
                         (self.gcc_version,
                          self.ld_version) )

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             linker_exe='gcc -Zomf -Zmt -Zcrtdll',
                             linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')

        # want the gcc library statically linked (so that we don't have
        # to distribute a version dependent on the compiler we have)
        self.dll_libraries=["gcc"]

    # __init__ ()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        # Compile one source file: '.rc' resources go through the 'rc'
        # tool, everything else through the C compiler.
        if ext == '.rc':
            # gcc requires '.rc' compiled to binary ('.res') files !!!
            try:
                self.spawn(["rc", "-r", src])
            except DistutilsExecError, msg:
                raise CompileError, msg
        else: # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        # Link step: write export symbols into an OS/2 .def file (passed to
        # gcc/ld like an ordinary object) and then delegate to
        # UnixCCompiler.link().

        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE)):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")

            # Generate .def file
            contents = [
                "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
                os.path.splitext(os.path.basename(output_filename))[0],
                "DATA MULTIPLE NONSHARED",
                "EXPORTS"]
            for sym in export_symbols:
                contents.append(' "%s"' % sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries
            # for gcc/ld the def-file is specified as any other object files
            objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KB < stripped_file < ??100KB
        #   unstripped_file = stripped_file + XXX KB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self,
                           target_desc,
                           objects,
                           output_filename,
                           output_dir,
                           libraries,
                           library_dirs,
                           runtime_library_dirs,
                           None, # export_symbols, we do this in our def-file
                           debug,
                           extra_preargs,
                           extra_postargs,
                           build_temp,
                           target_lang)

    # link ()

    # -- Miscellaneous methods -----------------------------------------

    # override the object_filenames method from CCompiler to
    # support rc and res-files
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()

    # override the find_library_file method from UnixCCompiler
    # to deal with file naming/searching differences
    def find_library_file(self, dirs, lib, debug=0):
        # EMX uses '<lib>.lib' (and, rarely, 'lib<lib>.lib'); search the
        # caller's dirs first, then EMX's LIBRARY_PATH.
        shortlib = '%s.lib' % lib
        longlib = 'lib%s.lib' % lib    # this form very rare

        # get EMX's default library directory search path
        try:
            emx_dirs = os.environ['LIBRARY_PATH'].split(';')
        except KeyError:
            emx_dirs = []

        for dir in dirs + emx_dirs:
            shortlibp = os.path.join(dir, shortlib)
            longlibp = os.path.join(dir, longlib)
            if os.path.exists(shortlibp):
                return shortlibp
            elif os.path.exists(longlibp):
                return longlibp

        # Oops, didn't find it in *any* of 'dirs'
        return None

# class EMXCCompiler
# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using a unmodified
# version.
# Status constants returned by check_config_h() below.
CONFIG_H_OK = "ok"                # pyconfig.h looks GCC-compatible
CONFIG_H_NOTOK = "not ok"         # pyconfig.h does not mention __GNUC__
CONFIG_H_UNCERTAIN = "uncertain"  # pyconfig.h could not be read
def check_config_h():
    """Check if the current Python installation (specifically, pyconfig.h)
    appears amenable to building extensions with GCC.

    Returns a tuple (status, details), where 'status' is one of the
    following constants:

      CONFIG_H_OK        all is well, go ahead and compile
      CONFIG_H_NOTOK     doesn't look good
      CONFIG_H_UNCERTAIN not sure -- unable to read pyconfig.h

    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """

    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig
    import string
    # if sys.version contains GCC then python was compiled with
    # GCC, and the pyconfig.h file should be OK
    if string.find(sys.version,"GCC") >= 0:
        return (CONFIG_H_OK, "sys.version mentions 'GCC'")

    fn = sysconfig.get_config_h_filename()
    try:
        # It would probably better to read single lines to search.
        # But we do this only once, and it is fast enough
        f = open(fn)
        try:
            s = f.read()
        finally:
            f.close()

    except IOError, exc:
        # if we can't read this file, we cannot say it is wrong
        # the compiler will complain later about this file as missing
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))

    else:
        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
        if string.find(s,"__GNUC__") >= 0:
            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
        else:
            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
def get_versions():
    """ Try to find out the versions of gcc and ld.
        If not possible it returns None for it.
    """
    from distutils.version import StrictVersion
    from distutils.spawn import find_executable
    import re

    gcc_version = None
    gcc_exe = find_executable('gcc')
    if gcc_exe:
        # Ask gcc itself; -dumpversion prints e.g. "2.8.1".
        stream = os.popen(gcc_exe + ' -dumpversion','r')
        try:
            banner = stream.read()
        finally:
            stream.close()
        match = re.search('(\d+\.\d+\.\d+)', banner)
        if match:
            gcc_version = StrictVersion(match.group(1))
    # EMX ld has no way of reporting version number, and we use GCC
    # anyway - so we can link OMF DLLs
    ld_version = None
    return (gcc_version, ld_version)
|
skipmodea1/plugin.video.xbmctorrent | refs/heads/master | resources/site-packages/html5lib/treewalkers/etree.py | 658 | from __future__ import absolute_import, division, unicode_literals
try:
from collections import OrderedDict
except ImportError:
try:
from ordereddict import OrderedDict
except ImportError:
OrderedDict = dict
import gettext
_ = gettext.gettext
import re
from six import text_type
from . import _base
from ..utils import moduleFactoryFactory
# Splits an ElementTree "Clark notation" tag "{namespace}localname" into
# its namespace and local-name groups.
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation):
    """Build a TreeWalker bound to the given ElementTree implementation
    and return locals() for consumption by moduleFactoryFactory."""
    ElementTree = ElementTreeImplementation
    # Sentinel tag value ElementTree uses for comment nodes (it is the
    # Comment factory function itself, not a string).
    ElementTreeCommentType = ElementTree.Comment("asd").tag

    class TreeWalker(_base.NonRecursiveTreeWalker):
        """Given the particular ElementTree representation, this implementation,
        to avoid using recursion, returns "nodes" as tuples with the following
        content:

        1. The current element

        2. The index of the element relative to its parent

        3. A stack of ancestor elements

        4. A flag "text", "tail" or None to indicate if the current node is a
           text node; either the text or tail of the current element (1)
        """
        def getNodeDetails(self, node):
            # Classify a walker node into the (_base.TYPE, ...) tuples the
            # base class expects.
            if isinstance(node, tuple):  # It might be the root Element
                elt, key, parents, flag = node
                if flag in ("text", "tail"):
                    return _base.TEXT, getattr(elt, flag)
                else:
                    node = elt

            if not(hasattr(node, "tag")):
                # An ElementTree (not Element) was passed in; unwrap it.
                node = node.getroot()

            if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
                return (_base.DOCUMENT,)

            elif node.tag == "<!DOCTYPE>":
                return (_base.DOCTYPE, node.text,
                        node.get("publicId"), node.get("systemId"))

            elif node.tag == ElementTreeCommentType:
                return _base.COMMENT, node.text

            else:
                assert type(node.tag) == text_type, type(node.tag)
                # This is assumed to be an ordinary element
                match = tag_regexp.match(node.tag)
                if match:
                    namespace, tag = match.groups()
                else:
                    namespace = None
                    tag = node.tag
                # Attributes may also carry {namespace}name keys.
                attrs = OrderedDict()
                for name, value in list(node.attrib.items()):
                    match = tag_regexp.match(name)
                    if match:
                        attrs[(match.group(1), match.group(2))] = value
                    else:
                        attrs[(None, name)] = value
                return (_base.ELEMENT, namespace, tag,
                        attrs, len(node) or node.text)

        def getFirstChild(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                element, key, parents, flag = node, None, [], None

            if flag in ("text", "tail"):
                # Text pseudo-nodes never have children.
                return None
            else:
                if element.text:
                    # Leading text comes before any child element.
                    return element, key, parents, "text"
                elif len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None

        def getNextSibling(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                # After the "text" pseudo-node comes the first child element.
                if len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None
            else:
                # An element is followed by its own tail text, then by the
                # parent's next child.
                if element.tail and flag != "tail":
                    return element, key, parents, "tail"
                elif key < len(parents[-1]) - 1:
                    return parents[-1][key + 1], key + 1, parents, None
                else:
                    return None

        def getParentNode(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                # The owner element of a "text" pseudo-node is its parent.
                if not parents:
                    return element
                else:
                    return element, key, parents, None
            else:
                parent = parents.pop()
                if not parents:
                    return parent
                else:
                    return parent, list(parents[-1]).index(parent), parents, None

    return locals()
# Memoising factory: getETreeModule(impl) returns a module-like object with
# the TreeWalker above bound to the given ElementTree implementation.
getETreeModule = moduleFactoryFactory(getETreeBuilder)
|
kirbyfan64/hy | refs/heads/master | hy/models/cons.py | 7 | # Copyright (c) 2013 Nicolas Dandrimont <nicolas.dandrimont@crans.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.macros import _wrap_value
from hy.models import HyObject
from hy.models.expression import HyExpression
from hy.models.symbol import HySymbol
class HyCons(HyObject):
    """
    HyCons: a cons object.

    Building a HyCons of something and a HyList really builds a HyList
    """

    __slots__ = ["car", "cdr"]

    def __new__(cls, car, cdr):
        # Consing onto a list usually collapses into that list type; an
        # actual dotted-pair instance is only created for non-list,
        # non-None cdrs -- or for expressions headed by an unquote form.
        if isinstance(cdr, list):

            # Keep unquotes in the cdr of conses
            if type(cdr) == HyExpression:
                if len(cdr) > 0 and type(cdr[0]) == HySymbol:
                    if cdr[0] in ("unquote", "unquote_splice"):
                        return super(HyCons, cls).__new__(cls)

            return cdr.__class__([_wrap_value(car)] + cdr)

        elif cdr is None:
            # (cons x nil) is the single-element list (x).
            return HyExpression([_wrap_value(car)])

        else:
            return super(HyCons, cls).__new__(cls)

    def __init__(self, car, cdr):
        # Both halves are wrapped into Hy model objects.
        self.car = _wrap_value(car)
        self.cdr = _wrap_value(cdr)

    def __getitem__(self, n):
        # Only car ([0]) and cdr ([1:]) are addressable.
        if n == 0:
            return self.car
        if n == slice(1, None):
            return self.cdr

        raise IndexError(
            "Can only get the car ([0]) or the cdr ([1:]) of a HyCons")

    def __setitem__(self, n, new):
        if n == 0:
            self.car = new
            return
        if n == slice(1, None):
            self.cdr = new
            return

        raise IndexError(
            "Can only set the car ([0]) or the cdr ([1:]) of a HyCons")

    def __iter__(self):
        # Yields car, then the elements of cdr.  An improper (dotted) list
        # still yields its final cdr but then raises to flag the malformed
        # structure.  Note: creating the genexp calls iter(self.cdr)
        # eagerly, which is what raises TypeError for non-iterable cdrs.
        yield self.car
        try:
            iterator = (i for i in self.cdr)
        except TypeError:
            if self.cdr is not None:
                yield self.cdr
                raise TypeError("Iteration on malformed cons")
        else:
            for i in iterator:
                yield i

    def replace(self, other):
        # Propagate source-position information into both halves.
        if self.car is not None:
            self.car.replace(other)
        if self.cdr is not None:
            self.cdr.replace(other)

        HyObject.replace(self, other)

    def __repr__(self):
        # Nested conses render flattened: (a b . c) instead of (a . (b . c)).
        if isinstance(self.cdr, self.__class__):
            return "(%s %s)" % (repr(self.car), repr(self.cdr)[1:-1])
        else:
            return "(%s . %s)" % (repr(self.car), repr(self.cdr))

    def __eq__(self, other):
        # NOTE(review): __eq__ without __hash__ makes instances unhashable
        # on Python 3 -- presumably fine for this Python 2-era code; verify.
        return (
            isinstance(other, self.__class__) and
            self.car == other.car and
            self.cdr == other.cdr
        )
|
crossroadchurch/paul | refs/heads/master | tests/functional/openlp_plugins/images/test_imagetab.py | 1 | # -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2015 OpenLP Developers #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
This module contains tests for the lib submodule of the Images plugin.
"""
from unittest import TestCase
from PyQt4 import QtGui
from openlp.core.common import Settings
from openlp.core.common import Registry
from openlp.plugins.images.lib.db import ImageFilenames, ImageGroups
from openlp.plugins.images.lib.mediaitem import ImageMediaItem
from openlp.plugins.images.lib import ImageTab
from tests.functional import MagicMock, patch
from tests.helpers.testmixin import TestMixin
# Baseline plugin settings registered with Settings in setUp() so ImageTab
# can read them during the tests.
__default_settings__ = {
    'images/db type': 'sqlite',
    'images/background color': '#000000',
}
class TestImageMediaItem(TestCase, TestMixin):
    """
    This is a test case to test various methods in the ImageTab.
    """
    # NOTE(review): despite the class name, these tests exercise ImageTab,
    # not ImageMediaItem -- looks copy/pasted; confirm before renaming.
    def setUp(self):
        """
        Create the UI
        """
        Registry.create()
        Registry().register('settings_form', MagicMock())
        self.setup_application()
        self.build_settings()
        Settings().extend_default_settings(__default_settings__)
        self.parent = QtGui.QMainWindow()
        self.form = ImageTab(self.parent, 'Images', None, None)
        # Replace the real post-processing hook so the tests can count calls.
        self.form.settings_form.register_post_process = MagicMock()

    def tearDown(self):
        """
        Delete all the C++ objects at the end so that we don't have a segfault
        """
        del self.parent
        del self.form
        self.destroy_settings()

    # NOTE(review): the '_test_test' double suffix on the two methods below
    # looks like a typo for '_test'; verify against the project's test
    # discovery pattern before renaming.
    def save_tab_nochange_test_test(self):
        """
        Test no changes does not trigger post processing
        """
        # GIVEN: No changes on the form.
        # NOTE(review): initial_color is never read by this file's code --
        # presumably leftover setup; confirm against ImageTab.save().
        self.initial_color = '#999999'
        # WHEN: the save is invoked
        self.form.save()
        # THEN: the post process should not be requested
        self.assertEqual(0, self.form.settings_form.register_post_process.call_count,
                         'Image Post processing should not have been requested')

    def save_tab_change_test_test(self):
        """
        Test a change triggers post processing.
        """
        # GIVEN: Apply a change to the form.
        self.form.background_color = '#999999'
        # WHEN: the save is invoked
        self.form.save()
        # THEN: the post process should be requested
        self.assertEqual(1, self.form.settings_form.register_post_process.call_count,
                         'Image Post processing should have been requested')
|
manasi24/jiocloud-tempest-qatempest | refs/heads/master | tempest/api/volume/admin/test_volumes_backup.py | 11 | # Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest_lib.common.utils import data_utils
from tempest.api.volume import base
from tempest import config
from tempest import test
# Module-level handles for tempest configuration and logging.
CONF = config.CONF
LOG = logging.getLogger(__name__)
class VolumesBackupsV2Test(base.BaseVolumeAdminTest):
    """Admin-scoped integration tests for the Cinder backup API (v2)."""

    @classmethod
    def skip_checks(cls):
        # Skip the whole class when the deployment has no backup service.
        super(VolumesBackupsV2Test, cls).skip_checks()
        if not CONF.volume_feature_enabled.backup:
            raise cls.skipException("Cinder backup feature disabled")

    @classmethod
    def resource_setup(cls):
        # One shared volume is enough for every test in this class.
        super(VolumesBackupsV2Test, cls).resource_setup()

        cls.volume = cls.create_volume()

    @test.idempotent_id('a66eb488-8ee1-47d4-8e9f-575a095728c6')
    def test_volume_backup_create_get_detailed_list_restore_delete(self):
        # End-to-end backup lifecycle: create, show, list, restore, with
        # cleanup of both the backup and the restored volume.

        # Create backup
        backup_name = data_utils.rand_name('Backup')
        create_backup = self.backups_adm_client.create_backup
        backup = create_backup(self.volume['id'],
                               name=backup_name)
        self.addCleanup(self.backups_adm_client.delete_backup,
                        backup['id'])
        self.assertEqual(backup_name, backup['name'])
        # Creating a backup briefly takes the source volume out of
        # 'available'; wait for both resources to settle.
        self.admin_volume_client.wait_for_volume_status(
            self.volume['id'], 'available')
        self.backups_adm_client.wait_for_backup_status(backup['id'],
                                                       'available')

        # Get a given backup
        backup = self.backups_adm_client.show_backup(backup['id'])
        self.assertEqual(backup_name, backup['name'])

        # Get all backups with detail
        backups = self.backups_adm_client.list_backups(detail=True)
        self.assertIn((backup['name'], backup['id']),
                      [(m['name'], m['id']) for m in backups])

        # Restore backup
        restore = self.backups_adm_client.restore_backup(backup['id'])

        # Delete backup
        self.addCleanup(self.admin_volume_client.delete_volume,
                        restore['volume_id'])
        self.assertEqual(backup['id'], restore['backup_id'])
        self.backups_adm_client.wait_for_backup_status(backup['id'],
                                                       'available')
        self.admin_volume_client.wait_for_volume_status(
            restore['volume_id'], 'available')
class VolumesBackupsV1Test(VolumesBackupsV2Test):
    # Re-run the identical scenario against the v1 volume API.
    _api_version = 1
|
caotianwei/django | refs/heads/master | tests/admin_inlines/urls.py | 810 | from django.conf.urls import url
from . import admin
# Route everything under /admin/ to this test package's admin site.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
]
|
vitmod/enigma2-1 | refs/heads/master | lib/python/Plugins/SystemPlugins/FastScan/plugin.py | 59 | # -*- coding: utf-8 -*-
from os import path as os_path, walk as os_walk, unlink as os_unlink
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.config import config, ConfigSelection, ConfigYesNo, getConfigListEntry, ConfigSubsection, ConfigText
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.ProgressBar import ProgressBar
from Components.ServiceList import refreshServiceList
from Components.ActionMap import ActionMap
from enigma import eFastScan, eDVBFrontendParametersSatellite, eTimer
# Persistent FastScan settings.  last_configuration holds the repr() of a
# (tuner, provider, hd, keepnumbering, keepsettings) tuple that is eval()'d
# back on the next start; 'auto' presumably enables unattended rescans
# (handled elsewhere in this plugin -- confirm).
config.misc.fastscan = ConfigSubsection()
config.misc.fastscan.last_configuration = ConfigText(default = "()")
config.misc.fastscan.auto = ConfigYesNo(default = False)
class FastScanStatus(Screen):
    """Progress screen that runs a fastscan and displays its state."""
    skin = """
    <screen position="150,115" size="420,180" title="Fast Scan">
        <widget name="frontend" pixmap="icons/scan-s.png" position="5,5" size="64,64" transparent="1" alphatest="on" />
        <widget name="scan_state" position="10,120" zPosition="2" size="400,30" font="Regular;18" />
        <widget name="scan_progress" position="10,155" size="400,15" pixmap="progress_big.png" borderWidth="2" borderColor="#cccccc" />
    </screen>"""

    def __init__(self, session, scanTuner=0, transponderParameters=None, scanPid=900, keepNumbers=False, keepSettings=False, providerName='Favorites'):
        Screen.__init__(self, session)
        self.setTitle(_("Fast Scan"))

        self.scanPid = scanPid
        self.scanTuner = scanTuner
        self.transponderParameters = transponderParameters
        self.keepNumbers = keepNumbers
        self.keepSettings = keepSettings
        self.providerName = providerName
        self.isDone = False

        self.onClose.append(self.__onClose)

        self["frontend"] = Pixmap()
        self["scan_progress"] = ProgressBar()
        self["scan_state"] = Label(_("scan state"))

        # Stop PiP and live playback: the scan needs exclusive tuner access.
        if self.session.pipshown:
            from Screens.InfoBar import InfoBar
            InfoBar.instance and hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP()

        self.prevservice = self.session.nav.getCurrentlyPlayingServiceReference()
        self.session.nav.stopService()

        self["actions"] = ActionMap(["OkCancelActions"],
        {
            "ok": self.ok,
            "cancel": self.cancel
        })

        self.onFirstExecBegin.append(self.doServiceScan)

    def __onClose(self):
        # Detach the C++ signal callbacks before dropping the scan object.
        self.scan.scanCompleted.get().remove(self.scanCompleted)
        self.scan.scanProgress.get().remove(self.scanProgress)
        del self.scan

    def doServiceScan(self):
        # Start the scan: prefer previously dumped FST/FNT table files in
        # /tmp (consuming and deleting them), otherwise scan over the air.
        self["scan_state"].setText(_('Scanning %s...') % (self.providerName))
        self["scan_progress"].setValue(0)
        self.scan = eFastScan(self.scanPid, self.providerName, self.transponderParameters, self.keepNumbers, self.keepSettings)
        self.scan.scanCompleted.get().append(self.scanCompleted)
        self.scan.scanProgress.get().append(self.scanProgress)
        fstfile = None
        fntfile = None
        for root, dirs, files in os_walk('/tmp/'):
            for f in files:
                if f.endswith('.bin'):
                    if '_FST' in f:
                        fstfile = os_path.join(root, f)
                    elif '_FNT' in f:
                        fntfile = os_path.join(root, f)
        if fstfile and fntfile:
            self.scan.startFile(fntfile, fstfile)
            os_unlink(fstfile)
            os_unlink(fntfile)
        else:
            self.scan.start(self.scanTuner)

    def scanProgress(self, progress):
        self["scan_progress"].setValue(progress)

    def scanCompleted(self, result):
        # result < 0 signals failure; otherwise it is the channel count.
        self.isDone = True
        if result < 0:
            self["scan_state"].setText(_('Scanning failed!'))
        else:
            self["scan_state"].setText(ngettext('List version %d, found %d channel', 'List version %d, found %d channels', result) % (self.scan.getVersion(), result))

    def restoreService(self):
        # Resume whatever was playing before the scan started.
        if self.prevservice:
            self.session.nav.playService(self.prevservice)

    def ok(self):
        if self.isDone:
            self.cancel()

    def cancel(self):
        if self.isDone:
            refreshServiceList()
        self.restoreService()
        self.close()
class FastScanScreen(ConfigListScreen, Screen):
    """Setup screen: pick tuner/provider/options, then launch FastScanStatus."""
    skin = """
    <screen position="100,115" size="520,290" title="Fast Scan">
        <widget name="config" position="10,10" size="500,250" scrollbarMode="showOnDemand" />
        <widget name="introduction" position="10,265" size="500,25" font="Regular;20" halign="center" />
    </screen>"""

    # (display name, (index into self.transponders, base scan pid,
    #                 provider-has-separate-HD-list flag -> pid+1 selects it))
    providers = [
        ('Canal Digitaal', (1, 900, True)),
        ('TV Vlaanderen', (1, 910, True)),
        ('TéléSAT', (0, 920, True)),
        ('HD Austria', (0, 950, False)),
        ('Skylink Czech Republic', (1, 30, False)),
        ('Skylink Slovak Republic', (1, 31, False)),
        ('AustriaSat Magyarország Eutelsat 9E', (2, 951, False)),
        ('AustriaSat Magyarország Astra 3', (1, 951, False)),
        ('TéléSAT Astra3', (1, 920, True)),
        ('HD Austria Astra3', (1, 950, False)),
        ('Canal Digitaal Astra 1', (0, 900, True)),
        ('TV Vlaanderen Astra 1', (0, 910, True))]

    # Home transponders carrying the fastscan tables, as positional tuples:
    # (frequency, symbol_rate, fec, orbital_position, polarisation,
    #  inversion, system, modulation, rolloff, pilot) -- consumed by
    # getTransponderParameters() below.
    transponders = ((12515000, 22000000, eDVBFrontendParametersSatellite.FEC_5_6, 192,
        eDVBFrontendParametersSatellite.Polarisation_Horizontal, eDVBFrontendParametersSatellite.Inversion_Unknown,
        eDVBFrontendParametersSatellite.System_DVB_S, eDVBFrontendParametersSatellite.Modulation_QPSK,
        eDVBFrontendParametersSatellite.RollOff_alpha_0_35, eDVBFrontendParametersSatellite.Pilot_Off),
        (12070000, 27500000, eDVBFrontendParametersSatellite.FEC_3_4, 235,
        eDVBFrontendParametersSatellite.Polarisation_Horizontal, eDVBFrontendParametersSatellite.Inversion_Unknown,
        eDVBFrontendParametersSatellite.System_DVB_S, eDVBFrontendParametersSatellite.Modulation_QPSK,
        eDVBFrontendParametersSatellite.RollOff_alpha_0_35, eDVBFrontendParametersSatellite.Pilot_Off),
        (12074000, 27500000, eDVBFrontendParametersSatellite.FEC_3_4, 90,
        eDVBFrontendParametersSatellite.Polarisation_Vertical, eDVBFrontendParametersSatellite.Inversion_Unknown,
        eDVBFrontendParametersSatellite.System_DVB_S2, eDVBFrontendParametersSatellite.Modulation_8PSK,
        eDVBFrontendParametersSatellite.RollOff_alpha_0_35, eDVBFrontendParametersSatellite.Pilot_On))

    def __init__(self, session, nimList):
        Screen.__init__(self, session)

        self["actions"] = ActionMap(["SetupActions", "MenuActions"],
        {
            "ok": self.keyGo,
            "save": self.keySave,
            "cancel": self.keyCancel,
            "menu": self.closeRecursive,
        }, -2)

        providerList = list(x[0] for x in self.providers)

        # Restore the previous run's choices; fall back to defaults when
        # nothing was stored or the stored provider no longer exists.
        lastConfiguration = eval(config.misc.fastscan.last_configuration.value)
        if not lastConfiguration or not tuple(x for x in self.providers if x[0] == lastConfiguration[1]):
            lastConfiguration = (nimList[0][0], providerList[0], True, True, False)

        self.scan_nims = ConfigSelection(default = lastConfiguration[0], choices = nimList)
        self.scan_provider = ConfigSelection(default = lastConfiguration[1], choices = providerList)
        self.scan_hd = ConfigYesNo(default = lastConfiguration[2])
        self.scan_keepnumbering = ConfigYesNo(default = lastConfiguration[3])
        self.scan_keepsettings = ConfigYesNo(default = lastConfiguration[4])

        self.list = []
        self.tunerEntry = getConfigListEntry(_("Tuner"), self.scan_nims)
        self.list.append(self.tunerEntry)
        self.scanProvider = getConfigListEntry(_("Provider"), self.scan_provider)
        self.list.append(self.scanProvider)
        self.scanHD = getConfigListEntry(_("HD list"), self.scan_hd)
        self.list.append(self.scanHD)
        self.list.append(getConfigListEntry(_("Use fastscan channel numbering"), self.scan_keepnumbering))
        self.list.append(getConfigListEntry(_("Use fastscan channel names"), self.scan_keepsettings))
        self.list.append(getConfigListEntry(_("Enable auto fast scan"), config.misc.fastscan.auto))

        ConfigListScreen.__init__(self, self.list)
        self["config"].list = self.list
        self["config"].l.setList(self.list)

        self.finished_cb = None
        self["introduction"] = Label(_("Select your provider, and press OK to start the scan"))

    def saveConfiguration(self):
        # Backticks are Python 2 repr() syntax: the tuple is stored as its
        # string representation and eval()'d back in __init__.
        config.misc.fastscan.last_configuration.value = `(self.scan_nims.value, self.scan_provider.value, self.scan_hd.value, self.scan_keepnumbering.value, self.scan_keepsettings.value)`
        config.misc.fastscan.save()

    def keySave(self):
        # Green/save: persist the selection without scanning.
        self.saveConfiguration()
        self.close()

    def keyGo(self):
        # OK: persist the selection and start the scan.
        self.saveConfiguration()
        self.startScan()

    def getTransponderParameters(self, number):
        # Expand self.transponders[number] into a frontend parameter object.
        transponderParameters = eDVBFrontendParametersSatellite()
        transponderParameters.frequency = self.transponders[number][0]
        transponderParameters.symbol_rate = self.transponders[number][1]
        transponderParameters.fec = self.transponders[number][2]
        transponderParameters.orbital_position = self.transponders[number][3]
        transponderParameters.polarisation = self.transponders[number][4]
        transponderParameters.inversion = self.transponders[number][5]
        transponderParameters.system = self.transponders[number][6]
        transponderParameters.modulation = self.transponders[number][7]
        transponderParameters.rolloff = self.transponders[number][8]
        transponderParameters.pilot = self.transponders[number][9]
        return transponderParameters

    def startScan(self):
        # Resolve the chosen provider into (transponder, pid) and open the
        # scan progress screen; pid+1 selects the provider's HD list.
        parameters = tuple(x[1] for x in self.providers if x[0] == self.scan_provider.value)[0]
        pid = parameters[1]
        if self.scan_hd.value and parameters[2]:
            pid += 1
        if self.scan_nims.value:
            self.session.open(FastScanStatus, scanTuner = int(self.scan_nims.value),
                transponderParameters = self.getTransponderParameters(parameters[0]),
                scanPid = pid, keepNumbers = self.scan_keepnumbering.value, keepSettings = self.scan_keepsettings.value,
                providerName = self.scan_provider.getText())

    def keyCancel(self):
        self.close()
class FastScanAutoScreen(FastScanScreen):
    """Standby-look screen that silently re-runs the last configured scan.

    close() is called with True when the scan found nothing / was aborted,
    or with (result > 0) after a completed scan.
    """

    def __init__(self, session, lastConfiguration):
        print "[AutoFastScan] start"
        Screen.__init__(self, session)
        self.skinName="Standby"

        # Only power keys are handled; they abort the scan.
        self["actions"] = ActionMap( [ "StandbyActions" ],
        {
            "power": self.Power,
            "discrete_on": self.Power
        }, -1)

        self.onClose.append(self.__onClose)

        parameters = tuple(x[1] for x in self.providers if x[0] == lastConfiguration[1])
        if parameters:
            parameters = parameters[0]
            pid = parameters[1]
            if lastConfiguration[2] and parameters[2]:
                # HD list was requested and this provider offers one.
                pid += 1
            self.scan = eFastScan(pid, lastConfiguration[1], self.getTransponderParameters(parameters[0]), lastConfiguration[3], lastConfiguration[4])
            self.scan.scanCompleted.get().append(self.scanCompleted)
            self.scan.start(int(lastConfiguration[0]))
        else:
            # Stored provider no longer exists; nothing to do.
            self.scan = None
            self.close(True)

    def __onClose(self):
        # Detach the completion callback before dropping the scan object.
        if self.scan:
            self.scan.scanCompleted.get().remove(self.scanCompleted)
            del self.scan

    def scanCompleted(self, result):
        print "[AutoFastScan] completed result = ", result
        refreshServiceList()
        self.close(result>0)

    def Power(self):
        from Screens.Standby import inStandby
        inStandby.Power()
        print "[AutoFastScan] aborted due to power button pressed"
        self.close(True)

    def createSummary(self):
        from Screens.Standby import StandbySummary
        return StandbySummary
def FastScanMain(session, **kwargs):
	"""Menu entry point: open the interactive fast-scan screen.

	Refuses to start while a recording is running, and only offers DVB-S
	tuners that are configured and usable.
	"""
	if session.nav.RecordTimer.isRecording():
		session.open(MessageBox, _("A recording is currently running. Please stop the recording before trying to scan."), MessageBox.TYPE_ERROR)
	else:
		nimList = []
		# collect all nims which are *not* set to "nothing"
		for n in nimmanager.nim_slots:
			if not n.isCompatible("DVB-S"):
				continue
			if n.config_mode == "nothing":
				continue
			if n.config_mode in ("loopthrough", "satposdepends"):
				root_id = nimmanager.sec.getRoot(n.slot_id, int(n.config.connectedTo.value))
				if n.type == nimmanager.nim_slots[root_id].type: # check if connected from a DVB-S to DVB-S2 Nim or vice versa
					continue
			nimList.append((str(n.slot), n.friendly_full_description))
		if nimList:
			session.open(FastScanScreen, nimList)
		else:
			session.open(MessageBox, _("No suitable sat tuner found!"), MessageBox.TYPE_ERROR)
# Module state for the unattended auto scan: the active session (set by
# startSession) and the timer that re-triggers FastScanAuto().
Session = None
FastScanAutoStartTimer = eTimer()
def restartScanAutoStartTimer(reply=False):
if not reply:
print "[AutoFastScan] Scan was not succesfully retry in one hour"
FastScanAutoStartTimer.startLongTimer(3600)
else:
FastScanAutoStartTimer.startLongTimer(86400)
def FastScanAuto():
	"""Run an unattended fast scan from the last saved configuration."""
	from ast import literal_eval
	# literal_eval() safely parses the repr'd tuple written by
	# FastScanScreen.saveConfiguration(); eval() would execute arbitrary
	# code if the stored setting were ever tampered with.
	lastConfiguration = literal_eval(config.misc.fastscan.last_configuration.value)
	if not lastConfiguration or Session.nav.RecordTimer.isRecording():
		# Nothing to do (or a recording is running): retry in an hour.
		restartScanAutoStartTimer()
	else:
		Session.openWithCallback(restartScanAutoStartTimer, FastScanAutoScreen, lastConfiguration)
FastScanAutoStartTimer.callback.append(FastScanAuto)
def leaveStandby():
	# Leaving standby cancels any pending auto scan.
	FastScanAutoStartTimer.stop()
def standbyCountChanged(value):
	# Called each time the box enters standby; when the auto-scan option is
	# enabled, schedule a scan 90 seconds later and hook standby exit so the
	# timer is cancelled if the user wakes the box first.
	if config.misc.fastscan.auto.value:
		from Screens.Standby import inStandby
		inStandby.onClose.append(leaveStandby)
		FastScanAutoStartTimer.startLongTimer(90)
def startSession(session, **kwargs):
	# WHERE_SESSIONSTART hook: remember the session and start watching the
	# standby counter so auto scans can be scheduled.
	global Session
	Session = session
	config.misc.standbyCounter.addNotifier(standbyCountChanged, initial_call=False)
def FastScanStart(menuid, **kwargs):
	"""Menu hook: offer the Fast Scan entry only inside the scan menu."""
	if menuid != "scan":
		return []
	return [(_("Fast Scan"), FastScanMain, "fastscan", None)]
def Plugins(**kwargs):
	# Plugin entry point: register the menu item and the session-start hook,
	# but only on boxes that actually have a DVB-S tuner.
	if nimmanager.hasNimType("DVB-S"):
		return [PluginDescriptor(name=_("Fast Scan"), description="Scan Dutch/Belgian sat provider", where = PluginDescriptor.WHERE_MENU, fnc=FastScanStart),
			PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART], fnc=startSession)]
	else:
		return []
|
gwpy/gwpy.github.io | refs/heads/master | docs/0.11.0/plotter/colors-1.py | 7 | from __future__ import division
import numpy
from matplotlib import (pyplot, rcParams)
from matplotlib.colors import to_hex
from gwpy.plotter import colors
# Plain (non-TeX) text rendering so the sample builds anywhere.
rcParams.update({
    'text.usetex': False,
    'font.size': 15
})
# One full sine period, sampled finely enough to look smooth.
th = numpy.linspace(0, 2*numpy.pi, 512)
# Detector colour names registered with matplotlib when gwpy.plotter.colors
# is imported above.
names = [
    'gwpy:geo600',
    'gwpy:kagra',
    'gwpy:ligo-hanford',
    'gwpy:ligo-india',
    'gwpy:ligo-livingston',
    'gwpy:virgo',
]
fig = pyplot.figure(figsize=(5, 2))
ax = fig.gca()
ax.axis('off')
for j, name in enumerate(sorted(names)):
    # Resolve the registered colour name to its hex string.
    c = str(to_hex(name))
    # Stack each sine trace below the previous one.
    v_offset = -(j / len(names))
    ax.plot(th, .1*numpy.sin(th) + v_offset, color=c)
    # Label each trace with its name (left) and hex value (right).
    ax.annotate("{!r}".format(name), (0, v_offset), xytext=(-1.5, 0),
                ha='right', va='center', color=c,
                textcoords='offset points', family='monospace')
    ax.annotate("{!r}".format(c), (2*numpy.pi, v_offset), xytext=(1.5, 0),
                ha='left', va='center', color=c,
                textcoords='offset points', family='monospace')
# Wide side margins leave room for the text labels outside the axes.
fig.subplots_adjust(**{'bottom': 0.0, 'left': 0.54,
                       'right': 0.78, 'top': 1})
pyplot.show() |
nfedera/rg3-youtube-dl | refs/heads/master | youtube_dl/extractor/wistia.py | 3 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
ExtractorError,
sanitized_Request,
int_or_none,
)
class WistiaIE(InfoExtractor):
    """Extractor for videos embedded via fast.wistia.net iframe URLs.

    Metadata and asset lists come from the public JSON endpoint at
    fast.wistia.com/embed/medias/<id>.json.
    """
    _VALID_URL = r'https?://(?:fast\.)?wistia\.net/embed/iframe/(?P<id>[a-z0-9]+)'
    _API_URL = 'http://fast.wistia.com/embed/medias/{0:}.json'
    _TEST = {
        'url': 'http://fast.wistia.net/embed/iframe/sh7fpupwlt',
        'md5': 'cafeb56ec0c53c18c97405eecb3133df',
        'info_dict': {
            'id': 'sh7fpupwlt',
            'ext': 'mov',
            'title': 'Being Resourceful',
            'description': 'a Clients From Hell Video Series video from worldwidewebhosting',
            'upload_date': '20131204',
            'timestamp': 1386185018,
            'duration': 117,
        },
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        request = sanitized_Request(self._API_URL.format(video_id))
        request.add_header('Referer', url)  # Some videos require this.
        data_json = self._download_json(request, video_id)
        if data_json.get('error'):
            raise ExtractorError('Error while getting the playlist',
                expected=True)
        data = data_json['media']
        title = data['name']

        formats = []
        thumbnails = []
        # Each asset is either a still image (-> thumbnail) or a media
        # rendition (-> format); anything not ready or a preview is skipped.
        for a in data['assets']:
            astatus = a.get('status')
            atype = a.get('type')
            # status 2 appears to mean "ready"; other statuses are skipped.
            if (astatus is not None and astatus != 2) or atype == 'preview':
                continue
            elif atype in ('still', 'still_image'):
                thumbnails.append({
                    'url': a['url'],
                    'resolution': '%dx%d' % (a['width'], a['height']),
                })
            else:
                formats.append({
                    'format_id': atype,
                    'url': a['url'],
                    'tbr': int_or_none(a.get('bitrate')),
                    'vbr': int_or_none(a.get('opt_vbitrate')),
                    'width': int_or_none(a.get('width')),
                    'height': int_or_none(a.get('height')),
                    'filesize': int_or_none(a.get('size')),
                    'vcodec': a.get('codec'),
                    'container': a.get('container'),
                    'ext': a.get('ext'),
                    # Prefer the original upload over transcoded renditions.
                    'preference': 1 if atype == 'original' else None,
                })

        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'description': data.get('seoDescription'),
            'formats': formats,
            'thumbnails': thumbnails,
            'duration': int_or_none(data.get('duration')),
            'timestamp': int_or_none(data.get('createdAt')),
        }
|
varunarya10/nova_test_latest | refs/heads/master | nova/api/openstack/compute/plugins/v3/preserve_ephemeral_rebuild.py | 34 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import strutils
from nova.api.openstack.compute.schemas.v3 import preserve_ephemeral_rebuild
from nova.api.openstack import extensions
ALIAS = "os-preserve-ephemeral-rebuild"
class PreserveEphemeralRebuild(extensions.V3APIExtensionBase):
    """Allow preservation of the ephemeral partition on rebuild."""

    name = "PreserveEphemeralOnRebuild"
    alias = ALIAS
    version = 1

    def get_controller_extensions(self):
        # This extension adds no controller extensions.
        return []

    def get_resources(self):
        # This extension adds no new resources.
        return []

    def server_rebuild(self, rebuild_dict, rebuild_kwargs,
                       body_deprecated_param=None):
        # Copy the optional flag from the request body into the rebuild
        # kwargs; strict=True makes a malformed boolean string raise
        # instead of silently defaulting to False.
        if 'preserve_ephemeral' in rebuild_dict:
            rebuild_kwargs['preserve_ephemeral'] = strutils.bool_from_string(
                rebuild_dict['preserve_ephemeral'], strict=True)

    def get_server_rebuild_schema(self):
        # JSON-schema fragment merged into rebuild request validation.
        return preserve_ephemeral_rebuild.server_rebuild
|
petteyg/intellij-community | refs/heads/master | python/lib/Lib/encodings/iso8859_14.py | 593 | """ Python Character Mapping Codec iso8859_14 generated from 'MAPPINGS/ISO8859/8859-14.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless ISO-8859-14 codec backed by the module's charmap tables.
    def encode(self,input,errors='strict'):
        # Map each character through the unicode -> byte table.
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        # Map each byte through the byte -> unicode table.
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is stateless, so each chunk is encoded independently.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Charmap decoding is stateless, so each chunk is decoded independently.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer: inherits encode() from Codec, buffering from StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader: inherits decode() from Codec, buffering from StreamReader.
    pass
### encodings module API
def getregentry():
    # Registration entry consumed by the encodings package when this codec
    # is looked up by name.
    return codecs.CodecInfo(
        name='iso8859-14',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u1e02' # 0xA1 -> LATIN CAPITAL LETTER B WITH DOT ABOVE
u'\u1e03' # 0xA2 -> LATIN SMALL LETTER B WITH DOT ABOVE
u'\xa3' # 0xA3 -> POUND SIGN
u'\u010a' # 0xA4 -> LATIN CAPITAL LETTER C WITH DOT ABOVE
u'\u010b' # 0xA5 -> LATIN SMALL LETTER C WITH DOT ABOVE
u'\u1e0a' # 0xA6 -> LATIN CAPITAL LETTER D WITH DOT ABOVE
u'\xa7' # 0xA7 -> SECTION SIGN
u'\u1e80' # 0xA8 -> LATIN CAPITAL LETTER W WITH GRAVE
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u1e82' # 0xAA -> LATIN CAPITAL LETTER W WITH ACUTE
u'\u1e0b' # 0xAB -> LATIN SMALL LETTER D WITH DOT ABOVE
u'\u1ef2' # 0xAC -> LATIN CAPITAL LETTER Y WITH GRAVE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\u0178' # 0xAF -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u1e1e' # 0xB0 -> LATIN CAPITAL LETTER F WITH DOT ABOVE
u'\u1e1f' # 0xB1 -> LATIN SMALL LETTER F WITH DOT ABOVE
u'\u0120' # 0xB2 -> LATIN CAPITAL LETTER G WITH DOT ABOVE
u'\u0121' # 0xB3 -> LATIN SMALL LETTER G WITH DOT ABOVE
u'\u1e40' # 0xB4 -> LATIN CAPITAL LETTER M WITH DOT ABOVE
u'\u1e41' # 0xB5 -> LATIN SMALL LETTER M WITH DOT ABOVE
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\u1e56' # 0xB7 -> LATIN CAPITAL LETTER P WITH DOT ABOVE
u'\u1e81' # 0xB8 -> LATIN SMALL LETTER W WITH GRAVE
u'\u1e57' # 0xB9 -> LATIN SMALL LETTER P WITH DOT ABOVE
u'\u1e83' # 0xBA -> LATIN SMALL LETTER W WITH ACUTE
u'\u1e60' # 0xBB -> LATIN CAPITAL LETTER S WITH DOT ABOVE
u'\u1ef3' # 0xBC -> LATIN SMALL LETTER Y WITH GRAVE
u'\u1e84' # 0xBD -> LATIN CAPITAL LETTER W WITH DIAERESIS
u'\u1e85' # 0xBE -> LATIN SMALL LETTER W WITH DIAERESIS
u'\u1e61' # 0xBF -> LATIN SMALL LETTER S WITH DOT ABOVE
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u0174' # 0xD0 -> LATIN CAPITAL LETTER W WITH CIRCUMFLEX
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\u1e6a' # 0xD7 -> LATIN CAPITAL LETTER T WITH DOT ABOVE
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\u0176' # 0xDE -> LATIN CAPITAL LETTER Y WITH CIRCUMFLEX
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u0175' # 0xF0 -> LATIN SMALL LETTER W WITH CIRCUMFLEX
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\u1e6b' # 0xF7 -> LATIN SMALL LETTER T WITH DOT ABOVE
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
u'\u0177' # 0xFE -> LATIN SMALL LETTER Y WITH CIRCUMFLEX
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
partofthething/home-assistant | refs/heads/dev | homeassistant/components/risco/alarm_control_panel.py | 15 | """Support for Risco alarms."""
import logging
from homeassistant.components.alarm_control_panel import (
FORMAT_NUMBER,
AlarmControlPanelEntity,
)
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
CONF_PIN,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from .const import (
CONF_CODE_ARM_REQUIRED,
CONF_CODE_DISARM_REQUIRED,
CONF_HA_STATES_TO_RISCO,
CONF_RISCO_STATES_TO_HA,
DATA_COORDINATOR,
DEFAULT_OPTIONS,
DOMAIN,
RISCO_ARM,
RISCO_GROUPS,
RISCO_PARTIAL_ARM,
)
from .entity import RiscoEntity
_LOGGER = logging.getLogger(__name__)

# Map each supported HA armed state to its alarm-control-panel feature flag;
# RiscoAlarm ORs these together to compute supported_features from the
# configured state mappings.
STATES_TO_SUPPORTED_FEATURES = {
    STATE_ALARM_ARMED_AWAY: SUPPORT_ALARM_ARM_AWAY,
    STATE_ALARM_ARMED_CUSTOM_BYPASS: SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
    STATE_ALARM_ARMED_HOME: SUPPORT_ALARM_ARM_HOME,
    STATE_ALARM_ARMED_NIGHT: SUPPORT_ALARM_ARM_NIGHT,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Risco alarm control panel."""
    coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR]
    # Entry options override the defaults.
    options = dict(DEFAULT_OPTIONS)
    options.update(config_entry.options)
    pin = config_entry.data[CONF_PIN]
    panels = []
    for partition_id in coordinator.data.partitions:
        panels.append(RiscoAlarm(coordinator, partition_id, pin, options))
    async_add_entities(panels, False)
class RiscoAlarm(AlarmControlPanelEntity, RiscoEntity):
    """Representation of a Risco partition."""

    def __init__(self, coordinator, partition_id, code, options):
        """Init the partition."""
        super().__init__(coordinator)
        self._partition_id = partition_id
        self._partition = self.coordinator.data.partitions[self._partition_id]
        self._code = code
        self._code_arm_required = options[CONF_CODE_ARM_REQUIRED]
        self._code_disarm_required = options[CONF_CODE_DISARM_REQUIRED]
        # Bidirectional state mappings from the config entry options.
        self._risco_to_ha = options[CONF_RISCO_STATES_TO_HA]
        self._ha_to_risco = options[CONF_HA_STATES_TO_RISCO]
        # OR together the feature flag of every HA state that maps to a
        # Risco state, so only configured arm modes are offered.
        self._supported_states = 0
        for state in self._ha_to_risco:
            self._supported_states |= STATES_TO_SUPPORTED_FEATURES[state]

    def _get_data_from_coordinator(self):
        # Refresh the cached partition object after a coordinator update.
        self._partition = self.coordinator.data.partitions[self._partition_id]

    @property
    def device_info(self):
        """Return device info for this device."""
        return {
            "identifiers": {(DOMAIN, self.unique_id)},
            "name": self.name,
            "manufacturer": "Risco",
        }

    @property
    def name(self):
        """Return the name of the partition."""
        return f"Risco {self._risco.site_name} Partition {self._partition_id}"

    @property
    def unique_id(self):
        """Return a unique id for that partition."""
        return f"{self._risco.site_uuid}_{self._partition_id}"

    @property
    def state(self):
        """Return the state of the device."""
        # Order matters: triggered/arming/disarmed take precedence over the
        # configurable armed/partially-armed mappings.
        if self._partition.triggered:
            return STATE_ALARM_TRIGGERED
        if self._partition.arming:
            return STATE_ALARM_ARMING
        if self._partition.disarmed:
            return STATE_ALARM_DISARMED
        if self._partition.armed:
            return self._risco_to_ha[RISCO_ARM]
        if self._partition.partially_armed:
            # An armed group maps to its own HA state, otherwise fall back
            # to the generic partial-arm mapping.
            for group, armed in self._partition.groups.items():
                if armed:
                    return self._risco_to_ha[group]
            return self._risco_to_ha[RISCO_PARTIAL_ARM]
        return None

    @property
    def supported_features(self):
        """Return the list of supported features."""
        return self._supported_states

    @property
    def code_arm_required(self):
        """Whether the code is required for arm actions."""
        return self._code_arm_required

    @property
    def code_format(self):
        """Return one or more digits/characters."""
        return FORMAT_NUMBER

    def _validate_code(self, code):
        """Validate given code."""
        return code == self._code

    async def async_alarm_disarm(self, code=None):
        """Send disarm command."""
        if self._code_disarm_required and not self._validate_code(code):
            _LOGGER.warning("Wrong code entered for disarming")
            return
        await self._call_alarm_method("disarm")

    async def async_alarm_arm_home(self, code=None):
        """Send arm home command."""
        await self._arm(STATE_ALARM_ARMED_HOME, code)

    async def async_alarm_arm_away(self, code=None):
        """Send arm away command."""
        await self._arm(STATE_ALARM_ARMED_AWAY, code)

    async def async_alarm_arm_night(self, code=None):
        """Send arm night command."""
        await self._arm(STATE_ALARM_ARMED_NIGHT, code)

    async def async_alarm_arm_custom_bypass(self, code=None):
        """Send arm custom bypass command."""
        await self._arm(STATE_ALARM_ARMED_CUSTOM_BYPASS, code)

    async def _arm(self, mode, code):
        # Shared arm path: validate the code, translate the HA mode to a
        # Risco state, and dispatch group vs. whole-partition arming.
        if self._code_arm_required and not self._validate_code(code):
            _LOGGER.warning("Wrong code entered for %s", mode)
            return
        risco_state = self._ha_to_risco[mode]
        if not risco_state:
            _LOGGER.warning("No mapping for mode %s", mode)
            return
        if risco_state in RISCO_GROUPS:
            await self._call_alarm_method("group_arm", risco_state)
        else:
            await self._call_alarm_method(risco_state)

    async def _call_alarm_method(self, method, *args):
        # Call the named method on the Risco API client and update state
        # immediately from the returned alarm snapshot rather than waiting
        # for the next coordinator poll.
        alarm = await getattr(self._risco, method)(self._partition_id, *args)
        self._partition = alarm.partitions[self._partition_id]
        self.async_write_ha_state()
|
bincyber/beesly | refs/heads/master | beesly/utils.py | 1 | from hashlib import sha256
from distutils.spawn import find_executable
import re
import subprocess
from flask import request
import requests
def get_ec2_metadata():
    """
    Returns the following AWS EC2 metadata as a dictionary:
    * region
    * availability zone
    * image id
    * instance type
    * instance id
    """
    metadata_url = 'http://169.254.169.254/latest/dynamic/instance-identity/document/'
    document = requests.get(metadata_url, timeout=0.250).json()
    # Map our snake_case keys onto the camelCase identity-document fields.
    field_map = {
        'image_id': 'imageId',
        'instance_type': 'instanceType',
        'instance_id': 'instanceId',
        'availability_zone': 'availabilityZone',
        'region': 'region',
    }
    return {ours: document.get(theirs) for ours, theirs in field_map.items()}
def get_real_source_ip():
    """
    Returns the real source IP address of the HTTP request.
    """
    forwarded = request.headers.getlist("X-Forwarded-For")
    if forwarded:
        # Take the last address of the first X-Forwarded-For header value.
        return forwarded[0].rpartition(' ')[-1]
    return request.environ['REMOTE_ADDR']
def get_request_ip_username():
    """
    Returns a unique key for rate limiting by extracting the username from the
    JSON request body, combining it with the remote address of the HTTP request
    and hashing it using SHA256.
    """
    # force=True parses the body as JSON regardless of the Content-Type header.
    request_json = request.get_json(force=True)
    rlimiting_key = get_real_source_ip()
    if request_json is not None:
        # The literal string 'None' is used when no username was supplied, so
        # anonymous requests from one IP share a single rate-limit bucket.
        rlimiting_key += request_json.get('username', 'None')
    return sha256(rlimiting_key.encode('utf-8')).hexdigest()
def validate_username(username):
    """
    Checks if the username is valid and does not contain prohibited characters.

    A valid username starts with a letter and is followed by 1 to 32
    characters from [-_.@a-z0-9].

    Returns True if the username is valid, otherwise False.

    Arguments
    ----------
    username : string
        the username to check
    """
    # NOTE(review): only the first character accepts uppercase letters; the
    # remainder is lowercase-only per the original pattern — confirm intended.
    regex = r'^[a-zA-Z][-_.@a-z0-9]{1,32}$'
    # Collapse the if/else on the match object into a direct boolean.
    return re.match(regex, username) is not None
def get_group_membership(username):
    """
    Returns a list of groups the user is a member of to support Role-Based Access Control.

    The `id` command is used because it reports all (POSIX) groups that the user
    is a member of including external groups from Identity Management systems (AD, IdM, FreeIPA).

    Returns an empty list when the user is unknown (the `id` command fails).

    Arguments
    ----------
    username : string
        the username to get group membership for
    """
    # shutil.which replaces the deprecated distutils.spawn.find_executable.
    from shutil import which
    exe = which('id')
    process = subprocess.run([exe, '-Gn', username], stdout=subprocess.PIPE)
    groups = [group.decode('utf-8') for group in process.stdout.split()]
    # `id -Gn` includes the user's primary group, which conventionally shares
    # the username. Guard the removal: unconditional remove() raised
    # ValueError for unknown users (empty output) and on systems where the
    # primary group name differs from the username.
    if username in groups:
        groups.remove(username)
    return groups
|
vuteam/BlackHole-New | refs/heads/master | lib/python/Components/ServiceList.py | 2 | from HTMLComponent import HTMLComponent
from GUIComponent import GUIComponent
from skin import parseColor, parseFont
from enigma import eListboxServiceContent, eListbox, eServiceCenter, eServiceReference, gFont, eRect
from Tools.LoadPixmap import LoadPixmap
from Tools.TextBoundary import getTextBoundarySize
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN
from Components.Renderer.Picon import getPiconName
from Components.config import config
def refreshServiceList(configElement = None):
	# Re-apply the current display mode on the InfoBar's service list so a
	# changed config setting takes effect immediately. The configElement
	# argument exists only to satisfy the config-notifier signature.
	from Screens.InfoBar import InfoBar
	InfoBarInstance = InfoBar.instance
	if InfoBarInstance is not None:
		servicelist = InfoBarInstance.servicelist
		if servicelist:
			servicelist.setMode()
class ServiceList(HTMLComponent, GUIComponent):
MODE_NORMAL = 0
MODE_FAVOURITES = 1
	def __init__(self, serviceList):
		# serviceList is the owning service-selection screen; kept so skin
		# changes can be propagated back (see applySkin callers).
		self.serviceList = serviceList
		GUIComponent.__init__(self)
		self.l = eListboxServiceContent()
		# Load the default icon set into the C++ listbox content. Each
		# "pic and ..." guards against a missing skin file.
		pic = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/folder.png"))
		pic and self.l.setPixmap(self.l.picFolder, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/marker.png"))
		pic and self.l.setPixmap(self.l.picMarker, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_dvb_s-fs8.png"))
		pic and self.l.setPixmap(self.l.picDVB_S, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_dvb_c-fs8.png"))
		pic and self.l.setPixmap(self.l.picDVB_C, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_dvb_t-fs8.png"))
		pic and self.l.setPixmap(self.l.picDVB_T, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_stream-fs8.png"))
		pic and self.l.setPixmap(self.l.picStream, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/ico_service_group-fs8.png"))
		pic and self.l.setPixmap(self.l.picServiceGroup, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/icon_crypt.png"))
		pic and self.l.setPixmap(self.l.picCrypto, pic)
		pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/record.png"))
		pic and self.l.setPixmap(self.l.picRecord, pic)
		self.root = None
		self.mode = self.MODE_NORMAL
		# Layout defaults; applySkin() may override any of these.
		self.ItemHeight = 28
		self.ServiceNameFont = parseFont("Regular;22", ((1,1),(1,1)))
		self.ServiceInfoFont = parseFont("Regular;18", ((1,1),(1,1)))
		self.ServiceNumberFont = parseFont("Regular;20", ((1,1),(1,1)))
		self.progressBarWidth = 80
		self.progressPercentWidth = 0
		self.fieldMargins = 10
		# Callbacks fired whenever the listbox selection moves.
		self.onSelectionChanged = [ ]
	def applySkin(self, desktop, parent):
		# Apply skin attributes by name: each supported attribute has a
		# same-named local setter function below, looked up via locals().
		def foregroundColorMarked(value):
			self.l.setColor(eListboxServiceContent.markedForeground, parseColor(value))
		def foregroundColorMarkedSelected(value):
			self.l.setColor(eListboxServiceContent.markedForegroundSelected, parseColor(value))
		def backgroundColorMarked(value):
			self.l.setColor(eListboxServiceContent.markedBackground, parseColor(value))
		def backgroundColorMarkedSelected(value):
			self.l.setColor(eListboxServiceContent.markedBackgroundSelected, parseColor(value))
		def foregroundColorServiceNotAvail(value):
			self.l.setColor(eListboxServiceContent.serviceNotAvail, parseColor(value))
		def foregroundColorEvent(value):
			self.l.setColor(eListboxServiceContent.eventForeground, parseColor(value))
		def colorServiceDescription(value):
			self.l.setColor(eListboxServiceContent.eventForeground, parseColor(value))
		def foregroundColorEventSelected(value):
			self.l.setColor(eListboxServiceContent.eventForegroundSelected, parseColor(value))
		def colorServiceDescriptionSelected(value):
			self.l.setColor(eListboxServiceContent.eventForegroundSelected, parseColor(value))
		def foregroundColorEventborder(value):
			self.l.setColor(eListboxServiceContent.eventborderForeground, parseColor(value))
		def foregroundColorEventborderSelected(value):
			self.l.setColor(eListboxServiceContent.eventborderForegroundSelected, parseColor(value))
		def colorEventProgressbar(value):
			self.l.setColor(eListboxServiceContent.serviceEventProgressbarColor, parseColor(value))
		def colorEventProgressbarSelected(value):
			self.l.setColor(eListboxServiceContent.serviceEventProgressbarColorSelected, parseColor(value))
		def colorEventProgressbarBorder(value):
			self.l.setColor(eListboxServiceContent.serviceEventProgressbarBorderColor, parseColor(value))
		def colorEventProgressbarBorderSelected(value):
			self.l.setColor(eListboxServiceContent.serviceEventProgressbarBorderColorSelected, parseColor(value))
		def colorServiceRecorded(value):
			self.l.setColor(eListboxServiceContent.serviceRecorded, parseColor(value))
		def colorFallbackItem(value):
			self.l.setColor(eListboxServiceContent.serviceItemFallback, parseColor(value))
		def colorServiceSelectedFallback(value):
			self.l.setColor(eListboxServiceContent.serviceSelectedFallback, parseColor(value))
		def colorServiceDescriptionFallback(value):
			self.l.setColor(eListboxServiceContent.eventForegroundFallback, parseColor(value))
		def colorServiceDescriptionSelectedFallback(value):
			self.l.setColor(eListboxServiceContent.eventForegroundSelectedFallback, parseColor(value))
		def picServiceEventProgressbar(value):
			pic = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, value))
			pic and self.l.setPixmap(self.l.picServiceEventProgressbar, pic)
		def serviceItemHeight(value):
			self.ItemHeight = int(value)
		def serviceNameFont(value):
			self.ServiceNameFont = parseFont(value, ((1,1),(1,1)))
		def serviceInfoFont(value):
			self.ServiceInfoFont = parseFont(value, ((1,1),(1,1)))
		def serviceNumberFont(value):
			self.ServiceNumberFont = parseFont(value, ((1,1),(1,1)))
		def progressbarHeight(value):
			self.l.setProgressbarHeight(int(value))
		def progressbarBorderWidth(value):
			self.l.setProgressbarBorderWidth(int(value))
		def progressBarWidth(value):
			self.progressBarWidth = int(value)
		def progressPercentWidth(value):
			self.progressPercentWidth = int(value)
		def fieldMargins(value):
			self.fieldMargins = int(value)
		def nonplayableMargins(value):
			self.l.setNonplayableMargins(int(value))
		def itemsDistances(value):
			self.l.setItemsDistances(int(value))
		# Iterate over a copy since handled attributes are removed from the
		# list; unknown attributes fall through to GUIComponent.applySkin.
		for (attrib, value) in list(self.skinAttributes):
			try:
				# locals().get() returns None for unhandled names; calling
				# None (or a failing setter) lands in the except and leaves
				# the attribute in skinAttributes for the base class.
				locals().get(attrib)(value)
				self.skinAttributes.remove((attrib, value))
			except:
				pass
		return GUIComponent.applySkin(self, desktop, parent)
def connectSelChanged(self, fnc):
    """Register *fnc* as a selection-changed callback, ignoring duplicates."""
    if fnc not in self.onSelectionChanged:
        self.onSelectionChanged.append(fnc)
def disconnectSelChanged(self, fnc):
    """Unregister *fnc* from the selection-changed callbacks if present."""
    if fnc in self.onSelectionChanged:
        self.onSelectionChanged.remove(fnc)
def selectionChanged(self):
    """Invoke every registered selection-changed callback, in order."""
    for notify in self.onSelectionChanged:
        notify()
def setCurrent(self, ref, adjust=True):
    """Select *ref* in the current list; on a miss, search all userbouquets.

    Returns None when the ref was found in the current list, True when it
    was found in some other userbouquet (which is then entered and saved),
    and False when it was not found anywhere.
    NOTE(review): the asymmetric None/True/False return is kept from the
    original; callers appear to rely only on truthiness -- confirm.
    """
    if self.l.setCurrent(ref):
        return None
    from Components.ServiceEventTracker import InfoBarCount
    # Only widen the search from the main InfoBar, when multibouquet is
    # enabled, and for real services (type 8192 marks a bouquet/directory).
    if adjust and config.usage.multibouquet.value and InfoBarCount == 1 and ref and ref.type != 8192:
        print "[servicelist] search for service in userbouquets"
        if self.serviceList:
            # Remember where we were so we can revert on failure.
            revert_mode = config.servicelist.lastmode.value
            revert_root = self.getRoot()
            # First pass: every TV userbouquet.
            self.serviceList.setModeTv()
            revert_tv_root = self.getRoot()
            bouquets = self.serviceList.getBouquetList()
            for bouquet in bouquets:
                self.serviceList.enterUserbouquet(bouquet[1])
                if self.l.setCurrent(ref):
                    config.servicelist.lastmode.save()
                    self.serviceList.saveChannel(ref)
                    return True
            # Not in a TV bouquet: restore the TV root, then scan radio.
            self.serviceList.enterUserbouquet(revert_tv_root)
            self.serviceList.setModeRadio()
            revert_radio_root = self.getRoot()
            bouquets = self.serviceList.getBouquetList()
            for bouquet in bouquets:
                self.serviceList.enterUserbouquet(bouquet[1])
                if self.l.setCurrent(ref):
                    config.servicelist.lastmode.save()
                    self.serviceList.saveChannel(ref)
                    return True
            self.serviceList.enterUserbouquet(revert_radio_root)
            # Nothing matched anywhere: restore the original mode and root.
            print "[servicelist] service not found in any userbouquets"
            if revert_mode == "tv":
                self.serviceList.setModeTv()
            elif revert_mode == "radio":
                self.serviceList.setModeRadio()
            self.serviceList.enterUserbouquet(revert_root)
            return False
def getCurrent(self):
    """Return the currently selected entry as an eServiceReference."""
    ref = eServiceReference()
    self.l.getCurrent(ref)
    return ref
def getPrev(self):
    """Return the entry preceding the current one as an eServiceReference."""
    ref = eServiceReference()
    self.l.getPrev(ref)
    return ref
def getNext(self):
    """Return the entry following the current one as an eServiceReference."""
    ref = eServiceReference()
    self.l.getNext(ref)
    return ref
def atBegin(self):
    """True when the selection sits on the first list entry."""
    return self.instance.atBegin()
def atEnd(self):
    """True when the selection sits on the last list entry."""
    return self.instance.atEnd()
def moveUp(self):
    """Move the listbox selection one entry up."""
    listbox = self.instance
    listbox.moveSelection(listbox.moveUp)
def moveDown(self):
    """Move the listbox selection one entry down."""
    listbox = self.instance
    listbox.moveSelection(listbox.moveDown)
def moveToChar(self, char):
    """Jump to the next entry whose name begins with *char* (case-insensitive).

    Queries the content for the next match of both the given character and
    its upper-case form; an index of 0 means "no match" for that case.
    """
    # TODO fill with life
    print "Next char: "
    index = self.l.getNextBeginningWithChar(char)
    indexup = self.l.getNextBeginningWithChar(char.upper())
    # Prefer the upper-case hit when the lower-case one is missing (0) or
    # further away.
    if indexup != 0:
        if (index > indexup or index == 0):
            index = indexup
    self.instance.moveSelectionTo(index)
    print "Moving to character " + str(char)
def moveToNextMarker(self):
    """Jump the selection to the next marker entry in the list."""
    self.instance.moveSelectionTo(self.l.getNextMarkerPos())
def moveToPrevMarker(self):
    """Jump the selection to the previous marker entry in the list."""
    self.instance.moveSelectionTo(self.l.getPrevMarkerPos())
def moveToIndex(self, index):
    """Select the entry at list position *index*."""
    self.instance.moveSelectionTo(index)
def getCurrentIndex(self):
    """Return the list position of the selected entry."""
    return self.instance.getCurrentIndex()
# Widget class the GUI framework instantiates for this component.
GUI_WIDGET = eListbox
def postWidgetCreate(self, instance):
    """Attach content and callbacks once the GUI widget exists.

    Call order matters: content is set before the mode (and therefore the
    layout) is applied.
    """
    instance.setWrapAround(True)
    instance.setContent(self.l)
    # .get() exposes the underlying callback list of the signal.
    instance.selectionChanged.get().append(self.selectionChanged)
    self.setMode(self.mode)
def preWidgetRemove(self, instance):
    """Detach content and callbacks before the GUI widget is destroyed."""
    instance.setContent(None)
    instance.selectionChanged.get().remove(self.selectionChanged)
def getRoot(self):
    """Return the service reference currently used as the list root."""
    return self.root
def getRootServices(self):
    """Return the string references of all services under the current root.

    Returns an empty list when the root cannot be enumerated.
    """
    serviceHandler = eServiceCenter.getInstance()
    # Renamed from `list` to avoid shadowing the builtin.
    services = serviceHandler.list(self.root)
    dest = []
    if services is not None:
        while True:
            s = services.getNext()
            if not s.valid():
                break
            dest.append(s.toString())
    return dest
def setPlayableIgnoreService(self, ref):
    """Tell the content to treat *ref* as not playable (e.g. the live service)."""
    self.l.setIgnoreService(ref)
def setRoot(self, root, justSet=False):
    """Set the list root; unless justSet, also sort and emit selectionChanged."""
    self.root = root
    self.l.setRoot(root, justSet)
    if justSet:
        return
    self.l.sort()
    self.selectionChanged()
def resetRoot(self):
    """Re-apply and re-sort the current root, preserving the selection index."""
    keep = self.instance.getCurrentIndex()
    self.l.setRoot(self.root, False)
    self.l.sort()
    self.instance.moveSelectionTo(keep)
def removeCurrent(self):
    """Delete the currently selected entry from the list content."""
    self.l.removeCurrent()
def addService(self, service, beforeCurrent=False):
    """Append *service* to the list (or insert before the current entry)."""
    self.l.addService(service, beforeCurrent)
def finishFill(self):
    """Signal the content that filling is complete, then sort it."""
    self.l.FillFinished()
    self.l.sort()
# stuff for multiple marks (edit mode / later multiepg)
def clearMarks(self):
    """Reset all mark state in the list content."""
    self.l.initMarked()
def isMarked(self, ref):
    """Return whether *ref* is currently marked."""
    return self.l.isMarked(ref)
def addMarked(self, ref):
    """Mark *ref* in the list content."""
    self.l.addMarked(ref)
def removeMarked(self, ref):
    """Remove the mark from *ref* in the list content."""
    self.l.removeMarked(ref)
def getMarked(self):
    """Return all marked entries as a list of string references."""
    content = self.l
    content.markedQueryStart()
    marked = []
    ref = eServiceReference()
    # markedQueryNext() fills `ref` in place and returns 0 while entries
    # remain; allocate a fresh ref each round so stored strings are stable.
    while content.markedQueryNext(ref) == 0:
        marked.append(ref.toString())
        ref = eServiceReference()
    return marked
#just for movemode.. only one marked entry..
def setCurrentMarked(self, state):
    """Enable or disable move-mode marking of the current entry."""
    self.l.setCurrentMarked(state)
def setMode(self, mode):
    """Configure item height, fonts and cell layout for the given list mode.

    In MODE_NORMAL (or when channel numbers are disabled) no number column
    is reserved.  Sizing and feature toggles are read from config.usage.
    """
    if config.usage.servicelist_number_of_services.value == "by skin":
        # Use the skin-provided metrics unchanged.
        ItemHeight = self.ItemHeight
        ServiceNameFont = self.ServiceNameFont
        ServiceNumberFont = self.ServiceNumberFont
        ServiceInfoFont = self.ServiceInfoFont
    else:
        # Derive the item height from the requested visible service count
        # and scale the skin fonts by the same factor.
        ItemHeight = int(self.instance.size().height() / int(config.usage.servicelist_number_of_services.value))
        FontFactor = ItemHeight * 100 / self.ItemHeight
        ServiceNameFont = gFont(self.ServiceNameFont.family, int(self.ServiceNameFont.pointSize * FontFactor/100))
        ServiceNumberFont = gFont(self.ServiceNumberFont.family, int(self.ServiceNumberFont.pointSize * FontFactor/100))
        ServiceInfoFont = gFont(self.ServiceInfoFont.family, int(self.ServiceInfoFont.pointSize * FontFactor/100))
    self.mode = mode
    self.l.setItemHeight(ItemHeight)
    self.l.setVisualMode(eListboxServiceContent.visModeComplex)
    if config.usage.service_icon_enable.value:
        self.l.setGetPiconNameFunc(getPiconName)
    else:
        self.l.setGetPiconNameFunc(None)
    rowWidth = self.instance.size().width() - 30 #scrollbar is fixed 20 + 10 Extra marge
    if mode == self.MODE_NORMAL or not config.usage.show_channel_numbers_in_servicelist.value:
        channelNumberWidth = 0
        channelNumberSpace = 0
    else:
        # Reserve room for "0000" in alternative numbering mode, else "00000".
        channelNumberWidth = config.usage.alternative_number_mode.value and getTextBoundarySize(self.instance, ServiceNumberFont, self.instance.size(), "0000").width() or getTextBoundarySize(self.instance, self.ServiceNumberFont, self.instance.size(), "00000").width()
        channelNumberSpace = self.fieldMargins
    self.l.setElementPosition(self.l.celServiceNumber, eRect(0, 0, channelNumberWidth, ItemHeight))
    progressWidth = self.progressBarWidth
    if "perc" in config.usage.show_event_progress_in_servicelist.value:
        progressWidth = self.progressPercentWidth or self.progressBarWidth
    # Place the progressbar left of, right of, or nowhere near the name cell
    # depending on configuration; the name cell takes the remaining width.
    if "left" in config.usage.show_event_progress_in_servicelist.value:
        self.l.setElementPosition(self.l.celServiceEventProgressbar, eRect(channelNumberWidth+channelNumberSpace, 0, progressWidth , ItemHeight))
        self.l.setElementPosition(self.l.celServiceName, eRect(channelNumberWidth+channelNumberSpace + progressWidth + self.fieldMargins, 0, rowWidth - (channelNumberWidth+channelNumberSpace + progressWidth + self.fieldMargins), ItemHeight))
    elif "right" in config.usage.show_event_progress_in_servicelist.value:
        self.l.setElementPosition(self.l.celServiceEventProgressbar, eRect(rowWidth - progressWidth, 0, progressWidth, ItemHeight))
        self.l.setElementPosition(self.l.celServiceName, eRect(channelNumberWidth+channelNumberSpace, 0, rowWidth - (channelNumberWidth+channelNumberSpace + progressWidth + self.fieldMargins), ItemHeight))
    else:
        self.l.setElementPosition(self.l.celServiceEventProgressbar, eRect(0, 0, 0, 0))
        self.l.setElementPosition(self.l.celServiceName, eRect(channelNumberWidth+channelNumberSpace, 0, rowWidth - (channelNumberWidth+channelNumberSpace), ItemHeight))
    self.l.setElementFont(self.l.celServiceName, ServiceNameFont)
    self.l.setElementFont(self.l.celServiceNumber, ServiceNumberFont)
    self.l.setElementFont(self.l.celServiceInfo, ServiceInfoFont)
    if "perc" in config.usage.show_event_progress_in_servicelist.value:
        self.l.setElementFont(self.l.celServiceEventProgressbar, ServiceInfoFont)
    # Feature toggles passed straight through to the content renderer.
    self.l.setHideNumberMarker(config.usage.hide_number_markers.value)
    self.l.setServiceTypeIconMode(int(config.usage.servicetype_icon_mode.value))
    self.l.setCryptoIconMode(int(config.usage.crypto_icon_mode.value))
    self.l.setRecordIndicatorMode(int(config.usage.record_indicator_mode.value))
    self.l.setColumnWidth(int(config.usage.servicelist_column.value))
|
tswast/google-cloud-python | refs/heads/master | texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py | 2 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.cloud import texttospeech_v1beta1
class TestSystemSpeech(object):
def test_synthesize_speech(self):
client = texttospeech_v1beta1.TextToSpeechClient()
synthesis_input = texttospeech_v1beta1.types.SynthesisInput(
text="Hello, World!"
)
voice = texttospeech_v1beta1.types.VoiceSelectionParams(
language_code="en-US",
ssml_gender=texttospeech_v1beta1.enums.SsmlVoiceGender.NEUTRAL,
)
audio_config = texttospeech_v1beta1.types.AudioConfig(
audio_encoding=texttospeech_v1beta1.enums.AudioEncoding.MP3
)
response = client.synthesize_speech(synthesis_input, voice, audio_config)
assert response.audio_content is not None
def test_list_voices(self):
client = texttospeech_v1beta1.TextToSpeechClient()
voices = client.list_voices()
assert len(voices.voices) > 0
|
tariqqrasheedd/AudioXBlock | refs/heads/master | audio/__init__.py | 2 | from .audio import AudioXBlock |
drewet/androguard | refs/heads/master | androguard/decompiler/dad/tests/rpo_test.py | 24 | """Tests for rpo."""
import sys
sys.path.append('.')
import unittest
from androguard.decompiler.dad import graph
from androguard.decompiler.dad import node
class NodeTest(node.Node):
    """Minimal Node subclass whose str() shows the name and its RPO number."""

    def __init__(self, name):
        super(NodeTest, self).__init__(name)

    def __str__(self):
        return '{0} ({1:d})'.format(self.name, self.num)
class RpoTest(unittest.TestCase):
    """Checks graph.Graph.compute_rpo() numbering on a set of known CFGs.

    Each test builds a graph from an adjacency dict (edges[None] names the
    entry node) and verifies every node's reverse-post-order number.
    """

    def _getNode(self, node_map, n):
        """Return the NodeTest for name *n*, creating and registering it on
        first use."""
        ret_node = node_map.get(n)
        if not ret_node:
            ret_node = node_map[n] = NodeTest(n)
            self.graph.add_node(ret_node)
        return ret_node

    def _createGraphFrom(self, edges):
        """Populate self.graph from an adjacency dict and return the
        name -> node mapping."""
        node_map = {}
        for n, childs in edges.iteritems():
            if n is None:
                # The None key only designates the entry node's name.
                continue
            parent_node = self._getNode(node_map, n)
            for child in childs:
                child_node = self._getNode(node_map, child)
                self.graph.add_edge(parent_node, child_node)
        self.graph.entry = node_map[edges[None]]
        return node_map

    def _verifyRpo(self, node_map, expected_rpo):
        """Assert each node's computed RPO number matches the expectation."""
        for n1, n2 in expected_rpo.iteritems():
            self.assertEqual(node_map[n1].num, n2)

    def setUp(self):
        self.graph = graph.Graph()

    def tearDown(self):
        self.graph = None

    def testTarjanGraph(self):
        # Flow graph from Tarjan's fast-dominators paper.
        edges = {None: 'r',
                 'r': ['a', 'b', 'c'],
                 'a': ['d'],
                 'b': ['a', 'd', 'e'],
                 'c': ['f', 'g'],
                 'd': ['l'],
                 'e': ['h'],
                 'f': ['i'],
                 'g': ['i', 'j'],
                 'h': ['e', 'k'],
                 'i': ['k'],
                 'j': ['i'],
                 'k': ['i', 'r'],
                 'l': ['h']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testTarjan_graph', '/tmp')
        expected_rpo = {'r': 1, 'a': 7, 'b': 6, 'c': 2,
                        'd': 8, 'e': 13, 'f': 5,
                        'g': 3, 'h': 10, 'i': 12,
                        'j': 4, 'k': 11, 'l': 9}
        self._verifyRpo(n_map, expected_rpo)

    def testFirstGraph(self):
        edges = {None: 'r',
                 'r': ['w1', 'x1', 'z5'],
                 'w1': ['w2'], 'w2': ['w3'],
                 'w3': ['w4'], 'w4': ['w5'],
                 'x1': ['x2'], 'x2': ['x3'],
                 'x3': ['x4'], 'x4': ['x5'], 'x5': ['y1'],
                 'y1': ['w1', 'w2', 'w3', 'w4', 'w5', 'y2'],
                 'y2': ['w1', 'w2', 'w3', 'w4', 'w5', 'y3'],
                 'y3': ['w1', 'w2', 'w3', 'w4', 'w5', 'y4'],
                 'y4': ['w1', 'w2', 'w3', 'w4', 'w5', 'y5'],
                 'y5': ['w1', 'w2', 'w3', 'w4', 'w5', 'z1'],
                 'z1': ['z2'],
                 'z2': ['z1', 'z3'],
                 'z3': ['z2', 'z4'],
                 'z4': ['z3', 'z5'],
                 'z5': ['z4']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testFirst_graph', '/tmp')
        expected_rpo = {'r': 1, 'x1': 2, 'x2': 3, 'x3': 4, 'x4': 5, 'x5': 6,
                        'w1': 17, 'w2': 18, 'w3': 19, 'w4': 20, 'w5': 21,
                        'y1': 7, 'y2': 8, 'y3': 9, 'y4': 10, 'y5': 11,
                        'z1': 12, 'z2': 13, 'z3': 14, 'z4': 15, 'z5': 16}
        self._verifyRpo(n_map, expected_rpo)

    def testSecondGraph(self):
        edges = {None: 'r',
                 'r': ['y1', 'x12'],
                 'x11': ['x12', 'x22'],
                 'x12': ['x11'],
                 'x21': ['x22'],
                 'x22': ['x21'],
                 'y1': ['y2', 'x11'],
                 'y2': ['x21']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testSecond_graph', '/tmp')
        expected_rpo = {'r': 1, 'x11': 3, 'x12': 4, 'x21': 6, 'x22': 7,
                        'y1': 2, 'y2': 5}
        self._verifyRpo(n_map, expected_rpo)

    def testThirdGraph(self):
        edges = {None: 'r',
                 'r': ['w', 'y1'],
                 'w': ['x1', 'x2'],
                 'x2': ['x1'],
                 'y1': ['y2'],
                 'y2': ['x2']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        ##self.graph.draw('_testThird_graph', '/tmp')
        expected_rpo = {'r': 1, 'w': 4, 'x1': 6, 'x2': 5, 'y1': 2, 'y2': 3}
        self._verifyRpo(n_map, expected_rpo)

    def testFourthGraph(self):
        edges = {None: 'r',
                 'r': ['x1', 'y1', 'y2'],
                 'x1': ['x2'],
                 'x2': ['y1', 'y2']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testFourth_graph', '/tmp')
        expected_rpo = {'r': 1, 'x1': 2, 'x2': 3, 'y1': 5, 'y2': 4}
        self._verifyRpo(n_map, expected_rpo)

    def testFifthGraph(self):
        edges = {None: 'r',
                 'r': ['a', 'i'],
                 'a': ['b', 'c'],
                 'b': ['c', 'e', 'g'],
                 'c': ['d'],
                 'd': ['i'],
                 'e': ['c', 'f'],
                 'f': ['i'],
                 'g': ['h'],
                 'h': ['d', 'f', 'i']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testFifth_graph', '/tmp')
        expected_rpo = {'r': 1, 'a': 2, 'b': 3, 'c': 8,
                        'd': 9, 'e': 6, 'f': 7, 'g': 4,
                        'h': 5, 'i': 10}
        self._verifyRpo(n_map, expected_rpo)

    def testLinearVitGraph(self):
        edges = {None: 'r',
                 'r': ['w', 'y'],
                 'w': ['x1'],
                 'y': ['x7'],
                 'x1': ['x2'],
                 'x2': ['x1', 'x3'],
                 'x3': ['x2', 'x4'],
                 'x4': ['x3', 'x5'],
                 'x5': ['x4', 'x6'],
                 'x6': ['x5', 'x7'],
                 'x7': ['x6']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testLinearVit_graph', '/tmp')
        expected_rpo = {'r': 1, 'w': 3, 'x1': 4, 'x2': 5, 'x3': 6,
                        'x4': 7, 'x5': 8, 'x6': 9, 'x7': 10, 'y': 2}
        self._verifyRpo(n_map, expected_rpo)

    def testCrossGraph(self):
        edges = {None: 'r',
                 'r': ['a', 'd'],
                 'a': ['b'],
                 'b': ['c'],
                 'c': ['a', 'd', 'g'],
                 'd': ['e'],
                 'e': ['f'],
                 'f': ['a', 'd', 'g']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testCross_graph', '/tmp')
        expected_rpo = {'r': 1, 'a': 2, 'b': 3, 'c': 4,
                        'd': 5, 'e': 6, 'f': 7, 'g': 8}
        self._verifyRpo(n_map, expected_rpo)

    def testTVerifyGraph(self):
        edges = {None: 'n1',
                 'n1': ['n2', 'n8'],
                 'n2': ['n3'],
                 'n3': ['n4', 'n8', 'n9'],
                 'n4': ['n3', 'n5', 'n6', 'n7'],
                 'n5': ['n4'],
                 'n6': ['n5'],
                 'n7': ['n6'],
                 'n8': ['n9', 'n12'],
                 'n9': ['n10', 'n11', 'n12'],
                 'n10': ['n11'],
                 'n11': ['n7'],
                 'n12': ['n10']}
        n_map = self._createGraphFrom(edges)
        self.graph.compute_rpo()
        #self.graph.draw('_testTVerify_graph', '/tmp')
        expected_rpo = {'n1': 1, 'n2': 2, 'n3': 3,
                        'n4': 9, 'n5': 12, 'n6': 11,
                        'n7': 10, 'n8': 4, 'n9': 5,
                        'n10': 7, 'n11': 8, 'n12': 6}
        self._verifyRpo(n_map, expected_rpo)
# Run the RPO test-suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
kevin-coder/tensorflow-fork | refs/heads/master | tensorflow/python/data/experimental/__init__.py | 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental API for building input pipelines.
This module contains experimental `Dataset` sources and transformations that can
be used in conjunction with the `tf.data.Dataset` API. Note that the
`tf.data.experimental` API is not subject to the same backwards compatibility
guarantees as `tf.data`, but we will provide deprecation advice in advance of
removing existing functionality.
See [Importing Data](https://tensorflow.org/guide/datasets) for an overview.
@@Counter
@@CheckpointInputPipelineHook
@@CsvDataset
@@DatasetStructure
@@MapVectorizationOptions
@@NestedStructure
@@OptimizationOptions
@@Optional
@@OptionalStructure
@@RandomDataset
@@Reducer
@@SparseTensorStructure
@@SqlDataset
@@StatsAggregator
@@StatsOptions
@@Structure
@@TFRecordWriter
@@TensorStructure
@@ThreadingOptions
@@bucket_by_sequence_length
@@bytes_produced_stats
@@cardinality
@@choose_from_datasets
@@copy_to_device
@@dense_to_sparse_batch
@@enumerate_dataset
@@get_next_as_optional
@@get_single_element
@@group_by_reducer
@@group_by_window
@@ignore_errors
@@latency_stats
@@make_batched_features_dataset
@@make_csv_dataset
@@make_saveable_from_iterator
@@map_and_batch
@@map_and_batch_with_legacy_function
@@parallel_interleave
@@parse_example_dataset
@@prefetch_to_device
@@rejection_resample
@@sample_from_datasets
@@scan
@@shuffle_and_repeat
@@take_while
@@unbatch
@@unique
@@AUTOTUNE
@@INFINITE_CARDINALITY
@@UNKNOWN_CARDINALITY
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.data.experimental.ops.batching import dense_to_sparse_batch
from tensorflow.python.data.experimental.ops.batching import map_and_batch
from tensorflow.python.data.experimental.ops.batching import map_and_batch_with_legacy_function
from tensorflow.python.data.experimental.ops.batching import unbatch
from tensorflow.python.data.experimental.ops.cardinality import cardinality
from tensorflow.python.data.experimental.ops.cardinality import INFINITE as INFINITE_CARDINALITY
from tensorflow.python.data.experimental.ops.cardinality import UNKNOWN as UNKNOWN_CARDINALITY
from tensorflow.python.data.experimental.ops.counter import Counter
from tensorflow.python.data.experimental.ops.enumerate_ops import enumerate_dataset
from tensorflow.python.data.experimental.ops.error_ops import ignore_errors
from tensorflow.python.data.experimental.ops.get_single_element import get_single_element
from tensorflow.python.data.experimental.ops.grouping import bucket_by_sequence_length
from tensorflow.python.data.experimental.ops.grouping import group_by_reducer
from tensorflow.python.data.experimental.ops.grouping import group_by_window
from tensorflow.python.data.experimental.ops.grouping import Reducer
from tensorflow.python.data.experimental.ops.interleave_ops import choose_from_datasets
from tensorflow.python.data.experimental.ops.interleave_ops import parallel_interleave
from tensorflow.python.data.experimental.ops.interleave_ops import sample_from_datasets
from tensorflow.python.data.experimental.ops.iterator_ops import CheckpointInputPipelineHook
from tensorflow.python.data.experimental.ops.iterator_ops import make_saveable_from_iterator
from tensorflow.python.data.experimental.ops.optimization import AUTOTUNE
from tensorflow.python.data.experimental.ops.optimization_options import MapVectorizationOptions
from tensorflow.python.data.experimental.ops.optimization_options import OptimizationOptions
from tensorflow.python.data.experimental.ops.parsing_ops import parse_example_dataset
from tensorflow.python.data.experimental.ops.prefetching_ops import copy_to_device
from tensorflow.python.data.experimental.ops.prefetching_ops import prefetch_to_device
from tensorflow.python.data.experimental.ops.random_ops import RandomDataset
from tensorflow.python.data.experimental.ops.readers import CsvDataset
from tensorflow.python.data.experimental.ops.readers import make_batched_features_dataset
from tensorflow.python.data.experimental.ops.readers import make_csv_dataset
from tensorflow.python.data.experimental.ops.readers import SqlDataset
from tensorflow.python.data.experimental.ops.resampling import rejection_resample
from tensorflow.python.data.experimental.ops.scan_ops import scan
from tensorflow.python.data.experimental.ops.shuffle_ops import shuffle_and_repeat
from tensorflow.python.data.experimental.ops.stats_aggregator import StatsAggregator
from tensorflow.python.data.experimental.ops.stats_ops import bytes_produced_stats
from tensorflow.python.data.experimental.ops.stats_ops import latency_stats
from tensorflow.python.data.experimental.ops.stats_options import StatsOptions
from tensorflow.python.data.experimental.ops.take_while_ops import take_while
from tensorflow.python.data.experimental.ops.threading_options import ThreadingOptions
from tensorflow.python.data.experimental.ops.unique import unique
from tensorflow.python.data.experimental.ops.writers import TFRecordWriter
from tensorflow.python.data.ops.dataset_ops import DatasetStructure
from tensorflow.python.data.ops.iterator_ops import get_next_as_optional
from tensorflow.python.data.ops.optional_ops import Optional
from tensorflow.python.data.ops.optional_ops import OptionalStructure
from tensorflow.python.data.util.structure import NestedStructure
from tensorflow.python.data.util.structure import SparseTensorStructure
from tensorflow.python.data.util.structure import Structure
from tensorflow.python.data.util.structure import TensorStructure
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
|
sean-/ansible | refs/heads/devel | v1/ansible/runner/action_plugins/synchronize.py | 86 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012-2013, Timothy Appnel <tim@appnel.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os.path
from ansible import utils
from ansible import constants
from ansible.runner.return_data import ReturnData
import ansible.utils.template as template
class ActionModule(object):
    """Action plugin for the ``synchronize`` (rsync) module.

    Rewrites ``src``/``dest`` into rsync-style ``user@host:path`` specs,
    forces a local connection where appropriate (rsync performs the remote
    transport itself), and restores the runner's become/transport settings
    afterwards.
    """

    def __init__(self, runner):
        self.runner = runner
        # Populated in setup()/run() with the current variable injection.
        self.inject = None

    def _get_absolute_path(self, path=None):
        """Resolve a role-relative path against the role's files/ directory,
        preserving any trailing slash (rsync treats it as significant)."""
        if 'vars' in self.inject:
            if '_original_file' in self.inject['vars']:
                # roles
                original_path = path
                path = utils.path_dwim_relative(self.inject['_original_file'], 'files', path, self.runner.basedir)
                if original_path and original_path[-1] == '/' and path[-1] != '/':
                    # make sure the dwim'd path ends in a trailing "/"
                    # if the original path did
                    path += '/'
        return path

    def _process_origin(self, host, path, user):
        """Return the rsync spec for the origin side: ``user@host:path`` for
        remote hosts, a (possibly role-resolved) plain path for localhost."""
        if not host in ['127.0.0.1', 'localhost']:
            if user:
                return '%s@%s:%s' % (user, host, path)
            else:
                return '%s:%s' % (host, path)
        else:
            if not ':' in path:
                if not path.startswith('/'):
                    path = self._get_absolute_path(path=path)
            return path

    def _process_remote(self, host, path, user):
        """Return the rsync spec for the remote side; unlike _process_origin,
        localhost stays remote-style unless the transport is 'local'."""
        transport = self.runner.transport
        return_data = None
        if not host in ['127.0.0.1', 'localhost'] or transport != "local":
            if user:
                return_data = '%s@%s:%s' % (user, host, path)
            else:
                return_data = '%s:%s' % (host, path)
        else:
            return_data = path
        if not ':' in return_data:
            if not return_data.startswith('/'):
                return_data = self._get_absolute_path(path=return_data)
        return return_data

    def setup(self, module_name, inject):
        ''' Always default to localhost as delegate if None defined '''
        self.inject = inject

        # Store original transport and sudo values.
        self.original_transport = inject.get('ansible_connection', self.runner.transport)
        self.original_become = self.runner.become
        self.transport_overridden = False

        if inject.get('delegate_to') is None:
            inject['delegate_to'] = '127.0.0.1'
            # IF original transport is not local, override transport and disable sudo.
            if self.original_transport != 'local':
                inject['ansible_connection'] = 'local'
                self.transport_overridden = True
                self.runner.become = False

    def run(self, conn, tmp, module_name, module_args,
        inject, complex_args=None, **kwargs):

        ''' generates params and passes them on to the rsync module '''

        self.inject = inject

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        use_ssh_args = options.pop('use_ssh_args', None)

        # Template all user-supplied values against the current variables.
        src = template.template(self.runner.basedir, src, inject)
        dest = template.template(self.runner.basedir, dest, inject)
        use_ssh_args = template.template(self.runner.basedir, use_ssh_args, inject)

        try:
            options['local_rsync_path'] = inject['ansible_rsync_path']
        except KeyError:
            # No custom rsync path configured on the control machine.
            pass

        # from the perspective of the rsync call the delegate is the localhost
        src_host = '127.0.0.1'
        dest_host = inject.get('ansible_ssh_host', inject['inventory_hostname'])

        # allow ansible_ssh_host to be templated
        dest_host = template.template(self.runner.basedir, dest_host, inject, fail_on_undefined=True)
        dest_is_local = dest_host in ['127.0.0.1', 'localhost']

        # CHECK FOR NON-DEFAULT SSH PORT
        dest_port = options.get('dest_port')
        inv_port = inject.get('ansible_ssh_port', inject['inventory_hostname'])
        if inv_port != dest_port and inv_port != inject['inventory_hostname']:
            options['dest_port'] = inv_port

        # edge case: explicit delegate and dest_host are the same
        if dest_host == inject['delegate_to']:
            dest_host = '127.0.0.1'

        # SWITCH SRC AND DEST PER MODE
        if options.get('mode', 'push') == 'pull':
            (dest_host, src_host) = (src_host, dest_host)

        # CHECK DELEGATE HOST INFO
        use_delegate = False
        if conn.delegate != conn.host:
            if 'hostvars' in inject:
                if conn.delegate in inject['hostvars'] and self.original_transport != 'local':
                    # use a delegate host instead of localhost
                    use_delegate = True

        # COMPARE DELEGATE, HOST AND TRANSPORT
        process_args = False
        if not dest_host is src_host and self.original_transport != 'local':
            # interpret and inject remote host info into src or dest
            process_args = True

        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
        if process_args or use_delegate:

            user = None
            if utils.boolean(options.get('set_remote_user', 'yes')):
                if use_delegate:
                    user = inject['hostvars'][conn.delegate].get('ansible_ssh_user')

                if not use_delegate or not user:
                    user = inject.get('ansible_ssh_user',
                                    self.runner.remote_user)

            if use_delegate:
                # FIXME
                private_key = inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)
            else:
                private_key = inject.get('ansible_ssh_private_key_file', self.runner.private_key_file)

            private_key = template.template(self.runner.basedir, private_key, inject, fail_on_undefined=True)

            if not private_key is None:
                private_key = os.path.expanduser(private_key)
                options['private_key'] = private_key

            # use the mode to define src and dest's url
            if options.get('mode', 'push') == 'pull':
                # src is a remote path: <user>@<host>, dest is a local path
                src = self._process_remote(src_host, src, user)
                dest = self._process_origin(dest_host, dest, user)
            else:
                # src is a local path, dest is a remote path: <user>@<host>
                src = self._process_origin(src_host, src, user)
                dest = self._process_remote(dest_host, dest, user)

        options['src'] = src
        options['dest'] = dest
        if 'mode' in options:
            # 'mode' is only meaningful to this plugin, not the module.
            del options['mode']
        if use_ssh_args:
            options['ssh_args'] = constants.ANSIBLE_SSH_ARGS

        # Allow custom rsync path argument.
        rsync_path = options.get('rsync_path', None)

        # If no rsync_path is set, sudo was originally set, and dest is remote then add 'sudo rsync' argument.
        if not rsync_path and self.transport_overridden and self.original_become and not dest_is_local and self.runner.become_method == 'sudo':
            rsync_path = 'sudo rsync'

        # make sure rsync path is quoted.
        if rsync_path:
            options['rsync_path'] = '"' + rsync_path + '"'

        module_args = ""
        if self.runner.noop_on_check(inject):
            module_args = "CHECKMODE=True"

        # run the module and store the result
        result = self.runner._execute_module(conn, tmp, 'synchronize', module_args, complex_args=options, inject=inject)

        # reset the sudo property
        self.runner.become = self.original_become

        return result
|
dpfg/kicker-scorer-api | refs/heads/master | shell.py | 2 | #!/usr/bin/env python
import os
import readline  # enables line editing / history in the interactive prompt
from pprint import pprint  # handy for inspecting objects in the shell

# Star imports pull the whole app namespace into the interactive session;
# acceptable here because this script exists only as a developer shell.
from flask import *
from app import *
# from utils import *
# from db import *
# from models import *

# Drop into an interactive interpreter after the script ends
# (equivalent to running `python -i shell.py`).
os.environ['PYTHONINSPECT'] = 'True'
|
lunafeng/django | refs/heads/master | django/db/backends/sqlite3/base.py | 323 | """
SQLite3 backend for django.
Works with either the pysqlite2 module or the sqlite3 module in the
standard library.
"""
from __future__ import unicode_literals
import datetime
import decimal
import re
import warnings
from django.conf import settings
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.base.validation import BaseDatabaseValidation
from django.utils import six, timezone
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.safestring import SafeBytes
try:
import pytz
except ImportError:
pytz = None
try:
try:
from pysqlite2 import dbapi2 as Database
except ImportError:
from sqlite3 import dbapi2 as Database
except ImportError as exc:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading either pysqlite2 or sqlite3 modules (tried in that order): %s" % exc)
# Some of these import sqlite3, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
# Re-export the DB-API 2.0 exception classes from the active sqlite driver.
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
def adapt_datetime_warn_on_aware_datetime(value):
    """Adapt a datetime for SQLite storage, warning when it is tz-aware.

    Aware datetimes are converted to naive UTC before formatting, with a
    deprecation warning.  Returns the ISO 8601 string with a space separator.
    """
    # Remove this function and rely on the default adapter in Django 2.0.
    if settings.USE_TZ and timezone.is_aware(value):
        # Interpolate the value into the message -- the original passed the
        # bare "%s" template, so the warning printed a literal "(%s)".
        warnings.warn(
            "The SQLite database adapter received an aware datetime (%s), "
            "probably from cursor.execute(). Update your code to pass a "
            "naive datetime in the database connection's time zone (UTC by "
            "default)." % value, RemovedInDjango20Warning)
        # This doesn't account for the database connection's timezone,
        # which isn't known. (That's why this adapter is deprecated.)
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return value.isoformat(str(" "))
def decoder(conv_func):
    """Wrap *conv_func* so it receives text instead of bytes.

    The sqlite3/pysqlite2 interface always hands converters raw byte
    strings; the returned callable UTF-8-decodes the value first and then
    delegates to *conv_func*.
    """
    def convert(raw):
        return conv_func(raw.decode('utf-8'))
    return convert
# Register converters that turn SQLite's byte-string column values back into
# Python objects based on the declared column type (PARSE_DECLTYPES).
Database.register_converter(str("bool"), decoder(lambda s: s == '1'))
Database.register_converter(str("time"), decoder(parse_time))
Database.register_converter(str("date"), decoder(parse_date))
Database.register_converter(str("datetime"), decoder(parse_datetime))
Database.register_converter(str("timestamp"), decoder(parse_datetime))
Database.register_converter(str("TIMESTAMP"), decoder(parse_datetime))
Database.register_converter(str("decimal"), decoder(backend_utils.typecast_decimal))
# Register adapters for the Python -> SQLite direction.
Database.register_adapter(datetime.datetime, adapt_datetime_warn_on_aware_datetime)
Database.register_adapter(decimal.Decimal, backend_utils.rev_typecast_decimal)
if six.PY2:
    # On Python 2, plain str/SafeBytes are byte strings; decode to unicode
    # before handing them to the driver.
    Database.register_adapter(str, lambda s: s.decode('utf-8'))
    Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8'))
class DatabaseWrapper(BaseDatabaseWrapper):
    """Django database wrapper for SQLite (pysqlite2 or stdlib sqlite3)."""
    vendor = 'sqlite'
    # SQLite doesn't actually support most of these types, but it "does the right
    # thing" given more verbose field definitions, so leave them as is so that
    # schema inspection is more useful.
    data_types = {
        'AutoField': 'integer',
        'BinaryField': 'BLOB',
        'BooleanField': 'bool',
        'CharField': 'varchar(%(max_length)s)',
        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'decimal',
        'DurationField': 'bigint',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'real',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'char(15)',
        'GenericIPAddressField': 'char(39)',
        'NullBooleanField': 'bool',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer unsigned',
        'PositiveSmallIntegerField': 'smallint unsigned',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'text',
        'TimeField': 'time',
        'UUIDField': 'char(32)',
    }
    data_types_suffix = {
        'AutoField': 'AUTOINCREMENT',
    }
    # SQLite requires LIKE statements to include an ESCAPE clause if the value
    # being escaped has a percent or underscore in it.
    # See http://www.sqlite.org/lang_expr.html for an explanation.
    operators = {
        'exact': '= %s',
        'iexact': "LIKE %s ESCAPE '\\'",
        'contains': "LIKE %s ESCAPE '\\'",
        'icontains': "LIKE %s ESCAPE '\\'",
        'regex': 'REGEXP %s',
        'iregex': "REGEXP '(?i)' || %s",
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': "LIKE %s ESCAPE '\\'",
        'endswith': "LIKE %s ESCAPE '\\'",
        'istartswith': "LIKE %s ESCAPE '\\'",
        'iendswith': "LIKE %s ESCAPE '\\'",
    }
    # The patterns below are used to generate SQL pattern lookup clauses when
    # the right-hand side of the lookup isn't a raw string (it might be an expression
    # or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
    # escaped on database side.
    #
    # Note: we use str.format() here for readability as '%' is used as a wildcard for
    # the LIKE operator.
    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
    pattern_ops = {
        'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'",
        'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'",
        'startswith': r"LIKE {} || '%%' ESCAPE '\'",
        'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'",
        'endswith': r"LIKE '%%' || {} ESCAPE '\'",
        'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'",
    }
    # Expose the DB-API module and the schema editor class on the wrapper.
    Database = Database
    SchemaEditorClass = DatabaseSchemaEditor
    def __init__(self, *args, **kwargs):
        # Instantiate the backend-specific helper objects used by the ORM.
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)
    def get_connection_params(self):
        """Build the kwargs passed to Database.connect() from settings."""
        settings_dict = self.settings_dict
        if not settings_dict['NAME']:
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured(
                "settings.DATABASES is improperly configured. "
                "Please supply the NAME value.")
        kwargs = {
            'database': settings_dict['NAME'],
            'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
        }
        kwargs.update(settings_dict['OPTIONS'])
        # Always allow the underlying SQLite connection to be shareable
        # between multiple threads. The safe-guarding will be handled at a
        # higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
        # property. This is necessary as the shareability is disabled by
        # default in pysqlite and it cannot be changed once a connection is
        # opened.
        if 'check_same_thread' in kwargs and kwargs['check_same_thread']:
            warnings.warn(
                'The `check_same_thread` option was provided and set to '
                'True. It will be overridden with False. Use the '
                '`DatabaseWrapper.allow_thread_sharing` property instead '
                'for controlling thread shareability.',
                RuntimeWarning
            )
        kwargs.update({'check_same_thread': False})
        if self.features.can_share_in_memory_db:
            kwargs.update({'uri': True})
        return kwargs
    def get_new_connection(self, conn_params):
        # Register the SQL helper functions Django's generated SQL calls
        # (date/time extraction, REGEXP, POWER, timedelta formatting).
        conn = Database.connect(**conn_params)
        conn.create_function("django_date_extract", 2, _sqlite_date_extract)
        conn.create_function("django_date_trunc", 2, _sqlite_date_trunc)
        conn.create_function("django_datetime_cast_date", 2, _sqlite_datetime_cast_date)
        conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract)
        conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc)
        conn.create_function("django_time_extract", 2, _sqlite_time_extract)
        conn.create_function("regexp", 2, _sqlite_regexp)
        conn.create_function("django_format_dtdelta", 3, _sqlite_format_dtdelta)
        conn.create_function("django_power", 2, _sqlite_power)
        return conn
    def init_connection_state(self):
        # No per-connection state to initialize for SQLite.
        pass
    def create_cursor(self):
        # Use the wrapper cursor that rewrites %s placeholders to ?.
        return self.connection.cursor(factory=SQLiteCursorWrapper)
    def close(self):
        self.validate_thread_sharing()
        # If database is in memory, closing the connection destroys the
        # database. To prevent accidental data loss, ignore close requests on
        # an in-memory db.
        if not self.is_in_memory_db(self.settings_dict['NAME']):
            BaseDatabaseWrapper.close(self)
    def _savepoint_allowed(self):
        # Two conditions are required here:
        # - A sufficiently recent version of SQLite to support savepoints,
        # - Being in a transaction, which can only happen inside 'atomic'.
        # When 'isolation_level' is not None, sqlite3 commits before each
        # savepoint; it's a bug. When it is None, savepoints don't make sense
        # because autocommit is enabled. The only exception is inside 'atomic'
        # blocks. To work around that bug, on SQLite, 'atomic' starts a
        # transaction explicitly rather than simply disable autocommit.
        return self.features.uses_savepoints and self.in_atomic_block
    def _set_autocommit(self, autocommit):
        if autocommit:
            level = None
        else:
            # sqlite3's internal default is ''. It's different from None.
            # See Modules/_sqlite/connection.c.
            level = ''
        # 'isolation_level' is a misleading API.
        # SQLite always runs at the SERIALIZABLE isolation level.
        with self.wrap_database_errors:
            self.connection.isolation_level = level
    def check_constraints(self, table_names=None):
        """
        Checks each table name in `table_names` for rows with invalid foreign
        key references. This method is intended to be used in conjunction with
        `disable_constraint_checking()` and `enable_constraint_checking()`, to
        determine if rows with invalid references were entered while constraint
        checks were off.
        Raises an IntegrityError on the first invalid foreign key reference
        encountered (if any) and provides detailed information about the
        invalid reference in the error message.
        Backends can override this method if they can more directly apply
        constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
        """
        cursor = self.cursor()
        if table_names is None:
            table_names = self.introspection.table_names(cursor)
        for table_name in table_names:
            primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
            if not primary_key_column_name:
                continue
            key_columns = self.introspection.get_key_columns(cursor, table_name)
            for column_name, referenced_table_name, referenced_column_name in key_columns:
                cursor.execute("""
                SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
                LEFT JOIN `%s` as REFERRED
                ON (REFERRING.`%s` = REFERRED.`%s`)
                WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
                % (primary_key_column_name, column_name, table_name, referenced_table_name,
                column_name, referenced_column_name, column_name, referenced_column_name))
                for bad_row in cursor.fetchall():
                    raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
                    "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
                    % (table_name, bad_row[0], table_name, column_name, bad_row[1],
                    referenced_table_name, referenced_column_name))
    def is_usable(self):
        # SQLite runs in-process; the connection cannot "go away".
        return True
    def _start_transaction_under_autocommit(self):
        """
        Start a transaction explicitly in autocommit mode.
        Staying in autocommit mode works around a bug of sqlite3 that breaks
        savepoints when autocommit is disabled.
        """
        self.cursor().execute("BEGIN")
    def is_in_memory_db(self, name):
        # Both the classic ":memory:" name and the URI "mode=memory" form
        # denote an in-memory database.
        return name == ":memory:" or "mode=memory" in force_text(name)
# Matches a %s placeholder that is not escaped as %%s; used by
# SQLiteCursorWrapper to translate Django's "format" paramstyle into the
# "qmark" (?) style sqlite3 expects.
FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')
class SQLiteCursorWrapper(Database.Cursor):
    """Cursor that bridges Django's "format" (%s) placeholders to the
    "qmark" (?) style used by pysqlite2/sqlite3.

    A literal "%s" in a query must therefore be written as "%%s".
    """
    def execute(self, query, params=None):
        if params is None:
            return Database.Cursor.execute(self, query)
        return Database.Cursor.execute(self, self.convert_query(query), params)
    def executemany(self, query, param_list):
        return Database.Cursor.executemany(self, self.convert_query(query), param_list)
    def convert_query(self, query):
        # Swap every unescaped %s for ?, then collapse %% to a literal %.
        return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')
def _sqlite_date_extract(lookup_type, dt):
    """SQL function django_date_extract: pull one component out of a
    date/datetime string; returns None for NULL or unparsable input."""
    if dt is None:
        return None
    try:
        parsed = backend_utils.typecast_timestamp(dt)
    except (TypeError, ValueError):
        return None
    if lookup_type == 'week_day':
        # Django numbers week days Sunday=1 .. Saturday=7.
        return parsed.isoweekday() % 7 + 1
    return getattr(parsed, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
    """SQL function django_date_trunc: truncate a date string to year,
    month, or day granularity; returns None for unparsable input."""
    try:
        parsed = backend_utils.typecast_timestamp(dt)
    except (TypeError, ValueError):
        return None
    if lookup_type == 'year':
        return "%i-01-01" % parsed.year
    if lookup_type == 'month':
        return "%i-%02i-01" % (parsed.year, parsed.month)
    if lookup_type == 'day':
        return "%i-%02i-%02i" % (parsed.year, parsed.month, parsed.day)
def _sqlite_datetime_parse(dt, tzname):
    """Parse a datetime string coming out of SQLite, localizing it to
    *tzname* when one is given; returns None for NULL/unparsable input."""
    if dt is None:
        return None
    try:
        parsed = backend_utils.typecast_timestamp(dt)
    except (TypeError, ValueError):
        return None
    if tzname is None:
        return parsed
    return timezone.localtime(parsed, pytz.timezone(tzname))
def _sqlite_datetime_cast_date(dt, tzname):
    """SQL function django_datetime_cast_date: datetime string -> ISO date
    string in the given timezone (None propagates)."""
    parsed = _sqlite_datetime_parse(dt, tzname)
    return None if parsed is None else parsed.date().isoformat()
def _sqlite_datetime_extract(lookup_type, dt, tzname):
    """SQL function django_datetime_extract: pull one component out of a
    datetime string, honoring the connection timezone."""
    parsed = _sqlite_datetime_parse(dt, tzname)
    if parsed is None:
        return None
    if lookup_type != 'week_day':
        return getattr(parsed, lookup_type)
    # Django numbers week days Sunday=1 .. Saturday=7.
    return parsed.isoweekday() % 7 + 1
def _sqlite_datetime_trunc(lookup_type, dt, tzname):
    """SQL function django_datetime_trunc: truncate a datetime string to
    the given granularity (year .. second)."""
    parsed = _sqlite_datetime_parse(dt, tzname)
    if parsed is None:
        return None
    # Template plus the number of leading datetime fields it consumes.
    templates = {
        'year': ("%i-01-01 00:00:00", 1),
        'month': ("%i-%02i-01 00:00:00", 2),
        'day': ("%i-%02i-%02i 00:00:00", 3),
        'hour': ("%i-%02i-%02i %02i:00:00", 4),
        'minute': ("%i-%02i-%02i %02i:%02i:00", 5),
        'second': ("%i-%02i-%02i %02i:%02i:%02i", 6),
    }
    if lookup_type not in templates:
        # Unknown granularity: mirror the original fall-through (None).
        return None
    template, count = templates[lookup_type]
    fields = (parsed.year, parsed.month, parsed.day,
              parsed.hour, parsed.minute, parsed.second)
    return template % fields[:count]
def _sqlite_time_extract(lookup_type, dt):
    """SQL function django_time_extract: pull one component (hour, minute,
    second, ...) out of a time string; None for NULL/unparsable input."""
    if dt is None:
        return None
    try:
        parsed = backend_utils.typecast_time(dt)
    except (TypeError, ValueError):
        return None
    return getattr(parsed, lookup_type)
def _sqlite_format_dtdelta(conn, lhs, rhs):
    """
    LHS and RHS can be either:
    - An integer number of microseconds
    - A string representing a timedelta object
    - A string representing a datetime
    """
    def to_temporal(value):
        # Integers are microsecond counts: render them as fractional
        # seconds so parse_duration() understands them.
        if isinstance(value, six.integer_types):
            value = str(decimal.Decimal(value) / decimal.Decimal(1000000))
        duration = parse_duration(value)
        if duration is not None:
            return duration
        return backend_utils.typecast_timestamp(value)
    try:
        real_lhs = to_temporal(lhs)
        real_rhs = to_temporal(rhs)
        if conn.strip() == '+':
            out = real_lhs + real_rhs
        else:
            out = real_lhs - real_rhs
    except (ValueError, TypeError):
        return None
    # typecast_timestamp returns a date or a datetime without timezone.
    # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
    return str(out)
def _sqlite_regexp(re_pattern, re_string):
    """SQL function REGEXP: True when *re_pattern* matches anywhere in
    *re_string*; NULL input yields False."""
    if re_string is None:
        return False
    return re.search(re_pattern, force_text(re_string)) is not None
def _sqlite_power(x, y):
return x ** y
|
orgito/ansible | refs/heads/devel | lib/ansible/plugins/action/net_get.py | 19 | # (c) 2018, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import re
import uuid
import hashlib
from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.connection import Connection
from ansible.plugins.action.network import ActionModule as NetworkActionModule
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.display import Display
# Module-level Display instance for emitting warnings/verbose output.
display = Display()
class ActionModule(NetworkActionModule):
    """Action plugin for ``net_get``: copy a file from a network device to
    the Ansible controller over scp/sftp, via a network_cli connection."""

    def run(self, tmp=None, task_vars=None):
        """Validate the task arguments, skip the transfer when the local
        copy already matches the remote file, otherwise fetch it.

        Returns the standard action result dict (``changed``, ``failed``,
        ``msg``, ``destination``).
        """
        socket_path = None
        play_context = copy.deepcopy(self._play_context)
        play_context.network_os = self._get_network_os(task_vars)
        result = super(ActionModule, self).run(task_vars=task_vars)
        if play_context.connection != 'network_cli':
            # It is supported only with network_cli
            result['failed'] = True
            result['msg'] = ('please use network_cli connection type for net_get module')
            return result
        # Bug fix: dict.get() never raises KeyError, so the previous
        # try/except around it could not detect a missing argument.
        src = self._task.args.get('src')
        if src is None:
            return {'failed': True, 'msg': 'missing required argument: src'}
        # Get destination file if specified
        dest = self._task.args.get('dest')
        if dest is None:
            dest = self._get_default_dest(src)
        else:
            dest = self._handle_dest_path(dest)
        # Get proto
        proto = self._task.args.get('protocol')
        if proto is None:
            proto = 'scp'
        sock_timeout = play_context.timeout
        if socket_path is None:
            socket_path = self._connection.socket_path
        conn = Connection(socket_path)
        try:
            changed = self._handle_existing_file(conn, src, dest, proto, sock_timeout)
            if changed is False:
                result['changed'] = False
                result['destination'] = dest
                return result
        except Exception as exc:
            # Best effort: if the idempotency check blows up, still attempt
            # the transfer but surface a warning in the result.
            result['msg'] = ('Warning: exception %s idempotency check failed. Check '
                             'dest' % exc)
        try:
            conn.get_file(
                source=src, destination=dest,
                proto=proto, timeout=sock_timeout
            )
        except Exception as exc:
            result['failed'] = True
            result['msg'] = ('Exception received : %s' % exc)
        result['changed'] = True
        result['destination'] = dest
        return result

    def _handle_dest_path(self, dest):
        """Return an absolute destination path, resolving a relative
        ``dest`` against the playbook/role working directory."""
        working_path = self._get_working_path()
        # Bug fix: urlsplit was called on the literal string 'dest' instead
        # of the dest variable, so URL-style destinations were never
        # recognized as absolute.
        if os.path.isabs(dest) or urlsplit(dest).scheme:
            dst = dest
        else:
            dst = self._loader.path_dwim_relative(working_path, '', dest)
        return dst

    def _get_src_filename_from_path(self, src_path):
        """Extract the bare filename from a device path such as
        'flash:/dir/file' or '/tmp/file' (split on '/' and ':')."""
        filename_list = re.split('/|:', src_path)
        return filename_list[-1]

    def _get_default_dest(self, src_path):
        """Default local destination: the source's filename inside the
        current working path."""
        dest_path = self._get_working_path()
        src_fname = self._get_src_filename_from_path(src_path)
        filename = '%s/%s' % (dest_path, src_fname)
        return filename

    def _handle_existing_file(self, conn, source, dest, proto, timeout):
        """Return True when a transfer is needed, False when ``dest``
        already holds content identical to the remote ``source``.

        Downloads the remote file to a uuid-named temporary file and
        compares SHA-1 checksums against the existing local copy.

        Raises on download or read failure (callers treat that as a
        failed idempotency check and transfer anyway).
        """
        if not os.path.exists(dest):
            return True
        cwd = self._loader.get_basedir()
        filename = str(uuid.uuid4())
        tmp_dest_file = os.path.join(cwd, filename)
        try:
            conn.get_file(
                source=source, destination=tmp_dest_file,
                proto=proto, timeout=timeout
            )
        except Exception as exc:
            os.remove(tmp_dest_file)
            raise Exception(exc)
        try:
            with open(tmp_dest_file, 'r') as f:
                new_content = f.read()
            with open(dest, 'r') as f:
                old_content = f.read()
        except (IOError, OSError) as ioexc:
            # Bug fix: clean up the temporary download on read failure
            # instead of leaking it in the base directory.
            os.remove(tmp_dest_file)
            raise IOError(ioexc)
        sha1 = hashlib.sha1()
        old_content_b = to_bytes(old_content, errors='surrogate_or_strict')
        sha1.update(old_content_b)
        checksum_old = sha1.digest()
        sha1 = hashlib.sha1()
        new_content_b = to_bytes(new_content, errors='surrogate_or_strict')
        sha1.update(new_content_b)
        checksum_new = sha1.digest()
        os.remove(tmp_dest_file)
        # Equal checksums -> nothing to do.
        return checksum_old != checksum_new
|
kinwahlai/phantomjs-ghostdriver | refs/heads/master | src/breakpad/src/tools/gyp/test/copies/gyptest-all.py | 137 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies using an explicit build target of 'all'.
"""
import TestGyp
# Instantiate the harness; the build format under test (make/ninja/msvs/...)
# is chosen from the environment.
test = TestGyp.TestGyp()
test.run_gyp('copies.gyp', chdir='src')
# Build out of a relocated tree to prove paths aren't absolute.
test.relocate('src', 'relocate/src')
test.build('copies.gyp', test.ALL, chdir='relocate/src')
# Verify both copy destinations received the expected contents.
test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
test.built_file_must_match('copies-out/file2',
                           'file2 contents\n',
                           chdir='relocate/src')
test.pass_test()
|
caseyching/Impala | refs/heads/cdh5-trunk | thirdparty/gtest-1.6.0/scripts/gen_gtest_pred_impl.py | 412 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gen_gtest_pred_impl.py v0.1
Generates the implementation of Google Test predicate assertions and
accompanying tests.
Usage:
gen_gtest_pred_impl.py MAX_ARITY
where MAX_ARITY is a positive integer.
The command generates the implementation of up-to MAX_ARITY-ary
predicate assertions, and writes it to file gtest_pred_impl.h in the
directory where the script is. It also generates the accompanying
unit test in file gtest_pred_impl_unittest.cc.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import sys
import time
# Where this script is.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
# Where to store the generated header (relative to this scripts/ directory).
HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
# Where to store the generated unit test (relative to this scripts/ directory).
UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
def HeaderPreamble(n):
  """Returns the preamble for the header file.
  Args:
    n: the maximum arity of the predicate macros to be generated.
  """
  # A map that defines the values used in the preamble template.
  DEFS = {
    'today' : time.strftime('%m/%d/%Y'),
    'year' : time.strftime('%Y'),
    'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
    'n' : n
  }
  # NOTE: the template below is emitted verbatim into gtest_pred_impl.h;
  # %(...)s specs are filled from DEFS.
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
//
// Implements a family of generic predicate assertion macros.
#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
// Makes sure this header is not included before gtest.h.
#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
#error Do not include gtest_pred_impl.h directly. Include gtest.h instead.
#endif // GTEST_INCLUDE_GTEST_GTEST_H_
// This header implements a family of generic predicate assertion
// macros:
//
// ASSERT_PRED_FORMAT1(pred_format, v1)
// ASSERT_PRED_FORMAT2(pred_format, v1, v2)
// ...
//
// where pred_format is a function or functor that takes n (in the
// case of ASSERT_PRED_FORMATn) values and their source expression
// text, and returns a testing::AssertionResult. See the definition
// of ASSERT_EQ in gtest.h for an example.
//
// If you don't care about formatting, you can use the more
// restrictive version:
//
// ASSERT_PRED1(pred, v1)
// ASSERT_PRED2(pred, v1, v2)
// ...
//
// where pred is an n-ary function or functor that returns bool,
// and the values v1, v2, ..., must support the << operator for
// streaming to std::ostream.
//
// We also define the EXPECT_* variations.
//
// For now we only support predicates whose arity is at most %(n)s.
// Please email googletestframework@googlegroups.com if you need
// support for higher arities.
// GTEST_ASSERT_ is the basic statement to which all of the assertions
// in this file reduce. Don't use this in your code.
#define GTEST_ASSERT_(expression, on_failure) \\
  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
  if (const ::testing::AssertionResult gtest_ar = (expression)) \\
    ; \\
  else \\
    on_failure(gtest_ar.failure_message())
""" % DEFS)
def Arity(n):
  """Returns the English name of the given arity (e.g. 2 -> 'binary');
  arities above three become '<n>-ary', negative arities map to None."""
  if n < 0:
    return None
  small_names = ['nullary', 'unary', 'binary', 'ternary']
  if n < len(small_names):
    return small_names[n]
  return '%s-ary' % n
def Title(word):
  """Returns *word* with only its first character upper-cased. Unlike
  str.title(), Title('4-ary') stays '4-ary' ('4-ary'.title() is '4-Ary')."""
  head, tail = word[0], word[1:]
  return head.upper() + tail
def OneTo(n):
  """Returns the sequence of integers 1, 2, ..., n (inclusive)."""
  return range(1, n + 1)
def Iter(n, format, sep=''):
  """Formats the integers 1..n with *format* and joins the results.

  *format* may contain zero or more '%s' specs; each spec receives the
  same iteration index. The pieces are joined with *sep*.
  Example:
    Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
  """
  spec_count = format.count('%s')
  pieces = [format % ((i,) * spec_count) for i in OneTo(n)]
  return sep.join(pieces)
def ImplementationForArity(n):
  """Returns the implementation of n-ary predicate assertions."""
  # A map the defines the values used in the implementation template.
  DEFS = {
    'n' : str(n),
    'vs' : Iter(n, 'v%s', sep=', '),
    'vts' : Iter(n, '#v%s', sep=', '),
    'arity' : Arity(n),
    'Arity' : Title(Arity(n))
  }
  # Helper template AssertPredNHelper(...) evaluating the predicate once.
  impl = """
// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
template <typename Pred""" % DEFS
  impl += Iter(n, """,
          typename T%s""")
  impl += """>
AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS
  impl += Iter(n, """,
                                  const char* e%s""")
  impl += """,
                                  Pred pred"""
  impl += Iter(n, """,
                                  const T%s& v%s""")
  impl += """) {
  if (pred(%(vs)s)) return AssertionSuccess();
""" % DEFS
  # Failure message lists each argument expression and its value.
  impl += '  return AssertionFailure() << pred_text << "("'
  impl += Iter(n, """
                            << e%s""", sep=' << ", "')
  impl += ' << ") evaluates to false, where"'
  impl += Iter(n, """
                            << "\\n" << e%s << " evaluates to " << v%s""")
  impl += """;
}
// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
// Don't use this in your code.
#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
  GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s),\\
                on_failure)
// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
  GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS
  impl += Iter(n, """, \\
                                             #v%s""")
  impl += """, \\
                                             pred"""
  impl += Iter(n, """, \\
                                             v%s""")
  impl += """), on_failure)
// %(Arity)s predicate assertion macros.
#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
""" % DEFS
  return impl
def HeaderPostamble():
  """Returns the postamble (include-guard close) for the header file."""
  return """
#endif // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
"""
def GenerateFile(path, content):
  """Given a file path and a content string, overwrites the file with the
  given content, logging progress to stdout."""
  # NOTE: Python 2 idioms (print statement, file() builtin); this script
  # predates Python 3.
  print 'Updating file %s . . .' % path
  f = file(path, 'w+')
  print >>f, content,
  f.close()
  print 'File %s has been updated.' % path
def GenerateHeader(n):
  """Given the maximum arity n, regenerates the header file implementing
  the predicate assertions (preamble, arities 1..n, postamble)."""
  content = HeaderPreamble(n)
  for arity in OneTo(n):
    content += ImplementationForArity(arity)
  content += HeaderPostamble()
  GenerateFile(HEADER, content)
def UnitTestPreamble():
  """Returns the preamble for the unit test file."""
  # A map that defines the values used in the preamble template.
  DEFS = {
    'today' : time.strftime('%m/%d/%Y'),
    'year' : time.strftime('%Y'),
    'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
  }
  # NOTE: emitted verbatim into gtest_pred_impl_unittest.cc; %(...)s specs
  # are filled from DEFS.
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long. If you intend to
// learn how Google Test works by reading its unit tests, read
// gtest_unittest.cc instead.
//
// This is intended as a regression test for the Google Test predicate
// assertions. We compile it as part of the gtest_unittest target
// only to keep the implementation tidy and compact, as it is quite
// involved to set up the stage for testing Google Test using Google
// Test itself.
//
// Currently, gtest_unittest takes ~11 seconds to run in the testing
// daemon. In the future, if it grows too large and needs much more
// time to finish, we should consider separating this file into a
// stand-alone regression test.
#include <iostream>
#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
// A user-defined data type.
struct Bool {
  explicit Bool(int val) : value(val != 0) {}
  bool operator>(int n) const { return value > Bool(n).value; }
  Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
  bool operator==(const Bool& rhs) const { return value == rhs.value; }
  bool value;
};
// Enables Bool to be used in assertions.
std::ostream& operator<<(std::ostream& os, const Bool& x) {
  return os << (x.value ? "true" : "false");
}
""" % DEFS)
def TestsForArity(n):
  """Returns the tests for n-ary predicate assertions."""
  # A map that defines the values used in the template for the tests.
  # Each entry is expanded into the C++ code templates below via Python
  # %-formatting, e.g. 'vs' becomes "v1, v2, ..., vn" for arity n.
  DEFS = {
    'n' : n,
    'es' : Iter(n, 'e%s', sep=', '),
    'vs' : Iter(n, 'v%s', sep=', '),
    'vts' : Iter(n, '#v%s', sep=', '),
    'tvs' : Iter(n, 'T%s v%s', sep=', '),
    'int_vs' : Iter(n, 'int v%s', sep=', '),
    'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
    'types' : Iter(n, 'typename T%s', sep=', '),
    'v_sum' : Iter(n, 'v%s', sep=' + '),
    'arity' : Arity(n),
    'Arity' : Title(Arity(n)),
    }
  # Generated C++: sample n-ary predicate function plus non-template
  # int/Bool overloads (see the gcc 2.95.3 note in the emitted comment).
  tests = (
"""// Sample functions/functors for testing %(arity)s predicate assertions.
// A %(arity)s predicate function.
template <%(types)s>
bool PredFunction%(n)s(%(tvs)s) {
  return %(v_sum)s > 0;
}
// The following two functions are needed to circumvent a bug in
// gcc 2.95.3, which sometimes has problem with the above template
// function.
bool PredFunction%(n)sInt(%(int_vs)s) {
  return %(v_sum)s > 0;
}
bool PredFunction%(n)sBool(%(Bool_vs)s) {
  return %(v_sum)s > 0;
}
""" % DEFS)
  # Generated C++: sample n-ary predicate functor.
  tests += """
// A %(arity)s predicate functor.
struct PredFunctor%(n)s {
  template <%(types)s>
  bool operator()(""" % DEFS
  tests += Iter(n, 'const T%s& v%s', sep=""",
                  """)
  tests += """) {
    return %(v_sum)s > 0;
  }
};
""" % DEFS
  # Generated C++: sample n-ary predicate-formatter function returning
  # testing::AssertionResult.
  tests += """
// A %(arity)s predicate-formatter function.
template <%(types)s>
testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
  tests += Iter(n, 'const char* e%s', sep=""",
                                             """)
  tests += Iter(n, """,
                                             const T%s& v%s""")
  tests += """) {
  if (PredFunction%(n)s(%(vs)s))
    return testing::AssertionSuccess();
  return testing::AssertionFailure()
      << """ % DEFS
  tests += Iter(n, 'e%s', sep=' << " + " << ')
  tests += """
      << " is expected to be positive, but evaluates to "
      << %(v_sum)s << ".";
}
""" % DEFS
  # Generated C++: sample n-ary predicate-formatter functor that simply
  # delegates to the predicate-formatter function above.
  tests += """
// A %(arity)s predicate-formatter functor.
struct PredFormatFunctor%(n)s {
  template <%(types)s>
  testing::AssertionResult operator()(""" % DEFS
  tests += Iter(n, 'const char* e%s', sep=""",
                                      """)
  tests += Iter(n, """,
                                      const T%s& v%s""")
  tests += """) const {
    return PredFormatFunction%(n)s(%(es)s, %(vs)s);
  }
};
""" % DEFS
  # Generated C++: shared test fixture.  TearDown verifies that every
  # argument was evaluated exactly once and that the control flow of the
  # test function matched expectations.
  tests += """
// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
class Predicate%(n)sTest : public testing::Test {
 protected:
  virtual void SetUp() {
    expected_to_finish_ = true;
    finished_ = false;""" % DEFS
  tests += """
    """ + Iter(n, 'n%s_ = ') + """0;
  }
"""
  tests += """
  virtual void TearDown() {
    // Verifies that each of the predicate's arguments was evaluated
    // exactly once."""
  tests += ''.join(["""
    EXPECT_EQ(1, n%s_) <<
        "The predicate assertion didn't evaluate argument %s "
        "exactly once.";""" % (i, i + 1) for i in OneTo(n)])
  # NOTE(review): the generated message below spells "unexpectedly" as
  # "unexpactedly"; kept verbatim here since fixing it is a code change.
  tests += """
    // Verifies that the control flow in the test function is expected.
    if (expected_to_finish_ && !finished_) {
      FAIL() << "The predicate assertion unexpactedly aborted the test.";
    } else if (!expected_to_finish_ && finished_) {
      FAIL() << "The failed predicate assertion didn't abort the test "
                "as expected.";
    }
  }
  // true iff the test function is expected to run to finish.
  static bool expected_to_finish_;
  // true iff the test function did run to finish.
  static bool finished_;
""" % DEFS
  tests += Iter(n, """
  static int n%s_;""")
  tests += """
};
bool Predicate%(n)sTest::expected_to_finish_;
bool Predicate%(n)sTest::finished_;
""" % DEFS
  # %% escapes the percent sign so that Iter()'s per-argument %s expansion
  # runs first and the '%(n)s' placeholder survives for the DEFS expansion.
  tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
""") % DEFS
  tests += """
typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
""" % DEFS
  def GenTest(use_format, use_assert, expect_failure,
              use_functor, use_user_type):
    """Returns the test for a predicate assertion macro.

    Args:
      use_format: true iff the assertion is a *_PRED_FORMAT*.
      use_assert: true iff the assertion is a ASSERT_*.
      expect_failure: true iff the assertion is expected to fail.
      use_functor: true iff the first argument of the assertion is
      a functor (as opposed to a function)
      use_user_type: true iff the predicate functor/function takes
      argument(s) of a user-defined type.

    Example:
      GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
      of a successful EXPECT_PRED_FORMATn() that takes a functor
      whose arguments have built-in types."""
    if use_assert:
      assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use
                        # that identifier here.
    else:
      assrt = 'EXPECT'
    assertion = assrt + '_PRED'
    if use_format:
      pred_format = 'PredFormat'
      assertion += '_FORMAT'
    else:
      pred_format = 'Pred'
    assertion += '%(n)s' % DEFS
    if use_functor:
      pred_format_type = 'functor'
      pred_format += 'Functor%(n)s()'
    else:
      pred_format_type = 'function'
      pred_format += 'Function%(n)s'
      if not use_format:
        # Use the non-template Int/Bool overload (see gcc 2.95.3 note above).
        if use_user_type:
          pred_format += 'Bool'
        else:
          pred_format += 'Int'
    test_name = pred_format_type.title()
    if use_user_type:
      arg_type = 'user-defined type (Bool)'
      test_name += 'OnUserType'
      if expect_failure:
        arg = 'Bool(n%s_++)'
      else:
        arg = 'Bool(++n%s_)'
    else:
      arg_type = 'built-in type (int)'
      test_name += 'OnBuiltInType'
      if expect_failure:
        arg = 'n%s_++'
      else:
        arg = '++n%s_'
    if expect_failure:
      successful_or_failed = 'failed'
      expected_or_not = 'expected.'
      test_name += 'Failure'
    else:
      successful_or_failed = 'successful'
      expected_or_not = 'UNEXPECTED!'
      test_name += 'Success'
    # A map that defines the values used in the test template.
    defs = DEFS.copy()
    defs.update({
      'assert' : assrt,
      'assertion' : assertion,
      'test_name' : test_name,
      'pf_type' : pred_format_type,
      'pf' : pred_format,
      'arg_type' : arg_type,
      'arg' : arg,
      'successful' : successful_or_failed,
      'expected' : expected_or_not,
      })
    # Build the C++ TEST_F body; expected failures are wrapped in
    # EXPECT_{FATAL,NONFATAL}_FAILURE and indented one extra level.
    test = """
// Tests a %(successful)s %(assertion)s where the
// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
    indent = (len(assertion) + 3)*' '
    extra_indent = ''
    if expect_failure:
      extra_indent = '  '
      if use_assert:
        test += """
  expected_to_finish_ = false;
  EXPECT_FATAL_FAILURE({ // NOLINT"""
      else:
        test += """
  EXPECT_NONFATAL_FAILURE({ // NOLINT"""
    test += '\n' + extra_indent + """ %(assertion)s(%(pf)s""" % defs
    test = test % defs
    test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
    test += ');\n' + extra_indent + ' finished_ = true;\n'
    if expect_failure:
      test += ' }, "");\n'
    test += '}\n'
    return test
  # Generates tests for all 2**5 = 32 combinations of the five flags.
  tests += ''.join([GenTest(use_format, use_assert, expect_failure,
                            use_functor, use_user_type)
                    for use_format in [0, 1]
                    for use_assert in [0, 1]
                    for expect_failure in [0, 1]
                    for use_functor in [0, 1]
                    for use_user_type in [0, 1]
                    ])
  return tests
def UnitTestPostamble():
  """Generates the trailing section of the test file (currently empty)."""
  postamble = ''
  return postamble
def GenerateUnitTest(n):
  """Generates the unit test file for up-to n-ary predicate assertions."""
  # Concatenates the fixed preamble, the tests for arities 1..n, and the
  # (currently empty) postamble, then writes them to the UNIT_TEST path.
  GenerateFile(UNIT_TEST,
               UnitTestPreamble()
               + ''.join([TestsForArity(i) for i in OneTo(n)])
               + UnitTestPostamble())
def _Main():
  """The entry point of the script.  Generates the header file and its
  unit test.

  Usage: the single command-line argument is the maximum predicate arity
  to generate code for.  (Python 2 script: note the print statements.)
  """
  if len(sys.argv) != 2:
    print __doc__
    print 'Author: ' + __author__
    sys.exit(1)
  # Maximum arity of the generated predicate assertions
  n = int(sys.argv[1])
  GenerateHeader(n)
  GenerateUnitTest(n)
if __name__ == '__main__':
  _Main()
|
sumpfgottheit/pdu1800_data_provider | refs/heads/master | pygame32/pygame/tests/run_tests__tests/failures1/__init__.py | 6353 | # empty
|
ganeti/ganeti | refs/heads/master | lib/impexpd/__init__.py | 1 | #
#
# Copyright (C) 2010 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Classes and functions for import/export daemon.
"""
import os
import re
import socket
import logging
import signal
import errno
import time
from io import StringIO
from ganeti import constants
from ganeti import errors
from ganeti import utils
from ganeti import netutils
from ganeti import compat
#: Used to recognize point at which socat(1) starts to listen on its socket.
#: The local address is required for the remote peer to connect (in particular
#: the port number).
LISTENING_RE = re.compile(r"^listening on\s+"
r"AF=(?P<family>\d+)\s+"
r"(?P<address>.+):(?P<port>\d+)$", re.I)
#: Used to recognize point at which socat(1) is sending data over the wire
TRANSFER_LOOP_RE = re.compile(r"^starting data transfer loop with FDs\s+.*$",
re.I)
SOCAT_LOG_DEBUG = "D"
SOCAT_LOG_INFO = "I"
SOCAT_LOG_NOTICE = "N"
SOCAT_LOG_WARNING = "W"
SOCAT_LOG_ERROR = "E"
SOCAT_LOG_FATAL = "F"
SOCAT_LOG_IGNORE = compat.UniqueFrozenset([
SOCAT_LOG_DEBUG,
SOCAT_LOG_INFO,
SOCAT_LOG_NOTICE,
])
#: Used to parse GNU dd(1) statistics
DD_INFO_RE = re.compile(r"^(?P<bytes>\d+)\s*byte(?:|s)\s.*\scopied,\s*"
r"(?P<seconds>[\d.]+)\s*s(?:|econds),.*$", re.I)
#: Used to ignore "N+N records in/out" on dd(1)'s stderr
DD_STDERR_IGNORE = re.compile(r"^\d+\+\d+\s*records\s+(?:in|out)$", re.I)
#: Signal upon which dd(1) will print statistics (on some platforms, SIGINFO is
#: unavailable and SIGUSR1 is used instead)
DD_INFO_SIGNAL = getattr(signal, "SIGINFO", signal.SIGUSR1)
#: Buffer size: at most this many bytes are transferred at once
BUFSIZE = 1024 * 1024
# Common options for socat
SOCAT_TCP_OPTS = ["keepalive", "keepidle=60", "keepintvl=10", "keepcnt=5"]
SOCAT_OPENSSL_OPTS = ["verify=1", "cipher=%s" % constants.OPENSSL_CIPHERS]
if constants.SOCAT_USE_COMPRESS:
  # Disables all compression done by OpenSSL. Only supported in patched
  # versions of socat (as of November 2010). See INSTALL for more information.
SOCAT_OPENSSL_OPTS.append("compress=none")
SOCAT_OPTION_MAXLEN = 400
(PROG_OTHER,
PROG_SOCAT,
PROG_DD,
PROG_DD_PID,
PROG_EXP_SIZE) = range(1, 6)
PROG_ALL = compat.UniqueFrozenset([
PROG_OTHER,
PROG_SOCAT,
PROG_DD,
PROG_DD_PID,
PROG_EXP_SIZE,
])
class CommandBuilder(object):
  """Builds the shell command line for one import/export child process.

  The resulting command is a Bash pipeline combining socat (network
  transport over OpenSSL), an optional (de)compression stage and dd
  (throughput measurement).
  """
  # Memoized socat version; (0,) means "not yet determined", see
  # _GetSocatVersion
  _SOCAT_VERSION = (0,)
  def __init__(self, mode, opts, socat_stderr_fd, dd_stderr_fd, dd_pid_fd):
    """Initializes this class.
    @param mode: Daemon mode (import or export)
    @param opts: Options object
    @type socat_stderr_fd: int
    @param socat_stderr_fd: File descriptor socat should write its stderr to
    @type dd_stderr_fd: int
    @param dd_stderr_fd: File descriptor dd should write its stderr to
    @type dd_pid_fd: int
    @param dd_pid_fd: File descriptor the child should write dd's PID to
    """
    self._opts = opts
    self._mode = mode
    self._socat_stderr_fd = socat_stderr_fd
    self._dd_stderr_fd = dd_stderr_fd
    self._dd_pid_fd = dd_pid_fd
    # The magic value, when given, must match the expected pattern so it is
    # safe to embed in the shell commands built below
    assert (self._opts.magic is None or
            constants.IE_MAGIC_RE.match(self._opts.magic))
  @staticmethod
  def GetBashCommand(cmd):
    """Prepares a command to be run in Bash.

    errexit/pipefail make the whole pipeline fail if any stage fails.
    """
    return ["bash", "-o", "errexit", "-o", "pipefail", "-c", cmd]
  @classmethod
  def _GetSocatVersion(cls):
    """Returns the socat version, as a tuple of ints.
    The version is memoized in a class variable for future use.
    """
    if cls._SOCAT_VERSION > (0,):
      return cls._SOCAT_VERSION
    socat = utils.RunCmd([constants.SOCAT_PATH, "-V"])
    # No need to check for errors here. If -V is not there, socat is really
    # old. Any other failure will be handled when running the actual socat
    # command.
    for line in socat.output.splitlines():
      match = re.match(r"socat version ((\d+\.)*(\d+))", line)
      if match:
        try:
          # NOTE(review): int() failures raise ValueError, which is not
          # caught here -- the TypeError guard looks ineffective; confirm
          # upstream intent
          cls._SOCAT_VERSION = tuple(int(x) for x in match.group(1).split('.'))
        except TypeError:
          pass
        break
    return cls._SOCAT_VERSION
  def _GetSocatCommand(self):
    """Returns the socat command.

    Import mode listens via OPENSSL-LISTEN and writes to stdout; export
    mode reads from stdin and connects via OPENSSL.
    """
    common_addr_opts = SOCAT_TCP_OPTS + SOCAT_OPENSSL_OPTS + [
      "key=%s" % self._opts.key,
      "cert=%s" % self._opts.cert,
      "cafile=%s" % self._opts.ca,
      ]
    if self._opts.bind is not None:
      common_addr_opts.append("bind=%s" % self._opts.bind)
    # At most one address family may be forced
    assert not (self._opts.ipv4 and self._opts.ipv6)
    if self._opts.ipv4:
      common_addr_opts.append("pf=ipv4")
    elif self._opts.ipv6:
      common_addr_opts.append("pf=ipv6")
    if self._mode == constants.IEM_IMPORT:
      if self._opts.port is None:
        # Port 0 lets the kernel pick a free port
        port = 0
      else:
        port = self._opts.port
      addr1 = [
        "OPENSSL-LISTEN:%s" % port,
        "reuseaddr",
        # Retry to listen if connection wasn't established successfully, up to
        # 100 times a second. Note that this still leaves room for DoS attacks.
        "forever",
        "intervall=0.01",
        ] + common_addr_opts
      addr2 = ["stdout"]
    elif self._mode == constants.IEM_EXPORT:
      # IPv6 addresses must be bracketed in the socat address
      if self._opts.host and netutils.IP6Address.IsValid(self._opts.host):
        host = "[%s]" % self._opts.host
      else:
        host = self._opts.host
      addr1 = ["stdin"]
      addr2 = [
        "OPENSSL:%s:%s" % (host, self._opts.port),
        # How long to wait per connection attempt
        "connect-timeout=%s" % self._opts.connect_timeout,
        # Retry a few times before giving up to connect (once per second)
        "retry=%s" % self._opts.connect_retries,
        "intervall=1",
        ] + common_addr_opts
      # For socat versions >= 1.7.3, we need to also specify
      # openssl-commonname, otherwise server certificate verification will
      # fail.
      if self._GetSocatVersion() >= (1, 7, 3):
        addr2 += ["openssl-commonname=%s" % constants.X509_CERT_CN]
    else:
      raise errors.GenericError("Invalid mode '%s'" % self._mode)
    # Commas separate socat options, and over-long options are rejected by
    # socat, hence both must be refused here
    for i in [addr1, addr2]:
      for value in i:
        if len(value) > SOCAT_OPTION_MAXLEN:
          raise errors.GenericError("Socat option longer than %s"
                                    " characters: %r" %
                                    (SOCAT_OPTION_MAXLEN, value))
        if "," in value:
          raise errors.GenericError("Comma not allowed in socat option"
                                    " value: %r" % value)
    return [
      constants.SOCAT_PATH,
      # Log to stderr
      "-ls",
      # Log level
      "-d", "-d",
      # Buffer size
      "-b%s" % BUFSIZE,
      # Unidirectional mode, the first address is only used for reading, and the
      # second address is only used for writing
      "-u",
      ",".join(addr1), ",".join(addr2),
      ]
  def _GetMagicCommand(self):
    """Returns the command to read/write the magic value.

    On import the magic prefix is read and verified; on export it is
    written before the data stream.  Returns None when no magic is set.
    """
    if not self._opts.magic:
      return None
    # Prefix to ensure magic isn't interpreted as option to "echo"
    magic = "M=%s" % self._opts.magic
    cmd = StringIO()
    if self._mode == constants.IEM_IMPORT:
      cmd.write("{ ")
      cmd.write(utils.ShellQuoteArgs(["read", "-n", str(len(magic)), "magic"]))
      cmd.write(" && ")
      cmd.write("if test \"$magic\" != %s; then" % utils.ShellQuote(magic))
      cmd.write(" echo %s >&2;" % utils.ShellQuote("Magic value mismatch"))
      cmd.write(" exit 1;")
      cmd.write("fi;")
      cmd.write(" }")
    elif self._mode == constants.IEM_EXPORT:
      cmd.write(utils.ShellQuoteArgs(["echo", "-E", "-n", magic]))
    else:
      raise errors.GenericError("Invalid mode '%s'" % self._mode)
    return cmd.getvalue()
  def _GetDdCommand(self):
    """Returns the command for measuring throughput.

    Runs dd in the background so its PID can be reported to the daemon,
    optionally wrapped with the magic read/write command.
    """
    dd_cmd = StringIO()
    magic_cmd = self._GetMagicCommand()
    if magic_cmd:
      dd_cmd.write("{ ")
      dd_cmd.write(magic_cmd)
      dd_cmd.write(" && ")
    dd_cmd.write("{ ")
    # Setting LC_ALL since we want to parse the output and explicitly
    # redirecting stdin, as the background process (dd) would have
    # /dev/null as stdin otherwise
    dd_cmd.write("LC_ALL=C dd bs=%s <&0 2>&%d & pid=${!};" %
                 (BUFSIZE, self._dd_stderr_fd))
    # Send PID to daemon
    dd_cmd.write(" echo $pid >&%d;" % self._dd_pid_fd)
    # And wait for dd
    dd_cmd.write(" wait $pid;")
    dd_cmd.write(" }")
    if magic_cmd:
      dd_cmd.write(" }")
    return dd_cmd.getvalue()
  def _GetTransportCommand(self):
    """Returns the command for the transport part of the daemon.

    Import pipeline: socat | [decompress] | dd.
    Export pipeline: dd | [compress] | socat.
    """
    socat_cmd = ("%s 2>&%d" %
                 (utils.ShellQuoteArgs(self._GetSocatCommand()),
                  self._socat_stderr_fd))
    dd_cmd = self._GetDdCommand()
    compr = self._opts.compress
    parts = []
    if self._mode == constants.IEM_IMPORT:
      parts.append(socat_cmd)
      if compr in [constants.IEC_GZIP, constants.IEC_GZIP_FAST,
                   constants.IEC_GZIP_SLOW, constants.IEC_LZOP]:
        utility_name = constants.IEC_COMPRESSION_UTILITIES.get(compr, compr)
        parts.append("%s -d -c" % utility_name)
      elif compr != constants.IEC_NONE:
        parts.append("%s -d" % compr)
      else:
        # No compression
        pass
      parts.append(dd_cmd)
    elif self._mode == constants.IEM_EXPORT:
      parts.append(dd_cmd)
      if compr in [constants.IEC_GZIP_SLOW, constants.IEC_LZOP]:
        utility_name = constants.IEC_COMPRESSION_UTILITIES.get(compr, compr)
        parts.append("%s -c" % utility_name)
      elif compr in [constants.IEC_GZIP_FAST, constants.IEC_GZIP]:
        # Fast gzip uses the lowest compression level
        parts.append("gzip -1 -c")
      elif compr != constants.IEC_NONE:
        parts.append(compr)
      else:
        # No compression
        pass
      parts.append(socat_cmd)
    else:
      raise errors.GenericError("Invalid mode '%s'" % self._mode)
    # TODO: Run transport as separate user
    # The transport uses its own shell to simplify running it as a separate user
    # in the future.
    return self.GetBashCommand(" | ".join(parts))
  def GetCommand(self):
    """Returns the complete child process command.

    Wraps the transport command with the configured prefix/suffix.
    """
    transport_cmd = self._GetTransportCommand()
    buf = StringIO()
    if self._opts.cmd_prefix:
      buf.write(self._opts.cmd_prefix)
      buf.write(" ")
    buf.write(utils.ShellQuoteArgs(transport_cmd))
    if self._opts.cmd_suffix:
      buf.write(" ")
      buf.write(self._opts.cmd_suffix)
    return self.GetBashCommand(buf.getvalue())
def _VerifyListening(family, address, port):
"""Verify address given as listening address by socat.
"""
if family not in (socket.AF_INET, socket.AF_INET6):
raise errors.GenericError("Address family %r not supported" % family)
if (family == socket.AF_INET6 and address.startswith("[") and
address.endswith("]")):
address = address.lstrip("[").rstrip("]")
try:
packed_address = socket.inet_pton(family, address)
except socket.error:
raise errors.GenericError("Invalid address %r for family %s" %
(address, family))
return (socket.inet_ntop(family, packed_address), port)
class ChildIOProcessor(object):
  """Processes the output streams of the import/export child process.

  Lines from socat, dd and the PID/expected-size channels are fed in
  through per-program L{utils.LineSplitter} objects and turned into
  status file updates and log messages.
  """
  def __init__(self, debug, status_file, logger, throughput_samples, exp_size):
    """Initializes this class.

    @param debug: Whether to forward all child output to the log
    @param status_file: Status file object to update with progress
    @param logger: Logger for forwarded child output
    @param throughput_samples: Number of samples kept for throughput
      calculation
    @param exp_size: Expected size of transferred data, if known
    """
    self._debug = debug
    self._status_file = status_file
    self._logger = logger
    # One line splitter per program; each calls back into _ProcessOutput
    # with the originating program constant as second argument
    self._splitter = dict([(prog, utils.LineSplitter(self._ProcessOutput, prog))
                           for prog in PROG_ALL])
    self._dd_pid = None
    self._dd_ready = False
    self._dd_tp_samples = throughput_samples
    self._dd_progress = []
    # Expected size of transferred data
    self._exp_size = exp_size
  def GetLineSplitter(self, prog):
    """Returns the line splitter for a program.
    """
    return self._splitter[prog]
  def FlushAll(self):
    """Flushes all line splitters.
    """
    for ls in self._splitter.values():
      ls.flush()
  def CloseAll(self):
    """Closes all line splitters.
    """
    for ls in self._splitter.values():
      ls.close()
    self._splitter.clear()
  def NotifyDd(self):
    """Tells dd(1) to write statistics.

    @return: Whether the notification could be attempted (the dd PID is
      known); note that True is also returned if dd already exited
    """
    if self._dd_pid is None:
      # Can't notify
      return False
    if not self._dd_ready:
      # There's a race condition between starting the program and sending
      # signals. The signal handler is only registered after some time, so we
      # have to check whether the program is ready. If it isn't, sending a
      # signal will invoke the default handler (and usually abort the program).
      if not utils.IsProcessHandlingSignal(self._dd_pid, DD_INFO_SIGNAL):
        logging.debug("dd is not yet ready for signal %s", DD_INFO_SIGNAL)
        return False
      logging.debug("dd is now handling signal %s", DD_INFO_SIGNAL)
      self._dd_ready = True
    logging.debug("Sending signal %s to PID %s", DD_INFO_SIGNAL, self._dd_pid)
    try:
      os.kill(self._dd_pid, DD_INFO_SIGNAL)
    except EnvironmentError as err:
      if err.errno != errno.ESRCH:
        raise
      # Process no longer exists
      logging.debug("dd exited")
      self._dd_pid = None
    return True
  def _ProcessOutput(self, line, prog):
    """Takes care of child process output.
    @type line: string
    @param line: Child output line
    @type prog: number
    @param prog: Program from which the line originates
    """
    force_update = False
    forward_line = line
    if prog == PROG_SOCAT:
      level = None
      # socat log format: up to four fields before the actual message
      parts = line.split(None, 4)
      if len(parts) == 5:
        (_, _, _, level, msg) = parts
        force_update = self._ProcessSocatOutput(self._status_file, level, msg)
        # Only forward interesting log levels unless debugging
        if self._debug or (level and level not in SOCAT_LOG_IGNORE):
          forward_line = "socat: %s %s" % (level, msg)
        else:
          forward_line = None
      else:
        forward_line = "socat: %s" % line
    elif prog == PROG_DD:
      (should_forward, force_update) = self._ProcessDdOutput(line)
      if should_forward or self._debug:
        forward_line = "dd: %s" % line
      else:
        forward_line = None
    elif prog == PROG_DD_PID:
      # The child writes dd's PID exactly once
      if self._dd_pid:
        raise RuntimeError("dd PID reported more than once")
      logging.debug("Received dd PID %r", line)
      self._dd_pid = int(line)
      forward_line = None
    elif prog == PROG_EXP_SIZE:
      logging.debug("Received predicted size %r", line)
      forward_line = None
      if line:
        try:
          exp_size = utils.BytesToMebibyte(int(line))
        except (ValueError, TypeError) as err:
          logging.error("Failed to convert predicted size %r to number: %s",
                        line, err)
          exp_size = None
      else:
        exp_size = None
      self._exp_size = exp_size
    if forward_line:
      self._logger.info(forward_line)
      self._status_file.AddRecentOutput(forward_line)
    self._status_file.Update(force_update)
  @staticmethod
  def _ProcessSocatOutput(status_file, level, msg):
    """Interprets socat log output.

    @return: Whether the status file should be updated immediately
    """
    if level == SOCAT_LOG_NOTICE:
      if status_file.GetListenPort() is None:
        # TODO: Maybe implement timeout to not listen forever
        m = LISTENING_RE.match(msg)
        if m:
          (_, port) = _VerifyListening(int(m.group("family")),
                                       m.group("address"),
                                       int(m.group("port")))
          status_file.SetListenPort(port)
          return True
      if not status_file.GetConnected():
        m = TRANSFER_LOOP_RE.match(msg)
        if m:
          logging.debug("Connection established")
          status_file.SetConnected()
          return True
    return False
  def _ProcessDdOutput(self, line):
    """Interprets a line of dd(1)'s output.

    @return: Tuple of (forward line to log, force status file update)
    """
    m = DD_INFO_RE.match(line)
    if m:
      seconds = float(m.group("seconds"))
      mbytes = utils.BytesToMebibyte(int(m.group("bytes")))
      self._UpdateDdProgress(seconds, mbytes)
      return (False, True)
    m = DD_STDERR_IGNORE.match(line)
    if m:
      # Ignore
      return (False, False)
    # Forward line
    return (True, False)
  def _UpdateDdProgress(self, seconds, mbytes):
    """Updates the internal status variables for dd(1) progress.
    @type seconds: float
    @param seconds: Timestamp of this update
    @type mbytes: float
    @param mbytes: Total number of MiB transferred so far
    """
    # Add latest sample
    self._dd_progress.append((seconds, mbytes))
    # Remove old samples
    del self._dd_progress[:-self._dd_tp_samples]
    # Calculate throughput
    throughput = _CalcThroughput(self._dd_progress)
    # Calculate percent and ETA
    percent = None
    eta = None
    if self._exp_size is not None:
      if self._exp_size != 0:
        percent = max(0, min(100, (100.0 * mbytes) / self._exp_size))
      if throughput:
        eta = max(0, float(self._exp_size - mbytes) / throughput)
    self._status_file.SetProgress(mbytes, throughput, percent, eta)
def _CalcThroughput(samples):
"""Calculates the throughput in MiB/second.
@type samples: sequence
@param samples: List of samples, each consisting of a (timestamp, mbytes)
tuple
@rtype: float or None
@return: Throughput in MiB/second
"""
if len(samples) < 2:
# Can't calculate throughput
return None
(start_time, start_mbytes) = samples[0]
(end_time, end_mbytes) = samples[-1]
return (float(end_mbytes) - start_mbytes) / (float(end_time) - start_time)
|
biskett/mic | refs/heads/master | mic/3rdparty/pykickstart/errors.py | 14 | #
# errors.py: Kickstart error handling.
#
# Chris Lumens <clumens@redhat.com>
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
"""
Error handling classes and functions.
This module exports a single function:
formatErrorMsg - Properly formats an error message.
It also exports several exception classes:
KickstartError - A generic exception class.
KickstartParseError - An exception for errors relating to parsing.
KickstartValueError - An exception for errors relating to option
processing.
KickstartVersionError - An exception for errors relating to unsupported
syntax versions.
"""
import gettext
_ = lambda x: gettext.ldgettext("pykickstart", x)
def formatErrorMsg(lineno, msg=""):
    """Properly format the error message msg for inclusion in an exception."""
    if msg == "":
        # No details available; mention only the offending line number.
        return _("There was a problem reading from line %s of the kickstart file") % lineno
    mapping = {"lineno": lineno, "msg": msg}
    return _("The following problem occurred on line %(lineno)s of the kickstart file:\n\n%(msg)s\n") % mapping
class KickstartError(Exception):
    """A generic exception class for unspecific error conditions."""

    def __init__(self, val=""):
        """Create a new KickstartError instance carrying the descriptive
        message val.  val should be the return value of formatErrorMsg.
        """
        super(KickstartError, self).__init__()
        # Stored message; returned verbatim by __str__.
        self.value = val

    def __str__(self):
        return self.value
class KickstartParseError(KickstartError):
    """An exception class for errors when processing the input file, such as
    unknown options, commands, or sections.
    """
    def __init__(self, msg):
        """Create a new KickstartParseError exception instance with the
        descriptive message msg.  msg should be the return value of
        formatErrorMsg.
        """
        # Delegate message storage to the base class; the inherited __str__
        # already returns self.value, so the previously duplicated __str__
        # override was redundant and has been removed.
        KickstartError.__init__(self, msg)
class KickstartValueError(KickstartError):
    """An exception class for errors when processing arguments to commands,
    such as too many arguments, too few arguments, or missing required
    arguments.
    """
    def __init__(self, msg):
        """Create a new KickstartValueError exception instance with the
        descriptive message msg.  msg should be the return value of
        formatErrorMsg.
        """
        # Delegate message storage to the base class; the inherited __str__
        # already returns self.value, so the previously duplicated __str__
        # override was redundant and has been removed.
        KickstartError.__init__(self, msg)
class KickstartVersionError(KickstartError):
    """An exception class for errors related to using an incorrect version of
    kickstart syntax.
    """
    def __init__(self, msg):
        """Create a new KickstartVersionError exception instance with the
        descriptive message msg.  msg should be the return value of
        formatErrorMsg.
        """
        # Delegate message storage to the base class; the inherited __str__
        # already returns self.value, so the previously duplicated __str__
        # override was redundant and has been removed.
        KickstartError.__init__(self, msg)
|
vvv1559/intellij-community | refs/heads/master | python/testData/inspections/PyNumpyType/Empty.py | 69 | def empty(shape, dtype=None, order='C'): # real signature unknown; restored from __doc__
"""
empty(shape, dtype=float, order='C')
Return a new array of given shape and type, without initializing entries.
Parameters
----------
shape : int or tuple of int
Shape of the empty array
dtype : data-type, optional
Desired output data-type.
order : {'C', 'F'}, optional
Whether to store multi-dimensional data in C (row-major) or
Fortran (column-major) order in memory.
Returns
-------
out : ndarray
Array of uninitialized (arbitrary) data with the given
shape, dtype, and order.
See Also
--------
empty_like, zeros, ones
Notes
-----
`empty`, unlike `zeros`, does not set the array values to zero,
and may therefore be marginally faster. On the other hand, it requires
the user to manually set all the values in the array, and should be
used with caution.
Examples
--------
>>> np.empty([2, 2])
array([[ -9.74499359e+001, 6.69583040e-309],
[ 2.13182611e-314, 3.06959433e-309]]) #random
>>> np.empty([2, 2], dtype=int)
array([[-1073741821, -1067949133],
[ 496041986, 19249760]]) #random
"""
pass
empty([2, 2]) |
superhuahua/xunfengES | refs/heads/master | celerynode/vuldb/pyasn1/compat/octets.py | 96 | from sys import version_info
if version_info[0] <= 2:
  # Python 2: the native "str" type already holds bytes, so most of the
  # octet/string conversions are identity functions; "unicode" is the
  # text type.
  int2oct = chr
  ints2octs = lambda s: ''.join([ int2oct(x) for x in s ])
  null = ''
  oct2int = ord
  octs2ints = lambda s: [ oct2int(x) for x in s ]
  str2octs = lambda x: x
  octs2str = lambda x: x
  isOctetsType = lambda s: isinstance(s, str)
  isStringType = lambda s: isinstance(s, (str, unicode))
else:
  # Python 3: octets are "bytes" and text is "str"; str/octet conversions
  # go through encode()/decode() with the default (UTF-8) codec.
  ints2octs = bytes
  int2oct = lambda x: ints2octs((x,))
  null = ints2octs()
  oct2int = lambda x: x
  octs2ints = lambda s: [ x for x in s ]
  str2octs = lambda x: x.encode()
  octs2str = lambda x: x.decode()
  isOctetsType = lambda s: isinstance(s, bytes)
  isStringType = lambda s: isinstance(s, str)
|
lmr/autotest | refs/heads/master | database_legacy/migrations/022_implement_sync_count.py | 6 | DOWN_SQL = """
ALTER TABLE jobs ADD COLUMN synchronizing tinyint(1) default NULL;
ALTER TABLE autotests ADD COLUMN synch_type smallint(6) NOT NULL;
UPDATE autotests SET synch_type = 1;
UPDATE autotests SET synch_type = 2 WHERE sync_count > 1;
ALTER TABLE jobs ADD COLUMN synch_type int(11) default NULL;
UPDATE jobs SET synch_type = 1;
UPDATE jobs SET synch_type = 2 WHERE synch_count > 1;
ALTER TABLE host_queue_entries DROP COLUMN `execution_subdir`;
"""
def migrate_up(manager):
    """Replaces the jobs.synch_type/synchronizing columns with synch_count
    and adds host_queue_entries.execution_subdir.

    For running/complete queue entries the execution_subdir is backfilled:
    synchronous jobs and single-host jobs use '' (results at the job root),
    other entries use the hostname as subdirectory.
    """
    # add execution_subdir field
    manager.execute("""ALTER TABLE host_queue_entries ADD COLUMN
            `execution_subdir` varchar(255) NOT NULL""")
    # fill in execution_subdir field for running/complete entries
    rows = manager.execute("""
        SELECT jobs.id, jobs.synch_type, COUNT(1) FROM jobs
        INNER JOIN host_queue_entries AS hqe ON jobs.id = hqe.job_id
        GROUP BY jobs.id""")
    # job id -> number of queue entries for that job
    job_hqe_count = dict((row[0], row[2]) for row in rows)
    # ids of jobs with synch_type == 2 (synchronous)
    synch_jobs = set(row[0] for row in rows if row[1] == 2)
    hqes = manager.execute("""
        SELECT hqe.id, hqe.job_id, hqe.status, hqe.complete, hosts.hostname
        FROM host_queue_entries AS hqe
        INNER JOIN hosts ON hqe.host_id = hosts.id
        WHERE hqe.status IN ('Starting', 'Running') OR complete""")
    for id, job_id, status, complete, hostname in hqes:
        if job_id in synch_jobs or job_hqe_count[job_id] == 1:
            execution_subdir = ''
        else:
            execution_subdir = hostname
        manager.execute(
            'UPDATE host_queue_entries SET execution_subdir = %s WHERE id = %s',
            execution_subdir, id)
    # ensure synch_type information doesn't get lost if we need to migrate down
    manager.execute('UPDATE jobs SET synch_count = 1 WHERE synch_type = 1')
    manager.execute('UPDATE jobs SET synch_count = 2 '
                    'WHERE synch_type = 2 AND synch_count = 1')
    # drop the old synch_type fields
    manager.execute('ALTER TABLE jobs DROP COLUMN synch_type')
    manager.execute('ALTER TABLE autotests DROP COLUMN synch_type')
    # drop deprecated synchronizing field
    manager.execute('ALTER TABLE jobs DROP COLUMN synchronizing')
def migrate_down(manager):
    """Reverts the schema changes applied by migrate_up() using DOWN_SQL."""
    manager.execute_script(DOWN_SQL)
|
gangadhar-kadam/mic-wnframework | refs/heads/master | webnotes/widgets/query_report.py | 3 | # Copyright (c) 2012 Web Notes Technologies Pvt Ltd (http://erpnext.com)
#
# MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import unicode_literals
import webnotes
import os, json
from webnotes import _
from webnotes.modules import scrub, get_module_path
from webnotes.utils import flt, cint
@webnotes.whitelist()
def get_script(report_name):
	"""Return the client-side JS for the given report.

	Reads <module>/report/<scrubbed name>/<scrubbed name>.js from disk if
	present, otherwise returns an empty wn.query_reports stub definition.
	"""
	report = webnotes.doc("Report", report_name)
	script_path = os.path.join(get_module_path(webnotes.conn.get_value("DocType", report.ref_doctype, "module")),
		"report", scrub(report.name), scrub(report.name) + ".js")
	if os.path.exists(script_path):
		with open(script_path, "r") as script:
			return script.read()
	else:
		# No script file on disk; emit a stub so the client still gets an object
		return "wn.query_reports['%s']={}" % report_name
@webnotes.whitelist()
def run(report_name, filters=None):
	"""Execute a report and return a dict with its rows and column labels.

	Query Reports run their stored SQL directly (SELECT statements only);
	other reports dispatch to the report module's execute() method with
	the (JSON-encoded) filters.
	"""
	report = webnotes.doc("Report", report_name)
	if not webnotes.has_permission(report.ref_doctype, "report"):
		webnotes.msgprint(_("Must have report permission to access this report."),
			raise_exception=True)
	if report.report_type=="Query Report":
		# Raw SQL report: refuse anything but a plain SELECT
		if not report.query:
			webnotes.msgprint(_("Must specify a Query to run"), raise_exception=True)
		if not report.query.lower().startswith("select"):
			webnotes.msgprint(_("Query must be a SELECT"), raise_exception=True)
		result = [list(t) for t in webnotes.conn.sql(report.query)]
		columns = [c[0] for c in webnotes.conn.get_description()]
	else:
		# Script report: <module>.report.<name>.<name>.execute(filters)
		if filters:
			filters = json.loads(filters)
		method_name = scrub(webnotes.conn.get_value("DocType", report.ref_doctype, "module")) \
			+ ".report." + scrub(report.name) + "." + scrub(report.name) + ".execute"
		columns, result = webnotes.get_method(method_name)(filters or {})
	# Optionally append a totals row over numeric columns
	if cint(report.add_total_row) and result:
		result = add_total_row(result, columns)
	return {
		"result": result,
		"columns": columns
	}
def add_total_row(result, columns):
    """Append a totals row summing every numeric column of *result*.

    Columns are "Label:Fieldtype[:Width]" strings; only Currency/Int/Float
    columns are summed (and only truthy cell values contribute).  If the
    first column is non-numeric its totals cell is labelled "Total".
    The row is appended to *result* in place and the list is returned.
    """
    numeric_types = ["Currency", "Int", "Float"]
    total_row = [""] * len(columns)

    for idx, column in enumerate(columns):
        parts = column.split(":")
        if not (len(parts) > 1 and parts[1] in numeric_types):
            continue
        for row in result:
            if flt(row[idx]):
                total_row[idx] = flt(total_row[idx]) + flt(row[idx])

    head = columns[0].split(":")
    if len(head) > 1 and head[1] not in numeric_types:
        total_row[0] = "Total"

    result.append(total_row)
    return result
Therp/partner-contact | refs/heads/8.0 | base_location/models/better_zip.py | 26 | # -*- coding: utf-8 -*-
#
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Contributor: Pedro Manuel Baeza <pedro.baeza@serviciosbaeza.com>
# Ignacio Ibeas <ignacio@acysos.com>
# Alejandro Santana <alejandrosantana@anubia.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from openerp import models, fields, api
class BetterZip(models.Model):
    '''City/locations completion object'''
    # NOTE: the class docstring above doubles as _description -- do not edit
    # one without the other in mind.
    _name = "res.better.zip"
    _description = __doc__
    _order = "name asc"
    _rec_name = "display_name"

    # Recomputed from zip/city/state/country and stored so it is searchable
    # and usable as the record name (_rec_name).
    display_name = fields.Char('Name', compute='_get_display_name', store=True)
    name = fields.Char('ZIP')
    code = fields.Char('City Code', size=64,
                       help="The official code for the city")
    city = fields.Char('City', required=True)
    state_id = fields.Many2one('res.country.state', 'State')
    country_id = fields.Many2one('res.country', 'Country')

    @api.one
    @api.depends(
        'name',
        'city',
        'state_id',
        'country_id',
    )
    def _get_display_name(self):
        # Build "ZIP, City[, State][, Country]"; the ZIP part is optional.
        if self.name:
            name = [self.name, self.city]
        else:
            name = [self.city]
        if self.state_id:
            name.append(self.state_id.name)
        if self.country_id:
            name.append(self.country_id.name)
        self.display_name = ", ".join(name)

    @api.onchange('state_id')
    def onchange_state_id(self):
        # Selecting a state implies its country.
        if self.state_id:
            self.country_id = self.state_id.country_id
|
ropik/chromium | refs/heads/master | ppapi/PRESUBMIT.py | 8 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import sys
import subprocess
def RunCmdAndCheck(cmd, err_string, output_api, cwd=None):
  """Run |cmd|; return [PresubmitError(err_string)] on non-zero exit, else []."""
  proc = subprocess.Popen(cmd,
                          cwd=cwd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  _, stderr = proc.communicate()
  if not proc.returncode:
    return []
  return [output_api.PresubmitError(err_string, long_text=stderr)]
def RunUnittests(input_api, output_api):
  """Run the IDL generator unittests if any generator source changed."""
  # Run some Generator unittests if the generator source was changed.
  results = []
  files = input_api.LocalPaths()
  generator_files = []
  for filename in files:
    name_parts = filename.split(os.sep)
    if name_parts[0:2] == ['ppapi', 'generators']:
      generator_files.append(filename)
  if generator_files != []:
    cmd = [ sys.executable, 'idl_gen_pnacl.py', '--wnone', '--test']
    ppapi_dir = input_api.PresubmitLocalPath()
    # The generator must run from within ppapi/generators.
    results.extend(RunCmdAndCheck(cmd,
                                  'PPAPI IDL Pnacl unittest failed.',
                                  output_api,
                                  os.path.join(ppapi_dir, 'generators')))
  return results
# If any .srpc files were changed, run run_srpcgen.py --diff_mode.
# If any .srpc files were changed, run run_srpcgen.py --diff_mode.
def CheckSrpcChange(input_api, output_api):
  """Verify generated SRPC output is current when any .srpc file changed."""
  if [True for filename in input_api.LocalPaths() if
      os.path.splitext(filename)[1] == '.srpc']:
    return RunCmdAndCheck([sys.executable,
                           os.path.join(input_api.PresubmitLocalPath(),
                                        'native_client', 'src',
                                        'shared', 'ppapi_proxy',
                                        'run_srpcgen.py'),
                           '--diff_mode'],
                          'PPAPI SRPC Diff detected: Run run_srpcgen.py.',
                          output_api)
  return []
# Verify that the files do not contain a 'TODO' in them.
# Matches 'TODO' as a whole word, case-insensitively.
RE_TODO = re.compile(r'\WTODO\W', flags=re.I)

# Verify that the files do not contain a 'TODO' in them.
def CheckTODO(input_api, output_api):
  """Fail if any stable public PPAPI source file contains a TODO.

  Only .h/.cc/.idl files under ppapi/{api,c,cpp,utility} are examined;
  dev/private/trusted interfaces are exempt.
  """
  files = input_api.LocalPaths()
  todo = []

  for filename in files:
    name, ext = os.path.splitext(filename)
    name_parts = name.split(os.sep)

    # Only check normal build sources.
    if ext not in ['.h', '.cc', '.idl']:
      continue

    # Only examine the ppapi directory.
    if name_parts[0] != 'ppapi':
      continue

    # Only examine public plugin facing directories.
    if name_parts[1] not in ['api', 'c', 'cpp', 'utility']:
      continue

    # Only examine public stable interfaces.
    if name_parts[2] in ['dev', 'private', 'trusted']:
      continue

    filepath = os.path.join('..', filename)
    # Use a context manager so the file handle is closed promptly instead
    # of being leaked (the original called open(...).read() inline).
    with open(filepath, 'rb') as src:
      contents = src.read()
    if RE_TODO.search(contents):
      todo.append(filename)

  if todo:
    return [output_api.PresubmitError(
        'TODOs found in stable public PPAPI files:',
        long_text='\n'.join(todo))]
  return []
def CheckChange(input_api, output_api):
  """Aggregate all PPAPI presubmit checks for the current CL.

  Runs the SRPC, unittest and TODO checks, verifies that modified .idl and
  .h files stay in sync, and finally runs the IDL generator in diff mode.
  """
  results = []

  results.extend(CheckSrpcChange(input_api, output_api))

  results.extend(RunUnittests(input_api, output_api))

  results.extend(CheckTODO(input_api, output_api))

  # Verify all modified *.idl have a matching *.h
  files = input_api.LocalPaths()
  h_files = []
  idl_files = []

  # Find all relevant .h and .idl files.
  for filename in files:
    name, ext = os.path.splitext(filename)
    name_parts = name.split(os.sep)
    if name_parts[0:2] == ['ppapi', 'c'] and ext == '.h':
      h_files.append('/'.join(name_parts[2:]))
    if name_parts[0:2] == ['ppapi', 'api'] and ext == '.idl':
      idl_files.append('/'.join(name_parts[2:]))

  # Generate a list of all appropriate *.h and *.idl changes in this CL.
  both = h_files + idl_files

  # If there aren't any, we are done checking.
  if not both: return results

  missing = []
  for filename in idl_files:
    if filename not in set(h_files):
      missing.append(' ppapi/c/%s.idl' % filename)
  if missing:
    results.append(
        output_api.PresubmitPromptWarning(
            'Missing PPAPI header, no change or skipped generation?',
            long_text='\n'.join(missing)))

  missing_dev = []
  missing_stable = []
  missing_priv = []
  for filename in h_files:
    if filename not in set(idl_files):
      # NOTE(review): these names were joined with '/' above but are split
      # on os.sep here -- on Windows the two differ; confirm intended.
      name_parts = filename.split(os.sep)
      if 'trusted' in name_parts:
        missing_priv.append(' ppapi/c/%s.h' % filename)
        continue
      if 'private' in name_parts:
        missing_priv.append(' ppapi/c/%s.h' % filename)
        continue
      if 'dev' in name_parts:
        missing_dev.append(' ppapi/c/%s.h' % filename)
        continue
      missing_stable.append(' ppapi/c/%s.h' % filename)

  if missing_priv:
    results.append(
        output_api.PresubmitPromptWarning(
            'Missing PPAPI IDL for private interface, please generate IDL:',
            long_text='\n'.join(missing_priv)))
  if missing_dev:
    results.append(
        output_api.PresubmitPromptWarning(
            'Missing PPAPI IDL for DEV, required before moving to stable:',
            long_text='\n'.join(missing_dev)))
  if missing_stable:
    results.append(
        output_api.PresubmitError(
            'Missing PPAPI IDL for stable interface:',
            long_text='\n'.join(missing_stable)))

  # Verify all *.h files match *.idl definitions, use:
  #   --test to prevent output to disk
  #   --diff to generate a unified diff
  #   --out to pick which files to examine (only the ones in the CL)
  ppapi_dir = input_api.PresubmitLocalPath()
  cmd = [sys.executable, 'generator.py',
         '--wnone', '--diff', '--test','--cgen', '--range=start,end']

  # Only generate output for IDL files references (as *.h or *.idl) in this CL
  cmd.append('--out=' + ','.join([name + '.idl' for name in both]))

  cmd_results = RunCmdAndCheck(cmd,
                               'PPAPI IDL Diff detected: Run the generator.',
                               output_api,
                               os.path.join(ppapi_dir, 'generators'))
  if cmd_results:
    results.extend(cmd_results)

  return results
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook: run the full check suite at upload time."""
  return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook: run the full check suite at commit time."""
  return CheckChange(input_api, output_api)
|
bjolivot/ansible | refs/heads/devel | lib/ansible/modules/network/illumos/ipadm_addr.py | 20 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <adam.stevko@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module maturity/support metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Ansible module documentation block; typos fixed ("Specifiies" -> "Specifies",
# "an unique" -> "a unique"). Content otherwise unchanged.
DOCUMENTATION = '''
---
module: ipadm_addr
short_description: Manage IP addresses on an interface.
description:
    - Create/delete static/dynamic IP addresses on network interfaces on Solaris/illumos systems.
    - Up/down static/dynamic IP addresses on network interfaces on Solaris/illumos systems.
    - Manage IPv6 link-local addresses on network interfaces on Solaris/illumos systems.
version_added: "2.3"
author: Adam Števko (@xen0l)
options:
    address:
        description:
            - Specifies an IP address to configure in CIDR notation.
        required: false
        aliases: [ "addr" ]
    addrtype:
        description:
            - Specifies a type of IP address to configure.
        required: false
        default: static
        choices: [ 'static', 'dhcp', 'addrconf' ]
    addrobj:
        description:
            - Specifies a unique IP address on the system.
        required: true
    temporary:
        description:
            - Specifies that the configured IP address is temporary. Temporary
              IP addresses do not persist across reboots.
        required: false
        default: false
    wait:
        description:
            - Specifies the time in seconds we wait for obtaining address via DHCP.
        required: false
        default: 60
    state:
        description:
            - Create/delete/enable/disable an IP address on the network interface.
        required: false
        default: present
        choices: [ 'absent', 'present', 'up', 'down', 'enabled', 'disabled', 'refreshed' ]
'''
# NOTE(review): these entries look like YAML task lists that lost their
# leading "- " markers -- confirm against upstream before relying on them as
# copy-paste examples. Fixed the "addtype" typo (the option is "addrtype").
EXAMPLES = '''
name: Configure IP address 10.0.0.1 on e1000g0
ipadm_addr: addr=10.0.0.1/32 addrobj=e1000g0/v4 state=present
name: Delete addrobj
ipadm_addr: addrobj=e1000g0/v4 state=absent
name: Configure link-local IPv6 address
ipadm_addr: addrtype=addrconf addrobj=vnic0/v6
name: Configure address via DHCP and wait 180 seconds for address obtaining
ipadm_addr: addrobj=vnic0/dhcp addrtype=dhcp wait=180
'''
RETURN = '''
addrobj:
description: address object name
returned: always
type: string
sample: bge0/v4
state:
description: state of the target
returned: always
type: string
sample: present
temporary:
description: specifies if operation will persist across reboots
returned: always
type: boolean
sample: True
addrtype:
description: address type
returned: always
type: string
sample: static
address:
description: IP address
returned: only if addrtype is 'static'
type: string
sample: 1.3.3.7/32
wait:
description: time we wait for DHCP
returned: only if addrtype is 'dhcp'
type: string
sample: 10
'''
import socket
from ansible.module_utils.basic import AnsibleModule
# Address types accepted by `ipadm create-addr -T`.
SUPPORTED_TYPES = ['static', 'addrconf', 'dhcp']
class Addr(object):
    """Thin wrapper around the illumos ``ipadm`` address subcommands.

    Caches the validated module parameters and exposes one method per
    ipadm operation; each command method returns the (rc, stdout, stderr)
    triple produced by ``module.run_command``.
    """

    def __init__(self, module):
        self.module = module

        self.address = module.params['address']
        self.addrtype = module.params['addrtype']
        self.addrobj = module.params['addrobj']
        self.temporary = module.params['temporary']
        self.state = module.params['state']
        self.wait = module.params['wait']

    def is_cidr_notation(self):
        # Exactly one '/' must separate the address and the prefix length.
        return self.address.count('/') == 1

    def is_valid_address(self):
        """Validate the address part of ``self.address`` with inet_pton."""
        ip_address = self.address.split('/')[0]

        try:
            # Four dot-separated parts -> try IPv4, otherwise IPv6.
            if len(ip_address.split('.')) == 4:
                socket.inet_pton(socket.AF_INET, ip_address)
            else:
                socket.inet_pton(socket.AF_INET6, ip_address)
        except socket.error:
            return False

        return True

    def is_dhcp(self):
        """Return True if ipadm reports the addrobj's type as 'dhcp'."""
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('show-addr')
        cmd.append('-p')
        cmd.append('-o')
        cmd.append('type')
        cmd.append(self.addrobj)

        (rc, out, err) = self.module.run_command(cmd)

        if rc == 0:
            if out.rstrip() != 'dhcp':
                return False

            return True
        else:
            # show-addr failed: bail out with ipadm's own error output.
            self.module.fail_json(msg='Wrong addrtype %s for addrobj "%s": %s' % (out, self.addrobj, err),
                                  rc=rc,
                                  stderr=err)

    def addrobj_exists(self):
        """Return True if the address object is already known to ipadm."""
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('show-addr')
        cmd.append(self.addrobj)

        (rc, _, _) = self.module.run_command(cmd)

        if rc == 0:
            return True
        else:
            return False

    def delete_addr(self):
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('delete-addr')
        cmd.append(self.addrobj)

        return self.module.run_command(cmd)

    def create_addr(self):
        """Run `ipadm create-addr` with the configured type/address/flags."""
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('create-addr')
        cmd.append('-T')
        cmd.append(self.addrtype)

        if self.temporary:
            cmd.append('-t')

        if self.addrtype == 'static':
            cmd.append('-a')
            cmd.append(self.address)

        if self.addrtype == 'dhcp' and self.wait:
            cmd.append('-w')
            # NOTE(review): self.wait may be an int (default 60); assumes
            # run_command stringifies list elements -- confirm.
            cmd.append(self.wait)

        cmd.append(self.addrobj)

        return self.module.run_command(cmd)

    def up_addr(self):
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('up-addr')

        if self.temporary:
            cmd.append('-t')

        cmd.append(self.addrobj)

        return self.module.run_command(cmd)

    def down_addr(self):
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('down-addr')

        if self.temporary:
            cmd.append('-t')

        cmd.append(self.addrobj)

        return self.module.run_command(cmd)

    def enable_addr(self):
        # enable-addr only supports the temporary (-t) form.
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('enable-addr')
        cmd.append('-t')
        cmd.append(self.addrobj)

        return self.module.run_command(cmd)

    def disable_addr(self):
        # disable-addr only supports the temporary (-t) form.
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('disable-addr')
        cmd.append('-t')
        cmd.append(self.addrobj)

        return self.module.run_command(cmd)

    def refresh_addr(self):
        cmd = [self.module.get_bin_path('ipadm')]

        cmd.append('refresh-addr')
        cmd.append(self.addrobj)

        return self.module.run_command(cmd)
def main():
    """Ansible entry point: validate parameters and dispatch on `state`."""
    module = AnsibleModule(
        argument_spec=dict(
            address=dict(aliases=['addr']),
            addrtype=dict(default='static', choices=SUPPORTED_TYPES),
            addrobj=dict(required=True),
            temporary=dict(default=False, type='bool'),
            state=dict(
                default='present', choices=['absent', 'present', 'up', 'down', 'enabled', 'disabled', 'refreshed']),
            wait=dict(default=60),
        ),
        mutually_exclusive=[
            ('address', 'wait'),
        ],
        supports_check_mode=True
    )

    addr = Addr(module)

    rc = None
    out = ''
    err = ''
    result = {}

    result['addrobj'] = addr.addrobj
    result['state'] = addr.state
    result['temporary'] = addr.temporary
    result['addrtype'] = addr.addrtype

    # Static addresses must be syntactically valid CIDR before we shell out.
    if addr.addrtype == 'static' and addr.address:
        if addr.is_cidr_notation() and addr.is_valid_address():
            result['address'] = addr.address
        else:
            module.fail_json(msg='Invalid IP address: %s' % addr.address)

    if addr.addrtype == 'dhcp' and addr.wait:
        result['wait'] = addr.wait

    # Each branch below is idempotent: it only acts when the addrobj's
    # existence matches what the requested state needs, and exits early in
    # check mode before running any ipadm command.
    if addr.state == 'absent':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.delete_addr()
            if rc != 0:
                module.fail_json(msg='Error while deleting addrobj: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'present':
        if not addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.create_addr()
            if rc != 0:
                module.fail_json(msg='Error while configuring IP address: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 addr=addr.address,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'up':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.up_addr()
            if rc != 0:
                module.fail_json(msg='Error while bringing IP address up: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'down':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.down_addr()
            if rc != 0:
                module.fail_json(msg='Error while bringing IP address down: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'refreshed':
        if addr.addrobj_exists():
            # refresh-addr is only meaningful for DHCP-managed addresses.
            if addr.is_dhcp():
                if module.check_mode:
                    module.exit_json(changed=True)

                (rc, out, err) = addr.refresh_addr()
                if rc != 0:
                    module.fail_json(msg='Error while refreshing IP address: "%s"' % err,
                                     addrobj=addr.addrobj,
                                     stderr=err,
                                     rc=rc)
            else:
                module.fail_json(msg='state "refreshed" cannot be used with "%s" addrtype' % addr.addrtype,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=1)

    elif addr.state == 'enabled':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.enable_addr()
            if rc != 0:
                module.fail_json(msg='Error while enabling IP address: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'disabled':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.disable_addr()
            if rc != 0:
                module.fail_json(msg='Error while disabling IP address: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    # rc stays None when no command was run, i.e. nothing changed.
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True

    if out:
        result['stdout'] = out

    if err:
        result['stderr'] = err

    module.exit_json(**result)
|
BigDataforYou/movie_recommendation_workshop_1 | refs/heads/master | big_data_4_you_demo_1/venv/lib/python2.7/site-packages/click/globals.py | 234 | from threading import local
_local = local()
def get_current_context(silent=False):
"""Returns the current click context. This can be used as a way to
access the current context object from anywhere. This is a more implicit
alternative to the :func:`pass_context` decorator. This function is
primarily useful for helpers such as :func:`echo` which might be
interested in changing it's behavior based on the current context.
To push the current context, :meth:`Context.scope` can be used.
.. versionadded:: 5.0
:param silent: is set to `True` the return value is `None` if no context
is available. The default behavior is to raise a
:exc:`RuntimeError`.
"""
try:
return getattr(_local, 'stack')[-1]
except (AttributeError, IndexError):
if not silent:
raise RuntimeError('There is no active click context.')
def push_context(ctx):
    """Pushes a new context to the current stack."""
    # Create this thread's stack on first use, then append.
    stack = _local.__dict__.setdefault('stack', [])
    stack.append(ctx)
def pop_context():
    """Removes the top level from the stack."""
    # Equivalent to _local.stack.pop(); the value is discarded either way.
    del _local.stack[-1]
def resolve_color_default(color=None):
    """Internal helper to get the default value of the color flag.  If a
    value is passed it's returned unchanged, otherwise it's looked up from
    the current context.
    """
    # (Fixed a stray fourth quote that leaked into the docstring text.)
    if color is not None:
        return color
    ctx = get_current_context(silent=True)
    if ctx is not None:
        return ctx.color
|
chandps/Hackerrank | refs/heads/master | sandbox/caterpilar.py | 3 | from collections import deque
"""
1 <= a[i] <= 10E9
"""
def caterpilar(A, s):
    """Return True iff some contiguous slice of A sums exactly to s.

    Two-pointer ("caterpillar") scan over positive integers: the window
    [back, head) grows to the right while its sum stays <= s and shrinks
    from the left one element per outer step.  Runs in O(n).
    """
    count = len(A)
    head = 0
    window_sum = 0
    for back in range(count):
        while head < count and window_sum + A[head] <= s:
            window_sum += A[head]
            head += 1
        if window_sum == s:
            return True
        window_sum -= A[back]
    return False
def caterpilarList(A, s):
    """Return the first contiguous window of A summing to s, as a deque.

    Same scan as caterpilar(), but the current window's elements are kept
    in a deque.  If no window matches, whatever remains of the final
    (non-matching) window is returned.
    """
    count = len(A)
    window = deque()
    head = 0
    window_sum = 0
    for back in range(count):
        while head < count and window_sum + A[head] <= s:
            window.append(A[head])
            window_sum += A[head]
            head += 1
        if window_sum == s:
            return window
        window_sum -= A[back]
        if window:
            window.popleft()
    return window
# Demo: the window [9, 1] sums to 10, so both calls report a match.
A = [1, 9, 1, 2]
print(caterpilar(A, 10))
print(caterpilarList(A, 10))
|
webspinner/webspinner-gae-cms | refs/heads/master | gdata/Crypto/PublicKey/ElGamal.py | 228 | #
# ElGamal.py : ElGamal encryption/decryption and signatures
#
# Part of the Python Cryptography Toolkit
#
# Distribute and use freely; there are no restrictions on further
# dissemination and usage except those imposed by the laws of your
# country of residence. This software is provided "as is" without
# warranty of fitness for use or suitability for any purpose, express
# or implied. Use at your own risk or not at all.
#
__revision__ = "$Id: ElGamal.py,v 1.9 2003/04/04 19:44:26 akuchling Exp $"
from Crypto.PublicKey.pubkey import *
from Crypto.Util import number
# Module-level exception raised for ElGamal key/parameter problems.
class error (Exception):
    pass
# Generate an ElGamal key with N bits
# Generate an ElGamal key with N bits
def generate(bits, randfunc, progress_func=None):
    """generate(bits:int, randfunc:callable, progress_func:callable)

    Generate an ElGamal key of length 'bits', using 'randfunc' to get
    random data and 'progress_func', if present, to display
    the progress of the key generation.
    """
    obj=ElGamalobj()
    # Generate prime p
    if progress_func:
        progress_func('p\n')
    obj.p=bignum(getPrime(bits, randfunc))
    # Generate random number g
    if progress_func:
        progress_func('g\n')
    size=bits-1-(ord(randfunc(1)) & 63)  # g will be from 1--64 bits smaller than p
    if size<1:
        size=bits-1
    while (1):
        # Retry with a new size until the candidate g is below p.
        obj.g=bignum(getPrime(size, randfunc))
        if obj.g < obj.p:
            break
        size=(size+1) % bits
        if size==0:
            size=4
    # Generate random number x
    if progress_func:
        progress_func('x\n')
    while (1):
        size=bits-1-ord(randfunc(1))  # x will be from 1 to 256 bits smaller than p
        if size>2:
            break
    while (1):
        obj.x=bignum(getPrime(size, randfunc))
        if obj.x < obj.p:
            break
        size = (size+1) % bits
        if size==0:
            size=4
    if progress_func:
        progress_func('y\n')
    # Public component y = g^x mod p.
    obj.y = pow(obj.g, obj.x, obj.p)
    return obj
def construct(tuple):
    """construct(tuple:(long,long,long)|(long,long,long,long)) : ElGamalobj

    Construct an ElGamal key from a 3-tuple (p, g, y) of public values or
    a 4-tuple (p, g, y, x) that also carries the private exponent.
    """
    obj=ElGamalobj()
    if len(tuple) not in [3,4]:
        # Parenthesised raise is valid under both Python 2 and 3; the old
        # "raise error, msg" form was Python-2-only syntax.
        raise error('argument for construct() wrong length')
    for i in range(len(tuple)):
        field = obj.keydata[i]
        setattr(obj, field, tuple[i])
    return obj
class ElGamalobj(pubkey):
    # Attribute order matters: construct() assigns tuple elements to these
    # names positionally (public p, g, y; private exponent x).
    keydata=['p', 'g', 'y', 'x']

    def _encrypt(self, M, K):
        # ElGamal ciphertext pair: a = g^K mod p, b = M * y^K mod p.
        a=pow(self.g, K, self.p)
        b=( M*pow(self.y, K, self.p) ) % self.p
        return ( a,b )

    def _decrypt(self, M):
        if (not hasattr(self, 'x')):
            # "raise error, msg" was Python-2-only; use the call form.
            raise error('Private key not available in this object')
        ax=pow(M[0], self.x, self.p)
        plaintext=(M[1] * inverse(ax, self.p ) ) % self.p
        return plaintext

    def _sign(self, M, K):
        if (not hasattr(self, 'x')):
            raise error('Private key not available in this object')
        p1=self.p-1
        if (GCD(K, p1)!=1):
            # K must be invertible mod p-1 for the signature equation.
            raise error('Bad K value: GCD(K,p-1)!=1')
        a=pow(self.g, K, self.p)
        t=(M-self.x*a) % p1
        while t<0: t=t+p1
        b=(t*inverse(K, p1)) % p1
        return (a, b)

    def _verify(self, M, sig):
        # Accept iff y^a * a^b == g^M (mod p); returns 1/0 per old API.
        v1=pow(self.y, sig[0], self.p)
        v1=(v1*pow(sig[0], sig[1], self.p)) % self.p
        v2=pow(self.g, M, self.p)
        if v1==v2:
            return 1
        return 0

    def size(self):
        "Return the maximum number of bits that can be handled by this key."
        return number.size(self.p) - 1

    def has_private(self):
        """Return a Boolean denoting whether the object contains
        private components."""
        if hasattr(self, 'x'):
            return 1
        else:
            return 0

    def publickey(self):
        """Return a new key object containing only the public information."""
        return construct((self.p, self.g, self.y))
# Legacy alias (shadows the builtin `object`); presumably kept for callers
# that predate the ElGamalobj name -- verify before removing.
object=ElGamalobj
|
a-parhom/edx-platform | refs/heads/master | common/test/acceptance/tests/studio/test_studio_acid_xblock.py | 14 | """
Acceptance tests for Studio related to the acid xblock.
"""
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.xblock.acid import AcidView
from common.test.acceptance.tests.helpers import AcceptanceTest
class XBlockAcidBase(AcceptanceTest):
    """
    Base class for tests that verify that XBlock integration is working correctly
    """
    shard = 21
    # Abstract base: subclasses flip this on to be collected as tests.
    __test__ = False

    def setUp(self):
        """
        Create a unique identifier for the course used in this test.
        """
        # Ensure that the superclass sets up
        super(XBlockAcidBase, self).setUp()

        # Define a unique course identifier
        self.course_info = {
            'org': 'test_org',
            'number': 'course_' + self.unique_id[:5],
            'run': 'test_' + self.unique_id,
            'display_name': 'Test Course ' + self.unique_id
        }

        self.outline = CourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )

        self.course_id = '{org}.{number}.{run}'.format(**self.course_info)

        # Subclass hook: installs the course fixture and sets self.user.
        self.setup_fixtures()

        self.auth_page = AutoAuthPage(
            self.browser,
            staff=False,
            username=self.user.get('username'),
            email=self.user.get('email'),
            password=self.user.get('password')
        )
        self.auth_page.visit()

    def validate_acid_block_preview(self, acid_block):
        """
        Validate the Acid Block's preview
        """
        self.assertTrue(acid_block.init_fn_passed)
        self.assertTrue(acid_block.resource_url_passed)
        self.assertTrue(acid_block.scope_passed('user_state'))
        self.assertTrue(acid_block.scope_passed('user_state_summary'))
        self.assertTrue(acid_block.scope_passed('preferences'))
        self.assertTrue(acid_block.scope_passed('user_info'))

    def test_acid_block_preview(self):
        """
        Verify that all expected acid block tests pass in studio preview
        """
        self.outline.visit()
        subsection = self.outline.section('Test Section').subsection('Test Subsection')
        unit = subsection.expand_subsection().unit('Test Unit').go_to()
        acid_block = AcidView(self.browser, unit.xblocks[0].preview_selector)
        self.validate_acid_block_preview(acid_block)

    def test_acid_block_editor(self):
        """
        Verify that all expected acid block tests pass in studio editor
        """
        self.outline.visit()
        subsection = self.outline.section('Test Section').subsection('Test Subsection')
        unit = subsection.expand_subsection().unit('Test Unit').go_to()
        acid_block = AcidView(self.browser, unit.xblocks[0].edit().editor_selector)
        self.assertTrue(acid_block.init_fn_passed)
        self.assertTrue(acid_block.resource_url_passed)
class XBlockAcidNoChildTest(XBlockAcidBase):
    """
    Tests of an AcidBlock with no children
    """
    __test__ = True

    def setup_fixtures(self):
        # Install a course containing a single childless acid block.
        course_fix = CourseFixture(
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
            self.course_info['display_name']
        )

        course_fix.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                    XBlockFixtureDesc('vertical', 'Test Unit').add_children(
                        XBlockFixtureDesc('acid', 'Acid Block')
                    )
                )
            )
        ).install()

        self.user = course_fix.user
class XBlockAcidParentBase(XBlockAcidBase):
    """
    Base class for tests that verify that parent XBlock integration is working correctly
    """
    __test__ = False

    def validate_acid_block_preview(self, acid_block):
        # Parent blocks must additionally pass the child-related checks.
        super(XBlockAcidParentBase, self).validate_acid_block_preview(acid_block)
        self.assertTrue(acid_block.child_tests_passed)

    def test_acid_block_preview(self):
        """
        Verify that all expected acid block tests pass in studio preview
        """
        self.outline.visit()
        subsection = self.outline.section('Test Section').subsection('Test Subsection')
        unit = subsection.expand_subsection().unit('Test Unit').go_to()
        # Unlike the base class, parent blocks are validated inside their
        # container page rather than directly on the unit page.
        container = unit.xblocks[0].go_to_container()
        acid_block = AcidView(self.browser, container.xblocks[0].preview_selector)
        self.validate_acid_block_preview(acid_block)
class XBlockAcidEmptyParentTest(XBlockAcidParentBase):
    """
    Tests of an AcidBlock with children
    """
    __test__ = True

    def setup_fixtures(self):
        # Install a course with an acid parent block that has no children.
        course_fix = CourseFixture(
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
            self.course_info['display_name']
        )

        course_fix.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                    XBlockFixtureDesc('vertical', 'Test Unit').add_children(
                        XBlockFixtureDesc('acid_parent', 'Acid Parent Block').add_children(
                        )
                    )
                )
            )
        ).install()

        self.user = course_fix.user
class XBlockAcidChildTest(XBlockAcidParentBase):
    """
    Tests of an AcidBlock with children
    """
    __test__ = True

    def setup_fixtures(self):
        # Install a course with an acid parent holding two acid children
        # (distinguished by their 'name' metadata) plus an HTML child.
        course_fix = CourseFixture(
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
            self.course_info['display_name']
        )

        course_fix.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                    XBlockFixtureDesc('vertical', 'Test Unit').add_children(
                        XBlockFixtureDesc('acid_parent', 'Acid Parent Block').add_children(
                            XBlockFixtureDesc('acid', 'First Acid Child', metadata={'name': 'first'}),
                            XBlockFixtureDesc('acid', 'Second Acid Child', metadata={'name': 'second'}),
                            XBlockFixtureDesc('html', 'Html Child', data="<html>Contents</html>"),
                        )
                    )
                )
            )
        ).install()

        self.user = course_fix.user

    # The previous test_acid_block_preview/test_acid_block_editor overrides
    # only called super() with no changes; they were removed as redundant --
    # the inherited implementations run identically.
|
achanda/flocker | refs/heads/master | flocker/provision/_ssh/_monkeypatch.py | 14 | # Copyright (c) Twisted Matrix Laboratories.
"""
Monkey patch twisted.conch.ssh.transport.SSHClientTransport to support
``diffie-hellman-group-exchange-sha256``.
https://clusterhq.atlassian.net/browse/FLOC-2134
This is adapted from the patch at http://twistedmatrix.com/trac/ticket/7672
"""
from twisted.conch.ssh.transport import (
SSHTransportBase, SSHClientTransport,
_generateX,
DH_GENERATOR, DH_PRIME,
MSG_KEXDH_INIT, MSG_KEX_DH_GEX_REQUEST_OLD,
DISCONNECT_KEY_EXCHANGE_FAILED,
)
from hashlib import sha1, sha256
from twisted.python import randbytes
from twisted.conch.ssh.common import NS, MP, _MPpow
from twisted.conch import error
from twisted.conch.ssh import keys
def _dh_sha256_patch():
    """
    Monkey patch twisted.conch.ssh.transport.SSHClientTransport to support
    ``diffie-hellman-group-exchange-sha256``.
    """
    supportedKeyExchanges = ['diffie-hellman-group-exchange-sha1',
                             'diffie-hellman-group-exchange-sha256',
                             'diffie-hellman-group1-sha1']

    def _getKey(self, c, sharedSecret, exchangeHash):
        """
        Get one of the keys for authentication/encryption.

        @type c: C{str}
        @type sharedSecret: C{str}
        @type exchangeHash: C{str}
        """
        # SHA-256 KDF for the new kex algorithm; SHA-1 for everything else.
        if self.kexAlg == 'diffie-hellman-group-exchange-sha256':
            h = sha256
        else:
            h = sha1
        k1 = h(sharedSecret + exchangeHash + c + self.sessionID)
        k1 = k1.digest()
        k2 = h(sharedSecret + exchangeHash + k1).digest()
        return k1 + k2

    def ssh_KEXINIT(self, packet):
        """
        Called when we receive a MSG_KEXINIT message.  For a description
        of the packet, see SSHTransportBase.ssh_KEXINIT().  Additionally,
        this method sends the first key exchange packet.  If the agreed-upon
        exchange is diffie-hellman-group1-sha1, generate a public key
        and send it in a MSG_KEXDH_INIT message.  If the exchange is
        diffie-hellman-group-exchange-sha1, ask for a 2048 bit group with a
        MSG_KEX_DH_GEX_REQUEST_OLD message.
        """
        if SSHTransportBase.ssh_KEXINIT(self, packet) is None:
            return  # we disconnected
        if self.kexAlg == 'diffie-hellman-group1-sha1':
            self.x = _generateX(randbytes.secureRandom, 512)
            self.e = _MPpow(DH_GENERATOR, self.x, DH_PRIME)
            self.sendPacket(MSG_KEXDH_INIT, self.e)
        elif self.kexAlg.startswith('diffie-hellman-group-exchange-'):
            # startswith() handles both the -sha1 and -sha256 variants.
            self.sendPacket(MSG_KEX_DH_GEX_REQUEST_OLD, '\x00\x00\x08\x00')
        else:
            raise error.ConchError("somehow, the kexAlg has been set "
                                   "to something we don't support")

    def _continueKEXDH_REPLY(self, ignored, pubKey, f, signature):
        """
        The host key has been verified, so we generate the keys.

        @param pubKey: the public key blob for the server's public key.
        @type pubKey: C{str}
        @param f: the server's Diffie-Hellman public key.
        @type f: C{long}
        @param signature: the server's signature, verifying that it has the
            correct private key.
        @type signature: C{str}
        """
        serverKey = keys.Key.fromString(pubKey)
        sharedSecret = _MPpow(f, self.x, DH_PRIME)
        if self.kexAlg == 'diffie-hellman-group-exchange-sha256':
            h = sha256()
        else:
            h = sha1()
        h.update(NS(self.ourVersionString))
        h.update(NS(self.otherVersionString))
        h.update(NS(self.ourKexInitPayload))
        h.update(NS(self.otherKexInitPayload))
        h.update(NS(pubKey))
        h.update(self.e)
        h.update(MP(f))
        h.update(sharedSecret)
        exchangeHash = h.digest()
        if not serverKey.verify(signature, exchangeHash):
            self.sendDisconnect(DISCONNECT_KEY_EXCHANGE_FAILED,
                                'bad signature')
            return
        self._keySetup(sharedSecret, exchangeHash)

    def _continueGEX_REPLY(self, ignored, pubKey, f, signature):
        """
        The host key has been verified, so we generate the keys.

        @param pubKey: the public key blob for the server's public key.
        @type pubKey: C{str}
        @param f: the server's Diffie-Hellman public key.
        @type f: C{long}
        @param signature: the server's signature, verifying that it has the
            correct private key.
        @type signature: C{str}
        """
        serverKey = keys.Key.fromString(pubKey)
        sharedSecret = _MPpow(f, self.x, self.p)
        if self.kexAlg == 'diffie-hellman-group-exchange-sha256':
            h = sha256()
        else:
            h = sha1()
        h.update(NS(self.ourVersionString))
        h.update(NS(self.otherVersionString))
        h.update(NS(self.ourKexInitPayload))
        h.update(NS(self.otherKexInitPayload))
        h.update(NS(pubKey))
        h.update('\x00\x00\x08\x00')
        h.update(MP(self.p))
        h.update(MP(self.g))
        h.update(self.e)
        h.update(MP(f))
        h.update(sharedSecret)
        exchangeHash = h.digest()
        if not serverKey.verify(signature, exchangeHash):
            self.sendDisconnect(DISCONNECT_KEY_EXCHANGE_FAILED,
                                'bad signature')
            return
        self._keySetup(sharedSecret, exchangeHash)

    # Install every local defined above (the supportedKeyExchanges list and
    # the four functions) onto SSHClientTransport, overriding the stock
    # implementations.
    for var, val in locals().items():
        setattr(SSHClientTransport, var, val)
def _patch_7672_needed():
    """
    Check if patching ``SSHClientTransport`` is necessary.

    This will be true if ``diffie-hellman-group-exchange-sha256``
    is not a supported keyexchange.
    """
    return ('diffie-hellman-group-exchange-sha256'
            not in SSHClientTransport.supportedKeyExchanges)
# Module-level guard so the monkeypatch is attempted at most once.
patch_7672_applied = False


def patch_twisted_7672():
    """
    Apply the diffie-hellman-group-exchange-sha256 monkeypatch.

    Runs at most once per process, and only when the installed Twisted
    does not already support that key exchange.
    """
    global patch_7672_applied
    if patch_7672_applied or not _patch_7672_needed():
        return
    patch_7672_applied = True
    _dh_sha256_patch()
|
dulaccc/django-accounting | refs/heads/master | accounting/apps/books/migrations/0003_auto_20141029_1606.py | 3 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """
    Wire up the relational fields between the books documents (Invoice,
    Estimate, Bill and their lines) and their Organization / Client /
    TaxRate counterparts, and enforce per-organization document numbering.
    """

    dependencies = [
        ('books', '0002_auto_20141029_1606'),
        ('people', '0001_initial'),
    ]

    operations = [
        # Invoice: issuing organization, billed client, and number unique
        # within each organization.
        migrations.AddField(
            model_name='invoice',
            name='client',
            field=models.ForeignKey(to='people.Client', verbose_name='To Client'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='invoice',
            name='organization',
            field=models.ForeignKey(related_name='invoices', to='books.Organization', verbose_name='From Organization'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='invoice',
            # set literal instead of set([...]) (flake8-comprehensions C405)
            unique_together={('number', 'organization')},
        ),
        # Estimate lines: parent estimate and applicable tax rate.
        migrations.AddField(
            model_name='estimateline',
            name='invoice',
            field=models.ForeignKey(to='books.Estimate', related_name='lines'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='estimateline',
            name='tax_rate',
            field=models.ForeignKey(to='books.TaxRate'),
            preserve_default=True,
        ),
        # Estimate: same organization/client wiring as Invoice.
        migrations.AddField(
            model_name='estimate',
            name='client',
            field=models.ForeignKey(to='people.Client', verbose_name='To Client'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='estimate',
            name='organization',
            field=models.ForeignKey(related_name='estimates', to='books.Organization', verbose_name='From Organization'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='estimate',
            unique_together={('number', 'organization')},
        ),
        # Bill lines: parent bill and applicable tax rate.
        migrations.AddField(
            model_name='billline',
            name='bill',
            field=models.ForeignKey(to='books.Bill', related_name='lines'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='billline',
            name='tax_rate',
            field=models.ForeignKey(to='books.TaxRate'),
            preserve_default=True,
        ),
        # Bill: direction is reversed relative to Invoice — the client
        # bills the organization.
        migrations.AddField(
            model_name='bill',
            name='client',
            field=models.ForeignKey(to='people.Client', verbose_name='From Client'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='bill',
            name='organization',
            field=models.ForeignKey(related_name='bills', to='books.Organization', verbose_name='To Organization'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='bill',
            unique_together={('number', 'organization')},
        ),
    ]
|
it-projects-llc/website-addons | refs/heads/13.0 | stock_picking_barcode/__init__.py | 2 | # License MIT (https://opensource.org/licenses/MIT).
from . import models
from . import controllers
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.