| code<br>stringlengths 3–1.05M | repo_name<br>stringlengths 5–104 | path<br>stringlengths 4–251 | language<br>stringclasses 1 value | license<br>stringclasses 15 values | size<br>int64 3–1.05M |
|---|---|---|---|---|---|
#!/usr/bin/env python
##############################################################################
#
# Usage example for the procedure PPXF, which
# implements the Penalized Pixel-Fitting (pPXF) method by
# Cappellari M., & Emsellem E., 2004, PASP, 116, 138.
# The example also shows how to include a library of templates
# and how to mask gas emission lines if present.
#
# MODIFICATION HISTORY:
# V1.0.0: Written by Michele Cappellari, Leiden 11 November 2003
# V1.1.0: Log rebin the galaxy spectrum. Show how to correct the velocity
# for the difference in starting wavelength of galaxy and templates.
# MC, Vicenza, 28 December 2004
# V1.1.1: Included explanation of correction for instrumental resolution.
# After feedback from David Valls-Gabaud. MC, Venezia, 27 June 2005
# V2.0.0: Included example routine to determine the goodPixels vector
# by masking known gas emission lines. MC, Oxford, 30 October 2008
# V2.0.1: Included instructions for high-redshift usage. Thanks to Paul Westoby
# for useful feedback on this issue. MC, Oxford, 27 November 2008
# V2.0.2: Included example for obtaining the best-fitting redshift.
# MC, Oxford, 14 April 2009
# V2.1.0: Bug fix: Force PSF_GAUSSIAN to produce a Gaussian with an odd
# number of elements centered on the middle one. Many thanks to
# Harald Kuntschner, Eric Emsellem, Anne-Marie Weijmans and
# Richard McDermid for reporting problems with small offsets
# in systemic velocity. MC, Oxford, 15 February 2010
# V2.1.1: Added normalization of galaxy spectrum to avoid numerical
# instabilities. After feedback from Andrea Cardullo.
# MC, Oxford, 17 March 2010
# V2.2.0: Perform templates convolution in linear wavelength.
# This is useful for spectra with large wavelength range.
# MC, Oxford, 25 March 2010
# V2.2.1: Updated for Coyote Graphics. MC, Oxford, 11 October 2011
# V2.2.2: Renamed PPXF_KINEMATICS_EXAMPLE_SAURON to avoid conflict with the
# new PPXF_KINEMATICS_EXAMPLE_SDSS. Removed DETERMINE_GOOPIXELS which was
# made a separate routine. MC, Oxford, 12 January 2012
# V3.0.0: Translated from IDL into Python. MC, Oxford, 6 December 2013
# V3.0.1: Support both Python 2.6/2.7 and Python 3.x. MC, Oxford, 25 May 2014
# V3.0.2: Explicitly sort template files as glob() output may not be sorted.
# Thanks to Marina Trevisan for reporting problems under Linux.
# MC, Sydney, 4 February 2015
# V3.0.3: Use redshift in determine_goodpixels. MC, Oxford, 5 May 2015
# V3.0.4: Support both Pyfits and Astropy to read FITS files.
# MC, Oxford, 22 October 2015
#
##############################################################################
from __future__ import print_function
try:
import pyfits
except:
from astropy.io import fits as pyfits
from scipy import ndimage
import numpy as np
from time import clock
import glob
from ppxf import ppxf
import ppxf_util as util
def ppxf_kinematics_example_sauron():
    """
    Usage example for pPXF (Cappellari & Emsellem 2004, PASP, 116, 138).

    Fits the stellar kinematics of NGC 4550 from a SAURON spectrum, using a
    subset of the Vazdekis (1999, ApJ, 513, 224) SSP library as templates,
    and masking gas emission lines via the GOODPIXELS keyword.  The best
    fit is plotted on screen and the formal errors are printed.
    """
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended replacement for timing short sections of code.
    from time import perf_counter

    # Read the galaxy spectrum and define its (linear) wavelength range.
    # Local names chosen so as not to shadow the builtins dir() and file.
    data_dir = 'spectra/'
    galaxy_filename = data_dir + 'NGC4550_SAURON.fits'
    with pyfits.open(galaxy_filename) as hdu:  # close the FITS file when done
        gal_lin = hdu[0].data
        h1 = hdu[0].header
    lamRange1 = h1['CRVAL1'] + np.array([0., h1['CDELT1']*(h1['NAXIS1'] - 1)])
    FWHM_gal = 4.2  # SAURON has an instrumental resolution FWHM of 4.2A.

    # If the galaxy is at significant redshift (z > 0.03), matching the
    # templates would need a large initial velocity (V > 1e4 km/s) in
    # START = [V, sig], which can make pPXF stop.  The solution is to bring
    # the spectrum roughly to the rest frame BEFORE calling pPXF.  There is
    # no need to modify the spectrum before LOG_REBIN, because a redshift
    # is a linear shift of the log-rebinned spectrum: it is enough to
    # compute the rest-frame wavelength range and adjust the instrumental
    # resolution with these three lines:
    #
    #   z = 1.23                      # initial estimate of the redshift
    #   lamRange1 = lamRange1/(1+z)   # approximate rest-frame range
    #   FWHM_gal = FWHM_gal/(1+z)     # adjust resolution in Angstrom

    galaxy, logLam1, velscale = util.log_rebin(lamRange1, gal_lin)
    galaxy = galaxy/np.median(galaxy)  # normalize to avoid numerical issues
    noise = galaxy*0 + 0.0049          # assume constant noise per pixel here

    # Read the file list of the Vazdekis (1999) SSP library subset shipped
    # with this example (see http://purl.org/cappellari/software for more
    # up-to-date stellar libraries).  glob() output may be unsorted on some
    # platforms, so sort explicitly.
    vazdekis = sorted(glob.glob(data_dir + 'Rbi1.30z*.fits'))
    FWHM_tem = 1.8  # Vazdekis spectra have a resolution FWHM of 1.8A.

    # Log-rebin one template to the galaxy's velocity scale, to determine
    # the size of the array which will contain all template spectra.
    with pyfits.open(vazdekis[0]) as hdu:
        ssp = hdu[0].data
        h2 = hdu[0].header
    lamRange2 = h2['CRVAL1'] + np.array([0., h2['CDELT1']*(h2['NAXIS1'] - 1)])
    sspNew, logLam2, velscale = util.log_rebin(lamRange2, ssp, velscale=velscale)
    templates = np.empty((sspNew.size, len(vazdekis)))

    # Convolve the whole library with the quadratic difference between the
    # SAURON and Vazdekis instrumental resolutions (rigorously valid only
    # when both line spread functions are well approximated by Gaussians),
    # then log-rebin and store each template as a column of TEMPLATES.
    FWHM_dif = np.sqrt(FWHM_gal**2 - FWHM_tem**2)
    sigma = FWHM_dif/2.355/h2['CDELT1']  # sigma difference in pixels
    for j, template_file in enumerate(vazdekis):
        with pyfits.open(template_file) as hdu:
            ssp = hdu[0].data
        ssp = ndimage.gaussian_filter1d(ssp, sigma)
        sspNew, logLam2, velscale = util.log_rebin(lamRange2, ssp, velscale=velscale)
        templates[:, j] = sspNew/np.median(sspNew)  # normalize templates

    # Galaxy and templates do not start at the same wavelength, so an extra
    # velocity shift DV has to be applied to the templates.  Passing
    # VSYST=dv to pPXF removes this artificial shift, so all velocities are
    # measured with respect to DV.  This assumes the redshift is
    # negligible; de-redshift the spectrum first for high-z galaxies (see
    # the commented lines above).
    c = 299792.458
    dv = (logLam2[0] - logLam1[0])*c  # km/s
    vel = 450.  # initial estimate of the galaxy velocity in km/s
    z = np.exp(vel/c) - 1  # relation between velocity and redshift in pPXF
    goodPixels = util.determine_goodpixels(logLam1, lamRange2, z)

    # The actual fit starts here; the best fit is plotted on the screen and
    # gas emission lines are excluded via the GOODPIXELS keyword.
    start = [vel, 180.]  # (km/s), starting guess for [V, sigma]
    t = perf_counter()
    pp = ppxf(templates, galaxy, noise, velscale, start,
              goodpixels=goodPixels, plot=True, moments=4,
              degree=4, vsyst=dv)
    print("Formal errors:")
    print(" dV dsigma dh3 dh4")
    print("".join("%8.2g" % f for f in pp.error*np.sqrt(pp.chi2)))
    print('Elapsed time in PPXF: %.2f s' % (perf_counter() - t))

    # If the wavelength was de-redshifted with the "z = 1.23 ..." lines
    # near the top, the best-fitting redshift is given by (eq. 2 of
    # Cappellari et al. 2009, ApJ, 704, L34;
    # http://adsabs.harvard.edu/abs/2009ApJ...704L..34C):
    #
    #   print('Best-fitting redshift z:', (z + 1)*(1 + pp.sol[0]/c) - 1)
#------------------------------------------------------------------------------
# Run the example only when this file is executed as a script, not when
# it is imported as a module.
if __name__ == '__main__':
    ppxf_kinematics_example_sauron()
| moustakas/impy | lib/ppxf/ppxf_kinematics_example_sauron.py | Python | gpl-2.0 | 8,518 |
import enum
import inspect
import pydoc
import sys
import unittest
import threading
from collections import OrderedDict
from enum import Enum, IntEnum, EnumMeta, Flag, IntFlag, unique, auto
from io import StringIO
from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
from test import support
from datetime import timedelta
# for pickle tests
# Pickle can only serialize an Enum whose class is importable from the
# module where it was defined, so these fixtures must live at module level.
# Each definition is guarded by try/except: on failure the *exception
# object* is bound to the class name, and the individual tests re-raise it
# lazily instead of the whole test module failing at import time.
try:
    class Stooges(Enum):
        LARRY = 1
        CURLY = 2
        MOE = 3
except Exception as exc:
    Stooges = exc
# Enum mixed with int.
try:
    class IntStooges(int, Enum):
        LARRY = 1
        CURLY = 2
        MOE = 3
except Exception as exc:
    IntStooges = exc
# Enum mixed with float.
try:
    class FloatStooges(float, Enum):
        LARRY = 1.39
        CURLY = 2.72
        MOE = 3.142596
except Exception as exc:
    FloatStooges = exc
# Flag fixture for the bitwise-combinable pickle tests.
try:
    class FlagStooges(Flag):
        LARRY = 1
        CURLY = 2
        MOE = 3
except Exception as exc:
    FlagStooges = exc
# for pickle test and subclass tests
try:
    class StrEnum(str, Enum):
        'accepts only string values'
    class Name(StrEnum):
        BDFL = 'Guido van Rossum'
        FLUFL = 'Barry Warsaw'
except Exception as exc:
    Name = exc
# Functional-API fixtures: Question passes module= explicitly (needed for
# pickling), Answer relies on frame introspection to find the module, and
# Theory sets a custom qualname.
try:
    Question = Enum('Question', 'who what when where why', module=__name__)
except Exception as exc:
    Question = exc
try:
    Answer = Enum('Answer', 'him this then there because')
except Exception as exc:
    Answer = exc
try:
    Theory = Enum('Theory', 'rule law supposition', qualname='spanish_inquisition')
except Exception as exc:
    Theory = exc
# for doctests
try:
    class Fruit(Enum):
        TOMATO = 1
        BANANA = 2
        CHERRY = 3
except Exception:
    pass
def test_pickle_dump_load(assertion, source, target=None):
if target is None:
target = source
for protocol in range(HIGHEST_PROTOCOL + 1):
assertion(loads(dumps(source, protocol=protocol)), target)
def test_pickle_exception(assertion, exception, obj):
for protocol in range(HIGHEST_PROTOCOL + 1):
with assertion(exception):
dumps(obj, protocol=protocol)
class TestHelpers(unittest.TestCase):
    """Exercise the private name-classification helpers of the enum
    module: _is_descriptor, _is_sunder and _is_dunder."""

    def test_is_descriptor(self):
        class Plain:
            pass
        # An object becomes a descriptor as soon as it grows any one of
        # the three protocol attributes.
        for proto_attr in ('__get__', '__set__', '__delete__'):
            candidate = Plain()
            self.assertFalse(enum._is_descriptor(candidate))
            setattr(candidate, proto_attr, 1)
            self.assertTrue(enum._is_descriptor(candidate))

    def test_is_sunder(self):
        sunder = ('_a_', '_aa_')
        not_sunder = ('a', 'a_', '_a', '__a', 'a__', '__a__', '_a__',
                      '__a_', '_', '__', '___', '____', '_____')
        for name in sunder:
            self.assertTrue(enum._is_sunder(name))
        for name in not_sunder:
            self.assertFalse(enum._is_sunder(name))

    def test_is_dunder(self):
        dunder = ('__a__', '__aa__')
        not_dunder = ('a', 'a_', '_a', '__a', 'a__', '_a_', '_a__',
                      '__a_', '_', '__', '___', '____', '_____')
        for name in dunder:
            self.assertTrue(enum._is_dunder(name))
        for name in not_dunder:
            self.assertFalse(enum._is_dunder(name))
# for subclassing tests
class classproperty:
    """Descriptor that evaluates *fget* against the owner class, so the
    attribute is computed on the class itself (used by subclassing tests).

    Mirrors the builtin property() signature; only the getter is actually
    used by __get__.
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self.fget = fget
        self.fset = fset
        self.fdel = fdel
        if doc is None and fget is not None:
            # Like property(): inherit the getter's docstring when no
            # explicit doc was supplied.
            doc = fget.__doc__
        self.__doc__ = doc

    def __get__(self, instance, ownerclass):
        # Always dispatch on the owner class, never on the instance.
        return self.fget(ownerclass)
# tests
class TestEnum(unittest.TestCase):
def setUp(self):
class Season(Enum):
SPRING = 1
SUMMER = 2
AUTUMN = 3
WINTER = 4
self.Season = Season
class Konstants(float, Enum):
E = 2.7182818
PI = 3.1415926
TAU = 2 * PI
self.Konstants = Konstants
class Grades(IntEnum):
A = 5
B = 4
C = 3
D = 2
F = 0
self.Grades = Grades
class Directional(str, Enum):
EAST = 'east'
WEST = 'west'
NORTH = 'north'
SOUTH = 'south'
self.Directional = Directional
from datetime import date
class Holiday(date, Enum):
NEW_YEAR = 2013, 1, 1
IDES_OF_MARCH = 2013, 3, 15
self.Holiday = Holiday
def test_dir_on_class(self):
Season = self.Season
self.assertEqual(
set(dir(Season)),
set(['__class__', '__doc__', '__members__', '__module__',
'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']),
)
def test_dir_on_item(self):
Season = self.Season
self.assertEqual(
set(dir(Season.WINTER)),
set(['__class__', '__doc__', '__module__', 'name', 'value']),
)
def test_dir_with_added_behavior(self):
class Test(Enum):
this = 'that'
these = 'those'
def wowser(self):
return ("Wowser! I'm %s!" % self.name)
self.assertEqual(
set(dir(Test)),
set(['__class__', '__doc__', '__members__', '__module__', 'this', 'these']),
)
self.assertEqual(
set(dir(Test.this)),
set(['__class__', '__doc__', '__module__', 'name', 'value', 'wowser']),
)
def test_dir_on_sub_with_behavior_on_super(self):
# see issue22506
class SuperEnum(Enum):
def invisible(self):
return "did you see me?"
class SubEnum(SuperEnum):
sample = 5
self.assertEqual(
set(dir(SubEnum.sample)),
set(['__class__', '__doc__', '__module__', 'name', 'value', 'invisible']),
)
def test_enum_in_enum_out(self):
Season = self.Season
self.assertIs(Season(Season.WINTER), Season.WINTER)
def test_enum_value(self):
Season = self.Season
self.assertEqual(Season.SPRING.value, 1)
def test_intenum_value(self):
self.assertEqual(IntStooges.CURLY.value, 2)
def test_enum(self):
Season = self.Season
lst = list(Season)
self.assertEqual(len(lst), len(Season))
self.assertEqual(len(Season), 4, Season)
self.assertEqual(
[Season.SPRING, Season.SUMMER, Season.AUTUMN, Season.WINTER], lst)
for i, season in enumerate('SPRING SUMMER AUTUMN WINTER'.split(), 1):
e = Season(i)
self.assertEqual(e, getattr(Season, season))
self.assertEqual(e.value, i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, season)
self.assertIn(e, Season)
self.assertIs(type(e), Season)
self.assertIsInstance(e, Season)
self.assertEqual(str(e), 'Season.' + season)
self.assertEqual(
repr(e),
'<Season.{0}: {1}>'.format(season, i),
)
def test_value_name(self):
Season = self.Season
self.assertEqual(Season.SPRING.name, 'SPRING')
self.assertEqual(Season.SPRING.value, 1)
with self.assertRaises(AttributeError):
Season.SPRING.name = 'invierno'
with self.assertRaises(AttributeError):
Season.SPRING.value = 2
def test_changing_member(self):
Season = self.Season
with self.assertRaises(AttributeError):
Season.WINTER = 'really cold'
def test_attribute_deletion(self):
class Season(Enum):
SPRING = 1
SUMMER = 2
AUTUMN = 3
WINTER = 4
def spam(cls):
pass
self.assertTrue(hasattr(Season, 'spam'))
del Season.spam
self.assertFalse(hasattr(Season, 'spam'))
with self.assertRaises(AttributeError):
del Season.SPRING
with self.assertRaises(AttributeError):
del Season.DRY
with self.assertRaises(AttributeError):
del Season.SPRING.name
def test_bool_of_class(self):
class Empty(Enum):
pass
self.assertTrue(bool(Empty))
def test_bool_of_member(self):
class Count(Enum):
zero = 0
one = 1
two = 2
for member in Count:
self.assertTrue(bool(member))
def test_invalid_names(self):
with self.assertRaises(ValueError):
class Wrong(Enum):
mro = 9
with self.assertRaises(ValueError):
class Wrong(Enum):
_create_= 11
with self.assertRaises(ValueError):
class Wrong(Enum):
_get_mixins_ = 9
with self.assertRaises(ValueError):
class Wrong(Enum):
_find_new_ = 1
with self.assertRaises(ValueError):
class Wrong(Enum):
_any_name_ = 9
def test_bool(self):
# plain Enum members are always True
class Logic(Enum):
true = True
false = False
self.assertTrue(Logic.true)
self.assertTrue(Logic.false)
# unless overridden
class RealLogic(Enum):
true = True
false = False
def __bool__(self):
return bool(self._value_)
self.assertTrue(RealLogic.true)
self.assertFalse(RealLogic.false)
# mixed Enums depend on mixed-in type
class IntLogic(int, Enum):
true = 1
false = 0
self.assertTrue(IntLogic.true)
self.assertFalse(IntLogic.false)
def test_contains(self):
Season = self.Season
self.assertIn(Season.AUTUMN, Season)
with self.assertRaises(TypeError):
3 in Season
with self.assertRaises(TypeError):
'AUTUMN' in Season
val = Season(3)
self.assertIn(val, Season)
class OtherEnum(Enum):
one = 1; two = 2
self.assertNotIn(OtherEnum.two, Season)
def test_comparisons(self):
Season = self.Season
with self.assertRaises(TypeError):
Season.SPRING < Season.WINTER
with self.assertRaises(TypeError):
Season.SPRING > 4
self.assertNotEqual(Season.SPRING, 1)
class Part(Enum):
SPRING = 1
CLIP = 2
BARREL = 3
self.assertNotEqual(Season.SPRING, Part.SPRING)
with self.assertRaises(TypeError):
Season.SPRING < Part.CLIP
def test_enum_duplicates(self):
class Season(Enum):
SPRING = 1
SUMMER = 2
AUTUMN = FALL = 3
WINTER = 4
ANOTHER_SPRING = 1
lst = list(Season)
self.assertEqual(
lst,
[Season.SPRING, Season.SUMMER,
Season.AUTUMN, Season.WINTER,
])
self.assertIs(Season.FALL, Season.AUTUMN)
self.assertEqual(Season.FALL.value, 3)
self.assertEqual(Season.AUTUMN.value, 3)
self.assertIs(Season(3), Season.AUTUMN)
self.assertIs(Season(1), Season.SPRING)
self.assertEqual(Season.FALL.name, 'AUTUMN')
self.assertEqual(
[k for k,v in Season.__members__.items() if v.name != k],
['FALL', 'ANOTHER_SPRING'],
)
def test_duplicate_name(self):
with self.assertRaises(TypeError):
class Color(Enum):
red = 1
green = 2
blue = 3
red = 4
with self.assertRaises(TypeError):
class Color(Enum):
red = 1
green = 2
blue = 3
def red(self):
return 'red'
with self.assertRaises(TypeError):
class Color(Enum):
@property
def red(self):
return 'redder'
red = 1
green = 2
blue = 3
def test_enum_with_value_name(self):
class Huh(Enum):
name = 1
value = 2
self.assertEqual(
list(Huh),
[Huh.name, Huh.value],
)
self.assertIs(type(Huh.name), Huh)
self.assertEqual(Huh.name.name, 'name')
self.assertEqual(Huh.name.value, 1)
def test_format_enum(self):
Season = self.Season
self.assertEqual('{}'.format(Season.SPRING),
'{}'.format(str(Season.SPRING)))
self.assertEqual( '{:}'.format(Season.SPRING),
'{:}'.format(str(Season.SPRING)))
self.assertEqual('{:20}'.format(Season.SPRING),
'{:20}'.format(str(Season.SPRING)))
self.assertEqual('{:^20}'.format(Season.SPRING),
'{:^20}'.format(str(Season.SPRING)))
self.assertEqual('{:>20}'.format(Season.SPRING),
'{:>20}'.format(str(Season.SPRING)))
self.assertEqual('{:<20}'.format(Season.SPRING),
'{:<20}'.format(str(Season.SPRING)))
def test_format_enum_custom(self):
class TestFloat(float, Enum):
one = 1.0
two = 2.0
def __format__(self, spec):
return 'TestFloat success!'
self.assertEqual('{}'.format(TestFloat.one), 'TestFloat success!')
def assertFormatIsValue(self, spec, member):
self.assertEqual(spec.format(member), spec.format(member.value))
def test_format_enum_date(self):
Holiday = self.Holiday
self.assertFormatIsValue('{}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:^20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:>20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:<20}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:%Y %m}', Holiday.IDES_OF_MARCH)
self.assertFormatIsValue('{:%Y %m %M:00}', Holiday.IDES_OF_MARCH)
def test_format_enum_float(self):
Konstants = self.Konstants
self.assertFormatIsValue('{}', Konstants.TAU)
self.assertFormatIsValue('{:}', Konstants.TAU)
self.assertFormatIsValue('{:20}', Konstants.TAU)
self.assertFormatIsValue('{:^20}', Konstants.TAU)
self.assertFormatIsValue('{:>20}', Konstants.TAU)
self.assertFormatIsValue('{:<20}', Konstants.TAU)
self.assertFormatIsValue('{:n}', Konstants.TAU)
self.assertFormatIsValue('{:5.2}', Konstants.TAU)
self.assertFormatIsValue('{:f}', Konstants.TAU)
def test_format_enum_int(self):
Grades = self.Grades
self.assertFormatIsValue('{}', Grades.C)
self.assertFormatIsValue('{:}', Grades.C)
self.assertFormatIsValue('{:20}', Grades.C)
self.assertFormatIsValue('{:^20}', Grades.C)
self.assertFormatIsValue('{:>20}', Grades.C)
self.assertFormatIsValue('{:<20}', Grades.C)
self.assertFormatIsValue('{:+}', Grades.C)
self.assertFormatIsValue('{:08X}', Grades.C)
self.assertFormatIsValue('{:b}', Grades.C)
def test_format_enum_str(self):
Directional = self.Directional
self.assertFormatIsValue('{}', Directional.WEST)
self.assertFormatIsValue('{:}', Directional.WEST)
self.assertFormatIsValue('{:20}', Directional.WEST)
self.assertFormatIsValue('{:^20}', Directional.WEST)
self.assertFormatIsValue('{:>20}', Directional.WEST)
self.assertFormatIsValue('{:<20}', Directional.WEST)
def test_hash(self):
Season = self.Season
dates = {}
dates[Season.WINTER] = '1225'
dates[Season.SPRING] = '0315'
dates[Season.SUMMER] = '0704'
dates[Season.AUTUMN] = '1031'
self.assertEqual(dates[Season.AUTUMN], '1031')
def test_intenum_from_scratch(self):
class phy(int, Enum):
pi = 3
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_intenum_inherited(self):
class IntEnum(int, Enum):
pass
class phy(IntEnum):
pi = 3
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_floatenum_from_scratch(self):
class phy(float, Enum):
pi = 3.1415926
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_floatenum_inherited(self):
class FloatEnum(float, Enum):
pass
class phy(FloatEnum):
pi = 3.1415926
tau = 2 * pi
self.assertTrue(phy.pi < phy.tau)
def test_strenum_from_scratch(self):
class phy(str, Enum):
pi = 'Pi'
tau = 'Tau'
self.assertTrue(phy.pi < phy.tau)
def test_strenum_inherited(self):
class StrEnum(str, Enum):
pass
class phy(StrEnum):
pi = 'Pi'
tau = 'Tau'
self.assertTrue(phy.pi < phy.tau)
def test_intenum(self):
class WeekDay(IntEnum):
SUNDAY = 1
MONDAY = 2
TUESDAY = 3
WEDNESDAY = 4
THURSDAY = 5
FRIDAY = 6
SATURDAY = 7
self.assertEqual(['a', 'b', 'c'][WeekDay.MONDAY], 'c')
self.assertEqual([i for i in range(WeekDay.TUESDAY)], [0, 1, 2])
lst = list(WeekDay)
self.assertEqual(len(lst), len(WeekDay))
self.assertEqual(len(WeekDay), 7)
target = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
target = target.split()
for i, weekday in enumerate(target, 1):
e = WeekDay(i)
self.assertEqual(e, i)
self.assertEqual(int(e), i)
self.assertEqual(e.name, weekday)
self.assertIn(e, WeekDay)
self.assertEqual(lst.index(e)+1, i)
self.assertTrue(0 < e < 8)
self.assertIs(type(e), WeekDay)
self.assertIsInstance(e, int)
self.assertIsInstance(e, Enum)
def test_intenum_duplicates(self):
class WeekDay(IntEnum):
SUNDAY = 1
MONDAY = 2
TUESDAY = TEUSDAY = 3
WEDNESDAY = 4
THURSDAY = 5
FRIDAY = 6
SATURDAY = 7
self.assertIs(WeekDay.TEUSDAY, WeekDay.TUESDAY)
self.assertEqual(WeekDay(3).name, 'TUESDAY')
self.assertEqual([k for k,v in WeekDay.__members__.items()
if v.name != k], ['TEUSDAY', ])
def test_intenum_from_bytes(self):
self.assertIs(IntStooges.from_bytes(b'\x00\x03', 'big'), IntStooges.MOE)
with self.assertRaises(ValueError):
IntStooges.from_bytes(b'\x00\x05', 'big')
def test_floatenum_fromhex(self):
h = float.hex(FloatStooges.MOE.value)
self.assertIs(FloatStooges.fromhex(h), FloatStooges.MOE)
h = float.hex(FloatStooges.MOE.value + 0.01)
with self.assertRaises(ValueError):
FloatStooges.fromhex(h)
def test_pickle_enum(self):
if isinstance(Stooges, Exception):
raise Stooges
test_pickle_dump_load(self.assertIs, Stooges.CURLY)
test_pickle_dump_load(self.assertIs, Stooges)
def test_pickle_int(self):
if isinstance(IntStooges, Exception):
raise IntStooges
test_pickle_dump_load(self.assertIs, IntStooges.CURLY)
test_pickle_dump_load(self.assertIs, IntStooges)
def test_pickle_float(self):
if isinstance(FloatStooges, Exception):
raise FloatStooges
test_pickle_dump_load(self.assertIs, FloatStooges.CURLY)
test_pickle_dump_load(self.assertIs, FloatStooges)
def test_pickle_enum_function(self):
if isinstance(Answer, Exception):
raise Answer
test_pickle_dump_load(self.assertIs, Answer.him)
test_pickle_dump_load(self.assertIs, Answer)
def test_pickle_enum_function_with_module(self):
if isinstance(Question, Exception):
raise Question
test_pickle_dump_load(self.assertIs, Question.who)
test_pickle_dump_load(self.assertIs, Question)
def test_enum_function_with_qualname(self):
if isinstance(Theory, Exception):
raise Theory
self.assertEqual(Theory.__qualname__, 'spanish_inquisition')
def test_class_nested_enum_and_pickle_protocol_four(self):
# would normally just have this directly in the class namespace
class NestedEnum(Enum):
twigs = 'common'
shiny = 'rare'
self.__class__.NestedEnum = NestedEnum
self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
test_pickle_dump_load(self.assertIs, self.NestedEnum.twigs)
def test_pickle_by_name(self):
class ReplaceGlobalInt(IntEnum):
ONE = 1
TWO = 2
ReplaceGlobalInt.__reduce_ex__ = enum._reduce_ex_by_name
for proto in range(HIGHEST_PROTOCOL):
self.assertEqual(ReplaceGlobalInt.TWO.__reduce_ex__(proto), 'TWO')
def test_exploding_pickle(self):
BadPickle = Enum(
'BadPickle', 'dill sweet bread-n-butter', module=__name__)
globals()['BadPickle'] = BadPickle
# now break BadPickle to test exception raising
enum._make_class_unpicklable(BadPickle)
test_pickle_exception(self.assertRaises, TypeError, BadPickle.dill)
test_pickle_exception(self.assertRaises, PicklingError, BadPickle)
def test_string_enum(self):
class SkillLevel(str, Enum):
master = 'what is the sound of one hand clapping?'
journeyman = 'why did the chicken cross the road?'
apprentice = 'knock, knock!'
self.assertEqual(SkillLevel.apprentice, 'knock, knock!')
def test_getattr_getitem(self):
class Period(Enum):
morning = 1
noon = 2
evening = 3
night = 4
self.assertIs(Period(2), Period.noon)
self.assertIs(getattr(Period, 'night'), Period.night)
self.assertIs(Period['morning'], Period.morning)
def test_getattr_dunder(self):
Season = self.Season
self.assertTrue(getattr(Season, '__eq__'))
def test_iteration_order(self):
class Season(Enum):
SUMMER = 2
WINTER = 4
AUTUMN = 3
SPRING = 1
self.assertEqual(
list(Season),
[Season.SUMMER, Season.WINTER, Season.AUTUMN, Season.SPRING],
)
def test_reversed_iteration_order(self):
self.assertEqual(
list(reversed(self.Season)),
[self.Season.WINTER, self.Season.AUTUMN, self.Season.SUMMER,
self.Season.SPRING]
)
def test_programmatic_function_string(self):
SummerMonth = Enum('SummerMonth', 'june july august')
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_string_with_start(self):
SummerMonth = Enum('SummerMonth', 'june july august', start=10)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 10):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_string_list(self):
SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'])
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_string_list_with_start(self):
SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'], start=20)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 20):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_iterable(self):
SummerMonth = Enum(
'SummerMonth',
(('june', 1), ('july', 2), ('august', 3))
)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_from_dict(self):
SummerMonth = Enum(
'SummerMonth',
OrderedDict((('june', 1), ('july', 2), ('august', 3)))
)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(int(e.value), i)
self.assertNotEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type(self):
SummerMonth = Enum('SummerMonth', 'june july august', type=int)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type_with_start(self):
SummerMonth = Enum('SummerMonth', 'june july august', type=int, start=30)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 30):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type_from_subclass(self):
SummerMonth = IntEnum('SummerMonth', 'june july august')
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 1):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_programmatic_function_type_from_subclass_with_start(self):
SummerMonth = IntEnum('SummerMonth', 'june july august', start=40)
lst = list(SummerMonth)
self.assertEqual(len(lst), len(SummerMonth))
self.assertEqual(len(SummerMonth), 3, SummerMonth)
self.assertEqual(
[SummerMonth.june, SummerMonth.july, SummerMonth.august],
lst,
)
for i, month in enumerate('june july august'.split(), 40):
e = SummerMonth(i)
self.assertEqual(e, i)
self.assertEqual(e.name, month)
self.assertIn(e, SummerMonth)
self.assertIs(type(e), SummerMonth)
def test_subclassing(self):
if isinstance(Name, Exception):
raise Name
self.assertEqual(Name.BDFL, 'Guido van Rossum')
self.assertTrue(Name.BDFL, Name('Guido van Rossum'))
self.assertIs(Name.BDFL, getattr(Name, 'BDFL'))
test_pickle_dump_load(self.assertIs, Name.BDFL)
def test_extending(self):
class Color(Enum):
red = 1
green = 2
blue = 3
with self.assertRaises(TypeError):
class MoreColor(Color):
cyan = 4
magenta = 5
yellow = 6
def test_exclude_methods(self):
class whatever(Enum):
this = 'that'
these = 'those'
def really(self):
return 'no, not %s' % self.value
self.assertIsNot(type(whatever.really), whatever)
self.assertEqual(whatever.this.really(), 'no, not that')
def test_wrong_inheritance_order(self):
with self.assertRaises(TypeError):
class Wrong(Enum, str):
NotHere = 'error before this point'
def test_intenum_transitivity(self):
class number(IntEnum):
one = 1
two = 2
three = 3
class numero(IntEnum):
uno = 1
dos = 2
tres = 3
self.assertEqual(number.one, numero.uno)
self.assertEqual(number.two, numero.dos)
self.assertEqual(number.three, numero.tres)
def test_wrong_enum_in_call(self):
class Monochrome(Enum):
black = 0
white = 1
class Gender(Enum):
male = 0
female = 1
self.assertRaises(ValueError, Monochrome, Gender.male)
def test_wrong_enum_in_mixed_call(self):
class Monochrome(IntEnum):
black = 0
white = 1
class Gender(Enum):
male = 0
female = 1
self.assertRaises(ValueError, Monochrome, Gender.male)
def test_mixed_enum_in_call_1(self):
class Monochrome(IntEnum):
black = 0
white = 1
class Gender(IntEnum):
male = 0
female = 1
self.assertIs(Monochrome(Gender.female), Monochrome.white)
def test_mixed_enum_in_call_2(self):
class Monochrome(Enum):
black = 0
white = 1
class Gender(IntEnum):
male = 0
female = 1
self.assertIs(Monochrome(Gender.male), Monochrome.black)
def test_flufl_enum(self):
class Fluflnum(Enum):
def __int__(self):
return int(self.value)
class MailManOptions(Fluflnum):
option1 = 1
option2 = 2
option3 = 3
self.assertEqual(int(MailManOptions.option1), 1)
def test_introspection(self):
class Number(IntEnum):
one = 100
two = 200
self.assertIs(Number.one._member_type_, int)
self.assertIs(Number._member_type_, int)
class String(str, Enum):
yarn = 'soft'
rope = 'rough'
wire = 'hard'
self.assertIs(String.yarn._member_type_, str)
self.assertIs(String._member_type_, str)
class Plain(Enum):
vanilla = 'white'
one = 1
self.assertIs(Plain.vanilla._member_type_, object)
self.assertIs(Plain._member_type_, object)
def test_no_such_enum_member(self):
class Color(Enum):
red = 1
green = 2
blue = 3
with self.assertRaises(ValueError):
Color(4)
with self.assertRaises(KeyError):
Color['chartreuse']
def test_new_repr(self):
class Color(Enum):
red = 1
green = 2
blue = 3
def __repr__(self):
return "don't you just love shades of %s?" % self.name
self.assertEqual(
repr(Color.blue),
"don't you just love shades of blue?",
)
def test_inherited_repr(self):
class MyEnum(Enum):
def __repr__(self):
return "My name is %s." % self.name
class MyIntEnum(int, MyEnum):
this = 1
that = 2
theother = 3
self.assertEqual(repr(MyIntEnum.that), "My name is that.")
    def test_multiple_mixin_mro(self):
        """A metaclass derived from EnumMeta may rewrite the class dict
        (replacing Ellipsis values with auto-assigned ints) before the
        real EnumMeta creates the members."""
        class auto_enum(type(Enum)):
            def __new__(metacls, cls, bases, classdict):
                # build a fresh dict of the same (_EnumDict) type; entries
                # in the original cannot simply be overwritten
                temp = type(classdict)()
                names = set(classdict._member_names)
                i = 0
                for k in classdict._member_names:
                    v = classdict[k]
                    if v is Ellipsis:
                        # '...' means: use the running counter
                        v = i
                    else:
                        # an explicit value resets the counter
                        i = v
                    i += 1
                    temp[k] = v
                for k, v in classdict.items():
                    # copy non-member entries (methods, dunders) unchanged
                    if k not in names:
                        temp[k] = v
                return super(auto_enum, metacls).__new__(
                        metacls, cls, bases, temp)
        class AutoNumberedEnum(Enum, metaclass=auto_enum):
            pass
        class AutoIntEnum(IntEnum, metaclass=auto_enum):
            pass
        class TestAutoNumber(AutoNumberedEnum):
            # a == 0, b == 3, c == 4
            a = ...
            b = 3
            c = ...
        class TestAutoInt(AutoIntEnum):
            a = ...
            b = 3
            c = ...
    def test_subclasses_with_getnewargs(self):
        """An int subclass carrying a name pickles via __getnewargs__,
        both as a plain value and as Enum members mixing it in."""
        class NamedInt(int):
            __qualname__ = 'NamedInt'       # needed for pickle protocol 4
            def __new__(cls, *args):
                _args = args
                name, *args = args
                if len(args) == 0:
                    raise TypeError("name and value must be specified")
                self = int.__new__(cls, *args)
                self._intname = name
                self._args = _args
                return self
            def __getnewargs__(self):
                # pickle re-invokes __new__ with the original (name, value)
                return self._args
            @property
            def __name__(self):
                return self._intname
            def __repr__(self):
                # repr() is updated to include the name and type info
                return "{}({!r}, {})".format(type(self).__name__,
                                             self.__name__,
                                             int.__repr__(self))
            def __str__(self):
                # str() is unchanged, even if it relies on the repr() fallback
                base = int
                base_str = base.__str__
                if base_str.__objclass__ is object:
                    return base.__repr__(self)
                return base_str(self)
            # for simplicity, we only define one operator that
            # propagates expressions
            def __add__(self, other):
                temp = int(self) + int( other)
                if isinstance(self, NamedInt) and isinstance(other, NamedInt):
                    return NamedInt(
                        '({0} + {1})'.format(self.__name__, other.__name__),
                        temp )
                else:
                    return temp
        class NEI(NamedInt, Enum):
            __qualname__ = 'NEI'      # needed for pickle protocol 4
            x = ('the-x', 1)
            y = ('the-y', 2)
        self.assertIs(NEI.__new__, Enum.__new__)
        self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
        # pickling must be able to find the classes at module level
        globals()['NamedInt'] = NamedInt
        globals()['NEI'] = NEI
        NI5 = NamedInt('test', 5)
        self.assertEqual(NI5, 5)
        test_pickle_dump_load(self.assertEqual, NI5, 5)
        self.assertEqual(NEI.y.value, 2)
        test_pickle_dump_load(self.assertIs, NEI.y)
        test_pickle_dump_load(self.assertIs, NEI)
    def test_subclasses_with_getnewargs_ex(self):
        """Same as test_subclasses_with_getnewargs, but the pickle hook is
        __getnewargs_ex__ (positional args plus an empty kwargs dict)."""
        class NamedInt(int):
            __qualname__ = 'NamedInt'       # needed for pickle protocol 4
            def __new__(cls, *args):
                _args = args
                name, *args = args
                if len(args) == 0:
                    raise TypeError("name and value must be specified")
                self = int.__new__(cls, *args)
                self._intname = name
                self._args = _args
                return self
            def __getnewargs_ex__(self):
                # pickle re-invokes __new__ with (name, value) and no kwargs
                return self._args, {}
            @property
            def __name__(self):
                return self._intname
            def __repr__(self):
                # repr() is updated to include the name and type info
                return "{}({!r}, {})".format(type(self).__name__,
                                             self.__name__,
                                             int.__repr__(self))
            def __str__(self):
                # str() is unchanged, even if it relies on the repr() fallback
                base = int
                base_str = base.__str__
                if base_str.__objclass__ is object:
                    return base.__repr__(self)
                return base_str(self)
            # for simplicity, we only define one operator that
            # propagates expressions
            def __add__(self, other):
                temp = int(self) + int( other)
                if isinstance(self, NamedInt) and isinstance(other, NamedInt):
                    return NamedInt(
                        '({0} + {1})'.format(self.__name__, other.__name__),
                        temp )
                else:
                    return temp
        class NEI(NamedInt, Enum):
            __qualname__ = 'NEI'      # needed for pickle protocol 4
            x = ('the-x', 1)
            y = ('the-y', 2)
        self.assertIs(NEI.__new__, Enum.__new__)
        self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
        # pickling must be able to find the classes at module level
        globals()['NamedInt'] = NamedInt
        globals()['NEI'] = NEI
        NI5 = NamedInt('test', 5)
        self.assertEqual(NI5, 5)
        test_pickle_dump_load(self.assertEqual, NI5, 5)
        self.assertEqual(NEI.y.value, 2)
        test_pickle_dump_load(self.assertIs, NEI.y)
        test_pickle_dump_load(self.assertIs, NEI)
    def test_subclasses_with_reduce(self):
        """Same NamedInt scenario, pickling through __reduce__."""
        class NamedInt(int):
            __qualname__ = 'NamedInt'       # needed for pickle protocol 4
            def __new__(cls, *args):
                _args = args
                name, *args = args
                if len(args) == 0:
                    raise TypeError("name and value must be specified")
                self = int.__new__(cls, *args)
                self._intname = name
                self._args = _args
                return self
            def __reduce__(self):
                # reconstruct by calling the class with the original args
                return self.__class__, self._args
            @property
            def __name__(self):
                return self._intname
            def __repr__(self):
                # repr() is updated to include the name and type info
                return "{}({!r}, {})".format(type(self).__name__,
                                             self.__name__,
                                             int.__repr__(self))
            def __str__(self):
                # str() is unchanged, even if it relies on the repr() fallback
                base = int
                base_str = base.__str__
                if base_str.__objclass__ is object:
                    return base.__repr__(self)
                return base_str(self)
            # for simplicity, we only define one operator that
            # propagates expressions
            def __add__(self, other):
                temp = int(self) + int( other)
                if isinstance(self, NamedInt) and isinstance(other, NamedInt):
                    return NamedInt(
                        '({0} + {1})'.format(self.__name__, other.__name__),
                        temp )
                else:
                    return temp
        class NEI(NamedInt, Enum):
            __qualname__ = 'NEI'      # needed for pickle protocol 4
            x = ('the-x', 1)
            y = ('the-y', 2)
        self.assertIs(NEI.__new__, Enum.__new__)
        self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
        # pickling must be able to find the classes at module level
        globals()['NamedInt'] = NamedInt
        globals()['NEI'] = NEI
        NI5 = NamedInt('test', 5)
        self.assertEqual(NI5, 5)
        test_pickle_dump_load(self.assertEqual, NI5, 5)
        self.assertEqual(NEI.y.value, 2)
        test_pickle_dump_load(self.assertIs, NEI.y)
        test_pickle_dump_load(self.assertIs, NEI)
    def test_subclasses_with_reduce_ex(self):
        """Same NamedInt scenario, pickling through __reduce_ex__."""
        class NamedInt(int):
            __qualname__ = 'NamedInt'       # needed for pickle protocol 4
            def __new__(cls, *args):
                _args = args
                name, *args = args
                if len(args) == 0:
                    raise TypeError("name and value must be specified")
                self = int.__new__(cls, *args)
                self._intname = name
                self._args = _args
                return self
            def __reduce_ex__(self, proto):
                # protocol number is ignored; rebuild from the original args
                return self.__class__, self._args
            @property
            def __name__(self):
                return self._intname
            def __repr__(self):
                # repr() is updated to include the name and type info
                return "{}({!r}, {})".format(type(self).__name__,
                                             self.__name__,
                                             int.__repr__(self))
            def __str__(self):
                # str() is unchanged, even if it relies on the repr() fallback
                base = int
                base_str = base.__str__
                if base_str.__objclass__ is object:
                    return base.__repr__(self)
                return base_str(self)
            # for simplicity, we only define one operator that
            # propagates expressions
            def __add__(self, other):
                temp = int(self) + int( other)
                if isinstance(self, NamedInt) and isinstance(other, NamedInt):
                    return NamedInt(
                        '({0} + {1})'.format(self.__name__, other.__name__),
                        temp )
                else:
                    return temp
        class NEI(NamedInt, Enum):
            __qualname__ = 'NEI'      # needed for pickle protocol 4
            x = ('the-x', 1)
            y = ('the-y', 2)
        self.assertIs(NEI.__new__, Enum.__new__)
        self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
        # pickling must be able to find the classes at module level
        globals()['NamedInt'] = NamedInt
        globals()['NEI'] = NEI
        NI5 = NamedInt('test', 5)
        self.assertEqual(NI5, 5)
        test_pickle_dump_load(self.assertEqual, NI5, 5)
        self.assertEqual(NEI.y.value, 2)
        test_pickle_dump_load(self.assertIs, NEI.y)
        test_pickle_dump_load(self.assertIs, NEI)
    def test_subclasses_without_direct_pickle_support(self):
        """With no pickle hook at all, pickling NamedInt-based enum members
        must fail cleanly (TypeError for members, PicklingError for the class)."""
        class NamedInt(int):
            __qualname__ = 'NamedInt'       # no pickle hook defined on purpose
            def __new__(cls, *args):
                _args = args
                name, *args = args
                if len(args) == 0:
                    raise TypeError("name and value must be specified")
                self = int.__new__(cls, *args)
                self._intname = name
                self._args = _args
                return self
            @property
            def __name__(self):
                return self._intname
            def __repr__(self):
                # repr() is updated to include the name and type info
                return "{}({!r}, {})".format(type(self).__name__,
                                             self.__name__,
                                             int.__repr__(self))
            def __str__(self):
                # str() is unchanged, even if it relies on the repr() fallback
                base = int
                base_str = base.__str__
                if base_str.__objclass__ is object:
                    return base.__repr__(self)
                return base_str(self)
            # for simplicity, we only define one operator that
            # propagates expressions
            def __add__(self, other):
                temp = int(self) + int( other)
                if isinstance(self, NamedInt) and isinstance(other, NamedInt):
                    return NamedInt(
                        '({0} + {1})'.format(self.__name__, other.__name__),
                        temp )
                else:
                    return temp
        class NEI(NamedInt, Enum):
            __qualname__ = 'NEI'
            x = ('the-x', 1)
            y = ('the-y', 2)
        self.assertIs(NEI.__new__, Enum.__new__)
        self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
        globals()['NamedInt'] = NamedInt
        globals()['NEI'] = NEI
        NI5 = NamedInt('test', 5)
        self.assertEqual(NI5, 5)
        self.assertEqual(NEI.y.value, 2)
        # members cannot be reduced; the class itself cannot be pickled either
        test_pickle_exception(self.assertRaises, TypeError, NEI.x)
        test_pickle_exception(self.assertRaises, PicklingError, NEI)
    def test_subclasses_without_direct_pickle_support_using_name(self):
        """Without value-based pickling, a __reduce_ex__ on the Enum that
        looks members up by name still makes them picklable."""
        class NamedInt(int):
            __qualname__ = 'NamedInt'       # no pickle hook defined on purpose
            def __new__(cls, *args):
                _args = args
                name, *args = args
                if len(args) == 0:
                    raise TypeError("name and value must be specified")
                self = int.__new__(cls, *args)
                self._intname = name
                self._args = _args
                return self
            @property
            def __name__(self):
                return self._intname
            def __repr__(self):
                # repr() is updated to include the name and type info
                return "{}({!r}, {})".format(type(self).__name__,
                                             self.__name__,
                                             int.__repr__(self))
            def __str__(self):
                # str() is unchanged, even if it relies on the repr() fallback
                base = int
                base_str = base.__str__
                if base_str.__objclass__ is object:
                    return base.__repr__(self)
                return base_str(self)
            # for simplicity, we only define one operator that
            # propagates expressions
            def __add__(self, other):
                temp = int(self) + int( other)
                if isinstance(self, NamedInt) and isinstance(other, NamedInt):
                    return NamedInt(
                        '({0} + {1})'.format(self.__name__, other.__name__),
                        temp )
                else:
                    return temp
        class NEI(NamedInt, Enum):
            __qualname__ = 'NEI'
            x = ('the-x', 1)
            y = ('the-y', 2)
            def __reduce_ex__(self, proto):
                # rebuild the member as getattr(NEI, name)
                return getattr, (self.__class__, self._name_)
        self.assertIs(NEI.__new__, Enum.__new__)
        self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
        globals()['NamedInt'] = NamedInt
        globals()['NEI'] = NEI
        NI5 = NamedInt('test', 5)
        self.assertEqual(NI5, 5)
        self.assertEqual(NEI.y.value, 2)
        test_pickle_dump_load(self.assertIs, NEI.y)
        test_pickle_dump_load(self.assertIs, NEI)
def test_tuple_subclass(self):
class SomeTuple(tuple, Enum):
__qualname__ = 'SomeTuple' # needed for pickle protocol 4
first = (1, 'for the money')
second = (2, 'for the show')
third = (3, 'for the music')
self.assertIs(type(SomeTuple.first), SomeTuple)
self.assertIsInstance(SomeTuple.second, tuple)
self.assertEqual(SomeTuple.third, (3, 'for the music'))
globals()['SomeTuple'] = SomeTuple
test_pickle_dump_load(self.assertIs, SomeTuple.first)
def test_duplicate_values_give_unique_enum_items(self):
class AutoNumber(Enum):
first = ()
second = ()
third = ()
def __new__(cls):
value = len(cls.__members__) + 1
obj = object.__new__(cls)
obj._value_ = value
return obj
def __int__(self):
return int(self._value_)
self.assertEqual(
list(AutoNumber),
[AutoNumber.first, AutoNumber.second, AutoNumber.third],
)
self.assertEqual(int(AutoNumber.second), 2)
self.assertEqual(AutoNumber.third.value, 3)
self.assertIs(AutoNumber(1), AutoNumber.first)
def test_inherited_new_from_enhanced_enum(self):
class AutoNumber(Enum):
def __new__(cls):
value = len(cls.__members__) + 1
obj = object.__new__(cls)
obj._value_ = value
return obj
def __int__(self):
return int(self._value_)
class Color(AutoNumber):
red = ()
green = ()
blue = ()
self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
self.assertEqual(list(map(int, Color)), [1, 2, 3])
def test_inherited_new_from_mixed_enum(self):
class AutoNumber(IntEnum):
def __new__(cls):
value = len(cls.__members__) + 1
obj = int.__new__(cls, value)
obj._value_ = value
return obj
class Color(AutoNumber):
red = ()
green = ()
blue = ()
self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
self.assertEqual(list(map(int, Color)), [1, 2, 3])
def test_equality(self):
class AlwaysEqual:
def __eq__(self, other):
return True
class OrdinaryEnum(Enum):
a = 1
self.assertEqual(AlwaysEqual(), OrdinaryEnum.a)
self.assertEqual(OrdinaryEnum.a, AlwaysEqual())
def test_ordered_mixin(self):
class OrderedEnum(Enum):
def __ge__(self, other):
if self.__class__ is other.__class__:
return self._value_ >= other._value_
return NotImplemented
def __gt__(self, other):
if self.__class__ is other.__class__:
return self._value_ > other._value_
return NotImplemented
def __le__(self, other):
if self.__class__ is other.__class__:
return self._value_ <= other._value_
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
return self._value_ < other._value_
return NotImplemented
class Grade(OrderedEnum):
A = 5
B = 4
C = 3
D = 2
F = 1
self.assertGreater(Grade.A, Grade.B)
self.assertLessEqual(Grade.F, Grade.C)
self.assertLess(Grade.D, Grade.A)
self.assertGreaterEqual(Grade.B, Grade.B)
self.assertEqual(Grade.B, Grade.B)
self.assertNotEqual(Grade.C, Grade.D)
def test_extending2(self):
class Shade(Enum):
def shade(self):
print(self.name)
class Color(Shade):
red = 1
green = 2
blue = 3
with self.assertRaises(TypeError):
class MoreColor(Color):
cyan = 4
magenta = 5
yellow = 6
def test_extending3(self):
class Shade(Enum):
def shade(self):
return self.name
class Color(Shade):
def hex(self):
return '%s hexlified!' % self.value
class MoreColor(Color):
cyan = 4
magenta = 5
yellow = 6
self.assertEqual(MoreColor.magenta.hex(), '5 hexlified!')
def test_subclass_duplicate_name(self):
class Base(Enum):
def test(self):
pass
class Test(Base):
test = 1
self.assertIs(type(Test.test), Test)
def test_subclass_duplicate_name_dynamic(self):
from types import DynamicClassAttribute
class Base(Enum):
@DynamicClassAttribute
def test(self):
return 'dynamic'
class Test(Base):
test = 1
self.assertEqual(Test.test.test, 'dynamic')
def test_no_duplicates(self):
class UniqueEnum(Enum):
def __init__(self, *args):
cls = self.__class__
if any(self.value == e.value for e in cls):
a = self.name
e = cls(self.value).name
raise ValueError(
"aliases not allowed in UniqueEnum: %r --> %r"
% (a, e)
)
class Color(UniqueEnum):
red = 1
green = 2
blue = 3
with self.assertRaises(ValueError):
class Color(UniqueEnum):
red = 1
green = 2
blue = 3
grene = 2
def test_init(self):
class Planet(Enum):
MERCURY = (3.303e+23, 2.4397e6)
VENUS = (4.869e+24, 6.0518e6)
EARTH = (5.976e+24, 6.37814e6)
MARS = (6.421e+23, 3.3972e6)
JUPITER = (1.9e+27, 7.1492e7)
SATURN = (5.688e+26, 6.0268e7)
URANUS = (8.686e+25, 2.5559e7)
NEPTUNE = (1.024e+26, 2.4746e7)
def __init__(self, mass, radius):
self.mass = mass # in kilograms
self.radius = radius # in meters
@property
def surface_gravity(self):
# universal gravitational constant (m3 kg-1 s-2)
G = 6.67300E-11
return G * self.mass / (self.radius * self.radius)
self.assertEqual(round(Planet.EARTH.surface_gravity, 2), 9.80)
self.assertEqual(Planet.EARTH.value, (5.976e+24, 6.37814e6))
    def test_ignore(self):
        class Period(timedelta, Enum):
            '''
            different lengths of time
            '''
            def __new__(cls, value, period):
                obj = timedelta.__new__(cls, value)
                obj._value_ = value
                obj.period = period
                return obj
            # names listed in _ignore_ may be used as scratch variables in
            # the class body without becoming members or class attributes
            _ignore_ = 'Period i'
            # vars() inside a class body yields the (Enum) class namespace,
            # so item assignment below creates members programmatically
            Period = vars()
            for i in range(13):
                Period['month_%d' % i] = i*30, 'month'
            for i in range(53):
                Period['week_%d' % i] = i*7, 'week'
            for i in range(32):
                Period['day_%d' % i] = i, 'day'
            # plain assignments referring to generated members create aliases
            OneDay = day_1
            OneWeek = week_1
            OneMonth = month_1
        self.assertFalse(hasattr(Period, '_ignore_'))
        self.assertFalse(hasattr(Period, 'Period'))
        self.assertFalse(hasattr(Period, 'i'))
        self.assertTrue(isinstance(Period.day_1, timedelta))
        # equal values collapse into aliases of the first definition
        self.assertTrue(Period.month_1 is Period.day_30)
        self.assertTrue(Period.week_4 is Period.day_28)
def test_nonhash_value(self):
class AutoNumberInAList(Enum):
def __new__(cls):
value = [len(cls.__members__) + 1]
obj = object.__new__(cls)
obj._value_ = value
return obj
class ColorInAList(AutoNumberInAList):
red = ()
green = ()
blue = ()
self.assertEqual(list(ColorInAList), [ColorInAList.red, ColorInAList.green, ColorInAList.blue])
for enum, value in zip(ColorInAList, range(3)):
value += 1
self.assertEqual(enum.value, [value])
self.assertIs(ColorInAList([value]), enum)
def test_conflicting_types_resolved_in_new(self):
class LabelledIntEnum(int, Enum):
def __new__(cls, *args):
value, label = args
obj = int.__new__(cls, value)
obj.label = label
obj._value_ = value
return obj
class LabelledList(LabelledIntEnum):
unprocessed = (1, "Unprocessed")
payment_complete = (2, "Payment Complete")
self.assertEqual(list(LabelledList), [LabelledList.unprocessed, LabelledList.payment_complete])
self.assertEqual(LabelledList.unprocessed, 1)
self.assertEqual(LabelledList(1), LabelledList.unprocessed)
def test_auto_number(self):
class Color(Enum):
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 1)
self.assertEqual(Color.blue.value, 2)
self.assertEqual(Color.green.value, 3)
def test_auto_name(self):
class Color(Enum):
def _generate_next_value_(name, start, count, last):
return name
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 'red')
self.assertEqual(Color.blue.value, 'blue')
self.assertEqual(Color.green.value, 'green')
def test_auto_name_inherit(self):
class AutoNameEnum(Enum):
def _generate_next_value_(name, start, count, last):
return name
class Color(AutoNameEnum):
red = auto()
blue = auto()
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 'red')
self.assertEqual(Color.blue.value, 'blue')
self.assertEqual(Color.green.value, 'green')
def test_auto_garbage(self):
class Color(Enum):
red = 'red'
blue = auto()
self.assertEqual(Color.blue.value, 1)
def test_auto_garbage_corrected(self):
class Color(Enum):
red = 'red'
blue = 2
green = auto()
self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
self.assertEqual(Color.red.value, 'red')
self.assertEqual(Color.blue.value, 2)
self.assertEqual(Color.green.value, 3)
def test_duplicate_auto(self):
class Dupes(Enum):
first = primero = auto()
second = auto()
third = auto()
self.assertEqual([Dupes.first, Dupes.second, Dupes.third], list(Dupes))
    def test_missing(self):
        """_missing_ may return a member, or None (-> ValueError); a bad
        return type or an exception must chain from the lookup ValueError."""
        class Color(Enum):
            red = 1
            green = 2
            blue = 3
            @classmethod
            def _missing_(cls, item):
                if item == 'three':
                    return cls.blue
                elif item == 'bad return':
                    # trigger internal error
                    return 5
                elif item == 'error out':
                    raise ZeroDivisionError
                else:
                    # trigger not found
                    return None
        self.assertIs(Color('three'), Color.blue)
        self.assertRaises(ValueError, Color, 7)
        try:
            Color('bad return')
        except TypeError as exc:
            # the original failed lookup is kept as the exception context
            self.assertTrue(isinstance(exc.__context__, ValueError))
        else:
            raise Exception('Exception not raised.')
        try:
            Color('error out')
        except ZeroDivisionError as exc:
            self.assertTrue(isinstance(exc.__context__, ValueError))
        else:
            raise Exception('Exception not raised.')
def test_multiple_mixin(self):
class MaxMixin:
@classproperty
def MAX(cls):
max = len(cls)
cls.MAX = max
return max
class StrMixin:
def __str__(self):
return self._name_.lower()
class SomeEnum(Enum):
def behavior(self):
return 'booyah'
class AnotherEnum(Enum):
def behavior(self):
return 'nuhuh!'
def social(self):
return "what's up?"
class Color(MaxMixin, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 3)
self.assertEqual(Color.MAX, 3)
self.assertEqual(str(Color.BLUE), 'Color.BLUE')
class Color(MaxMixin, StrMixin, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 3)
self.assertEqual(Color.MAX, 3)
self.assertEqual(str(Color.BLUE), 'blue')
class Color(StrMixin, MaxMixin, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(Color.RED.value, 1)
self.assertEqual(Color.GREEN.value, 2)
self.assertEqual(Color.BLUE.value, 3)
self.assertEqual(Color.MAX, 3)
self.assertEqual(str(Color.BLUE), 'blue')
class CoolColor(StrMixin, SomeEnum, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(CoolColor.RED.value, 1)
self.assertEqual(CoolColor.GREEN.value, 2)
self.assertEqual(CoolColor.BLUE.value, 3)
self.assertEqual(str(CoolColor.BLUE), 'blue')
self.assertEqual(CoolColor.RED.behavior(), 'booyah')
class CoolerColor(StrMixin, AnotherEnum, Enum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(CoolerColor.RED.value, 1)
self.assertEqual(CoolerColor.GREEN.value, 2)
self.assertEqual(CoolerColor.BLUE.value, 3)
self.assertEqual(str(CoolerColor.BLUE), 'blue')
self.assertEqual(CoolerColor.RED.behavior(), 'nuhuh!')
self.assertEqual(CoolerColor.RED.social(), "what's up?")
class CoolestColor(StrMixin, SomeEnum, AnotherEnum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(CoolestColor.RED.value, 1)
self.assertEqual(CoolestColor.GREEN.value, 2)
self.assertEqual(CoolestColor.BLUE.value, 3)
self.assertEqual(str(CoolestColor.BLUE), 'blue')
self.assertEqual(CoolestColor.RED.behavior(), 'booyah')
self.assertEqual(CoolestColor.RED.social(), "what's up?")
class ConfusedColor(StrMixin, AnotherEnum, SomeEnum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(ConfusedColor.RED.value, 1)
self.assertEqual(ConfusedColor.GREEN.value, 2)
self.assertEqual(ConfusedColor.BLUE.value, 3)
self.assertEqual(str(ConfusedColor.BLUE), 'blue')
self.assertEqual(ConfusedColor.RED.behavior(), 'nuhuh!')
self.assertEqual(ConfusedColor.RED.social(), "what's up?")
class ReformedColor(StrMixin, IntEnum, SomeEnum, AnotherEnum):
RED = auto()
GREEN = auto()
BLUE = auto()
self.assertEqual(ReformedColor.RED.value, 1)
self.assertEqual(ReformedColor.GREEN.value, 2)
self.assertEqual(ReformedColor.BLUE.value, 3)
self.assertEqual(str(ReformedColor.BLUE), 'blue')
self.assertEqual(ReformedColor.RED.behavior(), 'booyah')
self.assertEqual(ConfusedColor.RED.social(), "what's up?")
self.assertTrue(issubclass(ReformedColor, int))
def test_multiple_inherited_mixin(self):
class StrEnum(str, Enum):
def __new__(cls, *args, **kwargs):
for a in args:
if not isinstance(a, str):
raise TypeError("Enumeration '%s' (%s) is not"
" a string" % (a, type(a).__name__))
return str.__new__(cls, *args, **kwargs)
@unique
class Decision1(StrEnum):
REVERT = "REVERT"
REVERT_ALL = "REVERT_ALL"
RETRY = "RETRY"
class MyEnum(StrEnum):
pass
@unique
class Decision2(MyEnum):
REVERT = "REVERT"
REVERT_ALL = "REVERT_ALL"
RETRY = "RETRY"
def test_empty_globals(self):
# bpo-35717: sys._getframe(2).f_globals['__name__'] fails with KeyError
# when using compile and exec because f_globals is empty
code = "from enum import Enum; Enum('Animal', 'ANT BEE CAT DOG')"
code = compile(code, "<string>", "exec")
global_ns = {}
local_ls = {}
exec(code, global_ns, local_ls)
class TestOrder(unittest.TestCase):
    """Check that _order_, when given, must match actual definition order."""

    def test_same_members(self):
        class Color(Enum):
            _order_ = 'red green blue'
            red, green, blue = 1, 2, 3

    def test_same_members_with_aliases(self):
        class Color(Enum):
            _order_ = 'red green blue'
            red, green, blue = 1, 2, 3
            verde = green

    def test_same_members_wrong_order(self):
        with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
            class Color(Enum):
                _order_ = 'red green blue'
                red, blue, green = 1, 3, 2

    def test_order_has_extra_members(self):
        with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
            class Color(Enum):
                _order_ = 'red green blue purple'
                red, green, blue = 1, 2, 3

    def test_order_has_extra_members_with_aliases(self):
        with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
            class Color(Enum):
                _order_ = 'red green blue purple'
                red, green, blue = 1, 2, 3
                verde = green

    def test_enum_has_extra_members(self):
        with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
            class Color(Enum):
                _order_ = 'red green blue'
                red, green, blue, purple = 1, 2, 3, 4

    def test_enum_has_extra_members_with_aliases(self):
        with self.assertRaisesRegex(TypeError, 'member order does not match _order_'):
            class Color(Enum):
                _order_ = 'red green blue'
                red, green, blue, purple = 1, 2, 3, 4
                verde = green
class TestFlag(unittest.TestCase):
"""Tests of the Flags."""
class Perm(Flag):
R, W, X = 4, 2, 1
    class Open(Flag):
        RO = 0          # zero-valued "no flags" member
        WO = 1
        RW = 2
        AC = 3          # composite of WO|RW defined as its own named member
        CE = 1<<19      # a high bit far from the low members
    class Color(Flag):
        BLACK = 0       # zero-valued member
        RED = 1
        GREEN = 2
        BLUE = 4
        PURPLE = RED|BLUE   # named composite (value 5)
def test_str(self):
Perm = self.Perm
self.assertEqual(str(Perm.R), 'Perm.R')
self.assertEqual(str(Perm.W), 'Perm.W')
self.assertEqual(str(Perm.X), 'Perm.X')
self.assertEqual(str(Perm.R | Perm.W), 'Perm.R|W')
self.assertEqual(str(Perm.R | Perm.W | Perm.X), 'Perm.R|W|X')
self.assertEqual(str(Perm(0)), 'Perm.0')
self.assertEqual(str(~Perm.R), 'Perm.W|X')
self.assertEqual(str(~Perm.W), 'Perm.R|X')
self.assertEqual(str(~Perm.X), 'Perm.R|W')
self.assertEqual(str(~(Perm.R | Perm.W)), 'Perm.X')
self.assertEqual(str(~(Perm.R | Perm.W | Perm.X)), 'Perm.0')
self.assertEqual(str(Perm(~0)), 'Perm.R|W|X')
Open = self.Open
self.assertEqual(str(Open.RO), 'Open.RO')
self.assertEqual(str(Open.WO), 'Open.WO')
self.assertEqual(str(Open.AC), 'Open.AC')
self.assertEqual(str(Open.RO | Open.CE), 'Open.CE')
self.assertEqual(str(Open.WO | Open.CE), 'Open.CE|WO')
self.assertEqual(str(~Open.RO), 'Open.CE|AC|RW|WO')
self.assertEqual(str(~Open.WO), 'Open.CE|RW')
self.assertEqual(str(~Open.AC), 'Open.CE')
self.assertEqual(str(~(Open.RO | Open.CE)), 'Open.AC')
self.assertEqual(str(~(Open.WO | Open.CE)), 'Open.RW')
def test_repr(self):
Perm = self.Perm
self.assertEqual(repr(Perm.R), '<Perm.R: 4>')
self.assertEqual(repr(Perm.W), '<Perm.W: 2>')
self.assertEqual(repr(Perm.X), '<Perm.X: 1>')
self.assertEqual(repr(Perm.R | Perm.W), '<Perm.R|W: 6>')
self.assertEqual(repr(Perm.R | Perm.W | Perm.X), '<Perm.R|W|X: 7>')
self.assertEqual(repr(Perm(0)), '<Perm.0: 0>')
self.assertEqual(repr(~Perm.R), '<Perm.W|X: 3>')
self.assertEqual(repr(~Perm.W), '<Perm.R|X: 5>')
self.assertEqual(repr(~Perm.X), '<Perm.R|W: 6>')
self.assertEqual(repr(~(Perm.R | Perm.W)), '<Perm.X: 1>')
self.assertEqual(repr(~(Perm.R | Perm.W | Perm.X)), '<Perm.0: 0>')
self.assertEqual(repr(Perm(~0)), '<Perm.R|W|X: 7>')
Open = self.Open
self.assertEqual(repr(Open.RO), '<Open.RO: 0>')
self.assertEqual(repr(Open.WO), '<Open.WO: 1>')
self.assertEqual(repr(Open.AC), '<Open.AC: 3>')
self.assertEqual(repr(Open.RO | Open.CE), '<Open.CE: 524288>')
self.assertEqual(repr(Open.WO | Open.CE), '<Open.CE|WO: 524289>')
self.assertEqual(repr(~Open.RO), '<Open.CE|AC|RW|WO: 524291>')
self.assertEqual(repr(~Open.WO), '<Open.CE|RW: 524290>')
self.assertEqual(repr(~Open.AC), '<Open.CE: 524288>')
self.assertEqual(repr(~(Open.RO | Open.CE)), '<Open.AC: 3>')
self.assertEqual(repr(~(Open.WO | Open.CE)), '<Open.RW: 2>')
def test_or(self):
Perm = self.Perm
for i in Perm:
for j in Perm:
self.assertEqual((i | j), Perm(i.value | j.value))
self.assertEqual((i | j).value, i.value | j.value)
self.assertIs(type(i | j), Perm)
for i in Perm:
self.assertIs(i | i, i)
Open = self.Open
self.assertIs(Open.RO | Open.CE, Open.CE)
def test_and(self):
Perm = self.Perm
RW = Perm.R | Perm.W
RX = Perm.R | Perm.X
WX = Perm.W | Perm.X
RWX = Perm.R | Perm.W | Perm.X
values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
for i in values:
for j in values:
self.assertEqual((i & j).value, i.value & j.value)
self.assertIs(type(i & j), Perm)
for i in Perm:
self.assertIs(i & i, i)
self.assertIs(i & RWX, i)
self.assertIs(RWX & i, i)
Open = self.Open
self.assertIs(Open.RO & Open.CE, Open.RO)
def test_xor(self):
Perm = self.Perm
for i in Perm:
for j in Perm:
self.assertEqual((i ^ j).value, i.value ^ j.value)
self.assertIs(type(i ^ j), Perm)
for i in Perm:
self.assertIs(i ^ Perm(0), i)
self.assertIs(Perm(0) ^ i, i)
Open = self.Open
self.assertIs(Open.RO ^ Open.CE, Open.CE)
self.assertIs(Open.CE ^ Open.CE, Open.RO)
def test_invert(self):
Perm = self.Perm
RW = Perm.R | Perm.W
RX = Perm.R | Perm.X
WX = Perm.W | Perm.X
RWX = Perm.R | Perm.W | Perm.X
values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
for i in values:
self.assertIs(type(~i), Perm)
self.assertEqual(~~i, i)
for i in Perm:
self.assertIs(~~i, i)
Open = self.Open
self.assertIs(Open.WO & ~Open.WO, Open.RO)
self.assertIs((Open.WO|Open.CE) & ~Open.WO, Open.CE)
def test_bool(self):
Perm = self.Perm
for f in Perm:
self.assertTrue(f)
Open = self.Open
for f in Open:
self.assertEqual(bool(f.value), bool(f))
def test_programatic_function_string(self):
Perm = Flag('Perm', 'R W X')
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_string_with_start(self):
Perm = Flag('Perm', 'R W X', start=8)
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 8<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_string_list(self):
Perm = Flag('Perm', ['R', 'W', 'X'])
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<i
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_iterable(self):
Perm = Flag('Perm', (('R', 2), ('W', 8), ('X', 32)))
lst = list(Perm)
self.assertEqual(len(lst), len(Perm))
self.assertEqual(len(Perm), 3, Perm)
self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
for i, n in enumerate('R W X'.split()):
v = 1<<(2*i+1)
e = Perm(v)
self.assertEqual(e.value, v)
self.assertEqual(type(e.value), int)
self.assertEqual(e.name, n)
self.assertIn(e, Perm)
self.assertIs(type(e), Perm)
def test_programatic_function_from_dict(self):
    """Functional API: a mapping of name->value works like the pair form."""
    Perm = Flag('Perm', OrderedDict((('R', 2), ('W', 8), ('X', 32))))
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 3, Perm)
    self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
    for i, n in enumerate('R W X'.split()):
        # 2, 8, 32 == 1 << 1, 1 << 3, 1 << 5
        v = 1<<(2*i+1)
        e = Perm(v)
        self.assertEqual(e.value, v)
        self.assertEqual(type(e.value), int)
        self.assertEqual(e.name, n)
        self.assertIn(e, Perm)
        self.assertIs(type(e), Perm)
def test_pickle(self):
if isinstance(FlagStooges, Exception):
raise FlagStooges
test_pickle_dump_load(self.assertIs, FlagStooges.CURLY|FlagStooges.MOE)
test_pickle_dump_load(self.assertIs, FlagStooges)
def test_contains(self):
    """`in` on a Flag class accepts only members; non-members raise TypeError."""
    Open = self.Open
    Color = self.Color
    # Members of one Flag class are not contained in another.
    self.assertFalse(Color.BLACK in Open)
    self.assertFalse(Open.RO in Color)
    # Strings and ints are not valid containment operands for Flag classes.
    with self.assertRaises(TypeError):
        'BLACK' in Color
    with self.assertRaises(TypeError):
        'RO' in Open
    with self.assertRaises(TypeError):
        1 in Color
    with self.assertRaises(TypeError):
        1 in Open
def test_member_contains(self):
    """`in` between Flag members reports whether a basic flag's bit is set."""
    R, W, X = self.Perm
    RW, RX, WX = R | W, R | X, W | X
    RWX = R | W | X
    # A single flag is contained in every composite that includes its bit...
    for single, composite in [
            (R, RW), (R, RX), (R, RWX),
            (W, RW), (W, WX), (W, RWX),
            (X, RX), (X, WX), (X, RWX),
            ]:
        self.assertTrue(single in composite)
    # ...and in none of the composites that omit it.
    for single, composite in [(R, WX), (W, RX), (X, RW)]:
        self.assertFalse(single in composite)
def test_auto_number(self):
    """auto() assigns consecutive powers of two in a Flag."""
    class Color(Flag):
        red = auto()
        blue = auto()
        green = auto()
    self.assertEqual(list(Color), [Color.red, Color.blue, Color.green])
    self.assertEqual(Color.red.value, 1)
    self.assertEqual(Color.blue.value, 2)
    self.assertEqual(Color.green.value, 4)
def test_auto_number_garbage(self):
    """A non-int value before auto() raises TypeError at class creation."""
    with self.assertRaisesRegex(TypeError, 'Invalid Flag value: .not an int.'):
        class Color(Flag):
            red = 'not an int'
            blue = auto()
def test_cascading_failure(self):
    """Failed flag-value lookups must fail consistently on repeat.

    Each lookup is performed twice on purpose — presumably to guard
    against a failed decomposition poisoning the class's internal
    value cache for later lookups; confirm against enum's
    _decompose/_missing_ handling.
    """
    class Bizarre(Flag):
        c = 3
        d = 4
        f = 6
    # Bizarre.c | Bizarre.d
    self.assertRaisesRegex(ValueError, "5 is not a valid Bizarre", Bizarre, 5)
    self.assertRaisesRegex(ValueError, "5 is not a valid Bizarre", Bizarre, 5)
    self.assertRaisesRegex(ValueError, "2 is not a valid Bizarre", Bizarre, 2)
    self.assertRaisesRegex(ValueError, "2 is not a valid Bizarre", Bizarre, 2)
    self.assertRaisesRegex(ValueError, "1 is not a valid Bizarre", Bizarre, 1)
    self.assertRaisesRegex(ValueError, "1 is not a valid Bizarre", Bizarre, 1)
def test_duplicate_auto(self):
    """Two names bound to the same auto() value: second becomes an alias."""
    class Dupes(Enum):
        first = primero = auto()
        second = auto()
        third = auto()
    # Aliases (primero) do not appear when iterating the enumeration.
    self.assertEqual([Dupes.first, Dupes.second, Dupes.third], list(Dupes))
def test_bizarre(self):
    """A value composable from overlapping members reprs as their union."""
    class Bizarre(Flag):
        b = 3
        c = 4
        d = 6
    # 7 == 6 | 4 | 3 once overlapping bits are accounted for.
    self.assertEqual(repr(Bizarre(7)), '<Bizarre.d|c|b: 7>')
def test_multiple_mixin(self):
    """Mixins add behavior to a Flag; MRO order decides which __str__ wins."""
    class AllMixin:
        @classproperty
        def ALL(cls):
            # Compose every member into one value, then cache the result
            # on the class so later accesses skip the recomputation.
            members = list(cls)
            all_value = None
            if members:
                all_value = members[0]
                for member in members[1:]:
                    all_value |= member
            cls.ALL = all_value
            return all_value
    class StrMixin:
        def __str__(self):
            return self._name_.lower()
    class Color(AllMixin, Flag):
        RED = auto()
        GREEN = auto()
        BLUE = auto()
    self.assertEqual(Color.RED.value, 1)
    self.assertEqual(Color.GREEN.value, 2)
    self.assertEqual(Color.BLUE.value, 4)
    self.assertEqual(Color.ALL.value, 7)
    # Without StrMixin, Flag's own __str__ is used.
    self.assertEqual(str(Color.BLUE), 'Color.BLUE')
    class Color(AllMixin, StrMixin, Flag):
        RED = auto()
        GREEN = auto()
        BLUE = auto()
    self.assertEqual(Color.RED.value, 1)
    self.assertEqual(Color.GREEN.value, 2)
    self.assertEqual(Color.BLUE.value, 4)
    self.assertEqual(Color.ALL.value, 7)
    # StrMixin precedes Flag in the MRO, so its __str__ wins.
    self.assertEqual(str(Color.BLUE), 'blue')
    class Color(StrMixin, AllMixin, Flag):
        RED = auto()
        GREEN = auto()
        BLUE = auto()
    self.assertEqual(Color.RED.value, 1)
    self.assertEqual(Color.GREEN.value, 2)
    self.assertEqual(Color.BLUE.value, 4)
    self.assertEqual(Color.ALL.value, 7)
    # Mixin order between the two mixins does not change the outcome.
    self.assertEqual(str(Color.BLUE), 'blue')
@support.reap_threads
def test_unique_composite(self):
    """Concurrent creation of composite Flag members must not duplicate them."""
    # override __eq__ to be identity only, so any duplicate composite
    # object created by a race would show up as an extra set entry
    class TestFlag(Flag):
        one = auto()
        two = auto()
        three = auto()
        four = auto()
        five = auto()
        six = auto()
        seven = auto()
        eight = auto()
        def __eq__(self, other):
            return self is other
        def __hash__(self):
            return hash(self._value_)
    # have multiple threads competing to complete the composite members
    seen = set()
    failed = False
    def cycle_enum():
        nonlocal failed
        try:
            for i in range(256):
                seen.add(TestFlag(i))
        except Exception:
            failed = True
    threads = [
        threading.Thread(target=cycle_enum)
        for _ in range(8)
    ]
    with support.start_threads(threads):
        pass
    # check that only 248 members were created
    # (256 distinct values total: 8 named + 248 composite/zero)
    self.assertFalse(
        failed,
        'at least one thread failed while creating composite members')
    self.assertEqual(256, len(seen), 'too many composite members created')
class TestIntFlag(unittest.TestCase):
    """Tests of the IntFlags."""

    # One bit per member.
    class Perm(IntFlag):
        X = 1 << 0
        W = 1 << 1
        R = 1 << 2

    # Includes a zero-valued member (RO) and AC == WO|RW (3 == 1|2).
    class Open(IntFlag):
        RO = 0
        WO = 1
        RW = 2
        AC = 3
        CE = 1<<19

    # Includes a zero-valued member and a composite alias PURPLE == RED|BLUE.
    class Color(IntFlag):
        BLACK = 0
        RED = 1
        GREEN = 2
        BLUE = 4
        PURPLE = RED|BLUE
def test_type(self):
    """IntFlag members are instances of their class and equal their int value."""
    Perm = self.Perm
    Open = self.Open
    for f in Perm:
        self.assertTrue(isinstance(f, Perm))
        self.assertEqual(f, f.value)
    # Composites keep the IntFlag type and compare equal to the combined int.
    self.assertTrue(isinstance(Perm.W | Perm.X, Perm))
    self.assertEqual(Perm.W | Perm.X, 3)
    for f in Open:
        self.assertTrue(isinstance(f, Open))
        self.assertEqual(f, f.value)
    self.assertTrue(isinstance(Open.WO | Open.RW, Open))
    self.assertEqual(Open.WO | Open.RW, 3)
def test_str(self):
    """str() of IntFlag members, composites, unnamed bits, and inverses."""
    Perm = self.Perm
    self.assertEqual(str(Perm.R), 'Perm.R')
    self.assertEqual(str(Perm.W), 'Perm.W')
    self.assertEqual(str(Perm.X), 'Perm.X')
    self.assertEqual(str(Perm.R | Perm.W), 'Perm.R|W')
    self.assertEqual(str(Perm.R | Perm.W | Perm.X), 'Perm.R|W|X')
    # Bits with no named member show up as their numeric value.
    self.assertEqual(str(Perm.R | 8), 'Perm.8|R')
    self.assertEqual(str(Perm(0)), 'Perm.0')
    self.assertEqual(str(Perm(8)), 'Perm.8')
    # Inversion of an IntFlag keeps the (negative) int value but displays
    # the named flags that remain set.
    self.assertEqual(str(~Perm.R), 'Perm.W|X')
    self.assertEqual(str(~Perm.W), 'Perm.R|X')
    self.assertEqual(str(~Perm.X), 'Perm.R|W')
    self.assertEqual(str(~(Perm.R | Perm.W)), 'Perm.X')
    self.assertEqual(str(~(Perm.R | Perm.W | Perm.X)), 'Perm.-8')
    self.assertEqual(str(~(Perm.R | 8)), 'Perm.W|X')
    self.assertEqual(str(Perm(~0)), 'Perm.R|W|X')
    self.assertEqual(str(Perm(~8)), 'Perm.R|W|X')
    Open = self.Open
    self.assertEqual(str(Open.RO), 'Open.RO')
    self.assertEqual(str(Open.WO), 'Open.WO')
    self.assertEqual(str(Open.AC), 'Open.AC')
    # RO is the zero member, so it never appears in a composite's name.
    self.assertEqual(str(Open.RO | Open.CE), 'Open.CE')
    self.assertEqual(str(Open.WO | Open.CE), 'Open.CE|WO')
    self.assertEqual(str(Open(4)), 'Open.4')
    self.assertEqual(str(~Open.RO), 'Open.CE|AC|RW|WO')
    self.assertEqual(str(~Open.WO), 'Open.CE|RW')
    self.assertEqual(str(~Open.AC), 'Open.CE')
    self.assertEqual(str(~(Open.RO | Open.CE)), 'Open.AC|RW|WO')
    self.assertEqual(str(~(Open.WO | Open.CE)), 'Open.RW')
    self.assertEqual(str(Open(~4)), 'Open.CE|AC|RW|WO')
def test_repr(self):
    """repr() mirrors test_str but includes the underlying int value."""
    Perm = self.Perm
    self.assertEqual(repr(Perm.R), '<Perm.R: 4>')
    self.assertEqual(repr(Perm.W), '<Perm.W: 2>')
    self.assertEqual(repr(Perm.X), '<Perm.X: 1>')
    self.assertEqual(repr(Perm.R | Perm.W), '<Perm.R|W: 6>')
    self.assertEqual(repr(Perm.R | Perm.W | Perm.X), '<Perm.R|W|X: 7>')
    self.assertEqual(repr(Perm.R | 8), '<Perm.8|R: 12>')
    self.assertEqual(repr(Perm(0)), '<Perm.0: 0>')
    self.assertEqual(repr(Perm(8)), '<Perm.8: 8>')
    # Inverted IntFlags carry a negative int value in the repr.
    self.assertEqual(repr(~Perm.R), '<Perm.W|X: -5>')
    self.assertEqual(repr(~Perm.W), '<Perm.R|X: -3>')
    self.assertEqual(repr(~Perm.X), '<Perm.R|W: -2>')
    self.assertEqual(repr(~(Perm.R | Perm.W)), '<Perm.X: -7>')
    self.assertEqual(repr(~(Perm.R | Perm.W | Perm.X)), '<Perm.-8: -8>')
    self.assertEqual(repr(~(Perm.R | 8)), '<Perm.W|X: -13>')
    self.assertEqual(repr(Perm(~0)), '<Perm.R|W|X: -1>')
    self.assertEqual(repr(Perm(~8)), '<Perm.R|W|X: -9>')
    Open = self.Open
    self.assertEqual(repr(Open.RO), '<Open.RO: 0>')
    self.assertEqual(repr(Open.WO), '<Open.WO: 1>')
    self.assertEqual(repr(Open.AC), '<Open.AC: 3>')
    self.assertEqual(repr(Open.RO | Open.CE), '<Open.CE: 524288>')
    self.assertEqual(repr(Open.WO | Open.CE), '<Open.CE|WO: 524289>')
    self.assertEqual(repr(Open(4)), '<Open.4: 4>')
    self.assertEqual(repr(~Open.RO), '<Open.CE|AC|RW|WO: -1>')
    self.assertEqual(repr(~Open.WO), '<Open.CE|RW: -2>')
    self.assertEqual(repr(~Open.AC), '<Open.CE: -4>')
    self.assertEqual(repr(~(Open.RO | Open.CE)), '<Open.AC|RW|WO: -524289>')
    self.assertEqual(repr(~(Open.WO | Open.CE)), '<Open.RW: -524290>')
    self.assertEqual(repr(Open(~4)), '<Open.CE|AC|RW|WO: -5>')
def test_or(self):
    """`|` works member|member and member|int, always yielding the IntFlag."""
    Perm = self.Perm
    for i in Perm:
        for j in Perm:
            self.assertEqual(i | j, i.value | j.value)
            self.assertEqual((i | j).value, i.value | j.value)
            self.assertIs(type(i | j), Perm)
        # Mixing with plain ints works in both operand orders.
        for j in range(8):
            self.assertEqual(i | j, i.value | j)
            self.assertEqual((i | j).value, i.value | j)
            self.assertIs(type(i | j), Perm)
            self.assertEqual(j | i, j | i.value)
            self.assertEqual((j | i).value, j | i.value)
            self.assertIs(type(j | i), Perm)
    # Identity cases: or-ing with self or zero returns the same member object.
    for i in Perm:
        self.assertIs(i | i, i)
        self.assertIs(i | 0, i)
        self.assertIs(0 | i, i)
    Open = self.Open
    self.assertIs(Open.RO | Open.CE, Open.CE)
def test_and(self):
    """`&` works on members, composites, and plain ints, yielding the IntFlag."""
    Perm = self.Perm
    RW = Perm.R | Perm.W
    RX = Perm.R | Perm.X
    WX = Perm.W | Perm.X
    RWX = Perm.R | Perm.W | Perm.X
    values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
    for i in values:
        for j in values:
            self.assertEqual(i & j, i.value & j.value, 'i is %r, j is %r' % (i, j))
            self.assertEqual((i & j).value, i.value & j.value, 'i is %r, j is %r' % (i, j))
            self.assertIs(type(i & j), Perm, 'i is %r, j is %r' % (i, j))
        # Mixing with plain ints works in both operand orders.
        for j in range(8):
            self.assertEqual(i & j, i.value & j)
            self.assertEqual((i & j).value, i.value & j)
            self.assertIs(type(i & j), Perm)
            self.assertEqual(j & i, j & i.value)
            self.assertEqual((j & i).value, j & i.value)
            self.assertIs(type(j & i), Perm)
    # Identity cases: and-ing with self or the full mask returns the member.
    for i in Perm:
        self.assertIs(i & i, i)
        self.assertIs(i & 7, i)
        self.assertIs(7 & i, i)
    Open = self.Open
    self.assertIs(Open.RO & Open.CE, Open.RO)
def test_xor(self):
    """`^` works on members and plain ints, yielding the IntFlag type."""
    Perm = self.Perm
    for i in Perm:
        for j in Perm:
            self.assertEqual(i ^ j, i.value ^ j.value)
            self.assertEqual((i ^ j).value, i.value ^ j.value)
            self.assertIs(type(i ^ j), Perm)
        # Mixing with plain ints works in both operand orders.
        for j in range(8):
            self.assertEqual(i ^ j, i.value ^ j)
            self.assertEqual((i ^ j).value, i.value ^ j)
            self.assertIs(type(i ^ j), Perm)
            self.assertEqual(j ^ i, j ^ i.value)
            self.assertEqual((j ^ i).value, j ^ i.value)
            self.assertIs(type(j ^ i), Perm)
    # xor with zero is the identity and returns the same member object.
    for i in Perm:
        self.assertIs(i ^ 0, i)
        self.assertIs(0 ^ i, i)
    Open = self.Open
    self.assertIs(Open.RO ^ Open.CE, Open.CE)
    self.assertIs(Open.CE ^ Open.CE, Open.RO)
def test_invert(self):
    """`~` matches int inversion in value and round-trips to the same member."""
    Perm = self.Perm
    RW = Perm.R | Perm.W
    RX = Perm.R | Perm.X
    WX = Perm.W | Perm.X
    RWX = Perm.R | Perm.W | Perm.X
    values = list(Perm) + [RW, RX, WX, RWX, Perm(0)]
    for i in values:
        self.assertEqual(~i, ~i.value)
        self.assertEqual((~i).value, ~i.value)
        self.assertIs(type(~i), Perm)
        # Double inversion restores the original value.
        self.assertEqual(~~i, i)
    for i in Perm:
        self.assertIs(~~i, i)
    Open = self.Open
    self.assertIs(Open.WO & ~Open.WO, Open.RO)
    self.assertIs((Open.WO|Open.CE) & ~Open.WO, Open.CE)
def test_programatic_function_string(self):
    """Functional API: IntFlag from a name string; members equal their ints."""
    Perm = IntFlag('Perm', 'R W X')
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 3, Perm)
    self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
    for i, n in enumerate('R W X'.split()):
        v = 1<<i
        e = Perm(v)
        self.assertEqual(e.value, v)
        self.assertEqual(type(e.value), int)
        # Unlike plain Flag, IntFlag members compare equal to ints.
        self.assertEqual(e, v)
        self.assertEqual(e.name, n)
        self.assertIn(e, Perm)
        self.assertIs(type(e), Perm)
def test_programatic_function_string_with_start(self):
    """Functional API: IntFlag with `start=8` gives values 8, 16, 32."""
    Perm = IntFlag('Perm', 'R W X', start=8)
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 3, Perm)
    self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
    for i, n in enumerate('R W X'.split()):
        v = 8<<i
        e = Perm(v)
        self.assertEqual(e.value, v)
        self.assertEqual(type(e.value), int)
        self.assertEqual(e, v)
        self.assertEqual(e.name, n)
        self.assertIn(e, Perm)
        self.assertIs(type(e), Perm)
def test_programatic_function_string_list(self):
    """Functional API: IntFlag from a list of names."""
    Perm = IntFlag('Perm', ['R', 'W', 'X'])
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 3, Perm)
    self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
    for i, n in enumerate('R W X'.split()):
        v = 1<<i
        e = Perm(v)
        self.assertEqual(e.value, v)
        self.assertEqual(type(e.value), int)
        self.assertEqual(e, v)
        self.assertEqual(e.name, n)
        self.assertIn(e, Perm)
        self.assertIs(type(e), Perm)
def test_programatic_function_iterable(self):
    """Functional API: IntFlag from (name, value) pairs with sparse bits."""
    Perm = IntFlag('Perm', (('R', 2), ('W', 8), ('X', 32)))
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 3, Perm)
    self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
    for i, n in enumerate('R W X'.split()):
        # 2, 8, 32 == 1 << 1, 1 << 3, 1 << 5
        v = 1<<(2*i+1)
        e = Perm(v)
        self.assertEqual(e.value, v)
        self.assertEqual(type(e.value), int)
        self.assertEqual(e, v)
        self.assertEqual(e.name, n)
        self.assertIn(e, Perm)
        self.assertIs(type(e), Perm)
def test_programatic_function_from_dict(self):
    """Functional API: IntFlag from a name->value mapping."""
    Perm = IntFlag('Perm', OrderedDict((('R', 2), ('W', 8), ('X', 32))))
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 3, Perm)
    self.assertEqual(lst, [Perm.R, Perm.W, Perm.X])
    for i, n in enumerate('R W X'.split()):
        # 2, 8, 32 == 1 << 1, 1 << 3, 1 << 5
        v = 1<<(2*i+1)
        e = Perm(v)
        self.assertEqual(e.value, v)
        self.assertEqual(type(e.value), int)
        self.assertEqual(e, v)
        self.assertEqual(e.name, n)
        self.assertIn(e, Perm)
        self.assertIs(type(e), Perm)
def test_programatic_function_from_empty_list(self):
    """Functional API: an empty name list yields an empty IntFlag/Enum."""
    Perm = enum.IntFlag('Perm', [])
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 0, Perm)
    Thing = enum.Enum('Thing', [])
    lst = list(Thing)
    self.assertEqual(len(lst), len(Thing))
    self.assertEqual(len(Thing), 0, Thing)
def test_programatic_function_from_empty_tuple(self):
    """Functional API: an empty tuple yields an empty IntFlag/Enum."""
    Perm = enum.IntFlag('Perm', ())
    lst = list(Perm)
    self.assertEqual(len(lst), len(Perm))
    self.assertEqual(len(Perm), 0, Perm)
    Thing = enum.Enum('Thing', ())
    # Bug fix: re-list the new enumeration instead of reusing the list
    # built from Perm above (mirrors test_programatic_function_from_empty_list).
    lst = list(Thing)
    self.assertEqual(len(lst), len(Thing))
    self.assertEqual(len(Thing), 0, Thing)
def test_contains(self):
    """`in` on an IntFlag class accepts only members; others raise TypeError."""
    Open = self.Open
    Color = self.Color
    self.assertTrue(Color.GREEN in Color)
    self.assertTrue(Open.RW in Open)
    # Members of one IntFlag class are not contained in another.
    self.assertFalse(Color.GREEN in Open)
    self.assertFalse(Open.RW in Color)
    # Strings and even plain ints are rejected, despite IntFlag being an int.
    with self.assertRaises(TypeError):
        'GREEN' in Color
    with self.assertRaises(TypeError):
        'RW' in Open
    with self.assertRaises(TypeError):
        2 in Color
    with self.assertRaises(TypeError):
        2 in Open
def test_member_contains(self):
    """`in` between IntFlag members tests bit containment; non-flags raise."""
    Perm = self.Perm
    R, W, X = Perm
    RW = R | W
    RX = R | X
    WX = W | X
    RWX = R | W | X
    self.assertTrue(R in RW)
    self.assertTrue(R in RX)
    self.assertTrue(R in RWX)
    self.assertTrue(W in RW)
    self.assertTrue(W in WX)
    self.assertTrue(W in RWX)
    self.assertTrue(X in RX)
    self.assertTrue(X in WX)
    self.assertTrue(X in RWX)
    self.assertFalse(R in WX)
    self.assertFalse(W in RX)
    self.assertFalse(X in RW)
    # Non-flag operands raise rather than returning False.
    with self.assertRaises(TypeError):
        self.assertFalse('test' in RW)
def test_bool(self):
    """An IntFlag member is truthy exactly when its int value is truthy."""
    # All Perm members carry a non-zero bit and are therefore truthy.
    for member in self.Perm:
        self.assertTrue(member)
    # Open.RO has value 0, so compare truthiness against the raw value.
    for member in self.Open:
        self.assertEqual(bool(member.value), bool(member))
def test_multiple_mixin(self):
    """Mixins add behavior to an IntFlag; MRO decides which __str__ wins."""
    class AllMixin:
        @classproperty
        def ALL(cls):
            # Compose every member into one value, then cache the result
            # on the class so later accesses skip the recomputation.
            members = list(cls)
            all_value = None
            if members:
                all_value = members[0]
                for member in members[1:]:
                    all_value |= member
            cls.ALL = all_value
            return all_value
    class StrMixin:
        def __str__(self):
            return self._name_.lower()
    class Color(AllMixin, IntFlag):
        RED = auto()
        GREEN = auto()
        BLUE = auto()
    self.assertEqual(Color.RED.value, 1)
    self.assertEqual(Color.GREEN.value, 2)
    self.assertEqual(Color.BLUE.value, 4)
    self.assertEqual(Color.ALL.value, 7)
    # Without StrMixin, IntFlag's own __str__ is used.
    self.assertEqual(str(Color.BLUE), 'Color.BLUE')
    class Color(AllMixin, StrMixin, IntFlag):
        RED = auto()
        GREEN = auto()
        BLUE = auto()
    self.assertEqual(Color.RED.value, 1)
    self.assertEqual(Color.GREEN.value, 2)
    self.assertEqual(Color.BLUE.value, 4)
    self.assertEqual(Color.ALL.value, 7)
    # StrMixin precedes IntFlag in the MRO, so its __str__ wins.
    self.assertEqual(str(Color.BLUE), 'blue')
    class Color(StrMixin, AllMixin, IntFlag):
        RED = auto()
        GREEN = auto()
        BLUE = auto()
    self.assertEqual(Color.RED.value, 1)
    self.assertEqual(Color.GREEN.value, 2)
    self.assertEqual(Color.BLUE.value, 4)
    self.assertEqual(Color.ALL.value, 7)
    # Mixin order between the two mixins does not change the outcome.
    self.assertEqual(str(Color.BLUE), 'blue')
@support.reap_threads
def test_unique_composite(self):
    """Concurrent creation of composite IntFlag members must not duplicate."""
    # override __eq__ to be identity only, so any duplicate composite
    # object created by a race would show up as an extra set entry
    class TestFlag(IntFlag):
        one = auto()
        two = auto()
        three = auto()
        four = auto()
        five = auto()
        six = auto()
        seven = auto()
        eight = auto()
        def __eq__(self, other):
            return self is other
        def __hash__(self):
            return hash(self._value_)
    # have multiple threads competing to complete the composite members
    seen = set()
    failed = False
    def cycle_enum():
        nonlocal failed
        try:
            for i in range(256):
                seen.add(TestFlag(i))
        except Exception:
            failed = True
    threads = [
        threading.Thread(target=cycle_enum)
        for _ in range(8)
    ]
    with support.start_threads(threads):
        pass
    # check that only 248 members were created
    # (256 distinct values total: 8 named + 248 composite/zero)
    self.assertFalse(
        failed,
        'at least one thread failed while creating composite members')
    self.assertEqual(256, len(seen), 'too many composite members created')
class TestEmptyAndNonLatinStrings(unittest.TestCase):
    """Member names: empty names are rejected; non-Latin identifiers work."""

    def test_empty_string(self):
        # An empty member name is not a valid identifier.
        with self.assertRaises(ValueError):
            empty_abc = Enum('empty_abc', ('', 'B', 'C'))

    def test_non_latin_character_string(self):
        # Greek alpha is a valid Python identifier and a valid member name.
        greek_abc = Enum('greek_abc', ('\u03B1', 'B', 'C'))
        item = getattr(greek_abc, '\u03B1')
        self.assertEqual(item.value, 1)

    def test_non_latin_number_string(self):
        # Hebrew aleph likewise works as a member name.
        hebrew_123 = Enum('hebrew_123', ('\u05D0', '2', '3'))
        item = getattr(hebrew_123, '\u05D0')
        self.assertEqual(item.value, 1)
class TestUnique(unittest.TestCase):
    """Tests for the @unique decorator's alias detection."""

    def test_unique_clean(self):
        # Distinct values of mixed types pass the uniqueness check.
        @unique
        class Clean(Enum):
            one = 1
            two = 'dos'
            tres = 4.0
        @unique
        class Cleaner(IntEnum):
            single = 1
            double = 2
            triple = 3

    def test_unique_dirty(self):
        # A single alias is reported as "alias -> canonical name".
        with self.assertRaisesRegex(ValueError, 'tres.*one'):
            @unique
            class Dirty(Enum):
                one = 1
                two = 'dos'
                tres = 1
        # Multiple aliases are all listed in the error message.
        with self.assertRaisesRegex(
                ValueError,
                'double.*single.*turkey.*triple',
                ):
            @unique
            class Dirtier(IntEnum):
                single = 1
                double = 1
                triple = 3
                turkey = 3

    def test_unique_with_name(self):
        # Members named 'name' or 'value' must not confuse @unique.
        @unique
        class Silly(Enum):
            one = 1
            two = 'dos'
            name = 3
        @unique
        class Sillier(IntEnum):
            single = 1
            name = 2
            triple = 3
            value = 4
# Expected pydoc output for TestStdLib.Color when enum's docstrings are
# present (i.e. Python not running with docstrings stripped).  The %s
# placeholder is filled with the current module's name.
expected_help_output_with_docs = """\
Help on class Color in module %s:
class Color(enum.Enum)
| Color(value, names=None, *, module=None, qualname=None, type=None, start=1)
|\x20\x20
| An enumeration.
|\x20\x20
| Method resolution order:
| Color
| enum.Enum
| builtins.object
|\x20\x20
| Data and other attributes defined here:
|\x20\x20
| blue = <Color.blue: 3>
|\x20\x20
| green = <Color.green: 2>
|\x20\x20
| red = <Color.red: 1>
|\x20\x20
| ----------------------------------------------------------------------
| Data descriptors inherited from enum.Enum:
|\x20\x20
| name
| The name of the Enum member.
|\x20\x20
| value
| The value of the Enum member.
|\x20\x20
| ----------------------------------------------------------------------
| Readonly properties inherited from enum.EnumMeta:
|\x20\x20
| __members__
| Returns a mapping of member name->value.
|\x20\x20\x20\x20\x20\x20
| This mapping lists all enum members, including aliases. Note that this
| is a read-only view of the internal mapping."""
# Expected pydoc output for TestStdLib.Color when docstrings are absent
# (e.g. the interpreter was started with -OO).  %s is the module name.
expected_help_output_without_docs = """\
Help on class Color in module %s:
class Color(enum.Enum)
| Color(value, names=None, *, module=None, qualname=None, type=None, start=1)
|\x20\x20
| Method resolution order:
| Color
| enum.Enum
| builtins.object
|\x20\x20
| Data and other attributes defined here:
|\x20\x20
| blue = <Color.blue: 3>
|\x20\x20
| green = <Color.green: 2>
|\x20\x20
| red = <Color.red: 1>
|\x20\x20
| ----------------------------------------------------------------------
| Data descriptors inherited from enum.Enum:
|\x20\x20
| name
|\x20\x20
| value
|\x20\x20
| ----------------------------------------------------------------------
| Data descriptors inherited from enum.EnumMeta:
|\x20\x20
| __members__"""
class TestStdLib(unittest.TestCase):
    """Check that pydoc and inspect understand Enum classes."""

    maxDiff = None

    # Sample enumeration used by all tests in this class.
    class Color(Enum):
        red = 1
        green = 2
        blue = 3

    def test_pydoc(self):
        """pydoc output for an Enum subclass matches the expected template."""
        # indirectly test __objclass__
        # Pick the template matching whether docstrings are available.
        if StrEnum.__doc__ is None:
            expected_text = expected_help_output_without_docs % __name__
        else:
            expected_text = expected_help_output_with_docs % __name__
        output = StringIO()
        helper = pydoc.Helper(output=output)
        helper(self.Color)
        result = output.getvalue().strip()
        self.assertEqual(result, expected_text)

    def test_inspect_getmembers(self):
        """inspect.getmembers sees members, dunders, and name/value descriptors."""
        values = dict((
                ('__class__', EnumMeta),
                ('__doc__', 'An enumeration.'),
                ('__members__', self.Color.__members__),
                ('__module__', __name__),
                ('blue', self.Color.blue),
                ('green', self.Color.green),
                ('name', Enum.__dict__['name']),
                ('red', self.Color.red),
                ('value', Enum.__dict__['value']),
                ))
        result = dict(inspect.getmembers(self.Color))
        self.assertEqual(values.keys(), result.keys())
        failed = False
        # Compare entry by entry so a failure prints every mismatch at once.
        for k in values.keys():
            if result[k] != values[k]:
                print()
                print('\n%s\n     key: %s\n  result: %s\nexpected: %s\n%s\n' %
                        ('=' * 75, k, result[k], values[k], '=' * 75), sep='')
                failed = True
        if failed:
            self.fail("result does not equal expected, see print above")

    def test_inspect_classify_class_attrs(self):
        """inspect.classify_class_attrs reports correct kinds/defining classes."""
        # indirectly test __objclass__
        from inspect import Attribute
        values = [
                Attribute(name='__class__', kind='data',
                    defining_class=object, object=EnumMeta),
                Attribute(name='__doc__', kind='data',
                    defining_class=self.Color, object='An enumeration.'),
                Attribute(name='__members__', kind='property',
                    defining_class=EnumMeta, object=EnumMeta.__members__),
                Attribute(name='__module__', kind='data',
                    defining_class=self.Color, object=__name__),
                Attribute(name='blue', kind='data',
                    defining_class=self.Color, object=self.Color.blue),
                Attribute(name='green', kind='data',
                    defining_class=self.Color, object=self.Color.green),
                Attribute(name='red', kind='data',
                    defining_class=self.Color, object=self.Color.red),
                Attribute(name='name', kind='data',
                    defining_class=Enum, object=Enum.__dict__['name']),
                Attribute(name='value', kind='data',
                    defining_class=Enum, object=Enum.__dict__['value']),
                ]
        # Sort both sides by attribute name so zip pairs them correctly.
        values.sort(key=lambda item: item.name)
        result = list(inspect.classify_class_attrs(self.Color))
        result.sort(key=lambda item: item.name)
        failed = False
        for v, r in zip(values, result):
            if r != v:
                print('\n%s\n%s\n%s\n%s\n' % ('=' * 75, r, v, '=' * 75), sep='')
                failed = True
        if failed:
            self.fail("result does not equal expected, see print above")
class MiscTestCase(unittest.TestCase):
    def test__all__(self):
        """enum.__all__ must list every public name (test.support check)."""
        support.check__all__(self, enum)
# These are unordered here on purpose to ensure that declaration order
# makes no difference.
# All six constants share the value 5 so that _convert_'s value->name
# reverse lookup has several candidates; the lexicographically first
# name (CONVERT_TEST_NAME_A) must win.
CONVERT_TEST_NAME_D = 5
CONVERT_TEST_NAME_C = 5
CONVERT_TEST_NAME_B = 5
CONVERT_TEST_NAME_A = 5  # This one should sort first.
CONVERT_TEST_NAME_E = 5
CONVERT_TEST_NAME_F = 5
class TestIntEnumConvert(unittest.TestCase):
    """Tests for IntEnum._convert_, which builds an enum from module globals."""

    def test_convert_value_lookup_priority(self):
        """Reverse lookup picks the lexicographically first aliased name."""
        test_type = enum.IntEnum._convert_(
                'UnittestConvert',
                # Module to scan: this test module, whether run directly
                # or via the test package.
                ('test.test_enum', '__main__')[__name__=='__main__'],
                filter=lambda x: x.startswith('CONVERT_TEST_'))
        # We don't want the reverse lookup value to vary when there are
        # multiple possible names for a given value.  It should always
        # report the first lexigraphical name in that case.
        self.assertEqual(test_type(5).name, 'CONVERT_TEST_NAME_A')

    def test_convert(self):
        """All filtered names are converted; the rest become aliases of A."""
        test_type = enum.IntEnum._convert_(
                'UnittestConvert',
                ('test.test_enum', '__main__')[__name__=='__main__'],
                filter=lambda x: x.startswith('CONVERT_TEST_'))
        # Ensure that test_type has all of the desired names and values.
        self.assertEqual(test_type.CONVERT_TEST_NAME_F,
                         test_type.CONVERT_TEST_NAME_A)
        self.assertEqual(test_type.CONVERT_TEST_NAME_B, 5)
        self.assertEqual(test_type.CONVERT_TEST_NAME_C, 5)
        self.assertEqual(test_type.CONVERT_TEST_NAME_D, 5)
        self.assertEqual(test_type.CONVERT_TEST_NAME_E, 5)
        # Ensure that test_type only picked up names matching the filter.
        self.assertEqual([name for name in dir(test_type)
                          if name[0:2] not in ('CO', '__')],
                         [], msg='Names other than CONVERT_TEST_* found.')

    @unittest.skipUnless(sys.version_info[:2] == (3, 8),
                         '_convert was deprecated in 3.8')
    def test_convert_warn(self):
        """The old spelling _convert warns with DeprecationWarning on 3.8."""
        with self.assertWarns(DeprecationWarning):
            enum.IntEnum._convert(
                'UnittestConvert',
                ('test.test_enum', '__main__')[__name__=='__main__'],
                filter=lambda x: x.startswith('CONVERT_TEST_'))

    @unittest.skipUnless(sys.version_info >= (3, 9),
                         '_convert was removed in 3.9')
    def test_convert_raise(self):
        """The old spelling _convert is gone (AttributeError) on 3.9+."""
        with self.assertRaises(AttributeError):
            enum.IntEnum._convert(
                'UnittestConvert',
                ('test.test_enum', '__main__')[__name__=='__main__'],
                filter=lambda x: x.startswith('CONVERT_TEST_'))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
batermj/algorithm-challenger
|
code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/test/test_enum.py
|
Python
|
apache-2.0
| 108,504
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PySgp4(PythonPackage):
    """Track earth satellite TLE orbits using up-to-date 2010 version of SGP4
    """
    # Upstream project page and the PyPI source tarball Spack downloads.
    homepage = "https://github.com/brandon-rhodes/python-sgp4"
    url = "https://pypi.io/packages/source/s/sgp4/sgp4-1.4.tar.gz"

    version('1.4', sha256='1fb3cdbc11981a9ff34a032169f83c1f4a2877d1b6c295aed044e1d890b73892')

    # Needed both to build and at run time; supports Python 2.6-2.8 and 3.3+.
    depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
|
iulian787/spack
|
var/spack/repos/builtin/packages/py-sgp4/package.py
|
Python
|
lgpl-2.1
| 635
|
# Copyright 2011 Viewfinder Inc. All Rights Reserved.
"""Tests for Job class.
"""
__author__ = 'marc@emailscrubbed.com (Marc Berhault)'
import time
from viewfinder.backend.base import constants
from viewfinder.backend.base.dotdict import DotDict
from viewfinder.backend.db.job import Job
from viewfinder.backend.db.lock import Lock
from viewfinder.backend.db.lock_resource_type import LockResourceType
from base_test import DBBaseTestCase
class JobTestCase(DBBaseTestCase):
    """Exercises Job locking and run-metric bookkeeping against the test DB."""

    def testLocking(self):
        """Test basic locking mechanism."""
        # Two Job objects with the same name contend for one lock.
        job1 = Job(self._client, 'test_job')
        self.assertTrue(self._RunAsync(job1.AcquireLock))
        job2 = Job(self._client, 'test_job')
        self.assertFalse(self._RunAsync(job2.AcquireLock))
        # Abandon job1 lock. We never do this on real jobs, so manually clear the lock.
        self._RunAsync(job1._lock.Abandon, self._client)
        job1._lock = None
        # Set detect_abandonment=False: failure.
        self.assertFalse(self._RunAsync(job2.AcquireLock, detect_abandonment=False))
        self.assertFalse(self._RunAsync(job2.AcquireLock, detect_abandonment=False))
        # Now allow abandoned lock acquisition.
        self.assertTrue(self._RunAsync(job2.AcquireLock))
        self.assertFalse(self._RunAsync(job1.AcquireLock))
        self._RunAsync(job2.ReleaseLock)
        # Job1 grabs the lock again.
        self.assertTrue(self._RunAsync(job1.AcquireLock))
        self._RunAsync(job1.ReleaseLock)

    def testMetrics(self):
        """Test fetching/writing metrics."""
        # Job being tested.
        job = Job(self._client, 'test_job')
        prev_runs = self._RunAsync(job.FindPreviousRuns)
        self.assertEqual(len(prev_runs), 0)
        # Unrelated job with a different name. Run entries should not show up under 'test_job'.
        other_job = Job(self._client, 'other_test_job')
        other_job.Start()
        self._RunAsync(other_job.RegisterRun, Job.STATUS_SUCCESS)
        other_job.Start()
        self._RunAsync(other_job.RegisterRun, Job.STATUS_FAILURE)
        # Calling RegisterRun without first calling Start fails because the start_time is not set.
        self.assertIsNone(job._start_time)
        self.assertRaises(AssertionError, self._RunAsync, job.RegisterRun, Job.STATUS_SUCCESS)
        job.Start()
        self.assertIsNotNone(job._start_time)
        # Overwrite it for easier testing.
        # Backdate the start to just over one week ago so the default
        # "past week" search window excludes this run.
        start_time = job._start_time = int(time.time() - (constants.SECONDS_PER_WEEK + constants.SECONDS_PER_HOUR))
        # Write run summary with extra stats.
        stats = DotDict()
        stats['foo.bar'] = 5
        stats['baz'] = 'test'
        self._RunAsync(job.RegisterRun, Job.STATUS_SUCCESS, stats=stats, failure_msg='foo')
        # start_time is reset to prevent multiple calls to RegisterRun.
        self.assertIsNone(job._start_time)
        self.assertRaises(AssertionError, self._RunAsync, job.RegisterRun, Job.STATUS_SUCCESS)
        end_time = int(time.time())
        # Default search is "runs started in the past week".
        prev_runs = self._RunAsync(job.FindPreviousRuns)
        self.assertEqual(len(prev_runs), 0)
        # Default search is for successful runs.
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time - 10))
        self.assertEqual(len(prev_runs), 1)
        self.assertEqual(prev_runs[0]['start_time'], start_time)
        self.assertAlmostEqual(prev_runs[0]['end_time'], end_time, delta=10)
        self.assertEqual(prev_runs[0]['status'], Job.STATUS_SUCCESS)
        self.assertEqual(prev_runs[0]['stats.foo.bar'], 5)
        self.assertEqual(prev_runs[0]['stats.baz'], 'test')
        # failure_msg does nothing when status is SUCCESS.
        self.assertTrue('failure_msg' not in prev_runs[0])
        # Search for failed runs.
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time - 10), status=Job.STATUS_FAILURE)
        self.assertEqual(len(prev_runs), 0)
        # Create a failed job summary.
        job.Start()
        start_time2 = job._start_time = int(time.time() - constants.SECONDS_PER_HOUR)
        self._RunAsync(job.RegisterRun, Job.STATUS_FAILURE, failure_msg='stack trace')
        # Find previous runs using a variety of filters.
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time - 10), status=Job.STATUS_SUCCESS)
        self.assertEqual(len(prev_runs), 1)
        self.assertEqual(prev_runs[0]['start_time'], start_time)
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time - 10), status=Job.STATUS_FAILURE)
        self.assertEqual(len(prev_runs), 1)
        self.assertEqual(prev_runs[0]['status'], Job.STATUS_FAILURE)
        self.assertEqual(prev_runs[0]['failure_msg'], 'stack trace')
        self.assertEqual(prev_runs[0]['start_time'], start_time2)
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time - 10))
        self.assertEqual(len(prev_runs), 2)
        self.assertEqual(prev_runs[0]['start_time'], start_time)
        self.assertEqual(prev_runs[1]['start_time'], start_time2)
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time2 - 10))
        self.assertEqual(len(prev_runs), 1)
        self.assertEqual(prev_runs[0]['start_time'], start_time2)
        # limit=1 returns the most recent run only.
        prev_runs = self._RunAsync(job.FindPreviousRuns, start_timestamp=(start_time - 10), limit=1)
        self.assertEqual(len(prev_runs), 1)
        self.assertEqual(prev_runs[0]['start_time'], start_time2)
        # Find last successful run with optional payload key/value.
        prev_success = self._RunAsync(job.FindLastSuccess, start_timestamp=(start_time - 10))
        self.assertIsNotNone(prev_success)
        self.assertEqual(prev_success['stats.foo.bar'], 5)
        prev_success = self._RunAsync(job.FindLastSuccess, start_timestamp=(start_time - 10), with_payload_key='stats.baz')
        self.assertIsNotNone(prev_success)
        self.assertEqual(prev_success['stats.foo.bar'], 5)
        prev_success = self._RunAsync(job.FindLastSuccess, start_timestamp=(start_time - 10), with_payload_key='stats.bar')
        self.assertIsNone(prev_success)
        prev_success = self._RunAsync(job.FindLastSuccess, start_timestamp=(start_time - 10),
                                      with_payload_key='stats.baz', with_payload_value='test')
        self.assertIsNotNone(prev_success)
        self.assertEqual(prev_success['stats.foo.bar'], 5)
        prev_success = self._RunAsync(job.FindLastSuccess, start_timestamp=(start_time - 10),
                                      with_payload_key='stats.baz', with_payload_value='test2')
        self.assertIsNone(prev_success)
|
0359xiaodong/viewfinder
|
backend/db/test/job_test.py
|
Python
|
apache-2.0
| 6,345
|
# ProgressReport.py
# Progress Report For Zaid
#This is an all encompassing program that does everything at once, hopefully placing all
#of the BAMS query results into a single CSV file
#doesn't run properly unless the path is accessed first, interactive python is activated,
#and the code is pasted into terminal..
#Only run the below persist section once:
#Persist Begin
# NOTE(review): this section is Python 2 only (StringIO import; print-as-statement
# later in the file) and depends on ../Data/*.zip existing relative to the CWD.
#For Parsing
import rdflib
from rdflib import plugin
#for getting the length of the files
import os
#for working with tempfiles
import os.path as op
import tempfile
#For Unzipping
import zipfile
from StringIO import StringIO
# Register the rdfextras SPARQL processor/result plugins so g.query() works.
plugin.register(
    'sparql', rdflib.query.Processor,
    'rdfextras.sparql.processor', 'Processor')
plugin.register(
    'sparql', rdflib.query.Result,
    'rdfextras.sparql.query', 'SPARQLQueryResult')
zipdata = StringIO()
# open the file using a relative path
#r = open("../Data/BAMS1.zip")
# adding the BAMS Thesaurus instead of the more limited set of data:
#r = open("../Data/bams_thesaurus_2013-09-24_17-12-40.xml.zip")
# Fixed RDF
# NOTE(review): a zip archive is binary data; this should presumably be
# open(..., "rb") -- text mode happens to work on POSIX Python 2 but would
# corrupt the stream on Windows. Confirm before relying on it elsewhere.
r = open("../Data/bams_thesaurus_2013-10-06_14-58-56.xml.zip")
#ADDITIONAL CONTENT
#r = open("../Data/bams_ontology_2013-10-16_20-34-52.xml.zip")
# zipdata is a buffer holding the contents of the zip file in memory
zipdata.write(r.read())
print("~40 seconds for zip to open...")
#myzipfile opens the contents of the zip file as an object that knows how to unzip
myzipfile = zipfile.ZipFile(zipdata)
#grab the contents out of myzipfile by name
#foofile = myzipfile.open('bams_ontology_2013-07-10_03-20-00.xml')
#changing the foofile to be the file we upen above^^^^^ in r = open()....etc.
#foofile = myzipfile.open('bams_thesaurus_2013-09-24_17-12-40.xml')
# Fixed RDF
foofile = myzipfile.open('bams_thesaurus_2013-10-06_14-58-56.xml')
#ADDITIONAL CONTENT
#foofile = myzipfile.open('bams_ontology_2013-10-16_20-34-52.xml')
print("loading up the BAMS file in memory...")
#Get a Graph object using a Sleepycat persistent store
g = rdflib.Graph('Sleepycat',identifier='BAMS')
# first time create the store
# put the store in a temp directory so it doesn't get confused with stuff we should commit
tempStore = op.join( tempfile.gettempdir(), 'myRDF_BAMS_Store')
g.open(tempStore, create = True)
#pull in the BAMS RDF document, parse, and store.
#result = g.parse(file=myzipfile.open('bams_ontology_2013-07-10_03-20-00.xml'), format="application/rdf+xml")
#do the same thing but with the BAMS thesaurus file
#result = g.parse(file=myzipfile.open('bams_thesaurus_2013-09-24_17-12-40.xml'), format="application/rdf+xml")
# Fixed RDF
# NOTE(review): `foofile` opened above is never actually parsed -- the parse
# call below opens the archive member a second time, and foofile.close()
# afterwards closes the unused handle. Presumably harmless; verify.
result = g.parse(file=myzipfile.open('bams_thesaurus_2013-10-06_14-58-56.xml'), format="application/rdf+xml")
#ADDITIONAL CONTENT
#result = g.parse(file=myzipfile.open('bams_ontology_2013-10-16_20-34-52.xml'), format="application/rdf+xml")
foofile.close()
# when done!
g.close()
print("Graph stored to disk")
#WORKS PERFECTLY
#Persist End
#########################################################################################
# Query section: reopen the persisted Sleepycat store created above and dump
# SPARQL query results to CSV files.
#For Parsing
import rdflib
from rdflib import plugin
#for getting the length of the files
import os
#for working with tempfiles
import os.path as op
import tempfile
#for csv output
import csv
plugin.register(
    'sparql', rdflib.query.Processor,
    'rdfextras.sparql.processor', 'Processor')
plugin.register(
    'sparql', rdflib.query.Result,
    'rdfextras.sparql.query', 'SPARQLQueryResult')
#Get a Graph object
g = rdflib.Graph('Sleepycat',identifier='BAMS')
print("loading up the BAMS file in memory...")
# assumes myRDF_BAMS_Store has been created
tempStore = op.join( tempfile.gettempdir(), 'myRDF_BAMS_Store')
g.open(tempStore)
print("going to get results...")
print("The graph has " + str(len(g)) + " items in it")
#BAMS Thesaurus content has 3797 items in it
#additional BAMS content (graph) has 167178 items in it
# CHOOSE ONE OF THE FOLLOWING QUERIES
#########################################################################################
#BASAL GANGLIA OF TELENCEPHALON QUERY:
# NOTE(review): this qres is immediately overwritten by the "Basal ganglia"
# query below -- as written, both queries always run and only the second
# result survives. Comment one of them out instead.
qres = g.query(
    """PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
    SELECT ?subject ?predicate ?object
    WHERE {
    ?subject bamsProp:entry "Basal ganglia of telencephalon" .
    ?subject ?predicate ?object
    }""")
#########################################################################################
#// Basal Ganglia Query:
#// Good For Testing Purposes
#BASAL GANGLIA QUERY:
qres = g.query(
    """PREFIX bamsProp: <http://brancusi1.usc.edu/RDF/>
    SELECT ?subject ?predicate ?object
    WHERE {
    ?subject bamsProp:entry "Basal ganglia" .
    ?subject ?predicate ?object
    }""")
for r in qres.result:
    print str(r[0]), str(r[1]), str(r[2])
# Open/Write CSV file
# (Copy) Experimental -- best working yet
#########################################################################################
# NOTE(review): the loop below reopens Progress_Report.csv in "wb" mode on
# EVERY iteration and rewrites all rows each time, clobbering the header that
# the with-block just wrote. One writer opened once would suffice; the
# repeatedly-opened file handles are also never closed. Confirm intent.
with open('Progress_Report.csv', 'wb') as f:
    BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
    w = csv.DictWriter(f, BAMS_Dict.keys())
    w.writeheader()
    w.writerow(BAMS_Dict)
for r in qres.result:
    c = csv.writer(open("Progress_Report.csv","wb"))
    c.writerows(qres.result)
#########################################################################################
# CSV File Generated Containing BAMS Data From Queries
#########################################################################################
# NOTE(review): same reopen-per-iteration pattern as above.
for r in qres.result:
    #print str(r[0]), str(r[1]), str(r[2])
    #print str(r[0][0]) #gives the first position in the first tripple "h" for the url
    #c = csv.writer(open("BAMS_Thesaurus_Data4Upload.csv","wb"))
    c = csv.writer(open("BAMS_Formatted_Data.csv","wb"))
    c.writerows(qres.result)
# skip a row
# open the file
# allow program to enter loop and continue to open and insert data into the file
# Experimental -- best working yet
#########################################################################################
with open('BAMS_Formatted_Data.csv', 'wb') as f:
    BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
    w = csv.DictWriter(f, BAMS_Dict.keys())
    w.writeheader()
    w.writerow(BAMS_Dict)
for r in qres.result:
    c = csv.writer(open("BAMS_Formatted_Data.csv","wb"))
    c.writerows(qres.result)
#w.writerows(qres.result)
#########################################################################################
#########################################################################################
#with open('mycsvfileV1.csv', 'wb') as f: # Just use 'w' mode in 3.x
with open('BAMS_Formatted_Data.csv', 'wb') as f: # Just use 'w' mode in 3.x
    #First Entire Triple, Second Entire Triple, Third Entire Triple.....
    #BAMS_Dict = {"Subject": qres.result[0], "Predicate": qres.result[1], "Object": qres.result[2]}
    #Subject Of First Triple, Predicate Of First Triple, Object Of First Triple.....
    BAMS_Dict = {"Subject": qres.result[0][0], "Predicate": qres.result[0][1], "Object": qres.result[0][2]}
    w = csv.DictWriter(f, BAMS_Dict.keys())
    w.writeheader()
    w.writerow(BAMS_Dict)
#Check To See If A DictWriter Library Of Some Sort Is Required For Access To These Methods
#for row in BAMS_DICT:
    #out_f.write("%s%s" %(delimiter.join([row[name] for name in f]), lineterminator))
#Left off with this vvvvvvvvvv
#DictWriter.writerows(...)
|
rsoscia/BAMS-to-NeuroLex
|
src/ProgressReport.py
|
Python
|
mit
| 7,486
|
import re, random, hexchat
from subprocess import Popen, PIPE
# HexChat plugin metadata (read by the plugin loader).
__module_name__ = 'Fake CTCP'
__module_version__ = '0.1'
__module_description__ = 'Fakes unessential CTCP requests: VERSION PING TIME'
# Deliberately bogus VERSION reply -- reveals nothing about the real client/OS.
FAKE_VERSION = 'pinoyChat v1.3.3.4 - Windows XP SP2,'\
               ' @400MHz Celeron Mendocino, Administrator:password'
def debug(msg):
    """Echo *msg* to the active HexChat buffer with a DEBUG: prefix."""
    line = 'DEBUG: {}'.format(msg)
    hexchat.prnt(line)
def get_mangled_date():
    """Build a fake-but-plausible CTCP TIME reply.

    Uses the real day/month (via the system ``date`` command) combined with a
    random time of day, so the reply never leaks the actual local time.
    """
    raw = Popen(['date', '+"%a %b %d"'], stdout=PIPE).communicate()[0].rstrip()
    if isinstance(raw, bytes):
        # Popen yields bytes on Python 3; decode before slicing/formatting.
        raw = raw.decode('utf-8', 'replace')
    date_s = raw[1:-1]  # strip the literal double quotes from the +"..." format
    # BUG FIX: randint bounds are inclusive, so the old (00, 24)/(00, 60)
    # ranges could emit impossible times such as 24:60:60. Valid ranges are
    # hour 0-23 and minute/second 0-59. (Also renamed `min`, which shadowed
    # the builtin.)
    hour = random.randint(0, 23)
    minute = random.randint(0, 59)
    second = random.randint(0, 59)
    # Same unpadded "H:M:S" layout as before (e.g. "Mon Jan 01 3:5:9").
    return '{} {}:{}:{}'.format(date_s, hour, minute, second)
def extract_sender(word):
    """Pull the sender's nick out of an IRC prefix like ':nick!user@host'.

    ``word`` is HexChat's tokenized server line; the prefix is word[0].
    Returns the nick, or None when the prefix does not match.
    """
    match = re.search(r'^:(.+?)!', word[0])
    if match is None:
        return None
    return match.group(1)
def ctcp_reply(nick, cmd, msg):
    """Send a CTCP reply (NCTCP) for request *cmd* back to *nick*."""
    reply = 'nctcp {} {} {}'.format(nick, cmd, msg)
    hexchat.command(reply)
def ctcp_callback(word, word_eol, userdata):
    """Intercept PRIVMSGs and answer CTCP VERSION/TIME/PING with fakes.

    ``word``/``word_eol`` are HexChat's tokenized and end-of-line views of the
    raw server line. Returns hexchat.EAT_ALL for CTCPs addressed to us (so
    the real client never answers) and hexchat.EAT_NONE otherwise.
    """
    # Grab the PRIVMSG IRC command
    recv_cmd = word_eol[0]
    sending_nick = extract_sender(word)  # Grab sender of cmd
    # Get the start of the PRIVMSG and copy till end into ..frag
    idx = recv_cmd.index('PRIVMSG')
    nic_cmd_frag = recv_cmd[idx:]
    # Extract the nick and cmd. If nick is me, then handle dangerous
    # cmds
    try:
        nick, cmd = nic_cmd_frag.split(':', 1)
    except ValueError:
        # Narrowed from a bare `except:` -- a missing ':' separator (failed
        # 2-tuple unpack) is the only expected failure here.
        debug("ERROR freenode_ctcp.py! PRIVMSG - problem with :")
        debug(word[0])
        debug(word_eol[0])
        # BUG FIX: was `return EAT_ALL`, a NameError at runtime -- the
        # constant lives on the hexchat module.
        return hexchat.EAT_ALL
    # Obtain current nickname from hexchat cfg
    mynick = hexchat.get_info('nick')
    if mynick in nick:
        if 'VERSION' in cmd:
            ctcp_reply(sending_nick, 'VERSION', FAKE_VERSION)
            debug(word_eol)
            return hexchat.EAT_ALL
        elif 'TIME' in cmd:
            ctcp_reply(sending_nick, 'TIME', get_mangled_date())
            debug(word_eol)
            return hexchat.EAT_ALL
        elif 'PING' in cmd:
            ctcp_reply(sending_nick, 'PING', 10)
            debug(word_eol)
            return hexchat.EAT_ALL
        else:
            # Unrecognized CTCP aimed at us: swallow it silently.
            debug(word_eol)
            return hexchat.EAT_ALL
    return hexchat.EAT_NONE
#-------------------------------------------------------------
# Register the callback for every incoming PRIVMSG (CTCP requests arrive as
# PRIVMSGs wrapped in \x01 markers).
hexchat.prnt('CTCP script loaded')
hexchat.hook_server('PRIVMSG', ctcp_callback)
|
Veek/Python
|
IRC/Hexchat/fake_ctcp.py
|
Python
|
mit
| 2,339
|
import chute
import random
# Pool of interchangeable server resources that customers will request.
NUM_SERVERS = 4
SERVERS = ['server %d' % i for i in range(NUM_SERVERS)]
@chute.process(chute.dist.exponential(.5))
class Customer(object):
    # A chute simulation process: customers arrive with exponential(.5)
    # interarrival times; each instance's __call__ generator is its lifecycle.
    ACTIVE = set()
    def __call__(self):
        # Track existing customers so we can randomly use them as resources.
        self.ACTIVE.add(self)
        if random.random() < .1:
            # Quick path (probability .1): one server, one hold, done.
            # NOTE(review): the original comment called this the "most
            # customers" case, but the .1 threshold makes it the RARE branch.
            # Confirm whether the probability or the comment was intended.
            yield chute.request, SERVERS
            yield chute.hold, chute.dist.exponential(.75)
        else:
            # A particularly difficult customer will do all of the following:
            # - Use the time of 2 servers.
            # - Ask for the manager.
            # - Release a server and the manager, but involve a customer.
            yield chute.request, SERVERS, SERVERS
            yield chute.hold, chute.dist.exponential(.5)
            yield chute.request, 'manager'
            yield chute.hold, chute.dist.exponential(.75)
            yield chute.release, SERVERS, 'manager'
            # If there are any other customers around, get their time too.
            others = self.ACTIVE.difference(set([self]))
            if others:
                yield chute.request, list(others)
                yield chute.hold, chute.dist.exponential(.65)
        # Release whatever is still held and leave the system.
        yield chute.release
        self.ACTIVE.remove(self)
|
ryanjoneil/chute
|
examples/mmkrazy.py
|
Python
|
bsd-2-clause
| 1,355
|
#!/usr/bin/env python
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
import scipy.linalg
from pyscf import gto
from pyscf import scf
from pyscf import df
from pyscf import ao2mo
from pyscf import mcscf
# Module-level fixtures shared by all tests below: converged RHF solutions
# for N2 without (mol/m) and with (molsym/msym) point-group symmetry.
# max_memory=1 deliberately starves memory to exercise the out-of-core /
# density-fitting code paths. b is the N-N separation (gto.M default units;
# presumably Angstrom -- confirm against pyscf docs).
b = 1.4
mol = gto.M(
    verbose = 5,
    output = '/dev/null',  # discard the verbose SCF log
    atom = [
        ['N',( 0.000000, 0.000000, -b/2)],
        ['N',( 0.000000, 0.000000, b/2)], ],
    basis = {'N': 'ccpvdz', },
    max_memory = 1,
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
molsym = gto.M(
    verbose = 5,
    output = '/dev/null',
    atom = [
        ['N',( 0.000000, 0.000000, -b/2)],
        ['N',( 0.000000, 0.000000, b/2)], ],
    basis = {'N': 'ccpvdz', },
    max_memory = 1,
    symmetry = True,
)
msym = scf.RHF(molsym)
msym.conv_tol = 1e-9
msym.scf()
def tearDownModule():
    """Close the /dev/null output streams and release the module fixtures."""
    global mol, molsym, m, msym
    for molecule in (mol, molsym):
        molecule.stdout.close()
    del mol, molsym, m, msym
class KnownValues(unittest.TestCase):
    """Regression tests pinning CASSCF/CASCI energies for N2 with density
    fitting (auxbasis 'weigend'), with and without point-group symmetry,
    plus constructor-dispatch checks for the DF-aware mcscf factories.
    """
    def test_mc1step_4o4e(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(m, 4, 4), auxbasis='weigend')
        emc = mc.mc1step()[0]
        self.assertAlmostEqual(emc, -108.913786407955, 7)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.7015375913946591, 4)
    def test_mc2step_4o4e(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(m, 4, 4), auxbasis='weigend')
        emc = mc.mc2step()[0]
        self.assertAlmostEqual(emc, -108.913786407955, 7)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.7015375913946591, 4)
    def test_mc1step_4o4e_df(self):
        mc = mcscf.DFCASSCF(m, 4, 4, auxbasis='weigend')
        emc = mc.mc1step()[0]
        self.assertAlmostEqual(emc, -108.9105231091045, 7)
    def test_mc2step_4o4e_df(self):
        mc = mcscf.density_fit(mcscf.CASSCF(m, 4, 4), auxbasis='weigend')
        emc = mc.mc2step()[0]
        self.assertAlmostEqual(emc, -108.91052310869014, 7)
    def test_mc1step_6o6e_high_cost(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(m, 6, 6), auxbasis='weigend')
        emc = mc.mc1step()[0]
        self.assertAlmostEqual(emc, -108.980105451388, 7)
    def test_mc2step_6o6e_high_cost(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(m, 6, 6), auxbasis='weigend')
        emc = mc.mc2step()[0]
        self.assertAlmostEqual(emc, -108.980105451388, 7)
    def test_mc1step_symm_4o4e(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(msym, 4, 4), auxbasis='weigend')
        emc = mc.mc1step()[0]
        self.assertAlmostEqual(emc, -108.913786407955, 7)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.7015375913946591, 4)
    def test_mc2step_symm_4o4e(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(msym, 4, 4), auxbasis='weigend')
        emc = mc.mc2step()[0]
        self.assertAlmostEqual(emc, -108.913786407955, 7)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.7015375913946591, 4)
    def test_mc1step_symm_6o6e(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(msym, 6, 6), auxbasis='weigend')
        emc = mc.mc1step()[0]
        self.assertAlmostEqual(emc, -108.980105451388, 7)
    def test_mc2step_symm_6o6e(self):
        mc = mcscf.approx_hessian(mcscf.CASSCF(msym, 6, 6), auxbasis='weigend')
        emc = mc.mc2step()[0]
        self.assertAlmostEqual(emc, -108.980105451388, 7)
    def test_casci_4o4e(self):
        mc = mcscf.CASCI(m.density_fit('weigend'), 4, 4)
        emc = mc.casci()[0]
        self.assertAlmostEqual(emc, -108.88669369639578, 7)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.6910276344981119, 4)
    def test_casci_symm_4o4e(self):
        mc = mcscf.CASCI(msym.density_fit('weigend'), 4, 4)
        emc = mc.casci()[0]
        self.assertAlmostEqual(emc, -108.88669369639578, 7)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.6910276344981119, 4)
    def test_casci_4o4e_1(self):
        mc = mcscf.DFCASCI(m.density_fit('weigend'), 4, 4)
        emc = mc.casci()[0]
        self.assertAlmostEqual(emc, -108.88669369639578, 7)
    def test_casci_symm_4o4e_1(self):
        mc = mcscf.DFCASCI(msym.density_fit('weigend'), 4, 4)
        emc = mc.casci()[0]
        self.assertAlmostEqual(emc, -108.88669369639578, 7)
    def test_casci_from_uhf(self):
        mf = scf.UHF(mol).run()
        mc = mcscf.CASCI(mf.density_fit('weigend'), 4, 4)
        emc = mc.casci()[0]
        self.assertAlmostEqual(emc, -108.88669369639578, 6)
        self.assertAlmostEqual(numpy.linalg.norm(mc.analyze()),
                               2.6910275883606078, 4)
    def test_casci_from_uhf1(self):
        mf = scf.UHF(mol)
        mf.scf()
        mc = mcscf.approx_hessian(mcscf.CASSCF(mf, 4, 4))
        emc = mc.mc1step()[0]
        self.assertAlmostEqual(emc, -108.913786407955, 7)
        emc = mc.mc2step()[0]
        self.assertAlmostEqual(emc, -108.913786407955, 7)
    def test_df_ao2mo(self):
        # Reconstruct the 4-index ERIs from the 3-index DF tensor and check
        # the active-space integral blocks produced by mc.ao2mo agree.
        mf = scf.density_fit(msym, auxbasis='weigend')
        mf.max_memory = 100
        mf.kernel()
        mc = mcscf.DFCASSCF(mf, 4, 4)
        with df.load(mf._cderi) as feri:
            cderi = numpy.asarray(feri)
        eri0 = numpy.dot(cderi.T, cderi)
        nmo = mc.mo_coeff.shape[1]
        ncore = mc.ncore
        nocc = ncore + mc.ncas
        eri0 = ao2mo.restore(1, ao2mo.kernel(eri0, mc.mo_coeff), nmo)
        eris = mc.ao2mo(mc.mo_coeff)
        self.assertTrue(numpy.allclose(eri0[:,:,ncore:nocc,ncore:nocc], eris.ppaa))
        self.assertTrue(numpy.allclose(eri0[:,ncore:nocc,:,ncore:nocc], eris.papa))
    def test_assign_cderi(self):
        # A hand-built Cholesky-like factor assigned to mf._cderi must be
        # accepted in place of an auxbasis-derived one.
        nao = molsym.nao_nr()
        w, u = scipy.linalg.eigh(mol.intor('int2e_sph', aosym='s4'))
        idx = w > 1e-9
        mf = scf.density_fit(scf.RHF(molsym))
        mf._cderi = (u[:,idx] * numpy.sqrt(w[idx])).T.copy()
        mf.kernel()
        mc = mcscf.DFCASSCF(mf, 6, 6)
        mc.kernel()
        self.assertAlmostEqual(mc.e_tot, -108.98010545803884, 7)
    def test_init(self):
        # Verify that the CASCI/CASSCF factory functions dispatch to the
        # DF-aware classes exactly when the *outermost* mean-field wrapper
        # is density-fitted.
        from pyscf.mcscf import df
        mf = scf.RHF(mol)
        self.assertTrue(isinstance(mcscf.CASCI(mf, 2, 2), mcscf.casci.CASCI))
        self.assertTrue(isinstance(mcscf.CASCI(mf.density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASCI(mf.newton(), 2, 2), mcscf.casci.CASCI))
        self.assertTrue(isinstance(mcscf.CASCI(mf.density_fit().newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASCI(mf.newton().density_fit(), 2, 2), mcscf.casci.CASCI))
        self.assertTrue(isinstance(mcscf.CASCI(mf.density_fit().newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(mf, 2, 2), mcscf.mc1step.CASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(mf.density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(mf.newton(), 2, 2), mcscf.mc1step.CASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(mf.density_fit().newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(mf.newton().density_fit(), 2, 2), mcscf.mc1step.CASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(mf.density_fit().newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASCI(mf, 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASCI(mf.density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASCI(mf.newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASCI(mf.density_fit().newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASCI(mf.newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASCI(mf.density_fit().newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASSCF(mf, 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASSCF(mf.density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASSCF(mf.newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASSCF(mf.density_fit().newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASSCF(mf.newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.DFCASSCF(mf.density_fit().newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASCI(msym, 2, 2), mcscf.casci_symm.CASCI))
        self.assertTrue(isinstance(mcscf.CASCI(msym.density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASCI(msym.newton(), 2, 2), mcscf.casci_symm.CASCI))
        self.assertTrue(isinstance(mcscf.CASCI(msym.density_fit().newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASCI(msym.newton().density_fit(), 2, 2), mcscf.casci_symm.CASCI))
        self.assertTrue(isinstance(mcscf.CASCI(msym.density_fit().newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(msym, 2, 2), mcscf.mc1step_symm.CASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(msym.density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(msym.newton(), 2, 2), mcscf.mc1step_symm.CASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(msym.density_fit().newton(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(msym.newton().density_fit(), 2, 2), mcscf.mc1step_symm.CASSCF))
        self.assertTrue(isinstance(mcscf.CASSCF(msym.density_fit().newton().density_fit(), 2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(msym.CASCI(2, 2), mcscf.casci_symm.CASCI))
        self.assertTrue(isinstance(msym.density_fit().CASCI(2, 2), df._DFCASCI))
        self.assertTrue(isinstance(msym.density_fit().CASCI(2, 2), mcscf.casci_symm.CASCI))
        self.assertTrue(isinstance(msym.CASSCF(2, 2), mcscf.mc1step_symm.CASSCF))
        self.assertTrue(isinstance(msym.density_fit().CASSCF(2, 2), df._DFCASSCF))
        self.assertTrue(isinstance(msym.density_fit().CASSCF(2, 2), mcscf.mc1step_symm.CASSCF))
if __name__ == "__main__":
    # Run the full density-fitting N2 regression suite when invoked directly.
    print("Full Tests for density fitting N2")
    unittest.main()
|
sunqm/pyscf
|
pyscf/mcscf/test/test_n2_df.py
|
Python
|
apache-2.0
| 10,791
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================
"""TensorFlow API compatibility tests.
This test ensures all changes to the public API of TensorFlow are intended.
If this test fails, it means a change has been made to the public API. Backwards
incompatible changes are not allowed. You can run the test with
"--update_goldens" flag set to "True" to update goldens when making changes to
the public TF python API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import re
import sys
import unittest
import tensorflow as tf
from tensorflow._api import v2 as tf_v2
from google.protobuf import message
from google.protobuf import text_format
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.tools.api.lib import api_objects_pb2
from tensorflow.tools.api.lib import python_object_to_proto_visitor
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
# FLAGS defined at the bottom:
FLAGS = None
# DEFINE_boolean, update_goldens, default False:
_UPDATE_GOLDENS_HELP = """
Update stored golden files if API is updated. WARNING: All API changes
have to be authorized by TensorFlow leads.
"""
# DEFINE_boolean, verbose_diffs, default True:
_VERBOSE_DIFFS_HELP = """
If set to true, print line by line diffs on all libraries. If set to
false, only print which libraries have differences.
"""
_API_GOLDEN_FOLDER_V1 = 'tensorflow/tools/api/golden/v1'
_API_GOLDEN_FOLDER_V2 = 'tensorflow/tools/api/golden/v2'
_TEST_README_FILE = 'tensorflow/tools/api/tests/README.txt'
_UPDATE_WARNING_FILE = 'tensorflow/tools/api/tests/API_UPDATE_WARNING.txt'
def _KeyToFilePath(key, api_version):
  """From a given key, construct a filepath.

  The path points into the golden folder for `api_version`; CamelCase
  segments in the key become dash-lowercase in the filename.
  """
  def _dash_lower(match_obj):
    # e.g. 'V' -> '-v'
    return '-' + match_obj.group(0).lower()
  case_insensitive_key = re.sub('([A-Z]{1})', _dash_lower, key)
  if api_version == 2:
    api_folder = _API_GOLDEN_FOLDER_V2
  else:
    api_folder = _API_GOLDEN_FOLDER_V1
  return os.path.join(api_folder, case_insensitive_key + '.pbtxt')
def _FileNameToKey(filename):
"""From a given filename, construct a key we use for api objects."""
def _ReplaceDashWithCaps(matchobj):
match = matchobj.group(0)
return match[1].upper()
base_filename = os.path.basename(filename)
base_filename_without_ext = os.path.splitext(base_filename)[0]
api_object_key = re.sub(
'((-[a-z]){1})', _ReplaceDashWithCaps, base_filename_without_ext)
return api_object_key
def _VerifyNoSubclassOfMessageVisitor(path, parent, unused_children):
  """A Visitor that crashes on subclasses of generated proto classes."""
  # Only proto Message subclasses (other than Message itself) are of interest.
  is_message_class = (
      isinstance(parent, type) and issubclass(parent, message.Message))
  if not is_message_class or parent is message.Message:
    return
  # Check that it is a direct subclass of Message.
  if message.Message not in parent.__bases__:
    raise NotImplementedError(
        'Object tf.%s is a subclass of a generated proto Message. '
        'They are not yet supported by the API tools.' % path)
class ApiCompatibilityTest(test.TestCase):
  """Diffs the visible TF public API against the checked-in golden files."""
  def __init__(self, *args, **kwargs):
    super(ApiCompatibilityTest, self).__init__(*args, **kwargs)
    # Pre-load the two canned messages printed on diffs (update warning /
    # how-to-fix readme).
    golden_update_warning_filename = os.path.join(
        resource_loader.get_root_dir_with_all_resources(),
        _UPDATE_WARNING_FILE)
    self._update_golden_warning = file_io.read_file_to_string(
        golden_update_warning_filename)
    test_readme_filename = os.path.join(
        resource_loader.get_root_dir_with_all_resources(),
        _TEST_README_FILE)
    self._test_readme_message = file_io.read_file_to_string(
        test_readme_filename)
  def _AssertProtoDictEquals(self,
                             expected_dict,
                             actual_dict,
                             verbose=False,
                             update_goldens=False,
                             additional_missing_object_message='',
                             api_version=2):
    """Diff given dicts of protobufs and report differences a readable way.

    Args:
      expected_dict: a dict of TFAPIObject protos constructed from golden
        files.
      actual_dict: a dict of TFAPIObject protos constructed by reading from
        the TF package linked to the test.
      verbose: Whether to log the full diffs, or simply report which files
        were different.
      update_goldens: Whether to update goldens when there are diffs found.
      additional_missing_object_message: Message to print when a symbol is
        missing.
      api_version: TensorFlow API version to test.
    """
    diffs = []
    verbose_diffs = []
    expected_keys = set(expected_dict.keys())
    actual_keys = set(actual_dict.keys())
    only_in_expected = expected_keys - actual_keys
    only_in_actual = actual_keys - expected_keys
    all_keys = expected_keys | actual_keys
    # This will be populated below.
    updated_keys = []
    for key in all_keys:
      diff_message = ''
      verbose_diff_message = ''
      # First check if the key is not found in one or the other.
      if key in only_in_expected:
        diff_message = 'Object %s expected but not found (removed). %s' % (
            key, additional_missing_object_message)
        verbose_diff_message = diff_message
      elif key in only_in_actual:
        diff_message = 'New object %s found (added).' % key
        verbose_diff_message = diff_message
      else:
        # Do not truncate diff
        # BUG FIX: previously assigned `self.maxDiffs` (a typo) -- the
        # unittest attribute controlling diff truncation is `maxDiff`, so
        # the assignment had no effect and long proto diffs were truncated.
        self.maxDiff = None  # pylint: disable=invalid-name
        # Now we can run an actual proto diff.
        try:
          self.assertProtoEquals(expected_dict[key], actual_dict[key])
        except AssertionError as e:
          updated_keys.append(key)
          diff_message = 'Change detected in python object: %s.' % key
          verbose_diff_message = str(e)
      # All difference cases covered above. If any difference found, add to
      # the list.
      if diff_message:
        diffs.append(diff_message)
        verbose_diffs.append(verbose_diff_message)
    # If diffs are found, handle them based on flags.
    if diffs:
      diff_count = len(diffs)
      logging.error(self._test_readme_message)
      logging.error('%d differences found between API and golden.', diff_count)
      messages = verbose_diffs if verbose else diffs
      for i in range(diff_count):
        print('Issue %d\t: %s' % (i + 1, messages[i]), file=sys.stderr)
      if update_goldens:
        # Write files if requested.
        logging.warning(self._update_golden_warning)
        # If the keys are only in expected, some objects are deleted.
        # Remove files.
        for key in only_in_expected:
          filepath = _KeyToFilePath(key, api_version)
          file_io.delete_file(filepath)
        # If the files are only in actual (current library), these are new
        # modules. Write them to files. Also record all updates in files.
        for key in only_in_actual | set(updated_keys):
          filepath = _KeyToFilePath(key, api_version)
          file_io.write_string_to_file(
              filepath, text_format.MessageToString(actual_dict[key]))
      else:
        # Fail if we cannot fix the test by updating goldens.
        self.fail('%d differences found between API and golden.' % diff_count)
    else:
      logging.info('No differences found between API and golden.')
  def testNoSubclassOfMessage(self):
    visitor = public_api.PublicAPIVisitor(_VerifyNoSubclassOfMessageVisitor)
    visitor.do_not_descend_map['tf'].append('contrib')
    # Skip compat.v1 and compat.v2 since they are validated in separate tests.
    visitor.private_map['tf.compat'] = ['v1', 'v2']
    traverse.traverse(tf, visitor)
  def testNoSubclassOfMessageV1(self):
    if not hasattr(tf.compat, 'v1'):
      return
    visitor = public_api.PublicAPIVisitor(_VerifyNoSubclassOfMessageVisitor)
    visitor.do_not_descend_map['tf'].append('contrib')
    traverse.traverse(tf_v2.compat.v1, visitor)
  def testNoSubclassOfMessageV2(self):
    if not hasattr(tf.compat, 'v2'):
      return
    visitor = public_api.PublicAPIVisitor(_VerifyNoSubclassOfMessageVisitor)
    visitor.do_not_descend_map['tf'].append('contrib')
    traverse.traverse(tf_v2, visitor)
  def _checkBackwardsCompatibility(
      self, root, golden_file_pattern, api_version,
      additional_private_map=None):
    """Walk `root`'s public API and diff it against the golden files."""
    # Extract all API stuff.
    visitor = python_object_to_proto_visitor.PythonObjectToProtoVisitor()
    public_api_visitor = public_api.PublicAPIVisitor(visitor)
    public_api_visitor.do_not_descend_map['tf'].append('contrib')
    public_api_visitor.do_not_descend_map['tf.GPUOptions'] = [
        'Experimental']
    if additional_private_map:
      public_api_visitor.private_map.update(additional_private_map)
    traverse.traverse(root, public_api_visitor)
    proto_dict = visitor.GetProtos()
    # Read all golden files.
    golden_file_list = file_io.get_matching_files(golden_file_pattern)
    def _ReadFileToProto(filename):
      """Read a filename, create a protobuf from its contents."""
      ret_val = api_objects_pb2.TFAPIObject()
      text_format.Merge(file_io.read_file_to_string(filename), ret_val)
      return ret_val
    golden_proto_dict = {
        _FileNameToKey(filename): _ReadFileToProto(filename)
        for filename in golden_file_list
    }
    # Diff them. Do not fail if called with update.
    # If the test is run to update goldens, only report diffs but do not fail.
    self._AssertProtoDictEquals(
        golden_proto_dict,
        proto_dict,
        verbose=FLAGS.verbose_diffs,
        update_goldens=FLAGS.update_goldens,
        api_version=api_version)
  @unittest.skipUnless(
      sys.version_info.major == 2,
      'API compabitility test goldens are generated using python2.')
  def testAPIBackwardsCompatibility(self):
    api_version = 1
    golden_file_pattern = os.path.join(
        resource_loader.get_root_dir_with_all_resources(),
        _KeyToFilePath('*', api_version))
    self._checkBackwardsCompatibility(
        tf,
        golden_file_pattern,
        api_version,
        # Skip compat.v1 and compat.v2 since they are validated
        # in separate tests.
        additional_private_map={'tf.compat': ['v1', 'v2']})
  @unittest.skipUnless(
      sys.version_info.major == 2,
      'API compabitility test goldens are generated using python2.')
  def testAPIBackwardsCompatibilityV1(self):
    api_version = 1
    golden_file_pattern = os.path.join(
        resource_loader.get_root_dir_with_all_resources(),
        _KeyToFilePath('*', api_version))
    self._checkBackwardsCompatibility(
        tf_v2.compat.v1, golden_file_pattern, api_version)
  @unittest.skipUnless(
      sys.version_info.major == 2,
      'API compabitility test goldens are generated using python2.')
  def testAPIBackwardsCompatibilityV2(self):
    api_version = 2
    golden_file_pattern = os.path.join(
        resource_loader.get_root_dir_with_all_resources(),
        _KeyToFilePath('*', api_version))
    self._checkBackwardsCompatibility(
        tf_v2, golden_file_pattern, api_version,
        additional_private_map={'tf.compat': ['v1']})
if __name__ == '__main__':
  parser = argparse.ArgumentParser()
  # NOTE(review): argparse's type=bool does not parse booleans -- ANY
  # non-empty string (including "False") is truthy, so
  # `--update_goldens False` still enables updating. action='store_true'
  # would be the conventional fix; confirm how callers pass these flags
  # before changing the interface.
  parser.add_argument(
      '--update_goldens', type=bool, default=False, help=_UPDATE_GOLDENS_HELP)
  parser.add_argument(
      '--verbose_diffs', type=bool, default=True, help=_VERBOSE_DIFFS_HELP)
  FLAGS, unparsed = parser.parse_known_args()
  # Now update argv, so that unittest library does not get confused.
  sys.argv = [sys.argv[0]] + unparsed
  test.main()
|
AnishShah/tensorflow
|
tensorflow/tools/api/tests/api_compatibility_test.py
|
Python
|
apache-2.0
| 12,593
|
from django.contrib import admin
from .organisms import CollectiveAdmin, GrowthAdmin, IndividualAdmin
from core.models.organisms import Individual, Collective, Growth
# Expose the organism models in the Django admin site, each with its custom
# ModelAdmin configuration.
admin.site.register(Individual, IndividualAdmin)
admin.site.register(Collective, CollectiveAdmin)
admin.site.register(Growth, GrowthAdmin)
|
fako/datascope
|
src/core/admin/__init__.py
|
Python
|
gpl-3.0
| 309
|
from django.core.exceptions import ValidationError
from django.shortcuts import redirect, render
from lists.models import Item, List
def home_page(request):
    """Render the site landing page."""
    template_name = 'home.html'
    return render(request, template_name)
def new_list(request):
    """Create a list from the POSTed first item; roll back on invalid input."""
    new_item_text = request.POST['item_text']
    list_ = List.objects.create()
    item = Item(text=new_item_text, list=list_)
    try:
        item.full_clean()
        item.save()
    except ValidationError:
        # Don't leave an empty list behind when the first item is invalid.
        list_.delete()
        error = "You can't have an empty list item"
        return render(request, 'home.html', {"error": error})
    return redirect(list_)
def view_list(request, list_id):
    """Show one list; on POST, attempt to append a new item to it."""
    list_ = List.objects.get(id=list_id)
    error = None
    if request.method == 'POST':
        new_item = Item(text=request.POST['item_text'], list=list_)
        try:
            new_item.full_clean()
            new_item.save()
            return redirect(list_)
        except ValidationError:
            error = "You can't have an empty list item"
    return render(request, 'list.html', {'list': list_, 'error': error})
|
freddyiniguez/cimat_scrum_developer
|
superlists/lists/views.py
|
Python
|
gpl-2.0
| 1,022
|
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Gtk modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.plug import Gramplet
from gramps.gui.widgets import Photo
from gramps.gen.utils.file import media_path_full
class MediaPreview(Gramplet):
    """
    Displays a preview of the media object.
    """
    def init(self):
        """Swap the gramplet's default text view for the preview widget."""
        self.gui.WIDGET = self.build_gui()
        self.gui.get_container_widget().remove(self.gui.textview)
        self.gui.get_container_widget().add(self.gui.WIDGET)
    def build_gui(self):
        """
        Build the GUI interface.
        """
        self.top = Gtk.Box()
        # Photo() is passed a boolean when the screen is shorter than
        # 1000px — presumably selects a compact layout; confirm against
        # the Photo widget's constructor.
        self.photo = Photo(self.uistate.screen_height() < 1000)
        self.top.pack_start(self.photo, fill=True, expand=False, padding=5)
        self.top.show_all()
        return self.top
    def db_changed(self):
        """Reconnect update signals whenever the database changes."""
        self.connect(self.dbstate.db, 'media-update', self.update)
        self.connect_signal('Media', self.update)
    def update_has_data(self):
        """Set the has-data flag from the currently active media handle."""
        active_handle = self.get_active('Media')
        if active_handle:
            active_media = self.dbstate.db.get_media_from_handle(active_handle)
            self.set_has_data(active_media is not None)
        else:
            self.set_has_data(False)
    def main(self):
        """Refresh the preview to show the currently active media object."""
        active_handle = self.get_active('Media')
        if active_handle:
            media = self.dbstate.db.get_media_from_handle(active_handle)
            # Hide while updating to avoid drawing a stale image.
            self.top.hide()
            if media:
                self.load_image(media)
                self.set_has_data(True)
            else:
                self.photo.set_image(None)
                self.set_has_data(False)
            self.top.show()
        else:
            self.photo.set_image(None)
            self.set_has_data(False)
    def load_image(self, media):
        """
        Load the primary image if it exists.
        """
        self.full_path = media_path_full(self.dbstate.db, media.get_path())
        mime_type = media.get_mime_type()
        self.photo.set_image(self.full_path, mime_type)
        self.photo.set_uistate(self.uistate, None)
|
Nick-Hall/gramps
|
gramps/plugins/gramplet/mediapreview.py
|
Python
|
gpl-2.0
| 3,153
|
from ..base import ShopifyResource
from shopify import mixins
from comment import Comment
class Article(ShopifyResource, mixins.Metafields, mixins.Events):
    """A blog article resource, addressed under /admin/blogs/<blog_id>/."""
    _prefix_source = "/admin/blogs/$blog_id/"

    @classmethod
    def _prefix(cls, options=None):
        """Return the URL prefix for article requests.

        Fix: the original used a mutable default argument (``options={}``);
        use ``None`` as the sentinel instead. Behavior is unchanged for all
        callers. Falls back to the bare admin prefix when no blog_id is
        supplied.
        """
        blog_id = (options or {}).get("blog_id")
        if blog_id:
            return "/admin/blogs/%s" % (blog_id)
        else:
            return "/admin"

    def comments(self):
        """Return the comments attached to this article."""
        return Comment.find(article_id=self.id)

    @classmethod
    def authors(cls, **kwargs):
        """Fetch the list of article authors."""
        return cls.get('authors', **kwargs)

    @classmethod
    def tags(cls, **kwargs):
        """Fetch the list of article tags."""
        return cls.get('tags', **kwargs)
|
roninio/gae-shopify-python-boilerplate
|
shopify/resources/article.py
|
Python
|
lgpl-3.0
| 663
|
#! /usr/bin/env python
from sklearn import datasets
# Python 2 script: inspect the datasets module, then the bundled digit data.
print datasets , type(datasets)
iris = datasets.load_iris()
digits = datasets.load_digits()
#print iris
print digits.target
#plot them
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
# Pair each 8x8 digit image with its integer label (Python 2 zip -> list).
images_and_labels = zip(digits.images, digits.target)
with PdfPages("images.pdf") as pdf :
    # Render the first four digits, one PDF page each.
    for (image, label) in images_and_labels[:4]:
        plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
        plt.title("training %g"% label)
        pdf.savefig()
        plt.close()
nsamples = len(digits.images)
print digits.images[0]
# Flatten each 8x8 image into a 64-element feature vector for classifiers.
data = digits.images.reshape((nsamples,-1))
print data[0]
#decision tree
|
alexshires/ml
|
sklearn/basics.py
|
Python
|
gpl-2.0
| 709
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.conf.urls import url
from . import views
urlpatterns = [
    url(r'^$', views.OrganizationListView.as_view(),
        name="list"),
    # Map layers: one full GeoJSON dump plus a z/x/y tiled variant.
    # NOTE(review): both routes are named 'data' (and three routes below are
    # named 'list'); reverse() resolves to the last registration — confirm
    # that this shadowing is intentional.
    url(r'^data.geojson$', views.OrganizationMapLayer.as_view(), name='data'),
    url(r'^data-(?P<z>\d+)-(?P<x>\d+)-(?P<y>\d+).geojson$',
        views.OrganizationTiledGeoJSONLayerView.as_view(), name='data'),
    # The _() wrapper makes these URL prefixes translatable per locale.
    url(_(r'^region-(?P<region>[\w\d-]+)$'), views.OrganizationListView.as_view(),
        name="list"),
    url(_(r'^organization-(?P<slug>[\w\d-]+)$'), views.OrganizationDetailView.as_view(),
        name="details"),
    url(_(r'^organization-(?P<slug>[\w\d-]+)/~fix$'), views.OrganizationFixView.as_view(),
        name="fix"),
    # Catch-all category filter; must stay last so it does not shadow the above.
    url(r'^(?P<category>[\w\d-]+)$', views.OrganizationListView.as_view(),
        name="list"),
]
|
watchdogpolska/watchdog-kj-kultura
|
watchdog_kj_kultura/organizations/urls.py
|
Python
|
mit
| 907
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Thibault Kruse
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
'pychecker plugin'
from __future__ import absolute_import, print_function, unicode_literals
import os
from unilint.python_source_plugin import AbstractPythonPlugin
from unilint.unilint_plugin import UnilintPluginInitException
from unilint.issue import Issue
from unilint.unilint_main import LEVEL_ERROR, LEVELS
class PycheckerPlugin(AbstractPythonPlugin):
    'pychecker plugin'
    def __init__(self, shell_function):
        super(PycheckerPlugin, self).__init__(shell_function)
    @classmethod
    def get_id(cls):
        'unique identifier of this plugin'
        return 'pychecker'
    @classmethod
    def is_enabled_by_default(cls):
        # pychecker is evil, because it compiles code, which means
        # malicious code maybe executed.
        return False
    def get_meta_information(self):
        'return the pychecker version string, or raise if it is not installed'
        cmd = "pychecker --version"
        # Tolerate returncode 127 ("command not found") so we can raise the
        # friendlier init exception below instead of crashing in shell_cmd.
        value, output, message = self.shell_cmd(cmd,
                                                shell=True,
                                                us_env=True,
                                                ignore_returncodes=[127])
        if value == 0:
            return "pychecker: %s" % output
        raise UnilintPluginInitException(
            "ERROR Can't find pychecker, install via apt or pip\n%s" % message)
    def check_resource(self, options, path, type_categories):
        'runs the tool and processes the output'
        # Only python sources/modules are relevant to pychecker.
        if not 'python-src' in type_categories and \
                not 'python-module' in type_categories:
            return None
        suppress_cmds = []
        # At error level and above, suppress pychecker's warning-grade checks.
        if options.level >= LEVELS[LEVEL_ERROR]:
            suppress_cmds.append('--no-argsused')
            suppress_cmds.append('--no-import')
        # -#100 raises pychecker's cap on the number of reported warnings.
        cmd = 'pychecker --only -#100 %s %s' % (' '.join(suppress_cmds), path)
        if options.verbose:
            print(cmd)
        # Returncodes 1/2/123 mean "ran but found issues"; not failures here.
        _, output, _ = self.shell_cmd(cmd,
                                      shell=True,
                                      us_env=True,
                                      ignore_returncodes=[1, 2, 123])
        if options.raw:
            print(path)
            print(output)
            return None
        else:
            return (self.parse(output), None)
    def parse(self, output):
        'creates an issue from an output line'
        issues = []
        # Example output
        # /tmp/unilint/plugin.py:13: Local variable (pythonfound) not used
        # /tmp/unilint/pychecker_plugin.py:47: No global (Issue) found
        for line in output.splitlines():
            ignore = False
            if line.strip() == '':
                continue
            # File or pathname element
            for prefix in ['Warnings...', 'None']:
                if line.startswith(prefix):
                    ignore = True
                    break
            if ignore:
                continue
            if line.count(':') > 1:
                path, line_number, message = line.split(':', 2)
            else:
                # NOTE(review): lines without a "path:line:" prefix get
                # path='' and are therefore always dropped by the exists()
                # check below — confirm that is intended.
                path = ''
                message = line
            # pychecker seems to print weird double lines with
            # incomplete path when run through subprocess.Popen
            if not os.path.exists(path):
                continue
            message = message.strip()
            severity = "warning"
            issue = Issue(path=path,
                          message=message,
                          checker_id=self.get_id(),
                          line_number_start=line_number,
                          line_number_end=line_number,
                          severity=severity)
            issues.append(issue)
        return issues
|
tkruse/unilint
|
src/unilint/pychecker_plugin.py
|
Python
|
bsd-2-clause
| 4,991
|
from __future__ import unicode_literals, absolute_import
import telegram
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from smartmin.views import SmartFormView
from ...models import Channel
from ...views import ClaimViewMixin
class ClaimView(ClaimViewMixin, SmartFormView):
    """Claim flow for connecting a Telegram bot as a messaging channel."""
    class Form(ClaimViewMixin.Form):
        # Bot token as issued by Telegram's BotFather.
        auth_token = forms.CharField(label=_("Authentication Token"),
                                     help_text=_("The Authentication token for your Telegram Bot"))
        def clean_auth_token(self):
            """Validate the token: unique within the org and accepted by Telegram."""
            org = self.request.user.get_org()
            value = self.cleaned_data['auth_token']
            # does a bot already exist on this account with that auth token
            for channel in Channel.objects.filter(org=org, is_active=True, channel_type=self.channel_type.code):
                if channel.config_json()['auth_token'] == value:
                    raise ValidationError(_("A telegram channel for this bot already exists on your account."))
            # Round-trip to the Telegram API to prove the token is usable.
            try:
                bot = telegram.Bot(token=value)
                bot.get_me()
            except telegram.TelegramError:
                raise ValidationError(_("Your authentication token is invalid, please check and try again"))
            return value
    form_class = Form
    def form_valid(self, form):
        """Create the channel, named after the bot's own Telegram identity."""
        org = self.request.user.get_org()
        auth_token = self.form.cleaned_data['auth_token']
        bot = telegram.Bot(auth_token)
        me = bot.get_me()
        self.object = Channel.create(org, self.request.user, None, self.channel_type,
                                     name=me.first_name, address=me.username, config={'auth_token': auth_token})
        return super(ClaimView, self).form_valid(form)
|
onaio/rapidpro
|
temba/channels/types/telegram/views.py
|
Python
|
agpl-3.0
| 1,824
|
# Benjamin Slack
# CS 5310
# Chan's Minimalist Convex Hull in R^3
import array
import random
import geo
class Point():
    """
    Generalized point class: a 3D point that doubles as a node in a
    doubly linked list (nxt/prv) for the kinetic hull algorithm.
    """
    def __init__(self, x=0.0, y=0.0, z=0.0, n=None, p=None):
        """
        Creates a point. Optional attributes
        given as the parameters below.
        :type x: float
        :param x: default 0.0
        :type y: float
        :param y: default 0.0
        :type z: float
        :param z: default 0.0
        :type n: Point
        :param n: next node in the linked list, default None
        :type p: Point
        :param p: previous node in the linked list, default None
        """
        self.x = float(x)
        self.y = float(y)
        self.z = float(z)
        self.nxt = n
        self.prv = p

    def __str__(self):
        return "({0},{1},{2})".format(
            self.x,
            self.y,
            self.z)

    def act(self):
        """Apply this node's event: insert it into the list if it is
        currently unlinked, otherwise delete it (Chan's insert/delete)."""
        if self.prv.nxt != self:
            # insertion
            self.prv.nxt = self
            self.nxt.prv = self
        else:
            # deletion
            self.prv.nxt = self.nxt
            self.nxt.prv = self.prv

    def y_prime(self, t):
        """Return the projected y-coordinate at time t."""
        return self.z - t * self.y

    def proj_2d(self, t):
        """Return the 2D projection (x, y') of the point at time t.

        Bug fix: the original ``tuple(self.x, self.y_prime(t))`` raises
        TypeError because tuple() takes a single iterable argument; build
        the pair with a tuple literal instead.
        """
        return (self.x, self.y_prime(t))
# Large sentinel "infinity" used for event times throughout the hull code.
CONST_INF = float("1e99")
# Sentinel node marking termination in the hull's point arrays/lists.
__nil = Point(CONST_INF, CONST_INF, CONST_INF)
def turn(p, q, r):
    """
    Determine the rotational orientation of a triplet of points p, q, r.

    Returns a float: negative means clockwise, zero means collinear, and
    positive means counter-clockwise. Any sentinel (__nil) argument
    yields 1.0.
    :type p: Point
    :type q: Point
    :type r: Point
    :return: float
    """
    if p == __nil or q == __nil or r == __nil:
        return 1.0
    return (q.x - p.x) * (r.y - p.y) - (r.x - p.x) * (q.y - p.y)
def time(p, q, r):
    """
    Calculate the time at which the triplet p, q, r changes orientation
    from clockwise to counter-clockwise or vice versa. Sentinel (__nil)
    arguments give CONST_INF.
    :type p: Point
    :param p: first pt
    :type q: Point
    :param q: second pt
    :type r: Point
    :param r: third pt
    :rtype : float
    :return: time
    """
    if p == __nil or q == __nil or r == __nil:
        return CONST_INF
    numerator = (q.x - p.x) * (r.z - p.z) - (r.x - p.x) * (q.z - p.z)
    return numerator / turn(p, q, r)
def x_sort(P):
    """
    Sort the points in place by x-coordinate and rebuild the doubly
    linked prv/nxt chain to match the sorted order.

    Improvements over the original: uses enumerate instead of repeated
    O(n) list.index() calls (which made the pass O(n^2)), and no longer
    raises IndexError for a single-element list (the original accessed
    P[1] unconditionally for the first element).

    :type P: list[Point]
    :param P: list of Points
    :return: void
    """
    P.sort(key=lambda pt: pt.x)
    last = len(P) - 1
    for i, pt in enumerate(P):
        # First node has no predecessor; last has no successor.
        pt.prv = P[i - 1] if i > 0 else None
        pt.nxt = P[i + 1] if i < last else None
def hull(p_list, list_q, list_r, n, A, B, s_ab, t_ab):
    """
    Divide-and-conquer lower-hull construction (Chan's kinetic scheme):
    recursively hulls each half, then merges by tracking the bridge u-v
    through time, recording insert/delete events into A.

    :type p_list: list[Point]
    :param p_list: list of points
    :param list_q, list_r: int indexes of p_list to work
    :type n: int
    :param n: number of points
    :type A: list[Point]
    :param A: list of points in L hull (event "movie" written here)
    :type B: list[Point]
    :param B: list of points in R hull (scratch; swapped with A on recursion)
    :param s_ab, t_ab: int indexes of A & B to work
    :return: void
    """
    #u = Point()
    #v = Point()
    #mid = Point()
    # Candidate event times: two from the sub-hull movies, four bridge events.
    t = array.array("d", [0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    #old_t = 0.0
    #new_t = 0.0
    i, j, k, l, min_l = [0, 0, 0, 0, 0]
    #base case
    if n == 1:
        A[s_ab] = __nil
        p_list[list_q].prv = __nil
        p_list[list_q].nxt = __nil
        return
    #step through p_list to find
    #a middle point to partition from
    i = 0
    u = p_list[list_q]
    while (i < n//2 - 1):
        u = u.nxt
        i += 1
    #u should now point to the middle of the
    #set of points
    mid = u.nxt
    v = u.nxt
    #recurse with each half of the partition
    # Note A and B swap roles on recursion; each half's movie occupies
    # a 2*(half size) slice of the scratch array.
    hull(p_list, list_q, p_list.index(mid), n // 2, B, A, s_ab, s_ab + n // 2 * 2)
    hull(p_list, p_list.index(mid), list_r, n - n // 2, B, A, s_ab + n // 2 * 2, t_ab)
    #find initial bridge
    while True:
        if turn(u, v, v.nxt) < 0:
            v = v.nxt
        elif turn(u.prv, u, v) < 0:
            u = u.prv
        else:
            break
    #merge by tracking bridge uv over time
    i = 0
    k = 0
    j = n//2*2
    old_t = -CONST_INF
    while True:
        # t[0]/t[1]: next event in the left/right sub-hull movies;
        # t[2]..t[5]: the four ways the bridge endpoints can change.
        t[0] = time(B[s_ab + i].prv, B[s_ab + i], B[s_ab + i].nxt)
        t[1] = time(B[s_ab + j].prv, B[s_ab + j], B[s_ab + j].nxt)
        t[2] = time(u, u.nxt, v)
        t[3] = time(u.prv, u, v)
        t[4] = time(u, v.prv, v)
        t[5] = time(u, v, v.nxt)
        # Pick the earliest event strictly after old_t.
        new_t = CONST_INF
        l = 0
        while l < 6:
            if (t[l] > old_t) and (t[l] < new_t):
                min_l = l
                new_t = t[l]
            l += 1
        #loop scape clause
        if new_t == CONST_INF:
            break
        #case 0: event from the left sub-hull movie
        if min_l == 0:
            if B[s_ab + i].x < u.x:
                A[s_ab + k] = B[s_ab + i]
                k += 1
            B[s_ab + i].act()
            i += 1
        #case 1: event from the right sub-hull movie
        elif min_l == 1:
            if B[s_ab + j].x > v.x:
                A[s_ab + k] = B[s_ab + j]
                k +=1
            B[s_ab + j].act()
            j += 1
        #case 2: bridge's left end advances to u.nxt
        elif min_l == 2:
            A[s_ab + k] = u.nxt
            u = u.nxt
            k += 1
        #case 3: bridge's left end retreats to u.prv
        elif min_l == 3:
            A[s_ab + k] = u
            k += 1
            u = u.prv
        #case 4: bridge's right end retreats to v.prv
        elif min_l == 4:
            A[s_ab + k] = v.prv
            k += 1
            v = v.prv
        #case 5: bridge's right end advances to v.nxt
        elif min_l == 5:
            A[s_ab + k] = v
            k += 1
            v = v.nxt
        else:
            pass
        #end cases
        #increment old_t
        old_t = new_t
    #after loop
    A[s_ab + k] = __nil
    #update pointers: connect the final bridge, then replay the recorded
    #events backwards to restore the list to time -infinity.
    u.nxt = v
    v.prv = u
    k -= 1
    while k >= 0:
        if (A[s_ab + k].x <= u.x) or (A[s_ab + k].x >= v.x):
            A[s_ab + k].act()
            if A[s_ab + k] == u:
                u = u.prv
            elif A[s_ab + k] == v:
                v = v.nxt
        else:
            u.nxt = A[s_ab + k]
            A[s_ab + k].prv = u
            v.prv = A[s_ab + k]
            A[s_ab + k].nxt = v
            if A[s_ab + k].x < mid.x:
                u = A[s_ab + k]
            else:
                v = A[s_ab + k]
        k -= 1
def chan(raw_pts, filename="chan.obj"):
    """
    Build the full 3D convex hull of raw_pts (lower hull, then the upper
    hull computed as the lower hull of the z-mirrored points) and write it
    out as a Wavefront OBJ file.

    :type raw_pts: list[tuple[float]]
    :param raw_pts: a list of raw tuples of 3 floats
    :type filename: str
    :param filename: a filename (path) to the obj
    :rtype : geo.Mesh
    :return: Mesh obj of generated hull
    """
    #setup a list of Points
    p_list = list()
    for pt in raw_pts:
        p_list.append(Point(pt[0], pt[1], pt[2]))
    #sort the points on X coord
    x_sort(p_list)
    # i = 0
    # for this_pt in p_list:
    #     print("v[{0}] {1}".format(i + 1, str(this_pt)))
    #     i += 1
    # Event arrays sized 2n per the algorithm's movie bound.
    A = list()
    B = list()
    for i in range(2*len(p_list)):
        A.append(None)
        B.append(None)
    hull(p_list, 0, len(p_list), len(p_list), A, B, 0, len(A))
    # i = 0
    # while A[i] != __nil:
    #     print("f[{0}] {1} {2} {3}".format(
    #         i+1,
    #         p_list.index(A[i].prv) + 1,
    #         p_list.index(A[i]) + 1,
    #         p_list.index(A[i].nxt) + 1))
    #     A[i].act()
    #     i += 1
    #output the lower hull
    test_mesh = geo.Mesh()
    for this_pt in p_list:
        test_mesh.add_vert((this_pt.x, this_pt.y, this_pt.z))
    i = 0
    while A[i] != __nil:
        #use turn to determine the normal orientation
        if turn(A[i].prv, A[i], A[i].nxt) < 0:
            test_mesh.add_face((p_list.index(A[i].prv) + 1,
                                p_list.index(A[i]) + 1,
                                p_list.index(A[i].nxt) + 1))
        else:
            test_mesh.add_face((p_list.index(A[i].nxt) + 1,
                                p_list.index(A[i]) + 1,
                                p_list.index(A[i].prv) + 1))
        A[i].act()
        i += 1
    #rerun the hull algo, with the data flipped
    #on z. Then flip the point order on the the
    #face on rebuild to reverse the mirror
    A = list()
    B = list()
    for i in range(2 * len(p_list)):
        A.append(None)
        B.append(None)
    #run x_sort to restore the point
    #to point links
    x_sort(p_list)
    #flip the z
    for this_pt in p_list:
        this_pt.z *= -1
    #upper hull as lower hull on mirror
    hull(p_list, 0, len(p_list), len(p_list), A, B, 0, len(A))
    i = 0
    while A[i] != __nil:
        #just flip the sign on turn switch to mirror
        if turn(A[i].prv, A[i], A[i].nxt) > 0:
            test_mesh.add_face((p_list.index(A[i].prv) + 1,
                                p_list.index(A[i]) + 1,
                                p_list.index(A[i].nxt) + 1))
        else:
            test_mesh.add_face((p_list.index(A[i].nxt) + 1,
                                p_list.index(A[i]) + 1,
                                p_list.index(A[i].prv) + 1))
        A[i].act()
        i += 1
    test_mesh.generate_obj(filename)
    return test_mesh
if __name__ == "__main__":
    # Demo: build the hull of a small fixed point cloud and write chan.obj.
    test = list()
    # test.append((1.0, 0.0, 0.0))
    # test.append((0.1, 0.1, 1.0))
    # test.append((-1.0, 0.2, 0.1))
    # test.append((0.4, 0.3, -1.0))
    # test.append((0.3, 2.1, -0.5))
    # test.append((0.2, 1.44, 0.2))
    # test.append((5.0, 5.0, -5.0))
    # test.append((4.0, -2.0, 4.0))
    # test.append((3.0, -4.0, -3.0))
    # test.append((2.0, 1.0, 2.0))
    # test.append((1.0, 3.0, -1.0))
    # test.append((-1.0, 4.0, 1.0))
    # test.append((-2.0, 0.0, -2.0))
    # test.append((-3.0, -5.0, 3.0))
    # test.append((-4.0, 1.0, -4.0))
    # test.append((-5.0, 4.0, 5.0))
    # for i in range(100):
    #     test.append((random.random()*20-10, random.random()*20-10, random.random()*20-10))
    # Fixed 8-point sample (previously produced by the random block above).
    test = [
        (0.09964782850167017, 0.46903571101375924, 0.012097950134946217),
        (-0.1755456491170128, 0.5456326040668025, 0.20478850115406594),
        (-0.21830065947676314, 0.7209408956737615, -0.13513941138262817),
        (-0.044629166881916316, 0.5348308556270078, -0.15079826891715226),
        (0.45229166746139526, 0.847848653793335, 0.24407055974006653),
        (-0.4783439636230469, 0.5599833726882935, 0.470014750957489),
        (-0.2732780873775482, 0.9194132089614868, -0.440346896648407),
        (0.07073074579238892, 0.020669907331466675, -0.1684664785861969)
    ]
    chan(test)
|
baslack/quickhull
|
chan/__init__.py
|
Python
|
gpl-3.0
| 10,525
|
# This file contains the dialogBox class
import pygame
from transcendence.graphics import widget, button, text
import transcendence.graphics as graphics
from transcendence import util
"""class ContentBox(Box):
def size_changed(self):
self.parent.needs_redraw = True
self.recalculate_collision_rect()
par_rect = pygame.Rect((0,0), self.parent.size)
own_rect = pygame.Rect(self.pos, self.size)
if par_rect.contains(own_rect):
self.remake_surfaces()
else:
self.parent.size = own_rect.union(par_rect).size
size = util.call_on_change("_size", size_changed)"""
#class DialogBox(widget.OffscreenWidget):
class DialogBox(widget.Widget):
    """A framed dialog: a content box placed below/right of a title bar."""
    def __init__(self, size, title, **kwargs):
        # Pop optional styling kwargs; remaining kwargs are ignored.
        title_buttons = kwargs.pop("title_buttons", [])
        # Total widget size = requested content size plus the titlebar offset.
        full_size = (size[0] + TitleBar.offset[0], size[1] + TitleBar.offset[1])
        borders = kwargs.pop("borders", graphics.ALL)
        background = kwargs.pop("background", graphics.colors.background)
        border_color = kwargs.pop("border_color", graphics.colors.strong_highlight)
        super().__init__(full_size)
        # Content area sits at the titlebar offset; titlebar anchors at origin.
        self.contentbox = widget.Box(size, borders=borders, background=background, border_color=border_color)
        self.add_child(self.contentbox, TitleBar.offset)
        self.titlebar = TitleBar(size[0], title, title_buttons)
        self.add_child(self.titlebar, (0,0))
class TitleBar(widget.Widget):
    """The titlebar appears at the top of a dialog."""
    # Offset of the dialog's content box relative to the titlebar origin.
    offset = (3,32)
    # Vertical extent of the decorative left edge below the buttons row.
    descent_height = 40
    # Height of the title/buttons strip.
    buttons_height = 30
    line_width = 4
    font_size = 24
    class Spacer(widget.Widget):
        """Thin vertical divider drawn between the title and its buttons."""
        def __init__(self):
            super().__init__((2, TitleBar.buttons_height))
        transparent = False
        def redraw_self(self):
            # Highlight column with a background-colored notch centered
            # vertically on the text height.
            y_start = (self.size[1] - TitleBar.font_size) // 2
            self.surface.fill(graphics.colors.strong_highlight)
            self.surface.fill(graphics.colors.background, rect=(0,y_start,2,TitleBar.font_size))
    def __init__(self, dialog_width, title, title_buttons):
        """Lay out the title label plus optional (caption, callback) buttons."""
        my_size = (dialog_width + self.offset[0], self.descent_height + self.buttons_height - self.line_width)
        # (width + 3, 66)
        # Title label width = rendered text width plus 16px padding each side.
        title_width = graphics.font[2].get_rect(title, size=self.font_size)[2] + 32
        super().__init__(my_size)
        self.add_child(
            text.FastLabel((title_width,self.buttons_height), title, self.font_size, font=graphics.font[2],
                           text_color=graphics.colors.black, background=graphics.colors.strong_highlight, align=graphics.CENTER),
            (self.line_width, 0))
        if len(title_buttons) != 0:
            self.add_child(TitleBar.Spacer(), (self.line_width + title_width, 0))
        # Buttons flow left-to-right after the title and spacer.
        x = self.line_width + title_width + 2
        for caption, func in title_buttons:
            w = graphics.font[2].get_rect(caption, size=self.font_size)[2] + 32
            self.add_child(
                button.FuncButton((w, self.buttons_height), caption, self.font_size, func, borders=(),
                                  background=graphics.colors.strong_highlight, hover_background=graphics.colors.med_highlight,
                                  text_color=graphics.colors.black, hover_text_color=graphics.colors.white),
                (x,0))
            x += w
    def redraw_self(self):
        # Draw the short descending left edge and the underline strip
        # running below the buttons row.
        self.surface.fill(graphics.colors.strong_highlight,
                          (0, self.buttons_height - self.line_width, self.line_width, self.descent_height))
        self.surface.fill(graphics.colors.strong_highlight,
                          (0,self. buttons_height - self.line_width, self.size[0], self.line_width))
|
Scaatis/Endgame
|
transcendence/graphics/dialogbox.py
|
Python
|
gpl-2.0
| 3,672
|
from flask import Blueprint, render_template
projects = Blueprint('projects', __name__,
template_folder='templates')
@projects.route('/')
def home():
    """Serve the projects blueprint's index page."""
    template = 'projects/index.html'
    return render_template(template)
|
ardinor/mojibake
|
mojibake/projects/views.py
|
Python
|
mit
| 224
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main package for Course Builder, which handles URL routing."""
import os
import webapp2
# The following import is needed in order to add third-party libraries.
import appengine_config # pylint: disable-msg=unused-import
from controllers import assessments
from controllers import lessons
from controllers import sites
from controllers import utils
from modules.admin import admin
from modules.admin import config
from modules.announcements import announcements
from modules.dashboard import dashboard
# Course-scoped routes, dispatched per course by the sites application handler.
urls = [
    ('/', lessons.CourseHandler),
    ('/activity', lessons.ActivityHandler),
    ('/announcements', announcements.AnnouncementsHandler),
    ('/answer', assessments.AnswerHandler),
    ('/assessment', lessons.AssessmentHandler),
    ('/course', lessons.CourseHandler),
    ('/forum', utils.ForumHandler),
    ('/dashboard', dashboard.DashboardHandler),
    ('/preview', utils.PreviewHandler),
    ('/register', utils.RegisterHandler),
    ('/student/editstudent', utils.StudentEditStudentHandler),
    ('/student/home', utils.StudentProfileHandler),
    ('/student/unenroll', utils.StudentUnenrollHandler),
    ('/unit', lessons.UnitHandler),
    ('/mentors', utils.Mentors)]
sites.ApplicationRequestHandler.bind(urls)
# Serve the bundled inputex JS library straight out of its zip archive.
inputex_handler = (
    '/static/inputex-3.1.0/(.*)', sites.make_zip_handler(
        os.path.join(appengine_config.BUNDLE_ROOT, 'lib/inputex-3.1.0.zip')))
# Global (non-course) admin routes, matched before the catch-all below.
admin_handlers = [
    ('/admin', admin.AdminHandler),
    ('/rest/config/item', config.ConfigPropertyItemRESTHandler),
    ('/rest/courses/item', config.CoursesItemRESTHandler)]
# Catch-all: everything else goes through the course dispatcher.
app_handler = (r'(.*)', sites.ApplicationRequestHandler)
webapp2_i18n_config = {'translations_path': os.path.join(
    appengine_config.BUNDLE_ROOT, 'modules/i18n/resources/locale')}
# Debug mode everywhere except production deployments.
debug = not appengine_config.PRODUCTION_MODE
app = webapp2.WSGIApplication(
    admin_handlers + [inputex_handler] + [app_handler],
    config={'webapp2_extras.i18n': webapp2_i18n_config}, debug=debug)
|
henrymp/coursebuilder
|
main.py
|
Python
|
apache-2.0
| 2,562
|
'''Shorty Template Tags'''
from django import template
from django.core.urlresolvers import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def build_short_url(context, path):
    """Return an absolute short URL for *path* based on the current request."""
    short_path = reverse('redirect', kwargs={'slug': path})
    return context['request'].build_absolute_uri(short_path)
|
ocadotechnology/djshorty
|
shorty/templatetags/shorty.py
|
Python
|
apache-2.0
| 303
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for third_party.tensorflow.contrib.ffmpeg.encode_audio_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import six
from tensorflow.contrib import ffmpeg
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
class EncodeAudioOpTest(test.TestCase):
    """Round-trip and validation tests for ffmpeg.encode_audio."""

    def setUp(self):
        """Load the reference mono 10kHz WAV fixture once per test."""
        super(EncodeAudioOpTest, self).setUp()
        path = os.path.join(resource_loader.get_data_files_path(),
                            'testdata/mono_10khz.wav')
        with open(path, 'rb') as f:
            self._contents = f.read()

    def _compareWavFiles(self, original, encoded):
        """Compares the important bits of two WAV files.

        Some encoders will create a slightly different header to the WAV file.
        This compares only the important bits of the header as well as the contents.

        Args:
          original: Contents of the original .wav file.
          encoded: Contents of the new, encoded .wav file.
        """
        # Both must at least contain a full 44-byte WAV header.
        self.assertLess(44, len(original))
        self.assertLess(44, len(encoded))
        self.assertEqual(original[:4], encoded[:4])
        # Skip file size
        self.assertEqual(original[8:16], encoded[8:16])
        # Skip header size
        self.assertEqual(original[20:36], encoded[20:36])
        # Skip extra bits inserted by ffmpeg.
        self.assertEqual(original[original.find(b'data'):],
                         encoded[encoded.find(b'data'):])

    def testRoundTrip(self):
        """Reads a wav file, writes it, and compares them."""
        with self.test_session():
            audio_op = ffmpeg.decode_audio(
                self._contents,
                file_format='wav',
                samples_per_second=10000,
                channel_count=1)
            encode_op = ffmpeg.encode_audio(
                audio_op, file_format='wav', samples_per_second=10000)
            encoded_contents = encode_op.eval()
            self._compareWavFiles(self._contents, encoded_contents)

    def testRoundTripWithPlaceholderSampleRate(self):
        """Round-trips with the sample rate fed at run time via a placeholder."""
        with self.test_session():
            placeholder = array_ops.placeholder(dtypes.int32)
            audio_op = ffmpeg.decode_audio(
                self._contents,
                file_format='wav',
                samples_per_second=placeholder,
                channel_count=1)
            encode_op = ffmpeg.encode_audio(
                audio_op, file_format='wav', samples_per_second=placeholder)
            encoded_contents = encode_op.eval(feed_dict={placeholder: 10000})
            self._compareWavFiles(self._contents, encoded_contents)

    def testFloatingPointSampleRateInvalid(self):
        """A non-integer sample rate is rejected at graph-construction time."""
        with self.test_session():
            with self.assertRaises(TypeError):
                ffmpeg.encode_audio(
                    [[0.0], [1.0]],
                    file_format='wav',
                    samples_per_second=12345.678)

    def testZeroSampleRateInvalid(self):
        """A zero sample rate fails when the op runs."""
        with self.test_session() as sess:
            encode_op = ffmpeg.encode_audio(
                [[0.0], [1.0]],
                file_format='wav',
                samples_per_second=0)
            with six.assertRaisesRegex(self, Exception, 'must be positive'):
                sess.run(encode_op)

    def testNegativeSampleRateInvalid(self):
        """A negative sample rate fails when the op runs."""
        with self.test_session() as sess:
            encode_op = ffmpeg.encode_audio(
                [[0.0], [1.0]],
                file_format='wav',
                samples_per_second=-2)
            with six.assertRaisesRegex(self, Exception, 'must be positive'):
                sess.run(encode_op)
# Run through the TensorFlow test runner when executed directly.
if __name__ == '__main__':
  test.main()
|
xuleiboy1234/autoTitle
|
tensorflow/tensorflow/contrib/ffmpeg/encode_audio_op_test.py
|
Python
|
mit
| 4,151
|
# -*- coding: utf-8 -*-
#
from collections import OrderedDict
import copy
from rest_framework.generics import ListCreateAPIView
from rest_framework import viewsets
from rest_framework.views import APIView, Response
from rest_framework.permissions import AllowAny
from django.shortcuts import get_object_or_404
from rest_framework.decorators import api_view
from .models import Terminal, TerminalHeatbeat
from .serializers import TerminalSerializer, TerminalHeatbeatSerializer
from .hands import IsSuperUserOrAppUser, IsAppUser, ProxyLog, \
IsSuperUserOrAppUserOrUserReadonly
from common.utils import get_object_or_none
class TerminalRegisterView(ListCreateAPIView):
    """Open endpoint terminals call to register themselves.

    Successful registration creates the Terminal, an application user,
    and an access key the terminal uses for subsequent authentication.
    """
    queryset = Terminal.objects.all()
    serializer_class = TerminalSerializer
    permission_classes = (AllowAny,)
    def create(self, request, *args, **kwargs):
        """Register a new terminal by name; idempotent for existing names."""
        name = request.data.get('name', '')
        # Prefer the proxy-supplied real IP, fall back to the socket address.
        remote_addr = request.META.get('X-Real-IP') or \
                      request.META.get('REMOTE_ADDR')
        serializer = self.serializer_class(
            data={'name': name, 'remote_addr': remote_addr})
        # Re-registration of a known name is acknowledged, not recreated.
        if get_object_or_none(Terminal, name=name):
            return Response({'msg': 'Already register, Need '
                                    'administrator active it'}, status=200)
        if serializer.is_valid():
            terminal = serializer.save()
            app_user, access_key = terminal.create_related_app_user()
            data = OrderedDict()
            data['terminal'] = copy.deepcopy(serializer.data)
            data['user'] = app_user.to_json()
            data['access_key_id'] = access_key.id
            data['access_key_secret'] = access_key.secret
            return Response(data, status=201)
        else:
            # NOTE(review): serializer.errors is a dict, so ';'.join(...)
            # joins only the field names and drops the error messages —
            # confirm whether full messages were intended here.
            data = {'msg': 'Not valid', 'detail': ';'.join(serializer.errors)}
            return Response(data, status=400)
    def list(self, request, *args, **kwargs):
        # Registration-only endpoint: listing is deliberately hidden.
        return Response('', status=404)
class TerminalViewSet(viewsets.ModelViewSet):
    """CRUD for terminals; creation is reserved for the register view."""
    queryset = Terminal.objects.all()
    serializer_class = TerminalSerializer
    permission_classes = (IsSuperUserOrAppUserOrUserReadonly,)
    def create(self, request, *args, **kwargs):
        # Creation must go through TerminalRegisterView instead.
        return Response({'msg': 'Use register view except that'}, status=404)
    # def destroy(self, request, *args, **kwargs):
    #     instance = self.get_object()
    #     if instance.user is not None:
    #         instance.user.delete()
    #     return super(TerminalViewSet, self).destroy(request, *args, **kwargs)
# Process-local queue of pending tasks, keyed per terminal and delivered
# (then cleared) on the next heartbeat. Example shape below.
tasks = OrderedDict()
# tasks = {1: [{'name': 'kill_proxy', 'proxy_log_id': 23}]}
class TerminalHeatbeatViewSet(viewsets.ModelViewSet):
    """Heartbeat endpoint: records a beat and returns any queued tasks."""
    queryset = TerminalHeatbeat.objects.all()
    serializer_class = TerminalHeatbeatSerializer
    permission_classes = (IsAppUser,)
    def create(self, request, *args, **kwargs):
        """Record a heartbeat and hand the terminal its pending tasks."""
        terminal = request.user.terminal
        TerminalHeatbeat.objects.create(terminal=terminal)
        # Deliver-and-clear the task queue for this terminal.
        # NOTE(review): tasks are read here by terminal.name, but
        # TerminateConnectionView queues them under proxy_log.terminal —
        # the two keys look inconsistent; verify which is intended.
        task = tasks.get(terminal.name)
        tasks[terminal.name] = []
        return Response({'msg': 'Success',
                         'tasks': task},
                        status=201)
class TerminateConnectionView(APIView):
    """Queue 'kill_proxy' tasks so terminals terminate given proxy sessions."""
    def post(self, request, *args, **kwargs):
        """Accept one task object or a list; queue each for its terminal."""
        if isinstance(request.data, dict):
            data = [request.data]
        else:
            data = request.data
        for d in data:
            proxy_log_id = d.get('proxy_log_id')
            proxy_log = get_object_or_404(ProxyLog, id=proxy_log_id)
            terminal_id = proxy_log.terminal
            # NOTE(review): queued under proxy_log.terminal, while the
            # heartbeat view reads the queue by terminal.name — confirm
            # that both resolve to the same key.
            if terminal_id in tasks:
                tasks[terminal_id].append({'name': 'kill_proxy',
                                           'proxy_log_id': proxy_log_id})
            else:
                tasks[terminal_id] = [{'name': 'kill_proxy',
                                       'proxy_log_id': proxy_log_id}]
        return Response({'msg': 'get it'})
|
choldrim/jumpserver
|
apps/applications/api.py
|
Python
|
gpl-2.0
| 3,907
|
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 13 21:37:43 2019
@author: CHaithcock
"""
import RHState
s1 = RHState.RHState(273852143882168472463624642887680, 13)
'''
array([[6, 6, 0, 0, 4, 0],
[0, 0, 0, 0, 4, 0],
[0, 6, 6, 0, 0, 0],
[0, 0, 0, 6, 6, 0],
[4, 0, 0, 0, 0, 0],
[4, 0, 0, 0, 0, 0]])
'''
s2 = RHState.RHState(619161633702689745841553664,14)
'''
array([[0, 0, 0, 0, 0, 0],
[4, 0, 0, 0, 5, 0],
[4, 4, 6, 6, 5, 5],
[0, 4, 0, 0, 5, 5],
[4, 0, 0, 4, 0, 5],
[4, 0, 0, 4, 0, 0]])
'''
s3 = RHState.RHState(162259973170842939159871940984864,14)
'''
array([[4, 0, 0, 0, 0, 0],
[4, 4, 0, 0, 0, 0],
[0, 4, 6, 6, 0, 4],
[0, 0, 0, 4, 0, 4],
[0, 0, 0, 4, 4, 0],
[0, 0, 0, 0, 4, 0]])
'''
s4 = RHState.RHState(202824966466489499896703589024069,14)
'''
array([[5, 0, 0, 0, 0, 0],
[5, 5, 0, 0, 0, 0],
[5, 5, 6, 6, 0, 0],
[0, 5, 0, 5, 0, 5],
[0, 0, 0, 5, 0, 5],
[0, 0, 0, 5, 0, 5]])
'''
s5 = RHState.RHState(7782220156096217088,15)
'''
array([[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 6, 6, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
'''
s6 = RHState.RHState(34226566206169976060567082762240,15)
'''
array([[0, 6, 6, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 6, 6, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
'''
s7 = RHState.RHState(7782220163343974400,15)
'''
array([[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 6, 6, 0],
[0, 0, 0, 0, 0, 0],
[0, 6, 6, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
'''
|
crhaithcock/RushHour
|
RHGraph/RHLibrary.py
|
Python
|
cc0-1.0
| 1,692
|
import unittest, doctest, copy
from logSort import logSort
from pivotArray import PartiallySortedArray, maxBubblePass, minBubblePass, preorder
class PartiallySortedArrayTest(unittest.TestCase):
    """Basic tests for algorithms computing prefix free codes.

    Exercises the PartiallySortedArray primitives: max/min bubble passes,
    the preorder partial sort, partial sums, range sums, rank queries,
    pivot-flag bookkeeping and three-way partitioning by value.
    """
    def testMaxBubblePassOnSortedArray(self):
        """Test Max bubble pass on sorted array.
        """
        W = [1,2,3,4,5,6,7,8]
        T = PartiallySortedArray(W)
        T.pivot = [1]*len(T.values)
        T.maxBubblePass()
        # An already-sorted array is left untouched and every pivot flag survives.
        self.assertEqual(T.values,W)
        self.assertEqual(T.pivot,[1,1,1,1,1,1,1,1])
    def testMaxBubblePassOnInvertedArray(self):
        """Test Max bubble pass on inverted array.
        """
        W = [8,7,6,5,4,3,2,1]
        T = PartiallySortedArray(W)
        T.pivot = [1]*len(T.values)
        T.maxBubblePass()
        # The maximum bubbles to the last slot; only that slot keeps its flag.
        self.assertEqual(T.values,[7,6,5,4,3,2,1,8])
        self.assertEqual(T.pivot,[0,0,0,0,0,0,0,1])
    def testMaxBubblePassOnShuffledArray(self):
        """Test Max bubble pass on Shuffled array.
        """
        T = PartiallySortedArray([4,3,2,1,5,8,7,6])
        T.pivot = [1]*len(T.values) # Testing max Bubble Pass form pivots set to 1.
        T.maxBubblePass()
        self.assertEqual(T.values,[3,2,1,4,5,7,6,8])
        self.assertEqual(T.pivot,[0,0,0,1,1,0,0,1])
    def testMinBubblePassOnSortedArray(self):
        """Test min bubble pass on sorted array.
        """
        W = [1,2,3,4,5,6,7,8]
        T = PartiallySortedArray(W)
        T.pivot = [1]*len(T.values) # Testing min buble pass from pivots set to 1
        T.minBubblePass()
        self.assertEqual(T.values,W)
        self.assertEqual(T.pivot,[1,1,1,1,1,1,1,1])
    def testMinBubblePassOnInvertedArray(self):
        """Test min bubble pass on inverted array.
        """
        W = [8,7,6,5,4,3,2,1]
        T = PartiallySortedArray(W)
        T.pivot = [1]*len(T.values) # Testing min buble pass from pivots set to 1
        T.minBubblePass()
        # The minimum bubbles to the first slot; only that slot keeps its flag.
        self.assertEqual(T.values,[1,8,7,6,5,4,3,2])
        self.assertEqual(T.pivot,[1,0,0,0,0,0,0,0])
    def testMinBubblePassOnShuffledArray(self):
        """Test min bubble pass on Shuffled array.
        """
        T = PartiallySortedArray([4,3,2,1,5,8,7,6])
        T.pivot = [1]*len(T.values) # Testing min buble pass from pivots set to 1
        T.minBubblePass()
        self.assertEqual(T.values,[1,4,3,2,5,6,8,7])
        self.assertEqual(T.pivot,[1,0,0,0,1,1,0,0])
    def testPreorderOnSmallShuffledArray(self):
        """Test preorder pass on a small Shuffled array with one point already in place.
        """
        T = PartiallySortedArray([4,3,2,1,5,8,7,6])
        T.preorder()
        # preorder fully sorts this small input and marks every position as a pivot.
        self.assertEqual(T.values,[1,2,3,4,5,6,7,8])
        self.assertEqual(T.pivot,[1, 1, 1, 1, 1, 1, 1, 1])
    def testPreorderOnLargeInvertedArray(self):
        """Test preorder pass on a large inverted array.
        """
        T = PartiallySortedArray([ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
        T.preorder()
        # Only a partial sort: the tail [14..10] block remains unsorted between pivots.
        self.assertEqual(T.values,[1, 2, 3, 4, 5, 6, 7, 8, 9, 14, 13, 12, 11, 10, 15])
        self.assertEqual(T.pivot,[1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1])
    def testPreorderOnTwoInvertedArraysConcatenated(self):
        """Test preorder pass on two inverted arrays concatenated.
        """
        T = PartiallySortedArray([ 7, 6, 5, 4, 3, 2, 1, 8, 15, 14, 13, 12, 11, 10, 9])
        T.preorder()
        self.assertEqual(T.values,[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 14, 13, 12, 11, 15])
        self.assertEqual(T.pivot,[1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1])
    def testUpdatePartialSum(self):
        # Updating partial sums over the whole array of ones yields 1..8.
        W = [1]*8
        A = PartiallySortedArray(W)
        A.updatePartialSum(0,len(W))
        self.assertEqual(A.partialSums,[1,2,3,4,5,6,7,8])
    def testRangeSum(self):
        # Sum of three ones over the half-open range [1,4).
        W = [1]*8
        A = PartiallySortedArray(W)
        self.assertEqual(A.rangeSum(1,4),3)
    def testPartialSumOnOrderedArray(self):
        W = [1]*8
        A = PartiallySortedArray(W)
        self.assertEqual(A.partialSum(1),1)
    # def testPartialSumOnDisorderedArray(self):
    #     W = [9,8,7,6,5,4,3,2,1]
    #     A = PartiallySortedArray(W)
    #     self.assertEqual(A.partialSum(2),3)
    #     print A.partialSums
    # #     self.assertEqual(A.rangeSum(4,5),9)
    # def testSelectOnFirstValue(self):
    #     T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
    #     self.assertEqual(T.select(0),1)
    #     self.assertEqual(T.values,[1,70,60,50,40,30,30,30,20,10])
    #     self.assertEqual(T.pivot,[0]*9+[1])
    # def testSelectOnLastValue(self):
    #     T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
    #     self.assertEqual(T.select(9),70)
    #     self.assertEqual(T.values,[60,50,40,30,30,30,20,10,1,70])
    #     self.assertEqual(T.pivot,[0]*9+[1])
    # def testSelectOnLargeNumbersOnRepeatedCentralValue(self):
    #     T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
    #     self.assertEqual(T.select(4),30)
    #     self.assertEqual(T.values,[20, 10, 1, 30, 30, 30, 70, 60, 50, 40])
    #     self.assertEqual(T.pivot,[0, 0, 0, 1, 1, 1, 0, 0, 0, 0])
    # def testSelectOnSmallNumbers(self):
    #     W = [9,8,7,6,5,4,3,2,1,0]
    #     A = PartiallySortedArray(W)
    #     self.assertEqual(A.select(0),0)
    #     self.assertEqual(A.values,[0,9,8,7,6,5,4,3,2,1])
    def testRankOnLastElement(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        # rank() also rearranges values as a side effect of partitioning.
        self.assertEqual(T.rank(60),9)
        self.assertEqual(T.values,[60,50,40,30,30,30,20,10,1,70])
    def testRankOnFirstElement(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        self.assertEqual(T.rank(1),0)
        self.assertEqual(T.values,[1,70,60,50,40,30,30,30,20,10])
    def testRankOnMiddle(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        # With duplicates, rank(30) reports the position of the first 30.
        self.assertEqual(T.rank(30),3)
        self.assertEqual(T.values,[20, 10, 1, 30, 30, 30, 70, 60, 50, 40])
        self.assertEqual(T.pivot,[0, 0, 0, 1, 1, 1, 0, 0, 0, 0])
    def testFindLeftAndRightPivots(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        # Force a known pivot layout, then locate the pivots flanking index 5.
        T.values = [10,1,20,40,30,30,30,50,70,60]
        T.pivot = [0, 0, 1, 0, 0, 0, 0, 1, 0, 0]
        self.assertEqual(T.findLeftAndRightPivots(5),(2,7))
    def testPartitionByValueOnLastValue(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        # Three-way partition: (smaller, equal, larger) relative to the value.
        self.assertEqual(T.PartitionByValue(70),([60,50,40,30,30,30,20,10,1],[70],[]))
    def testPartitionByValueOnFirstValue(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        self.assertEqual(T.PartitionByValue(1),([],[1],[70,60,50,40,30,30,30,20,10]))
    def testPartitionByValueOnMiddleValue(self):
        T = PartiallySortedArray([70,60,50,40,30,30,30,20,10,1])
        self.assertEqual(T.PartitionByValue(30),([20, 10, 1], [30, 30, 30], [70, 60, 50, 40]))
    def testInitialize(self):
        # The constructor must keep the input order of values.
        W = [1,2,3,4,5,6,7,8]
        T = PartiallySortedArray(W)
        self.assertEqual(T.values,W)
def main():
    """Run the unittest suite via its command-line runner."""
    unittest.main()

if __name__ == '__main__':
    # Run the module's doctests first, then the unittest suite.
    doctest.testmod()
    main()
|
jyby/DDSRankSelectInMultisets
|
Implementations/Python/pivotArray.test.py
|
Python
|
gpl-3.0
| 7,221
|
from orders import models
from orders.models import Surcharge
# Multiplier applying New Zealand's 15% Goods and Services Tax.
NZ_GST = 1.15

def add_gst(amount, rate=NZ_GST):
    """Return *amount* with GST applied.

    Args:
        amount: a numeric value (or numeric string); coerced with float().
        rate: tax multiplier to apply. Defaults to the NZ GST rate so
            existing callers are unaffected; other jurisdictions can pass
            their own multiplier.

    Returns:
        The GST-inclusive amount as a float.
    """
    return float(amount) * rate
def order_total_incl_gst(ingredients, quantities):
    """Return the GST-inclusive total for an order.

    Pairs each ingredient with its quantity, sums the line totals,
    applies GST and finally adds the flat per-order surcharge.
    """
    subtotal = sum(
        float(ingredient.unit_cost_excl_gst_incl_surcharge) * quantity
        for ingredient, quantity in zip(ingredients, quantities)
    )
    return add_gst(subtotal) + Surcharge.get_order_surcharge()
def get_ingredient(name):
    """Look up an ingredient by name, trying hops before grains.

    Raises models.Grain.DoesNotExist when the name matches neither model.
    """
    try:
        return models.Hop.objects.get(name=name)
    except models.Hop.DoesNotExist:
        pass
    return models.Grain.objects.get(name=name)
|
gkampjes/ucbc
|
orders/utils.py
|
Python
|
mit
| 584
|
# coding:utf-8
"""
Use this module to write functional tests for the view-functions, only!
"""
import os
import unittest
from django_webtest import WebTest
from django.core.urlresolvers import reverse
from django.core import mail
from django.test import TestCase
from journalmanager.tests import modelfactories
from journalmanager import forms
from journalmanager import models
from scielomanager.utils.modelmanagers.helpers import (
_patch_userrequestcontextfinder_settings_setup,
_patch_userrequestcontextfinder_settings_teardown,
)
# Pre-computed Django 'sha1' password hash -- presumably for the literal
# password '123' (per the name); kept as a fixture so tests skip the hasher.
HASH_FOR_123 = 'sha1$93d45$5f366b56ce0444bfea0f5634c7ce8248508c9799'
def _makePermission(perm, model, app_label='journalmanager'):
    """
    Retrieves a Permission according to the given model and app_label.
    """
    # Local imports keep these Django lookups out of module import time and
    # avoid shadowing the module-level ``models`` import.
    from django.contrib.contenttypes import models as contenttypes_models
    from django.contrib.auth import models as auth_models

    content_type = contenttypes_models.ContentType.objects.get(
        model=model, app_label=app_label)
    return auth_models.Permission.objects.get(
        codename=perm, content_type=content_type)
def _makeUseLicense():
    """Create and persist a UseLicense fixture with the fixed code 'TEST'.

    Returns the saved instance so callers can reference it if needed;
    existing callers that ignore the return value are unaffected.
    """
    ul = models.UseLicense(license_code='TEST')
    ul.save()
    return ul
class CollectionFormTests(WebTest):
    """Functional tests for the collection add/edit form."""
    def setUp(self):
        # Most tests need an active user who manages a collection.
        self.user = modelfactories.UserFactory(is_active=True)
        self.collection = modelfactories.CollectionFactory.create()
        self.collection.add_user(self.user, is_manager=True)
    def test_access_without_permission(self):
        """
        Asserts that authenticated users without the required permissions
        are unable to access the form. They must be redirected to a page
        with information about their lack of permissions.
        """
        collection = modelfactories.CollectionFactory.create()
        collection.add_user(self.user, is_manager=False)
        response = self.app.get(reverse('collection.edit', args=[collection.pk]),
            user=self.user).follow()
        response.mustcontain('not authorized to access')
        self.assertTemplateUsed(response, 'accounts/unauthorized.html')
    def test_POST_workflow_with_valid_formdata(self):
        """
        When a valid form is submitted, the user is redirected to
        the index page.
        In order to take this action, the user needs the following
        permissions: ``journalmanager.change_collection``.
        """
        perm1 = _makePermission(perm='change_collection', model='collection')
        self.user.user_permissions.add(perm1)
        form = self.app.get(reverse('collection.edit', args=[self.collection.pk]),
            user=self.user).forms['collection-form']
        form['collection-name'] = 'Brasil'
        form['collection-url'] = 'http://www.scielo.br'
        form['collection-country'] = 'Brasil'
        form['collection-address'] = 'Rua Machado Bittencourt'
        form['collection-address_number'] = '430'
        form['collection-email'] = 'scielo@scielo.org'
        response = form.submit().follow()
        self.assertTemplateUsed(response,
            'journalmanager/add_collection.html')
        response.mustcontain('Saved')
    def test_POST_workflow_with_invalid_formdata(self):
        """
        When an invalid form is submitted, no action is taken, the
        form is rendered again and an alert is shown with the message
        ``There are some errors or missing data``.
        """
        perm = _makePermission(perm='change_collection', model='collection')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('collection.edit', args=[self.collection.pk]),
            user=self.user).forms['collection-form']
        # Blank out every required field to force validation errors.
        form['collection-name'] = ''
        form['collection-url'] = ''
        form['collection-country'] = ''
        form['collection-address'] = ''
        form['collection-address_number'] = ''
        form['collection-email'] = ''
        response = form.submit()
        response.mustcontain('There are some errors or missing data')
    def test_form_action_must_be_empty(self):
        """
        Asserts that the action attribute of the section form is
        empty. This is needed because the same form is used to add
        a new or edit an existing entry.
        """
        perm = _makePermission(perm='change_collection', model='collection')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('collection.edit', args=[self.collection.pk]),
            user=self.user).forms['collection-form']
        self.assertEqual(form.action, '')
    def test_form_method_must_be_post(self):
        """
        Asserts that the method attribute of the section form is
        ``POST``.
        """
        perm = _makePermission(perm='change_collection', model='collection')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('collection.edit', args=[self.collection.pk]),
            user=self.user).forms['collection-form']
        self.assertEqual(form.method.lower(), 'post')
    def test_form_enctype_must_be_multipart_formdata(self):
        """
        Asserts that the enctype attribute of the section form is
        ``multipart/form-data``.
        """
        perm = _makePermission(perm='change_collection', model='collection')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('collection.edit', args=[self.collection.pk]),
            user=self.user).forms['collection-form']
        self.assertEqual(form.enctype.lower(), 'multipart/form-data')
class SectionFormTests(WebTest):
    """Functional tests for the journal section add/edit form."""
    def setUp(self):
        # Most tests need an active user who manages a collection.
        self.user = modelfactories.UserFactory(is_active=True)
        self.collection = modelfactories.CollectionFactory.create()
        self.collection.add_user(self.user, is_manager=True)
    def test_access_without_permission(self):
        """
        Asserts that authenticated users without the required permissions
        are unable to access the form. They must be redirected to a page
        with information about their lack of permissions.
        """
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        response = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).follow()
        response.mustcontain('not authorized to access')
        self.assertTemplateUsed(response, 'accounts/unauthorized.html')
    def test_basic_structure(self):
        """
        Just to make sure that the required hidden fields are all
        present.
        All the management fields from inlineformsets used in this
        form should be part of this test.
        """
        perm = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user)
        self.assertTemplateUsed(form, 'journalmanager/add_section.html')
        form.mustcontain('section-form',
                         'csrfmiddlewaretoken',
                         'titles-TOTAL_FORMS',
                         'titles-INITIAL_FORMS',
                         'titles-MAX_NUM_FORMS',
                        )
    def test_POST_workflow_with_valid_formdata(self):
        """
        When a valid form is submitted, the user is redirected to
        the section's list and the new section must be part
        of the list.
        In order to take this action, the user needs the following
        permissions: ``journalmanager.change_section`` and
        ``journalmanager.list_section``.
        """
        perm1 = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm1)
        perm2 = _makePermission(perm='list_section', model='section')
        self.user.user_permissions.add(perm2)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        language = modelfactories.LanguageFactory.create(iso_code='en',
            name='english')
        journal.languages.add(language)
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).forms['section-form']
        form['titles-0-title'] = 'Original Article'
        form.set('titles-0-language', language.pk)
        response = form.submit().follow()
        self.assertTemplateUsed(response,
            'journalmanager/section_list.html')
        response.mustcontain('Original Article')
    def test_POST_workflow_with_invalid_formdata(self):
        """
        When an invalid form is submitted, no action is taken, the
        form is rendered again and an alert is shown with the message
        ``There are some errors or missing data``.
        """
        perm = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        language = modelfactories.LanguageFactory.create(iso_code='en',
            name='english')
        journal.languages.add(language)
        # Submit the form without filling any field to trigger validation.
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).forms['section-form']
        response = form.submit()
        response.mustcontain('There are some errors or missing data')
    def test_POST_workflow_with_exist_title_on_the_same_journal(self):
        """
        Asserts that duplicates are allowed
        """
        perm1 = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm1)
        perm2 = _makePermission(perm='list_section', model='section')
        self.user.user_permissions.add(perm2)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        language = modelfactories.LanguageFactory.create(iso_code='en',
            name='english')
        journal.languages.add(language)
        # Pre-create a section carrying the same title we submit below.
        section = modelfactories.SectionFactory(journal=journal)
        section.add_title('Original Article', language=language)
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).forms['section-form']
        form['titles-0-title'] = 'Original Article'
        form.set('titles-0-language', language.pk)
        response = form.submit().follow()
        self.assertTemplateUsed(response,
            'journalmanager/section_list.html')
    def test_section_must_allow_new_title_translations(self):
        """
        Asserts that is possible to create new title translations to
        existing Sections.
        """
        perm1 = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm1)
        perm2 = _makePermission(perm='list_section', model='section')
        self.user.user_permissions.add(perm2)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        language = modelfactories.LanguageFactory.create(iso_code='en',
            name='english')
        language2 = modelfactories.LanguageFactory.create(iso_code='pt',
            name='portuguese')
        journal.languages.add(language)
        journal.languages.add(language2)
        section = modelfactories.SectionFactory(journal=journal)
        section.add_title('Original Article', language=language)
        # Add a second translation through the edit form's next inline row.
        form = self.app.get(reverse('section.edit',
            args=[journal.pk, section.pk]), user=self.user).forms['section-form']
        form['titles-1-title'] = 'Artigo Original'
        form.set('titles-1-language', language2.pk)
        response = form.submit().follow()
        self.assertTemplateUsed(response,
            'journalmanager/section_list.html')
        response.mustcontain('Artigo Original')
        response.mustcontain('Original Article')
    def test_section_translations_not_based_on_the_journal_languages(self):
        """
        Section translations are no more restricted to the languages the journal
        publishes its contents. See:
        https://github.com/scieloorg/SciELO-Manager/issues/502
        """
        perm1 = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm1)
        perm2 = _makePermission(perm='list_section', model='section')
        self.user.user_permissions.add(perm2)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        language = modelfactories.LanguageFactory.create(iso_code='en',
            name='english')
        language2 = modelfactories.LanguageFactory.create(iso_code='pt',
            name='portuguese')
        # Only 'en' is a journal language; selecting 'pt' must still work.
        journal.languages.add(language)
        form = self.app.get(reverse('section.add',
            args=[journal.pk]), user=self.user).forms['section-form']
        form['titles-0-title'] = 'Artigo Original'
        self.assertIsNone(form.set('titles-0-language', language2.pk))
    def test_form_enctype_must_be_urlencoded(self):
        """
        Asserts that the enctype attribute of the section form is
        ``application/x-www-form-urlencoded``
        """
        perm = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).forms['section-form']
        self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
    def test_form_action_must_be_empty(self):
        """
        Asserts that the action attribute of the section form is
        empty. This is needed because the same form is used to add
        a new or edit an existing entry.
        """
        perm = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).forms['section-form']
        self.assertEqual(form.action, '')
    def test_form_method_must_be_post(self):
        """
        Asserts that the method attribute of the section form is
        ``POST``.
        """
        perm = _makePermission(perm='change_section', model='section')
        self.user.user_permissions.add(perm)
        journal = modelfactories.JournalFactory.create()
        journal.join(self.collection, self.user)
        form = self.app.get(reverse('section.add', args=[journal.pk]),
            user=self.user).forms['section-form']
        self.assertEqual(form.method.lower(), 'post')
class UserFormTests(WebTest):
    """Functional tests for the user add/edit form."""
    def setUp(self):
        # Most tests need an active user who manages a collection.
        self.user = modelfactories.UserFactory(is_active=True)
        self.collection = modelfactories.CollectionFactory.create()
        self.collection.add_user(self.user, is_manager=True)
    def test_access_without_permission(self):
        """
        Asserts that authenticated users without the required permissions
        are unable to access the form. They must be redirected to a page
        with information about their lack of permissions.
        """
        response = self.app.get(reverse('user.add'),
            user=self.user).follow()
        response.mustcontain('not authorized to access')
        self.assertTemplateUsed(response, 'accounts/unauthorized.html')
    def test_access_without_being_manager(self):
        """
        Asserts that authenticated users that are not managers of the
        collection are unable to access the form. They must be redirected
        to a page with information about their lack of permissions.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        # adding another collection the user lacks manager privileges
        other_collection = modelfactories.CollectionFactory.create()
        other_collection.add_user(self.user, is_manager=False)
        other_collection.make_default_to_user(self.user)
        response = self.app.get(reverse('user.add'),
            user=self.user).follow()
        response.mustcontain('not authorized to access')
        self.assertTemplateUsed(response, 'accounts/unauthorized.html')
    def test_basic_structure(self):
        """
        Just to make sure that the required hidden fields are all
        present.
        All the management fields from inlineformsets used in this
        form should be part of this test.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        page = self.app.get(reverse('user.add'), user=self.user)
        self.assertTemplateUsed(page, 'journalmanager/add_user.html')
        page.mustcontain('user-form',
                         'csrfmiddlewaretoken',
                         'usercollections-TOTAL_FORMS',
                         'usercollections-INITIAL_FORMS',
                         'usercollections-MAX_NUM_FORMS',
                        )
    def test_POST_workflow_with_valid_formdata(self):
        """
        When a valid form is submitted, the user is redirected to
        the user's list and the new user must be part
        of the list.
        An email must be sent to the new user.
        In order to take this action, the user needs the following
        permissions: ``journalmanager.change_user``.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        form['user-username'] = 'bazz'
        form['user-first_name'] = 'foo'
        form['user-last_name'] = 'bar'
        form['userprofile-0-email'] = 'bazz@spam.org'
        # form.set('asmSelect0', '1')          # groups
        form.set('usercollections-0-collection', self.collection.pk)
        response = form.submit().follow()
        self.assertTemplateUsed(response, 'journalmanager/user_list.html')
        response.mustcontain('bazz', 'bazz@spam.org')
        # check if basic state has been set
        self.assertTrue(response.context['user'].user_collection.get(
            pk=self.collection.pk))
    def test_new_users_must_receive_an_email_to_define_their_password(self):
        """
        Creating a user through the form must trigger a single email to the
        new user's address (so they can define their password).
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        form['user-username'] = 'bazz'
        form['user-first_name'] = 'foo'
        form['user-last_name'] = 'bar'
        form['userprofile-0-email'] = 'bazz@spam.org'
        form.set('usercollections-0-collection', self.collection.pk)
        response = form.submit().follow()
        # check if an email has been sent to the new user
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('bazz@spam.org', mail.outbox[0].recipients())
    def test_emails_are_not_sent_when_users_data_are_modified(self):
        """
        Editing an existing user must not trigger any notification email.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.edit', args=[self.user.pk]),
            user=self.user).forms['user-form']
        form['user-username'] = 'bazz'
        form['user-first_name'] = 'foo'
        form['user-last_name'] = 'bar'
        form['userprofile-0-email'] = 'bazz@spam.org'
        form.set('usercollections-0-collection', self.collection.pk)
        response = form.submit().follow()
        # check if the outbox is empty
        self.assertEqual(len(mail.outbox), 0)
    def test_POST_workflow_with_invalid_formdata(self):
        """
        When an invalid form is submitted, no action is taken, the
        form is rendered again and an alert is shown with the message
        ``There are some errors or missing data``.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        # Submit the form without filling any field to trigger validation.
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        response = form.submit()
        response.mustcontain('There are some errors or missing data')
    def test_POST_workflow_with_invalid_formdata_without_collection_add_form(self):
        """
        In order to take this action, the user needs the following
        permissions: ``journalmanager.change_user``.
        The collection is mandatory on user add form.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        form['user-username'] = 'bazz'
        form['user-first_name'] = 'foo'
        form['user-last_name'] = 'bar'
        form['userprofile-0-email'] = 'bazz@spam.org'
        # No collection is selected, so the formset must complain.
        response = form.submit()
        self.assertTemplateUsed(response, 'journalmanager/add_user.html')
        response.mustcontain('Please fill in at least one form')
    def test_POST_workflow_with_invalid_formdata_without_collection_edit_form(self):
        """
        In order to take this action, the user needs the following
        permissions: ``journalmanager.change_user``.
        The collection is mandatory on user edit form.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.edit', args=[self.user.pk]),
            user=self.user).forms['user-form']
        form['user-username'] = 'bazz'
        form['user-first_name'] = 'foo'
        form['user-last_name'] = 'bar'
        form['userprofile-0-email'] = 'bazz@spam.org'
        #Remove the collection
        form.set('usercollections-0-collection', '')
        response = form.submit()
        self.assertTemplateUsed(response, 'journalmanager/add_user.html')
        response.mustcontain('Please fill in at least one form')
    def test_form_enctype_must_be_urlencoded(self):
        """
        Asserts that the enctype attribute of the user form is
        ``application/x-www-form-urlencoded``
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
    def test_form_action_must_be_empty(self):
        """
        Asserts that the action attribute of the user form is
        empty. This is needed because the same form is used to add
        a new or edit an existing entry.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        self.assertEqual(form.action, '')
    def test_form_method_must_be_post(self):
        """
        Asserts that the method attribute of the user form is
        ``POST``.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        self.assertEqual(form.method.lower(), 'post')
    def test_add_users_only_to_managed_collections(self):
        """
        A user can only add users to collections which he is manager.
        In order to take this action, the user needs the following
        permissions: ``journalmanager.change_user``.
        """
        perm = _makePermission(perm='change_user',
            model='user', app_label='auth')
        self.user.user_permissions.add(perm)
        # A collection the user belongs to but does not manage must not be
        # selectable in the form.
        other_collection = modelfactories.CollectionFactory.create()
        other_collection.add_user(self.user)
        form = self.app.get(reverse('user.add'),
            user=self.user).forms['user-form']
        self.assertRaises(ValueError, lambda: form.set('usercollections-0-collection', other_collection.pk))
class JournalFormTests(WebTest):
def setUp(self):
self.user = modelfactories.UserFactory(is_active=True)
self.collection = modelfactories.CollectionFactory.create()
self.collection.add_user(self.user, is_manager=True)
_makeUseLicense()
def test_access_without_permission(self):
"""
Asserts that authenticated users without the required permissions
are unable to access the form. They must be redirected to a page
with informations about their lack of permissions.
"""
response = self.app.get(reverse('journal.add'),
user=self.user).follow()
response.mustcontain('not authorized to access')
self.assertTemplateUsed(response, 'accounts/unauthorized.html')
def test_basic_structure(self):
"""
Just to make sure that the required hidden fields are all
present.
All the management fields from inlineformsets used in this
form should be part of this test.
"""
perm = _makePermission(perm='change_journal',
model='journal',
app_label='journalmanager')
self.user.user_permissions.add(perm)
response = self.app.get(reverse('journal.add'), user=self.user)
self.assertTemplateUsed(response, 'journalmanager/add_journal.html')
response.mustcontain('journal-form',
'csrfmiddlewaretoken',
'title-TOTAL_FORMS',
'title-INITIAL_FORMS',
'title-MAX_NUM_FORMS',
'mission-TOTAL_FORMS',
'mission-INITIAL_FORMS',
'mission-MAX_NUM_FORMS',
)
def test_POST_workflow_with_invalid_formdata(self):
"""
When an invalid form is submited, no action is taken, the
form is rendered again and an alert is shown with the message
``There are some errors or missing data``.
"""
perm = _makePermission(perm='change_journal',
model='journal', app_label='journalmanager')
self.user.user_permissions.add(perm)
sponsor = modelfactories.SponsorFactory.create()
form = self.app.get(reverse('journal.add'), user=self.user).forms['journal-form']
form['journal-sponsor'] = [sponsor.pk]
form['journal-ctrl_vocabulary'] = 'decs'
form['journal-frequency'] = 'Q'
form['journal-final_num'] = ''
form['journal-eletronic_issn'] = '0102-6720'
form['journal-init_vol'] = '1'
form['journal-title'] = u'ABCD. Arquivos Brasileiros de Cirurgia Digestiva (São Paulo)'
form['journal-title_iso'] = u'ABCD. Arquivos B. de C. D. (São Paulo)'
form['journal-short_title'] = u'ABCD.(São Paulo)'
form['journal-editorial_standard'] = 'vancouv'
form['journal-scielo_issn'] = 'print'
form['journal-init_year'] = '1986'
form['journal-acronym'] = 'ABCD'
form['journal-pub_level'] = 'CT'
form['journal-init_num'] = '1'
form['journal-final_vol'] = ''
form['journal-subject_descriptors'] = 'MEDICINA, CIRURGIA, GASTROENTEROLOGIA, GASTROENTEROLOGIA'
form['journal-print_issn'] = '0102-6720'
form['journal-copyrighter'] = 'Texto do copyrighter'
form['journal-publisher_name'] = 'Colégio Brasileiro de Cirurgia Digestiva'
form['journal-publisher_country'] = 'BR'
form['journal-publisher_state'] = 'SP'
form['journal-publication_city'] = 'São Paulo'
form['journal-editor_address'] = 'Av. Brigadeiro Luiz Antonio, 278 - 6° - Salas 10 e 11, 01318-901 \
São Paulo/SP Brasil, Tel.: (11) 3288-8174/3289-0741'
form['journal-editor_email'] = 'cbcd@cbcd.org.br'
response = form.submit()
self.assertTrue('alert alert-error', response.body)
self.assertIn('There are some errors or missing data', response.body)
self.assertTemplateUsed(response, 'journalmanager/add_journal.html')
@unittest.skip('datamodel-ovehaul-v2')
def test_user_add_journal_with_valid_formdata(self):
"""
When a valid form is submited, the user is redirected to
the journal's list and the new user must be part
of the list.
In order to take this action, the user needs the following
permissions: ``journalmanager.change_journal`` and
``journalmanager.list_journal``.
"""
perm_journal_change = _makePermission(perm='change_journal',
model='journal', app_label='journalmanager')
perm_journal_list = _makePermission(perm='list_journal',
model='journal', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_journal_list)
sponsor = modelfactories.SponsorFactory.create()
use_license = modelfactories.UseLicenseFactory.create()
language = modelfactories.LanguageFactory.create()
subject_category = modelfactories.SubjectCategoryFactory.create()
study_area = modelfactories.StudyAreaFactory.create()
form = self.app.get(reverse('journal.add'), user=self.user).forms[1]
form['journal-sponsor'] = [sponsor.pk]
form['journal-study_areas'] = [study_area.pk]
form['journal-ctrl_vocabulary'] = 'decs'
form['journal-frequency'] = 'Q'
form['journal-final_num'] = ''
form['journal-eletronic_issn'] = '0102-6720'
form['journal-init_vol'] = '1'
form['journal-title'] = u'ABCD. Arquivos Brasileiros de Cirurgia Digestiva (São Paulo)'
form['journal-title_iso'] = u'ABCD. Arquivos B. de C. D. (São Paulo)'
form['journal-short_title'] = u'ABCD.(São Paulo)'
form['journal-editorial_standard'] = 'vancouv'
form['journal-scielo_issn'] = 'print'
form['journal-init_year'] = '1986'
form['journal-acronym'] = 'ABCD'
form['journal-pub_level'] = 'CT'
form['journal-init_num'] = '1'
form['journal-final_vol'] = ''
form['journal-subject_descriptors'] = 'MEDICINA, CIRURGIA, GASTROENTEROLOGIA, GASTROENTEROLOGIA'
form['journal-print_issn'] = '0102-6720'
form['journal-copyrighter'] = 'Texto do copyrighter'
form['journal-publisher_name'] = 'Colégio Brasileiro de Cirurgia Digestiva'
form['journal-publisher_country'] = 'BR'
form['journal-publisher_state'] = 'SP'
form['journal-publication_city'] = 'São Paulo'
form['journal-editor_name'] = 'Colégio Brasileiro de Cirurgia Digestiva'
form['journal-editor_address'] = 'Av. Brigadeiro Luiz Antonio, 278 - 6° - Salas 10 e 11'
form['journal-editor_address_city'] = 'São Paulo'
form['journal-editor_address_state'] = 'SP'
form['journal-editor_address_zip'] = '01318-901'
form['journal-editor_address_country'] = 'BR'
form['journal-editor_phone1'] = '(11) 3288-8174'
form['journal-editor_phone2'] = '(11) 3289-0741'
form['journal-editor_email'] = 'cbcd@cbcd.org.br'
form['journal-use_license'] = use_license.pk
form.set('journal-collections', str(self.collection.pk))
form['journal-languages'] = [language.pk]
form['journal-abstract_keyword_languages'] = [language.pk]
form.set('journal-subject_categories', str(subject_category.pk))
form['journal-is_indexed_scie'] = True
form['journal-is_indexed_ssci'] = False
form['journal-is_indexed_aehci'] = True
upload_cover_name = os.path.dirname(__file__) + '/image_test/cover.gif'
uploaded_cover_contents = open(upload_cover_name, "rb").read()
form.set('journal-cover', (upload_cover_name, uploaded_cover_contents))
response = form.submit().follow()
self.assertIn('Saved.', response.body)
self.assertIn('ABCD.(São Paulo)',
response.body)
self.assertTemplateUsed(response, 'journalmanager/journal_dash.html')
def test_form_enctype_must_be_multipart_formdata(self):
"""
Asserts that the enctype attribute of the user form is
``multipart/form-data``
"""
perm_journal_change = _makePermission(perm='change_journal',
model='journal', app_label='journalmanager')
perm_journal_list = _makePermission(perm='list_journal',
model='journal', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_journal_list)
form = self.app.get(reverse('journal.add'), user=self.user).forms[1]
self.assertEqual(form.enctype, 'multipart/form-data')
def test_form_action_must_be_empty(self):
"""
Asserts that the action attribute of the journal form is
empty. This is needed because the same form is used to add
a new or edit an existing entry.
"""
perm_journal_change = _makePermission(perm='change_journal',
model='journal', app_label='journalmanager')
perm_journal_list = _makePermission(perm='list_journal',
model='journal', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_journal_list)
form = self.app.get(reverse('journal.add'), user=self.user).forms[1]
self.assertEqual(form.action, '')
def test_form_method_must_be_post(self):
"""
Asserts that the method attribute of the journal form is
``POST``.
"""
perm_journal_change = _makePermission(perm='change_journal',
model='journal', app_label='journalmanager')
perm_journal_list = _makePermission(perm='list_journal',
model='journal', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_journal_list)
form = self.app.get(reverse('journal.add'), user=self.user).forms[1]
self.assertEqual(form.method.lower(), 'post')
class SponsorFormTests(WebTest):
    """Functional tests for the sponsor add/edit form.

    Each test authenticates ``self.user`` (bound to ``self.collection``
    as a manager) and exercises the ``sponsor.add`` view.
    """
    def setUp(self):
        self.user = modelfactories.UserFactory(is_active=True)
        self.collection = modelfactories.CollectionFactory.create()
        self.collection.add_user(self.user, is_manager=True)
    def test_basic_structure(self):
        """
        Just to make sure that the required hidden fields are all
        present.

        All the management fields from inlineformsets used in this
        form should be part of this test.
        """
        perm = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm)
        page = self.app.get(reverse('sponsor.add'), user=self.user)
        page.mustcontain('sponsor-name', 'sponsor-collections')
        self.assertTemplateUsed(page, 'journalmanager/add_sponsor.html')
    def test_access_without_permission(self):
        """
        Asserts that authenticated users without the required permissions
        are unable to access the form. They must be redirected to a page
        with information about their lack of permissions.
        """
        page = self.app.get(reverse('sponsor.add'), user=self.user).follow()
        self.assertTemplateUsed(page, 'accounts/unauthorized.html')
        page.mustcontain('not authorized to access')
    def test_POST_workflow_with_valid_formdata(self):
        """
        When a valid form is submitted, the user is redirected to
        the sponsor's list and the new sponsor must be part
        of the list.

        In order to take this action, the user needs the following
        permissions: ``journalmanager.add_sponsor`` and
        ``journalmanager.list_sponsor``.
        """
        perm_sponsor_change = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        perm_sponsor_list = _makePermission(perm='list_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm_sponsor_change)
        self.user.user_permissions.add(perm_sponsor_list)
        form = self.app.get(reverse('sponsor.add'), user=self.user).forms['sponsor-form']
        form['sponsor-name'] = u'Fundação de Amparo a Pesquisa do Estado de São Paulo'
        form['sponsor-address'] = u'Av. Professor Lineu Prestes, 338 Cidade Universitária \
Caixa Postal 8105 05508-900 São Paulo SP Brazil Tel. / Fax: +55 11 3091-3047'
        form['sponsor-email'] = 'fapesp@scielo.org'
        form['sponsor-complement'] = ''
        form['sponsor-collections'] = [self.collection.pk]
        response = form.submit().follow()
        self.assertTemplateUsed(response,
            'journalmanager/sponsor_list.html')
        self.assertIn('Saved.', response.body)
        # UTF-8 encoded bytes of the sponsor name created above.
        self.assertIn('Funda\xc3\xa7\xc3\xa3o de Amparo a Pesquisa do Estado de S\xc3\xa3o Paulo', response.body)
    def test_POST_workflow_with_invalid_formdata(self):
        """
        When an invalid form is submitted, no action is taken, the
        form is rendered again and an alert is shown with the message
        ``There are some errors or missing data``.

        The form is invalid because the required ``sponsor-name`` field
        is left unset.
        """
        perm_sponsor_change = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        perm_sponsor_list = _makePermission(perm='list_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm_sponsor_change)
        self.user.user_permissions.add(perm_sponsor_list)
        form = self.app.get(reverse('sponsor.add'), user=self.user).forms['sponsor-form']
        form['sponsor-address'] = u'Av. Professor Lineu Prestes, 338 Cidade Universitária \
Caixa Postal 8105 05508-900 São Paulo SP Brazil Tel. / Fax: +55 11 3091-3047'
        form['sponsor-email'] = 'fapesp@scielo.org'
        form['sponsor-complement'] = ''
        form['sponsor-collections'] = [self.collection.pk]
        response = form.submit()
        # Use assertIn for a clearer failure message, consistent with the
        # other assertions in this class (was ``assertTrue(x in y)``).
        self.assertIn('alert alert-error', response.body)
        self.assertIn('There are some errors or missing data', response.body)
        self.assertTemplateUsed(response, 'journalmanager/add_sponsor.html')
    def test_form_enctype_must_be_urlencoded(self):
        """
        Asserts that the enctype attribute of the sponsor form is
        ``application/x-www-form-urlencoded``
        """
        perm_sponsor_change = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        perm_sponsor_list = _makePermission(perm='list_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm_sponsor_change)
        self.user.user_permissions.add(perm_sponsor_list)
        form = self.app.get(reverse('sponsor.add'), user=self.user).forms['sponsor-form']
        self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
    def test_form_action_must_be_empty(self):
        """
        Asserts that the action attribute of the sponsor form is
        empty. This is needed because the same form is used to add
        a new or edit an existing entry.
        """
        perm_sponsor_change = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        perm_sponsor_list = _makePermission(perm='list_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm_sponsor_change)
        self.user.user_permissions.add(perm_sponsor_list)
        form = self.app.get(reverse('sponsor.add'), user=self.user).forms['sponsor-form']
        self.assertEqual(form.action, '')
    def test_form_method_must_be_post(self):
        """
        Asserts that the method attribute of the sponsor form is
        ``POST``.
        """
        perm_sponsor_change = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        perm_sponsor_list = _makePermission(perm='list_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm_sponsor_change)
        self.user.user_permissions.add(perm_sponsor_list)
        form = self.app.get(reverse('sponsor.add'), user=self.user).forms['sponsor-form']
        self.assertEqual(form.method.lower(), 'post')
    def test_collections_field_must_only_display_collections_the_user_is_bound(self):
        """
        Asserts that the user cannot add a sponsor to a collection
        that he is not related to.
        """
        perm_sponsor_change = _makePermission(perm='add_sponsor',
            model='sponsor', app_label='journalmanager')
        perm_sponsor_list = _makePermission(perm='list_sponsor',
            model='sponsor', app_label='journalmanager')
        self.user.user_permissions.add(perm_sponsor_change)
        self.user.user_permissions.add(perm_sponsor_list)
        another_collection = modelfactories.CollectionFactory.create()
        form = self.app.get(reverse('sponsor.add'), user=self.user).forms['sponsor-form']
        # webtest raises ValueError when the option is not rendered.
        self.assertRaises(ValueError,
            lambda: form.set('sponsor-collections', [another_collection.pk]))
class IssueBaseFormClassTests(unittest.TestCase):
    """Unit tests for ``forms.IssueBaseForm``: field structure and the
    ``save(commit=False)`` / ``save_m2m`` workflow shared by all issue forms.
    """
    def test_basic_structure(self):
        """The base form must expose exactly the expected fields, each of
        the expected django form-field type.
        """
        issue_form = forms.IssueBaseForm()
        from django import forms as dj_forms
        expected = {'section': dj_forms.ModelMultipleChoiceField,
                    'volume': dj_forms.CharField,
                    'publication_start_month': dj_forms.ChoiceField,
                    'publication_end_month': dj_forms.ChoiceField,
                    'publication_year': dj_forms.IntegerField,
                    'is_marked_up': dj_forms.BooleanField,
                    'use_license': dj_forms.ModelChoiceField,
                    'total_documents': dj_forms.IntegerField,
                    'ctrl_vocabulary': dj_forms.ChoiceField,
                    'editorial_standard': dj_forms.ChoiceField,
                    'cover': dj_forms.ImageField,
                    }
        # Same number of fields, same field names, and same field types.
        self.assertEqual(len(expected.keys()), len(issue_form.fields.keys()))
        self.assertEqual(sorted(expected.keys()), sorted(issue_form.fields.keys()))
        self.assertEqual(
            expected,
            {fname: type(field) for fname, field in issue_form.fields.items()}
        )
    def test_save_commit_eq_False(self):
        """``save(commit=False)`` returns an unsaved ``Issue`` that can be
        completed (required ``journal`` FK) and then persisted manually,
        with m2m data applied afterwards via ``save_m2m``.
        """
        from journalmanager import models
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = {
            'section': [section.pk],
            'volume': '1',
            'publication_start_month': '1',
            'publication_end_month': '2',
            'publication_year': '2014',
            'is_marked_up': True,
            'use_license': use_license.pk,
            'total_documents': '10',
            'ctrl_vocabulary': 'nd',
            'editorial_standard': 'iso690',
            'cover': '',
        }
        issue_form = forms.RegularIssueForm(POST,
            params={'journal': journal},
            querysets={
                'section': journal.section_set.all(),
                'use_license': models.UseLicense.objects.all(),
            })
        # commit=False: nothing touches the database yet, so the required
        # journal FK can be filled in before the explicit save().
        issue_model = issue_form.save(commit=False)
        issue_model.journal = journal
        issue_model.save()
        issue_form.save_m2m()
        # The persisted instance must reflect every cleaned POST value.
        self.assertIsInstance(issue_model, models.Issue)
        self.assertTrue(section in issue_model.section.all())
        self.assertEqual(issue_model.volume, u'1')
        self.assertEqual(issue_model.publication_start_month, u'1')
        self.assertEqual(issue_model.publication_end_month, u'2')
        self.assertEqual(issue_model.publication_year, 2014)
        self.assertEqual(issue_model.is_marked_up, True)
        self.assertEqual(issue_model.use_license, use_license)
        self.assertEqual(issue_model.total_documents, 10)
        self.assertEqual(issue_model.ctrl_vocabulary, u'nd')
        self.assertEqual(issue_model.editorial_standard, u'iso690')
        self.assertEqual(issue_model.cover, None)
    def test_save_m2m_while_commit_eq_False(self):
        """When saved with commit=False, the form must grow a ``save_m2m``
        callable so the m2m data can be persisted later.
        """
        from journalmanager import models
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = {
            'section': [section.pk],
            'volume': '1',
            'publication_start_month': '1',
            'publication_end_month': '2',
            'publication_year': '2014',
            'is_marked_up': True,
            'use_license': use_license.pk,
            'total_documents': '10',
            'ctrl_vocabulary': 'nd',
            'editorial_standard': 'iso690',
            'cover': '',
        }
        issue_form = forms.RegularIssueForm(POST,
            params={'journal': journal},
            querysets={
                'section': journal.section_set.all(),
                'use_license': models.UseLicense.objects.all(),
            })
        issue_model = issue_form.save(commit=False)
        self.assertTrue(hasattr(issue_form, 'save_m2m'))
class RegularIssueFormClassTests(unittest.TestCase):
    """Unit tests for ``forms.RegularIssueForm`` validation rules.

    A regular issue is valid when at least one of ``volume`` / ``number``
    is given, and must be unique per (journal, volume, number, year).
    The POST payload and form construction are identical across tests, so
    they are factored into the private helpers below.
    """
    def _make_post(self, section, use_license, **overrides):
        # Baseline valid payload; individual tests override what they probe.
        POST = {
            'section': [section.pk],
            'volume': '1',
            'number': '2',
            'publication_start_month': '1',
            'publication_end_month': '2',
            'publication_year': '2014',
            'is_marked_up': True,
            'use_license': use_license.pk,
            'total_documents': '10',
            'ctrl_vocabulary': 'nd',
            'editorial_standard': 'iso690',
            'cover': '',
        }
        POST.update(overrides)
        return POST
    def _make_form(self, journal, POST, instance=None):
        # Bound form wired to the journal's own section/use_license querysets.
        kwargs = {
            'params': {'journal': journal},
            'querysets': {
                'section': journal.section_set.all(),
                'use_license': models.UseLicense.objects.all(),
            },
        }
        if instance is not None:
            kwargs['instance'] = instance
        return forms.RegularIssueForm(POST, **kwargs)
    def test_journal_kwargs_is_required(self):
        """Instantiating without the ``params`` kwarg must raise TypeError."""
        self.assertRaises(TypeError, lambda: forms.RegularIssueForm())
    def test_inheritance(self):
        # By checking the inheritance, we assume that all base fields are present.
        self.assertTrue(issubclass(forms.RegularIssueForm, forms.IssueBaseForm))
    def test_basic_structure(self):
        """The form adds a ``number`` CharField on top of the base fields."""
        from django import forms as dj_forms
        journal = modelfactories.JournalFactory()
        issue_form = forms.RegularIssueForm(params={'journal': journal})
        self.assertEqual(dj_forms.CharField, type(issue_form.fields['number']))
    def test_clean(self):
        """A payload with both volume and number validates."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license)
        self.assertTrue(self._make_form(journal, POST).is_valid())
    def test_clean_volume_only(self):
        """A payload with volume but no number validates."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license, volume='1', number='')
        self.assertTrue(self._make_form(journal, POST).is_valid())
    def test_clean_number_only(self):
        """A payload with number but no volume validates."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license, volume='', number='1')
        self.assertTrue(self._make_form(journal, POST).is_valid())
    def test_clean_fails_if_missing_volume_and_number(self):
        """At least one of volume/number is required."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license, volume='', number='')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fails_if_issue_is_duplicated(self):
        """A payload matching an existing regular issue is rejected."""
        issue = modelfactories.IssueFactory(type='regular')
        journal = issue.journal
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               publication_year=issue.publication_year)
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fails_if_duplicated_issue(self):
        """Validation is also rejected when the duplicate already exists twice."""
        journal = modelfactories.JournalFactory()
        issue = modelfactories.IssueFactory(type='regular', volume='1',
            number='2', publication_year=2013, journal=journal)
        # A second identical issue pre-exists in the database.
        modelfactories.IssueFactory(type='regular', volume='1',
            number='2', publication_year=2013, journal=journal)
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               publication_year=issue.publication_year)
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_on_edit(self):
        """Editing an issue with its own identifiers must not trip the
        duplicate check (the instance matches itself).
        """
        journal = modelfactories.JournalFactory()
        issue = modelfactories.IssueFactory(type='regular', volume='1',
            number='2', publication_year=2013, journal=journal)
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               publication_start_month='2',
                               publication_year=issue.publication_year)
        self.assertTrue(self._make_form(journal, POST, instance=issue).is_valid())
class SupplementIssueFormClassTests(unittest.TestCase):
    """Unit tests for ``forms.SupplementIssueForm`` validation rules.

    A supplement is bound either to a volume (``suppl_type == 'volume'``:
    ``volume`` required, ``number`` must be empty) or to a number
    (``suppl_type == 'number'``: ``number`` required, ``volume`` must be
    empty), and must be unique within the journal. The repeated POST
    payload and form construction are factored into private helpers.
    """
    _SUPPL_TEXT = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod'
    def _make_post(self, section, use_license, **overrides):
        # Baseline valid volume-bound payload; tests override what they probe.
        POST = {
            'section': [section.pk],
            'suppl_text': self._SUPPL_TEXT,
            'suppl_type': 'volume',
            'volume': '1',
            'number': '',
            'publication_start_month': '1',
            'publication_end_month': '2',
            'publication_year': '2014',
            'is_marked_up': True,
            'use_license': use_license.pk,
            'total_documents': '10',
            'ctrl_vocabulary': 'nd',
            'editorial_standard': 'iso690',
            'cover': '',
        }
        POST.update(overrides)
        return POST
    def _make_form(self, journal, POST, instance=None):
        # Bound form wired to the journal's own section/use_license querysets.
        kwargs = {
            'params': {'journal': journal},
            'querysets': {
                'section': journal.section_set.all(),
                'use_license': models.UseLicense.objects.all(),
            },
        }
        if instance is not None:
            kwargs['instance'] = instance
        return forms.SupplementIssueForm(POST, **kwargs)
    def test_journal_kwargs_is_required(self):
        """Instantiating without the ``params`` kwarg must raise TypeError."""
        self.assertRaises(TypeError, lambda: forms.SupplementIssueForm())
    def test_inheritance(self):
        # By checking the inheritance, we assume that all base fields are present.
        self.assertTrue(issubclass(forms.SupplementIssueForm, forms.IssueBaseForm))
    def test_basic_structure(self):
        """The form adds ``number`` and ``suppl_type`` on top of the base fields."""
        from django import forms as dj_forms
        journal = modelfactories.JournalFactory()
        issue_form = forms.SupplementIssueForm(params={'journal': journal})
        self.assertEqual(dj_forms.CharField, type(issue_form.fields['number']))
        self.assertEqual(dj_forms.ChoiceField, type(issue_form.fields['suppl_type']))
    def test_clean_for_volume_type(self):
        """suppl_type=volume with volume set and number empty validates."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='volume', volume='1', number='')
        self.assertTrue(self._make_form(journal, POST).is_valid())
    def test_clean_for_type_number(self):
        """suppl_type=number with number set and volume empty validates."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='number', volume='', number='1')
        self.assertTrue(self._make_form(journal, POST).is_valid())
    def test_clean_fail_for_type_number_with_both_volume_and_number(self):
        """suppl_type=number must reject a payload that also carries a volume."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='number', volume='1', number='1')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fail_for_type_volume_with_both_volume_and_number(self):
        """suppl_type=volume must reject a payload that also carries a number."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='volume', volume='1', number='1')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fail_for_type_number_without_number(self):
        """suppl_type=number with an empty number must not validate."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='number', volume='1', number='')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fail_for_type_volume_without_volume(self):
        """suppl_type=volume with an empty volume must not validate.

        BUG FIX: the original payload used ``suppl_type='number'`` with
        ``volume='1', number=''`` -- a byte-for-byte duplicate of
        ``test_clean_fail_for_type_number_without_number`` that never
        exercised the volume branch. It now mirrors that test for the
        volume case.
        """
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='volume', volume='', number='1')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fail_for_type_number_without_number_and_without_volume(self):
        """suppl_type=number with both identifiers empty must not validate."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='number', volume='', number='')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fail_for_type_volume_without_number_and_without_volume(self):
        """suppl_type=volume with both identifiers empty must not validate."""
        journal = modelfactories.JournalFactory()
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               suppl_type='volume', volume='', number='')
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fails_for_type_number_if_duplicated_issue(self):
        """Rejected when identical number-bound supplements already exist."""
        journal = modelfactories.JournalFactory()
        issue = modelfactories.IssueFactory(volume='',
                                            number='1',
                                            suppl_text='1',
                                            publication_year=2013,
                                            journal=journal,
                                            type='supplement')
        # A second identical supplement pre-exists in the database.
        modelfactories.IssueFactory(volume='',
                                    number='1',
                                    suppl_text='1',
                                    publication_year=2013,
                                    journal=journal,
                                    type='supplement')
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               suppl_type='number',
                               suppl_text=issue.suppl_text,
                               publication_year=issue.publication_year)
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fails_for_type_volume_if_duplicated_issue(self):
        """Rejected when identical volume-bound supplements already exist."""
        journal = modelfactories.JournalFactory()
        issue = modelfactories.IssueFactory(volume='1',
                                            number='',
                                            suppl_text='1',
                                            publication_year=2013,
                                            journal=journal,
                                            type='supplement')
        # A second identical supplement pre-exists in the database.
        modelfactories.IssueFactory(volume='1',
                                    number='',
                                    suppl_text='1',
                                    publication_year=2013,
                                    journal=journal,
                                    type='supplement')
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               suppl_type='volume',
                               suppl_text=issue.suppl_text,
                               publication_year=issue.publication_year)
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fails_for_type_number_if_issue_already_exist(self):
        """A payload matching an existing number-bound supplement is rejected."""
        issue = modelfactories.IssueFactory(number='1', volume='', type='supplement')
        journal = issue.journal
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               suppl_type=issue.suppl_type,
                               suppl_text=issue.suppl_text,
                               publication_year=issue.publication_year)
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_fails_for_type_volume_if_issue_already_exist(self):
        """A payload matching an existing volume-bound supplement is rejected."""
        issue = modelfactories.IssueFactory(number='', volume='1', type='supplement')
        journal = issue.journal
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               suppl_type=issue.suppl_type,
                               suppl_text=issue.suppl_text,
                               publication_year=issue.publication_year)
        self.assertFalse(self._make_form(journal, POST).is_valid())
    def test_clean_for_type_number_on_edit(self):
        """Editing a number-bound supplement must not trip the duplicate
        check (the instance matches itself).
        """
        journal = modelfactories.JournalFactory()
        issue = modelfactories.IssueFactory(volume='',
                                            number='2',
                                            suppl_text='1',
                                            publication_year=2013,
                                            journal=journal,
                                            type='supplement')
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               suppl_type=issue.suppl_type,
                               suppl_text=issue.suppl_text,
                               publication_start_month='2',
                               publication_year=issue.publication_year)
        self.assertTrue(self._make_form(journal, POST, instance=issue).is_valid())
    def test_clean_for_type_volume_on_edit(self):
        """Editing a volume-bound supplement must not trip the duplicate
        check (the instance matches itself).
        """
        journal = modelfactories.JournalFactory()
        issue = modelfactories.IssueFactory(volume='2',
                                            number='',
                                            suppl_text='1',
                                            publication_year=2013,
                                            journal=journal,
                                            type='supplement')
        section = modelfactories.SectionFactory(journal=journal)
        use_license = modelfactories.UseLicenseFactory()
        POST = self._make_post(section, use_license,
                               volume=issue.volume,
                               number=issue.number,
                               suppl_type=issue.suppl_type,
                               suppl_text=issue.suppl_text,
                               publication_start_month='2',
                               publication_year=issue.publication_year)
        self.assertTrue(self._make_form(journal, POST, instance=issue).is_valid())
class SpecialIssueFormClassTests(unittest.TestCase):
def test_journal_kwargs_is_required(self):
self.assertRaises(TypeError, lambda: forms.SpecialIssueForm())
def test_inheritance(self):
# By checking the inheritance, we assume that all base fields are present.
self.assertTrue(issubclass(forms.SpecialIssueForm, forms.RegularIssueForm))
def test_basic_structure(self):
from django import forms as dj_forms
journal = modelfactories.JournalFactory()
issue_form = forms.SpecialIssueForm(params={'journal': journal})
self.assertEqual(dj_forms.CharField, type(issue_form.fields['number']))
def test_mandatory_number_value(self):
from django import forms as dj_forms
from journalmanager.forms import SPECIAL_ISSUE_FORM_FIELD_NUMBER
journal = modelfactories.JournalFactory()
issue_form = forms.SpecialIssueForm(params={'journal': journal})
self.assertEqual(issue_form['number'].value(), SPECIAL_ISSUE_FORM_FIELD_NUMBER)
def test_clean(self):
from journalmanager.forms import SPECIAL_ISSUE_FORM_FIELD_NUMBER
journal = modelfactories.JournalFactory()
section = modelfactories.SectionFactory(journal=journal)
use_license = modelfactories.UseLicenseFactory()
POST = {
'section': [section.pk],
'volume': '',
'number': SPECIAL_ISSUE_FORM_FIELD_NUMBER,
'publication_start_month': '1',
'publication_end_month': '2',
'publication_year': '2014',
'is_marked_up': True,
'use_license': use_license.pk,
'total_documents': '10',
'ctrl_vocabulary': 'nd',
'editorial_standard': 'iso690',
'cover': '',
}
issue_regular_form = forms.RegularIssueForm(POST,
params={'journal': journal},
querysets={
'section': journal.section_set.all(),
'use_license': models.UseLicense.objects.all(),
})
self.assertTrue(issue_regular_form.is_valid())
issue_form = forms.SpecialIssueForm(POST,
params={'journal': journal},
querysets={
'section': journal.section_set.all(),
'use_license': models.UseLicense.objects.all(),
})
self.assertTrue(issue_form.is_valid())
def test_clean_with_any_number_value(self):
from journalmanager.forms import SPECIAL_ISSUE_FORM_FIELD_NUMBER
journal = modelfactories.JournalFactory()
section = modelfactories.SectionFactory(journal=journal)
use_license = modelfactories.UseLicenseFactory()
POST = {
'section': [section.pk],
'volume': '',
'number': '1',
'publication_start_month': '1',
'publication_end_month': '2',
'publication_year': '2014',
'is_marked_up': True,
'use_license': use_license.pk,
'total_documents': '10',
'ctrl_vocabulary': 'nd',
'editorial_standard': 'iso690',
'cover': '',
}
issue_form = forms.SpecialIssueForm(POST,
params={'journal': journal},
querysets={
'section': journal.section_set.all(),
'use_license': models.UseLicense.objects.all(),
})
self.assertTrue(issue_form.is_valid())
self.assertEqual(issue_form.cleaned_data['number'], SPECIAL_ISSUE_FORM_FIELD_NUMBER)
####
# Integration tests on forms
####
class IssueFormTests(WebTest):
@_patch_userrequestcontextfinder_settings_setup
def setUp(self):
self.user = modelfactories.UserFactory(is_active=True)
self.collection = modelfactories.CollectionFactory.create()
self.collection.add_user(self.user, is_manager=True)
self.collection.make_default_to_user(self.user)
self.journal = modelfactories.JournalFactory.create()
self.journal.join(self.collection, self.user)
@_patch_userrequestcontextfinder_settings_teardown
def tearDown(self):
pass
def test_basic_struture(self):
"""
Just to make sure that the required hidden fields are all
present.
All the management fields from inlineformsets used in this
form should be part of this test.
"""
perm = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm)
for t in ['regular', 'supplement', 'special']:
page = self.app.get(reverse('issue.add_%s' % t, args=[self.journal.pk]), user=self.user)
page.mustcontain('number', 'cover',
'title-0-title',
'title-0-language',
'title-TOTAL_FORMS',
'title-INITIAL_FORMS',
'title-MAX_NUM_FORMS')
self.assertTemplateUsed(page, 'journalmanager/add_issue_%s.html' % t)
def test_access_without_permission(self):
"""
Asserts that authenticated users without the required permissions
are unable to access the form. They must be redirected to a page
with informations about their lack of permissions.
"""
for t in ['regular', 'supplement', 'special']:
page = self.app.get(reverse('issue.add_%s' % t, args=[self.journal.pk]), user=self.user).follow()
self.assertTemplateUsed(page, 'accounts/unauthorized.html')
page.mustcontain('not authorized to access')
def test_POST_workflow_with_valid_formdata(self):
"""
When a valid form is submited, the user is redirected to
the issue's list and the new user must be part
of the list.
In order to take this action, the user needs the following
permissions: ``journalmanager.add_issue`` and
``journalmanager.list_issue``.
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t, args=[self.journal.pk]), user=self.user).forms['issue-form']
if t == 'supplement':
form['number'] = ''
form['volume'] = '29'
form['suppl_type'] = 'volume'
form['suppl_text'] = 'suppl.X'
elif t == 'special':
form['number'] = '3'
else: # regular
form['number'] = '3'
form['volume'] = '29'
form['total_documents'] = '16'
form.set('ctrl_vocabulary', 'decs')
form['publication_start_month'] = '9'
form['publication_end_month'] = '11'
form['publication_year'] = '2012'
form['is_marked_up'] = False
form['editorial_standard'] = 'other'
response = form.submit().follow()
self.assertIn('Saved.', response.body)
self.assertTemplateUsed(response, 'journalmanager/issue_list.html')
def test_POST_workflow_without_volume_and_number_formdata(self):
"""
When a user submit a issue the form must contain unless one of the
fields Volume or Number
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t, args=[self.journal.pk]), user=self.user).forms['issue-form']
form['total_documents'] = '16'
form.set('ctrl_vocabulary', 'decs')
form['number'] = ''
form['volume'] = ''
form['publication_start_month'] = '9'
form['publication_end_month'] = '11'
form['publication_year'] = '2012'
form['is_marked_up'] = False
form['editorial_standard'] = 'other'
response = form.submit()
if t == 'supplement':
self.assertIn('There are some errors or missing data.', response.body)
elif t == 'special':
# for t=='special' -> number field will be overwrited it 'spe' text
pass
else: # regular
self.assertIn('You must complete at least one of two fields volume or number.', response.body)
self.assertTemplateUsed(response, 'journalmanager/add_issue_%s.html' % t)
def test_POST_workflow_with_invalid_formdata(self):
"""
When an invalid form is submited, no action is taken, the
form is rendered again and an alert is shown with the message
``There are some errors or missing data``.
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t, args=[self.journal.pk]), user=self.user).forms['issue-form']
form['total_documents'] = '16'
form.set('ctrl_vocabulary', 'decs')
form['number'] = '3'
form['volume'] = ''
form['is_marked_up'] = False
form['editorial_standard'] = 'other'
response = form.submit()
self.assertIn('There are some errors or missing data.', response.body)
self.assertTemplateUsed(response, 'journalmanager/add_issue_%s.html' % t)
def test_POST_workflow_with_exist_year_number_volume_on_the_same_journal(self):
"""
Asserts if any message error is displayed while trying to insert a duplicate
Year, Number and Volume issue object from a specific Journal
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'special']:
issue = modelfactories.IssueFactory(journal=self.journal, suppl_text='', type=t)
form = self.app.get(reverse('issue.add_%s' % t, args=[self.journal.pk]), user=self.user).forms['issue-form']
form['total_documents'] = '16'
form.set('ctrl_vocabulary', 'decs')
form['number'] = str(issue.number)
form['volume'] = str(issue.volume)
form['publication_start_month'] = '9'
form['publication_end_month'] = '11'
form['publication_year'] = str(issue.publication_year)
form['is_marked_up'] = False
form['editorial_standard'] = 'other'
response = form.submit()
if t in ('regular',):
# for t == 'special' number field will be overwrited in clean_number method,
# so will be a redirecto (http 302) because save was succesfully.
# for other types, will raise a validations error
self.assertIn('There are some errors or missing data.', response.body)
self.assertIn('Issue with this Year and (Volume or Number) already exists for this Journal', response.body)
self.assertTemplateUsed(response, 'journalmanager/add_issue_%s.html' % t)
else:
self.assertEqual(302, response.status_code)
self.assertIn(reverse('issue.index', args=[issue.journal.pk]), response.location)
self.assertEqual('', response.body)
def test_POST_workflow_with_exist_year_number_volume_suppl_text_on_the_same_journal(self):
"""
Asserts if any message error is displayed while trying to insert a duplicate
Year, Number and Volume issue object from a specific Journal
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
issue = modelfactories.IssueFactory(journal=self.journal, suppl_text='1', volume='1', number='', type='supplement')
form = self.app.get(reverse('issue.add_supplement', args=[self.journal.pk]), user=self.user).forms['issue-form']
form['total_documents'] = '16'
form.set('ctrl_vocabulary', 'decs')
form['number'] = str(issue.number)
form['volume'] = str(issue.volume)
form['suppl_text'] = issue.suppl_text
form['publication_start_month'] = '9'
form['publication_end_month'] = '11'
form['publication_year'] = str(issue.publication_year)
form['is_marked_up'] = False
form['editorial_standard'] = 'other'
response = form.submit()
self.assertIn('There are some errors or missing data.', response.body)
self.assertIn('Issue with this Year and (Volume or Number) already exists for this Journal', response.body)
self.assertTemplateUsed(response, 'journalmanager/add_issue_supplement.html')
def test_issues_can_be_edited(self):
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
issue = modelfactories.IssueFactory(journal=self.journal, suppl_text='', type=t)
form = self.app.get(reverse('issue.edit', args=[self.journal.pk, issue.pk]), user=self.user).forms['issue-form']
form['total_documents'] = '99'
if t == 'supplement':
form['suppl_type'] = 'volume'
form['suppl_text'] = 'suppl.XX'
form['volume'] = '99'
form['number'] = ''
response = form.submit().follow()
self.assertIn('Saved.', response.body)
self.assertTemplateUsed(response, 'journalmanager/issue_list.html')
def test_form_enctype_must_be_multipart_formdata(self):
"""
Asserts that the enctype attribute of the issue form is
``multipart/form-data``
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t,
args=[self.journal.pk]), user=self.user).forms['issue-form']
self.assertEqual(form.enctype, 'multipart/form-data')
def test_form_action_must_be_empty(self):
"""
Asserts that the action attribute of the issue form is
empty. This is needed because the same form is used to add
a new or edit an existing entry.
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t,
args=[self.journal.pk]), user=self.user).forms['issue-form']
self.assertEqual(form.action, '')
def test_form_method_must_be_post(self):
"""
Asserts that the method attribute of the issue form is
``POST``.
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t,
args=[self.journal.pk]), user=self.user).forms['issue-form']
self.assertEqual(form.method.lower(), 'post')
def test_sections_must_not_be_trashed(self):
"""
Only valid sections must be available for the user to
bind to a issue.
"""
perm_issue_change = _makePermission(perm='add_issue',
model='issue', app_label='journalmanager')
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_issue_change)
self.user.user_permissions.add(perm_issue_list)
trashed_section = modelfactories.SectionFactory.create(
journal=self.journal, is_trashed=True)
for t in ['regular', 'supplement', 'special']:
form = self.app.get(reverse('issue.add_%s' % t,
args=[self.journal.pk]), user=self.user).forms['issue-form']
self.assertRaises(ValueError,
lambda: form.set('section', str(trashed_section.pk)))
class SearchFormTests(WebTest):
def setUp(self):
self.user = modelfactories.UserFactory(is_active=True)
perm = _makePermission(perm='list_journal', model='journal')
self.user.user_permissions.add(perm)
self.collection = modelfactories.CollectionFactory.create()
self.collection.add_user(self.user, is_manager=True)
def test_basic_struture(self):
"""
Just to make sure that the required hidden fields are all
present.
All the management fields from inlineformsets used in this
form should be part of this test.
"""
page = self.app.get(reverse('index'), user=self.user)
page.mustcontain('list_model', 'q')
self.assertTemplateUsed(page, 'journalmanager/home_journal.html')
def test_form_enctype_must_be_urlencoded(self):
"""
Asserts that the enctype attribute of the search form is
``application/x-www-form-urlencoded``
"""
form = self.app.get(reverse('index'),
user=self.user).forms['search-form']
self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
def test_form_action_must_be_empty(self):
"""
Asserts that the action attribute of the search form is
the journal home.
"""
form = self.app.get(reverse('index'),
user=self.user).forms['search-form']
self.assertEqual(form.action, '')
def test_form_method_must_be_get(self):
"""
Asserts that the method attribute of the search form is
``GET``.
"""
form = self.app.get(reverse('index'),
user=self.user).forms['search-form']
self.assertEqual(form.method.lower(), 'get')
def test_GET_search_journal(self):
"""
Asserts that the search return the correct journal list
"""
journal = modelfactories.JournalFactory.create()
journal.join(self.collection, self.user)
page = self.app.get(reverse('journal.index') + '?q=Arquivos',
user=self.user)
self.assertIn('ABCD. Arquivos Brasileiros de Cirurgia Digestiva (S\xc3\xa3o Paulo)',
page.body)
def test_GET_search_sponsor(self):
"""
Asserts that the search return the correct sponsor list
"""
perm = _makePermission(perm='list_sponsor', model='sponsor',
app_label='journalmanager')
self.user.user_permissions.add(perm)
sponsor = modelfactories.SponsorFactory.create()
sponsor.collections.add(self.collection)
page = self.app.get(reverse('sponsor.index') + '?q=Amparo',
user=self.user)
self.assertIn('Funda\xc3\xa7\xc3\xa3o de Amparo a Pesquisa do Estado de S\xc3\xa3o Paulo',
page.body)
def test_GET_journal_filter_by_letter(self):
"""
Asserts that the filter with letter return the correct journal list
"""
perm = _makePermission(perm='list_journal', model='journal',
app_label='journalmanager')
self.user.user_permissions.add(perm)
journal = modelfactories.JournalFactory.create()
journal.join(self.collection, self.user)
page = self.app.get(reverse('journal.index') + '?letter=A', user=self.user)
self.assertIn('ABCD. Arquivos Brasileiros de Cirurgia Digestiva (S\xc3\xa3o Paulo)',
page.body)
def test_GET_sponsor_filter_by_letter(self):
"""
Asserts that the filter with letter return the correct journal list
"""
perm = _makePermission(perm='list_sponsor', model='sponsor',
app_label='journalmanager')
self.user.user_permissions.add(perm)
sponsor = modelfactories.SponsorFactory.create()
sponsor.collections.add(self.collection)
page = self.app.get(reverse('sponsor.index') + '?letter=F', user=self.user)
self.assertIn('Funda\xc3\xa7\xc3\xa3o de Amparo a Pesquisa do Estado de S\xc3\xa3o Paulo',
page.body)
class SectionTitleFormValidationTests(TestCase):
def test_same_titles_in_different_languages_must_be_valid(self):
user = modelfactories.UserFactory(is_active=True)
collection = modelfactories.CollectionFactory.create()
collection.add_user(user, is_manager=True)
journal = modelfactories.JournalFactory.create()
journal.join(collection, user)
language = modelfactories.LanguageFactory.create(iso_code='en',
name='english')
language2 = modelfactories.LanguageFactory.create(iso_code='pt',
name='portuguese')
journal.languages.add(language)
journal.languages.add(language2)
section = modelfactories.SectionFactory(journal=journal)
section.add_title('Original Article', language=language)
post_dict = {
u'titles-INITIAL_FORMS': 0,
u'titles-TOTAL_FORMS': 1,
u'legacy_code': u'',
u'titles-0-language': unicode(language2.pk),
u'titles-0-title': u'Original Article',
}
section_forms = forms.get_all_section_forms(post_dict,
journal=journal, section=section)
self.assertTrue(section_forms['section_form'].is_valid())
self.assertTrue(section_forms['section_title_formset'].is_valid())
class JournalEditorsTests(WebTest):
def setUp(self):
self.user = modelfactories.UserFactory(is_active=True)
self.collection = modelfactories.CollectionFactory.create()
self.collection.add_user(self.user, is_manager=True)
self.journal = modelfactories.JournalFactory.create()
self.journal.join(self.collection, self.user)
perm_journal_list = _makePermission(perm='list_journal',
model='journal',
app_label='journalmanager')
self.user.user_permissions.add(perm_journal_list)
def test_form_ectype_must_be_urlencoded(self):
from waffle import Flag
Flag.objects.create(name='editor_manager', everyone=True)
form = self.app.get(reverse('journal_editors.index',
args=[self.journal.pk]), user=self.user).forms['add-editor']
self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
def test_form_method_must_be_post(self):
"""
Asserts that the method attribute of the ahead form is
``POST``.
"""
from waffle import Flag
Flag.objects.create(name='editor_manager', everyone=True)
form = self.app.get(reverse('journal_editors.index',
args=[self.journal.pk]), user=self.user).forms['add-editor']
self.assertEqual(form.method.lower(), 'post')
def test_form_action_must_not_be_empty(self):
from waffle import Flag
Flag.objects.create(name='editor_manager', everyone=True)
form = self.app.get(reverse('journal_editors.index',
args=[self.journal.pk]), user=self.user).forms['add-editor']
r = reverse('journal_editors.add', args=[self.journal.pk])
self.assertEqual(form.action, r)
def test_form_adding_an_editor_with_a_valid_username(self):
from waffle import Flag
Flag.objects.create(name='editor_manager', everyone=True)
perm_journal_change = _makePermission(perm='change_journal',
model='journal',
app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
form = self.app.get(reverse('journal_editors.index',
args=[self.journal.pk]), user=self.user).forms['add-editor']
form['query'] = self.user.username
response = form.submit()
self.assertIn('Now, %s is an editor of this journal.' % self.user.username, response.body)
def test_form_adding_an_editor_with_a_invalid_username(self):
from waffle import Flag
Flag.objects.create(name='editor_manager', everyone=True)
perm_journal_change = _makePermission(perm='change_journal',
model='journal',
app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
form = self.app.get(reverse('journal_editors.index',
args=[self.journal.pk]), user=self.user).forms['add-editor']
form['query'] = 'fakeuser'
response = form.submit()
self.assertIn('User fakeuser does not exists', response.body)
class AheadFormTests(WebTest):
def setUp(self):
self.user = modelfactories.UserFactory(is_active=True)
self.collection = modelfactories.CollectionFactory.create()
self.collection.add_user(self.user, is_manager=True)
self.journal = modelfactories.JournalFactory.create()
self.journal.join(self.collection, self.user)
def test_form_enctype_must_be_urlencoded(self):
"""
Asserts that the enctype attribute of the ahead form is
``application/x-www-form-urlencoded``
"""
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
perm_journal_change = _makePermission(perm='change_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_issue_list)
form = self.app.get(reverse('issue.index', args=[self.journal.pk]),
user=self.user).forms['ahead-form']
self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
def test_form_action_must_be_empty(self):
"""
Asserts that the action attribute of the ahead form is
empty.
"""
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
perm_journal_change = _makePermission(perm='change_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_issue_list)
form = self.app.get(reverse('issue.index', args=[self.journal.pk]),
user=self.user).forms['ahead-form']
self.assertEqual(form.action, '')
def test_form_method_must_be_post(self):
"""
Asserts that the method attribute of the ahead form is
``POST``.
"""
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
perm_journal_change = _makePermission(perm='change_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_issue_list)
form = self.app.get(reverse('issue.index', args=[self.journal.pk]),
user=self.user).forms['ahead-form']
self.assertEqual(form.method.lower(), 'post')
def test_basic_structure(self):
perm_issue_list = _makePermission(perm='list_issue',
model='issue', app_label='journalmanager')
perm_journal_change = _makePermission(perm='change_issue',
model='issue', app_label='journalmanager')
self.user.user_permissions.add(perm_journal_change)
self.user.user_permissions.add(perm_issue_list)
form = self.app.get(reverse('issue.index', args=[self.journal.pk]),
user=self.user).forms['ahead-form']
self.assertIn('csrfmiddlewaretoken', form.fields)
class PressReleaseFormTests(WebTest):
def setUp(self):
self.user = modelfactories.UserFactory(is_active=True)
self.collection = modelfactories.CollectionFactory.create()
self.collection.add_user(self.user, is_manager=True)
self.journal = modelfactories.JournalFactory.create()
self.journal.join(self.collection, self.user)
def test_form_enctype_must_be_urlencoded(self):
"""
Asserts that the enctype attribute of the pressrelease form is
``application/x-www-form-urlencoded``
"""
perm_prelease_list = _makePermission(perm='list_pressrelease',
model='pressrelease',
app_label='journalmanager')
perm_prelease_add = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_add)
self.user.user_permissions.add(perm_prelease_list)
form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
user=self.user).forms['prelease-form']
self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')
def test_form_action_must_be_empty(self):
"""
Asserts that the action attribute of the press release form is
empty.
"""
perm_prelease_list = _makePermission(perm='list_pressrelease',
model='pressrelease',
app_label='journalmanager')
perm_prelease_add = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_list)
self.user.user_permissions.add(perm_prelease_add)
form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
user=self.user).forms['prelease-form']
self.assertEqual(form.action, '')
def test_form_method_must_be_post(self):
"""
Asserts that the method attribute of the press release form is
``POST``.
"""
perm_prelease_list = _makePermission(perm='list_pressrelease',
model='pressrelease',
app_label='journalmanager')
perm_prelease_add = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_list)
self.user.user_permissions.add(perm_prelease_add)
form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
user=self.user).forms['prelease-form']
self.assertEqual(form.method.lower(), 'post')
def test_basic_structure(self):
perm_prelease_list = _makePermission(perm='list_pressrelease',
model='pressrelease',
app_label='journalmanager')
perm_prelease_add = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_add)
self.user.user_permissions.add(perm_prelease_list)
form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
user=self.user).forms['prelease-form']
self.assertIn('csrfmiddlewaretoken', form.fields)
def test_POST_pressrelease_with_valid_data(self):
perm_prelease_list = _makePermission(perm='list_pressrelease',
model='pressrelease',
app_label='journalmanager')
perm_prelease_add = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_add)
self.user.user_permissions.add(perm_prelease_list)
issue = modelfactories.IssueFactory(journal=self.journal)
language = modelfactories.LanguageFactory(iso_code='en',
name='english')
self.journal.languages.add(language)
form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
user=self.user).forms['prelease-form']
form.set('issue', issue.pk)
form['doi'] = "http://dx.doi.org/10.1590/S0102-86502013001300002"
form['article-0-article_pid'] = 'S0102-86502013001300002'
form.set('translation-0-language', language.pk)
form['translation-0-title'] = "Press Relasea MFP"
form['translation-0-content'] = "<p>Body of some HTML</p>"
response = form.submit().follow()
self.assertIn('Saved.', response.body)
def test_POST_pressrelease_with_invalid_data(self):
perm_prelease_list = _makePermission(perm='list_pressrelease',
model='pressrelease',
app_label='journalmanager')
perm_prelease_add = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_add)
self.user.user_permissions.add(perm_prelease_list)
language = modelfactories.LanguageFactory(iso_code='en',
name='english')
self.journal.languages.add(language)
form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
user=self.user).forms['prelease-form']
form['doi'] = "http://dx.doi.org/10.1590/S0102-86502013001300002"
form['article-0-article_pid'] = 'S0102-86502013001300002'
form.set('translation-0-language', language.pk)
form['translation-0-title'] = "Press Relasea MFP"
form['translation-0-content'] = "<p>Body of some HTML</p>"
response = form.submit()
self.assertIn('There are some errors or missing data.', response.body)
self.assertTemplateUsed(response,
'journalmanager/add_pressrelease.html')
def test_pressrelease_if_on_edit_form_it_has_article_pid(self):
perm_prelease_edit = _makePermission(perm='add_pressrelease',
model='pressrelease',
app_label='journalmanager')
self.user.user_permissions.add(perm_prelease_edit)
ahead_prelease = modelfactories.AheadPressReleaseFactory()
article_prelease = modelfactories.PressReleaseArticleFactory(
press_release=ahead_prelease,
article_pid="S0102-311X2013000300001")
form_ahead_prelease = self.app.get(reverse('aprelease.edit',
args=[self.journal.pk, ahead_prelease.pk]),
user=self.user).forms['prelease-form']
self.assertEqual(form_ahead_prelease['article-0-article_pid'].value, "S0102-311X2013000300001")
    def test_POST_pressrelease_must_contain_at_least_one_press_release_translation(self):
        """Submitting without any translation row must be rejected with errors."""
        perm_prelease_list = _makePermission(perm='list_pressrelease',
                                             model='pressrelease',
                                             app_label='journalmanager')
        perm_prelease_add = _makePermission(perm='add_pressrelease',
                                            model='pressrelease',
                                            app_label='journalmanager')
        self.user.user_permissions.add(perm_prelease_add)
        self.user.user_permissions.add(perm_prelease_list)
        issue = modelfactories.IssueFactory(journal=self.journal)
        language = modelfactories.LanguageFactory(iso_code='en',
                                                  name='english')
        self.journal.languages.add(language)
        form = self.app.get(reverse('prelease.add', args=[self.journal.pk]),
                            user=self.user).forms['prelease-form']
        form.set('issue', issue.pk)
        form['doi'] = "http://dx.doi.org/10.1590/S0102-86502013001300002"
        form['article-0-article_pid'] = 'S0102-86502013001300002'
        # Deliberately no translation-* fields filled in: the formset-level
        # validation must demand at least one translation.
        response = form.submit()
        self.assertIn('There are some errors or missing data.', response.body)
        self.assertIn('Please fill in at least one form', response.body)
        self.assertTemplateUsed(response,
                                'journalmanager/add_pressrelease.html')
    def test_pressrelease_translations_language_filtering(self):
        """Translation language choices are limited to the journal's languages."""
        language1 = modelfactories.LanguageFactory.create(iso_code='en',
                                                          name='english')
        # language2 is deliberately NOT bound to the journal below, so it must
        # be excluded from the form's language queryset.
        language2 = modelfactories.LanguageFactory.create(iso_code='pt',
                                                          name='portuguese')
        journal = modelfactories.JournalFactory.create()
        journal.languages.add(language1)
        testing_form = forms.PressReleaseTranslationForm(journal=journal)
        res_qset = testing_form['language'].field.queryset
        self.assertEqual(len(res_qset), 1)
        self.assertEqual(res_qset[0], language1)
    def test_pressrelease_translations_raises_TypeError_while_missing_journal(self):
        """Instantiating the form without the required ``journal`` kwarg must fail fast."""
        self.assertRaises(
            TypeError,
            lambda: forms.PressReleaseTranslationForm())
    def test_get_all_pressrelease_forms(self):
        """The form factory returns exactly the three expected form groups."""
        language = modelfactories.LanguageFactory.create(iso_code='en',
                                                         name='english')
        journal = modelfactories.JournalFactory.create()
        journal.languages.add(language)
        # Empty POST data ({}) builds unbound forms for a fresh PressRelease.
        pr_forms = forms.get_all_pressrelease_forms(
            {}, journal, models.PressRelease())
        self.assertEqual(
            sorted(pr_forms.keys()),
            sorted([
                'pressrelease_form',
                'translation_formset',
                'article_formset',
            ])
        )
    def test_get_all_pressrelease_language_filtering(self):
        """The translation formset built by the factory filters languages by journal."""
        language = modelfactories.LanguageFactory.create(iso_code='en',
                                                         name='english')
        journal = modelfactories.JournalFactory.create()
        journal.languages.add(language)
        pr_forms = forms.get_all_pressrelease_forms(
            {}, journal, models.PressRelease())
        # Only the single language bound to the journal may appear as a choice.
        res_qset = pr_forms['translation_formset'][0].fields['language'].queryset
        self.assertEqual(len(res_qset), 1)
        self.assertEqual(res_qset[0], language)
    def test_issues_must_not_be_trashed(self):
        """
        Only valid issues must be available for the user to
        bind to a pressrelease.
        """
        perm_prelease_list = _makePermission(perm='list_pressrelease',
                                             model='pressrelease',
                                             app_label='journalmanager')
        perm_prelease_add = _makePermission(perm='add_pressrelease',
                                            model='pressrelease',
                                            app_label='journalmanager')
        self.user.user_permissions.add(perm_prelease_list)
        self.user.user_permissions.add(perm_prelease_add)
        trashed_issue = modelfactories.IssueFactory.create(
            journal=self.journal, is_trashed=True)
        language = modelfactories.LanguageFactory(iso_code='en',
                                                  name='english')
        self.journal.languages.add(language)
        form = self.app.get(reverse('prelease.add',
                            args=[self.journal.pk]),
                            user=self.user).forms['prelease-form']
        # WebTest raises ValueError when a value is not among the <select>
        # options — i.e. the trashed issue must not be offered by the form.
        self.assertRaises(ValueError,
                          lambda: form.set('issue', str(trashed_issue.pk)))
class AheadPressReleaseFormTests(WebTest):
    """Functional tests for the ahead-of-print press release form.

    Mirrors the regular press-release form tests but targets the
    ``aprelease.add`` view, whose form binds press releases to articles
    ahead of print (there is no ``issue`` field).
    """

    def setUp(self):
        self.user = modelfactories.UserFactory(is_active=True)
        self.collection = modelfactories.CollectionFactory.create()
        self.collection.add_user(self.user, is_manager=True)
        self.journal = modelfactories.JournalFactory()
        self.journal.join(self.collection, self.user)

    def _grant_prelease_perms(self):
        """Grant the ``list`` and ``add`` pressrelease permissions.

        Every view under test requires both; extracting this removes the
        six identical copies of permission boilerplate the tests carried.
        """
        perm_prelease_list = _makePermission(perm='list_pressrelease',
                                             model='pressrelease',
                                             app_label='journalmanager')
        perm_prelease_add = _makePermission(perm='add_pressrelease',
                                            model='pressrelease',
                                            app_label='journalmanager')
        self.user.user_permissions.add(perm_prelease_add)
        self.user.user_permissions.add(perm_prelease_list)

    def _get_add_form(self):
        """GET the ahead press-release add view and return its form."""
        page = self.app.get(reverse('aprelease.add',
                            args=[self.journal.pk]),
                            user=self.user)
        return page.forms['prelease-form']

    def _add_english_language(self):
        """Create an ``en`` Language, bind it to the journal and return it."""
        language = modelfactories.LanguageFactory(iso_code='en',
                                                  name='english')
        self.journal.languages.add(language)
        return language

    def test_form_enctype_must_be_urlencoded(self):
        """
        Asserts that the enctype attribute of the pressrelease form is
        ``application/x-www-form-urlencoded``
        """
        self._grant_prelease_perms()
        form = self._get_add_form()
        self.assertEqual(form.enctype, 'application/x-www-form-urlencoded')

    def test_form_action_must_be_empty(self):
        """
        Asserts that the action attribute of the press release form is
        empty.
        """
        self._grant_prelease_perms()
        form = self._get_add_form()
        self.assertEqual(form.action, '')

    def test_form_method_must_be_post(self):
        """
        Asserts that the method attribute of the press release form is
        ``POST``.
        """
        self._grant_prelease_perms()
        form = self._get_add_form()
        self.assertEqual(form.method.lower(), 'post')

    def test_basic_structure(self):
        """The rendered form must carry Django's CSRF token."""
        self._grant_prelease_perms()
        form = self._get_add_form()
        self.assertIn('csrfmiddlewaretoken', form.fields)

    def test_POST_pressrelease_with_valid_data(self):
        """A fully filled form must save and redirect with a success notice."""
        self._grant_prelease_perms()
        language = self._add_english_language()
        form = self._get_add_form()
        form['doi'] = "http://dx.doi.org/10.1590/S0102-86502013001300002"
        form['article-0-article_pid'] = 'S0102-86502013001300002'
        form.set('translation-0-language', language.pk)
        form['translation-0-title'] = "Press Relasea MFP"
        form['translation-0-content'] = "<p>Body of some HTML</p>"
        response = form.submit().follow()
        self.assertIn('Saved.', response.body)

    def test_POST_pressrelease_with_invalid_data(self):
        """Omitting the translation language must re-render the form with errors."""
        self._grant_prelease_perms()
        self._add_english_language()
        form = self._get_add_form()
        form['doi'] = "http://dx.doi.org/10.1590/S0102-86502013001300002"
        form['article-0-article_pid'] = 'S0102-86502013001300002'
        # missing translation language
        form['translation-0-title'] = "Press Relasea MFP"
        form['translation-0-content'] = "<p>Body of some HTML</p>"
        response = form.submit()
        self.assertIn('There are some errors or missing data.', response.body)
        self.assertTemplateUsed(response,
                                'journalmanager/add_pressrelease.html')

    def test_POST_pressrelease_must_contain_at_least_one_press_release_translation(self):
        """Submitting without any translation row must be rejected."""
        self._grant_prelease_perms()
        self._add_english_language()
        form = self._get_add_form()
        form['doi'] = "http://dx.doi.org/10.1590/S0102-86502013001300002"
        form['article-0-article_pid'] = 'S0102-86502013001300002'
        response = form.submit()
        self.assertIn('There are some errors or missing data.', response.body)
        self.assertIn('Please fill in at least one form', response.body)
        self.assertTemplateUsed(response,
                                'journalmanager/add_pressrelease.html')

    def test_pressrelease_translations_language_filtering(self):
        """Translation language choices are limited to the journal's languages."""
        language1 = modelfactories.LanguageFactory.create(iso_code='en',
                                                          name='english')
        # language2 is deliberately NOT bound to the journal, so it must be
        # excluded from the queryset checked below.
        language2 = modelfactories.LanguageFactory.create(iso_code='pt',
                                                          name='portuguese')
        journal = modelfactories.JournalFactory.create()
        journal.languages.add(language1)
        testing_form = forms.PressReleaseTranslationForm(journal=journal)
        res_qset = testing_form['language'].field.queryset
        self.assertEqual(len(res_qset), 1)
        self.assertEqual(res_qset[0], language1)

    def test_pressrelease_translations_raises_TypeError_while_missing_journal(self):
        """Instantiating the translation form without ``journal`` must fail fast."""
        self.assertRaises(
            TypeError,
            lambda: forms.PressReleaseTranslationForm())

    def test_get_all_pressrelease_forms(self):
        """The form factory must return the three expected form groups.

        NOTE(review): this exercises ``get_all_pressrelease_forms`` with a
        regular ``PressRelease`` even though this test case targets ahead
        press releases — confirm whether the ahead variants were intended.
        """
        language = modelfactories.LanguageFactory.create(iso_code='en',
                                                         name='english')
        journal = modelfactories.JournalFactory.create()
        journal.languages.add(language)
        pr_forms = forms.get_all_pressrelease_forms(
            {}, journal, models.PressRelease())
        self.assertEqual(
            sorted(pr_forms.keys()),
            sorted([
                'pressrelease_form',
                'translation_formset',
                'article_formset',
            ])
        )

    def test_get_all_ahead_pressrelease_language_filtering(self):
        """The ahead form factory must filter languages by journal as well."""
        language = modelfactories.LanguageFactory.create(iso_code='en',
                                                         name='english')
        journal = modelfactories.JournalFactory.create()
        journal.languages.add(language)
        pr_forms = forms.get_all_ahead_pressrelease_forms(
            {}, journal, models.AheadPressRelease())
        res_qset = pr_forms['translation_formset'][0].fields['language'].queryset
        self.assertEqual(len(res_qset), 1)
        self.assertEqual(res_qset[0], language)
|
jamilatta/scielo-manager
|
scielomanager/journalmanager/tests/tests_forms.py
|
Python
|
bsd-2-clause
| 131,301
|
###################################################
# header_music.py
# This file contains declarations for music tracks
# DO NOT EDIT THIS FILE!
###################################################

# Culture flags: one bit per faction culture, plus a mask covering all six.
mtf_culture_1 = 1 << 0
mtf_culture_2 = 1 << 1
mtf_culture_3 = 1 << 2
mtf_culture_4 = 1 << 3
mtf_culture_5 = 1 << 4
mtf_culture_6 = 1 << 5
mtf_culture_all = (1 << 6) - 1  # 0x3F: union of all six culture bits

# Playback-control flags.
mtf_looping = 1 << 6
mtf_start_immediately = 1 << 7
mtf_persist_until_finished = 1 << 8

# Situation flags: the game situations in which the track may play.
mtf_sit_tavern = 1 << 9
mtf_sit_fight = 1 << 10
mtf_sit_multiplayer_fight = 1 << 11
mtf_sit_ambushed = 1 << 12
mtf_sit_town = 1 << 13
mtf_sit_town_infiltrate = 1 << 14
mtf_sit_killed = 1 << 15
mtf_sit_travel = 1 << 16
mtf_sit_arena = 1 << 17
mtf_sit_siege = 1 << 18
mtf_sit_night = 1 << 19
mtf_sit_day = 1 << 20
mtf_sit_encounter_hostile = 1 << 21
mtf_sit_main_title = 1 << 22
mtf_sit_victorious = 1 << 23
mtf_sit_feast = 1 << 24

# Set this flag for tracks placed under the module folder.
mtf_module_track = 1 << 28
|
Sw4T/Warband-Development
|
mb_warband_module_system_1166/Module_system 1.166/headers/header_music.py
|
Python
|
mit
| 1,662
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This file contains the definition of the GCS Blob storage Class used to
integrate GCS Blob storage with spack buildcache.
"""
import os
import sys
import llnl.util.tty as tty
def gcs_client():
    """Create a GCS client

    Creates an authenticated GCS client to access GCS buckets and blobs
    """
    # Import lazily so spack keeps working without google-cloud-storage
    # installed unless a gs:// mirror is actually used.
    try:
        import google.auth
        from google.cloud import storage
    except ImportError as ex:
        tty.error('{0}, google-cloud-storage python module is missing.'.format(ex) +
                  ' Please install to use the gs:// backend.')
        sys.exit(1)
    # Application Default Credentials: environment variable, gcloud config,
    # or the GCE/GKE metadata server.
    storage_credentials, storage_project = google.auth.default()
    storage_client = storage.Client(storage_project,
                                    storage_credentials)
    return storage_client
class GCSBucket(object):
    """GCS Bucket Object
    Create a wrapper object for a GCS Bucket. Provides methods to wrap spack
    related tasks, such as destroy.
    """
    def __init__(self, url, client=None):
        """Constructor for GCSBucket objects

        Args:
            url (str): The url pointing to the GCS bucket to build an object out of
            client (google.cloud.storage.client.Client): A pre-defined storage
                client that will be used to access the GCS bucket.

        Raises:
            ValueError: if ``url`` does not use the ``gs`` scheme.
        """
        if url.scheme != 'gs':
            raise ValueError('Can not create GCS bucket connection with scheme {SCHEME}'
                             .format(SCHEME=url.scheme))
        self.url = url
        # gs://<netloc>/<path>: netloc is the bucket name, path is the
        # object-name prefix (leading slash stripped).
        self.name = self.url.netloc
        if self.url.path[0] == '/':
            self.prefix = self.url.path[1:]
        else:
            self.prefix = self.url.path
        self.client = client or gcs_client()
        # Resolved lazily by exists()/create().
        self.bucket = None
        tty.debug('New GCS bucket:')
        tty.debug(" name: {0}".format(self.name))
        tty.debug(" prefix: {0}".format(self.prefix))
    def exists(self):
        """Populate ``self.bucket`` and report whether a handle was obtained.

        NOTE(review): ``Client.bucket()`` builds a local handle and is not
        documented to contact the API, so ``NotFound`` is unlikely to fire
        here — confirm whether ``Bucket.exists()`` / ``Client.get_bucket()``
        was intended for a real existence check.
        """
        from google.cloud.exceptions import NotFound
        if not self.bucket:
            try:
                self.bucket = self.client.bucket(self.name)
            except NotFound as ex:
                tty.error("{0}, Failed check for bucket existence".format(ex))
                sys.exit(1)
        return self.bucket is not None
    def create(self):
        # Create the remote bucket unless we already hold a handle.
        if not self.bucket:
            self.bucket = self.client.create_bucket(self.name)
    def get_blob(self, blob_path):
        # Fetch blob metadata from the API; None if the blob does not exist.
        if self.exists():
            return self.bucket.get_blob(blob_path)
        return None
    def blob(self, blob_path):
        # Build a blob handle locally, without an API round-trip.
        if self.exists():
            return self.bucket.blob(blob_path)
        return None
    def get_all_blobs(self, recursive=True, relative=True):
        """Get a list of all blobs
        Returns a list of all blobs within this bucket.

        Args:
            recursive: If true (default), include blobs at any depth below
                the prefix; if false, only blobs directly under it.
            relative: If true (default), print blob paths
                relative to 'build_cache' directory.
                If false, print absolute blob paths (useful for
                destruction of bucket)

        NOTE(review): returns None (not []) when the bucket handle is
        missing — callers such as destroy() assume a list; confirm.
        """
        tty.debug('Getting GCS blobs... Recurse {0} -- Rel: {1}'.format(
            recursive, relative))
        converter = str
        if relative:
            converter = self._relative_blob_name
        if self.exists():
            all_blobs = self.bucket.list_blobs(prefix=self.prefix)
            blob_list = []
            # Depth of the prefix itself; entries deeper than this are
            # skipped in the non-recursive case.
            base_dirs = len(self.prefix.split('/')) + 1
            for blob in all_blobs:
                if not recursive:
                    num_dirs = len(blob.name.split('/'))
                    if num_dirs <= base_dirs:
                        blob_list.append(converter(blob.name))
                else:
                    blob_list.append(converter(blob.name))
            return blob_list
    def _relative_blob_name(self, blob_name):
        # Strip the bucket prefix from an absolute blob name.
        return os.path.relpath(blob_name, self.prefix)
    def destroy(self, recursive=False, **kwargs):
        """Bucket destruction method
        Deletes all blobs within the bucket, and then deletes the bucket itself.
        Uses GCS Batch operations to bundle several delete operations together.

        NOTE(review): despite the docstring, the code below only deletes the
        blobs — the bucket itself is never deleted; confirm intent.
        """
        from google.cloud.exceptions import NotFound
        tty.debug("Bucket.destroy(recursive={0})".format(recursive))
        try:
            bucket_blobs = self.get_all_blobs(recursive=recursive, relative=False)
            # Delete in chunks to bound the size of each batch request.
            batch_size = 1000
            num_blobs = len(bucket_blobs)
            for i in range(0, num_blobs, batch_size):
                with self.client.batch():
                    for j in range(i, min(i + batch_size, num_blobs)):
                        blob = self.blob(bucket_blobs[j])
                        blob.delete()
        except NotFound as ex:
            tty.error("{0}, Could not delete a blob in bucket {1}.".format(
                ex, self.name))
            sys.exit(1)
class GCSBlob(object):
    """GCS Blob object
    Wraps some blob methods for spack functionality
    """
    def __init__(self, url, client=None):
        """Build a blob wrapper from a gs:// url.

        The bucket named in the url is created if it does not yet exist.

        Args:
            url: parsed gs:// url; netloc is the bucket, path the blob name.
            client: optional pre-built storage client; a default
                authenticated client is created otherwise.

        Raises:
            ValueError: if ``url`` does not use the ``gs`` scheme.
        """
        self.url = url
        if url.scheme != 'gs':
            raise ValueError('Can not create GCS blob connection with scheme: {SCHEME}'
                             .format(SCHEME=url.scheme))
        self.client = client or gcs_client()
        self.bucket = GCSBucket(url)
        # Blob names are bucket-relative: drop the leading slash.
        self.blob_path = self.url.path.lstrip('/')
        tty.debug("New GCSBlob")
        tty.debug(" blob_path = {0}".format(self.blob_path))
        if not self.bucket.exists():
            tty.warn("The bucket {0} does not exist, it will be created"
                     .format(self.bucket.name))
            self.bucket.create()
    def get(self):
        # Fetch the blob (with metadata); None if it does not exist.
        return self.bucket.get_blob(self.blob_path)
    def exists(self):
        # True if the blob exists remotely; blob.exists() makes the API call.
        from google.cloud.exceptions import NotFound
        try:
            blob = self.bucket.blob(self.blob_path)
            exists = blob.exists()
        except NotFound:
            return False
        return exists
    def delete_blob(self):
        # Best-effort delete: a missing blob is logged, not raised.
        from google.cloud.exceptions import NotFound
        try:
            blob = self.bucket.blob(self.blob_path)
            blob.delete()
        except NotFound as ex:
            tty.error("{0}, Could not delete gcs blob {1}".format(ex, self.blob_path))
    def upload_to_blob(self, local_file_path):
        # Upload the local file's contents to this blob path.
        blob = self.bucket.blob(self.blob_path)
        blob.upload_from_filename(local_file_path)
    def get_blob_byte_stream(self):
        # Binary read stream over the blob's contents.
        return self.bucket.get_blob(self.blob_path).open(mode='rb')
    def get_blob_headers(self):
        # HTTP-style header dict derived from the blob's metadata.
        # NOTE(review): get_blob returns None for a missing blob, which would
        # raise AttributeError here — callers presumably check exists() first.
        blob = self.bucket.get_blob(self.blob_path)
        headers = {
            'Content-type': blob.content_type,
            'Content-encoding': blob.content_encoding,
            'Content-language': blob.content_language,
            'MD5Hash': blob.md5_hash
        }
        return headers
|
LLNL/spack
|
lib/spack/spack/util/gcs.py
|
Python
|
lgpl-2.1
| 7,039
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
# Auto-generated proto-plus wrapper for the Google Ads v9 enum; the module
# manifest drives what __all__ exports below.
__protobuf__ = proto.module(
    package="google.ads.googleads.v9.enums",
    marshal="google.ads.googleads.v9",
    manifest={"AdGroupCriterionApprovalStatusEnum",},
)
class AdGroupCriterionApprovalStatusEnum(proto.Message):
    r"""Container for enum describing possible AdGroupCriterion
    approval statuses.
    """

    class AdGroupCriterionApprovalStatus(proto.Enum):
        r"""Enumerates AdGroupCriterion approval statuses."""
        UNSPECIFIED = 0
        UNKNOWN = 1
        APPROVED = 2
        DISAPPROVED = 3
        PENDING_REVIEW = 4
        UNDER_REVIEW = 5
__all__ = tuple(sorted(__protobuf__.manifest))
|
googleads/google-ads-python
|
google/ads/googleads/v9/enums/types/ad_group_criterion_approval_status.py
|
Python
|
apache-2.0
| 1,262
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-keypairs'
POLICY_ROOT = 'os_compute_api:os-keypairs:%s'
# Keypair API policies: 'discoverable' is open to any caller, the CRUD
# operations are restricted to admins or the owning user, and the base
# policy falls back to the project's admin-or-owner rule.
keypairs_policies = [
    policy.RuleDefault(
        name=POLICY_ROOT % 'discoverable',
        check_str=base.RULE_ANY),
    policy.RuleDefault(
        name=POLICY_ROOT % 'index',
        check_str='rule:admin_api or user_id:%(user_id)s'),
    policy.RuleDefault(
        name=POLICY_ROOT % 'create',
        check_str='rule:admin_api or user_id:%(user_id)s'),
    policy.RuleDefault(
        name=POLICY_ROOT % 'delete',
        check_str='rule:admin_api or user_id:%(user_id)s'),
    policy.RuleDefault(
        name=POLICY_ROOT % 'show',
        check_str='rule:admin_api or user_id:%(user_id)s'),
    policy.RuleDefault(
        name=BASE_POLICY_NAME,
        check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
    """Return the keypairs policy rules for registration with oslo.policy."""
    return keypairs_policies
|
xuweiliang/Codelibrary
|
nova/policies/keypairs.py
|
Python
|
apache-2.0
| 1,555
|
from __future__ import absolute_import
import json
import numpy as np
from ..app import app
from . import test_utils
from ...plotting import (reset_output, output_server, push, curdoc, figure)
from ...session import TestSession
from ...models.sources import ServerDataSource
from ...models.ranges import Range1d
from ...models.renderers import GlyphRenderer
from ...transforms import ar_downsample as ar
from unittest import skip
@skip
class TestAr(test_utils.FlaskClientTestCase):
    """Integration tests for the bokeh server /render downsampling endpoint.

    The whole class is skipped (see ``@skip``): abstract rendering against
    the current server is known-broken (see the inline 'hack' notes).
    """
    def test_ar(self):
        """POST a plot state to /render and expect a rendered AR image back."""
        #move to setUp
        reset_output()
        sess = TestSession(client=app.test_client())
        output_server('ar', session=sess)
        # wierd - server source has no params now besides the blaze expression
        # we don't pass a data_url right now because right now we assume the
        # server is the bokeh server, however that can be handled later
        orig_source = ServerDataSource(expr={'op': 'Field', 'args': [':leaf', 'gauss']})
        #make template plot
        p = figure(x_range=Range1d(start=0, end=0), y_range=Range1d(start=0, end=0))
        plot = p.square('oneA', 'oneB', color='#FF00FF', source=orig_source)
        #replace that plot with an abstract rendering one
        arplot = ar.heatmap(
            plot,
            spread=3,
            transform=None,
            title="Server-rendered, uncorrected")
        # set explicit value for ranges, or else they are set at 0
        # until the javascript auto-sets it
        arplot.x_range = Range1d(start=-2.0, end=2.0)
        arplot.y_range = Range1d(start=-2.0, end=2.0)
        glyph = arplot.select({'type' : GlyphRenderer})[0].glyph
        #extract the original data source because it was replaced?!
        source = arplot.select({'type' : ServerDataSource})[0]
        #our docs don't have screen ranges, because we make those on the fly in javascript
        #so we make fake ones!
        screen_x_range = Range1d(start=0, end=200)
        screen_y_range = Range1d(start=0, end=200)
        #this dumping to json thing is terrible
        plot_state = {'screen_x' : curdoc().dump(screen_x_range)[0]['attributes'],
                      'screen_y' : curdoc().dump(screen_y_range)[0]['attributes'],
                      'data_x' : curdoc().dump(arplot.x_range)[0]['attributes'],
                      'data_y' : curdoc().dump(arplot.y_range)[0]['attributes']}
        #save data to server
        #hack - because recent changes broke AR
        push()
        data = {'plot_state' : plot_state}
        url = "/render/%s/%s/%s" % (curdoc().docid, source._id, glyph._id)
        result = self.client.post(
            url,
            data=json.dumps(data),
            headers={'content-type' : 'application/json'}
        )
        assert result.status_code == 200
        data = json.loads(result.data.decode('utf-8'))
        image = np.array(data['data']['image'][0])
        #I guess it's data dependent so the shape changes....
        assert image.shape[0] >200
        assert image.shape[1] >200
    def test_line1d_downsample(self):
        """A line glyph must be minmax-downsampled to 2x the screen width."""
        reset_output()
        sess = TestSession(client=app.test_client())
        output_server('ar', session=sess)
        source = ServerDataSource(expr={'op': 'Field', 'args': [':leaf', 'aapl']})
        source.transform = dict(direction='x',
                                resample='line1d',
                                method='minmax')
        # hacky - we have to specify range, otherwise code doesn't know how to serialize
        # data ranges
        p = figure(x_range=Range1d(start=0, end=0), y_range=Range1d(start=0, end=0))
        plot = p.line('date', 'close',
                      x_axis_type = "datetime",
                      color='#A6CEE3', tools="pan,wheel_zoom,box_zoom,reset,previewsave",
                      source=source,
                      legend='AAPL')
        push()
        screen_x_range = Range1d(start=0, end=200)
        screen_y_range = Range1d(start=0, end=200)
        plot_state = {'screen_x' : curdoc().dump(screen_x_range)[0]['attributes'],
                      'screen_y' : curdoc().dump(screen_y_range)[0]['attributes'],
                      'data_x' : curdoc().dump(plot.x_range)[0]['attributes'],
                      'data_y' : curdoc().dump(plot.y_range)[0]['attributes']}
        data = {'plot_state' : plot_state, 'auto_bounds' : 'True'}
        glyph = plot.select({'type' : GlyphRenderer})[0].glyph
        url = "/render/%s/%s/%s" % (curdoc().docid, source._id, glyph._id)
        result = self.client.post(
            url,
            data=json.dumps(data),
            headers={'content-type' : 'application/json'}
        )
        assert result.status_code == 200
        data = json.loads(result.data.decode('utf-8'))
        #2 x plot size (200)
        assert len(data['data']['close']) == 400
    def test_heatmap_downsample(self):
        """An image glyph must be resampled to the 200x200 screen size."""
        reset_output()
        sess = TestSession(client=app.test_client())
        output_server('ar', session=sess)
        source = ServerDataSource(expr={'op': 'Field', 'args': [':leaf', 'array']})
        source.transform = dict(resample='heatmap',
                                global_x_range=[0, 10],
                                global_y_range=[0, 10],
                                global_offset_x=0,
                                global_offset_y=0,
                                type="ndarray",
        )
        # hacky - we have to specify range, otherwise code doesn't know how to serialize
        # data ranges
        p = figure(x_range=Range1d(start=0, end=10), y_range=Range1d(start=0, end=10))
        plot = p.image(image="image",
                       x='x',
                       y='y',
                       dw='dw',
                       dh='dh',
                       source=source,
        )
        push()
        screen_x_range = Range1d(start=0, end=200)
        screen_y_range = Range1d(start=0, end=200)
        plot_state = {'screen_x' : curdoc().dump(screen_x_range)[0]['attributes'],
                      'screen_y' : curdoc().dump(screen_y_range)[0]['attributes'],
                      'data_x' : curdoc().dump(plot.x_range)[0]['attributes'],
                      'data_y' : curdoc().dump(plot.y_range)[0]['attributes']}
        data = {'plot_state' : plot_state}
        glyph = plot.select({'type' : GlyphRenderer})[0].glyph
        url = "/render/%s/%s/%s" % (curdoc().docid, source._id, glyph._id)
        result = self.client.post(
            url,
            data=json.dumps(data),
            headers={'content-type' : 'application/json'}
        )
        assert result.status_code == 200
        data = json.loads(result.data.decode('utf-8'))
        #2 x plot size (200)
        assert np.array(data['data']['image'][0]).shape == (200,200)
|
rhiever/bokeh
|
bokeh/server/tests/remotedata_tests.py
|
Python
|
bsd-3-clause
| 6,798
|
"""Utility class for formatting scansion patterns"""
import logging
from cltk.prosody.lat.scansion_constants import ScansionConstants
LOG = logging.getLogger(__name__)
# Stay silent unless the host application configures logging handlers.
LOG.addHandler(logging.NullHandler())
__author__ = ["Todd Cook <todd.g.cook@gmail.com>"]
__license__ = "MIT License"
class ScansionFormatter:
    """
    Users can specify which scansion symbols to use in the formatting.

    >>> print(ScansionFormatter().hexameter( "-UU-UU-UU---UU--"))
    -UU|-UU|-UU|--|-UU|--

    >>> constants = ScansionConstants(unstressed="˘", \
    stressed= "¯", optional_terminal_ending="x")
    >>> formatter = ScansionFormatter(constants)
    >>> print(formatter.hexameter( "¯˘˘¯˘˘¯˘˘¯¯¯˘˘¯¯"))
    ¯˘˘|¯˘˘|¯˘˘|¯¯|¯˘˘|¯¯
    """

    def __init__(self, constants=None):
        # Avoid a mutable default argument: a fresh ScansionConstants is
        # built per instance when none is supplied (behaviorally equivalent
        # to the previous `constants=ScansionConstants()` default).
        self.constants = constants if constants is not None else ScansionConstants()
        # Map each plain vowel to its accented form; accented vowels map to
        # themselves. Used to mark stressed syllables with macrons.
        self.stress_accent_dict = dict(
            zip(
                list(self.constants.VOWELS + self.constants.ACCENTED_VOWELS),
                list(self.constants.ACCENTED_VOWELS + self.constants.ACCENTED_VOWELS),
            )
        )

    def hexameter(self, line: str) -> str:
        """
        Format a string of hexameter metrical stress patterns into foot divisions

        :param line: the scansion pattern
        :return: the scansion string formatted with foot breaks

        >>> print(ScansionFormatter().hexameter( "-UU-UU-UU---UU--"))
        -UU|-UU|-UU|--|-UU|--
        """
        mylist = list(line)
        items = len(mylist)
        # Scan right-to-left, peeling off one metrical foot at a time and
        # inserting a separator in front of each recognized foot.
        idx_start = items - 2
        idx_end = items
        while idx_start > 0:
            potential_foot = "".join(mylist[idx_start:idx_end])
            if (
                potential_foot == self.constants.HEXAMETER_ENDING
                or potential_foot == self.constants.SPONDEE
            ):
                mylist.insert(idx_start, self.constants.FOOT_SEPARATOR)
                idx_start -= 1
                idx_end -= 2
            if potential_foot == self.constants.DACTYL:
                # Use the configured separator here as well; the original
                # hard-coded "|", which broke custom FOOT_SEPARATOR symbols.
                mylist.insert(idx_start, self.constants.FOOT_SEPARATOR)
                idx_start -= 1
                idx_end -= 3
            idx_start -= 1
        return "".join(mylist)

    def merge_line_scansion(self, line: str, scansion: str) -> str:
        """
        Merge a line of verse with its scansion string. Do not accent diphthongs.

        :param line: the original Latin verse line
        :param scansion: the scansion pattern
        :return: the original line with the scansion pattern applied via macrons

        >>> print(ScansionFormatter().merge_line_scansion(
        ... "Arma virumque cano, Troiae qui prīmus ab ōrīs",
        ... "- U U -  U U -    UU-   -   -  U U  -  -"))
        Ārma virūmque canō, Troiae quī prīmus ab ōrīs

        >>> print(ScansionFormatter().merge_line_scansion(
        ... "lītora, multum ille et terrīs iactātus et alto",
        ... " -  U U -    - -   -  -  -   -  - U  U  -  U"))
        lītora, mūltum īlle ēt tērrīs iāctātus et ālto

        >>> print(ScansionFormatter().merge_line_scansion(
        ... 'aut facere, haec a te dictaque factaque sunt',
        ... ' -  U U -    - -  -  -  U U  -  U U  -  '))
        aut facere, haec ā tē dīctaque fāctaque sūnt
        """
        letters = list(line)
        marks = list(scansion)
        if len(scansion) < len(line):
            # Pad the scansion with unstressed placeholders so both lists
            # align. (The original used `(" " * n).split()`, which is always
            # the empty list; padding with real spaces is behaviorally
            # identical since only STRESSED marks trigger any change.)
            marks += [" "] * (len(line) - len(scansion))
        for idx in range(0, len(marks)):
            if marks[idx] == self.constants.STRESSED:
                vowel = letters[idx]
                if vowel not in self.stress_accent_dict:
                    LOG.error(
                        "problem! vowel: {} not in dict for line {}".format(vowel, line)
                    )
                else:
                    if idx > 1:
                        if (letters[idx - 2] + letters[idx - 1]).lower() == "qu":
                            # After 'qu' the u is consonantal, so this vowel
                            # takes the accent unconditionally.
                            letters[idx] = self.stress_accent_dict[vowel]
                            continue
                    if idx > 0:
                        if letters[idx - 1] + vowel in self.constants.DIPTHONGS:
                            # Never accent the second vowel of a diphthong.
                            continue
                        letters[idx] = self.stress_accent_dict[vowel]
                    else:
                        letters[idx] = self.stress_accent_dict[vowel]
        return "".join(letters).rstrip()
|
kylepjohnson/cltk
|
src/cltk/prosody/lat/scansion_formatter.py
|
Python
|
mit
| 4,537
|
from __future__ import absolute_import, print_function
from django.conf.urls import patterns, url
from .endpoints.auth_index import AuthIndexEndpoint
from .endpoints.broadcast_index import BroadcastIndexEndpoint
from .endpoints.catchall import CatchallEndpoint
from .endpoints.event_details import EventDetailsEndpoint
from .endpoints.group_details import GroupDetailsEndpoint
from .endpoints.group_events import GroupEventsEndpoint
from .endpoints.group_events_latest import GroupEventsLatestEndpoint
from .endpoints.group_notes import GroupNotesEndpoint
from .endpoints.group_stats import GroupStatsEndpoint
from .endpoints.group_tags import GroupTagsEndpoint
from .endpoints.group_tagkey_details import GroupTagKeyDetailsEndpoint
from .endpoints.group_tagkey_values import GroupTagKeyValuesEndpoint
from .endpoints.helppage_details import HelpPageDetailsEndpoint
from .endpoints.helppage_index import HelpPageIndexEndpoint
from .endpoints.index import IndexEndpoint
from .endpoints.internal_stats import InternalStatsEndpoint
from .endpoints.legacy_project_redirect import LegacyProjectRedirectEndpoint
from .endpoints.organization_access_request_details import OrganizationAccessRequestDetailsEndpoint
from .endpoints.organization_details import OrganizationDetailsEndpoint
from .endpoints.organization_member_details import OrganizationMemberDetailsEndpoint
from .endpoints.organization_member_index import OrganizationMemberIndexEndpoint
from .endpoints.organization_member_team_details import OrganizationMemberTeamDetailsEndpoint
from .endpoints.organization_index import OrganizationIndexEndpoint
from .endpoints.organization_projects import OrganizationProjectsEndpoint
from .endpoints.organization_stats import OrganizationStatsEndpoint
from .endpoints.organization_teams import OrganizationTeamsEndpoint
from .endpoints.project_details import ProjectDetailsEndpoint
from .endpoints.project_group_index import ProjectGroupIndexEndpoint
from .endpoints.project_keys import ProjectKeysEndpoint
from .endpoints.project_key_details import ProjectKeyDetailsEndpoint
from .endpoints.project_member_index import ProjectMemberIndexEndpoint
from .endpoints.project_releases import ProjectReleasesEndpoint
from .endpoints.project_rules import ProjectRulesEndpoint
from .endpoints.project_rule_details import ProjectRuleDetailsEndpoint
from .endpoints.project_searches import ProjectSearchesEndpoint
from .endpoints.project_search_details import ProjectSearchDetailsEndpoint
from .endpoints.project_stats import ProjectStatsEndpoint
from .endpoints.project_tagkey_details import ProjectTagKeyDetailsEndpoint
from .endpoints.project_tagkey_values import ProjectTagKeyValuesEndpoint
from .endpoints.release_details import ReleaseDetailsEndpoint
from .endpoints.release_files import ReleaseFilesEndpoint
from .endpoints.release_file_details import ReleaseFileDetailsEndpoint
from .endpoints.system_health import SystemHealthEndpoint
from .endpoints.team_details import TeamDetailsEndpoint
from .endpoints.team_groups_new import TeamGroupsNewEndpoint
from .endpoints.team_groups_trending import TeamGroupsTrendingEndpoint
from .endpoints.team_project_index import TeamProjectIndexEndpoint
from .endpoints.team_stats import TeamStatsEndpoint
from .endpoints.user_details import UserDetailsEndpoint
# Version-0 API routing table: maps REST URL regexes onto endpoint views.
# Patterns are matched top to bottom, so the bare r'^' catchall must stay last.
urlpatterns = patterns(
    '',
    # Auth
    url(r'^auth/$',
        AuthIndexEndpoint.as_view(),
        name='sentry-api-0-auth'),
    # Broadcasts
    url(r'^broadcasts/$',
        BroadcastIndexEndpoint.as_view(),
        name='sentry-api-0-broadcast-index'),
    # Users
    url(r'^users/(?P<user_id>[^\/]+)/$',
        UserDetailsEndpoint.as_view(),
        name='sentry-api-0-user-details'),
    # Organizations
    url(r'^organizations/$',
        OrganizationIndexEndpoint.as_view(),
        name='sentry-api-0-organizations'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/$',
        OrganizationDetailsEndpoint.as_view(),
        name='sentry-api-0-organization-details'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/access-requests/(?P<request_id>\d+)/$',
        OrganizationAccessRequestDetailsEndpoint.as_view(),
        name='sentry-api-0-organization-access-request-details'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/members/$',
        OrganizationMemberIndexEndpoint.as_view(),
        name='sentry-api-0-organization-member-index'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/members/(?P<member_id>[^\/]+)/$',
        OrganizationMemberDetailsEndpoint.as_view(),
        name='sentry-api-0-organization-member-details'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/members/(?P<member_id>[^\/]+)/teams/(?P<team_slug>[^\/]+)/$',
        OrganizationMemberTeamDetailsEndpoint.as_view(),
        name='sentry-api-0-organization-member-team-details'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/projects/$',
        OrganizationProjectsEndpoint.as_view(),
        name='sentry-api-0-organization-projects'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/stats/$',
        OrganizationStatsEndpoint.as_view(),
        name='sentry-api-0-organization-stats'),
    url(r'^organizations/(?P<organization_slug>[^\/]+)/teams/$',
        OrganizationTeamsEndpoint.as_view(),
        name='sentry-api-0-organization-teams'),
    # Teams
    url(r'^teams/(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/$',
        TeamDetailsEndpoint.as_view(),
        name='sentry-api-0-team-details'),
    url(r'^teams/(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/groups/new/$',
        TeamGroupsNewEndpoint.as_view(),
        name='sentry-api-0-team-groups-new'),
    url(r'^teams/(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/groups/trending/$',
        TeamGroupsTrendingEndpoint.as_view(),
        name='sentry-api-0-team-groups-trending'),
    url(r'^teams/(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/projects/$',
        TeamProjectIndexEndpoint.as_view(),
        name='sentry-api-0-team-project-index'),
    url(r'^teams/(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/stats/$',
        TeamStatsEndpoint.as_view(),
        name='sentry-api-0-team-stats'),
    # Handles redirecting project_id => org_slug/project_slug
    # TODO(dcramer): remove this after a reasonable period of time
    # NOTE: no trailing '$' — this pattern deliberately matches any suffix
    # after the numeric id so the redirect can preserve the sub-path.
    url(r'^projects/(?P<project_id>\d+)/(?P<path>(?:groups|releases|stats|tags)/.+)?',
        LegacyProjectRedirectEndpoint.as_view()),
    # Projects
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/$',
        ProjectDetailsEndpoint.as_view(),
        name='sentry-api-0-project-details'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/groups/$',
        ProjectGroupIndexEndpoint.as_view(),
        name='sentry-api-0-project-group-index'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/keys/$',
        ProjectKeysEndpoint.as_view(),
        name='sentry-api-0-project-keys'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/keys/(?P<key_id>[^\/]+)/$',
        ProjectKeyDetailsEndpoint.as_view(),
        name='sentry-api-0-project-key-details'),
    url(r'^projects/(?P<organization_slug>[^/]+)/(?P<project_slug>[^/]+)/members/$',
        ProjectMemberIndexEndpoint.as_view(),
        name='sentry-api-0-project-member-index'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/releases/$',
        ProjectReleasesEndpoint.as_view(),
        name='sentry-api-0-project-releases'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/releases/(?P<version>[^/]+)/$',
        ReleaseDetailsEndpoint.as_view(),
        name='sentry-api-0-release-details'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/releases/(?P<version>[^/]+)/files/$',
        ReleaseFilesEndpoint.as_view(),
        name='sentry-api-0-release-files'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/releases/(?P<version>[^/]+)/files/(?P<file_id>\d+)/$',
        ReleaseFileDetailsEndpoint.as_view(),
        name='sentry-api-0-release-file-details'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/rules/$',
        ProjectRulesEndpoint.as_view(),
        name='sentry-api-0-project-rules'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/rules/(?P<rule_id>[^\/]+)/$',
        ProjectRuleDetailsEndpoint.as_view(),
        name='sentry-api-0-project-rule-details'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/searches/$',
        ProjectSearchesEndpoint.as_view(),
        name='sentry-api-0-project-searches'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/searches/(?P<search_id>[^\/]+)/$',
        ProjectSearchDetailsEndpoint.as_view(),
        name='sentry-api-0-project-search-details'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/stats/$',
        ProjectStatsEndpoint.as_view(),
        name='sentry-api-0-project-stats'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/tags/(?P<key>[^/]+)/$',
        ProjectTagKeyDetailsEndpoint.as_view(),
        name='sentry-api-0-project-tagkey-details'),
    url(r'^projects/(?P<organization_slug>[^\/]+)/(?P<project_slug>[^\/]+)/tags/(?P<key>[^/]+)/values/$',
        ProjectTagKeyValuesEndpoint.as_view(),
        name='sentry-api-0-project-tagkey-values'),
    # Groups
    url(r'^groups/(?P<group_id>\d+)/$',
        GroupDetailsEndpoint.as_view(),
        name='sentry-api-0-group-details'),
    url(r'^groups/(?P<group_id>\d+)/events/$',
        GroupEventsEndpoint.as_view(),
        name='sentry-api-0-group-events'),
    url(r'^groups/(?P<group_id>\d+)/events/latest/$',
        GroupEventsLatestEndpoint.as_view(),
        name='sentry-api-0-group-events-latest'),
    url(r'^groups/(?P<group_id>\d+)/notes/$',
        GroupNotesEndpoint.as_view(),
        name='sentry-api-0-group-notes'),
    url(r'^groups/(?P<group_id>\d+)/stats/$',
        GroupStatsEndpoint.as_view(),
        name='sentry-api-0-group-stats'),
    url(r'^groups/(?P<group_id>\d+)/tags/$',
        GroupTagsEndpoint.as_view(),
        name='sentry-api-0-group-tags'),
    url(r'^groups/(?P<group_id>\d+)/tags/(?P<key>[^/]+)/$',
        GroupTagKeyDetailsEndpoint.as_view(),
        name='sentry-api-0-group-tagkey-details'),
    url(r'^groups/(?P<group_id>\d+)/tags/(?P<key>[^/]+)/values/$',
        GroupTagKeyValuesEndpoint.as_view(),
        name='sentry-api-0-group-tagkey-values'),
    # Events
    url(r'^events/(?P<event_id>\d+)/$',
        EventDetailsEndpoint.as_view(),
        name='sentry-api-0-event-details'),
    # Help Pages
    url(r'^helppages/$',
        HelpPageIndexEndpoint.as_view(),
        name='sentry-api-0-helppage-index'),
    url(r'^helppages/(?P<page_id>\d+)/$',
        HelpPageDetailsEndpoint.as_view(),
        name='sentry-api-0-helppage-details'),
    # Internal
    url(r'^internal/health/$',
        SystemHealthEndpoint.as_view(),
        name='sentry-api-0-system-health'),
    url(r'^internal/stats/$',
        InternalStatsEndpoint.as_view(),
        name='sentry-api-0-internal-stats'),
    url(r'^$',
        IndexEndpoint.as_view(),
        name='sentry-api-index'),
    # Catchall: any otherwise-unmatched path under the API prefix.
    url(r'^',
        CatchallEndpoint.as_view(),
        name='sentry-api-catchall'),
    # url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
)
|
1tush/sentry
|
src/sentry/api/urls.py
|
Python
|
bsd-3-clause
| 11,496
|
import argparse
import csv
import json
import os
import numpy as np
import warnings
# Collect the raw light-curve JSON files for every object listed in the
# survey metadata file and bundle them into a single dat.json per survey.
parser = argparse.ArgumentParser()
parser.add_argument('metafile')
args = parser.parse_args()

# Normalized phase grid, 50 evenly spaced samples in [0, 1].
# NOTE(review): unused in this script as shown — presumably kept for
# consistency with sibling scripts; verify before removing.
GLOBAL_PHASE = np.linspace(0, 1, num = 50).tolist()

with open(args.metafile) as f:
    meta = json.load(f)

raw = {"data":{}}
for obj in meta['data']:
    # NOTE(review): the period is compared against the *string* 'null',
    # not None — assumes the metadata encodes missing periods that way.
    if obj['P'] != 'null':
        raw_json = "./lightcurves/{}/raw/{}.dat.json"\
            .format(meta['survey'], obj['uid'])
        if os.path.exists(raw_json):
            # Fix: use a context manager so the file handle is closed
            # (the original json.load(open(...)) leaked one per object).
            with open(raw_json) as raw_file:
                raw['data'][obj['uid']] = json.load(raw_file)
        else:
            print('{} does not exist'.format(obj['uid']))
    else:
        print('{} does not have period'.format(obj['uid']))

# Fix: write the bundle inside a context manager so the output file is
# flushed and closed even if the dump fails.
with open('./lightcurves/{}/dat.json'.format(meta['survey']), 'w') as f_out:
    f_out.write(json.dumps(raw))
|
zhewang/lcvis
|
python_scripts/PLV_to_New_Format/get_raw.py
|
Python
|
gpl-2.0
| 881
|
# -*- encoding: utf-8 -*-
import os
from abjad.tools import documentationtools
from abjad.tools import systemtools
from abjad.tools.developerscripttools.DeveloperScript import DeveloperScript
from abjad.tools.developerscripttools.ReplaceInFilesScript \
import ReplaceInFilesScript
class RenameModulesScript(DeveloperScript):
    r'''Renames classes and functions.
    Handle renaming the module and package, as well as any tests,
    documentation or mentions of the class throughout the Abjad codebase:
    .. shell::
        ajv rename --help
    '''

    ### PUBLIC PROPERTIES ###

    @property
    def alias(self):
        r'''Alias of script.
        Returns ``'rename'``.
        '''
        return 'rename'

    @property
    def long_description(self):
        r'''Long description of script.
        Returns string or none.
        '''
        return None

    @property
    def scripting_group(self):
        r'''Scripting group of script.
        Returns none.
        '''
        return None

    @property
    def short_description(self):
        r'''Short description of script.
        Returns string.
        '''
        return 'Rename public modules.'

    @property
    def version(self):
        r'''Version of script.
        Returns float.
        '''
        return 1.0

    ### PRIVATE METHODS ###

    def _codebase_name_to_codebase_docs_path(self, codebase):
        r'''Maps a codebase name to its API documentation directory.
        Raises for any codebase other than 'mainline' or 'experimental'.
        '''
        from abjad import abjad_configuration
        if codebase == 'mainline':
            return os.path.join(
                abjad_configuration.abjad_directory,
                'docs',
                'source',
                'api',
                'tools',
                )
        elif codebase == 'experimental':
            return os.path.join(
                abjad_configuration.abjad_experimental_directory,
                'docs',
                'source',
                'tools',
                )
        message = 'bad codebase name: {!r}.'
        message = message.format(codebase)
        raise Exception(message)

    def _codebase_name_to_codebase_tools_path(self, codebase):
        r'''Maps a codebase name to its tools directory.
        Raises for any codebase other than 'mainline' or 'experimental'.
        '''
        from abjad import abjad_configuration
        if codebase == 'mainline':
            return os.path.join(
                abjad_configuration.abjad_directory, 'tools')
        elif codebase == 'experimental':
            return os.path.join(
                abjad_configuration.abjad_experimental_directory, 'tools')
        message = 'bad codebase name: {!r}.'
        message = message.format(codebase)
        raise Exception(message)

    def _confirm_name_changes(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Prints the proposed rename and asks for confirmation.
        Returns true for yes, false for no; aborts on 'abort'/'quit'.
        '''
        max_codebase = max(len(old_codebase), len(new_codebase))
        old_codebase = old_codebase.ljust(max_codebase)
        new_codebase = new_codebase.ljust(max_codebase)
        print('')
        print('Is ...')
        print('')
        print('    [{}] {}.{}()'.format(
            old_codebase, old_tools_package_name, old_module_name))
        print('        ===>')
        print('    [{}] {}.{}()'.format(
            new_codebase, new_tools_package_name, new_module_name))
        print('')
        # NOTE(review): raw_input is a Python 2 builtin; this script will
        # raise NameError on Python 3 — confirm the supported interpreter.
        string = raw_input('... correct [yes, no, abort]? ').lower()
        print('')
        if string in ('y', 'yes'):
            return True
        elif string in ('a', 'abort', 'q', 'quit'):
            raise SystemExit
        # Any unrecognized answer is treated as 'no'.
        return False

    def _get_object_names(self, kind, codebase, tools_package_name):
        r'''Collects class or function objects defined in a tools package,
        sorted by name. `kind` must be 'class' or 'function'.
        '''
        assert kind in ('class', 'function')
        tools_path = self._codebase_name_to_codebase_tools_path(codebase)
        path = os.path.join(tools_path, tools_package_name)
        if kind == 'class':
            generator = documentationtools.yield_all_classes(
                code_root=path,
                include_private_objects=True,
                )
        elif kind == 'function':
            generator = documentationtools.yield_all_functions(
                code_root=path,
                include_private_objects=True,
                )
        return tuple(sorted(generator, key=lambda x: x.__name__))

    def _get_tools_package_names(self, codebase):
        r'''Lists the public tools package names in a codebase,
        sorted alphabetically.
        '''
        tools_path = self._codebase_name_to_codebase_tools_path(codebase)
        names = []
        for x in os.listdir(tools_path):
            if os.path.isdir(os.path.join(tools_path, x)):
                if not x.startswith(('_', '.')):
                    names.append(x)
        return tuple(sorted(names))

    def _parse_tools_package_path(self, path):
        r'''Splits a 'toolspackage.module' path and locates which codebase
        contains the tools package. Returns (codebase, package, module).
        Exits if the path is malformed or the package cannot be found.
        '''
        from abjad import abjad_configuration
        if '.' not in path:
            raise SystemExit
        tools_package_name, module_name = path.split('.')
        mainline_tools_directory = os.path.join(
            abjad_configuration.abjad_directory,
            'tools',
            )
        for directory_name in os.listdir(mainline_tools_directory):
            directory = os.path.join(
                mainline_tools_directory, directory_name)
            if not os.path.isdir(directory):
                continue
            elif directory_name != tools_package_name:
                continue
            return 'mainline', tools_package_name, module_name
        experimental_tools_directory = os.path.join(
            abjad_configuration.abjad_experimental_directory,
            'tools',
            )
        # Fix: the original iterated the *mainline* tools directory here,
        # so experimental packages could never be found.
        for directory_name in os.listdir(experimental_tools_directory):
            directory = os.path.join(
                experimental_tools_directory, directory_name)
            if not os.path.isdir(directory):
                continue
            elif directory_name != tools_package_name:
                continue
            return 'experimental', tools_package_name, module_name
        raise SystemExit

    def _rename_old_api_page(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Moves the module's reST API page to its new docs location.'''
        print('Renaming old API page ...')
        old_docs_path = self._codebase_name_to_codebase_docs_path(old_codebase)
        new_docs_path = self._codebase_name_to_codebase_docs_path(new_codebase)
        old_rst_file_name = old_module_name + '.rst'
        new_rst_file_name = new_module_name + '.rst'
        old_api_path = os.path.join(
            old_docs_path, old_tools_package_name, old_rst_file_name)
        new_api_path = os.path.join(
            new_docs_path, new_tools_package_name, new_rst_file_name)
        command = 'mv {} {}'.format(
            old_api_path, new_api_path)
        systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _rename_old_module(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Moves the module file itself via `git mv -f`.'''
        print('Renaming old module ...')
        old_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        new_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        old_module = old_module_name + '.py'
        old_path = os.path.join(
            old_tools_path, old_tools_package_name, old_module)
        new_module = new_module_name + '.py'
        new_path = os.path.join(
            new_tools_path, new_tools_package_name, new_module)
        command = 'git mv -f {} {}'.format(
            old_path, new_path)
        systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _rename_old_test_files(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Moves every test file named after the module via `git mv -f`.
        Does nothing when the package has no test directory.
        '''
        print('Renaming old test file(s) ...')
        old_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        old_test_path = os.path.join(
            old_tools_path, old_tools_package_name, 'test')
        if not os.path.exists(old_test_path):
            return
        new_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        new_test_path = os.path.join(
            new_tools_path, new_tools_package_name, 'test')
        old_test_file_prefix = 'test_{}_{}'.format(
            old_tools_package_name, old_module_name)
        old_test_file_names = [x for x in os.listdir(old_test_path)
            if x.startswith(old_test_file_prefix) and x.endswith('.py')]
        for old_test_file_name in old_test_file_names:
            old_test_file_path = os.path.join(
                old_test_path, old_test_file_name)
            # Keep any suffix after the prefix (e.g. '_01.py') intact.
            old_test_file_suffix = old_test_file_name[
                len(old_test_file_prefix):]
            new_test_file_name = 'test_{}_{}{}'.format(
                new_tools_package_name, new_module_name, old_test_file_suffix)
            new_test_file_path = os.path.join(
                new_test_path, new_test_file_name)
            command = 'git mv -f {} {}'.format(
                old_test_file_path, new_test_file_path)
            systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _update_codebase(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Rewrites references throughout the codebase in three passes:
        dotted 'package.module' references, test-file name prefixes, and
        finally bare module-name mentions.
        '''
        from abjad import abjad_configuration
        without_dirs = ['--without-dirs', 'build', '--without-dirs', '_build']
        directory = abjad_configuration.abjad_root_directory
        print('Updating codebase ...')
        print('')
        old_text = '{}.{}'.format(old_tools_package_name, old_module_name)
        new_text = '{}.{}'.format(new_tools_package_name, new_module_name)
        command = [
            directory,
            old_text,
            new_text,
            '--force',
            '--whole-words-only',
            #'--verbose',
            ]
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')
        old_text = 'test_{}_{}_'.format(
            old_tools_package_name, old_module_name)
        new_text = 'test_{}_{}_'.format(
            new_tools_package_name, new_module_name)
        command = [directory, old_text, new_text, '--force', '--verbose']
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')
        old_text = old_module_name
        new_text = new_module_name
        command = [
            directory,
            old_text,
            new_text,
            '--force',
            '--whole-words-only',
            #'--verbose',
            ]
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')

    ### PUBLIC METHODS ###

    def process_args(self, args):
        r'''Processes `args`.
        Returns none.
        '''
        systemtools.IOManager.clear_terminal()
        # Handle source path:
        old_codebase, old_tools_package_name, old_module_name = \
            self._parse_tools_package_path(args.source)
        old_codebase_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        old_module_path = os.path.join(
            old_codebase_tools_path,
            old_tools_package_name,
            old_module_name + '.py',
            )
        if not os.path.exists(old_module_path):
            message = 'source does not exist: {}'
            message = message.format(old_module_path)
            raise SystemExit(message)
        # Handle destination path:
        new_codebase, new_tools_package_name, new_module_name = \
            self._parse_tools_package_path(args.destination)
        new_codebase_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        new_module_path = os.path.join(
            new_codebase_tools_path,
            new_tools_package_name,
            new_module_name + '.py',
            )
        if os.path.exists(new_module_path):
            message = 'destination already exists: {}'
            # Fix: report the *destination* path (the original formatted
            # old_module_path into this message).
            message = message.format(new_module_path)
            raise SystemExit(message)
        # Process changes:
        new_args = (
            old_codebase, old_tools_package_name, old_module_name,
            new_codebase, new_tools_package_name, new_module_name,
            )
        if not self._confirm_name_changes(*new_args):
            raise SystemExit
        self._rename_old_test_files(*new_args)
        self._rename_old_api_page(*new_args)
        self._rename_old_module(*new_args)
        self._update_codebase(*new_args)
        raise SystemExit

    def setup_argument_parser(self, parser):
        r'''Sets up argument `parser`.
        Returns none.
        '''
        parser.add_argument(
            'source',
            help='toolspackage path of source module',
            )
        parser.add_argument(
            'destination',
            help='toolspackage path of destination module',
            )
|
mscuthbert/abjad
|
abjad/tools/developerscripttools/RenameModulesScript.py
|
Python
|
gpl-3.0
| 13,078
|
import unittest
import mock
from mopidy_playbackdefaults import PlaybackDefaultsFrontend
class PlaybackDefaultsFrontendTest(unittest.TestCase):
    """Tests for PlaybackDefaultsFrontend.

    The frontend is expected to call core.tracklist.set_<mode>(value)
    once per construction for each mode whose 'default_<mode>' config
    value is a boolean, and to leave modes configured with an empty
    string untouched.
    """

    # Every tracklist mode the frontend can initialize.
    _MODES = ('random', 'repeat', 'consume', 'single')

    @staticmethod
    def _make_config():
        # Fresh config with every default unset (empty string).
        return {'playbackdefaults': {
            'default_random': '', 'default_repeat': '',
            'default_consume': '', 'default_single': ''}}

    @staticmethod
    def _setter(core, mode):
        # The mock setter for a given mode, e.g. core.tracklist.set_random.
        return getattr(core.tracklist, 'set_' + mode)

    def _assert_unset(self, core, modes):
        # Assert that none of the given modes have been set on the core.
        for mode in modes:
            self.assertEqual(self._setter(core, mode).call_count, 0)

    def _check_mode(self, mode):
        # Shared scenario (was copy-pasted into four tests): setting
        # default_<mode> to True then False must forward exactly those
        # values to set_<mode> while leaving every other mode untouched.
        config = self._make_config()
        core = mock.Mock()
        self._assert_unset(core, self._MODES)
        config['playbackdefaults']['default_' + mode] = True
        PlaybackDefaultsFrontend(config, core)
        self._setter(core, mode).assert_called_once_with(True)
        config['playbackdefaults']['default_' + mode] = False
        PlaybackDefaultsFrontend(config, core)
        self.assertEqual(self._setter(core, mode).call_count, 2)
        self._setter(core, mode).assert_called_with(False)
        self._assert_unset(core, [m for m in self._MODES if m != mode])

    def test_no_settings(self):
        # With all defaults unset, construction must not touch any mode.
        config = self._make_config()
        core = mock.Mock()
        self._assert_unset(core, self._MODES)
        PlaybackDefaultsFrontend(config, core)
        self._assert_unset(core, self._MODES)

    def test_random(self):
        self._check_mode('random')

    def test_repeat(self):
        self._check_mode('repeat')

    def test_consume(self):
        self._check_mode('consume')

    def test_single(self):
        self._check_mode('single')
|
DavisNT/mopidy-playbackdefaults
|
tests/test_frontend.py
|
Python
|
apache-2.0
| 5,145
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import sys
sys.path.append("../")
from op_test import OpTest
class TestSequenceSliceOp(OpTest):
    """Checks the sequence_slice op against NumPy reference slicing.

    For each sequence in a one-level LoD tensor, the op extracts the
    sub-sequence [offset, offset + length) and concatenates the results.
    """

    def set_data(self):
        self.init_test_case()
        # The op only supports a single-level LoD.
        x = np.random.random(self.x_dim).astype('float32')
        lod = self.x_lod
        offset = np.array(self.offset).astype("int64")
        length = np.array(self.length).astype("int64")
        self.inputs = {'X': (x, lod), 'Offset': offset, 'Length': length}
        # Build the expected output by slicing each sequence in turn.
        pieces = []
        out_lod = [[]]
        seq_start = 0
        for i, seq_len in enumerate(lod[0]):
            begin = seq_start + offset[i, 0]
            piece = x[begin:begin + length[i, 0], :]
            pieces.append(piece)
            out_lod[0].append(len(piece))
            seq_start += seq_len
        self.outputs = {'Out': (np.concatenate(pieces, axis=0), out_lod)}

    def init_test_case(self):
        # Base case: five equal-length sequences with varied slices.
        self.x_dim = (100, 3, 2)
        self.x_lod = [[20, 20, 20, 20, 20]]
        self.offset = [[1], [2], [3], [4], [5]]
        self.length = [[10], [8], [6], [4], [2]]

    def setUp(self):
        self.op_type = "sequence_slice"
        self.set_data()

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')
class TestSequenceSliceOpSeqlen0Case0(TestSequenceSliceOp):
    # Variant: one zero-length sequence in the middle of the LoD.
    def init_test_case(self):
        self.x_dim = (100, 3, 2)
        self.x_lod = [[20, 30, 0, 30, 20]]
        self.offset = [[1], [2], [0], [4], [5]]
        self.length = [[10], [8], [0], [4], [2]]
class TestSequenceSliceOpSeqlen0Case1(TestSequenceSliceOp):
    # Variant: zero-length sequences at both ends and in the middle.
    def init_test_case(self):
        self.x_dim = (100, 3, 2)
        self.x_lod = [[0, 70, 0, 30, 0]]
        self.offset = [[0], [2], [0], [4], [0]]
        self.length = [[0], [8], [0], [4], [0]]
class TestSequenceSliceOpSeqlen0Case2(TestSequenceSliceOp):
    # Variant: a single non-empty sequence; all others are zero-length.
    def init_test_case(self):
        self.x_dim = (100, 3, 2)
        self.x_lod = [[0, 100, 0, 0, 0]]
        self.offset = [[0], [2], [0], [0], [0]]
        self.length = [[0], [8], [0], [0], [0]]
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
PaddlePaddle/Paddle
|
python/paddle/fluid/tests/unittests/sequence/test_sequence_slice_op.py
|
Python
|
apache-2.0
| 2,865
|
# Domato - main generator script
# -------------------------------
#
# Written and maintained by Ivan Fratric <ifratric@google.com>
#
# Copyright 2017 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import random
import sys
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(parent_dir)
from grammar import Grammar
# Number of generated statements for the first (main) <glfuzz> slot.
_N_MAIN_LINES = 100
# Number of generated statements for each subsequent (event handler) slot.
_N_EVENTHANDLER_LINES = 1
def generate_function_body(jsgrammar, num_lines):
    """Return `num_lines` worth of generated JS code from `jsgrammar`."""
    body = jsgrammar._generate_code(num_lines)
    # '' + body preserves the original string-concatenation behavior
    # (raises TypeError if the grammar returns a non-string).
    return '' + body
def GenerateNewSample(template, jsgrammar):
    """Generates a sample by filling every <glfuzz> slot in the template.

    Args:
      template: A template string containing <glfuzz> placeholders.
      jsgrammar: Grammar for generating JS code.

    Returns:
      A string containing sample data.
    """
    result = template
    handlers = False
    # The first <glfuzz> slot gets a full main body; every later slot is
    # treated as an event handler and gets fewer generated lines.
    while '<glfuzz>' in result:
        numlines = _N_MAIN_LINES
        if handlers:
            numlines = _N_EVENTHANDLER_LINES
        else:
            handlers = True
        result = result.replace(
            '<glfuzz>',
            generate_function_body(jsgrammar, numlines),
            1
        )
    return result
def generate_samples(grammar_dir, outfiles):
    """Generates a set of samples and writes them to the output files.

    Args:
      grammar_dir: directory to load grammar files from.
      outfiles: A list of output filenames.
    """
    # Fix: read the template inside a context manager so the handle is
    # closed even if read() raises.
    with open(os.path.join(grammar_dir, 'template.html')) as f:
        template = f.read()
    jsgrammar = Grammar()
    err = jsgrammar.parse_from_file(os.path.join(grammar_dir, 'webgl.txt'))
    if err > 0:
        print('There were errors parsing grammar')
        return
    for outfile in outfiles:
        result = GenerateNewSample(template, jsgrammar)
        if result is not None:
            print('Writing a sample to ' + outfile)
            try:
                # Fix: context manager guarantees the sample file is
                # flushed and closed even when write() fails mid-way.
                with open(outfile, 'w') as out:
                    out.write(result)
            except IOError:
                print('Error writing to output')
def get_option(option_name):
    """Look up a command-line option's value in sys.argv.

    Supports both '--opt value' (separate argument) and '--opt=value'
    (single argument) forms. Returns the value string, or None when the
    option is absent.
    """
    inline_prefix = option_name + '='
    for index, argument in enumerate(sys.argv):
        if argument == option_name and index + 1 < len(sys.argv):
            return sys.argv[index + 1]
        if argument.startswith(inline_prefix):
            return argument[len(inline_prefix):]
    return None
def main():
    """CLI entry point: generate one sample, or many in ClusterFuzz mode."""
    fuzzer_dir = os.path.dirname(__file__)
    multiple_samples = False
    # ClusterFuzz invokes us with --output_dir either as '--output_dir=<d>'
    # or as the separate pair '--output_dir <d>'; detect both forms.
    for a in sys.argv:
        if a.startswith('--output_dir='):
            multiple_samples = True
    if '--output_dir' in sys.argv:
        multiple_samples = True
    if multiple_samples:
        print('Running on ClusterFuzz')
        out_dir = get_option('--output_dir')
        # NOTE(review): crashes with TypeError if --no_of_files is missing
        # (int(None)) — assumes ClusterFuzz always passes both flags.
        nsamples = int(get_option('--no_of_files'))
        print('Output directory: ' + out_dir)
        print('Number of samples: ' + str(nsamples))
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        outfiles = []
        # Zero-padded names (fuzz-00000.html, ...) keep files sorted.
        for i in range(nsamples):
            outfiles.append(os.path.join(out_dir, 'fuzz-' + str(i).zfill(5) + '.html'))
        generate_samples(fuzzer_dir, outfiles)
    elif len(sys.argv) > 1:
        # Single-sample mode: first positional argument is the output file.
        outfile = sys.argv[1]
        generate_samples(fuzzer_dir, [outfile])
    else:
        print('Arguments missing')
        print("Usage:")
        print("\tpython generator.py <output file>")
        print("\tpython generator.py --output_dir <output directory> --no_of_files <number of output files>")
if __name__ == '__main__':
    main()
|
googleprojectzero/domato
|
webgl/generator.py
|
Python
|
apache-2.0
| 4,364
|
# Copyright 2015 VMware, Inc. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 OR GPL-3.0-only
from __future__ import print_function
import testinfra.utils.ansible_runner
# Target every host from molecule's generated Ansible inventory so the
# assertions below run against each converged instance.
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    '.molecule/ansible_inventory').get_hosts('all')
# Use testinfra to get a handy function to run commands locally
check_output = testinfra.get_backend(
    "local://"
).get_module("Command").check_output
def test_hosts_file(File):
    """Verify the role installed the test public key for root.

    `File` is a testinfra fixture bound to the remote host; the local
    ``check_output`` helper reads the expected key from this machine.
    """
    f = File('/root/.ssh/authorized_keys')
    assert f.exists
    assert f.user == 'root'
    assert f.group == 'root'
    # Compare the locally generated test key against the remote file
    # (rstrip drops the trailing newline from the remote content).
    command = check_output(
        'cat ~/.ssh/ansible_role_test_key.pub')
    assert command == f.content_string.rstrip()
|
vmware/ansible-role-sshkeys
|
tests/test_default.py
|
Python
|
apache-2.0
| 730
|
# -*- coding: utf-8 -*-
__title__ = 'transliterate.tests.data.python32'
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2015 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
# "Lorem ipsum" sample phrases, one per script/alphabet exercised by the
# transliterate test suite.
latin_text = "Lorem ipsum dolor sit amet"
armenian_text = 'Լօրեմ իպսում դօլօր սիտ ամետ'
cyrillic_text = 'Лорем ипсум долор сит амет'
ukrainian_cyrillic_text = 'Лорем іпсум долор сіт амет'
bulgarian_cyrillic_text = 'Лорем ипсум долор сит амет'
georgian_text = 'Ⴊორემ იფსუმ დოლორ სით ამეთ'
greek_text = 'Λορεμ ιψυμ δολορ σιτ αμετ'
hebrew_text = 'Lורeמ יpסuמ דולור סית אמeת'
mongolian_text = u'Лорэм ипсүм долор сит амет'
# Custom language-pack mappings: first string is the source characters,
# second string the characters they map to, position by position.
test_15_register_custom_language_pack_mapping = (
    "abcdefghij",
    "1234567890",
)
test_33_register_unregister_mapping = (
    "abcdefghij",
    "1234567890",
)
test_34_latin_to_latin_mapping = (
    "abgdezilxkhmjnpsvtrcqw&ofABGDEZILXKHMJNPSVTRCQOFW",
    "zbgdeailxkhnjmpswtrcqv&ofZBGDEAILXKHNJMPSWTRCQOFV",
)
test_34_latin_to_latin_characters = "abgdezilxkhmjnpsvtrcqw&ofABGDEZILXKHMJNPSVTRCQOFW"
# NOTE(review): identical to the forward character set above rather than
# the second row of test_34_latin_to_latin_mapping — possibly intentional,
# but verify against the test that consumes it.
test_34_latin_to_latin_reversed_characters = "abgdezilxkhmjnpsvtrcqw&ofABGDEZILXKHMJNPSVTRCQOFW"
test_34_latin_to_latin_text = "Lorem ipsum dolor sit amet 123453254593485938"
|
akosiaris/transliterate
|
src/transliterate/tests/data/python32.py
|
Python
|
gpl-2.0
| 1,382
|
# encoding: utf-8
from __future__ import unicode_literals
import re
import itertools
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urlparse,
compat_urllib_parse,
)
from ..utils import (
ExtractorError,
int_or_none,
unified_strdate,
)
class SoundcloudIE(InfoExtractor):
    """Information extractor for soundcloud.com tracks.

    Track metadata is fetched from the api.soundcloud.com JSON endpoints
    (see _resolv_url and _real_extract); stream URLs are obtained from the
    i1/tracks/<id>/streams endpoint in _extract_info_dict.
    """
    # Matches three URL shapes: a regular track page
    # soundcloud.com/<uploader>/<title>[/<secret token>] (explicitly
    # excluding /sets/ and /likes pages, handled by subclasses below),
    # a direct api.soundcloud.com/tracks/<id>[?secret_token=...] URL,
    # and embedded-player URLs that carry the real URL in their query.
    _VALID_URL = r'''(?x)^(?:https?://)?
                    (?:(?:(?:www\.|m\.)?soundcloud\.com/
                            (?P<uploader>[\w\d-]+)/
                            (?!sets/|likes/?(?:$|[?#]))
                            (?P<title>[\w\d-]+)/?
                            (?P<token>[^?]+?)?(?:[?].*)?$)
                       |(?:api\.soundcloud\.com/tracks/(?P<track_id>\d+)
                          (?:/?\?secret_token=(?P<secret_token>[^&]+))?)
                       |(?P<player>(?:w|player|p.)\.soundcloud\.com/player/?.*?url=.*)
                    )
                    '''
    IE_NAME = 'soundcloud'
    _TESTS = [
        {
            'url': 'http://soundcloud.com/ethmusic/lostin-powers-she-so-heavy',
            'md5': 'ebef0a451b909710ed1d7787dddbf0d7',
            'info_dict': {
                'id': '62986583',
                'ext': 'mp3',
                'upload_date': '20121011',
                'description': 'No Downloads untill we record the finished version this weekend, i was too pumped n i had to post it , earl is prolly gonna b hella p.o\'d',
                'uploader': 'E.T. ExTerrestrial Music',
                'title': 'Lostin Powers - She so Heavy (SneakPreview) Adrian Ackers Blueprint 1',
                'duration': 143,
            }
        },
        # not streamable song
        {
            'url': 'https://soundcloud.com/the-concept-band/goldrushed-mastered?in=the-concept-band/sets/the-royal-concept-ep',
            'info_dict': {
                'id': '47127627',
                'ext': 'mp3',
                'title': 'Goldrushed',
                'description': 'From Stockholm Sweden\r\nPovel / Magnus / Filip / David\r\nwww.theroyalconcept.com',
                'uploader': 'The Royal Concept',
                'upload_date': '20120521',
                'duration': 227,
            },
            'params': {
                # rtmp
                'skip_download': True,
            },
        },
        # private link
        {
            'url': 'https://soundcloud.com/jaimemf/youtube-dl-test-video-a-y-baw/s-8Pjrp',
            'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604',
            'info_dict': {
                'id': '123998367',
                'ext': 'mp3',
                'title': 'Youtube - Dl Test Video \'\' Ä↭',
                'uploader': 'jaimeMF',
                'description': 'test chars: \"\'/\\ä↭',
                'upload_date': '20131209',
                'duration': 9,
            },
        },
        # private link (alt format)
        {
            'url': 'https://api.soundcloud.com/tracks/123998367?secret_token=s-8Pjrp',
            'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604',
            'info_dict': {
                'id': '123998367',
                'ext': 'mp3',
                'title': 'Youtube - Dl Test Video \'\' Ä↭',
                'uploader': 'jaimeMF',
                'description': 'test chars: \"\'/\\ä↭',
                'upload_date': '20131209',
                'duration': 9,
            },
        },
        # downloadable song
        {
            'url': 'https://soundcloud.com/oddsamples/bus-brakes',
            'md5': '7624f2351f8a3b2e7cd51522496e7631',
            'info_dict': {
                'id': '128590877',
                'ext': 'mp3',
                'title': 'Bus Brakes',
                'description': 'md5:0053ca6396e8d2fd7b7e1595ef12ab66',
                'uploader': 'oddsamples',
                'upload_date': '20140109',
                'duration': 17,
            },
        },
    ]

    # Public API client ids sent with every JSON request; the iPhone id is
    # used only for the i1/.../streams endpoint below.
    _CLIENT_ID = 'b45b1aa10f1ac2941910a7f0d10f8e28'
    _IPHONE_CLIENT_ID = '376f225bf427445fc4bfb6b99b72e0bf'

    def report_resolve(self, video_id):
        """Log that *video_id* is being resolved to an API URL."""
        self.to_screen('%s: Resolving id' % video_id)

    @classmethod
    def _resolv_url(cls, url):
        # The resolve endpoint maps a public page URL to its track JSON URL.
        return 'http://api.soundcloud.com/resolve.json?url=' + url + '&client_id=' + cls._CLIENT_ID

    def _extract_info_dict(self, info, full_title=None, quiet=False, secret_token=None):
        """Build a youtube-dl info dict from a single track's API JSON.

        info: decoded JSON object for one track.
        full_title: display name for log messages (falls back to the id).
        quiet: NOTE(review): as written, quiet=True *enables* the
            report_extraction() call, which looks inverted — confirm intent.
        secret_token: token for private tracks, forwarded to the streams URL.
        """
        track_id = compat_str(info['id'])
        name = full_title or track_id
        if quiet:
            self.report_extraction(name)
        thumbnail = info['artwork_url']
        if thumbnail is not None:
            # Request the larger 500x500 artwork variant.
            thumbnail = thumbnail.replace('-large', '-t500x500')
        ext = 'mp3'
        result = {
            'id': track_id,
            'uploader': info['user']['username'],
            'upload_date': unified_strdate(info['created_at']),
            'title': info['title'],
            'description': info['description'],
            'thumbnail': thumbnail,
            # API reports duration in milliseconds; convert to seconds.
            'duration': int_or_none(info.get('duration'), 1000),
            'webpage_url': info.get('permalink_url'),
        }
        formats = []
        if info.get('downloadable', False):
            # We can build a direct link to the song
            format_url = (
                'https://api.soundcloud.com/tracks/{0}/download?client_id={1}'.format(
                    track_id, self._CLIENT_ID))
            formats.append({
                'format_id': 'download',
                'ext': info.get('original_format', 'mp3'),
                'url': format_url,
                'vcodec': 'none',
                'preference': 10,
            })
        # We have to retrieve the url
        streams_url = ('http://api.soundcloud.com/i1/tracks/{0}/streams?'
                       'client_id={1}&secret_token={2}'.format(track_id, self._IPHONE_CLIENT_ID, secret_token))
        format_dict = self._download_json(
            streams_url,
            track_id, 'Downloading track url')
        for key, stream_url in format_dict.items():
            if key.startswith('http'):
                formats.append({
                    'format_id': key,
                    'ext': ext,
                    'url': stream_url,
                    'vcodec': 'none',
                })
            elif key.startswith('rtmp'):
                # The url doesn't have an rtmp app, we have to extract the playpath
                url, path = stream_url.split('mp3:', 1)
                formats.append({
                    'format_id': key,
                    'url': url,
                    'play_path': 'mp3:' + path,
                    'ext': ext,
                    'vcodec': 'none',
                })
        if not formats:
            # We fallback to the stream_url in the original info, this
            # cannot be always used, sometimes it can give an HTTP 404 error
            formats.append({
                'format_id': 'fallback',
                'url': info['stream_url'] + '?client_id=' + self._CLIENT_ID,
                'ext': ext,
                'vcodec': 'none',
            })
        for f in formats:
            if f['format_id'].startswith('http'):
                f['protocol'] = 'http'
            if f['format_id'].startswith('rtmp'):
                f['protocol'] = 'rtmp'
        self._sort_formats(formats)
        result['formats'] = formats
        return result

    def _real_extract(self, url):
        # Dispatch on whichever _VALID_URL branch matched: a direct API
        # track id, an embedded player (delegate to the URL in its query),
        # or an uploader/title page that must first be resolved.
        mobj = re.match(self._VALID_URL, url, flags=re.VERBOSE)
        if mobj is None:
            raise ExtractorError('Invalid URL: %s' % url)
        track_id = mobj.group('track_id')
        token = None
        if track_id is not None:
            info_json_url = 'http://api.soundcloud.com/tracks/' + track_id + '.json?client_id=' + self._CLIENT_ID
            full_title = track_id
            token = mobj.group('secret_token')
            if token:
                info_json_url += "&secret_token=" + token
        elif mobj.group('player'):
            query = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
            return self.url_result(query['url'][0])
        else:
            # extract uploader (which is in the url)
            uploader = mobj.group('uploader')
            # extract simple title (uploader + slug of song title)
            slug_title = mobj.group('title')
            token = mobj.group('token')
            full_title = resolve_title = '%s/%s' % (uploader, slug_title)
            if token:
                resolve_title += '/%s' % token
            self.report_resolve(full_title)
            url = 'http://soundcloud.com/%s' % resolve_title
            info_json_url = self._resolv_url(url)
        info = self._download_json(info_json_url, full_title, 'Downloading info JSON')
        return self._extract_info_dict(info, full_title, secret_token=token)
class SoundcloudSetIE(SoundcloudIE):
    _VALID_URL = r'https?://(?:www\.)?soundcloud\.com/(?P<uploader>[\w\d-]+)/sets/(?P<slug_title>[\w\d-]+)(?:/(?P<token>[^?/]+))?'
    IE_NAME = 'soundcloud:set'
    _TESTS = [{
        'url': 'https://soundcloud.com/the-concept-band/sets/the-royal-concept-ep',
        'info_dict': {
            'title': 'The Royal Concept EP',
        },
        'playlist_mincount': 6,
    }]

    def _real_extract(self, url):
        """Resolve a soundcloud set page and return it as a playlist."""
        match = re.match(self._VALID_URL, url)
        user = match.group('uploader')
        slug = match.group('slug_title')
        token = match.group('token')

        full_title = '%s/sets/%s' % (user, slug)
        url = 'http://soundcloud.com/%s/sets/%s' % (user, slug)
        if token:
            # Private sets carry a secret token as an extra path component.
            full_title += '/' + token
            url += '/' + token

        self.report_resolve(full_title)
        info = self._download_json(self._resolv_url(url), full_title)

        if 'errors' in info:
            for err in info['errors']:
                self._downloader.report_error(
                    'unable to download video webpage: %s' %
                    compat_str(err['error_message']))
            return

        entries = [
            self._extract_info_dict(track, secret_token=token)
            for track in info['tracks']
        ]
        return {
            '_type': 'playlist',
            'entries': entries,
            'id': info['id'],
            'title': info['title'],
        }
class SoundcloudUserIE(SoundcloudIE):
    _VALID_URL = r'https?://(www\.)?soundcloud\.com/(?P<user>[^/]+)/?((?P<rsrc>tracks|likes)/?)?(\?.*)?$'
    IE_NAME = 'soundcloud:user'
    _TESTS = [{
        'url': 'https://soundcloud.com/the-concept-band',
        'info_dict': {
            'id': '9615865',
            'title': 'The Royal Concept',
        },
        'playlist_mincount': 12
    }, {
        'url': 'https://soundcloud.com/the-concept-band/likes',
        'info_dict': {
            'id': '9615865',
            'title': 'The Royal Concept',
        },
        'playlist_mincount': 1,
    }]

    def _real_extract(self, url):
        """Return all tracks (or likes) of a user as a playlist."""
        match = re.match(self._VALID_URL, url)
        user_name = match.group('user')

        # Map the page kind to the API resource name.
        rsrc = match.group('rsrc')
        if rsrc is None:
            rsrc = 'tracks'
        elif rsrc == 'likes':
            rsrc = 'favorites'

        user = self._download_json(
            self._resolv_url('http://soundcloud.com/%s/' % user_name),
            user_name, 'Downloading user info')
        base_url = 'http://api.soundcloud.com/users/%s/%s.json?' % (user_name, rsrc)

        # Page through the listing 50 tracks at a time until an empty
        # page signals the end.
        entries = []
        for page in itertools.count():
            query = compat_urllib_parse.urlencode({
                'offset': page * 50,
                'limit': 50,
                'client_id': self._CLIENT_ID,
            })
            tracks = self._download_json(
                base_url + query, user_name,
                'Downloading track page %s' % (page + 1))
            if not tracks:
                self.to_screen('%s: End page received' % user_name)
                break
            entries.extend(
                self._extract_info_dict(t, quiet=True) for t in tracks)

        return {
            '_type': 'playlist',
            'id': compat_str(user['id']),
            'title': user['username'],
            'entries': entries,
        }
class SoundcloudPlaylistIE(SoundcloudIE):
    _VALID_URL = r'https?://api\.soundcloud\.com/playlists/(?P<id>[0-9]+)(?:/?\?secret_token=(?P<token>[^&]+?))?$'
    IE_NAME = 'soundcloud:playlist'
    _TESTS = [{
        'url': 'http://api.soundcloud.com/playlists/4110309',
        'info_dict': {
            'id': '4110309',
            'title': 'TILT Brass - Bowery Poetry Club, August \'03 [Non-Site SCR 02]',
            'description': 're:.*?TILT Brass - Bowery Poetry Club',
        },
        'playlist_count': 6,
    }]

    def _real_extract(self, url):
        """Fetch an API playlist by numeric id (optionally token-protected)."""
        match = re.match(self._VALID_URL, url)
        playlist_id = match.group('id')
        token = match.group('token')

        base_url = '%s//api.soundcloud.com/playlists/%s.json?' % (
            self.http_scheme(), playlist_id)
        query = {
            'client_id': self._CLIENT_ID,
        }
        if token:
            query['secret_token'] = token

        data = self._download_json(
            base_url + compat_urllib_parse.urlencode(query),
            playlist_id, 'Downloading playlist')

        entries = [
            self._extract_info_dict(t, quiet=True, secret_token=token)
            for t in data['tracks']]
        return {
            '_type': 'playlist',
            'id': playlist_id,
            'title': data.get('title'),
            'description': data.get('description'),
            'entries': entries,
        }
|
0x7678/youtube-dl
|
youtube_dl/extractor/soundcloud.py
|
Python
|
unlicense
| 14,130
|
import os
import re
import sys
import textwrap
from doctest import ELLIPSIS, OutputChecker
import pytest
from tests.lib import (
_create_test_package,
_create_test_package_with_srcdir,
_git_commit,
need_bzr,
need_mercurial,
need_svn,
path_to_url,
)
distribute_re = re.compile('^distribute==[0-9.]+\n', re.MULTILINE)
def _check_output(result, expected):
checker = OutputChecker()
actual = str(result)
# FIXME! The following is a TOTAL hack. For some reason the
# __str__ result for pkg_resources.Requirement gets downcased on
# Windows. Since INITools is the only package we're installing
# in this file with funky case requirements, I'm forcibly
# upcasing it. You can also normalize everything to lowercase,
# but then you have to remember to upcase <BLANKLINE>. The right
# thing to do in the end is probably to find out how to report
# the proper fully-cased package name in our error message.
if sys.platform == 'win32':
actual = actual.replace('initools', 'INITools')
# This allows our existing tests to work when run in a context
# with distribute installed.
actual = distribute_re.sub('', actual)
def banner(msg):
return '\n========== %s ==========\n' % msg
assert checker.check_output(expected, actual, ELLIPSIS), (
banner('EXPECTED') + expected + banner('ACTUAL') + actual +
banner(6 * '=')
)
def test_basic_freeze(script):
    """
    Some tests of freeze, first we have to install some stuff. Note that
    the test is a little crude at the end because Python 2.5+ adds egg
    info to the standard library, so stuff like wsgiref will show up in
    the freezing. (Probably that should be accounted for in pip, but
    currently it is not).
    """
    # Install two local packages from a requirements file, then check that
    # freeze pins both at the versions that actually got installed.
    script.scratch_path.joinpath("initools-req.txt").write_text(textwrap.dedent("""\
        simple==2.0
        # and something else to test out:
        simple2<=3.0
        """))
    script.pip_install_local(
        '-r', script.scratch_path / 'initools-req.txt',
    )
    result = script.pip('freeze', expect_stderr=True)
    # '...' is a doctest ELLIPSIS wildcard, see _check_output above.
    expected = textwrap.dedent("""\
        ...simple==2.0
        simple2==3.0...
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
def test_freeze_with_pip(script):
    """`pip freeze --all` must list pip itself in the output."""
    freeze_result = script.pip('freeze', '--all')
    assert 'pip==' in freeze_result.stdout
def test_freeze_with_invalid_names(script):
    """
    Test that invalid names produce warnings and are passed over gracefully.
    """
    # Plant a fake "installed" distribution by writing only its .egg-info
    # metadata file directly into site-packages.
    def fake_install(pkgname, dest):
        egg_info_path = os.path.join(
            dest, '{}-1.0-py{}.{}.egg-info'.format(
                pkgname.replace('-', '_'),
                sys.version_info[0],
                sys.version_info[1]
            )
        )
        with open(egg_info_path, 'w') as egg_info_file:
            egg_info_file.write(textwrap.dedent("""\
                Metadata-Version: 1.0
                Name: {}
                Version: 1.0
                """.format(pkgname)
            ))
    valid_pkgnames = ('middle-dash', 'middle_underscore', 'middle.dot')
    # Names with leading/trailing separators cannot be parsed as requirements.
    invalid_pkgnames = (
        '-leadingdash', '_leadingunderscore', '.leadingdot',
        'trailingdash-', 'trailingunderscore_', 'trailingdot.'
    )
    for pkgname in valid_pkgnames + invalid_pkgnames:
        fake_install(pkgname, script.site_packages_path)
    result = script.pip('freeze', expect_stderr=True)
    for pkgname in valid_pkgnames:
        # Valid names must appear in stdout, normalized to dashes.
        _check_output(
            result.stdout,
            '...{}==1.0...'.format(pkgname.replace('_', '-'))
        )
    for pkgname in invalid_pkgnames:
        # Check that the full distribution repr is present.
        dist_repr = '{} 1.0 ('.format(pkgname.replace('_', '-'))
        expected = (
            '...Could not generate requirement for '
            'distribution {}...'.format(dist_repr)
        )
        _check_output(result.stderr, expected)
    # Also check that the parse error details occur at least once.
    # We only need to find one occurrence to know that exception details
    # are logged.
    expected = '...site-packages): Parse error at "...'
    _check_output(result.stderr, expected)
@pytest.mark.git
def test_freeze_editable_not_vcs(script, tmpdir):
    """
    Test an editable install that is not version controlled.
    """
    pkg_path = _create_test_package(script)
    # Rename the .git directory so the directory is no longer recognized
    # as a VCS directory.
    os.rename(os.path.join(pkg_path, '.git'), os.path.join(pkg_path, '.bak'))
    script.pip('install', '-e', pkg_path)
    result = script.pip('freeze')
    # Freeze should fall back to a plain '-e <path>' line with a comment.
    # We need to apply os.path.normcase() to the path since that is what
    # the freeze code does.
    expected = textwrap.dedent("""\
        ...# Editable install with no version control (version-pkg==0.1)
        -e {}
        ...""".format(os.path.normcase(pkg_path)))
    _check_output(result.stdout, expected)
@pytest.mark.git
def test_freeze_editable_git_with_no_remote(script, tmpdir, deprecated_python):
    """
    Test an editable Git install with no remote url.
    """
    pkg_path = _create_test_package(script)
    script.pip('install', '-e', pkg_path)
    result = script.pip('freeze')
    if not deprecated_python:
        # No warnings are expected on a supported Python version.
        assert result.stderr == ''
    # We need to apply os.path.normcase() to the path since that is what
    # the freeze code does.
    expected = textwrap.dedent("""\
        ...# Editable Git install with no remote (version-pkg==0.1)
        -e {}
        ...""".format(os.path.normcase(pkg_path)))
    _check_output(result.stdout, expected)
@need_svn
def test_freeze_svn(script, tmpdir):
    """Test freezing a svn checkout"""
    checkout_path = _create_test_package(script, vcs='svn')
    # Install with develop
    script.run(
        'python', 'setup.py', 'develop',
        cwd=checkout_path, expect_stderr=True
    )
    result = script.pip('freeze', expect_stderr=True)
    # Freeze must emit an editable svn+ requirement for the checkout.
    expected = textwrap.dedent("""\
        ...-e svn+...#egg=version_pkg
        ...""")
    _check_output(result.stdout, expected)
@pytest.mark.git
@pytest.mark.xfail
def test_freeze_exclude_editable(script, tmpdir):
    """
    Test excluding editable from freezing list.
    """
    # Returns path to a generated package called "version_pkg"
    pkg_version = _create_test_package(script)
    result = script.run(
        'git', 'clone', pkg_version, 'pip-test-package',
        expect_stderr=True,
    )
    repo_dir = script.scratch_path / 'pip-test-package'
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=repo_dir,
        expect_stderr=True,
    )
    result = script.pip('freeze', '--exclude-editable', expect_stderr=True)
    # NOTE(review): marked xfail — the expectation below still contains the
    # editable line that --exclude-editable is supposed to drop.
    expected = textwrap.dedent(
        """
        ...-e git+...#egg=version_pkg
        ...
        """
    ).strip()
    _check_output(result.stdout, expected)
@pytest.mark.git
def test_freeze_git_clone(script, tmpdir):
    """
    Test freezing a Git clone.
    """
    # Returns path to a generated package called "version_pkg"
    pkg_version = _create_test_package(script)
    result = script.run(
        'git', 'clone', pkg_version, 'pip-test-package',
        expect_stderr=True,
    )
    repo_dir = script.scratch_path / 'pip-test-package'
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=repo_dir,
        expect_stderr=True,
    )
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent(
        """
        ...-e git+...#egg=version_pkg
        ...
        """
    ).strip()
    _check_output(result.stdout, expected)
    # With -f, the find-links line must be echoed ahead of the requirement.
    result = script.pip(
        'freeze', '-f', '%s#egg=pip_test_package' % repo_dir,
        expect_stderr=True,
    )
    expected = textwrap.dedent(
        """
        -f %(repo)s#egg=pip_test_package...
        -e git+...#egg=version_pkg
        ...
        """ % {'repo': repo_dir},
    ).strip()
    _check_output(result.stdout, expected)
    # Check that slashes in branch or tag names are translated.
    # See also issue #1083: https://github.com/pypa/pip/issues/1083
    script.run(
        'git', 'checkout', '-b', 'branch/name/with/slash',
        cwd=repo_dir,
        expect_stderr=True,
    )
    # Create a new commit to ensure that the commit has only one branch
    # or tag name associated to it (to avoid the non-determinism reported
    # in issue #1867).
    script.run('touch', 'newfile', cwd=repo_dir)
    script.run('git', 'add', 'newfile', cwd=repo_dir)
    _git_commit(script, repo_dir, message='...')
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent(
        """
        ...-e ...@...#egg=version_pkg
        ...
        """
    ).strip()
    _check_output(result.stdout, expected)
@pytest.mark.git
def test_freeze_git_clone_srcdir(script, tmpdir):
    """
    Test freezing a Git clone where setup.py is in a subdirectory
    relative the repo root and the source code is in a subdirectory
    relative to setup.py.
    """
    # Returns path to a generated package called "version_pkg"
    pkg_version = _create_test_package_with_srcdir(script)
    result = script.run(
        'git', 'clone', pkg_version, 'pip-test-package',
        expect_stderr=True,
    )
    repo_dir = script.scratch_path / 'pip-test-package'
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=repo_dir / 'subdir',
        expect_stderr=True,
    )
    result = script.pip('freeze', expect_stderr=True)
    # The frozen URL must carry the &subdirectory= fragment.
    expected = textwrap.dedent(
        """
        ...-e git+...#egg=version_pkg&subdirectory=subdir
        ...
        """
    ).strip()
    _check_output(result.stdout, expected)
    result = script.pip(
        'freeze', '-f', '%s#egg=pip_test_package' % repo_dir,
        expect_stderr=True,
    )
    expected = textwrap.dedent(
        """
        -f %(repo)s#egg=pip_test_package...
        -e git+...#egg=version_pkg&subdirectory=subdir
        ...
        """ % {'repo': repo_dir},
    ).strip()
    _check_output(result.stdout, expected)
@need_mercurial
def test_freeze_mercurial_clone_srcdir(script, tmpdir):
    """
    Test freezing a Mercurial clone where setup.py is in a subdirectory
    relative to the repo root and the source code is in a subdirectory
    relative to setup.py.
    """
    # Returns path to a generated package called "version_pkg"
    pkg_version = _create_test_package_with_srcdir(script, vcs='hg')
    result = script.run(
        'hg', 'clone', pkg_version, 'pip-test-package'
    )
    repo_dir = script.scratch_path / 'pip-test-package'
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=repo_dir / 'subdir'
    )
    result = script.pip('freeze')
    # The frozen URL must carry the &subdirectory= fragment.
    expected = textwrap.dedent(
        """
        ...-e hg+...#egg=version_pkg&subdirectory=subdir
        ...
        """
    ).strip()
    _check_output(result.stdout, expected)
    result = script.pip(
        'freeze', '-f', '%s#egg=pip_test_package' % repo_dir
    )
    expected = textwrap.dedent(
        """
        -f %(repo)s#egg=pip_test_package...
        -e hg+...#egg=version_pkg&subdirectory=subdir
        ...
        """ % {'repo': repo_dir},
    ).strip()
    _check_output(result.stdout, expected)
@pytest.mark.git
def test_freeze_git_remote(script, tmpdir):
    """
    Test freezing a Git clone.
    """
    # Returns path to a generated package called "version_pkg"
    pkg_version = _create_test_package(script)
    result = script.run(
        'git', 'clone', pkg_version, 'pip-test-package',
        expect_stderr=True,
    )
    repo_dir = script.scratch_path / 'pip-test-package'
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=repo_dir,
        expect_stderr=True,
    )
    origin_remote = pkg_version
    other_remote = pkg_version + '-other'
    # check frozen remote after clone
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent(
        """
        ...-e git+{remote}@...#egg=version_pkg
        ...
        """
    ).format(remote=origin_remote).strip()
    _check_output(result.stdout, expected)
    # check frozen remote when there is no remote named origin
    script.run('git', 'remote', 'remove', 'origin', cwd=repo_dir)
    script.run('git', 'remote', 'add', 'other', other_remote, cwd=repo_dir)
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent(
        """
        ...-e git+{remote}@...#egg=version_pkg
        ...
        """
    ).format(remote=other_remote).strip()
    _check_output(result.stdout, expected)
    # when there are more than one origin, priority is given to the
    # remote named origin
    script.run('git', 'remote', 'add', 'origin', origin_remote, cwd=repo_dir)
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent(
        """
        ...-e git+{remote}@...#egg=version_pkg
        ...
        """
    ).format(remote=origin_remote).strip()
    _check_output(result.stdout, expected)
@need_mercurial
def test_freeze_mercurial_clone(script, tmpdir):
    """
    Test freezing a Mercurial clone.
    """
    # Returns path to a generated package called "version_pkg"
    pkg_version = _create_test_package(script, vcs='hg')
    result = script.run(
        'hg', 'clone', pkg_version, 'pip-test-package',
        expect_stderr=True,
    )
    repo_dir = script.scratch_path / 'pip-test-package'
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=repo_dir,
        expect_stderr=True,
    )
    result = script.pip('freeze', expect_stderr=True)
    # Freeze must emit an editable hg+ requirement for the clone.
    expected = textwrap.dedent(
        """
        ...-e hg+...#egg=version_pkg
        ...
        """
    ).strip()
    _check_output(result.stdout, expected)
    result = script.pip(
        'freeze', '-f', '%s#egg=pip_test_package' % repo_dir,
        expect_stderr=True,
    )
    expected = textwrap.dedent(
        """
        -f %(repo)s#egg=pip_test_package...
        ...-e hg+...#egg=version_pkg
        ...
        """ % {'repo': repo_dir},
    ).strip()
    _check_output(result.stdout, expected)
@need_bzr
def test_freeze_bazaar_clone(script, tmpdir):
    """
    Test freezing a Bazaar clone.
    """
    try:
        checkout_path = _create_test_package(script, vcs='bazaar')
    except OSError as e:
        # Surface a missing/broken bzr binary as a test failure, not an error.
        pytest.fail('Invoking `bzr` failed: %s' % e)
    result = script.run(
        'bzr', 'checkout', checkout_path, 'bzr-package'
    )
    result = script.run(
        'python', 'setup.py', 'develop',
        cwd=script.scratch_path / 'bzr-package',
        expect_stderr=True,
    )
    result = script.pip('freeze', expect_stderr=True)
    expected = textwrap.dedent("""\
        ...-e bzr+file://...@1#egg=version_pkg
        ...""")
    _check_output(result.stdout, expected)
    result = script.pip(
        'freeze', '-f',
        '%s/#egg=django-wikiapp' % checkout_path,
        expect_stderr=True,
    )
    expected = textwrap.dedent("""\
        -f %(repo)s/#egg=django-wikiapp
        ...-e bzr+file://...@...#egg=version_pkg
        ...""" % {'repo': checkout_path})
    _check_output(result.stdout, expected)
# used by the test_freeze_with_requirement_* tests below
_freeze_req_opts = textwrap.dedent("""\
# Unchanged requirements below this line
-r ignore.txt
--requirement ignore.txt
-Z ignore
--always-unzip ignore
-f http://ignore
-i http://ignore
--pre
--trusted-host url
--process-dependency-links
--extra-index-url http://ignore
--find-links http://ignore
--index-url http://ignore
""")
def test_freeze_with_requirement_option_file_url_egg_not_installed(
        script, deprecated_python):
    """
    Test "freeze -r requirements.txt" with a local file URL whose egg name
    is not installed.
    """
    url = path_to_url('my-package.tar.gz') + '#egg=Does.Not-Exist'
    requirements_path = script.scratch_path.joinpath('requirements.txt')
    requirements_path.write_text(url + '\n')
    result = script.pip(
        'freeze', '--requirement', 'requirements.txt', expect_stderr=True,
    )
    # A warning (and only that warning, on supported Pythons) is expected.
    expected_err = (
        'WARNING: Requirement file [requirements.txt] contains {}, '
        "but package 'Does.Not-Exist' is not installed\n"
    ).format(url)
    if deprecated_python:
        assert expected_err in result.stderr
    else:
        assert expected_err == result.stderr
def test_freeze_with_requirement_option(script):
    """
    Test that new requirements are created correctly with --requirement hints
    """
    script.scratch_path.joinpath("hint1.txt").write_text(textwrap.dedent("""\
        INITools==0.1
        NoExist==4.2 # A comment that ensures end of line comments work.
        simple==3.0; python_version > '1.0'
        """) + _freeze_req_opts)
    # hint2 repeats hint1 with different letter casing; the freeze output
    # must be identical, proving name matching is case-insensitive.
    script.scratch_path.joinpath("hint2.txt").write_text(textwrap.dedent("""\
        iniTools==0.1
        Noexist==4.2 # A comment that ensures end of line comments work.
        Simple==3.0; python_version > '1.0'
        """) + _freeze_req_opts)
    result = script.pip_install_local('initools==0.2')
    result = script.pip_install_local('simple')
    result = script.pip(
        'freeze', '--requirement', 'hint1.txt',
        expect_stderr=True,
    )
    expected = textwrap.dedent("""\
        INITools==0.2
        simple==3.0
    """)
    expected += _freeze_req_opts
    expected += "## The following requirements were added by pip freeze:..."
    _check_output(result.stdout, expected)
    assert (
        "Requirement file [hint1.txt] contains NoExist==4.2, but package "
        "'NoExist' is not installed"
    ) in result.stderr
    result = script.pip(
        'freeze', '--requirement', 'hint2.txt',
        expect_stderr=True,
    )
    _check_output(result.stdout, expected)
    assert (
        "Requirement file [hint2.txt] contains Noexist==4.2, but package "
        "'Noexist' is not installed"
    ) in result.stderr
def test_freeze_with_requirement_option_multiple(script):
    """
    Test that new requirements are created correctly with multiple
    --requirement hints
    """
    script.scratch_path.joinpath('hint1.txt').write_text(textwrap.dedent("""\
        INITools==0.1
        NoExist==4.2
        simple==3.0; python_version > '1.0'
        """) + _freeze_req_opts)
    script.scratch_path.joinpath('hint2.txt').write_text(textwrap.dedent("""\
        NoExist2==2.0
        simple2==1.0
        """) + _freeze_req_opts)
    result = script.pip_install_local('initools==0.2')
    result = script.pip_install_local('simple')
    result = script.pip_install_local('simple2==1.0')
    result = script.pip_install_local('meta')
    result = script.pip(
        'freeze', '--requirement', 'hint1.txt', '--requirement', 'hint2.txt',
        expect_stderr=True,
    )
    # Fix: pip_install_local('simple') installs the newest local 'simple'
    # (3.0, as also pinned in hint1.txt), so freeze reports simple==3.0 —
    # the previous expectation of simple==1.0 could never match (compare
    # test_freeze_with_requirement_option above, which expects 3.0).
    expected = textwrap.dedent("""\
        INITools==0.2
        simple==3.0
    """)
    expected += _freeze_req_opts
    expected += textwrap.dedent("""\
        simple2==1.0
    """)
    expected += "## The following requirements were added by pip freeze:"
    expected += '\n' + textwrap.dedent("""\
        ...meta==1.0...
    """)
    _check_output(result.stdout, expected)
    assert (
        "Requirement file [hint1.txt] contains NoExist==4.2, but package "
        "'NoExist' is not installed"
    ) in result.stderr
    assert (
        "Requirement file [hint2.txt] contains NoExist2==2.0, but package "
        "'NoExist2' is not installed"
    ) in result.stderr
    # any options like '--index-url http://ignore' should only be emitted once
    # even if they are listed in multiple requirements files
    assert result.stdout.count("--index-url http://ignore") == 1
def test_freeze_with_requirement_option_package_repeated_one_file(script):
    """
    Test freezing with single requirements file that contains a package
    multiple times
    """
    script.scratch_path.joinpath('hint1.txt').write_text(textwrap.dedent("""\
        simple2
        simple2
        NoExist
        """) + _freeze_req_opts)
    result = script.pip_install_local('simple2==1.0')
    result = script.pip_install_local('meta')
    result = script.pip(
        'freeze', '--requirement', 'hint1.txt',
        expect_stderr=True,
    )
    # The duplicate must be collapsed to one output line.
    expected_out = textwrap.dedent("""\
        simple2==1.0
    """)
    expected_out += _freeze_req_opts
    expected_out += "## The following requirements were added by pip freeze:"
    expected_out += '\n' + textwrap.dedent("""\
        ...meta==1.0...
    """)
    _check_output(result.stdout, expected_out)
    err1 = ("Requirement file [hint1.txt] contains NoExist, "
            "but package 'NoExist' is not installed\n")
    err2 = "Requirement simple2 included multiple times [hint1.txt]\n"
    assert err1 in result.stderr
    assert err2 in result.stderr
    # there shouldn't be any other 'is not installed' warnings
    assert result.stderr.count('is not installed') == 1
def test_freeze_with_requirement_option_package_repeated_multi_file(script):
    """
    Test freezing with multiple requirements file that contain a package
    """
    script.scratch_path.joinpath('hint1.txt').write_text(textwrap.dedent("""\
        simple
        """) + _freeze_req_opts)
    script.scratch_path.joinpath('hint2.txt').write_text(textwrap.dedent("""\
        simple
        NoExist
        """) + _freeze_req_opts)
    result = script.pip_install_local('simple==1.0')
    result = script.pip_install_local('meta')
    result = script.pip(
        'freeze', '--requirement', 'hint1.txt',
        '--requirement', 'hint2.txt',
        expect_stderr=True,
    )
    # The cross-file duplicate must be collapsed to one output line.
    expected_out = textwrap.dedent("""\
        simple==1.0
    """)
    expected_out += _freeze_req_opts
    expected_out += "## The following requirements were added by pip freeze:"
    expected_out += '\n' + textwrap.dedent("""\
        ...meta==1.0...
    """)
    _check_output(result.stdout, expected_out)
    err1 = ("Requirement file [hint2.txt] contains NoExist, but package "
            "'NoExist' is not installed\n")
    err2 = ("Requirement simple included multiple times "
            "[hint1.txt, hint2.txt]\n")
    assert err1 in result.stderr
    assert err2 in result.stderr
    # there shouldn't be any other 'is not installed' warnings
    assert result.stderr.count('is not installed') == 1
@pytest.mark.network
@pytest.mark.incompatible_with_test_venv
def test_freeze_user(script, virtualenv, data):
    """
    Testing freeze with --user, first we have to install some stuff.
    """
    script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
    # simple goes to the user site, simple2 to the regular site-packages.
    script.pip_install_local('--find-links', data.find_links,
                             '--user', 'simple==2.0')
    script.pip_install_local('--find-links', data.find_links,
                             'simple2==3.0')
    result = script.pip('freeze', '--user', expect_stderr=True)
    expected = textwrap.dedent("""\
        simple==2.0
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
    # The non-user install must not leak into --user output.
    assert 'simple2' not in result.stdout
@pytest.mark.network
def test_freeze_path(tmpdir, script, data):
    """
    Test freeze with --path.
    """
    script.pip('install', '--find-links', data.find_links,
               '--target', tmpdir, 'simple==2.0')
    result = script.pip('freeze', '--path', tmpdir)
    # Only the package installed into the --target dir should be listed.
    expected = textwrap.dedent("""\
        simple==2.0
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
@pytest.mark.network
@pytest.mark.incompatible_with_test_venv
def test_freeze_path_exclude_user(tmpdir, script, data):
    """
    Test freeze with --path and make sure packages from --user are not picked
    up.
    """
    script.pip_install_local('--find-links', data.find_links,
                             '--user', 'simple2')
    script.pip('install', '--find-links', data.find_links,
               '--target', tmpdir, 'simple==1.0')
    # --user output sees only the user install ...
    result = script.pip('freeze', '--user')
    expected = textwrap.dedent("""\
        simple2==3.0
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
    # ... and --path output only the --target install.
    result = script.pip('freeze', '--path', tmpdir)
    expected = textwrap.dedent("""\
        simple==1.0
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
@pytest.mark.network
def test_freeze_path_multiple(tmpdir, script, data):
    """
    Test freeze with multiple --path arguments.
    """
    path1 = tmpdir / "path1"
    os.mkdir(path1)
    path2 = tmpdir / "path2"
    os.mkdir(path2)
    script.pip('install', '--find-links', data.find_links,
               '--target', path1, 'simple==2.0')
    script.pip('install', '--find-links', data.find_links,
               '--target', path2, 'simple2==3.0')
    # A single --path lists only its own packages ...
    result = script.pip('freeze', '--path', path1)
    expected = textwrap.dedent("""\
        simple==2.0
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
    # ... while multiple --path options merge the listings.
    result = script.pip('freeze', '--path', path1, '--path', path2)
    expected = textwrap.dedent("""\
        simple==2.0
        simple2==3.0
        <BLANKLINE>""")
    _check_output(result.stdout, expected)
|
xavfernandez/pip
|
tests/functional/test_freeze.py
|
Python
|
mit
| 25,185
|
r"""File-like objects that read from or write to a string buffer.
This implements (nearly) all stdio methods.
f = StringIO() # ready for writing
f = StringIO(buf) # ready for reading
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
buf = f.readline() # read until end of line ('\n') or EOF
list = f.readlines()# list of f.readline() results until EOF
f.truncate([size]) # truncate file to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
f.getvalue() # return whole file's contents as a string
Notes:
- Using a real file is often faster (but less convenient).
- There's also a much faster implementation in C, called cStringIO, but
it's not subclassable.
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- Seeking far beyond EOF and then writing will insert real null
bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
"""
# EINVAL is the errno raised for invalid seek/truncate arguments; fall
# back to its conventional value (22) if the errno module is unavailable.
try:
    from errno import EINVAL
except ImportError:
    EINVAL = 22

__all__ = ["StringIO"]
def _complain_ifclosed(closed):
if closed:
raise ValueError, "I/O operation on closed file"
class StringIO:
    """class StringIO([buffer])
    When a StringIO object is created, it can be initialized to an existing
    string by passing the string to the constructor. If no string is given,
    the StringIO will start empty.
    The StringIO object can accept either Unicode or 8-bit strings, but
    mixing the two may take some care. If both are used, 8-bit strings that
    cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
    a UnicodeError to be raised when getvalue() is called.
    """
    def __init__(self, buf = ''):
        # Force self.buf to be a string or unicode
        if not isinstance(buf, basestring):
            buf = str(buf)
        # self.buf holds the consolidated contents; self.buflist collects
        # fragments queued by write() and is joined into self.buf lazily
        # (by seek/read/readline/getvalue) to avoid quadratic concatenation.
        self.buf = buf
        self.len = len(buf)
        self.buflist = []
        self.pos = 0
        self.closed = False
        # Part of the Python 2 file-object interface (used by ``print``).
        self.softspace = 0
    def __iter__(self):
        return self
    def next(self):
        """A file object is its own iterator, for example iter(f) returns f
        (unless f is closed). When a file is used as an iterator, typically
        in a for loop (for example, for line in f: print line), the next()
        method is called repeatedly. This method returns the next input line,
        or raises StopIteration when EOF is hit.
        """
        _complain_ifclosed(self.closed)
        r = self.readline()
        if not r:
            raise StopIteration
        return r
    def close(self):
        """Free the memory buffer.
        """
        if not self.closed:
            self.closed = True
            # Drop references so a large buffer can be collected immediately;
            # _complain_ifclosed() keeps later accesses from reaching them.
            del self.buf, self.pos
    def isatty(self):
        """Returns False because StringIO objects are not connected to a
        tty-like device.
        """
        _complain_ifclosed(self.closed)
        return False
    def seek(self, pos, mode = 0):
        """Set the file's current position.
        The mode argument is optional and defaults to 0 (absolute file
        positioning); other values are 1 (seek relative to the current
        position) and 2 (seek relative to the file's end).
        There is no return value.
        """
        _complain_ifclosed(self.closed)
        # Consolidate pending write() fragments before repositioning.
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if mode == 1:
            pos += self.pos
        elif mode == 2:
            pos += self.len
        # A negative resulting position clamps to the start of the buffer.
        self.pos = max(0, pos)
    def tell(self):
        """Return the file's current position."""
        _complain_ifclosed(self.closed)
        return self.pos
    def read(self, n = -1):
        """Read at most size bytes from the file
        (less if the read hits EOF before obtaining size bytes).
        If the size argument is negative or omitted, read all data until EOF
        is reached. The bytes are returned as a string object. An empty
        string is returned when EOF is encountered immediately.
        """
        _complain_ifclosed(self.closed)
        # Consolidate pending write() fragments before slicing.
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if n is None or n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos+n, self.len)
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r
    def readline(self, length=None):
        r"""Read one entire line from the file.
        A trailing newline character is kept in the string (but may be absent
        when a file ends with an incomplete line). If the size argument is
        present and non-negative, it is a maximum byte count (including the
        trailing newline) and an incomplete line may be returned.
        An empty string is returned only when EOF is encountered immediately.
        Note: Unlike stdio's fgets(), the returned string contains null
        characters ('\0') if they occurred in the input.
        """
        _complain_ifclosed(self.closed)
        # Consolidate pending write() fragments before searching.
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        # Find the next newline; absent one, the line runs to EOF.
        i = self.buf.find('\n', self.pos)
        if i < 0:
            newpos = self.len
        else:
            newpos = i+1
        # An explicit length caps the line at length bytes.
        if length is not None and length > 0:
            if self.pos + length < newpos:
                newpos = self.pos + length
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r
    def readlines(self, sizehint = 0):
        """Read until EOF using readline() and return a list containing the
        lines thus read.
        If the optional sizehint argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes (possibly
        more, to accommodate a final whole line) are read.
        """
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            # Stop once the hint is met; the last line is kept whole.
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines
    def truncate(self, size=None):
        """Truncate the file's size.
        If the optional size argument is present, the file is truncated to
        (at most) that size. The size defaults to the current position.
        The current file position is not changed unless the position
        is beyond the new file size.
        If the specified size exceeds the file's current size, the
        file remains unchanged.
        """
        _complain_ifclosed(self.closed)
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(EINVAL, "Negative size not allowed")
        elif size < self.pos:
            self.pos = size
        # NOTE(review): when size exceeds the current length, the slice
        # leaves buf unchanged but self.len is still set to size, so len no
        # longer matches the real buffer length — matches historical CPython
        # behavior; confirm before relying on self.len after truncate().
        self.buf = self.getvalue()[:size]
        self.len = size
    def write(self, s):
        """Write a string to the file.
        There is no return value.
        """
        _complain_ifclosed(self.closed)
        if not s: return
        # Force s to be a string or unicode
        if not isinstance(s, basestring):
            s = str(s)
        spos = self.pos
        slen = self.len
        # Fast path: appending at EOF just queues the fragment.
        if spos == slen:
            self.buflist.append(s)
            self.len = self.pos = spos + len(s)
            return
        # Writing past EOF: pad the gap with NUL bytes first.
        if spos > slen:
            self.buflist.append('\0'*(spos - slen))
            slen = spos
        newpos = spos + len(s)
        # Overwriting in the middle: splice s into the consolidated buffer.
        if spos < slen:
            if self.buflist:
                self.buf += ''.join(self.buflist)
            self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
            self.buf = ''
            if newpos > slen:
                slen = newpos
        else:
            self.buflist.append(s)
            slen = newpos
        self.len = slen
        self.pos = newpos
    def writelines(self, iterable):
        """Write a sequence of strings to the file. The sequence can be any
        iterable object producing strings, typically a list of strings. There
        is no return value.
        (The name is intended to match readlines(); writelines() does not add
        line separators.)
        """
        # Bind once; write() performs the closed-file check per call.
        write = self.write
        for line in iterable:
            write(line)
    def flush(self):
        """Flush the internal buffer
        """
        # Nothing to flush for an in-memory buffer; only validate state.
        _complain_ifclosed(self.closed)
    def getvalue(self):
        """
        Retrieve the entire contents of the "file" at any time before
        the StringIO object's close() method is called.
        The StringIO object can accept either Unicode or 8-bit strings,
        but mixing the two may take some care. If both are used, 8-bit
        strings that cannot be interpreted as 7-bit ASCII (that use the
        8th bit) will cause a UnicodeError to be raised when getvalue()
        is called.
        """
        # Consolidate pending write() fragments so buf is complete.
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        return self.buf
# A little test suite
def test():
    """Smoke-test StringIO against a real text file (Python 2 only).

    Replays the contents of argv[1] (default /etc/passwd) through a
    StringIO and exercises write/writelines/seek/tell/read/readline/
    readlines/truncate, raising RuntimeError on any mismatch.
    """
    import sys
    if sys.argv[1:]:
        file = sys.argv[1]
    else:
        file = '/etc/passwd'
    lines = open(file, 'r').readlines()
    text = open(file, 'r').read()
    f = StringIO()
    # Write all but the last two lines one by one, then the rest in bulk.
    for line in lines[:-2]:
        f.write(line)
    f.writelines(lines[-2:])
    if f.getvalue() != text:
        raise RuntimeError, 'write failed'
    length = f.tell()
    print 'File length =', length
    # Overwrite the second line in place, then re-read from the start.
    f.seek(len(lines[0]))
    f.write(lines[1])
    f.seek(0)
    print 'First line =', repr(f.readline())
    print 'Position =', f.tell()
    line = f.readline()
    print 'Second line =', repr(line)
    # Seek back relative to the current position and re-read the same line.
    f.seek(-len(line), 1)
    line2 = f.read(len(line))
    if line != line2:
        raise RuntimeError, 'bad result after seek back'
    f.seek(len(line2), 1)
    list = f.readlines()
    line = list[-1]
    # Re-read the final line by seeking back from EOF.
    f.seek(f.tell() - len(line))
    line2 = f.read()
    if line != line2:
        raise RuntimeError, 'bad result after seek back from EOF'
    print 'Read', len(list), 'more lines'
    print 'File length =', f.tell()
    if f.tell() != length:
        raise RuntimeError, 'bad length'
    # Truncate to half and verify EOF lands at the new length.
    f.truncate(length/2)
    f.seek(0, 2)
    print 'Truncated length =', f.tell()
    if f.tell() != length/2:
        raise RuntimeError, 'truncate did not adjust length'
    f.close()
if __name__ == '__main__':
    test()
|
babyliynfg/cross
|
tools/project-creator/Python2.6.6/Lib/StringIO.py
|
Python
|
mit
| 10,944
|
#!/usr/bin/env python
class ColorScheme(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually."""
    def __init__(self):
        """
        Attributes:
          swaggerTypes (dict): The key is attribute name and the value is attribute type.
          attributeMap (dict): The key is attribute name and the value is json key in definition.
        """
        # Twelve theme colors are plain strings; the remaining three
        # attributes carry resource-URI model types.
        color_fields = ['Accent1', 'Accent2', 'Accent3', 'Accent4', 'Accent5',
                        'Accent6', 'Dark1', 'Dark2', 'FollowedHyperlink',
                        'Hyperlink', 'Light1', 'Light2']
        link_fields = [('SelfUri', 'ResourceUri'),
                       ('AlternateLinks', 'list[ResourceUri]'),
                       ('Links', 'list[ResourceUri]')]
        self.swaggerTypes = {}
        for name in color_fields:
            self.swaggerTypes[name] = 'str'
        for name, type_name in link_fields:
            self.swaggerTypes[name] = type_name
        # For this model the JSON keys match the attribute names exactly.
        all_fields = color_fields + [name for name, _ in link_fields]
        self.attributeMap = {}
        for name in all_fields:
            self.attributeMap[name] = name
        # Every attribute starts out unset.
        for name in all_fields:
            setattr(self, name, None)
|
sohail-aspose/Aspose_Slides_Cloud
|
SDKs/Aspose.Slides_Cloud_SDK_for_Python/asposeslidescloud/models/ColorScheme.py
|
Python
|
mit
| 1,920
|
# coding=utf-8
# Copyright (c) 2001, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# [matrix] channel #navitia:matrix.org (https://app.element.io/#/room/#navitia:matrix.org)
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from tests.mock_navitia import navitia_response
response = navitia_response.NavitiaResponse()
response.queries = [
"vehicle_journeys/?filter=vehicle_journey.has_code(source, Code-orders)&since=20120615T120000Z&until=20120615T190000Z&data_freshness=base_schedule&depth=2"
# resquest time is UTC -> 12:00 is 8:00 local in Sherbrooke
]
response.response_code = 200
response.json_response = """
{
"disruptions": [],
"feed_publishers": [
{
"id": "builder",
"license": "ODBL",
"name": "departure board",
"url": "www.canaltp.fr"
}
],
"links": [
],
"pagination": {
"items_on_page": 1,
"items_per_page": 25,
"start_page": 0,
"total_result": 1
},
"vehicle_journeys": [
{
"calendars": [
{
"active_periods": [
{
"begin": "20120615",
"end": "20130615"
}
],
"week_pattern": {
"friday": true,
"monday": false,
"saturday": false,
"sunday": false,
"thursday": false,
"tuesday": false,
"wednesday": false
}
}
],
"disruptions": [],
"id": "R:vj1",
"name": "R:vj1",
"stop_times": [
{
"arrival_time": "100000",
"departure_time": "100000",
"utc_arrival_time": "140000",
"utc_departure_time": "140000",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:14"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR1"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR1",
"label": "StopR1",
"links": [],
"name": "StopR1",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR1",
"label": "StopR1",
"links": [],
"name": "StopR1",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "101000",
"departure_time": "101000",
"utc_arrival_time": "140100",
"utc_departure_time": "140100",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:15"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR2"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR2",
"label": "StopR2",
"links": [],
"name": "StopR2",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR2",
"label": "StopR2",
"links": [],
"name": "StopR2",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "102000",
"departure_time": "102000",
"utc_arrival_time": "140200",
"utc_departure_time": "140200",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:16"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR3"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR3",
"label": "StopR3",
"links": [],
"name": "StopR3",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR3",
"label": "StopR3",
"links": [],
"name": "StopR3",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "103000",
"departure_time": "103000",
"utc_arrival_time": "140300",
"utc_departure_time": "140300",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:17"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR4"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR4",
"label": "StopR4",
"links": [],
"name": "StopR4",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR4",
"label": "StopR4",
"links": [],
"name": "StopR4",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "104000",
"departure_time": "104000",
"utc_arrival_time": "140400",
"utc_departure_time": "140400",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:17"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR5"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR5",
"label": "StopR5",
"links": [],
"name": "StopR5",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR5",
"label": "StopR5",
"links": [],
"name": "StopR5",
"timezone": "America/Montreal"
}
}
},
{
"arrival_time": "105000",
"departure_time": "105000",
"utc_arrival_time": "140500",
"utc_departure_time": "140500",
"headsign": "R:vj1",
"journey_pattern_point": {
"id": "journey_pattern_point:17"
},
"stop_point": {
"codes": [
{
"type": "source",
"value": "Code-StopR6"
}
],
"coord": {
"lat": "0",
"lon": "0"
},
"equipments": [
"has_wheelchair_boarding",
"has_bike_accepted"
],
"id": "StopR6",
"label": "StopR6",
"links": [],
"name": "StopR6",
"stop_area": {
"coord": {
"lat": "0",
"lon": "0"
},
"id": "StopR6",
"label": "StopR6",
"links": [],
"name": "StopR6",
"timezone": "America/Montreal"
}
}
}
],
"trip": {
"id": "R:vj1",
"name": "R:vj1"
},
"validity_pattern": {
"beginning_date": "20120614",
"days": "100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010000001000000100000010"
}
}
]
}
"""
|
CanalTP/kirin
|
tests/mock_navitia/vj_bad_order.py
|
Python
|
agpl-3.0
| 13,166
|
###
# Copyright (c) 2011, Alex Wood
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Turn text into ASCII art using Figlet.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "0.1"
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.Author('Alex Wood', 'al', 'awood@redhat.com')
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = '' # 'http://supybot.com/Members/yourname/Figlet/download'
# NOTE(review): these are Python 2 implicit relative imports (plugin-local
# modules), and reload() below is the Python 2 builtin — this module is
# Python 2 only as written.
import config
import plugin
import pyfiglet
reload(plugin) # In case we're being reloaded.
reload(pyfiglet) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
    import test
# Entry points the supybot plugin loader looks up on this package.
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
kg-bot/SupyBot
|
plugins/Asci/__init__.py
|
Python
|
gpl-3.0
| 2,672
|
# coding=utf-8
"""Command-line entry point for word generation.

Usage: python generator.py [word]
With an argument, generates forms for that single word; without one,
runs the default full generation. Errors are reported on stdout.
"""
import sys
from kg.db.generate_words import generate
try:
    if len(sys.argv) > 1:
        generate(sys.argv[1])
    else:
        generate()
except Exception as e:
    # u"Ката:" is "Error:" (Kyrgyz).
    print(u"Ката:")
    # FIX: the original printed e.message, which is deprecated since
    # Python 2.6 and absent on Python 3 (AttributeError would mask the
    # real error). Prefer .message when it exists, otherwise format the
    # exception itself; %-formatting keeps unicode intact on Python 2.
    detail = getattr(e, 'message', e)
    print(u"\t%s" % (detail,))
|
MasterAlish/kyrgyz_tili
|
generator.py
|
Python
|
gpl-3.0
| 232
|
"""
Utils for URLs (to avoid circular imports)
"""
DASHBOARD_URL = '/dashboard/'
PROFILE_URL = '/profile/'
# Profile sub-pages hang off PROFILE_URL; the trailing "?" is kept exactly
# as in the original URLs.
PROFILE_PERSONAL_URL = PROFILE_URL + 'personal/?'
PROFILE_EDUCATION_URL = PROFILE_URL + 'education/?'
PROFILE_EMPLOYMENT_URL = PROFILE_URL + 'professional/?'
SETTINGS_URL = "/settings/"
SEARCH_URL = "/learners/"
ORDER_SUMMARY = "/order_summary/"
EMAIL_URL = "/automaticemails/"
PAYMENT_CALL_BACK_URL = "/payment-callback/"
# URLs that belong to the dashboard app (PAYMENT_CALL_BACK_URL is not one).
DASHBOARD_URLS = [
    DASHBOARD_URL,
    PROFILE_URL,
    PROFILE_PERSONAL_URL,
    PROFILE_EDUCATION_URL,
    PROFILE_EMPLOYMENT_URL,
    SETTINGS_URL,
    SEARCH_URL,
    ORDER_SUMMARY,
    EMAIL_URL,
]
|
mitodl/micromasters
|
ui/url_utils.py
|
Python
|
bsd-3-clause
| 662
|
from copy import deepcopy
from plenum.common.constants import NAME, VERSION
from plenum.test import waits as plenumWaits
from indy_client.test.helper import checkRejects, checkNacks
from indy_common.constants import CANCEL, \
ACTION
from indy_node.test.upgrade.helper import sendUpgrade, ensureUpgradeSent, \
bumpedVersion
from stp_core.loop.eventually import eventually
# NOTE(review): presumably consumed by the test harness to suppress this
# expected node error message — confirm against the harness.
whitelist = ['Failed to upgrade node']
def testNodeRejectsPoolUpgrade(looper, nodeSet, tdir, trustee,
                               trusteeWallet, invalidUpgrade):
    """An invalid POOL_UPGRADE (bad time span between upgrades) is NACKed."""
    _, request = sendUpgrade(trustee, trusteeWallet, invalidUpgrade)
    nack_timeout = plenumWaits.expectedReqNAckQuorumTime()
    looper.run(eventually(checkNacks, trustee, request.reqId,
                          'since time span between upgrades',
                          retryWait=1, timeout=nack_timeout))
def testOnlyTrusteeCanSendPoolUpgrade(looper, steward, validUpgrade):
    """A steward's POOL_UPGRADE txn is rejected: only trustees may upgrade."""
    # A steward sending POOL_UPGRADE but txn fails
    client, wallet = steward
    upgrade = deepcopy(validUpgrade)
    upgrade[NAME] = 'upgrade-20'
    upgrade[VERSION] = bumpedVersion()
    _, request = sendUpgrade(client, wallet, upgrade)
    looper.run(eventually(checkRejects, client, request.reqId, 'cannot do',
                          retryWait=1,
                          timeout=plenumWaits.expectedReqNAckQuorumTime()))
def testNonTrustyCannotCancelUpgrade(looper, validUpgradeSent,
                                     steward, validUpgrade):
    """A steward cannot CANCEL an upgrade that is already scheduled."""
    client, wallet = steward
    cancel_txn = deepcopy(validUpgrade)
    cancel_txn[ACTION] = CANCEL
    _, request = sendUpgrade(client, wallet, cancel_txn)
    looper.run(eventually(checkRejects, client, request.reqId, 'cannot do'))
def test_accept_then_reject_upgrade(
        looper, trustee, trusteeWallet, validUpgradeSent, validUpgrade):
    """Re-sending an upgrade that is already scheduled is rejected."""
    expected = "InvalidClientRequest(\"Upgrade '{}' is already scheduled\"".\
        format(validUpgrade[NAME])
    _, request = sendUpgrade(trustee, trusteeWallet, deepcopy(validUpgrade))
    looper.run(eventually(checkRejects, trustee, request.reqId, expected,
                          retryWait=1,
                          timeout=plenumWaits.expectedReqNAckQuorumTime()))
def testOnlyTrusteeCanSendPoolUpgradeForceTrue(
        looper, steward, validUpgradeExpForceTrue):
    """A steward's force=True POOL_UPGRADE is NACKed."""
    client, wallet = steward
    _, request = sendUpgrade(client, wallet, validUpgradeExpForceTrue)
    looper.run(eventually(checkNacks, client, request.reqId, 'cannot do',
                          retryWait=1,
                          timeout=plenumWaits.expectedReqNAckQuorumTime()))
|
TechWritingWhiz/indy-node
|
indy_node/test/upgrade/test_pool_upgrade_reject.py
|
Python
|
apache-2.0
| 2,714
|
"""Event Decorators for custom components."""
import functools
from homeassistant.helpers import event
# Shared Home Assistant instance used by every decorator factory below.
# NOTE(review): nothing in this file assigns it — it must be set externally
# before any decorator runs; confirm with the component loader.
HASS = None
def track_state_change(entity_ids, from_state=None, to_state=None):
    """Decorator factory to track state changes for entity id."""
    def decorator(action):
        """Register *action* for the state change and return it unchanged."""
        event.track_state_change(HASS, entity_ids,
                                 functools.partial(action, HASS),
                                 from_state, to_state)
        return action
    return decorator
def track_sunrise(offset=None):
    """Decorator factory to track sunrise events."""
    def decorator(action):
        """Register *action* for sunrise (with offset) and return it."""
        event.track_sunrise(HASS,
                            functools.partial(action, HASS),
                            offset)
        return action
    return decorator
def track_sunset(offset=None):
    """Decorator factory to track sunset events."""
    def decorator(action):
        """Register *action* for sunset (with offset) and return it."""
        event.track_sunset(HASS,
                           functools.partial(action, HASS),
                           offset)
        return action
    return decorator
# pylint: disable=too-many-arguments
def track_time_change(year=None, month=None, day=None, hour=None, minute=None,
                      second=None):
    """Decorator factory to track time changes."""
    def decorator(action):
        """Register *action* for the matching time pattern and return it."""
        event.track_time_change(HASS,
                                functools.partial(action, HASS),
                                year, month, day, hour, minute, second)
        return action
    return decorator
# pylint: disable=too-many-arguments
def track_utc_time_change(year=None, month=None, day=None, hour=None,
                          minute=None, second=None):
    """Decorator factory to track time changes."""
    def decorator(action):
        """Register *action* for the matching UTC time pattern, return it."""
        event.track_utc_time_change(HASS,
                                    functools.partial(action, HASS),
                                    year, month, day, hour, minute, second)
        return action
    return decorator
|
justyns/home-assistant
|
homeassistant/helpers/event_decorators.py
|
Python
|
mit
| 2,402
|
#!/usr/bin/env python3
# Copyright (C) 2015 Robert Jordens <jordens@gmail.com>
import argparse
import os
import subprocess
import tempfile
import shutil
from artiq import __artiq_dir__ as artiq_dir
from artiq.frontend.bit2bin import bit2bin
def scripts_path():
    """Return the absolute path to OpenOCD's bundled scripts directory.

    The path is derived from the location of the ``openocd`` executable
    (share/openocd/scripts next to its install prefix); on Windows
    (``os.name == "nt"``) the tree lives under an extra ``Library`` level.

    Raises SystemExit with a clear message if openocd is not on $PATH.
    """
    p = ["share", "openocd", "scripts"]
    if os.name == "nt":
        p.insert(0, "Library")
    openocd = shutil.which("openocd")
    # FIX: shutil.which returns None when openocd is missing, which made
    # os.path.dirname raise an opaque TypeError; fail with a clear message
    # instead (the tool's prerequisites require openocd on $PATH).
    if openocd is None:
        raise SystemExit("openocd not found in PATH")
    p = os.path.abspath(os.path.join(
        os.path.dirname(openocd),
        "..", *p))
    return p
def get_argparser():
    """Build the command-line parser for the ARTIQ flashing tool."""
    # RawDescriptionHelpFormatter keeps the epilog's layout intact.
    epilog = """\
Valid actions:
* proxy: load the flash proxy gateware bitstream
* gateware: write gateware bitstream to flash
* bios: write bios to flash
* runtime: write runtime to flash
* storage: write storage image to flash
* load: load gateware bitstream into device (volatile but fast)
* start: trigger the target to (re)load its gateware bitstream from flash
Prerequisites:
* Connect the board through its/a JTAG adapter.
* Have OpenOCD installed and in your $PATH.
* Have access to the JTAG adapter's devices. Udev rules from OpenOCD:
  'sudo cp openocd/contrib/99-openocd.rules /etc/udev/rules.d'
  and replug the device. Ensure you are member of the
  plugdev group: 'sudo adduser $USER plugdev' and re-login.
"""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="ARTIQ flashing/deployment tool",
        epilog=epilog)
    parser.add_argument("-t", "--target", default="kc705",
                        help="target board, default: %(default)s")
    parser.add_argument("-m", "--adapter", default="nist_clock",
                        help="target adapter, default: %(default)s")
    parser.add_argument("--target-file", default=None,
                        help="use alternative OpenOCD target file")
    parser.add_argument("-f", "--storage", help="write file to storage area")
    parser.add_argument("-d", "--dir", help="look for files in this directory")
    # With no positional arguments the full flash sequence is performed.
    parser.add_argument("action", metavar="ACTION", nargs="*",
                        default="proxy gateware bios runtime start".split(),
                        help="actions to perform, default: %(default)s")
    return parser
def main():
    """Translate the requested actions into one OpenOCD command and run it."""
    parser = get_argparser()
    opts = parser.parse_args()
    # Per-target chip name, start command and flash offsets for each image.
    config = {
        "kc705": {
            "chip": "xc7k325t",
            "start": "xc7_program xc7.tap",
            "gateware": 0x000000,
            "bios": 0xaf0000,
            "runtime": 0xb00000,
            "storage": 0xb80000,
        },
    }[opts.target]
    if opts.dir is None:
        opts.dir = os.path.join(artiq_dir, "binaries",
                                "{}-{}".format(opts.target, opts.adapter))
    # "start" alone needs no binaries; every other action does.
    if not os.path.exists(opts.dir) and opts.action != ["start"]:
        raise SystemExit("Binaries directory '{}' does not exist"
                         .format(opts.dir))
    # conv is set when top.bin must first be produced from top.bit.
    conv = False
    # prog accumulates OpenOCD commands, joined with "; " at the end.
    prog = []
    prog.append("init")
    for action in opts.action:
        if action == "proxy":
            # Search the usual locations for the bscan_spi proxy bitstream.
            proxy_base = "bscan_spi_{}.bit".format(config["chip"])
            proxy = None
            for p in [opts.dir, os.path.expanduser("~/.migen"),
                      "/usr/local/share/migen", "/usr/share/migen"]:
                proxy_ = os.path.join(p, proxy_base)
                if os.access(proxy_, os.R_OK):
                    # {{{}}} renders as {path}: literal braces quote the
                    # path for OpenOCD's Tcl parser.
                    proxy = "jtagspi_init 0 {{{}}}".format(proxy_)
                    break
            if not proxy:
                raise SystemExit(
                    "proxy gateware bitstream {} not found".format(proxy_base))
            prog.append(proxy)
        elif action == "gateware":
            # NOTE(review): "bin" shadows the builtin of the same name.
            bin = os.path.join(opts.dir, "top.bin")
            if not os.access(bin, os.R_OK):
                # No top.bin available: convert top.bit into a temp file
                # later (just before flashing), then delete it in finally.
                bin_handle, bin = tempfile.mkstemp()
                bit = os.path.join(opts.dir, "top.bit")
                conv = True
            prog.append("jtagspi_program {{{}}} 0x{:x}".format(
                bin, config["gateware"]))
        elif action == "bios":
            prog.append("jtagspi_program {{{}}} 0x{:x}".format(
                os.path.join(opts.dir, "bios.bin"), config["bios"]))
        elif action == "runtime":
            prog.append("jtagspi_program {{{}}} 0x{:x}".format(
                os.path.join(opts.dir, "runtime.fbi"), config["runtime"]))
        elif action == "storage":
            prog.append("jtagspi_program {{{}}} 0x{:x}".format(
                opts.storage, config["storage"]))
        elif action == "load":
            prog.append("pld load 0 {{{}}}".format(
                os.path.join(opts.dir, "top.bit")))
        elif action == "start":
            prog.append(config["start"])
        else:
            raise ValueError("invalid action", action)
    prog.append("exit")
    try:
        if conv:
            # Produce the .bin the queued jtagspi_program command refers to.
            bit2bin(bit, bin_handle)
        if opts.target_file is None:
            target_file = os.path.join("board", opts.target + ".cfg")
        else:
            target_file = opts.target_file
        subprocess.check_call([
            "openocd",
            "-s", scripts_path(),
            "-f", target_file,
            "-c", "; ".join(prog),
        ])
    finally:
        # Remove the temporary converted bitstream regardless of outcome.
        if conv:
            os.unlink(bin)
|
JQIamo/artiq
|
artiq/frontend/artiq_flash.py
|
Python
|
lgpl-3.0
| 5,247
|
# coding: utf-8
import tensorflow as tf
import numpy as np
#get_ipython().run_line_magic('matplotlib', 'inline')
import matplotlib.pyplot as plt
import sys
import glob
import os
from tensorflow.python.platform import flags
import argparse
# Reset the global flag registry before (re)defining flags — presumably so
# this cell/module can be re-executed (e.g. in a notebook) without
# "duplicate flag" errors; relies on private TF internals, verify on upgrade.
tf.app.flags.FLAGS = flags._FlagValues()
tf.app.flags._global_parser = argparse.ArgumentParser()
# NOTE(review): the flag_name/docstring/default_value keyword names exist
# only in older TF 1.x releases; later versions renamed them (name/help/
# default), so this block pins the code to that API.
tf.app.flags.DEFINE_integer(flag_name="epochs", docstring="number of training epoches", default_value=1000)
tf.app.flags.DEFINE_integer(flag_name="crop_height", docstring="image cropping height", default_value=500)
tf.app.flags.DEFINE_integer(flag_name="crop_width", docstring="image cropping width", default_value=500)
tf.app.flags.DEFINE_integer(flag_name="target_height", docstring="image resize height", default_value=64)
tf.app.flags.DEFINE_integer(flag_name="target_width", docstring="image resize width", default_value=64)
tf.app.flags.DEFINE_integer(flag_name="batch_size", docstring="image batchsize", default_value=128)
tf.app.flags.DEFINE_float(flag_name="learning_rate", docstring="learning rate", default_value=2e-4)
tf.app.flags.DEFINE_float(flag_name="prior_scaling", docstring="scale of prior", default_value=1e0)
tf.app.flags.DEFINE_float(flag_name="penalty_factor", docstring="penalty factor", default_value=10e0)
tf.app.flags.DEFINE_boolean(flag_name="is_training", docstring="whether the model is at training stage", default_value=True)
tf.app.flags.DEFINE_string(flag_name="log_dir", docstring="The log directory", default_value="./log")
tf.app.flags.DEFINE_string(flag_name="visible_gpu", docstring="which gpu is visible", default_value="0")
tf.app.flags.DEFINE_string(flag_name="model_dir", docstring="pretrained model dir", default_value=None)
# Restrict CUDA to the GPU(s) named by --visible_gpu (default "0").
os.environ["CUDA_VISIBLE_DEVICES"]=tf.app.flags.FLAGS.visible_gpu
class DataProcess(object):
    """Build a shuffled, batched tf.data pipeline over images in src_dir.

    Each element is (filename, image) where the image is center-cropped/
    padded to (crop_height, crop_width) and then area-resized to
    (target_height, target_width) per the module flags.
    """
    def __init__(self, src_dir):
        # src_dir: directory whose files ("src_dir/*") are treated as images.
        self.src_dir = src_dir
        # Tensors produced by the one-shot iterator; evaluating them in a
        # session yields the next (filenames, images) batch.
        self.label_batch, self.img_batch = self._process()
    def _process(self):
        """Assemble the dataset graph and return (labels, images) tensors."""
        def img_process(fn):
            # Decode, crop/pad to the fixed crop size, then downscale.
            img = tf.image.decode_image(tf.read_file(fn))
            cropped = tf.image.resize_image_with_crop_or_pad(img, tf.app.flags.FLAGS.crop_height, tf.app.flags.FLAGS.crop_width)
            new_img = tf.image.resize_images(cropped, (tf.app.flags.FLAGS.target_height, tf.app.flags.FLAGS.target_width), method =
            tf.image.ResizeMethod.AREA)
            # The filename doubles as the label for each image.
            return fn, new_img
        filenames = tf.constant(glob.glob(os.path.join(self.src_dir,"*")))
        dataset = tf.data.Dataset.from_tensor_slices((filenames, ))
        dataset = dataset.map(img_process)
        dataset = dataset.shuffle(buffer_size=10000)
        dataset = dataset.batch(tf.app.flags.FLAGS.batch_size)
        # Repeat for the configured number of epochs, then raise
        # OutOfRangeError from the iterator.
        dataset = dataset.repeat(tf.app.flags.FLAGS.epochs)
        iterator = dataset.make_one_shot_iterator()
        labels, imgs = iterator.get_next()
        return labels, imgs
    def get_data(self):
        """Return the (labels, images) batch tensors built at construction."""
        return self.label_batch, self.img_batch
#dp = DataProcess("../dataset/102flowers/jpg/")
#dp._process()
#with tf.Session() as s:
# s.run(tf.global_variables_initializer())
# s.run(tf.local_variables_initializer())
# coord = tf.train.Coordinator()
# threads = tf.train.start_queue_runners(coord=coord)
# try:
# while True:
# labels, imgs = s.run(dp.get_data(), feed_dict = {})
# #print(np.min(imgs))
# plt.imshow(imgs[0,:,:,:]/256.0)
# plt.show()
# break
# except tf.errors.OutOfRangeError as e:
# print("fetch data ended")
# coord.request_stop()
# coord.join(threads)
class Generator(object):
    """
    Generator in GAN, used to generate "fake" images.
    Original paper (DCGAN): https://arxiv.org/pdf/1511.06434.pdf

    Projects a 100-dim latent vector to 4x4x1024, then upsamples through
    four stride-2 transposed convolutions to a 64x64x3 image in [-1, 1].
    """
    def __init__(self):
        self._build_graph()
    def _build_graph(self):
        with tf.variable_scope("generator") as scope:
            print("### Print Generator Intermediate Parameter")
            # Latent input z; batch dimension is dynamic.
            self.prior = tf.placeholder(dtype=tf.float32, shape=(None, 100), name="prior_gen")
            # Controls batch-norm train/inference behavior.
            self.is_training = tf.placeholder(dtype=tf.bool, shape = (), name="training_flag")
            prior_proj = tf.contrib.layers.fully_connected(inputs=self.prior, num_outputs=4*4*1024,
                activation_fn=None, scope="prior_projection")
            prior_proj = tf.contrib.layers.batch_norm(inputs=prior_proj, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
                is_training= self.is_training, scope="bn0")
            conv0 = tf.reshape(prior_proj, (-1, 4, 4, 1024))
            # Each deconv doubles spatial size: 4 -> 8 -> 16 -> 32 -> 64.
            conv1 = tf.contrib.layers.convolution2d_transpose(inputs=conv0, num_outputs=512, activation_fn=None,
                kernel_size=(5,5), stride=(2,2), padding="SAME",scope="deconv1")
            conv1 = tf.contrib.layers.batch_norm(inputs=conv1, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
                is_training= self.is_training, scope="bn1")
            print(conv1.shape)
            conv2 = tf.contrib.layers.convolution2d_transpose(inputs=conv1, num_outputs=256, activation_fn=None,
                kernel_size=(5,5), stride=(2,2), padding="SAME",scope="deconv2")
            conv2 = tf.contrib.layers.batch_norm(inputs=conv2, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
                is_training= self.is_training, scope="bn2")
            print(conv2.shape)
            conv3 = tf.contrib.layers.convolution2d_transpose(inputs=conv2, num_outputs=128, activation_fn=None,
                kernel_size=(5,5), stride=(2,2), padding="SAME",scope="deconv3")
            conv3 = tf.contrib.layers.batch_norm(inputs=conv3, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
                is_training= self.is_training, scope="bn3")
            print(conv3.shape)
            # Final layer: no batch norm, 3 output channels (RGB).
            conv4 = tf.contrib.layers.convolution2d_transpose(inputs=conv3, num_outputs=3, activation_fn=None,
                kernel_size=(5,5), stride=(2,2), padding="SAME",scope="deconv4")
            # tanh keeps pixels in [-1, 1]; gen_img_out maps them to integer
            # pixel values for display/saving.
            self.gen_img = tf.nn.tanh(conv4)
            self.gen_img_out = tf.cast(x= tf.floor(self.gen_img*128.0 + 128.0), dtype=tf.int32)
            print(conv4.shape)
            print("### End Print Generator Intermediate Parameter")
# tf.reset_default_graph()
# g = Generator()
class Discriminator(object):
    """
    Discriminator (critic) in GAN, used to distinguish "fake" and "real" images.
    Outputs an unbounded per-image score (no sigmoid), as required by the
    Wasserstein loss computed by the caller.
    """
    def __init__(self, img_gen):
        # img_gen: a Generator instance whose gen_img tensor is scored as fake.
        self._build_graph(img_gen)
    def _build_graph(self, image_gen):
        # Real images are fed as raw pixels and rescaled to [-1, 1] to match
        # the generator's tanh output range.
        self.real_img = tf.placeholder(tf.float32, (None, 64, 64, 3), name="real_image")
        #real_img = (self.real_img - 128.0)/128.0
        real_img = self.real_img/255.0*2.0 - 1.0
        self.is_training = tf.placeholder(dtype=tf.bool, shape = (), name="training_flag")
        # Score the real batch, then the generated batch with shared weights.
        self.real_judge = self._discrim(real_img)
        #print(self.real_judge)
        self.fake_judge = self._discrim(image_gen.gen_img, reuse = True)
        #print(self.fake_judge)
    def _discrim(self, input_img, reuse = None):
        """
        This function will be called twice,
        one for real images, and one for fake images.
        Pass reuse=True on the second call to share variables.
        """
        # Four stride-2 convolutions take 64x64 input down to 4x4, followed
        # by a linear projection to a single score per image.
        # NOTE(review): the normalization layers are commented out —
        # presumably deliberate for a gradient-penalty critic; confirm.
        with tf.variable_scope("discriminator") as scope:
            if reuse: scope.reuse_variables()
            print("### Print Discriminator Intermediate Parameter")
            print(self.is_training)
            conv1 = tf.contrib.layers.convolution2d(inputs=input_img, num_outputs=128, padding="SAME",
                kernel_size=(5,5), stride=(2,2), activation_fn=tf.nn.relu, scope = "conv1")
            #conv1 = tf.contrib.layers.layer_norm(inputs=conv1, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
            #    scope="bn1")
            print(conv1.shape)
            conv2 = tf.contrib.layers.convolution2d(inputs=conv1, num_outputs=256, padding="SAME",
                kernel_size=(5,5), stride=(2,2), activation_fn=tf.nn.relu, scope = "conv2")
            #conv2 = tf.contrib.layers.layer_norm(inputs=conv2, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
            #    scope="bn2")
            print(conv2.shape)
            ###
            conv3 = tf.contrib.layers.convolution2d(inputs=conv2, num_outputs=512, padding="SAME",
                kernel_size=(5,5), stride=(2,2), activation_fn=tf.nn.relu, scope = "conv3")
            #conv3 = tf.contrib.layers.layer_norm(inputs=conv3, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
            #    scope="bn3")
            print(conv3.shape)
            conv4 = tf.contrib.layers.convolution2d(inputs=conv3, num_outputs=1024, padding="SAME",
                kernel_size=(5,5), stride=(2,2), activation_fn=tf.nn.relu, scope = "conv4")
            #conv4 = tf.contrib.layers.layer_norm(inputs=conv4, center=True, scale=True, activation_fn=tf.nn.leaky_relu,
            #    scope="bn4")
            print(conv4.shape)
            ###
            #conv5 = tf.contrib.layers.avg_pool2d(inputs=conv4, kernel_size=(4,4), stride=(4,4), padding="SAME", scope="avg_pool5")
            #print(conv5.shape)
            print("### End Print Discriminator Intermediate Parameter")
            ### no need to perform sigmoid
            return tf.contrib.layers.fully_connected(inputs=tf.reshape(conv4,(-1, 4*4*1024)), num_outputs=1,
                activation_fn=None, scope="output_projection")
# tf.reset_default_graph()
# g = Generator()
# d = Discriminator(g)
# tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='generator')
# tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='discriminator')
class GANModel(object):
    """
    Training/inference driver for a WGAN-GP style DCGAN: combines the
    Generator and Discriminator graphs with the gradient-penalty critic
    loss (https://arxiv.org/abs/1704.00028) and pulls real image batches
    from `datasrc`.
    """
    def __init__(self, generator, discriminator, datasrc):
        self.generator = generator
        self.discriminator = discriminator
        self.datasrc = datasrc
        self.sess = None
        # NOTE: this Saver only tracks variables that exist at construction
        # time (the G/D networks). Optimizer slot variables created later in
        # train() are initialized separately after a restore.
        self.saver = tf.train.Saver()
    def train(self, model_path = None):
        """Run the training loop until the input pipeline is exhausted.

        model_path: optional checkpoint path to restore network weights
        from before training continues; None starts from scratch.
        """
        self.sess = tf.Session()
        # Snapshot the variables that exist now so that, after restoring a
        # checkpoint, only the newly created (optimizer) variables are
        # initialized below.
        temp = set(tf.global_variables())
        fake_result = self.discriminator.fake_judge
        real_result = self.discriminator.real_judge
        # Gradient penalty (WGAN-GP): penalize the critic's gradient norm
        # deviating from 1 on random interpolations between real and fake.
        epsilon = tf.placeholder(tf.float32, (None,1,1,1), "uniform_random")
        mixing = self.generator.gen_img + epsilon*(self.discriminator.real_img - self.generator.gen_img)
        grads = tf.gradients(self.discriminator._discrim(mixing, reuse = True), mixing)[0]
        penalty = tf.reduce_mean(tf.square(tf.sqrt(tf.reduce_sum(tf.square(grads), axis = [1,2,3])) - 1.0))
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            # Wasserstein losses: the generator maximizes the critic score of
            # fakes; the critic separates real from fake plus the penalty.
            loss_g = -tf.reduce_mean(fake_result)
            loss_d = -tf.reduce_mean(real_result) + tf.reduce_mean(fake_result) + tf.app.flags.FLAGS.penalty_factor*penalty
            optim_g = tf.train.AdamOptimizer(tf.app.flags.FLAGS.learning_rate, beta1 = 0.0, beta2 = 0.5).minimize(loss_g, var_list =\
                tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='generator'))
            optim_d = tf.train.AdamOptimizer(tf.app.flags.FLAGS.learning_rate, beta1 = 0.0, beta2 = 0.5).minimize(loss_d, var_list =\
                tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='discriminator'))
        writer = tf.summary.FileWriter(tf.app.flags.FLAGS.log_dir, self.sess.graph)
        summary_g = tf.summary.scalar(name="generator_loss", tensor=loss_g)
        summary_d = tf.summary.scalar(name="discriminator_loss", tensor=loss_d)
        if model_path:
            # Restore saved network weights, then initialize only the
            # variables created after the Saver was built (optimizer slots).
            self.saver.restore(self.sess, model_path)
            self.sess.run(tf.variables_initializer(set(tf.global_variables()) - temp))
        else:
            self.sess.run(tf.global_variables_initializer())
        self.sess.run(tf.local_variables_initializer())
        cnt_total = 0
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord, sess=self.sess)
        try:
            while True:
                labels, imgs = self.sess.run(self.datasrc.get_data(), feed_dict = {})
                # First train the critic on a real batch, a fake batch, and
                # the gradient-penalty interpolates.
                loss_d_out, _, summary_d_out = self.sess.run([loss_d, optim_d, summary_d],
                    feed_dict = {
                        self.discriminator.real_img: imgs,
                        epsilon: np.random.rand(imgs.shape[0],1,1,1),
                        self.generator.prior: \
                            np.random.randn(imgs.shape[0], 100)* tf.app.flags.FLAGS.prior_scaling,
                        self.discriminator.is_training: True,
                        self.generator.is_training: True
                    })
                # Then train the generator against the updated critic.
                loss_g_out, _, summary_g_out = self.sess.run([loss_g, optim_g, summary_g],
                    feed_dict = {
                        self.discriminator.real_img: imgs,
                        self.generator.prior: \
                            np.random.randn(imgs.shape[0], 100)* tf.app.flags.FLAGS.prior_scaling,
                        self.discriminator.is_training: True,
                        self.generator.is_training: True,
                    })
                cnt_total += 1
                writer.add_summary(summary_d_out, cnt_total)
                writer.add_summary(summary_g_out, cnt_total)
                print("In batch %3d, Discriminator Loss %.3f, Generator Loss %.3f\r" \
                        %(cnt_total, loss_d_out, loss_g_out), end="")
                # Save a checkpoint every 50 batches (the old comment said
                # "every 100", which did not match the code).
                if cnt_total % 50 == 0:
                    self.saver.save(self.sess, os.path.join(tf.app.flags.FLAGS.log_dir, "model_%03d.ckpt" %(cnt_total//50)))
        except tf.errors.OutOfRangeError as e:
            print("fetch data ended")
        coord.request_stop()
        coord.join(threads)
    def infer_gen(self, model_path = None, n_img = 1, prior=None):
        """
        After the training, now we can use the generator to produce images.

        model_path: checkpoint to restore (required when no session exists).
        n_img: number of images to generate, if no prior is given.
        prior: optional (N, 100) array of latent vectors; random if None.
        Returns the generated images as int32 pixel arrays.
        """
        if not self.sess:
            self.sess = tf.Session()
            if not model_path:
                print("Invalid model path!")
                sys.exit()
            else:
                self.saver.restore(self.sess, model_path)
        # Bug fix: the original `if not prior:` raises ValueError when a
        # numpy array is passed (ambiguous truth value); test None instead.
        if prior is None:
            prior = np.random.randn(n_img, 100) * tf.app.flags.FLAGS.prior_scaling
        imgs = self.sess.run(self.generator.gen_img_out, feed_dict = {self.generator.prior: prior, self.generator.is_training: False})
        return imgs
    def infer_dis(self):
        """
        In fact, discriminator can be used to predict,
        but here we will not complete the code
        """
        pass
if __name__ == "__main__":
    #os.system("rm -rf ./log")
    #tf.reset_default_graph()
    # Wire pipeline and networks together, then train. Pass --model_dir to
    # resume from an existing checkpoint.
    dp = DataProcess("../dataset/102flowers/jpg/")
    g = Generator()
    d = Discriminator(g)
    gan = GANModel(generator=g, discriminator=d, datasrc=dp)
    gan.train(tf.app.flags.FLAGS.model_dir)
    # For test only
    #imgs = gan.infer_gen(model_path="./log/model_001.ckpt", n_img = 1)
    #img = np.reshape(imgs, (64, 64, 3))
    #plt.imshow(img/256.0)
    #plt.show()
|
zxjzxj9/deeplearning
|
gan_talk_tensorflow/dcgan_w_gp.py
|
Python
|
gpl-3.0
| 18,360
|
from setuptools import setup, find_packages
# Packaging metadata for the museris-data distribution: Django data models
# and a scraper for https://museris.lausanne.ch/.
setup(
    name='museris-data',
    version='0.1',
    description='Data models and scraper for https://museris.lausanne.ch/',
    author='Cruncher',
    author_email='marco@cruncher.ch',
    url='https://github.com/cruncher/museris',
    license='MIT',
    packages=find_packages(),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
    # Ship non-Python package data declared in MANIFEST.in.
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        'Django >= 1.7',
        'beautifulsoup4 >= 4.3.2',
        'progress >= 1.2',
        'requests >= 2.5.1'
    ]
)
|
cruncher/museris
|
setup.py
|
Python
|
mit
| 944
|
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from pootle.core.delegate import tp_tool
def pytest_generate_tests(metafunc):
    """Parametrize any test requesting the ``checkers`` fixture with every
    checker name registered in PROJECT_CHECKERS."""
    from pootle_project.models import PROJECT_CHECKERS
    if 'checkers' in metafunc.funcargnames:
        metafunc.parametrize("checkers", PROJECT_CHECKERS.keys())
def _require_tp(language, project):
    """Fetch the translation project for (language, project), creating it
    on first use."""
    from pootle_translationproject.models import TranslationProject
    translation_project, _created = TranslationProject.objects.get_or_create(
        language=language, project=project)
    return translation_project
@pytest.fixture
def afrikaans_tutorial(afrikaans, tutorial):
    """Require the Afrikaans Tutorial translation project."""
    return _require_tp(afrikaans, tutorial)
@pytest.fixture
def en_tutorial_obsolete(english_tutorial):
    """Require the English Tutorial in obsolete state.

    (The old docstring said "Arabic"; this fixture obsoletes the English
    tutorial's directory.)
    """
    english_tutorial.directory.makeobsolete()
    return english_tutorial
@pytest.fixture
def english_tutorial(english, tutorial):
    """Require the English Tutorial translation project."""
    return _require_tp(english, tutorial)
@pytest.fixture
def italian_tutorial(italian, tutorial):
    """Require the Italian Tutorial translation project."""
    return _require_tp(italian, tutorial)
@pytest.fixture
def tp_checker_tests(request, english, checkers):
    """Yield (checker_name, project) for each checker parametrized by
    pytest_generate_tests; the project is created with that checkstyle."""
    from pytest_pootle.factories import ProjectDBFactory
    checker_name = checkers
    project = ProjectDBFactory(
        checkstyle=checker_name,
        source_language=english)
    return (checker_name, project)
@pytest.fixture
def templates_project0(request, templates, project0):
    """Require the templates/project0/ translation project."""
    tps = project0.translationproject_set.select_related(
        "data",
        "directory")
    template_tp = tps.get(language=templates)
    # Attach the already-loaded language object so later attribute access
    # does not trigger another DB query.
    template_tp.language = templates
    return template_tp
@pytest.fixture
def tp0(language0, project0):
    """Require the language0/project0 translation project."""
    tps = project0.translationproject_set.select_related(
        "data",
        "directory")
    tp0 = tps.get(language=language0)
    # Attach the already-loaded language object to avoid a re-fetch.
    tp0.language = language0
    return tp0
@pytest.fixture
def no_tp_tool_(request):
    """Temporarily disconnect all tp_tool receivers; restored on teardown."""
    start_receivers = tp_tool.receivers
    tp_tool.receivers = []
    def _reset_tp_tool():
        # Finalizer: reattach the original receivers after the test.
        tp_tool.receivers = start_receivers
    request.addfinalizer(_reset_tp_tool)
|
unho/pootle
|
pytest_pootle/fixtures/models/translation_project.py
|
Python
|
gpl-3.0
| 2,531
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import cPickle
from binascii import unhexlify
from functools import partial
from PyQt5.Qt import (QPixmap, QSize, QWidget, Qt, pyqtSignal, QUrl, QIcon,
QPropertyAnimation, QEasingCurve, QApplication, QFontInfo, QAction,
QSizePolicy, QPainter, QRect, pyqtProperty, QLayout, QPalette, QMenu,
QPen, QColor)
from PyQt5.QtWebKitWidgets import QWebView
from calibre import fit_image
from calibre.gui2.dnd import (dnd_has_image, dnd_get_image, dnd_get_files,
IMAGE_EXTENSIONS, dnd_has_extension)
from calibre.ebooks import BOOK_EXTENSIONS
from calibre.ebooks.metadata.book.base import (field_metadata, Metadata)
from calibre.ebooks.metadata.book.render import mi_to_html
from calibre.gui2 import (config, open_url, pixmap_to_data, gprefs, rating_font)
from calibre.utils.config import tweaks
def render_html(mi, css, vertical, widget, all_fields=False, render_data_func=None): # {{{
    """Render book metadata *mi* as a complete HTML document.

    The metadata table and comment fields come from render_data (or
    *render_data_func* if supplied); font size/family and text color are
    taken from *widget*'s application font and palette so the page blends
    into the UI. *vertical* stacks comments below the table instead of in
    a second column.
    """
    table, comment_fields = (render_data_func or render_data)(mi, all_fields=all_fields,
            use_roman_numbers=config['use_roman_numerals_for_series_number'])
    def color_to_string(col):
        # QColor -> '#rrggbb' string, defaulting to black when invalid.
        ans = '#000000'
        if col.isValid():
            col = col.toRgb()
            if col.isValid():
                ans = unicode(col.name())
        return ans
    fi = QFontInfo(QApplication.font(widget))
    # Page font tracks the app font, adjustable via the tweak.
    f = fi.pixelSize() + 1 + int(tweaks['change_book_details_font_size_by'])
    fam = unicode(fi.family()).strip().replace('"', '')
    if not fam:
        fam = 'sans-serif'
    c = color_to_string(QApplication.palette().color(QPalette.Normal,
                    QPalette.WindowText))
    # %%s survives the first substitution as a literal %s placeholder,
    # filled in with the body content below.
    templ = u'''\
    <html>
    <head>
    <style type="text/css">
    body, td {
        background-color: transparent;
        font-size: %dpx;
        font-family: "%s",sans-serif;
        color: %s
    }
    </style>
    <style type="text/css">
    %s
    </style>
    </head>
    <body>
    %%s
    </body>
    <html>
    '''%(f, fam, c, css)
    comments = u''
    if comment_fields:
        comments = '\n'.join(u'<div>%s</div>' % x for x in comment_fields)
    right_pane = u'<div id="comments" class="comments">%s</div>'%comments
    if vertical:
        ans = templ%(table+right_pane)
    else:
        # Horizontal: metadata table on the left, comments on the right.
        ans = templ%(u'<table><tr><td valign="top" '
            'style="padding-right:2em; width:40%%">%s</td><td valign="top">%s</td></tr></table>'
                % (table, right_pane))
    return ans
def get_field_list(fm, use_defaults=False):
    """Return [(field_name, visible)] pairs for the Book details panel.

    Reads the user's 'book_display_fields' preference (falling back to the
    legacy gprefs location for upgraded installs), appends any displayable
    field not yet in the saved list (visible by default), and filters out
    fields that are no longer displayable.
    """
    from calibre.gui2.ui import get_gui
    db = get_gui().current_db
    if use_defaults:
        src = db.prefs.defaults
    else:
        # Migration shim: prefer the old gprefs value until the per-library
        # setting has been written at least once.
        old_val = gprefs.get('book_display_fields', None)
        if old_val is not None and not db.prefs.has_setting(
                'book_display_fields'):
            src = gprefs
        else:
            src = db.prefs
    fieldlist = list(src['book_display_fields'])
    names = frozenset([x[0] for x in fieldlist])
    for field in fm.displayable_field_keys():
        if field not in names:
            fieldlist.append((field, True))
    available = frozenset(fm.displayable_field_keys())
    return [(f, d) for f, d in fieldlist if f in available]
def render_data(mi, use_roman_numbers=True, all_fields=False):
    """Default renderer: convert metadata *mi* into (html_table, comment_fields).

    When *all_fields* is true every field is shown regardless of the user's
    visibility preferences.
    """
    field_list = get_field_list(getattr(mi, 'field_metadata', field_metadata))
    field_list = [(x, all_fields or display) for x, display in field_list]
    return mi_to_html(mi, field_list=field_list, use_roman_numbers=use_roman_numbers,
            rating_font=rating_font(), default_author_link=gprefs.get('default_author_link'))
# }}}
def details_context_menu_event(view, ev, book_info): # {{{
    """Build and show the context menu for the Book details web view.

    Depending on what was right-clicked, offers format actions
    (delete/save/restore/compare/open-with), link copying, author
    management and metadata-item removal, in addition to plain Copy.
    The chosen payload is stashed on the corresponding QAction before it
    is added to the menu; the action handlers in BookInfo read it back.
    """
    p = view.page()
    mf = p.mainFrame()
    r = mf.hitTestContent(ev.pos())
    url = unicode(r.linkUrl().toString(QUrl.None)).strip()
    menu = p.createStandardContextMenu()
    ca = view.pageAction(p.Copy)
    # Strip the standard menu down to just the Copy action.
    for action in list(menu.actions()):
        if action is not ca:
            menu.removeAction(action)
    if not r.isNull():
        if url.startswith('format:'):
            # Link encodes "format:<book_id>:<fmt>".
            parts = url.split(':')
            try:
                book_id, fmt = int(parts[1]), parts[2].upper()
            except:
                import traceback
                traceback.print_exc()
            else:
                from calibre.gui2.ui import get_gui
                from calibre.ebooks.oeb.polish.main import SUPPORTED
                db = get_gui().current_db.new_api
                ofmt = fmt.upper() if fmt.startswith('ORIGINAL_') else 'ORIGINAL_' + fmt
                nfmt = ofmt[len('ORIGINAL_'):]
                fmts = {x.upper() for x in db.formats(book_id)}
                for a, t in [('remove', _('Delete the %s format')),
                        ('save', _('Save the %s format to disk')),
                        ('restore', _('Restore the %s format')),
                        ('compare', ''),
                        ]:
                    if a == 'restore' and not fmt.startswith('ORIGINAL_'):
                        continue
                    if a == 'compare':
                        # Compare only when both the original and the
                        # current format exist and the format is editable.
                        if ofmt not in fmts or nfmt not in SUPPORTED:
                            continue
                        t = _('Compare to the %s format') % (fmt[9:] if fmt.startswith('ORIGINAL_') else ofmt)
                    else:
                        t = t % fmt
                    ac = getattr(book_info, '%s_format_action'%a)
                    ac.current_fmt = (book_id, fmt)
                    ac.setText(t)
                    menu.addAction(ac)
                if not fmt.upper().startswith('ORIGINAL_'):
                    from calibre.gui2.open_with import populate_menu, edit_programs
                    m = QMenu(_('Open %s with...') % fmt.upper())
                    populate_menu(m, partial(book_info.open_with, book_id, fmt), fmt)
                    if len(m.actions()) == 0:
                        menu.addAction(_('Open %s with...') % fmt.upper(), partial(book_info.choose_open_with, book_id, fmt))
                    else:
                        m.addSeparator()
                        m.addAction(_('Add other application for %s files...') % fmt.upper(), partial(book_info.choose_open_with, book_id, fmt))
                        m.addAction(_('Edit Open With applications...'), partial(edit_programs, fmt, book_info))
                        menu.addMenu(m)
                ac = book_info.copy_link_action
                ac.current_url = r.linkElement().attribute('data-full-path')
                if ac.current_url:
                    ac.setText(_('&Copy path to file'))
                    menu.addAction(ac)
        else:
            # Non-format link: may be an author, a searchable item, etc.
            el = r.linkElement()
            data = el.attribute('data-item')
            author = el.toPlainText() if unicode(el.attribute('calibre-data')) == u'authors' else None
            if not url.startswith('search:'):
                for a, t in [('copy', _('&Copy Link')),
                        ]:
                    ac = getattr(book_info, '%s_link_action'%a)
                    ac.current_url = url
                    if url.startswith('path:'):
                        ac.current_url = el.attribute('title')
                    ac.setText(t)
                    menu.addAction(ac)
            if author is not None:
                ac = book_info.manage_author_action
                ac.current_fmt = author
                ac.setText(_('Manage %s') % author)
                menu.addAction(ac)
            if data:
                # data-item carries a hex-encoded pickle of
                # (field, value, book_id) for removable metadata items.
                try:
                    field, value, book_id = cPickle.loads(unhexlify(data))
                except Exception:
                    field = value = book_id = None
                if field:
                    ac = book_info.remove_item_action
                    ac.data = (field, value, book_id)
                    ac.setText(_('Remove %s from this book') % value)
                    menu.addAction(ac)
    if len(menu.actions()) > 0:
        menu.exec_(ev.globalPos())
# }}}
class CoverView(QWidget): # {{{
    """Animated cover display for the Book details panel.

    Shows the current book's cover (or a default image), animates size
    changes, and provides a context menu for copy/paste/remove/generate
    and open-with actions. State changes are reported via signals so the
    owning widget can persist them.
    """
    cover_changed = pyqtSignal(object, object)
    cover_removed = pyqtSignal(object)
    open_cover_with = pyqtSignal(object, object)
    def __init__(self, vertical, parent=None):
        QWidget.__init__(self, parent)
        self._current_pixmap_size = QSize(120, 120)
        self.vertical = vertical
        # Animates current_pixmap_size from zero to the laid-out size.
        self.animation = QPropertyAnimation(self, b'current_pixmap_size', self)
        self.animation.setEasingCurve(QEasingCurve(QEasingCurve.OutExpo))
        self.animation.setDuration(1000)
        self.animation.setStartValue(QSize(0, 0))
        self.animation.valueChanged.connect(self.value_changed)
        self.setSizePolicy(
                QSizePolicy.Expanding if vertical else QSizePolicy.Minimum,
                QSizePolicy.Expanding)
        self.default_pixmap = QPixmap(I('book.png'))
        self.pixmap = self.default_pixmap
        self.pwidth = self.pheight = None
        # Holds the id of the currently shown book under key 'id'.
        self.data = {}
        self.do_layout()
    def value_changed(self, val):
        # Repaint on each animation step.
        self.update()
    def setCurrentPixmapSize(self, val):
        self._current_pixmap_size = val
    def do_layout(self):
        # Fit the pixmap into the widget rect, preserving aspect ratio.
        if self.rect().width() == 0 or self.rect().height() == 0:
            return
        pixmap = self.pixmap
        pwidth, pheight = pixmap.width(), pixmap.height()
        try:
            self.pwidth, self.pheight = fit_image(pwidth, pheight,
                    self.rect().width(), self.rect().height())[1:]
        except:
            self.pwidth, self.pheight = self.rect().width()-1, \
                    self.rect().height()-1
        self.current_pixmap_size = QSize(self.pwidth, self.pheight)
        self.animation.setEndValue(self.current_pixmap_size)
    def show_data(self, data):
        # data: metadata object providing .get() and .cover_data.
        self.animation.stop()
        same_item = getattr(data, 'id', True) == self.data.get('id', False)
        self.data = {'id':data.get('id', None)}
        if data.cover_data[1]:
            self.pixmap = QPixmap.fromImage(data.cover_data[1])
            # Treat tiny/broken covers as missing.
            if self.pixmap.isNull() or self.pixmap.width() < 5 or \
                self.pixmap.height() < 5:
                self.pixmap = self.default_pixmap
        else:
            self.pixmap = self.default_pixmap
        self.do_layout()
        self.update()
        # Only animate when switching to a different book.
        if (not same_item and not config['disable_animations'] and
                self.isVisible()):
            self.animation.start()
    def paintEvent(self, event):
        # Center the (possibly animating) pixmap inside the widget.
        canvas_size = self.rect()
        width = self.current_pixmap_size.width()
        extrax = canvas_size.width() - width
        if extrax < 0:
            extrax = 0
        x = int(extrax/2.)
        height = self.current_pixmap_size.height()
        extray = canvas_size.height() - height
        if extray < 0:
            extray = 0
        y = int(extray/2.)
        target = QRect(x, y, width, height)
        p = QPainter(self)
        p.setRenderHints(QPainter.Antialiasing | QPainter.SmoothPixmapTransform)
        p.drawPixmap(target, self.pixmap.scaled(target.size(),
            Qt.KeepAspectRatio, Qt.SmoothTransformation))
        if gprefs['bd_overlay_cover_size']:
            # Optional overlay showing the cover's native pixel size.
            sztgt = target.adjusted(0, 0, 0, -4)
            f = p.font()
            f.setBold(True)
            p.setFont(f)
            sz = u'\u00a0%d x %d\u00a0'%(self.pixmap.width(), self.pixmap.height())
            flags = Qt.AlignBottom|Qt.AlignRight|Qt.TextSingleLine
            szrect = p.boundingRect(sztgt, flags, sz)
            p.fillRect(szrect.adjusted(0, 0, 0, 4), QColor(0, 0, 0, 200))
            p.setPen(QPen(QColor(255,255,255)))
            p.drawText(sztgt, flags, sz)
        p.end()
    # Qt property driven by the animation; the setter's effects become
    # visible through value_changed() repaints.
    current_pixmap_size = pyqtProperty('QSize',
            fget=lambda self: self._current_pixmap_size,
            fset=setCurrentPixmapSize
            )
    def contextMenuEvent(self, ev):
        from calibre.gui2.open_with import populate_menu, edit_programs
        cm = QMenu(self)
        paste = cm.addAction(_('Paste Cover'))
        copy = cm.addAction(_('Copy Cover'))
        remove = cm.addAction(_('Remove Cover'))
        gc = cm.addAction(_('Generate Cover from metadata'))
        # Paste only makes sense when the clipboard holds an image.
        if not QApplication.instance().clipboard().mimeData().hasImage():
            paste.setEnabled(False)
        copy.triggered.connect(self.copy_to_clipboard)
        paste.triggered.connect(self.paste_from_clipboard)
        remove.triggered.connect(self.remove_cover)
        gc.triggered.connect(self.generate_cover)
        m = QMenu(_('Open cover with...'))
        populate_menu(m, self.open_with, 'cover_image')
        if len(m.actions()) == 0:
            cm.addAction(_('Open cover with...'), self.choose_open_with)
        else:
            m.addSeparator()
            m.addAction(_('Add another application to open cover...'), self.choose_open_with)
            m.addAction(_('Edit Open With applications...'), partial(edit_programs, 'cover_image', self))
            cm.addMenu(m)
        cm.exec_(ev.globalPos())
    def open_with(self, entry):
        id_ = self.data.get('id', None)
        if id_ is not None:
            self.open_cover_with.emit(id_, entry)
    def choose_open_with(self):
        from calibre.gui2.open_with import choose_program
        entry = choose_program('cover_image', self)
        if entry is not None:
            self.open_with(entry)
    def copy_to_clipboard(self):
        QApplication.instance().clipboard().setPixmap(self.pixmap)
    def paste_from_clipboard(self, pmap=None):
        # Accept an explicit QPixmap or fall back to the clipboard (and
        # the X11 selection where supported).
        if not isinstance(pmap, QPixmap):
            cb = QApplication.instance().clipboard()
            pmap = cb.pixmap()
            if pmap.isNull() and cb.supportsSelection():
                pmap = cb.pixmap(cb.Selection)
        if not pmap.isNull():
            self.update_cover(pmap)
    def update_cover(self, pmap=None, cdata=None):
        # Install a new cover from a QPixmap or raw image bytes (cdata),
        # then notify listeners via cover_changed.
        if pmap is None:
            pmap = QPixmap()
            pmap.loadFromData(cdata)
        if pmap.isNull():
            return
        self.pixmap = pmap
        self.do_layout()
        self.update()
        self.update_tooltip(getattr(self.parent(), 'current_path', ''))
        if not config['disable_animations']:
            self.animation.start()
        id_ = self.data.get('id', None)
        if id_ is not None:
            self.cover_changed.emit(id_, cdata or pixmap_to_data(pmap))
    def generate_cover(self, *args):
        book_id = self.data.get('id')
        if book_id is not None:
            from calibre.ebooks.covers import generate_cover
            from calibre.gui2.ui import get_gui
            mi = get_gui().current_db.new_api.get_metadata(book_id)
            cdata = generate_cover(mi)
            self.update_cover(cdata=cdata)
    def remove_cover(self):
        id_ = self.data.get('id', None)
        self.pixmap = self.default_pixmap
        self.do_layout()
        self.update()
        if id_ is not None:
            self.cover_removed.emit(id_)
    def update_tooltip(self, current_path):
        try:
            sz = self.pixmap.size()
        except:
            sz = QSize(0, 0)
        self.setToolTip(
            '<p>'+_('Double-click to open Book Details window') +
            '<br><br>' + _('Path') + ': ' + current_path +
            '<br><br>' + _('Cover size: %(width)d x %(height)d')%dict(
                width=sz.width(), height=sz.height())
        )
# }}}
# Book Info {{{
class BookInfo(QWebView):
    """Web view rendering book metadata as HTML (via render_html).

    All interaction — link clicks and the context-menu actions on formats,
    links, authors and metadata items — is surfaced through signals rather
    than handled here, so the owning widget decides what to do.
    """
    link_clicked = pyqtSignal(object)
    remove_format = pyqtSignal(int, object)
    remove_item = pyqtSignal(int, object, object)
    save_format = pyqtSignal(int, object)
    restore_format = pyqtSignal(int, object)
    compare_format = pyqtSignal(int, object)
    copy_link = pyqtSignal(object)
    manage_author = pyqtSignal(object)
    open_fmt_with = pyqtSignal(int, object, object)
    def __init__(self, vertical, parent=None):
        QWebView.__init__(self, parent)
        s = self.settings()
        s.setAttribute(s.JavascriptEnabled, False)
        self.vertical = vertical
        # Route all link clicks through link_activated instead of navigating.
        self.page().setLinkDelegationPolicy(self.page().DelegateAllLinks)
        self.linkClicked.connect(self.link_activated)
        self._link_clicked = False
        self.setAttribute(Qt.WA_OpaquePaintEvent, False)
        palette = self.palette()
        self.setAcceptDrops(False)
        # Transparent background so the view blends into the panel.
        palette.setBrush(QPalette.Base, Qt.transparent)
        self.page().setPalette(palette)
        self.css = P('templates/book_details.css', data=True).decode('utf-8')
        # One reusable QAction per context-menu operation; the payload
        # (format tuple or url) is stashed on the action before display.
        for x, icon in [
                ('remove_format', 'trash.png'), ('save_format', 'save.png'),
                ('restore_format', 'edit-undo.png'), ('copy_link','edit-copy.png'),
                ('manage_author', 'user_profile.png'), ('compare_format', 'diff.png')]:
            ac = QAction(QIcon(I(icon)), '', self)
            ac.current_fmt = None
            ac.current_url = None
            ac.triggered.connect(getattr(self, '%s_triggerred'%x))
            setattr(self, '%s_action'%x, ac)
        self.remove_item_action = ac = QAction(QIcon(I('minus.png')), '...', self)
        ac.data = (None, None, None)
        ac.triggered.connect(self.remove_item_triggered)
        self.setFocusPolicy(Qt.NoFocus)
    def remove_item_triggered(self):
        field, value, book_id = self.remove_item_action.data
        if field:
            self.remove_item.emit(book_id, field, value)
    def context_action_triggered(self, which):
        # Dispatch a context-menu action to its matching signal, using the
        # payload stashed on the action by details_context_menu_event.
        f = getattr(self, '%s_action'%which).current_fmt
        url = getattr(self, '%s_action'%which).current_url
        if f and 'format' in which:
            book_id, fmt = f
            getattr(self, which).emit(book_id, fmt)
        if url and 'link' in which:
            getattr(self, which).emit(url)
    def remove_format_triggerred(self):
        self.context_action_triggered('remove_format')
    def save_format_triggerred(self):
        self.context_action_triggered('save_format')
    def restore_format_triggerred(self):
        self.context_action_triggered('restore_format')
    def compare_format_triggerred(self):
        self.context_action_triggered('compare_format')
    def copy_link_triggerred(self):
        self.context_action_triggered('copy_link')
    def manage_author_triggerred(self):
        self.manage_author.emit(self.manage_author_action.current_fmt)
    def link_activated(self, link):
        # http(s) links open in the external browser; everything else is
        # forwarded for in-app handling.
        self._link_clicked = True
        if unicode(link.scheme()) in ('http', 'https'):
            return open_url(link)
        link = unicode(link.toString(QUrl.None))
        self.link_clicked.emit(link)
    def turnoff_scrollbar(self, *args):
        self.page().mainFrame().setScrollBarPolicy(Qt.Horizontal, Qt.ScrollBarAlwaysOff)
    def show_data(self, mi):
        html = render_html(mi, self.css, self.vertical, self.parent())
        self.setHtml(html)
    def mouseDoubleClickEvent(self, ev):
        swidth = self.page().mainFrame().scrollBarGeometry(Qt.Vertical).width()
        sheight = self.page().mainFrame().scrollBarGeometry(Qt.Horizontal).height()
        if self.width() - ev.x() < swidth or \
            self.height() - ev.y() < sheight:
            # Filter out double clicks on the scroll bar
            ev.accept()
        else:
            ev.ignore()
    def contextMenuEvent(self, ev):
        details_context_menu_event(self, ev, self)
    def open_with(self, book_id, fmt, entry):
        self.open_fmt_with.emit(book_id, fmt, entry)
    def choose_open_with(self, book_id, fmt):
        from calibre.gui2.open_with import choose_program
        entry = choose_program(fmt, self)
        if entry is not None:
            self.open_with(book_id, fmt, entry)
# }}}
class DetailsLayout(QLayout): # {{{
    """Custom two-child layout for the Book details panel: the cover view
    plus the details web view, stacked vertically or side by side depending
    on *vertical*. The cover's share of the space is capped by its aspect
    ratio and native pixmap size."""
    def __init__(self, vertical, parent):
        QLayout.__init__(self, parent)
        self.vertical = vertical
        self._children = []
        self.min_size = QSize(190, 200) if vertical else QSize(120, 120)
        self.setContentsMargins(0, 0, 0, 0)
    def minimumSize(self):
        return QSize(self.min_size)
    def addItem(self, child):
        # Exactly two children are managed: cover first, then details.
        if len(self._children) > 2:
            raise ValueError('This layout can only manage two children')
        self._children.append(child)
    def itemAt(self, i):
        try:
            return self._children[i]
        except:
            pass
        return None
    def takeAt(self, i):
        # NOTE(review): returns None rather than the removed item, which
        # deviates from the usual QLayout.takeAt contract — presumably
        # acceptable for this fixed two-child layout; confirm.
        try:
            self._children.pop(i)
        except:
            pass
        return None
    def count(self):
        return len(self._children)
    def sizeHint(self):
        return QSize(self.min_size)
    def setGeometry(self, r):
        QLayout.setGeometry(self, r)
        self.do_layout(r)
    def cover_height(self, r):
        # Vertical mode: at most half the height (or the 4:3 height for the
        # available width), capped at the pixmap's own height.
        if not self._children[0].widget().isVisible():
            return 0
        mh = min(int(r.height()/2.), int(4/3. * r.width())+1)
        try:
            ph = self._children[0].widget().pixmap.height()
        except:
            ph = 0
        if ph > 0:
            mh = min(mh, ph)
        return mh
    def cover_width(self, r):
        # Horizontal mode: width from a 3:4 aspect of the height, capped at
        # the pixmap's own width.
        if not self._children[0].widget().isVisible():
            return 0
        mw = 1 + int(3/4. * r.height())
        try:
            pw = self._children[0].widget().pixmap.width()
        except:
            pw = 0
        if pw > 0:
            mw = min(mw, pw)
        return mw
    def do_layout(self, rect):
        if len(self._children) != 2:
            return
        left, top, right, bottom = self.getContentsMargins()
        r = rect.adjusted(+left, +top, -right, -bottom)
        x = r.x()
        y = r.y()
        cover, details = self._children
        if self.vertical:
            # Cover on top, details below, with a 5px gap.
            ch = self.cover_height(r)
            cover.setGeometry(QRect(x, y, r.width(), ch))
            cover.widget().do_layout()
            y += ch + 5
            details.setGeometry(QRect(x, y, r.width(), r.height()-ch-5))
        else:
            # Cover on the left, details on the right, with a 5px gap.
            cw = self.cover_width(r)
            cover.setGeometry(QRect(x, y, cw, r.height()))
            cover.widget().do_layout()
            x += cw + 5
            details.setGeometry(QRect(x, y, r.width() - cw - 5, r.height()))
# }}}
class BookDetails(QWidget): # {{{
    """Composite widget showing a book's cover next to (or above) its
    metadata, wiring the CoverView and BookInfo children together and
    re-emitting their signals for the rest of the GUI.

    NOTE(review): the int payloads of the signals below appear to be
    book ids (handle_click converts link values with int()); confirm
    against the connected slots.
    """
    show_book_info = pyqtSignal()
    open_containing_folder = pyqtSignal(int)
    view_specific_format = pyqtSignal(int, object)
    search_requested = pyqtSignal(object)
    remove_specific_format = pyqtSignal(int, object)
    remove_metadata_item = pyqtSignal(int, object, object)
    save_specific_format = pyqtSignal(int, object)
    restore_specific_format = pyqtSignal(int, object)
    compare_specific_format = pyqtSignal(int, object)
    copy_link = pyqtSignal(object)
    remote_file_dropped = pyqtSignal(object, object)
    files_dropped = pyqtSignal(object, object)
    cover_changed = pyqtSignal(object, object)
    open_cover_with = pyqtSignal(object, object)
    cover_removed = pyqtSignal(object)
    view_device_book = pyqtSignal(object)
    manage_author = pyqtSignal(object)
    open_fmt_with = pyqtSignal(int, object, object)
    # Drag 'n drop {{{
    DROPABBLE_EXTENSIONS = IMAGE_EXTENSIONS+BOOK_EXTENSIONS
    def dragEnterEvent(self, event):
        # Accept drags that carry a known image/book extension or image data.
        md = event.mimeData()
        if dnd_has_extension(md, self.DROPABBLE_EXTENSIONS) or \
                dnd_has_image(md):
            event.acceptProposedAction()
    def dropEvent(self, event):
        # Dropped image -> new cover; dropped book files -> add formats.
        event.setDropAction(Qt.CopyAction)
        md = event.mimeData()
        x, y = dnd_get_image(md)
        if x is not None:
            # We have an image, set cover
            event.accept()
            if y is None:
                # Local image
                self.cover_view.paste_from_clipboard(x)
                self.update_layout()
            else:
                # Remote image: hand the (data, name) pair to the main GUI.
                self.remote_file_dropped.emit(x, y)
                # We do not support setting cover *and* adding formats for
                # a remote drop, anyway, so return
            return
        # Now look for ebook files
        urls, filenames = dnd_get_files(md, BOOK_EXTENSIONS)
        if not urls:
            # Nothing found
            return
        if not filenames:
            # Local files
            self.files_dropped.emit(event, urls)
        else:
            # Remote files, use the first file
            self.remote_file_dropped.emit(urls[0], filenames[0])
        event.accept()
    def dragMoveEvent(self, event):
        event.acceptProposedAction()
    # }}}
    def __init__(self, vertical, parent=None):
        # vertical: stack cover above details instead of side by side.
        QWidget.__init__(self, parent)
        self.setAcceptDrops(True)
        self._layout = DetailsLayout(vertical, self)
        self.setLayout(self._layout)
        self.current_path = ''
        self.cover_view = CoverView(vertical, self)
        # Re-emit child-widget signals under this widget's names.
        self.cover_view.cover_changed.connect(self.cover_changed.emit)
        self.cover_view.open_cover_with.connect(self.open_cover_with.emit)
        self.cover_view.cover_removed.connect(self.cover_removed.emit)
        self._layout.addWidget(self.cover_view)
        self.book_info = BookInfo(vertical, self)
        self._layout.addWidget(self.book_info)
        self.book_info.link_clicked.connect(self.handle_click)
        self.book_info.remove_format.connect(self.remove_specific_format)
        self.book_info.remove_item.connect(self.remove_metadata_item)
        self.book_info.open_fmt_with.connect(self.open_fmt_with)
        self.book_info.save_format.connect(self.save_specific_format)
        self.book_info.restore_format.connect(self.restore_specific_format)
        self.book_info.compare_format.connect(self.compare_specific_format)
        self.book_info.copy_link.connect(self.copy_link)
        self.book_info.manage_author.connect(self.manage_author)
        self.setCursor(Qt.PointingHandCursor)
    def handle_click(self, link):
        # Dispatch on the link scheme: 'path:<id>', 'format:<id>:<fmt>',
        # 'devpath:<path>', 'search:<hex utf-8 query>'; anything else is
        # opened as a plain URL.
        typ, val = link.partition(':')[0::2]
        if typ == 'path':
            self.open_containing_folder.emit(int(val))
        elif typ == 'format':
            id_, fmt = val.split(':')
            self.view_specific_format.emit(int(id_), fmt)
        elif typ == 'devpath':
            self.view_device_book.emit(val)
        elif typ == 'search':
            # The search payload is hex-encoded UTF-8 text.
            self.search_requested.emit(unhexlify(val).decode('utf-8'))
        else:
            try:
                open_url(QUrl(link, QUrl.TolerantMode))
            except:
                import traceback
                traceback.print_exc()
    def mouseDoubleClickEvent(self, ev):
        ev.accept()
        self.show_book_info.emit()
    def show_data(self, data):
        # data: a Metadata-like object; path attribute is optional.
        self.book_info.show_data(data)
        self.cover_view.show_data(data)
        self.current_path = getattr(data, u'path', u'')
        self.update_layout()
    def update_layout(self):
        self.cover_view.setVisible(gprefs['bd_show_cover'])
        self._layout.do_layout(self.rect())
        self.cover_view.update_tooltip(self.current_path)
    def reset_info(self):
        self.show_data(Metadata(_('Unknown')))
# }}}
|
elssar/calibre
|
src/calibre/gui2/book_details.py
|
Python
|
gpl-3.0
| 27,090
|
import itemizer
class Importer(object):
    """Resolves an item into the bundle of raw ingredients it is built from."""

    def __init__(self):
        self.itemizer = itemizer.Itemizer()

    def get_ingredients(self, item_name, quantity=1, bundle=None,
                        stop_names=None, stop_groups=None):
        """Recursively break *item_name* down into base materials.

        Args:
            item_name: name of the item to resolve.
            quantity: units of the item required.
            bundle: Ingredients accumulator; a fresh one is created when None.
            stop_names: item names to treat as raw (never decomposed).
            stop_groups: market groups whose members are treated as raw.

        Returns:
            Ingredients: the accumulated bundle (same object as *bundle*
            when one was passed in).
        """
        if stop_names is None:
            stop_names = []
        if stop_groups is None:
            stop_groups = []
        if bundle is None:
            bundle = Ingredients()

        def stop_by_groups(item):
            # Items in any stop group are leaves of the recursion.
            # (Was a lambda assignment, PEP 8 E731; also avoids building
            # an intermediate list inside any().)
            return any(item.in_group(market_group)
                       for market_group in stop_groups)

        item = self.itemizer.get_item(item_name)
        if (item_name in stop_names or stop_by_groups(item) or
                len(item.materials) == 0):
            bundle.add(item_name, quantity)
            return bundle
        # Only the material's name and per-unit quantity matter here;
        # the material item object itself is unused.
        for mat_name, (_mat_item, mat_quantity) in item.materials.items():
            self.get_ingredients(mat_name, quantity * mat_quantity, bundle,
                                 stop_names, stop_groups)
        return bundle
class Ingredients(object):
    """A bag of named quantities; repeated additions merge their totals."""

    def __init__(self):
        # Maps ingredient name -> accumulated quantity.
        self.bundle = {}

    def add(self, name, quantity):
        """Add *quantity* units of *name*, merging with any prior total."""
        previous = self.bundle.get(name, 0)
        self.bundle[name] = previous + quantity

    def __str__(self):
        rows = []
        for name, quantity in self.bundle.items():
            rows.append("%s, %s" % (name, quantity))
        return '\n'.join(rows)
|
tflovorn/profiteer
|
importer.py
|
Python
|
mit
| 1,352
|
import os, sys, time
from config import *
from checker import facebook, gmail
# Hourly dispatcher: run the notification checkers during the day,
# save the LED states and switch them off at 22:00, restore at 08:00.
hour = int(time.strftime('%H'))
if 8 <= hour <= 21:
    #facebook.checkFacebook()
    gmail.checkGmail()
elif hour == 22:
    # Entering night mode: persist [night_flag, pin11, pin16] then force off.
    state_gpio = [ True, GPIO.input(11), GPIO.input(16) ] #Night mode state, gpio11, gpio16
    # Renamed from 'file', which shadowed the builtin.
    save_file = open(os.path.dirname(__file__)+"/night.save", "w")
    cPickle.dump(state_gpio, save_file)
    save_file.close()
    GPIO.output(11, False)
    GPIO.output(16, False)
elif hour == 8:
    # Leaving night mode: restore the saved LED states.
    # BUG FIX: the saved list is [night_flag, gpio11, gpio16], so the pin
    # states live at indices 1 and 2; the old code wrote the constant True
    # flag (index 0) to pin 11 and pin 11's saved state to pin 16.
    # NOTE(review): this branch is unreachable as written -- hour 8 is
    # already consumed by the day branch above. Confirm the intended
    # schedule (e.g. day branch starting at 9, or checking hour == 8 first).
    state_gpio = cPickle.load( open( os.path.dirname(__file__)+"/night.save", "r" ) )
    GPIO.output(11, state_gpio[1])
    GPIO.output(16, state_gpio[2])
|
maelg/RaspiNotifier
|
checker.py
|
Python
|
gpl-2.0
| 706
|
import logging
from django.contrib import messages
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.urls import reverse
from django.db.models import F
from django.http.response import HttpResponseRedirect, JsonResponse
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.generic import CreateView, DeleteView, FormView, ListView
from django.views.generic.base import RedirectView
from django.views.generic.detail import DetailView
from django.views.generic.edit import FormMixin, UpdateView
from django.utils.translation import ugettext_lazy as _
from django_filters.views import FilterView
from sapl.base.models import AppConfig as AppsAppConfig
from sapl.comissoes.apps import AppConfig
from sapl.comissoes.forms import (ComissaoForm, ComposicaoForm,
DocumentoAcessorioCreateForm,
DocumentoAcessorioEditForm,
ParticipacaoCreateForm,
ParticipacaoEditForm,
PautaReuniaoFilterSet, PautaReuniaoForm,
PeriodoForm, ReuniaoForm)
from sapl.crud.base import (Crud, CrudAux, MasterDetailCrud,
PermissionRequiredForAppCrudMixin, RP_DETAIL,
RP_LIST)
from sapl.materia.models import (MateriaEmTramitacao, MateriaLegislativa,
PautaReuniao, Tramitacao)
from sapl.utils import show_results_filter_set
from .models import (CargoComissao, Comissao, Composicao, DocumentoAcessorio,
Participacao, Periodo, Reuniao, TipoComissao)
def pegar_url_composicao(pk):
    """Return the detail URL of the Composicao owning Participacao *pk*."""
    composicao_pk = Participacao.objects.get(id=pk).composicao.pk
    return reverse('sapl.comissoes:composicao_detail',
                   kwargs={'pk': composicao_pk})
def pegar_url_reuniao(pk):
    """Return the detail URL of the Reuniao owning DocumentoAcessorio *pk*."""
    reuniao_pk = DocumentoAcessorio.objects.get(id=pk).reuniao.pk
    return reverse('sapl.comissoes:reuniao_detail',
                   kwargs={'pk': reuniao_pk})
# Auxiliary CRUD views generated declaratively from the models.
CargoComissaoCrud = CrudAux.build(
    CargoComissao, 'cargo_comissao',
    list_field_names=['nome', 'id_ordenacao', 'unico']
)
TipoComissaoCrud = CrudAux.build(
    TipoComissao, 'tipo_comissao', list_field_names=[
        'sigla', 'nome', 'natureza', 'dispositivo_regimental'])
class PeriodoComposicaoCrud(CrudAux):
    """Auxiliary CRUD for composition periods; PeriodoForm on create/update."""
    model = Periodo
    class CreateView(CrudAux.CreateView):
        form_class = PeriodoForm
    class UpdateView(CrudAux.UpdateView):
        form_class = PeriodoForm
    # class ListView(CrudAux.ListView):
class ParticipacaoCrud(MasterDetailCrud):
    """CRUD for Participacao, nested under a Comissao via its Composicao."""
    model = Participacao
    parent_field = 'composicao__comissao'
    public = [RP_DETAIL, ]
    # No stand-alone list view: participations are listed inside the
    # composition screen (see ComposicaoCrud.ListView).
    ListView = None
    link_return_to_parent_field = True
    class BaseMixin(MasterDetailCrud.BaseMixin):
        list_field_names = ['composicao', 'parlamentar', 'cargo']
    class CreateView(MasterDetailCrud.CreateView):
        form_class = ParticipacaoCreateForm
        def get_initial(self):
            initial = super().get_initial()
            # The form needs the parent pk taken from the URL.
            initial['parent_pk'] = self.kwargs['pk']
            return initial
    class UpdateView(MasterDetailCrud.UpdateView):
        layout_key = 'ParticipacaoEdit'
        form_class = ParticipacaoEditForm
    class DeleteView(MasterDetailCrud.DeleteView):
        def get_success_url(self):
            # Return to the commission's composition list, pre-selecting
            # (via ?pk=) the composition we just deleted from.
            composicao_comissao_pk = self.object.composicao.comissao.pk
            composicao_pk = self.object.composicao.pk
            return '{}?pk={}'.format(reverse('sapl.comissoes:composicao_list',
                                             args=[composicao_comissao_pk]),
                                     composicao_pk)
class ComposicaoCrud(MasterDetailCrud):
    """CRUD for Composicao nested under Comissao; the list view also shows
    the participations of the selected composition."""
    model = Composicao
    parent_field = 'comissao'
    model_set = 'participacao_set'
    public = [RP_LIST, RP_DETAIL, ]
    class CreateView(MasterDetailCrud.CreateView):
        form_class = ComposicaoForm
        def get_initial(self):
            # Pre-select the parent commission from the URL kwarg.
            comissao = Comissao.objects.get(id=self.kwargs['pk'])
            return {'comissao': comissao}
    class ListView(MasterDetailCrud.ListView):
        logger = logging.getLogger(__name__)
        template_name = "comissoes/composicao_list.html"
        paginate_by = None
    def take_composicao_pk(self):
        # Selected composition pk from the ?pk= querystring; 0 when
        # absent or not an int.
        username = self.request.user.username
        try:
            self.logger.debug('user=' + username + '. Tentando obter pk da composição.')
            return int(self.request.GET['pk'])
        except Exception as e:
            self.logger.error('user=' + username + '. Erro ao obter pk da composição. Retornado 0. ' + str(e))
            return 0
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        composicao_pk = self.take_composicao_pk()
        if composicao_pk == 0:
            # Composicao eh ordenada por Periodo, que por sua vez esta em
            # ordem descrescente de data de inicio (issue #1920)
            ultima_composicao = context['composicao_list'].first()
            if ultima_composicao:
                context['composicao_pk'] = ultima_composicao.pk
            else:
                context['composicao_pk'] = 0
        else:
            context['composicao_pk'] = composicao_pk
        # Participations of the selected composition, holders first.
        context['participacao_set'] = Participacao.objects.filter(
            composicao__pk=context['composicao_pk']
        ).order_by('-titular', 'cargo__id_ordenacao', 'id')
        return context
    class DeleteView(MasterDetailCrud.DeleteView):
        def delete(self, *args, **kwargs):
            composicao = self.get_object()
            composicao.delete()
            return HttpResponseRedirect(
                reverse('sapl.comissoes:composicao_list', kwargs={'pk': composicao.comissao.pk}))
class ComissaoCrud(Crud):
    """Top-level CRUD for Comissao."""
    model = Comissao
    help_topic = 'modulo_comissoes'
    public = [RP_LIST, RP_DETAIL, ]
    class BaseMixin(Crud.BaseMixin):
        list_field_names = ['nome', 'sigla', 'tipo',
                            'data_criacao', 'data_extincao', 'ativa']
        ordering = '-ativa', 'sigla'
    class CreateView(Crud.CreateView):
        form_class = ComissaoForm
        def form_valid(self, form):
            # NOTE(review): delegates directly past Crud.CreateView to its
            # base class -- looks intentional (skips Crud's own form_valid);
            # confirm before simplifying.
            return super(Crud.CreateView, self).form_valid(form)
    class UpdateView(Crud.UpdateView):
        form_class = ComissaoForm
        def form_valid(self, form):
            return super(Crud.UpdateView, self).form_valid(form)
# Returns MateriaEmTramitacao objects for a commission.
def lista_materias_comissao(comissao_pk):
    """Matters currently in tramitation at commission *comissao_pk*,
    ordered by type, then newest year and number first."""
    em_tramitacao = MateriaEmTramitacao.objects.filter(
        tramitacao__unidade_tramitacao_destino__comissao=comissao_pk
    )
    return em_tramitacao.order_by('materia__tipo', '-materia__ano',
                                  '-materia__numero')
class MateriasTramitacaoListView(ListView):
    """Paginated list of matters in tramitation at one commission."""
    template_name = "comissoes/materias_em_tramitacao.html"
    paginate_by = 10
    def get_queryset(self):
        # Materialized so len(self.object_list) below does not re-query.
        return list(lista_materias_comissao(self.kwargs['pk']))
    def get_context_data(self, **kwargs):
        context = super(
            MateriasTramitacaoListView, self).get_context_data(**kwargs)
        context['object'] = Comissao.objects.get(id=self.kwargs['pk'])
        context['qtde'] = len(self.object_list)
        return context
class ReuniaoCrud(MasterDetailCrud):
    """CRUD for Reuniao (commission meetings) nested under Comissao."""
    model = Reuniao
    parent_field = 'comissao'
    public = [RP_LIST, RP_DETAIL, ]
    class BaseMixin(MasterDetailCrud.BaseMixin):
        list_field_names = ['data', 'nome', 'tema', 'upload_ata']
        ordering = '-data'
    class DetailView(MasterDetailCrud.DetailView):
        template_name = "comissoes/reuniao_detail.html"
        def get_context_data(self, **kwargs):
            context = super().get_context_data(**kwargs)
            # Accessory documents of this meeting, by name.
            docs = []
            documentos = DocumentoAcessorio.objects.filter(reuniao=self.kwargs['pk']).order_by('nome')
            docs.extend(documentos)
            context['docs'] = docs
            context['num_docs'] = len(docs)
            # NOTE(review): 'mats' local below is never used.
            mats = []
            # Matters on this meeting's pauta.
            materias_pauta = PautaReuniao.objects.filter(reuniao=self.kwargs['pk'])
            materias_pk = [materia_pauta.materia.pk for materia_pauta in materias_pauta]
            context['mats'] = MateriaLegislativa.objects.filter(
                pk__in=materias_pk
            ).order_by('tipo', '-ano', 'numero')
            context['num_mats'] = len(context['mats'])
            context['reuniao_pk'] = self.kwargs['pk']
            return context
    class ListView(MasterDetailCrud.ListView):
        logger = logging.getLogger(__name__)
        paginate_by = 10
        def take_reuniao_pk(self):
            # Selected meeting pk from ?pk=; 0 when absent/invalid.
            username = self.request.user.username
            try:
                self.logger.debug('user=' + username + '. Tentando obter pk da reunião.')
                return int(self.request.GET['pk'])
            except Exception as e:
                self.logger.error('user=' + username + '. Erro ao obter pk da reunião. Retornado 0. ' + str(e))
                return 0
        def get_context_data(self, **kwargs):
            context = super().get_context_data(**kwargs)
            reuniao_pk = self.take_reuniao_pk()
            if reuniao_pk == 0:
                # Default to the last meeting on the current page.
                ultima_reuniao = list(context['reuniao_list'])
                if len(ultima_reuniao) > 0:
                    ultimo = ultima_reuniao[-1]
                    context['reuniao_pk'] = ultimo.pk
                else:
                    context['reuniao_pk'] = 0
            else:
                context['reuniao_pk'] = reuniao_pk
            context['documentoacessorio_set'] = DocumentoAcessorio.objects.filter(
                reuniao__pk=context['reuniao_pk']
            ).order_by('id')
            return context
    class UpdateView(MasterDetailCrud.UpdateView):
        form_class = ReuniaoForm
        def get_initial(self):
            return {'comissao': self.object.comissao}
    class CreateView(MasterDetailCrud.CreateView):
        form_class = ReuniaoForm
        def get_initial(self):
            comissao = Comissao.objects.get(id=self.kwargs['pk'])
            return {'comissao': comissao}
class RemovePautaView(PermissionRequiredMixin, CreateView):
    """Removes selected matters from a Reuniao's pauta.

    NOTE(review): although this subclasses CreateView, POST deletes
    PautaReuniao rows; the CreateView base appears to supply only the
    form/template machinery -- confirm before refactoring.
    """
    model = PautaReuniao
    form_class = PautaReuniaoForm
    template_name = 'comissoes/pauta.html'
    permission_required = ('comissoes.add_reuniao', )
    def get_context_data(self, **kwargs):
        context = super(
            RemovePautaView, self
        ).get_context_data(**kwargs)
        # Remove = 0; Adiciona = 1  (mode switch used by the shared template)
        context['opcao'] = 0
        context['object'] = Reuniao.objects.get(pk=self.kwargs['pk'])
        context['root_pk'] = context['object'].comissao.pk
        # Matters currently on the pauta, offered for removal.
        materias_pauta = PautaReuniao.objects.filter(reuniao=context['object'])
        materias_pk = [materia_pauta.materia.pk for materia_pauta in materias_pauta]
        context['materias'] = MateriaLegislativa.objects.filter(
            pk__in=materias_pk
        ).order_by('tipo', '-ano', 'numero')
        context['numero_materias'] = len(context['materias'])
        return context
    def post(self, request, *args, **kwargs):
        success_url = reverse('sapl.comissoes:reuniao_detail', kwargs={'pk':kwargs['pk']})
        marcadas = request.POST.getlist('materia_id')
        if not marcadas:
            msg=_('Nenhuma matéria foi selecionada.')
            messages.add_message(request, messages.WARNING, msg)
            return HttpResponseRedirect(success_url)
        reuniao = Reuniao.objects.get(pk=kwargs['pk'])
        for materia in MateriaLegislativa.objects.filter(id__in=marcadas):
            PautaReuniao.objects.filter(reuniao=reuniao,materia=materia).delete()
        msg=_('Matéria(s) removida(s) com sucesso!')
        messages.add_message(request, messages.SUCCESS, msg)
        return HttpResponseRedirect(success_url)
class AdicionaPautaView(PermissionRequiredMixin, FilterView):
    """Adds selected matters to a Reuniao's pauta.

    GET renders a filterable list of matters at the meeting's commission
    that are not yet on the pauta; POST bulk-inserts the checked ones.
    """
    filterset_class = PautaReuniaoFilterSet
    template_name = 'comissoes/pauta.html'
    permission_required = ('comissoes.add_reuniao', )
    def get_context_data(self, **kwargs):
        context = super(
            AdicionaPautaView, self
        ).get_context_data(**kwargs)
        # Adiciona = 1; Remove = 0  (mode switch used by the shared template)
        context['opcao'] = 1
        context['object'] = Reuniao.objects.get(pk=self.kwargs['pk'])
        context['root_pk'] = context['object'].comissao.pk
        qr = self.request.GET.copy()
        # Matters already on the pauta must not be offered again.
        materias_pauta = PautaReuniao.objects.filter(reuniao=context['object'])
        nao_listar = [mp.materia.pk for mp in materias_pauta]
        if not len(qr):
            # No filter submitted yet: show nothing.
            context['object_list'] = []
        else:
            context['object_list'] = context['object_list'].filter(
                tramitacao__unidade_tramitacao_destino__comissao=context['root_pk']
            ).exclude(materia__pk__in=nao_listar).order_by(
                "materia__tipo", "-materia__ano", "materia__numero"
            )
        context['numero_resultados'] = len(context['object_list'])
        context['show_results'] = show_results_filter_set(qr)
        return context
    def post(self, request, *args, **kwargs):
        success_url = reverse('sapl.comissoes:reuniao_detail', kwargs={'pk':kwargs['pk']})
        marcadas = request.POST.getlist('materia_id')
        if not marcadas:
            # Typo fix in the user-facing warning: 'máteria' -> 'matéria'.
            msg = _('Nenhuma matéria foi selecionada.')
            messages.add_message(request, messages.WARNING, msg)
            return HttpResponseRedirect(success_url)
        reuniao = Reuniao.objects.get(pk=kwargs['pk'])
        pautas = []
        for materia in MateriaLegislativa.objects.filter(id__in=marcadas):
            pauta = PautaReuniao()
            pauta.reuniao = reuniao
            pauta.materia = materia
            pautas.append(pauta)
        # One INSERT for all selected matters.
        PautaReuniao.objects.bulk_create(pautas)
        msg = _('Matéria(s) adicionada(s) com sucesso!')
        messages.add_message(request, messages.SUCCESS, msg)
        return HttpResponseRedirect(success_url)
class DocumentoAcessorioCrud(MasterDetailCrud):
    """CRUD for accessory documents, nested under a Comissao via Reuniao."""
    model = DocumentoAcessorio
    parent_field = 'reuniao__comissao'
    public = [RP_DETAIL, ]
    # Documents are listed inside the meeting screens, not on their own.
    ListView = None
    link_return_to_parent_field = True
    class BaseMixin(MasterDetailCrud.BaseMixin):
        list_field_names = ['nome', 'tipo', 'data', 'autor', 'arquivo']
    class CreateView(MasterDetailCrud.CreateView):
        form_class = DocumentoAcessorioCreateForm
        def get_initial(self):
            initial = super().get_initial()
            # The form needs the parent pk taken from the URL.
            initial['parent_pk'] = self.kwargs['pk']
            return initial
    class UpdateView(MasterDetailCrud.UpdateView):
        layout_key = 'DocumentoAcessorioEdit'
        form_class = DocumentoAcessorioEditForm
    class DeleteView(MasterDetailCrud.DeleteView):
        def delete(self, *args, **kwargs):
            obj = self.get_object()
            obj.delete()
            # Back to the owning meeting's detail page.
            return HttpResponseRedirect(
                reverse('sapl.comissoes:reuniao_detail',
                        kwargs={'pk': obj.reuniao.pk}))
def get_participacoes_comissao(request):
    """AJAX endpoint: parlamentares of ?composicao_id= as a JSON list of
    {'nome': ..., 'id': ...} dicts (empty list when the param is missing)."""
    parlamentares = []
    composicao_id = request.GET.get('composicao_id')
    if composicao_id:
        participacoes = Participacao.objects.filter(
            composicao_id=composicao_id
        ).order_by('parlamentar__nome_parlamentar')
        for participacao in participacoes:
            parlamentares.append({
                'nome': participacao.parlamentar.nome_parlamentar,
                'id': participacao.parlamentar.id,
            })
    return JsonResponse(parlamentares, safe=False)
|
interlegis/sapl
|
sapl/comissoes/views.py
|
Python
|
gpl-3.0
| 15,757
|
import random
import textwrap
from configparser import ConfigParser
def explain() -> str:
    """Return a multi-line, dedented description of the Person Action
    Object (PAO) mnemonic system, with worked examples."""
    return textwrap.dedent(
        """\
        Person Action Object (PAO)
        The PAO is a system of encoding where you attribute a specific Person with an
        Action that includes an Object. This is a composite object which you can then use
        in a variety of ways. The idea is that you develop a collection of PAOs and assign
        each of them a number.
        Examples:
        15: Albert Einstein (person) writing (action) on a blackboard (object).
        16: Molly Ringwald (person) blowing candles (action) on a cake (object).
        23: Michael Jordan (person) shooting (action) a basketball (object).
        Armed with such an inventory you can use it for encoding of other information. Say
        you want to memorize a series of numbers and you had a PAO inventory from
        00-99. You could then assign the first six digits with a special combination of
        your PAO collection.
        Example:
        162315 => Molly Ringwald shooting a blackboard
        By doing this, you're compressing six digits into a single, composite image.
        """
    )
def flatten_pao(d):
    """Yield back (num, item) tuples for each PAO broken into items.

    Each PAO value is a comma-separated "person, action, object" string;
    the yielded items are prefixed 'p:', 'a:' or 'o:' to mark which part
    of the PAO they are.

    Args:
        d (dict): dictionary-like object that supports .items()

    Yields:
        (str, str)
    """
    for num, pao in d.items():
        person, action, obj = pao.split(",")
        parts = zip(("p:", "a:", "o:"), (person, action, obj))
        for prefix, part in parts:
            yield (num, prefix + part.strip())
def basic_quiz(config_file: str):
    """Test out your Person Action Object (PAO) knowledge

    Reads the [pao] section of *config_file*, shuffles the flattened
    person/action/object items, prompts for the number of each one and
    prints a final percentage score. Empty answers skip an item; EOF or
    Ctrl-C ends the quiz early.
    """
    config = ConfigParser()
    config.read(config_file)

    # TODO -- add an option to limit the values to test
    # e.g. if I only want to test PAO for 1 through 4
    # TODO add support for properly mixing up the PAO and testing
    if "pao" not in config.sections():
        print("No PAO Config setup. See README")
        return

    # Randomize the PAO items
    pairs = list(flatten_pao(config["pao"]))
    random.shuffle(pairs)

    score = 0
    answered = 0
    for number, item in pairs:
        try:
            guess = input("{}\n=> ".format(item))
        except (EOFError, KeyboardInterrupt):
            break
        if not guess:
            continue
        if guess == number:
            print("CORRECT!")
            score += 1
        else:
            print("INCORRECT: {}".format(number))
        answered += 1

    if answered:
        print("\n{:>2}% Correct".format(score / float(answered) * 100))
|
patrickshuff/artofmemory
|
artofmemory/pao.py
|
Python
|
mit
| 2,868
|
from func.overlord.groups import Groups,get_hosts_spec
from certmaster.config import read_config, CONFIG_FILE
from certmaster.commonconfig import CMConfig
import os
import fnmatch
from func.overlord.group.conf_backend import ConfBackend
from func.overlord.group.sqlite_backend import SqliteBackend
TEST_DB_FILE = "/tmp/test_sqlite.db"
TEST_CONF_FILE = "/tmp/test_conf.conf"
class BaseMinions(object):
    """Creates and removes throwaway minion certificate files (named
    '0'..'N-1' in the certmaster certroot) used as fixtures by the tests."""
    def create_dummy_minions(self,howmany=None):
        """
        Creates a lots of minions so we can query
        with different minion names cool isnt it

        Returns the list of created minion names (as strings).
        """
        cm_config = read_config(CONFIG_FILE, CMConfig)
        howmany = howmany or 100 #it is a good default number
        final_list = []
        for m in xrange(howmany):
            # Touch an empty cert file named after the minion number.
            tmp_f = open("%s/%s.%s" % (cm_config.certroot,str(m),cm_config.cert_extension),"w")
            tmp_f.close()
            final_list.append(str(m))
        print "%d dummy minions created "%howmany
        return final_list
    def clean_dummy_minions(self,howmany=None):
        """
        Deletes a lots of minions garbage
        """
        cm_config = read_config(CONFIG_FILE, CMConfig)
        howmany = howmany or 100 #it is a good default number
        for m in xrange(howmany):
            tmp_f = "%s/%s.%s" % (cm_config.certroot,str(m), cm_config.cert_extension)
            if os.path.exists(tmp_f):
                os.remove(tmp_f)
        print "%d dummy minions cleaned "%howmany
class BaseGroupT(object):
    """Shared fixture helpers for testing both Groups storage backends
    (sqlite and conf) side by side."""
    backends = [
        {'backend':'sqlite','db_file':TEST_DB_FILE},
        {'backend':'conf','conf_file':TEST_CONF_FILE}
    ]
    def refresh_backend(self,g_object):
        """
        Here you should add your object in if statements

        Returns a fresh Groups object bound to the same backend kind as
        *g_object* (forcing a re-read of the persisted state), or None
        for an unknown backend.
        """
        from func.overlord.group.conf_backend import ConfBackend
        from func.overlord.group.sqlite_backend import SqliteBackend
        if isinstance(g_object.backend,ConfBackend):
            return Groups(**self.backends[1])
        elif isinstance(g_object.backend,SqliteBackend):
            return Groups(**self.backends[0])
        else:
            return None
    def get_group_objects(self):
        """
        Initializer

        Returns one Groups object per configured backend.
        """
        gr_list = []
        for b in self.backends:
            gr_list.append(Groups(**b))
        return gr_list
    def clean_t_files(self,path):
        """
        Clean the initialized stuff
        """
        if os.path.exists(path):
            os.remove(path)
class TestGroupApi(BaseGroupT,BaseMinions):
    """Exercises the Groups API (add/get/remove for groups and hosts,
    plain, list and glob variants) against every configured backend.
    Each mutation is followed by refresh_backend() so the assertion
    reads persisted state rather than in-memory state."""
    def setUp(self):
        """
        Will be called after every
        """
        #clean current files
        self.clean_t_files(TEST_DB_FILE)
        self.clean_t_files(TEST_CONF_FILE)
        #destroy and create minions
        self.clean_dummy_minions()
        self.current_minions = self.create_dummy_minions()
        #get groups
        self.groups = self.get_group_objects()
    def teardown(self):
        """
        Clean the stuff
        """
        self.clean_dummy_minions()
        self.clean_t_files(TEST_DB_FILE)
        self.clean_t_files(TEST_CONF_FILE)
    def test_add_group(self):
        """
        adds a single group item
        """
        for g in self.groups:
            assert g.add_group("group1",save=True)[0]== True
            g = self.refresh_backend(g)
            # Adding the same group twice must fail.
            assert g.add_group("group1")[0] == False
    def test_add_host_to_group(self):
        """
        adds a host test
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            assert g.add_host_to_group(g_name,"host1")[0] == True
            g = self.refresh_backend(g)
            # Duplicate host additions must be rejected.
            assert g.add_host_to_group(g_name,"host1")[0] == False
    def test_add_hostst_to_group(self):
        """
        Test adding hosts via string
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            # Both comma- and semicolon-separated host strings are accepted.
            g.add_hosts_to_group(g_name,"host1,host2,host3")
            g = self.refresh_backend(g)
            g.add_hosts_to_group(g_name,"host5;host7;host8")
    def test_add_host_list(self):
        """
        Test adding hosts via list
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_host_list(g_name,["host1","host2","host3"])
            g = self.refresh_backend(g)
            g.add_host_list(g_name,["host1","host2","host3"])
            g = self.refresh_backend(g)
            g.add_host_list(g_name,["host4","host5","host6"])
    def test_add_hosts_to_group_glob(self):
        """
        Test globbing addition
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_hosts_to_group_glob(g_name,"*") #add all of them
            g = self.refresh_backend(g)
        self.groups = self.get_group_objects()
        for g in self.groups:
            for h in self.current_minions:
                if self.current_minions.index(h) %10 == 0:
                    print "Tests completed : ",self.current_minions.index(h)
                # Every minion was added by the glob, so re-adding fails.
                assert g.add_host_to_group(g_name,h)[0] == False
                #print "Let see IT ",g.add_host_to_group(g_name,h)[0]
                g = self.refresh_backend(g)
        #clear again so we can test exclude thing
        self.teardown()
        self.setUp()
        #print "Testing exclude string ...."
        self.groups = self.get_group_objects()
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            # Exclude every minion whose name ends in an odd digit.
            g.add_hosts_to_group_glob(g_name,"*",exclude_string="*[1,3,5,7,9]")
            g = self.refresh_backend(g)
            #add all of them
            for h in self.current_minions:
                #print "Checking for : ",h
                if int(h)%2==0:
                    assert g.add_host_to_group(g_name,h)[0] == False
                    g = self.refresh_backend(g)
                else:
                    assert g.add_host_to_group(g_name,h)[0] == True
                    g = self.refresh_backend(g)
    def test_get_groups(self):
        """
        test get groups
        """
        for g in self.groups:
            g.add_group("group1")
            g = self.refresh_backend(g)
            g.add_group("group2")
            g = self.refresh_backend(g)
            #get all groups
            grs = g.get_groups()
            assert self._t_compare_arrays(grs,["group1","group2"]) == True
            #get one
            tmg = g.get_groups(pattern="group1")
            assert tmg==["group1"]
            tmg = g.get_groups(pattern="gr",exact=False)
            assert self._t_compare_arrays(tmg,["group1","group2"])==True
            tmg = g.get_groups(pattern="gr",exact=False,exclude=["group2"])
            assert tmg == ["group1"]
            #test also an empty one
            tmg = g.get_groups(pattern="group3")
            assert tmg == []
    def test_get_groups_glob(self):
        """
        Globbing in groups
        """
        for g in self.groups:
            g.add_group("group1")
            g = self.refresh_backend(g)
            g.add_group("group2")
            g = self.refresh_backend(g)
            #get all groups
            grs = g.get_groups_glob("*")
            assert self._t_compare_arrays(grs,["group1","group2"]) == True
            #get one
            tmg = g.get_groups_glob("*[1]")
            assert tmg == ["group1"]
            tmg = g.get_groups_glob("*",exclude_string="*[2]")
            assert tmg == ["group1"]
            #test also an empty one
            tmg = g.get_groups_glob("*[3]")
            assert tmg == []
    def test_get_hosts(self):
        """
        Get hosts tests
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_host_list(g_name,["host1","host2","host3"])
            g = self.refresh_backend(g)
            hosts = g.get_hosts(group=g_name)
            assert self._t_compare_arrays(hosts,["host1","host2","host3"]) == True
            #get only one
            host = g.get_hosts(pattern="host1",group=g_name)
            assert host == ["host1"]
            #get pattern
            host = g.get_hosts(pattern="ho",group=g_name,exact=False)
            assert self._t_compare_arrays(host,["host1","host2","host3"]) == True
            host = g.get_hosts(pattern="ho",group=g_name,exact=False,exclude=["host1","host2"])
            assert host==["host3"]
            #an empty test also
            host = g.get_hosts(pattern="host4")
            assert host==[]
    def test_get_hosts_glob(self):
        """
        test hosts for glob strings
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_hosts_to_group_glob(g_name,"*") #add all of them
            g = self.refresh_backend(g)
            hosts = g.get_hosts_glob("@group1")
            assert self._t_compare_arrays(hosts,self.current_minions) == True
            #try subgroupping thing on the fly
            hosts = g.get_hosts_glob("@group1:[0-9]")
            assert self._t_compare_arrays(hosts,list(range(10))) == True
            #try the exclude string
            hosts = g.get_hosts_glob("@group1",exclude_string="@group1:[0-9][0-9]")
            assert self._t_compare_arrays(hosts,list(range(10))) == True
            hosts = g.get_hosts_glob("@group1:[1-5][0-9];@group1:[6-9][0-9]",exclude_string="@group1:[1-8][0-9];@group1:[9][0-9]")
            assert self._t_compare_arrays(hosts,[]) == True
    def test_remove_group(self):
        """
        remove group test
        """
        for g in self.groups:
            g.add_group("group1")
            g = self.refresh_backend(g)
            #removing the group
            assert g.remove_group("group1")[0] == True
            g = self.refresh_backend(g)
            # Removing a non-existent group must fail.
            assert g.remove_group("group1")[0] == False
            g = self.refresh_backend(g)
            grs = g.get_groups_glob("*")
            assert grs == []
    def test_remove_group_list(self):
        """
        remove a list of groups
        """
        for g in self.groups:
            g.add_group("group1")
            g = self.refresh_backend(g)
            g.add_group("group2")
            g = self.refresh_backend(g)
            #removing the group
            g.remove_group_list(["group1","group2"])
            g = self.refresh_backend(g)
            grs = g.get_groups_glob("*")
            assert grs == []
    def test_remove_group_glob(self):
        """
        Remove groups by glob
        """
        for g in self.groups:
            g.add_group("group1")
            g = self.refresh_backend(g)
            g.add_group("group2")
            g = self.refresh_backend(g)
            #removing the group
            g.remove_group_glob("gr*")
            g = self.refresh_backend(g)
            grs = g.get_groups_glob("*")
            assert grs == []
    def test_remove_host(self):
        """
        remove host test
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_host_list(g_name,["host1","host2","host3"])
            g = self.refresh_backend(g)
            assert g.remove_host(g_name,"host1")[0] == True
            g = self.refresh_backend(g)
            # Already removed: must fail the second time.
            assert g.remove_host(g_name,"host1")[0] == False
            g = self.refresh_backend(g)
            hosts = g.get_hosts(group=g_name)
            assert self._t_compare_arrays(hosts,["host2","host3"])
            assert g.remove_host(g_name,"host2")[0] ==True
            g = self.refresh_backend(g)
            hosts = g.get_hosts(group=g_name)
            assert self._t_compare_arrays(hosts,["host3"])
    def test_remove_host_list(self):
        """
        Remove the host list
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_host_list(g_name,["host1","host2","host3"])
            g = self.refresh_backend(g)
            g.remove_host_list(g_name,["host1","host2"])
            g = self.refresh_backend(g)
            hosts = g.get_hosts(group=g_name)
            assert hosts == ["host3"]
    def test_remove_host_glob(self):
        """
        Remove hosts bu glob
        """
        g_name = "group1"
        for g in self.groups:
            g.add_group(g_name)
            g = self.refresh_backend(g)
            g.add_hosts_to_group_glob(g_name,"*") #add all of them
            g = self.refresh_backend(g)
            g.remove_host_glob("group1","*")
            g = self.refresh_backend(g)
            hosts = g.get_hosts_glob("@group1")
            assert hosts==[]
            g.add_hosts_to_group_glob(g_name,"*") #add all of them
            g = self.refresh_backend(g)
            #try subgroupping thing on the fly
            g.remove_host_glob("group1","[0-9][0-9]")
            g = self.refresh_backend(g)
            hosts = g.get_hosts_glob("@group1:*")
            assert self._t_compare_arrays(hosts,list(range(10))) == True
            #try the exclude string
            g.remove_host_glob("group1","*",exclude_string="[0-9][0-9]")
            g = self.refresh_backend(g)
            hosts = g.get_hosts_glob("@group1:*")
            assert self._t_compare_arrays(hosts,[]) == True
    def _t_compare_arrays(self,one,two):
        # Thin instance-level wrapper over the module-level helper.
        return compare_arrays(one,two)
def compare_arrays(one, two):
    """Loosely compare two sequences, coercing *two*'s items to str.

    Returns True when the sequences are directly equal, or when every
    element of *one* appears in the stringified *two* (so e.g. a list of
    host-name strings matches a range of ints). Empty vs non-empty is
    always unequal.

    BUG FIX: the original returned True for ANY two non-empty, unequal
    sequences (its 'else: return True' made the str-coercion loop below
    unreachable), which made most callers' assertions vacuous.

    NOTE(review): the original subset semantics are preserved -- lengths
    and reverse containment are deliberately not checked.
    """
    if one == two:
        return True
    if not one or not two:
        return False
    two = [str(i) for i in two]
    for o in one:
        if o not in two:
            return False
    return True
from func.overlord.client import Minions
class TestMinionGroups(BaseMinions):
    """
    Test the minion methods that wraps the group classes
    """
    # Same two backends as BaseGroupT, but keyed the way Minions expects.
    backends = [
        {'groups_backend':'sqlite','db_file':TEST_DB_FILE},
        {'groups_backend':'conf','conf_file':TEST_CONF_FILE}
    ]
    def teardown(self):
        for path in [TEST_DB_FILE,TEST_CONF_FILE]:
            if os.path.exists(path):
                os.remove(path)
        self.clean_dummy_minions()
    def setUp(self):
        #destroy and create minions
        self.clean_dummy_minions()
        self.current_minions = self.create_dummy_minions()
        #get groups
    def test_get_urls(self):
        # Smoke test only: print the urls produced for a numeric spec.
        for backend_dict in self.backends:
            #create a minion with relevant backens
            m = Minions("[0-9]",**backend_dict)
            hosts = m.get_urls()
            print hosts
    def test_get_hosts_for_spec(self):
        """
        Testing the minions just to pull things for a spec
        """
        spec = "*"
        m = Minions(spec)
        minions = m.get_hosts_for_spec(spec)
        assert compare_arrays(minions,self.current_minions) == True
    def test_get_all_hosts(self):
        """
        Getting all hosts
        """
        for backend_dict in self.backends:
            #create a minion with relevant backens
            m = Minions("*",**backend_dict)
            #create some groups and hosts into that Minion
            m.group_class.add_group("group1")
            m.group_class.add_hosts_to_group_glob("group1","[0-9]")
            hosts = m.get_all_hosts()
            assert compare_arrays(hosts,self.current_minions) == True
            #now test with grouping
            m = Minions("[1][0-9];@group1:*",**backend_dict)
            hosts = m.get_all_hosts()
            assert compare_arrays(hosts,range(20)) == True
            m = Minions("[1][0-5];@group1:[5-9]",**backend_dict)
            hosts = m.get_all_hosts()
            assert compare_arrays(hosts,range(5,16)) == True
            #do some testing about exclude string
            m = Minions("*",exclude_spec="[1-9][0-9]",**backend_dict)
            hosts = m.get_all_hosts()
            assert compare_arrays(hosts,range(10)) == True
            m = Minions("[1][0-5];@group1:[5-9]",exclude_spec="[1][3-5];@group1:[5-7]",**backend_dict)
            hosts = m.get_all_hosts()
            assert compare_arrays(hosts,range(8,13)) == True
if __name__ == "__main__":
    #Ad-hoc entry point: populate dummy minions for manual experimentation.
    b = BaseMinions()
    b.create_dummy_minions()
    #b.clean_dummy_minions()
|
dockerera/func
|
test/unittest/test_groups_api.py
|
Python
|
gpl-2.0
| 16,593
|
# -*- coding:utf-8 -*-
from PIL import Image
from PIL import ImageEnhance
import numpy as np
def get_image(path, shape, format):
    """
    Load an image from disk and return it as a 4-D uint8 numpy batch array.

    :param path: filesystem path of the image to load
    :param format: data layout selector; 'NCHW' transposes HWC -> CHW
    :param shape: target dimensions; shape[1] and shape[2] are passed to
        PIL.Image.resize as (width, height), and the final reshape is
        [1, shape[0], shape[1], shape[2]] -- presumably (C, H, W); TODO confirm
    :return: numpy array of shape [1, shape[0], shape[1], shape[2]], dtype uint8
    """
    img = Image.open(path)
    img = img.resize(size=(shape[1], shape[2]), resample=Image.LANCZOS)
    img_array = np.asarray(img, dtype=np.uint8)
    if format == 'NCHW':
        img_array = img_array.transpose(2, 0, 1)
    # NOTE(review): this reshape runs for every format, but its [1, C, H, W]
    # ordering only matches the 'NCHW' branch above -- confirm whether
    # non-NCHW callers expect [1, H, W, C] instead.
    img_array = img_array.reshape([1, shape[0], shape[1], shape[2]])
    return img_array
class Enhance(object):
    """Convenience wrapper around PIL.ImageEnhance operations.

    Works either from a file path (loaded lazily on each call) or from an
    already-opened PIL image set via the constructor or set_img().
    """

    def __init__(self,
                 path=None,
                 img=None):
        # path: image file to load lazily; img: already-opened PIL image.
        # When both are given, img wins.
        self.path = path
        self.img = img

    def set_img(self, img):
        """Replace the cached image used by subsequent enhance calls."""
        self.img = img

    def _apply(self, enhancer_cls, factor, new_path, is_show, title):
        """Shared implementation for the four public *_enhance methods.

        Loads the image from self.path when no image object is cached,
        applies *enhancer_cls* with *factor*, optionally saves the result
        to *new_path* and/or displays it, and returns the enhanced image.
        (The four public methods were identical except for the enhancer
        class and window title; the duplication is factored out here.)
        """
        if self.img is None:
            img = Image.open(self.path)
        else:
            img = self.img
        img = enhancer_cls(img).enhance(factor)
        if new_path is not None:
            img.save(new_path)
        if is_show:
            img.show(title=title)
        return img

    def color_enhance(self, factor, new_path=None, is_show=False):
        """Adjust color saturation; factor 1.0 returns the original image."""
        return self._apply(ImageEnhance.Color, factor, new_path, is_show, 'color')

    def brightness_enhance(self, factor, new_path=None, is_show=False):
        """Adjust brightness; factor 1.0 returns the original image."""
        return self._apply(ImageEnhance.Brightness, factor, new_path, is_show, 'brightness')

    def contrast_enhance(self, factor, new_path=None, is_show=False):
        """Adjust contrast; factor 1.0 returns the original image."""
        return self._apply(ImageEnhance.Contrast, factor, new_path, is_show, 'contrast')

    def sharpness_enhance(self, factor, new_path=None, is_show=False):
        """Adjust sharpness; factor 1.0 returns the original image."""
        return self._apply(ImageEnhance.Sharpness, factor, new_path, is_show, 'sharpness')
|
gu-yan/mlAlgorithms
|
mxnet/cv_tools/image_tool.py
|
Python
|
apache-2.0
| 2,253
|
""" PublisherHandler
This service has been built to provide the RSS web views with all the information
they need. NO OTHER COMPONENT THAN Web controllers should make use of it.
"""
__RCSID__ = '$Id$'
# pylint: disable=no-self-use
import types
from datetime import datetime, timedelta
# DIRAC
from DIRAC import gLogger, S_OK, gConfig, S_ERROR
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities.SiteSEMapping import getSEHosts, getStorageElementsHosts
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getSites
from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers
from DIRAC.ResourceStatusSystem.Client.ResourceStatusClient import ResourceStatusClient
from DIRAC.ResourceStatusSystem.Utilities import CSHelpers, Utils
# Load the VO-specific ResourceManagementClient implementation dynamically.
ResourceManagementClient = getattr(
    Utils.voimport('DIRAC.ResourceStatusSystem.Client.ResourceManagementClient'),
    'ResourceManagementClient')
# RSS Clients: module-level singletons, populated by initializePublisherHandler.
rsClient = None
rmClient = None
def initializePublisherHandler(_serviceInfo):
  """
  Handler initialization in the usual horrible way.

  Instantiates the module-global RSS clients shared by every
  PublisherHandler instance.
  """
  global rsClient
  rsClient = ResourceStatusClient()
  global rmClient
  rmClient = ResourceManagementClient()
  return S_OK()
class PublisherHandler(RequestHandler):
  """
  RPCServer used to deliver data to the web portal.

  All export_* methods return DIRAC S_OK / S_ERROR structures; the types_*
  class attributes declare the argument types accepted over DISET.
  """

  def __init__(self, *args, **kwargs):
    """
    Constructor
    """
    super(PublisherHandler, self).__init__(*args, **kwargs)

  # ResourceStatusClient .......................................................

  types_getSites = []

  def export_getSites(self):
    """
    Returns list of all sites considered by RSS

    :return: S_OK( [ sites ] ) | S_ERROR
    """
    gLogger.info('getSites')
    return getSites()

  types_getSitesResources = [(basestring, list, types.NoneType)]

  def export_getSitesResources(self, siteNames):
    """
    Returns dictionary with SEs and CEs for the given site(s). If siteNames is
    None, all sites are taken into account.

    :return: S_OK( { site1 : { 'ces' : [ ces ], 'ses' : [ ses ] },... } ) | S_ERROR
    """
    gLogger.info('getSitesResources')
    if siteNames is None:
      siteNames = getSites()
      if not siteNames['OK']:
        return siteNames
      siteNames = siteNames['Value']
    if isinstance(siteNames, basestring):
      siteNames = [siteNames]
    sitesRes = {}
    for siteName in siteNames:
      res = {}
      res['ces'] = CSHelpers.getSiteComputingElements(siteName)
      # Convert StorageElements to host names.
      # Bug fix: the mapping result used to be assigned to `res` itself,
      # clobbering the 'ces' entry built just above.
      seMapping = DMSHelpers().getSiteSEMapping()
      if not seMapping['OK']:
        return seMapping
      ses = seMapping['Value'][1].get(siteName, [])
      sesHosts = getStorageElementsHosts(ses)
      if not sesHosts['OK']:
        return sesHosts
      # Remove duplicates
      res['ses'] = list(set(sesHosts['Value']))
      sitesRes[siteName] = res
    return S_OK(sitesRes)

  types_getElementStatuses = [basestring, (basestring, list, types.NoneType), (basestring, list, types.NoneType),
                              (basestring, list, types.NoneType), (basestring, list, types.NoneType),
                              (basestring, list, types.NoneType)]

  def export_getElementStatuses(self, element, name, elementType, statusType, status, tokenOwner):
    """
    Returns element statuses from the ResourceStatusDB
    """
    gLogger.info('getElementStatuses')
    return rsClient.selectStatusElement(element, 'Status', name=name, elementType=elementType,
                                        statusType=statusType, status=status,
                                        tokenOwner=tokenOwner)

  types_getElementHistory = [basestring, (basestring, list, types.NoneType), (basestring, list, types.NoneType),
                             (basestring, list, types.NoneType)]

  def export_getElementHistory(self, element, name, elementType, statusType):
    """
    Returns element history from ResourceStatusDB
    """
    gLogger.info('getElementHistory')
    columns = ['Status', 'DateEffective', 'Reason']
    return rsClient.selectStatusElement(element, 'History', name=name, elementType=elementType,
                                        statusType=statusType,
                                        meta={'columns': columns})

  types_getElementPolicies = [basestring, (basestring, list, types.NoneType), (basestring, list, types.NoneType)]

  def export_getElementPolicies(self, element, name, statusType):
    """
    Returns policies for a given element
    """
    gLogger.info('getElementPolicies')
    columns = ['Status', 'PolicyName', 'DateEffective', 'LastCheckTime', 'Reason']
    return rmClient.selectPolicyResult(element=element, name=name,
                                       statusType=statusType,
                                       meta={'columns': columns})

  types_getNodeStatuses = []

  def export_getNodeStatuses(self):
    """
    Returns all Node status rows from the ResourceStatusDB.
    """
    return rsClient.selectStatusElement('Node', 'Status')

  types_getTree = [basestring, basestring]

  def export_getTree(self, elementType, elementName):
    """
    Given an element type and name,
    finds its parent site and returns all descendants of that site.
    """
    gLogger.info('getTree')
    site = self.getSite(elementType, elementName)
    if not site:
      return S_ERROR('No site')
    siteStatus = rsClient.selectStatusElement('Site', 'Status', name=site,
                                              meta={'columns': ['StatusType', 'Status']})
    if not siteStatus['OK']:
      return siteStatus
    tree = {site: {'statusTypes': dict(siteStatus['Value'])}}
    ces = CSHelpers.getSiteComputingElements(site)
    cesStatus = rsClient.selectStatusElement('Resource', 'Status', name=ces,
                                             meta={'columns': ['Name', 'StatusType', 'Status']})
    if not cesStatus['OK']:
      return cesStatus
    res = DMSHelpers().getSiteSEMapping()
    if not res['OK']:
      return res
    ses = res['Value'][1].get(site, [])
    sesStatus = rsClient.selectStatusElement('Resource', 'Status', name=list(ses),
                                             meta={'columns': ['Name', 'StatusType', 'Status']})
    # Bug fix: this used to `return sesStatus` unconditionally, which made
    # the tree construction below unreachable; only bail out on error.
    if not sesStatus['OK']:
      return sesStatus

    def feedTree(elementsList):
      # Fold [(name, statusType, status), ...] rows into
      # {name: {statusType: status}}.
      elements = {}
      for elementTuple in elementsList['Value']:
        name, statusType, status = elementTuple
        if name not in elements:
          elements[name] = {}
        elements[name][statusType] = status
      return elements

    tree[site]['ces'] = feedTree(cesStatus)
    tree[site]['ses'] = feedTree(sesStatus)
    return S_OK(tree)

  types_setToken = [basestring] * 7

  def export_setToken(self, element, name, statusType, token, elementType, username, lastCheckTime):
    """
    Acquire or release the ownership token of a status element.

    :param token: 'Acquire' or 'Release'
    :param lastCheckTime: '%Y-%m-%d %H:%M:%S' timestamp used as an
        optimistic lock -- the row must not have changed since the portal
        read it, otherwise the caller is asked to refresh.
    """
    lastCheckTime = datetime.strptime(lastCheckTime, '%Y-%m-%d %H:%M:%S')
    credentials = self.getRemoteCredentials()
    gLogger.info(credentials)
    elementInDB = rsClient.selectStatusElement(element, 'Status', name=name,
                                               statusType=statusType,
                                               elementType=elementType,
                                               lastCheckTime=lastCheckTime)
    if not elementInDB['OK']:
      return elementInDB
    elif not elementInDB['Value']:
      return S_ERROR('Your selection has been modified. Please refresh.')
    if token == 'Acquire':
      tokenOwner = username
      tokenExpiration = datetime.utcnow() + timedelta(days=1)
    elif token == 'Release':
      # 'rs_svc' is the RSS service's own account; releasing hands the
      # token back to it with no practical expiration.
      tokenOwner = 'rs_svc'
      tokenExpiration = datetime.max
    else:
      return S_ERROR('%s is unknown token action' % token)
    reason = 'Token %sd by %s ( web )' % (token, username)
    newStatus = rsClient.addOrModifyStatusElement(element, 'Status', name=name,
                                                  statusType=statusType,
                                                  elementType=elementType,
                                                  reason=reason,
                                                  tokenOwner=tokenOwner,
                                                  tokenExpiration=tokenExpiration)
    if not newStatus['OK']:
      return newStatus
    return S_OK(reason)

  def getSite(self, elementType, elementName):
    """
    Given an element name, return its site

    Walks Resources/Sites/<domain>/<site>/<elementType> in the CS until a
    site listing *elementName* is found; returns '' when none matches.
    """
    if elementType == 'StorageElement':
      elementType = 'SE'
    domainNames = gConfig.getSections('Resources/Sites')
    if not domainNames['OK']:
      return domainNames
    domainNames = domainNames['Value']
    for domainName in domainNames:
      sites = gConfig.getSections('Resources/Sites/%s' % domainName)
      if not sites['OK']:
        continue
      for site in sites['Value']:
        elements = gConfig.getValue('Resources/Sites/%s/%s/%s' % (domainName, site, elementType), '')
        if elementName in elements:
          return site
    return ''

  # ResourceManagementClient ...................................................

  types_getDowntimes = [basestring, basestring, basestring]

  def export_getDowntimes(self, element, elementType, name):
    """
    Returns downtime cache entries for the given element; StorageElements
    are first resolved to their host names.
    """
    if elementType == 'StorageElement':
      res = getSEHosts(name)
      if not res['OK']:
        return res
      names = res['Value']
    else:
      names = name
    return rmClient.selectDowntimeCache(element=element, name=names,
                                        meta={'columns': ['StartDate', 'EndDate',
                                                          'Link', 'Description',
                                                          'Severity']})

  types_getCachedDowntimes = [(basestring, types.NoneType, list),
                              (basestring, types.NoneType, list),
                              (basestring, types.NoneType, list),
                              (basestring, types.NoneType, list)]

  def export_getCachedDowntimes(self, element, elementType, name, severity):
    """
    Returns cached downtimes, with the selected column list attached to
    the result under the 'Columns' key (for the web portal's tables).
    """
    if elementType == 'StorageElement':
      res = getSEHosts(name)
      if not res['OK']:
        return res
      names = res['Value']
    else:
      names = name
    columns = ['Element', 'Name', 'StartDate', 'EndDate', 'Severity', 'Description', 'Link']
    res = rmClient.selectDowntimeCache(element=element, name=names, severity=severity,
                                       meta={'columns': columns})
    if not res['OK']:
      return res
    result = S_OK(res['Value'])
    result['Columns'] = columns
    return result

  types_setStatus = [basestring] * 7

  def export_setStatus(self, element, name, statusType, status, elementType, username, lastCheckTime):
    """
    Force a new status on an element on behalf of *username*.
    Uses lastCheckTime as an optimistic lock, like export_setToken.
    """
    lastCheckTime = datetime.strptime(lastCheckTime, '%Y-%m-%d %H:%M:%S')
    credentials = self.getRemoteCredentials()
    gLogger.info(credentials)
    elementInDB = rsClient.selectStatusElement(element, 'Status', name=name,
                                               statusType=statusType,
                                               # status = status,
                                               elementType=elementType,
                                               lastCheckTime=lastCheckTime)
    if not elementInDB['OK']:
      return elementInDB
    elif not elementInDB['Value']:
      return S_ERROR('Your selection has been modified. Please refresh.')
    reason = 'Status %s forced by %s ( web )' % (status, username)
    tokenExpiration = datetime.utcnow() + timedelta(days=1)
    newStatus = rsClient.addOrModifyStatusElement(element, 'Status', name=name,
                                                  statusType=statusType,
                                                  status=status,
                                                  elementType=elementType,
                                                  reason=reason,
                                                  tokenOwner=username,
                                                  tokenExpiration=tokenExpiration)
    if not newStatus['OK']:
      return newStatus
    return S_OK(reason)
|
andresailer/DIRAC
|
ResourceStatusSystem/Service/PublisherHandler.py
|
Python
|
gpl-3.0
| 11,982
|
from transform import transform
def modify_proxy_request(request, log):
    """Force a desktop-Firefox User-Agent onto the proxied request.

    Upstream servers only serve MathML to browsers they recognize, so the
    header is spoofed unconditionally. Returns the mutated request.
    """
    firefox_ua = ('Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.11) '
                  'Gecko/20101013 Ubuntu/10.10 (maverick) Firefox/3.6.11')
    request.headers['User-Agent'] = firefox_ua
    return request
|
Rhaptos/cnxmobile
|
src/cnxmobile/cnxmobile/__init__.py
|
Python
|
lgpl-2.1
| 294
|
# -*- encoding: utf-8 -*-
# import time
# from project.settings import log_debug
#
# def main():
# i = 0
#
# while i < 500:
# time.sleep(0.01)
# i += 1
# log_debug(i)
#
# if __name__ == '__main__':
# main()
import asyncio
async def echo_server():
    """Bind the echo service on localhost:8000.

    Rewritten with async/await: the generator-based @asyncio.coroutine
    decorator was deprecated in Python 3.8 and removed in 3.11.
    """
    await asyncio.start_server(handle_connection, 'localhost', 8000)
async def handle_connection(reader, writer):
    """Echo every chunk received on *reader* back through *writer*.

    Reads up to 8192 bytes at a time until EOF. Rewritten with
    async/await (@asyncio.coroutine was removed in Python 3.11); also
    drains after each write to apply backpressure instead of buffering
    unboundedly, and closes the writer when the peer disconnects.
    """
    while True:
        data = await reader.read(8192)
        if not data:
            break  # EOF: peer closed its end of the connection
        writer.write(data)
        await writer.drain()
    writer.close()
# Start the echo server, then serve connections until interrupted.
loop = asyncio.get_event_loop()
loop.run_until_complete(echo_server())
try:
    loop.run_forever()
finally:
    # Always release the loop's resources, even on KeyboardInterrupt.
    loop.close()
|
INP-Group/ProjectN-Control
|
src/server.py
|
Python
|
mit
| 688
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """portals 0002: add the Node.problems_in_content many-to-many field."""
    dependencies = [
        ('problems', '0004_problem_main_problem_instance'),
        ('portals', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='node',
            name='problems_in_content',
            # blank=True: the relation is optional in forms/admin.
            field=models.ManyToManyField(to='problems.Problem', blank=True),
            preserve_default=True,
        ),
    ]
|
sio2project/oioioi
|
oioioi/portals/migrations/0002_node_problems_in_content.py
|
Python
|
gpl-3.0
| 521
|
## Matt Moehr
|
mattmoehr/ks-power-rankings
|
code/code-get-data/web_scraper.py
|
Python
|
gpl-2.0
| 14
|
"""
Giving models custom methods
Any method you add to a model will be available to instances.
"""
import datetime
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Article(models.Model):
    # headline: short title; pub_date: date of publication.
    headline = models.CharField(max_length=100)
    pub_date = models.DateField()
    def __str__(self):
        """Human-readable representation: the headline."""
        return self.headline
    def was_published_today(self):
        """Return True if pub_date equals today's date."""
        return self.pub_date == datetime.date.today()
    def articles_from_same_day_1(self):
        """Return other articles with the same pub_date (ORM version)."""
        return Article.objects.filter(pub_date=self.pub_date).exclude(id=self.id)
    def articles_from_same_day_2(self):
        """
        Verbose version of get_articles_from_same_day_1, which does a custom
        database query for the sake of demonstration.
        """
        # Local import keeps the demo self-contained within this method.
        from django.db import connection
        with connection.cursor() as cursor:
            cursor.execute("""
                SELECT id, headline, pub_date
                FROM custom_methods_article
                WHERE pub_date = %s
                    AND id != %s""", [connection.ops.adapt_datefield_value(self.pub_date),
                          self.id])
            # Rehydrate model instances from the raw rows (id, headline, pub_date).
            return [self.__class__(*row) for row in cursor.fetchall()]
|
kisna72/django
|
tests/custom_methods/models.py
|
Python
|
bsd-3-clause
| 1,265
|
#!/usr/bin/env python
import sys
import netrc
import time
from alarmdealerscrape import AlarmDealerClient
def main(argv=None):
    """Poll the alarm-dealer status three times with growing intervals.

    Credentials come from the user's ~/.netrc entry for the service
    domain. Returns None (implicitly) so sys.exit treats it as success.
    """
    if argv is None:
        argv = sys.argv
    auth = netrc.netrc().authenticators(AlarmDealerClient.DOMAIN)
    username, code, password = auth
    client = AlarmDealerClient()
    print("logging in")
    client.login(username, password)
    print("getting status")
    print(client.get_status())
    # Repeat the status check after progressively longer waits.
    for interval in (60, 120):
        print("waiting %d secs between status checks" % interval)
        time.sleep(interval)
        print("getting status")
        print(client.get_status())
if __name__ == '__main__':
    # Exit with main()'s return value as the process status.
    sys.exit(main())
|
tubaman/alarmdealerscrape
|
examples/test_long_interval_status.py
|
Python
|
bsd-3-clause
| 748
|
################################################################################
# Copyright 2013 University of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This code was developed by the Information Integration Group as part
# of the Karma project at the Information Sciences Institute of the
# University of Southern California. For more information, publications,
# and related projects, please see: http://www.isi.edu/integration
################################################################################
#split the data
#Reads GPSoutput.txt and splits its records into one CSV file per calendar
#day. Each data line has '|' normalized to ',' and its leading id field
#dropped; the first 10 characters of the timestamp identify the day.
import datetime
f = open("GPSoutput.txt")
contents = f.read()
lines = contents.splitlines()
nlines = len(lines)
filename = ' '
j = 1
outputFile = None  # current per-day output handle
for i in range(1, nlines):
    line = lines[i]
    if line.count(',') >= 2:
        newline = line.replace('|', ',')
        idx = newline.find(',')
        newline = newline[idx+1:]
        idx = newline.find(',')
        timestamp = newline[:idx]
        newline = newline[idx+1:]
        if filename == timestamp[:10]:
            j = j + 1
            outputFile.write(str(j) + ',' + timestamp + ',' + newline + '\n' )
        else:
            # Bug fix: close the previous day's file before opening the next
            # one -- the original leaked every handle it opened.
            if outputFile is not None:
                outputFile.close()
            # NOTE(review): mode 'w' truncates an existing file if the same
            # date reappears later in the input; confirm dates are contiguous.
            outputFile = open("GPSoutput" + timestamp[:10] + ".txt", 'w')
            outputFile.write('index,time,latitude,longitude,accuracy,bearing\n')
            j = 1
            outputFile.write(str(j) + ',' + timestamp + ',' + newline + '\n' )
            filename = timestamp[:10]
# Bug fix: flush and close the last day's file as well.
if outputFile is not None:
    outputFile.close()
f.close()
|
usc-isi-i2/KarmaSpatialClustering
|
split.py
|
Python
|
apache-2.0
| 1,857
|
from subprocess32 import Popen,call,PIPE
from uuid import uuid4
import os
import time
import logging
logger = logging.getLogger("FileCrypto.root.LUKS")
class CryptoLuks(object):
    """Manage a single LUKS-encrypted container file mounted at a directory.

    All operations shell out to system tools (cryptsetup, mount, umount,
    mkfs.ext4, chown/chmod, fuser) and therefore require root privileges.
    Each instance maps the container through /dev/mapper/<random uuid>.
    """
    def __init__(self,cryptfile,mountdir):
        # cryptfile: path of the container file; mountdir: mount-point directory.
        self.cryptfile = cryptfile
        self.mountdir = mountdir
        self.fuuid = uuid4().hex #The UUID used for creating the mapper device
        self.maploc = os.path.join("/dev/mapper",self.fuuid)
    def create(self,password,fsize=64,randomInit=False,owner="root"):
        """Creates a new LUKS container, and mounts it at the given mountpoint.
        Tries to undo changes if there is an error.
        Keyword Arguments:
        fsize -- the file size in megabytes (int)
        randomInit -- Whether or not to initialize created file with random bits (bool), takes longer if True.
        owner -- user (and group of the same name) given exclusive access (str)
        """
        if (os.path.exists(self.cryptfile)==False):
            if (randomInit==True):
                # Fill with random data so free space is indistinguishable from ciphertext.
                if (call(["dd","if=/dev/urandom","of="+self.cryptfile,"bs=1M","count="+str(fsize)])!=0):
                    raise IOError("Failed to create file \""+self.cryptfile+"\" (urandom init)")
            else:
                # Fast allocation without writing data.
                if (call(["fallocate","-l",str(fsize)+"M",self.cryptfile])!=0):
                    raise IOError("Failed to create file \""+self.cryptfile+"\" (fallocate)")
        else:
            raise IOError("File \""+self.cryptfile+"\" already exists!")
        if not os.path.exists(self.mountdir):
            os.mkdir(self.mountdir)
        elif (os.listdir(self.mountdir) != []):
            os.remove(self.cryptfile)
            # NOTE(review): the message interpolates cryptfile although the
            # check is about mountdir -- looks like a copy-paste slip.
            raise IOError("Mount directory \""+self.cryptfile+"\" is not empty!")
        #Format the file
        csetup = Popen(["cryptsetup","luksFormat",self.cryptfile],stdin=PIPE)
        csetup.communicate(password+"\n")
        csetup.wait()
        if (csetup.returncode != 0):
            # Roll back the file and directory created above.
            os.remove(self.cryptfile)
            os.rmdir(self.mountdir)
            raise IOError("CryptSetup luksFormat failed!")
        #Open the volume
        csetup = Popen(["cryptsetup","luksOpen",self.cryptfile,self.fuuid],stdin=PIPE)
        csetup.communicate(password+"\n")
        csetup.wait()
        if (csetup.returncode != 0):
            os.remove(self.cryptfile)
            os.rmdir(self.mountdir)
            raise IOError("CryptSetup luksOpen failed!")
        if (call(["mkfs.ext4","-j",self.maploc])!= 0):
            call(["cryptsetup","luksClose",self.fuuid])
            os.remove(self.cryptfile)
            os.rmdir(self.mountdir)
            raise IOError("mkfs.ext4 failed!")
        if (call(["mount",self.maploc,self.mountdir])!= 0):
            call(["cryptsetup","luksClose",self.fuuid])
            os.remove(self.cryptfile)
            os.rmdir(self.mountdir)
            raise IOError("mount failed!")
        #Allows the owner to access the directory and file - since we are currently root
        if (owner!="root"):
            call(["chown",owner+":"+owner,self.mountdir])
            call(["chown",owner+":"+owner,self.cryptfile])
        #For security, only owner can even touch the directory or the mountfile
        call(["chmod","700",self.mountdir])
        call(["chmod","700",self.cryptfile])
    def open(self,password, owner = None):
        """Opens the LUKS file and mounts it"""
        csetup = Popen(["cryptsetup","luksOpen",self.cryptfile,self.fuuid],stdin=PIPE)
        csetup.communicate(password+"\n")
        csetup.wait()
        if (csetup.returncode != 0):
            raise IOError("luksOpen failed")
        #mount it!
        if ( call(["mount",self.maploc,self.mountdir])!= 0):
            call(["cryptsetup","luksClose",self.fuuid])
            raise IOError("mount failed")
        #If we have an owner, make sure that the correct user can access the files, and
        # that this user is the ONLY user that can access these files
        if (owner is not None and owner!="root"):
            call(["chown","-R",owner+":"+owner,self.mountdir])
            call(["chmod","-R","700",self.mountdir])
    def close(self,timeout=10):
        """Unmounts and closes the LUKS file

        Retries the unmount once per second while processes still hold the
        mount; after *timeout* retries it falls back to panic().
        """
        if (call(["umount",self.mountdir])!=0):
            if (timeout > 0):
                logger.warn("close waiting on disk IO. Timeout left: %i",timeout)
                time.sleep(1) #Sleep for one second and attempt closing again
                self.close(timeout-1)
            else:
                logger.warn("Close: timeout expired - running panic")
                self.panic()
        else:
            call(["cryptsetup","luksClose",self.fuuid])
    def suspend(self):
        """Calls luksSuspend. Stops all IO, and purges keys from kernel. Note that it does not purge the password from this class, so suspend will not guarantee that password is not in memory."""
        call(["cryptsetup","luksSuspend",self.fuuid])
    def resume(self,password):
        """Resumes previously suspended container"""
        csetup = Popen(["cryptsetup","luksResume",self.fuuid],stdin=PIPE)
        csetup.communicate(password+"\n")
        csetup.wait()
        if (csetup.returncode != 0):
            raise IOError("luksResume failed!")
    def panic(self):
        """Immediately suspends IO to the volume and attempts closing it. Closing is dependent on processes, while suspend is immediate. Can cause loss of data - use only in emergencies."""
        # fuser -km kills every process using the mount point first.
        call(["fuser","-km",self.mountdir])
        self.close()
if (__name__=="__main__"):
    # Interactive smoke test (Python 2): exercises the full container life
    # cycle and pauses at each step so the operator can inspect the mount.
    print "The test will create a container, then suspend, resume, and close it. When it prints 'ok', it is waiting for input"
    import time
    passwd = "testingTesting"
    c = CryptoLuks(os.path.join(os.getcwd(),"test.luks"),os.path.join(os.getcwd(),"testingMe"))
    t = time.time()
    c.create(passwd,owner="daniel")
    print "create:",time.time()-t
    raw_input("ok")
    t= time.time()
    c.close()
    print "close:",time.time()-t
    raw_input("ok")
    c = CryptoLuks(os.path.join(os.getcwd(),"test.luks"),os.path.join(os.getcwd(),"testingMe"))
    t = time.time()
    c.open(passwd)
    print "mount:",time.time()-t
    raw_input("ok")
    t= time.time()
    c.suspend()
    print "suspend:",time.time()-t
    raw_input("ok")
    t= time.time()
    c.resume(passwd)
    print "resume:",time.time()-t
    raw_input("ok")
    t= time.time()
    c.close()
    print "close:",time.time()-t
    raw_input("done")
    # Clean up the artifacts the test created.
    os.rmdir(os.path.join(os.getcwd(),"testingMe"))
    os.remove(os.path.join(os.getcwd(),"test.luks"))
|
dkumor/meDB
|
connector/crypto/disk/rootprocess/luks/luks.py
|
Python
|
mit
| 6,510
|
# _*_ coding:utf-8 _*_
# Filename:ClientUI.py
# Python在线聊天客户端
from socket import *
from ftplib import FTP
import ftplib
import socket
import thread
import time
import sys
import codecs
import os
#Python 2 hack: re-expose sys.setdefaultencoding (hidden by site.py) and
#force UTF-8 so mixed str/unicode operations don't raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding( "utf-8" )
class ClientMessage():
    """UDP chat client (Python 2): exchanges '##'-delimited protocol messages
    with the server and transfers files over FTP.

    NOTE(review): several methods reference self.chatText / self.inputText
    (Tkinter widgets) and the Tkinter module, neither of which is defined or
    imported in this file -- presumably supplied by a GUI subclass; verify.
    """
    #Set the username and password
    def setUsrANDPwd(self,usr,pwd):
        self.usr=usr
        self.pwd=pwd
    #Set the peer (target) user for the chat
    def setToUsr(self,toUsr):
        self.toUsr=toUsr
        self.ChatFormTitle=toUsr
    #Set the server IP address and port
    def setLocalANDPort(self,local,port):
        self.local = local
        self.port = port
    def check_info(self):
        """Send a login probe ('0##user##pwd') and return True/False per the
        server's 'Y'/'N' reply (None on any other reply)."""
        self.buffer = 1024
        self.ADDR=(self.local,self.port)
        self.udpCliSock = socket.socket(AF_INET, SOCK_DGRAM)
        self.udpCliSock.sendto('0##'+self.usr+'##'+self.pwd,self.ADDR)
        self.serverMsg ,self.ADDR = self.udpCliSock.recvfrom(self.buffer)
        s=self.serverMsg.split('##')
        if s[0]=='Y':
            return True
        elif s[0]== 'N':
            return False
    #Receive and dispatch messages from the server (runs in its own thread)
    def receiveMessage(self):
        self.buffer = 1024
        self.ADDR=(self.local,self.port)
        self.udpCliSock = socket.socket(AF_INET, SOCK_DGRAM)
        self.udpCliSock.sendto('0##'+self.usr+'##'+self.pwd,self.ADDR)
        while True:
            #Connection established; receive server-side messages.
            #Message opcode is the first '##'-delimited field.
            self.serverMsg ,self.ADDR = self.udpCliSock.recvfrom(self.buffer)
            s=self.serverMsg.split('##')
            if s[0]=='Y':
                #self.chatText.insert(Tkinter.END,'客户端已经与服务器端建立连接......')
                return True
            elif s[0]== 'N':
                #self.chatText.insert(Tkinter.END,'客户端与服务器端建立连接失败......')
                return False
            elif s[0]=='CLOSE':
                #Account logged in elsewhere: count down 5 seconds, then exit.
                i=5
                while i>0:
                    self.chatText.insert(Tkinter.END,'你的账号在另一端登录,该客户端'+str(i)+'秒后退出......')
                    time.sleep(1)
                    i=i-1
                self.chatText.delete(Tkinter.END)
                os._exit(0)
            #Friend list
            elif s[0]=='F':
                for eachFriend in s[1:len(s)]:
                    print eachFriend
            #A friend came online
            elif s[0]=='0':
                theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                self.chatText.insert(Tkinter.END, theTime+' ' +'你的好友' + s[1]+'上线了')
            #A friend went offline
            elif s[0]=='1':
                theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                self.chatText.insert(Tkinter.END, theTime+' ' +'你的好友' + s[1]+'下线了')
            #A friend sent a chat message
            elif s[0]=='2':
                theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                self.chatText.insert(Tkinter.END, theTime +' '+s[1] +' 说:\n')
                self.chatText.insert(Tkinter.END, ' ' + s[3])
            #A friend sent a file: download it from our FTP drop folder
            elif s[0]=='3':
                filename=s[2]
                f=FTP('192.168.1.105')
                f.login('Coder', 'xianjian')
                f.cwd(self.usr)
                #cp936 encoding for Chinese file names on the FTP server
                filenameD=filename[:-1].encode("cp936")
                try:
                    f.retrbinary('RETR '+filenameD,open('..\\'+self.usr+'\\'+filenameD,'wb').write)
                except ftplib.error_perm:
                    print 'ERROR:cannot read file "%s"' %file
                self.chatText.insert(Tkinter.END,filename[:-1]+' 传输完成')
            #Incoming friend request: ask the user and send accept/decline ('5')
            elif s[0]=='4':
                agreement=raw_input(s[1]+'请求加你为好友,验证消息:'+s[3]+'你愿意加'+s[1]+'为好友吗(Y/N)')
                if agreement=='Y':
                    self.udpCliSock.sendto('5##'+s[1]+'##'+s[2]+'##Y',self.ADDR)
                elif agreement=='N':
                    self.udpCliSock.sendto('5##'+s[1]+'##'+s[2]+'##N',self.ADDR)
            #Reply to our own friend request
            elif s[0]=='5':
                if s[3]=='Y':
                    print s[2]+'接受了你的好友请求'
                elif s[3]=='N':
                    print s[2]+'拒绝了你的好友请求'
    #Send a chat message
    def sendMessage(self):
        #Get the message the user typed into the Text widget
        message = self.inputText.get('1.0',Tkinter.END)
        #Format the current time
        theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        self.chatText.insert(Tkinter.END, theTime +' 我 说:\n')
        self.chatText.insert(Tkinter.END,' ' + message + '\n')
        self.udpCliSock.sendto('2##'+self.usr+'##'+self.toUsr+'##'+message,self.ADDR);
        #Clear the message the user typed into the Text widget
        self.inputText.delete(0.0,message.__len__()-1.0)
    #Send a file: upload via FTP, then notify the peer with opcode '3'
    def sendFile(self):
        filename = self.inputText.get('1.0',Tkinter.END)
        theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        self.chatText.insert(Tkinter.END, theTime +'我' + ' 传文件:\n')
        self.chatText.insert(Tkinter.END,' ' + filename[:-1] + '\n')
        f=FTP('192.168.1.105')
        f.login('Coder', 'xianjian')
        f.cwd(self.toUsr)
        filenameU=filename[:-1].encode("cp936")
        try:
            #f.retrbinary('RETR '+filename,open(filename,'wb').write)
            #Upload the file into the peer's folder on the server
            f.storbinary('STOR ' + filenameU, open('..\\'+self.usr+'\\'+filenameU, 'rb'))
        except ftplib.error_perm:
            print 'ERROR:cannot read file "%s"' %file
        self.udpCliSock.sendto('3##'+self.usr+'##'+self.toUsr+'##'+filename,self.ADDR);
    #Add a friend: input is "targetUser##verificationMessage"
    def addFriends(self):
        message= self.inputText.get('1.0',Tkinter.END)
        s=message.split('##')
        self.udpCliSock.sendto('4##'+self.usr+'##'+s[0]+'##'+s[1],self.ADDR);
    #Close the message window and exit
    def close(self):
        self.udpCliSock.sendto('1##'+self.usr,self.ADDR);
        sys.exit()
    #Start a thread to receive messages from the server
    def startNewThread(self):
        thread.start_new_thread(self.receiveMessage,())
def main():
    """Ad-hoc smoke driver with a hard-coded server address and test accounts."""
    client = ClientMessage()
    client.setLocalANDPort('192.168.1.105', 8808)
    client.setUsrANDPwd('12073127', '12073127')
    client.setToUsr('12073128')
    client.startNewThread()
if __name__=='__main__':
    main()
|
gzxultra/IM_programming
|
class_ClientMessage.py
|
Python
|
gpl-2.0
| 6,373
|
import os
import six
from aleph.util import checksum
class Archive(object):
    """Abstract base class for blob archives addressed by content hash.

    Provides the content-addressed path layout; concrete backends override
    the archive_file/load_file/cleanup_file/generate_url hooks.
    """
    def _get_file_path(self, meta):
        # Shard the content hash into a three-level directory fan-out:
        # ab/cd/ef/<full-hash>/<file name>.
        ch = meta.content_hash
        if ch is None:
            raise ValueError("No content hash available.")
        path = os.path.join(ch[:2], ch[2:4], ch[4:6], ch)
        # Fall back to 'data' (plus the extension, if known) when the
        # metadata carries no original file name.
        file_name = 'data'
        if meta.file_name is not None:
            file_name = meta.file_name
        else:
            if meta.extension is not None:
                file_name = '%s.%s' % (file_name, meta.extension)
        return os.path.join(six.text_type(path), six.text_type(file_name))
    def _update_metadata(self, filename, meta):
        # Refresh the metadata's content hash from the file on disk.
        meta.content_hash = checksum(filename)
        return meta
    def upgrade(self):
        """Run maintenance on the store."""
        pass
    def archive_file(self, filename, meta, move=False):
        """Import the given file into the archive.
        Return an updated metadata object. If ``move`` is given, the
        original file will not exist afterwards.
        """
        pass
    def load_file(self, meta):
        """Backend hook for retrieving an archived file; no-op in the base class."""
        pass
    def cleanup_file(self, meta):
        """Backend hook for releasing load_file resources; no-op in the base class."""
        pass
    def generate_url(self, meta):
        # Backends that can serve blobs directly return a URL; default: none.
        return
|
smmbllsm/aleph
|
aleph/archive/archive.py
|
Python
|
mit
| 1,182
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for type-dependent behavior used in autograph-generated code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.utils.type_check import is_tensor
from tensorflow.python.ops import control_flow_ops
def dynamic_is(left, right):
  """Functional form of the `is` identity test, for generated code."""
  # TODO(alexbw) if we're sure we should leave 'is' in place,
  # then change the semantics in converters/logical_expressions.py
  identical = left is right
  return identical
def dynamic_is_not(left, right):
  """Functional form of the `is not` identity test, for generated code."""
  different = left is not right
  return different
def run_cond(condition, true_fn, false_fn):
  """Type-dependent functional conditional.

  Dispatches on the runtime type of *condition*: plain-Python values go
  through py_cond, while Tensors are handed to tf.cond.

  Args:
    condition: A Tensor or Python bool.
    true_fn: A Python callable implementing the true branch of the conditional.
    false_fn: A Python callable implementing the false branch of the
        conditional.

  Returns:
    The result of calling the appropriate branch; tf.cond is used when the
    condition is a Tensor, a standard Python conditional otherwise.
  """
  if not is_tensor(condition):
    return py_cond(condition, true_fn, false_fn)
  return control_flow_ops.cond(condition, true_fn, false_fn)
def py_cond(condition, true_fn, false_fn):
  """Evaluate a plain-Python conditional by calling one of two branches.

  Branch callables return tuples by contract; a single-element tuple is
  collapsed to its sole element before being returned.
  """
  branch = true_fn if condition else false_fn
  results = branch()
  return results[0] if len(results) == 1 else results
|
AnishShah/tensorflow
|
tensorflow/python/autograph/utils/multiple_dispatch.py
|
Python
|
apache-2.0
| 2,263
|
('\xef\xbb\xbf')
#Coded for Python 3.4.3
from tkinter import *
import re, linecache, os, sys
import xlwt, xlrd
#Created by Myles Morrone
#Ver(2.0) 12:05 8/3/2015
#The xlwt module is available at: https://pypi.python.org/pypi/xlwt
#The xlrd module is available at: https://pypi.python.org/pypi/xlrd
#Current build info:
#1. Automatic connection to the OneWiki does not work. (Cannot get mwclient to work locally)
#2. Lines with unreadable characters WILL BE IGNORED, be sure to review these manually!
#3. Additional files and MD5s, found in the File Analysis section, will NOT be automatically related.
'''
Instructions
1. Create a new text file named "ToBeParsed.txt" in same folder as this file
2. Go to desired Wiki page and click the "Edit" tab
3. Copy and paste ALL the information from the edit box into your text file (Ctrl+A, Ctrl+C, Ctrl+V)
4. Run this program (F5 in IDLE or double click if saved as a .pyw file)
5. Input Ticket Number, SP ID, comments, and finally click "Start"
'''
#JtR will parse data, create necessary reference text files, input into excel, open excel file for viewing
# and clean up reference files. Once reviewing has been finished, save excel file in .csv format. To
# begin another CRIT report, you must restart JtR (completely flushing memory).
#Interactive report metadata gathered up front (blocks on stdin at import).
RT = input("What is the Ticket Number? ")
SP = input("What is the SP ID Number? ")
COM = input("Comments? ")
#Global Vars
TBP = "Files/ToBeParsed.txt"   #raw wiki-edit text to be parsed
Results = "Files/Results.txt"  #parsed output destination
LineAgg = "Files/LineAgg.txt"  #line-aggregation scratch file
#Each extension is suffixed with ':' to match "name.ext:MD5"-style tokens.
extList = [ ".doc:", ".docx:", ".log:", ".msg:", ".odt:",
            ".rtf:", ".tex:", ".txt:", ".csv:", ".dat:",
            ".pps:", ".ppt:", ".vcf:", ".xml:", ".bmp:",
            ".gif:", ".jpg:", ".png:", ".tif:", ".pct:",
            ".pdf:", ".xlr:", ".xls:", ".xlsx:", ".db:",
            ".dbf:", ".mdb:", ".sql:", ".exe:", ".jar:",
            ".pif:", ".vb:", ".vbs:", ".asp:", ".cfm:",
            ".css:", ".htm:", ".html:", ".js:", ".jsp:",
            ".php:", ".xhtml:", ".cfg:", ".ini:", ".7z:",
            ".deb:", ".gz:", ".pkg:", ".rar:", ".rpm:",
            ".tar.gz:", ".zip:", ".zipx:",".exifdata:"] #extensions to split file names and MD5s
domList = [ ".com/", ".org/", ".net/", ".int/", ".edu/",
            ".gov/", ".mil/", ".arpa/"] #top-level domain list
def JtR(): #Info parser - Jack the Ripper
    """Parse ToBeParsed.txt line by line and write ``-delimited rows
    (indicator``type``role``relationship) to Results.txt.

    Section-header lines in the wiki dump switch the Type/Role values
    (XLtype/XLrole) applied to the lines that follow.  Parsing is only
    active between <pre> and </pre> markers (the P flag).
    """
    print("-----JtR START-----")
    maxL = sum(1 for line in open(TBP)) #Counts total lines in ToBeParsed.txt
    print(str(maxL) + " lines to be parsed by JtR")
    L = 1 #Current line, for iterating
    LP = 0 #Lines printed, mainly for debugging purposes
    P = False #Print boolean
    XLtype = "" #Input for Type column in excel file
    XLrole = "" #Input for Role column in excel file
    with open(Results, "w") as fileW:
        while L < maxL+1:
            try:
                line = linecache.getline(TBP, L) #Pull line #"L" from cached source
                line = line[:-1] #Remove newline character (\n) from end of line
                if re.search(r"�", line): #Obliterate these annoying little characters
                    line = "" #!!!!NOTICE!!!! These lines are ANNIHILATED, manually input them if you want them
                if re.search(r"Notable", line): #Has parsed too far, halt parsing
                    break
                if re.match(r"X-Mailer", line): #Section Headers, automatically changes inputs as per type
                    XLtype = "Email Header - X-Mailer"
                    XLrole = ""
                if re.match(r"Sender domain", line):
                    XLtype = "URI - Domain Name"
                    XLrole = ""
                if re.match(r"Sender IP", line):
                    XLtype = "Address - ipv4-addr"
                    XLrole = "Sender_IP"
                if re.match(r"Sender mail", line):
                    XLtype = "Address - e-mail"
                    XLrole = "Sender_Address"
                if re.match(r"Subject", line):
                    XLtype = "Email Header - Subject"
                    XLrole = ""
                if re.match(r"Attachment names", line):
                    XLtype = "Hash - MD5"
                    XLrole = "Attachment"
                if re.match(r"Message body links", line):
                    XLtype = "URI - URL"
                    XLrole = "Embedded_Link"
                if re.match(r"Sandbox report links", line):
                    XLtype = "URI - URL"
                    XLrole = "Embedded_Link"
                if re.match(r"Other hyperlinks", line):
                    XLtype = "URI - URL"
                    XLrole = "Embedded_Link"
                if re.match(r"Downloaded files names and md5s", line):
                    XLtype = ""
                    XLrole = "Attachment"
                if re.match(r"File name", line):
                    XLtype = "File - Name"
                    XLrole = "Attachment"
                if re.match(r"File md5", line):
                    XLtype = "Hash - MD5"
                    XLrole = "Attachment"
                if re.search("</pre>", line): #Switch printing mode OFF
                    P = False
                if P == True: #Print mode is turned ON later, allows JtR to pass over useless lines in the beginning
                    if len(line)>2: #Just in case
                        if re.match(r"http", line): #Seeks hardest items to parse first, links
                            domSeg = line #Domain segment (blah.com)
                            URLSeg = line #URL segment (/blah/d/blah.html)
                            for item in domList:
                                if line.find(item) > 0:
                                    domSeg = re.sub(r"http://",r"", domSeg) #Purifies domain segment
                                    domSeg = re.sub(r"https://",r"", domSeg)
                                    domSeg = re.sub(r"www.",r"", domSeg)
                                    SL = domSeg.index("/") #Slash index, finds where domain ends
                                    domSeg = domSeg[:SL] #Slices out domain
                                    print(r"{}``{}``{}``{}".format(domSeg,"URI - Domain Name","",""), file=fileW)
                                    LP += 1
                                    print(r"{}``{}``{}``{}".format(line,XLtype,XLrole,""), file=fileW) #Prints original full line
                                    LP += 1
                                    URLSeg = re.sub(r"http://",r"", URLSeg) #Purifies URL segment
                                    URLSeg = re.sub(r"https://",r"", URLSeg)
                                    URLSeg = re.sub(r"www.",r"", URLSeg)
                                    URLSeg = re.sub(domSeg,r"", URLSeg) #Rips out domain
                                    # Emit every ancestor path of the URL as its own row.
                                    iterate = True
                                    while iterate == True:
                                        last = URLSeg.rfind("/")
                                        if last > 0:
                                            URLSeg = URLSeg[:last]
                                            print(r"{}``{}``{}``{}".format(URLSeg,XLtype,XLrole,line), file=fileW)
                                            LP += 1
                                        else:
                                            iterate = False
                        if re.search(r"@", line): #search for emails
                            line = re.sub(r'[<">]', r"", line) #remove excess/nonsense characters, inverted "s and 's for capturing "s
                            emailName = line.rsplit(None, 1)[0]
                            emailAdd = line.rsplit(None, 1)[-1]
                            print(r"{}``{}``{}``{}".format(emailAdd,"Address - e-mail","Sender_Address",""), file=fileW)
                            LP += 1
                            if emailAdd == emailName:
                                pass
                            else:
                                print(r"{}``{}``{}``{}".format(emailName,"Email Header - String","Sender_Name",emailAdd), file=fileW)
                                LP += 1
                        else:
                            ScanP = False #Scan print boolean
                            for item in extList:
                                if re.search(item, line): #Search for file extensions and seperate file name from MD5s
                                    II = line.index(":") #Item index
                                    print(r"{}``{}``{}``{}".format(line[:II],"File - Name",XLrole,line[II+1:]), file=fileW)
                                    print(r"{}``{}``{}``{}".format(line[II+1:],"Hash - MD5",XLrole,""), file=fileW)
                                    LP += 2
                                    ScanP = True #Line has been printed
                                else:
                                    pass
                            if ScanP == False: #Line was not printed, print line
                                print(r"{}``{}``{}``{}".format(line,XLtype,XLrole,""), file=fileW)
                                LP += 1
                if re.match("<pre>", line): #Switch printing mode on
                    P = True
                    if len(line) > 8:
                        print(r"{}``{}``{}``{}".format(line[5:],XLtype,XLrole,""), file=fileW) #In case of info after <pre>, print line
                        LP += 1
                if re.match("Subject:", line): #Subject line has variable parsing issues, corrected here
                    P = True
                L += 1
            except:
                # NOTE(review): bare except silently skips any line that
                # raises (by design per the header comment, but it also
                # hides real bugs) — consider logging L here.
                Cake = True
                pass
    fileW.close()
    print(str(L-1) + " lines parsed, " + str(LP) + " lines printed")
def CK(): #CopyKiller
    """Deduplicate Results.txt by indicator (first `` field).

    Unique full rows are written to LineAgg.txt; Results.txt is then
    rewritten with just the bare indicator values.  Rows whose indicator
    matches a file extension are kept even when duplicated (file name /
    MD5 pairs may legitimately repeat).
    """
    global lineList  # indicators seen so far; left global for post-run inspection
    print("-----CK START-----")
    ignoreCpy = False
    maxL = sum(1 for line in open(Results))
    print(str(maxL) + " lines to be parsed by CK")
    L = 1 #Current line
    LP = 0 #Lines printed
    CpyK = 0 #Copies killed
    lineList = [] #Line aggregation
    # NOTE(review): lineAgg is never explicitly closed; CPython flushes it
    # when the local goes out of scope, but a `with` block would be safer.
    lineAgg = open(LineAgg, "w")
    while L != maxL+1:
        line = linecache.getline(Results, L)
        if re.search(r"�", line) or re.search(r"�", line):
            line = "UNREADABLE CHARACTERS``UNR``UNR``UNR"
        sline = line[:-1].split("``") #Split line
        if sline[0] in lineList:
            # Duplicate indicator: keep it anyway if it looks like a file
            # name (contains a known extension), otherwise drop it.
            for item in extList:
                if re.search(item[:-1], sline[0]):
                    ignoreCpy = True
            if ignoreCpy == True:
                lineList.append(sline[0])
                print(r"{}".format(line[:-1]), file=lineAgg)
                LP += 1
                ignoreCpy = False
            else:
                CpyK += 1
        else:
            if re.match(r"UNREADABLE", line):
                pass
            else:
                lineList.append(sline[0])
                print(r"{}".format(line[:-1]), file=lineAgg)
                LP += 1
        L += 1
    # Rewrite Results.txt with the bare indicators only.
    fileW = open(Results, "w")
    for item in lineList:
        print(r"{}".format(item), file=fileW)
    fileW.close()
    print(str(CpyK) + " copies killed, " + str(LP) + " unique lines printed")
def export():
    """Export the deduplicated rows from LineAgg.txt to Files/<SP>.xls.

    Each LineAgg row (indicator``type``role``relationship) becomes one
    spreadsheet row; the remaining columns are filled from module-level
    constants and the RT/SP/COM values entered at startup.
    """
    print("---EXPORT START---")
    XLSname = ("Files/"+SP+".xls")
    #Formats
    header = xlwt.easyxf("font: name Calibri, bold on")
    inputs = xlwt.easyxf("font: name Calibri")
    #Workbook creation
    wb = xlwt.Workbook()
    ws = wb.add_sheet("Indicators")
    #Information Input
    #Headers
    # (Row,Col,Data,Format)
    ws.write(0, 0, "Indicator", header)
    ws.write(0, 1, "Type", header)
    ws.write(0, 2, "Comment", header)
    ws.write(0, 3, "Role", header)
    ws.write(0, 4, "Phase", header)
    ws.write(0, 5, "Campaign", header)
    ws.write(0, 6, "Campaign-Description", header)
    ws.write(0, 7, "Campaign-Confidence", header)
    ws.write(0, 8, "Confidence", header)
    ws.write(0, 9, "Impact", header)
    ws.write(0, 10, "Activity-Start", header)
    ws.write(0, 11, "Activity-End", header)
    ws.write(0, 12, "Activity-Description", header)
    ws.write(0, 13, "Bucket", header)
    ws.write(0, 14, "Bucket 1", header)
    ws.write(0, 15, "Relationship-Type", header)
    ws.write(0, 16, "Relationship", header)
    ws.write(0, 17, "Status", header)
    ws.write(0, 18, "RT Ticket", header)
    ws.write(0, 19, "Source", header)
    ws.write(0, 20, "Reference", header)
    #Indicators
    #Static indicator attributes
    Comment = COM
    Phase = "Delivery"
    Campaign = "zzUnknown"
    Campaign_Description = ""
    Campaign_Confidence = "medium"
    Confidence = "medium"
    Impact = "low"
    Activity_Start = ""
    Activity_End = ""
    Activity_Description = ""
    Bucket = "3000.0-Phishing"
    Bucket_1 = ""
    Relationship_Type = "Related_To"
    Status = "Analyzed"
    RT_Ticket = RT
    Source = "GE IA Intelligence"
    Reference = "https://imweb.corporate.ge.com/wiki/index.php/Category:"+SP
    #Dynamic indicator attributes
    maxL = sum(1 for line in open(LineAgg))
    print(str(maxL) + " lines to be exported")
    L = 1 #Current line
    LP = 0 #Lines printed (unused here; kept for symmetry with JtR/CK)
    # BUGFIX: this while-loop was previously wrapped in a redundant
    # "for line in open(LineAgg):" which leaked an open file handle and
    # did no useful work — the while-loop already consumes every line
    # itself through linecache.
    while L != maxL+1: #Iterate through lines in LineAgg text
        line = linecache.getline(LineAgg, L)
        line = line[:-1].split("``")
        Indicator = line[0]
        Type = line[1]
        Role = line[2]
        Relationship = line[3]
        #Begin printing attributes to file
        ws.write(L, 0, Indicator, inputs)
        ws.write(L, 1, Type, inputs)
        ws.write(L, 2, Comment, inputs)
        ws.write(L, 3, Role, inputs)
        ws.write(L, 4, Phase, inputs)
        ws.write(L, 5, Campaign, inputs)
        ws.write(L, 6, Campaign_Description, inputs)
        ws.write(L, 7, Campaign_Confidence, inputs)
        ws.write(L, 8, Confidence, inputs)
        ws.write(L, 9, Impact, inputs)
        ws.write(L, 10, Activity_Start, inputs)
        ws.write(L, 11, Activity_End, inputs)
        ws.write(L, 12, Activity_Description, inputs)
        ws.write(L, 13, Bucket, inputs)
        ws.write(L, 14, Bucket_1, inputs)
        ws.write(L, 15, Relationship_Type, inputs)
        ws.write(L, 16, Relationship, inputs)
        ws.write(L, 17, Status, inputs)
        ws.write(L, 18, RT_Ticket, inputs)
        ws.write(L, 19, Source, inputs)
        ws.write(L, 20, Reference, inputs)
        L += 1
    print(str(L-1)+" lines exported to "+XLSname)
    wb.save(XLSname)
    print("Ticket #"+RT+" successfully saved to "+XLSname)
def cleanup(): #Remove reference files
    """Delete the intermediate files produced by JtR() and CK()."""
    for tmp_path in ("Files/Results.txt", "Files/LineAgg.txt"):
        os.remove(tmp_path)
def start(): #Run program, called from Start button on GUI
    """Run the full pipeline: parse, dedupe, export to .xls, clean up."""
    JtR()
    CK()
    export()
    cleanup()

start()
|
pickpocket689/Jack-the-Ripper
|
JtR_no_GUI.py
|
Python
|
mit
| 14,782
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-07-28 15:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds an optional per-podcast override for the owner e-mail address
    # (nullable CharField, so existing rows need no data migration).

    dependencies = [
        ('podcasts', '0053_auto_20180715_0432'),
    ]

    operations = [
        migrations.AddField(
            model_name='podcast',
            name='owner_email_override',
            field=models.CharField(blank=True, max_length=256, null=True),
        ),
    ]
|
Pinecast/pinecast
|
podcasts/migrations/0054_podcast_owner_email_override.py
|
Python
|
apache-2.0
| 487
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import argparse
import gettext
from formbar.config import Config, parse
def _(message):
    """Translate *message* via gettext and return a UTF-8 byte string.

    The empty string is passed through untouched (gettext would return
    the catalog metadata for it).  Python 2 only: unicode results are
    encoded to UTF-8.
    """
    if message == "":
        return ""
    translated = gettext.gettext(message)
    if isinstance(translated, unicode):
        translated = translated.encode("UTF-8")
    return translated
def _get_config(config):
    """Parse an opened formbar XML configuration file into a Config."""
    return Config(parse(config.read()))
def reindent(s, numSpaces=3):
    """
    Re-indent a multi-line string by a number of spaces (used for tables)
    """
    pad = " " * numSpaces
    return "\n".join(pad + line for line in s.split("\n"))
def render_page(element):
    """Render a page element as an RST chapter heading (overlined '*')."""
    label = _(element.attrib.get("label"))
    bar = "*" * len(label)
    return "\n".join([bar, label, bar])
def render_section(element):
    """Render a section element as an RST heading underlined with '='."""
    label = _(element.attrib.get("label"))
    return "\n".join([label, "=" * len(label)])
def render_subsection(element):
    """Render a subsection element as an RST heading underlined with '-'."""
    label = _(element.attrib.get("label"))
    return "\n".join([label, "-" * len(label)])
def _render_label(element):
    """Render the element's label attribute as an RST ':Label:' line."""
    key = _('Label')
    value = _(element.attrib.get("label"))
    try:
        # Python 2: normalise unicode labels to UTF-8 byte strings.
        if value and isinstance(value, unicode):
            value = value.encode("UTF-8")
    except:
        # Broad except kept from the original: any encoding failure is
        # reported inline rather than aborting the whole document.
        value = "ERROR: Could not convert Label"
    return ":{key}: {value}".format(key=key, value=value)
def _render_required(element):
    """Render the field's required/desired state as ':Pflichtstatus:'.

    NOTE(review): `required`/`desired` are raw XML attribute strings, so
    a literal "false" value is still truthy here — confirm the configs
    omit the attribute rather than setting it to "false".
    """
    key = _('Pflichtstatus')
    required = _(element.attrib.get("required"))
    desired = _(element.attrib.get("desired"))
    if not required and not desired:
        value = ""
    elif required:
        value = "Pflichtfeld"
    else:
        value = "Forderfeld"
    try:
        # Python 2: normalise unicode to UTF-8 byte strings.
        if value and isinstance(value, unicode):
            value = value.encode("UTF-8")
    except:
        value = "ERROR: Could not convert Label"
    return ":{key}: {value}".format(key=key, value=value)
def _render_name(element):
    """Render the element's name attribute as an RST ':Name:' line."""
    name = element.attrib.get("name")
    if name:
        name = name.encode("UTF-8")  # Python 2 byte string
    return ":{key}: {value}".format(key=_('Name'), value=name)
def _render_type(element):
    """Render the field's datatype as an RST ':Type:' line.

    Fields using a "listing" or "link" renderer are reported as type
    "relation": there is no relation datatype yet, but those renderers
    strongly indicate the field is one.
    """
    datatype = element.attrib.get("type", "string")
    if _get_renderer(element) in ("listing", "link"):
        datatype = "relation"
    if datatype:
        datatype = datatype.encode("UTF-8")  # Python 2 byte string
    return ":{key}: {value}".format(key=_('Type'), value=datatype)
def _render_id(element):
    """Render the element's id attribute as an RST ':ID:' line."""
    element_id = element.attrib.get("id")
    if element_id:
        element_id = element_id.encode("UTF-8")  # Python 2 byte string
    return ":{key}: {value}".format(key=_('ID'), value=element_id)
def _render_help(element):
    """Render the element's <help> text as an RST ':Help:' line.

    Returns "" when the element has no help text.
    """
    help_node = element.find(".//help")
    if help_node is None:
        return ""
    text = _(help_node.text.encode("UTF-8"))
    return ":{key}: {value}".format(key=_('Help'), value=text)
def _get_renderer(element):
    """Return the element's renderer type, defaulting to "text"."""
    renderer_node = element.find("renderer")
    if renderer_node is None:
        return "text"
    return renderer_node.attrib.get("type")
def _render_renderer(element):
    """Render the element's renderer type as an RST ':Renderer:' line."""
    renderer = _get_renderer(element)
    if renderer:
        renderer = renderer.encode("UTF-8")  # Python 2 byte string
    return ":{key}: {value}".format(key=_('Renderer'), value=renderer)
def _render_rst_table(options):
    """Render an options mapping (stored value -> display label) as a
    two-column RST grid table headed 'Value' / 'Option'."""
    out = []
    out.append("")
    # Determine length of bars.  BUGFIX: the widths must include the
    # header cell actually printed in each column — 'Value' heads the key
    # column and 'Option' heads the label column; the original appended
    # them the other way round, which could make the '=' bars shorter
    # than the headers.  list() also keeps this working on Python 3.
    keys = list(options.keys())
    keys.append(_('Value'))
    values = list(options.values())
    values.append(_('Option'))
    mln = len(max(keys, key=len))
    mlv = len(max(values, key=len))
    out.append("%s %s" % (mln*"=", mlv*"="))
    out.append("%s %s" % (_('Value').ljust(mln), _('Option').ljust(mlv)))
    out.append("%s %s" % (mln*"=", mlv*"="))
    for k in sorted(options):
        value = options[k]
        k = k or "NULL"  # empty/None keys are shown as NULL
        name = k.encode("UTF-8").ljust(mln)
        out.append("%s %s" % (name, value))
    out.append("%s %s" % (mln*"=", mlv*"="))
    out.append("")
    return "\n".join(out)
def _render_options(element):
    """Render the element's <option> children as an indented RST table.

    Returns "" when the element defines no options.
    """
    options = {}
    for option in element.findall("options/option"):
        options[option.attrib.get('value')] = _(option.text.encode("UTF-8"))
    if not options:
        return ""
    return reindent(_render_rst_table(options))
def _render_rules(element):
    """Render the element's <rule> children as a numbered RST list,
    each followed by its metadata. Returns "" when there are no rules."""
    out = []
    rules = element.findall("rule")
    if len(rules) == 0:
        return ""
    out.append("\n**{}**\n\n".format(_("Rules")))
    for num, rule in enumerate(rules):
        key = "{0}.".format(num+1)
        value = rule.attrib.get("expr")
        msg = rule.attrib.get("msg", "")
        if value:
            value = value.encode("UTF-8")  # Python 2 byte string
        if msg:
            msg = ": {}".format(msg.encode("UTF-8"))
        out.append("{key} {value} {msg}".format(key=key, value=value, msg=msg))
        out.append("")
        out.append(reindent(render_meta(rule), 3))
    return "\n".join(out)
def render_meta(element, header=None, metatype=None):
    """Render <metadata>/<meta> entries of *element* as a numbered list.

    :header: optional bold heading emitted before the list
    :metatype: when given, restrict to meta elements of that type
        (e.g. "change" or "comment")
    Returns "" when there are no matching meta elements.
    """
    out = []
    if metatype:
        metaelements = element.findall("metadata/meta[@type='%s']" % metatype)
    else:
        metaelements = element.findall("metadata/meta")
    if len(metaelements) == 0:
        return ""
    if header:
        out.append("\n**{}**\n\n".format(header))
    for num, element in enumerate(metaelements):
        value = element.text
        date = element.attrib.get("date")
        if value:
            value = value.encode("UTF-8")  # Python 2 byte string
        out.append("{num}. {date} {value}".format(num=num+1, date=date, value=value))
    return "\n".join(out)
def render_field(element):
    """Render one entity/field element as a full RST section: index and
    rubric entries, attribute field-list, options table, help, rules,
    and change/comment metadata."""
    out = []
    title = element.attrib.get("name")
    out.append("\n.. index:: {}\n".format(title))
    out.append("\n.. rubric:: {}\n".format(title))
    out.append(_render_label(element))
    out.append(_render_name(element))
    out.append(_render_required(element))
    out.append(_render_type(element))
    options = _render_options(element)
    if options:
        out.append(options)
    out.append(_render_id(element))
    out.append(_render_renderer(element))
    help_ = _render_help(element)
    if help_:
        out.append(help_)
    rules = _render_rules(element)
    if rules:
        out.append(rules)
    changes = render_meta(element, _('Changes'), "change")
    if changes:
        out.append(changes)
    comments = render_meta(element, _('Comments'), "comment")
    if comments:
        out.append(comments)
    return "\n".join(out)
def render_spec(config, title, form):
    """Render the whole form specification as one RST document string.

    :config: parsed formbar Config
    :title: document title (becomes the top '#' heading)
    :form: name of the form to render (e.g. "update")
    """
    out = []
    out.append("#"*len(title))
    out.append(title)
    out.append("#"*len(title))
    out.append(render_meta(config._tree))
    elements = get_spec_elements(config, form)
    num_elements = len(elements)
    for num, element in enumerate(elements):
        if element.tag == "page":
            out.append(render_page(element))
        elif element.tag == "section":
            out.append(render_section(element))
        elif element.tag == "subsection":
            out.append(render_subsection(element))
        elif element.tag == "entity":
            out.append(render_field(element))
        if num+1 < num_elements:
            # Horizontal rule between top-level elements.
            out.append("\n-----\n")
    out.append("")
    # Python 2: normalise any unicode lines to UTF-8 before joining.
    outx = []
    for line in out:
        if isinstance(line, unicode):
            line = line.encode("utf8")
        outx.append(line)
    return "\n".join(outx)
def get_fields(config, node):
    """Collect the entity elements below *node*, resolving field refs.

    Page elements are skipped; <field> elements are replaced by the
    entity they reference.
    """
    elements = []
    for element in config.walk(node, {}, include_layout=True):
        if element.tag == "page":
            continue
        if element.tag == "field":
            element = config._parent.get_element('entity',
                                                 element.attrib.get('ref'))
        elements.append(element)
    return elements
def get_spec_elements(config, form="update"):
    """Return the pages and fields of *form* in document order.

    Forms without pages fall back to the fields of the whole form tree.
    """
    form_config = config.get_form(form)
    elements = []
    pages = form_config.get_pages()
    if pages:
        for page in pages:
            elements.append(page)
            elements.extend(get_fields(form_config, page))
    else:
        elements.extend(get_fields(form_config, form_config._tree))
    return elements
def main(args):
    """Entry point: render the configured form as RST to stdout."""
    config = _get_config(args.config)
    title = args.title or os.path.basename(args.config.name)
    print render_spec(config, title, args.form)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Convert a Formbar XML specification ' +
                    'file into various formats.')
    parser.add_argument('config', metavar='config',
                        type=argparse.FileType('rU'), help='A form configuration file')
    parser.add_argument('--title', action='store',
                        help="Choose title of the topmost rst heading (default: The filename)")
    parser.add_argument('--form', action='store', default='update',
                        help="Choose which form to parse (default: 'update')")
    parser.add_argument('--translation', action='store',
                        help="Path to translation MO file")
    args = parser.parse_args()
    if args.translation:
        gettext.bindtextdomain('formspec', args.translation)
        gettext.textdomain('formspec')
    else:
        # No catalog given: replace the module-global `_` with the
        # identity function so rendering runs untranslated.
        _ = lambda x: x
    main(args)

# vim: set expandtab:
|
ringo-framework/formbar
|
contrib/formspec.py
|
Python
|
gpl-2.0
| 9,487
|
# This file is part of Heapkeeper.
#
# Heapkeeper is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# Heapkeeper is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Heapkeeper. If not, see <http://www.gnu.org/licenses/>.
# Copyright (C) 2010 Csaba Hoch
""":mod:`hkp_review` implements the "Review" plugin for Heapkeeper.
This plugin helps to set threads to "reviewed".
It defines a |hkshell| command (`r()` by default) that adds "reviewed" tag to
the given thread and commits all modifications to the post database. (It is
supposed that git is used for version controlling the post database.) The
plugin commits all modifications because reviewing a thread may include
modifying other posts. The commit message will be generated from the subject of
the root post of the thread.
The plugin also adds a "Set to reviewed" button to each thread page which calls
`r()` with that post as an argument.
The plugin can be activated in the following way::
>>> import hkp_review
>>> hkp_review.start()
"""
import hkutils
import hkshell
import hkweb
import tempfile
import os
def set_to_reviewed(prepost=None):
    """Sets the given thread to reviewed and commits all changes to the heap in
    which the thread is.

    **Argument:**

    - `prepost` (|PrePost| | ``None``) -- If all touched post are within one
      thread, this parameter may be ``None``, in which case Heapkeeper will
      find this thread.
    """
    if prepost is None:
        # No thread given: infer it from the modified posts, refusing to
        # act when they span zero or several threads.
        modified = hkshell.postdb().all().collect.is_modified()
        roots = modified.expb().collect.is_root()
        if len(roots) == 0:
            hkutils.log(
                'No modified posts. No action. Use the r(<post id>) format.')
            return
        elif len(roots) > 1:
            hkutils.log('More than one modified threads. No action.')
            return
        post = roots.pop()
        hkutils.log('Thread: ' + post.post_id_str())
    else:
        post = hkshell.p(prepost)
        if hkshell.postdb().parent(post) is not None:
            hkutils.log('The post is not a root. No action.')
            return
    # Saving the modifications
    hkshell.aTr(post, 'reviewed')
    hkshell.s()
    # Calculating the subject to be mentioned in the commit message
    subject = post.subject()
    if len(subject) > 33:
        subject = subject[:30] + '...'
    # Writing the commit message into a temp file
    f, filename = tempfile.mkstemp()
    os.write(f, 'Review: "' + subject + '" thread\n'
                '\n'
                '[review]\n')
    os.close(f)
    # Commiting the change in git; the cwd is restored even if the
    # commit fails.
    heap_dir = hkshell.postdb()._heaps[post.heap_id()]
    oldcwd = os.getcwd()
    try:
        os.chdir(heap_dir)
        hkutils.call(['git', 'commit', '-aF', filename])
    finally:
        os.chdir(oldcwd)
    os.remove(filename)
class SetPostReviewed(hkweb.AjaxServer):
    """Sets the given post to reviewed.

    Served URL: ``/set-post-reviewed``"""

    def __init__(self):
        hkweb.AjaxServer.__init__(self)

    def execute(self, args):
        # Unused argument # pylint: disable=W0613
        """Sets the post to reviewed.

        **Argument:**

        - `args` ({'post_id': |PrePostId|})

        **Returns:** {'error': str} | {}
        """
        post_id = args.get('post_id')
        post = self._postdb.post(post_id)
        if post is None:
            return {'error': 'No such post: "%s"' % (post_id,)}
        set_to_reviewed(post)
        return {}
def start(review_command_name='r'):
    """Starts the plugin.

    **Argument:**

    - `review_command_name` (str) -- The name of the |hkshell| command that
      shall be defined.
    """
    hkshell.register_cmd(review_command_name, set_to_reviewed)
    hkweb.insert_urls(['/set-post-reviewed',
                       'hkp_review.SetPostReviewed'])

    # Monkey-patch PostPageGenerator.__init__ so every post page also
    # loads the plugin's JavaScript (which adds the review button).
    old_init = hkweb.PostPageGenerator.__init__
    def __init__(self, postdb):
        # __init__ method from base class 'PostPageGenerator' is not called
        # pylint: disable=W0231
        old_init(self, postdb)
        self.options.js_files.append('plugins/review/static/js/hkp_review.js')
    hkweb.PostPageGenerator.__init__ = __init__
|
hcs42/heapkeeper-old
|
plugins/review/src/hkp_review.py
|
Python
|
gpl-3.0
| 4,623
|
__author__ = 'Dongwoo Kim'
import numpy as np
from sklearn.metrics import precision_recall_curve, auc
import path_tool
heaviside = lambda x: 1 if x >= 0 else 0
class TAMDC:
    """
    Implementation of Active Multi-relational Data Construction (AMDC) method.

    Reference:

    Kajino, H., Kishimoto, A., Botea, A., Daly, E., & Kotoulas, S. (2015). Active Learning for Multi-relational Data
    Construction. WWW 2015
    """

    def __init__(self, D, alpha_0=0.1, gamma=0.3, gamma_p=0.9, c_e=5., c_n=1.):
        """
        @param D: latent dimension of entity
        @param alpha_0: initial learning rate
        @param gamma: hyperparameter
        @param gamma_p: hyperparameter
        @param c_e: hyperparameter, impose score of positive triple to be greater than 0,
            and negative triple to be less than 0
        @param c_n: hyperparameter, importance of negative samples
        @return:
        """
        self.D = D
        self.alpha_0 = alpha_0
        self.gamma = gamma
        self.gamma_p = gamma_p
        self.c_e = c_e
        self.c_n = c_n

    def learn(self, T, p_idx, n_idx, max_iter, e_gap=1):
        """
        Stochastic gradient descent optimization for AMDC

        @param T: [E x E x K] multi-dimensional array,
            tensor representation of knowledge graph
            E = number of entities
            K = number of relationships
        @param p_idx: observed index of positive triples, all indices are raveled by np.ravel_multi_index
        @param n_idx: observed index of negative triples
        @param max_iter: maximum number of iterations
        @param e_gap: evaluation gap
        @return: A, R, r_error
            A: [E x D] latent feature vector of entities
            R: [D x D x K] rotation matrix for each entity
            r_error: list of reconstruction errors at each evaluation point
        """
        E, K = T.shape[0], T.shape[2]
        np_idx = np.setdiff1d(range(np.prod(T.shape)), p_idx)  # not positive index
        nn_idx = np.setdiff1d(range(np.prod(T.shape)), n_idx)  # not negative index

        # Entity embeddings: random init, then row-normalised to unit norm.
        A = np.random.random([E, self.D]) - 0.5
        A /= np.linalg.norm(A, axis=1)[:, np.newaxis]
        R = np.zeros([self.D, self.D, K])  # rotation matrices
        for k in range(K):
            R[:, :, k] = np.identity(self.D)

        tri_indices = path_tool.tri_index(T)
        # NOTE(review): r_error is returned but never appended to — the
        # documented per-evaluation reconstruction errors are not recorded.
        r_error = list()

        it = 0
        converged = False
        learning_rate = self.alpha_0
        while not converged:
            if np.random.randint(100) % 2 == 0:
                # Positive step on a sampled path triangle vs. a broken one.
                # next_idx = np.random.randint(len(p_idx))
                # next_np_idx = np.random.randint(len(np_idx))
                #
                # i, j, k = np.unravel_index(p_idx[next_idx], T.shape)
                # i_bar, j_bar, k_bar = np.unravel_index(np_idx[next_np_idx], T.shape)
                next_idx = np.random.randint(len(tri_indices))
                # NOTE(review): the repeated j/k/i names here shadow each
                # other during tuple unpacking — confirm only i, k, a, b
                # are actually meant to be used below.
                (i,j,a), (j,k,b), (i,k,c) = tri_indices[next_idx]
                (i_bar,j_bar,a_bar), (j_bar, k_bar, b_bar), (i_bar, k_bar, c_bar) = path_tool.sample_broken_tri(T)

                # Margin indicators: I_1 ranks the true path above the broken
                # one; I_2 pushes the true path's score above gamma_p.
                I_1 = heaviside(self.gamma - np.dot(np.dot(np.dot(A[i], R[:,:,b]), R[:,:,a]), A[k])
                                + np.dot(np.dot(np.dot(A[i_bar], R[:,:,b_bar]), R[:,:,a_bar]), A[k_bar]))
                I_2 = heaviside(self.gamma_p - np.dot(np.dot(np.dot(A[i], R[:,:,b]), R[:,:,a]), A[k]))
                # I_1 = heaviside(self.gamma - np.dot(np.dot(A[i], R[:, :, k]), A[j])
                #                 + np.dot(np.dot(A[i_bar], R[:, :, k_bar]), A[j_bar]))
                # I_2 = heaviside(self.gamma_p - np.dot(np.dot(A[i], R[:, :, k]), A[j]))

                # updating parameters
                if I_1 != 0 or I_2 != 0:
                    a_i, a_k, r_a, r_b = A[i].copy(), A[k].copy(), R[:, :, a].copy(), R[:,:,b].copy()
                    A[i] -= learning_rate * (-(I_1 + I_2 * self.c_e) * np.dot(np.dot(r_b,r_a), a_k))
                    A[k] -= learning_rate * (-(I_1 + I_2 * self.c_e) * np.dot(np.dot(r_b,r_a).T, a_i))
                    R[:, :, a] -= learning_rate * (-(I_1 + I_2 * self.c_e) * np.outer(a_i, np.dot(r_b, a_k)))
                    R[:, :, b] -= learning_rate * (-(I_1 + I_2 * self.c_e) * np.outer(np.dot(r_a.T, a_i), a_k))
                    # Project back: unit-norm embeddings, orthogonal rotations
                    # (via SVD: R <- U V).
                    A[i] /= np.linalg.norm(A[i], ord=2)
                    A[k] /= np.linalg.norm(A[k], ord=2)
                    U, sigma, V = np.linalg.svd(R[:, :, a])
                    R[:, :, a] = np.dot(U, V)
                    U, sigma, V = np.linalg.svd(R[:, :, b])
                    R[:, :, b] = np.dot(U, V)
                if I_1 != 0:
                    # Corresponding update for the broken (negative) triangle.
                    a_i_bar, a_k_bar, r_a_bar, r_b_bar = A[i_bar].copy(), A[k_bar].copy(), R[:, :, a_bar].copy(), R[:,:,b_bar].copy()
                    A[i_bar] -= learning_rate * (I_1 * np.dot(np.dot(r_b_bar,r_a_bar), a_k_bar))
                    A[k_bar] -= learning_rate * (I_1 * np.dot(np.dot(r_b_bar,r_a_bar).T, a_i_bar))
                    R[:, :, a_bar] -= learning_rate * (I_1 * np.outer(a_i_bar, np.dot(r_b_bar,a_k_bar)))
                    R[:, :, b_bar] -= learning_rate * (I_1 * np.outer(np.dot(r_a_bar.T,a_i_bar),a_k_bar))
                    A[i_bar] /= np.linalg.norm(A[i_bar], ord=2)
                    A[k_bar] /= np.linalg.norm(A[k_bar], ord=2)
                    U, sigma, V = np.linalg.svd(R[:, :, a_bar])
                    R[:, :, a_bar] = np.dot(U, V)
                    U, sigma, V = np.linalg.svd(R[:, :, b_bar])
                    R[:, :, b_bar] = np.dot(U, V)
            else:
                # Negative step on an observed negative triple vs. a
                # not-negative one.
                next_idx = np.random.randint(len(n_idx))
                next_nn_idx = np.random.randint(len(nn_idx))
                i, j, k = np.unravel_index(n_idx[next_idx], T.shape)
                i_bar, j_bar, k_bar = np.unravel_index(nn_idx[next_nn_idx], T.shape)

                I_3 = heaviside(self.gamma + np.dot(np.dot(A[i], R[:, :, k]), A[j])
                                - np.dot(np.dot(A[i_bar], R[:, :, k_bar]), A[j_bar]))
                I_4 = heaviside(self.gamma_p + np.dot(np.dot(A[i], R[:, :, k]), A[j]))

                if I_3 != 0 or I_4 != 0:
                    a_i, a_j, r_k = A[i].copy(), A[j].copy(), R[:, :, k].copy()
                    A[i] -= learning_rate * ((I_3 * self.c_n + I_4 * self.c_e) * np.dot(r_k, a_j))
                    A[j] -= learning_rate * ((I_3 * self.c_n + I_4 * self.c_e) * np.dot(r_k.T, a_i))
                    R[:, :, k] -= learning_rate * ((I_3 * self.c_n + I_4 * self.c_e) * np.outer(a_i, a_j))
                    A[i] /= np.linalg.norm(A[i], ord=2)
                    A[j] /= np.linalg.norm(A[j], ord=2)
                    U, sigma, V = np.linalg.svd(R[:, :, k])
                    R[:, :, k] = np.dot(U, V)
                if I_3 != 0:
                    a_i_bar, a_j_bar, r_k_bar = A[i_bar].copy(), A[j_bar].copy(), R[:, :, k_bar].copy()
                    A[i_bar] -= learning_rate * (I_3 * self.c_n * np.dot(r_k_bar, a_j_bar))
                    A[j_bar] -= learning_rate * (I_3 * self.c_n * np.dot(r_k_bar.T, a_i_bar))
                    R[:, :, k_bar] -= learning_rate * (I_3 * self.c_n * np.outer(a_i_bar, a_j_bar))
                    A[i_bar] /= np.linalg.norm(A[i_bar], ord=2)
                    A[j_bar] /= np.linalg.norm(A[j_bar], ord=2)
                    U, sigma, V = np.linalg.svd(R[:, :, k_bar])
                    R[:, :, k_bar] = np.dot(U, V)

            if it >= max_iter:
                converged = True

            if it % e_gap == 0:
                # Periodic evaluation: mean per-relation ROC-AUC of the
                # rescaled reconstruction against the (0/1) tensor.
                T_bar = self.reconstruct(A, R)
                _T = T.copy()
                _T[_T == -1] = 0
                T_bar = (T_bar + 1.) / 2.
                from sklearn.metrics import roc_auc_score
                err = 0.
                for k in range(K):
                    err += roc_auc_score(_T[:, :, k].flatten(), T_bar[:, :, k].flatten())
                err /= float(K)
                print('Iter %d, ROC-AUC: %.5f' % (it, err))

            it += 1
            # Decaying learning-rate schedule: alpha_0 / sqrt(iteration).
            learning_rate = self.alpha_0 / np.sqrt(it)

        return A, R, r_error

    def reconstruct(self, A, R):
        """
        Reconstruct knowledge graph from latent representations of entities and rotation matrices

        @param A: [E x D] multi-dimensional array, latent representation of entity
        @param R: [D x D x K] multi-dimensional array, rotation matrix for each relation
        @return: [E x E x K] reconstructed knowledge graph
        """
        T = np.zeros((A.shape[0], A.shape[0], R.shape[2]))
        for i in range(R.shape[2]):
            T[:, :, i] = np.dot(np.dot(A, R[:, :, i]), A.T)
        return T

    def test(self, T, A, R, test_idx):
        # Threshold the reconstruction at 0 into {-1, 1} and report the
        # area under the precision-recall curve on the held-out indices.
        T_bar = self.reconstruct(A, R)
        T_bar[T_bar > 0] = 1
        T_bar[T_bar <= 0] = -1
        idx = np.unravel_index(test_idx, T.shape)
        prec, recall, _ = precision_recall_curve(T[idx], T_bar[idx])
        return auc(recall, prec)
def test():
    """
    Test with Kinship dataset
    Use all positive triples and negative triples as a training set
    See how the reconstruction error is reduced during training
    """
    from scipy.io.matlab import loadmat
    mat = loadmat('../data/alyawarradata.mat')
    T = np.array(mat['Rs'], np.float32)
    T[T == 0] = -1  # set negative value to -1
    E, K = T.shape[0], T.shape[2]
    max_iter = E * E * K * 10
    latent_dimension = 10
    p_idx = np.ravel_multi_index((T == 1).nonzero(), T.shape)  # raveled positive index
    n_idx = np.ravel_multi_index((T == -1).nonzero(), T.shape)  # raveled negative index

    model = TAMDC(latent_dimension)
    model.learn(T, p_idx, n_idx, max_iter, e_gap=10000)

if __name__ == '__main__':
    test()
|
chubbymaggie/almc
|
amdc/tri_model.py
|
Python
|
gpl-2.0
| 9,709
|
#!/usr/bin/env python
"""
CsPython Tutorial Example 1
A pure-Python script to show the use of Crystal Space.
To use this, ensure that your PYTHONPATH, CRYSTAL, and LD_LIBRARY_PATH
(or DYLD_LIBRARY_PATH for MacOS/X; or PATH for Windows) variables are set
appropriately, and then run the script with the command:
python scripts/python/tutorial1.py
This performs the same features as the C++ tutorial1.
It creates a room and allows movement with the arrow keys, page-up, and
page-down.
===========================================================================
There are two ways to use the CsPython module.
Either as a plugin within CS (pysimp),
or as a pure Python module (this example).
This is performs a function similar to the CS tutorial 1, rewritten in Python.
Please refer to the CS Tutorial 1 in the documentation
for detail on how the C++ version works.
"""
import sys, time, traceback
try: # get in CS
from cspace import *
except:
print "WARNING: Failed to import module cspace"
traceback.print_exc()
sys.exit(1) # die!!
DEBUG = 0
def CreateRoom (matname):
    """Load the brick texture and build the tesselated wall mesh for the
    sector named "room".

    NOTE(review): the matname argument is immediately overwritten with
    'mystone' below, so the parameter is effectively ignored.
    """
    if DEBUG: print 'Start creating polygons from Python script...'
    if DEBUG: print 'object_reg=',object_reg
    if DEBUG: print 'dir(object_reg)=',dir(object_reg)
    engine = object_reg.Get(iEngine)
    if DEBUG: print 'engine=',engine
    vc = object_reg.Get(iVirtualClock)
    if DEBUG: print 'vc=',vc
    loader = object_reg.Get(iLoader)
    if DEBUG: print 'loader=',loader
    matname = 'mystone'
    loader.LoadTexture (matname, "/lib/stdtex/bricks.jpg")
    tm = engine.GetMaterialList().FindByName(matname)
    room = engine.GetSectors().FindByName("room")
    # Inward-facing tesselated box forms the walls of the room.
    mapper = DensityTextureMapper(0.3)
    box = TesselatedBox(csVector3(-5, 0, -5), csVector3(5, 20, 5))
    box.SetLevel(3)
    box.SetMapper(mapper)
    box.SetFlags(Primitives.CS_PRIMBOX_INSIDE)
    walls = GeneralMeshBuilder.CreateFactoryAndMesh (engine, room, "walls", \
        "walls_factory", box)
    walls.GetMeshObject().SetMaterialWrapper(tm)
    if DEBUG: print 'Finished!'
def SetupFrame ():
    """Per-frame update: apply keyboard-driven camera motion, then render the view.

    Reads module-level globals set up by the init script: vc (clock),
    kbd (keyboard driver), view (camera/clipper), myG3D (renderer), engine.
    """
    if DEBUG: print 'SetupFrame called'
    elapsed_time = vc.GetElapsedTicks()
    current_time = vc.GetCurrentTicks()  # NOTE(review): unused here -- kept from the C++ tutorial?
    # Now rotate the camera according to keyboard state
    # Speed scales with frame time so motion is framerate-independent.
    speed = (elapsed_time / 1000.) * (0.03 * 20);
    if kbd.GetKeyState(CSKEY_RIGHT):
        view.GetCamera().GetTransform().RotateThis(CS_VEC_ROT_RIGHT, speed)
    if kbd.GetKeyState(CSKEY_LEFT):
        view.GetCamera().GetTransform().RotateThis(CS_VEC_ROT_LEFT, speed)
    if kbd.GetKeyState(CSKEY_PGUP):
        view.GetCamera().GetTransform().RotateThis(CS_VEC_TILT_UP, speed)
    if kbd.GetKeyState(CSKEY_PGDN):
        view.GetCamera().GetTransform().RotateThis(CS_VEC_TILT_DOWN, speed)
    if kbd.GetKeyState(CSKEY_UP):
        view.GetCamera().Move(CS_VEC_FORWARD * 4 * speed)
    if kbd.GetKeyState(CSKEY_DOWN):
        view.GetCamera().Move(CS_VEC_BACKWARD * 4 * speed)
    # Tell 3D driver we're going to display 3D things.
    if not myG3D.BeginDraw(engine.GetBeginDrawFlags() | CSDRAW_3DGRAPHICS):
        sys.exit(1)
    if view:
        view.Draw()
    if DEBUG: print 'SetupFrame done'
def FinishFrame ():
    """Finish drawing the current frame and present it (None -> whole screen area)."""
    if DEBUG: print 'FinishFrame called'
    myG3D.FinishDraw()
    myG3D.Print(None)
    if DEBUG: print 'FinishFrame done'
def HandleEvent (ev):
    """Handle a single input event; quit on Escape.

    Returns 1 when the event was consumed, 0 otherwise (CS convention).
    """
    if DEBUG: print 'HandleEvent called'
    if ((ev.Name == KeyboardDown) and
            (csKeyEventHelper.GetCookedCode(ev) == CSKEY_ESC)):
        # Broadcast the quit event so the default run loop terminates cleanly.
        q = object_reg.Get(iEventQueue)
        if q:
            q.GetEventOutlet().Broadcast(csevQuit(object_reg))
        return 1
    return 0
def EventHandler (ev):
    """Central event dispatcher registered via csInitializer.SetupEventHandler.

    Frame events drive the render cycle; CommandLineHelp prints a stub; all
    other events are forwarded to HandleEvent. Exceptions are printed rather
    than propagated so a scripting error cannot kill the C++ run loop.
    """
    if DEBUG: print 'EventHandler called'
    if DEBUG: print '  ev=%s' % ev
    if ev.Name == Frame:
        try:
            SetupFrame()
            FinishFrame()
        except:
            traceback.print_exc()
        return 1
    elif ev.Name == CommandLineHelp:
        print 'No help today...'
        return 1
    else:
        try:
            return HandleEvent(ev)
        except:
            traceback.print_exc()
    return 0
# ---- Application start-up: this flat script mirrors the C++ tutorial1 flow. ----
object_reg = csInitializer.CreateEnvironment(sys.argv)

def Report (severity, msg):
    """Forward a message to the CrystalSpace reporter under this app's ID."""
    csReport(object_reg, severity, "crystalspace.application.python", msg)

if DEBUG: print 'Initializing application...'
if not csInitializer.SetupConfigManager(object_reg):
    Report(CS_REPORTER_SEVERITY_ERROR, "Couldn't init app!")
    sys.exit(1)
# Plugins required before the engine can be used.
plugin_requests = [
    CS_REQUEST_VFS, CS_REQUEST_OPENGL3D, CS_REQUEST_ENGINE,
    CS_REQUEST_FONTSERVER, CS_REQUEST_IMAGELOADER, CS_REQUEST_LEVELLOADER,
]
if DEBUG: print 'Requesting plugins...'
if not csInitializer.RequestPlugins(object_reg, plugin_requests):
    Report(CS_REPORTER_SEVERITY_ERROR, "Plugin requests failed!")
    sys.exit(1)
if DEBUG: print 'Setting up event handler...'
if not csInitializer.SetupEventHandler(object_reg, EventHandler):
    Report(CS_REPORTER_SEVERITY_ERROR, "Could not initialize event handler!")
    sys.exit(1)
# Get some often used event IDs
KeyboardDown = csevKeyboardDown(object_reg)
Frame = csevFrame(object_reg)
CommandLineHelp = csevCommandLineHelp(object_reg)
if DEBUG: print 'Checking if help is needed...'
if csCommandLineHelper.CheckHelp(object_reg):
    csCommandLineHelper.Help(object_reg)
    sys.exit(0)
# Resolve the plugins we will use throughout the session; each missing
# plugin is a fatal error.
if DEBUG: print 'Getting virtual clock...'
vc = object_reg.Get(iVirtualClock)
if DEBUG: print 'Getting engine...'
engine = object_reg.Get(iEngine)
if not engine:
    Report(CS_REPORTER_SEVERITY_ERROR, "No iEngine plugin!")
    sys.exit(1)
if DEBUG: print 'Getting 3D graphics...'
myG3D = object_reg.Get(iGraphics3D)
if not myG3D:
    Report(CS_REPORTER_SEVERITY_ERROR, "No iGraphics3D loader plugin!")
    sys.exit(1)
LevelLoader = object_reg.Get(iLoader)
if not LevelLoader:
    Report(CS_REPORTER_SEVERITY_ERROR, "No iLoader plugin!")
    sys.exit(1)
kbd = object_reg.Get(iKeyboardDriver)
if not kbd:
    Report(CS_REPORTER_SEVERITY_ERROR, "No iKeyboardDriver!")
    sys.exit(1)
# Open the main system. This will open all the previously loaded plug-ins.
nw = myG3D.GetDriver2D().GetNativeWindow()
if nw:
    nw.SetTitle("Simple Crystal Space Python Application")
if not csInitializer.OpenApplication (object_reg):
    Report(CS_REPORTER_SEVERITY_ERROR, "Error opening system!")
    Cleanup()
    sys.exit(1)
# Some commercials...
Report(
    CS_REPORTER_SEVERITY_NOTIFY,
    "Simple Crystal Space Python Application version 0.1."
)
txtmgr = myG3D.GetTextureManager()  # NOTE(review): unused below -- kept from the C++ tutorial?
# Create our world.
Report(CS_REPORTER_SEVERITY_NOTIFY, "Creating world!...")
LevelLoader.LoadTexture("stone", "/lib/std/stone4.gif")
room = engine.CreateSector("room")
plugin_mgr = object_reg.Get(iPluginManager)
# The 'if 0:' branch demonstrates the alternative route of building the room
# through the script.python plugin; it is intentionally disabled in favour of
# calling CreateRoom() directly.
if 0:
    Report(CS_REPORTER_SEVERITY_NOTIFY, "Loading script.python plugin...")
    # Initialize the python plugin.
    script = CS_LOAD_PLUGIN(
        plugin_mgr, "crystalspace.script.python", iScript
    )
    if script:
        Report(CS_REPORTER_SEVERITY_NOTIFY, "Loading pysimp module...")
        # Load a python module (scripts/python/pysimp.py).
        if not script.LoadModule("pysimp"):
            sys.exit(1)
        # Set up our room.
        # Execute one method defined in pysimp.py
        # This will create the polygons in the room.
        Report (CS_REPORTER_SEVERITY_NOTIFY, "calling pysimp.CreateRoom...")
        if script.RunText ("pysimp.CreateRoom('stone')"):
            sys.exit(1)
else:
    CreateRoom('stone')
light = engine.CreateLight("", csVector3(0, 5, 0), 10, csColor(1, 0, 0), CS_LIGHT_DYNAMICTYPE_STATIC)
if DEBUG: print 'light=',light
room.GetLights().Add(light)
if DEBUG: print 'calling engine.Prepare()'
engine.Prepare()
SimpleStaticLighter.ShineLights(room, engine, 4)
Report(CS_REPORTER_SEVERITY_NOTIFY, "--------------------------------------")
# csView is a view encapsulating both a camera and a clipper.
# You don't have to use csView as you can do the same by
# manually creating a camera and a clipper but it makes things a little
# easier.
view = csView(engine, myG3D)
view.GetCamera().SetSector(room)
view.GetCamera().GetTransform().SetOrigin(csVector3(0, 2, 0))
g2d = myG3D.GetDriver2D()
view.SetRectangle(2, 2, g2d.GetWidth() - 4, g2d.GetHeight() - 4)
# Hand control to the engine's event loop; returns when a quit event fires.
csDefaultRunLoop(object_reg)
|
garinh/cs
|
scripts/python/tutorial1.py
|
Python
|
lgpl-2.1
| 8,233
|
import logging
import math
import time
from robotics.controllers.pid_controller import PIDController
from robotics.robots.factory import RobotFactory
def uni_to_diff(v, w):
    """Convert unicycle commands to differential-drive wheel speeds.

    Given linear velocity *v* and angular velocity *w*, return the
    (left, right) wheel angular velocities for a robot with wheel
    radius 0.032 and wheelbase 0.1 (same units as the inputs).
    """
    wheel_radius = 0.032
    wheel_base = 0.1
    # Standard differential-drive kinematics: each wheel carries the linear
    # component plus/minus half the rotational contribution, scaled by radius.
    left = (2.0 * v - wheel_base * w) / (2.0 * wheel_radius)
    right = (2.0 * v + wheel_base * w) / (2.0 * wheel_radius)
    return left, right
def uni_to_power(v, w):
    """Map unicycle commands (v, w) to coarse (left, right) motor power levels.

    Supports only forward (v > 0) and in-place (v == 0) motion; a negative
    *v* raises NotImplementedError.

    NOTE(review): MN (0.4) is larger than MX (0.3), which is surprising for
    names that read as min/max -- confirm the intended power levels.
    """
    MX = 0.3
    MN = 0.4
    if v == 0.0:
        # In-place turn: drive a single wheel, chosen by the sign of w
        # (w == 0.0 falls into the second branch, matching original behavior).
        return (0.0, MX) if w > 0.0 else (MX, 0.0)
    if v > 0.0:
        if w == 0.0:
            # Straight ahead: equal power on both wheels.
            return MX, MX
        # Arcing: one wheel at MX, the other at MN, chosen by turn direction.
        return (MN, MX) if w > 0.0 else (MX, MN)
    # Reverse motion is not supported by this controller.
    raise NotImplementedError
def main():
    """Drive the Aizek robot to the fixed goal (0.5, 0.5) with a bang-bang heading policy.

    Loop: read sensors, update odometry, steer toward the goal until the
    position error is below 2 cm on both axes, then stop.
    """
    # 0.2, 0.004, 0.01
    controller = PIDController(0.2, 0.005, 0.01)  # NOTE(review): created but never used below
    robot = RobotFactory.createAizekRobot()
    robot.start()
    robot.setPosition(0.0, 0.0, 0.0)
    prev_time = time.time()
    # Fixed goal coordinates (meters, presumably -- confirm against robot API).
    target_x = 0.5
    target_y = 0.5
    while True:
        ldistance, fdistance, rdistance = robot.readDistanceSensors()
        print 'Distance l: %s, f: %s, r: %s' % (ldistance, fdistance, rdistance)
        dlradians, drradians = robot.readVelocitySensors()
        robot.updatePosition(dlradians, drradians)
        curr_time = time.time()
        dt = curr_time - prev_time  # NOTE(review): dt is computed but never used
        prev_time = curr_time
        dx = target_x - robot.pos_x
        dy = target_y - robot.pos_y
        # Goal test: within 2 cm on each axis independently.
        if abs(dx) < 0.02 and abs(dy) < 0.02:
            print 'Goal reached...'
            break
        # Heading error, wrapped into (-pi, pi].
        dphi = math.atan2(dy, dx) - robot.phi
        if dphi > math.pi:
            dphi -= 2 * math.pi
        if dphi < -math.pi:
            dphi += 2 * math.pi
        print 'x: %s, y: %s, phi: %s' % (robot.pos_x, robot.pos_y, robot.phi)
        print 'dx: %s, dy: %s, dphi: %s' % (dx, dy, dphi)
        # Bang-bang steering: go straight when roughly aligned, otherwise
        # pivot by powering only one wheel.
        if abs(dphi) < 0.06 * math.pi:
            robot.setPower(0.25, 0.25)
        else:
            if dphi > 0.0:
                robot.setPower(0.0, 0.25)
            else:
                robot.setPower(0.25, 0.0)
        time.sleep(0.1)
    robot.stop()
    print 'Robot x: %s, y: %s, phi: %s' % (robot.pos_x, robot.pos_y, robot.phi)

if __name__ == '__main__':
    main()
|
asydorchuk/robotics
|
python/robotics/examples/aizek_supervisor.py
|
Python
|
mit
| 2,300
|
import yaml
import re
import numpy as np
from .._base import DReprError
from ....core.error import SimpleGaussianError, MatrixGaussianError
from ...xy import XYContainer
__all__ = ["add_error_to_container", "write_errors_to_yaml", "process_error_sources",
"MatrixYamlDumper", "MatrixYamlLoader"]
_yaml_error_section_for_axis = {0: 'x_errors',
1: 'y_errors',
None: 'errors'}
def add_error_to_container(err_type, container_obj, **kwargs):
    """Dispatch an error specification to the matching container method.

    'simple' -> ``container_obj.add_error(**kwargs)``;
    'matrix' -> ``container_obj.add_matrix_error(**kwargs)``.
    Any other *err_type* raises DReprError. Returns the (mutated) container.
    """
    # TODO: check kwargs explicitly
    if err_type not in ('simple', 'matrix'):
        raise DReprError("Unknown error type '{}'. "
                         "Valid: {}".format(err_type, ('simple', 'matrix')))
    if err_type == 'simple':
        container_obj.add_error(**kwargs)
    else:
        container_obj.add_matrix_error(**kwargs)
    return container_obj
def write_errors_to_yaml(container, yaml_doc):
    """Serialize all error sources of *container* into *yaml_doc* and return it.

    Each error is appended to the axis-appropriate section ('x_errors',
    'y_errors' or 'errors'). Identical per-point error values are collapsed
    to a single float. Raises DReprError for unknown matrix or error types.
    """
    # TODO: create public error retrieval interface
    for _err_name, _err_dict in container._error_dicts.items():
        _err_obj = _err_dict['err']
        _err_axis = _err_dict.get('axis', None)
        # get the relevant error section for the current axis, creating it if necessary
        _yaml_section = yaml_doc.setdefault(_yaml_error_section_for_axis[_err_axis], [])
        # -- check for relative errors
        _is_relative = _err_obj.relative
        if _is_relative:
            _err_val = _err_obj.error_rel
        else:
            _err_val = _err_obj.error
        # -- collapse identical errors to one float
        if np.allclose(_err_val[0], _err_val):
            _err_val = float(_err_val[0])
        else:
            _err_val = _err_val.tolist()
        # -- handle different error types
        # TODO shouldn't each error be wrapped inside an 'error' namespace?
        if _err_obj.__class__ is SimpleGaussianError:
            _yaml_section.append(dict(name=_err_name,
                                      type='simple',
                                      error_value=_err_val,
                                      relative=_is_relative,
                                      correlation_coefficient=_err_obj._corr_coeff,
                                      # TODO: public interface for _corr_coeff!
                                      )
                                 )
        elif _err_obj.__class__ is MatrixGaussianError:
            _mtype = _err_obj._matrix_type_at_construction  # TODO: public interface!
            _yaml_section.append(dict(name=_err_name,
                                      type='matrix',
                                      matrix_type=_mtype,
                                      relative=_is_relative,
                                      )
                                 )
            if _mtype == 'covariance':
                if _is_relative:
                    _yaml_section[-1]['matrix'] = _err_obj.cov_mat_rel  # .tolist()
                else:
                    _yaml_section[-1]['matrix'] = _err_obj.cov_mat  # .tolist()
            elif _mtype == 'correlation':
                # correlation matrices additionally need the per-point error values
                _yaml_section[-1]['matrix'] = _err_obj.cor_mat  # .tolist()
                _yaml_section[-1]['error_value'] = _err_val
            else:
                # BUGFIX: the original never called .format(), so the message
                # contained a literal '{}' instead of the offending type.
                raise DReprError("Unknown error matrix type '{}'. "
                                 "Valid: 'correlation' or 'covariance'.".format(_mtype))
        else:
            raise DReprError("No representation for error type {} "
                             "implemented!".format(type(_err_obj)))
    return yaml_doc
def process_error_sources(container_obj, yaml_doc):
    """Pop error specifications from *yaml_doc* and attach them to *container_obj*.

    Returns the (container_obj, yaml_doc) pair; the consumed error keys are
    removed from the YAML document. Raises DReprError for malformed entries.
    """
    # -- process error sources
    # errors can be specified as a single float, a list of floats, or a kafe2 error object
    # lists of the above are also valid, if the error object is not a list
    if isinstance(container_obj, XYContainer):  # also applies for XYParamModel
        # XY containers keep separate 'x_errors'/'y_errors' sections; a bare
        # float or a flat list of floats is wrapped into a one-element list.
        _xerrs = yaml_doc.pop('x_errors', [])
        if not isinstance(_xerrs, list) or (len(_xerrs) > 0 and isinstance(_xerrs[0], float)):
            _xerrs = [_xerrs]
        _yerrs = yaml_doc.pop('y_errors', [])
        if not isinstance(_yerrs, list) or (len(_yerrs) > 0 and isinstance(_yerrs[0], float)):
            _yerrs = [_yerrs]
        _errs = _xerrs + _yerrs
        _axes = [0] * len(_xerrs) + [1] * len(_yerrs)  # 0 for 'x', 1 for 'y'
    else:
        # Non-XY containers use a single axis-less 'errors' section.
        _errs = yaml_doc.pop('errors', [])
        if not isinstance(_errs, list) or (len(_errs) > 0 and isinstance(_errs[0], float)):
            _errs = [_errs]
        _axes = [None] * len(_errs)
    # add error sources, if any
    for _err, _axis in zip(_errs, _axes):
        # if error is a float/int or a list thereof add it as a simple error and don't
        # try to interpret it as a kafe2 error object
        if isinstance(_err, (float, int, list)):
            if _axis is not None:
                container_obj = add_error_to_container('simple', container_obj, err_val=_err,
                                                       axis=_axis)
            else:
                container_obj = add_error_to_container('simple', container_obj, err_val=_err)
            continue
        elif isinstance(_err, str):
            # Strings like '5%' denote relative errors (converted to a fraction).
            if _err.endswith("%"):
                try:
                    _rel_err_percent = float(_err[:-1])
                except ValueError:
                    raise DReprError("Cannot convert string to relative error: %s" % _err)
                if _axis is not None:
                    container_obj = add_error_to_container('simple', container_obj,
                                                           err_val=0.01 * _rel_err_percent,
                                                           relative=True,
                                                           axis=_axis)
                else:
                    container_obj = add_error_to_container('simple', container_obj,
                                                           err_val=0.01 * _rel_err_percent,
                                                           relative=True)
                continue
            else:
                raise DReprError("Cannot convert string to error: %s" % _err)
        # Otherwise the entry is a mapping describing a kafe2 error object.
        _add_kwargs = dict()
        # translate and check that all required keys are present
        try:
            _err_type = _err['type']
            _add_kwargs['name'] = _err.get('name')
            if _err_type == 'simple':
                _add_kwargs['err_val'] = _err['error_value']
                _add_kwargs['correlation'] = _err['correlation_coefficient']
            elif _err_type == 'matrix':
                _add_kwargs['err_matrix'] = _err['matrix']
                _add_kwargs['matrix_type'] = _err['matrix_type']
                # default None only mandatory for cor mats; check done later
                _add_kwargs['err_val'] = _err.get('error_value', None)
            else:
                raise DReprError("Unknown error type '{}'. "
                                 "Valid: {}".format(_err_type, ('simple', 'matrix')))
            _add_kwargs['relative'] = _err.get('relative', False)
            # if needed, specify the axis (only for 'xy' containers)
            if _axis is not None:
                _add_kwargs['axis'] = _axis
        except KeyError as e:
            # KeyErrors mean the YAML is incomplete -> raise
            raise DReprError("Missing required key '%s' for error specification" % e.args[0])
        # add error to data container
        container_obj = add_error_to_container(_err_type, container_obj, **_add_kwargs)
    return container_obj, yaml_doc
class MatrixYamlDumper(yaml.Dumper):
    """Custom directives for writing out matrices"""
    _regex_space_after_newline = re.compile(r"\n\s+")  # match all spaces immediately after newline
    # match exactly 'n' sequences of non-whitespace followed by optional whitespace
    # (not referenced within this class -- possibly used elsewhere; verify)
    _regex_format_n_numbers = r"((\S+\s*){{}})"

    def matrix(self, numpy_matrix):
        """Represent a matrix as a space/endline separated string.
        Note: symmetric matrices are represented as a lower triangular matrix by default
        """
        _is_symmetric = np.allclose(numpy_matrix, numpy_matrix.T)
        # remove brackets
        _string_repr = str(numpy_matrix).replace('[[', ' [').replace(']]', '] ').replace('[', '').\
            replace(']', '').strip()
        # remove all spaces immediately after newline
        _string_repr = re.sub(self.__class__._regex_space_after_newline, "\n", _string_repr)
        # if symmetric, remove everything above upper diagonal
        # NOTE(review): the 'and False' deliberately disables the lower-triangular
        # output path, so all matrices are currently written in full.
        if _is_symmetric and False:
            _rows = _string_repr.split('\n')
            for _irow, _row in enumerate(_rows):
                _rows[_irow] = re.sub(r"^((\S+\s*){{{}}}).*$".format(_irow+1), r"\1", _row).strip()
            _string_repr = "\n".join(_rows)
            # write lower triangular matrix using the '|'-style
            return self.represent_scalar('!symmetric_matrix', _string_repr, style='|')
        # write full matrix using the '|'-style
        return self.represent_scalar('!matrix', _string_repr, style='|')

# representers for covariance matrices errors
# NOTE(review): the key is the *function* np.array, not the np.ndarray type --
# confirm this is what yaml dispatches on in this code path.
MatrixYamlDumper.add_representer(np.array, MatrixYamlDumper.matrix)
class MatrixYamlLoader(yaml.Loader):
    """custom directives for reading in matrices"""

    def matrix(self, node):
        """construct a matrix from a tab/endline separated string"""
        # Blank lines are skipped (a trailing endline produces an empty final row).
        _parsed_rows = [_line.split() for _line in node.value.split('\n') if _line]
        return np.array(_parsed_rows, dtype=float)

    def symmetric_matrix(self, node):
        """construct a lower triangular matrix from a tab/endline separated string"""
        # Drop the final (empty) entry produced by the terminating endline.
        _lines = node.value.split('\n')[:-1]
        _dim = len(_lines)
        _result = np.zeros((_dim, _dim), dtype=float)
        for _index, _line in enumerate(_lines):
            _entries = _line.split()
            # For a lower triangular matrix, row #k must contain exactly k entries.
            if len(_entries) != _index + 1:
                raise DReprError("Cannot parse lower triangular matrix: "
                                 "row #{} should have length {}, got {} "
                                 "instead!".format(_index+1, _index+1, len(_entries)))
            _result[_index, 0:len(_entries)] = _entries
        # Mirror the strict lower triangle above the diagonal to symmetrize.
        return _result + np.tril(_result, -1).T

MatrixYamlLoader.add_constructor('!matrix', MatrixYamlLoader.matrix)
MatrixYamlLoader.add_constructor('!symmetric_matrix', MatrixYamlLoader.symmetric_matrix)
|
dsavoiu/kafe2
|
kafe2/fit/representation/error/common_error_tools.py
|
Python
|
gpl-3.0
| 10,614
|
import glob
import os
from unittest import TestCase
from qtpy.QtCore import QPoint
from qtpy.QtTest import QTest
from qtpy.QtWidgets import QMainWindow, QWidget, QVBoxLayout
from mtpy.core import mt
from mtpy.gui.SmartMT.Components.PlotParameter import FrequencySelection
from tests.SmartMT import _click_area
edi_paths = [
"data/edifiles",
# "examples/data/edi2",
# "examples/data/edi_files",
# "../MT_Datasets/3D_MT_data_edited_fromDuanJM",
# "../MT_Datasets/GA_UA_edited_10s-10000s",
# "data/edifiles2"
]
class MainWindow(QMainWindow):
    """Minimal host window embedding a single FrequencySelection widget for testing."""

    def __init__(self, parent=None):
        QMainWindow.__init__(self, parent)
        # Central container with a vertical layout holding the widget under test.
        container = QWidget(self)
        vbox = QVBoxLayout(container)
        self.frequency_select = FrequencySelection(self)
        vbox.addWidget(self.frequency_select)
        container.setLayout(vbox)
        self.setCentralWidget(container)
def _get_mt_objs(edi_path):
    """Load every '*.edi' file directly under *edi_path* as an mt.MT object."""
    pattern = os.path.join(edi_path, '*.edi')
    return [mt.MT(os.path.abspath(fname)) for fname in glob.glob(pattern)]
class TestFrequencySelect(TestCase):
    """GUI smoke test for FrequencySelection; concrete test methods are
    attached dynamically below, one per entry in edi_paths."""

    def setUp(self):
        # create gui
        self.app = MainWindow()
        self.app.show()
        QTest.qWaitForWindowActive(self.app)

    def _std_function_tests(self):
        """Exercise the widget's controls; invoked by each generated test case."""
        # Click point inside the small checkbox/radio indicator area.
        pos_check_box = QPoint(8, 8)
        # Toggle between period and frequency display modes.
        _click_area(self.app.frequency_select.ui.radioButton_period, pos_check_box)
        self.assertTrue(self.app.frequency_select.ui.radioButton_period.isChecked())
        _click_area(self.app.frequency_select.ui.radioButton_frequency, pos_check_box)
        self.assertTrue(self.app.frequency_select.ui.radioButton_frequency.isChecked())
        # test frequency selection
        # Five clicks on the histogram should add at least one selection.
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        self.assertTrue(self.app.frequency_select.model_selected.rowCount() > 0)
        # Enabling 'existing only' clears selections that are not existing frequencies.
        _click_area(self.app.frequency_select.ui.checkBox_existing_only, pos_check_box)
        self.assertTrue(self.app.frequency_select.ui.checkBox_existing_only.isChecked())
        self.assertTrue(self.app.frequency_select.histogram._select_existing_only)
        self.assertTrue(self.app.frequency_select.model_selected.rowCount() == 0)
        # Display options propagate to the histogram's internal flags.
        _click_area(self.app.frequency_select.ui.checkBox_show_existing, pos_check_box)
        self.assertTrue(self.app.frequency_select.ui.checkBox_show_existing.isChecked())
        self.assertTrue(self.app.frequency_select.histogram._show_existing)
        _click_area(self.app.frequency_select.ui.checkBox_y_log_scale, pos_check_box)
        self.assertTrue(self.app.frequency_select.ui.checkBox_y_log_scale.isChecked())
        self.assertTrue(self.app.frequency_select.histogram._y_log_scale)
        _click_area(self.app.frequency_select.ui.checkBox_x_log_scale, pos_check_box)
        self.assertTrue(self.app.frequency_select.ui.checkBox_x_log_scale.isChecked())
        self.assertTrue(self.app.frequency_select.histogram._x_log_scale)
        # test clear
        _click_area(self.app.frequency_select.ui.pushButton_clear)
        self.assertTrue(self.app.frequency_select.model_selected.rowCount() == 0)
        # test delete
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        _click_area(self.app.frequency_select.histogram,
                    offset=self.app.frequency_select.histogram.geometry().topLeft())
        self.assertTrue(self.app.frequency_select.model_selected.rowCount() > 0)
        # Select all entries, delete them, and verify the model is empty again.
        self.app.frequency_select.ui.listView_selected.selectAll()
        _click_area(self.app.frequency_select.ui.pushButton_delete)
        self.assertTrue(self.app.frequency_select.model_selected.rowCount() == 0)
def _generate_tests(edi_path):
    """Return a test method bound (via closure) to one edi data directory."""
    def _test_case(self):
        if os.path.exists(edi_path):
            mt_objs = _get_mt_objs(edi_path)
            self.app.frequency_select.set_data(mt_objs)
            QTest.qWait(2000)
            self._std_function_tests()
        else:
            # Close the window opened by setUp before skipping.
            self.app.close()
            self.skipTest("edi path not found")
    return _test_case

# Attach one generated test method per data path; the factory function above
# captures edi_path by value, avoiding the late-binding closure pitfall.
for edi_path in edi_paths:
    _test_case = _generate_tests(edi_path)
    _test_case.__name__ = "test_case_{}".format(os.path.basename(edi_path))
    setattr(TestFrequencySelect, _test_case.__name__, _test_case)
|
MTgeophysics/mtpy
|
tests/SmartMT/test_frequencySelect.py
|
Python
|
gpl-3.0
| 5,393
|
#! /usr/bin/env python
# Author: David Goodger
# Contact: goodger@users.sourceforge.net
# Revision: $Revision: 4233 $
# Date: $Date: 2005-12-29 00:48:48 +0100 (Thu, 29 Dec 2005) $
# Copyright: This module has been placed in the public domain.
"""
Tests for docutils.transforms.references.Substitutions.
"""
from __init__ import DocutilsTestSupport
from docutils.transforms.references import Substitutions
from docutils.parsers.rst import Parser
def suite():
    """Build the transform test suite for the Substitutions cases in ``totest``."""
    parser = Parser()
    s = DocutilsTestSupport.TransformTestSuite(parser)
    s.generateTests(totest)
    return s
totest = {}
totest['substitutions'] = ((Substitutions,), [
["""\
The |biohazard| symbol is deservedly scary-looking.
.. |biohazard| image:: biohazard.png
""",
"""\
<document source="test data">
<paragraph>
The \n\
<image alt="biohazard" uri="biohazard.png">
symbol is deservedly scary-looking.
<substitution_definition names="biohazard">
<image alt="biohazard" uri="biohazard.png">
"""],
["""\
Here's an |unknown| substitution.
""",
"""\
<document source="test data">
<paragraph>
Here's an \n\
<problematic ids="id2" refid="id1">
|unknown|
substitution.
<system_message backrefs="id2" ids="id1" level="3" line="1" source="test data" type="ERROR">
<paragraph>
Undefined substitution referenced: "unknown".
"""],
[u"""\
Substitutions support case differences:
.. |eacute| replace:: \u00E9
.. |Eacute| replace:: \u00C9
|Eacute|\\t\\ |eacute|, and even |EACUTE|.
""",
u"""\
<document source="test data">
<paragraph>
Substitutions support case differences:
<substitution_definition names="eacute">
\u00E9
<substitution_definition names="Eacute">
\u00C9
<paragraph>
\u00C9
t
\u00E9
, and even \n\
\u00C9
.
"""],
[u"""\
Indirect substitution definitions with multiple references:
|substitute| my coke for gin
|substitute| you for my mum
at least I'll get my washing done
.. |substitute| replace:: |replace|
.. |replace| replace:: swap
""",
u"""\
<document source="test data">
<paragraph>
Indirect substitution definitions with multiple references:
<paragraph>
swap
my coke for gin
swap
you for my mum
at least I'll get my washing done
<substitution_definition names="substitute">
swap
<substitution_definition names="replace">
swap
"""],
["""\
.. |l| unicode:: U+00AB .. left chevron
.. |r| unicode:: U+00BB .. right chevron
.. |.| replace:: |l|\ ``.``\ |r|
.. Delete either of the following lines, and there is no error.
Regular expression |.| will match any character
.. Note:: Note that |.| matches *exactly* one character
""",
u"""\
<document source="test data">
<substitution_definition names="l">
\xab
<substitution_definition names="r">
\xbb
<substitution_definition names=".">
\xab
<literal>
.
\xbb
<comment xml:space="preserve">
Delete either of the following lines, and there is no error.
<paragraph>
Regular expression \n\
\xab
<literal>
.
\xbb
will match any character
<note>
<paragraph>
Note that \n\
\xab
<literal>
.
\xbb
matches \n\
<emphasis>
exactly
one character
"""],
["""\
.. |sub| replace:: |sub|
""",
"""\
<document source="test data">
<system_message level="3" line="1" names="sub" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |sub| replace:: |sub|
"""],
["""\
.. |sub| replace:: |indirect1|
.. |indirect1| replace:: |indirect2|
.. |indirect2| replace:: |Sub|
""",
"""\
<document source="test data">
<system_message level="3" line="1" names="sub" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |sub| replace:: |indirect1|
<system_message level="3" line="2" names="indirect1" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect1| replace:: |indirect2|
<system_message level="3" line="3" names="indirect2" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect2| replace:: |Sub|
"""],
["""\
.. |indirect1| replace:: |indirect2|
.. |indirect2| replace:: |Sub|
.. |sub| replace:: |indirect1|
Use |sub| and |indirect1| and |sub| again (and |sub| one more time).
""",
"""\
<document source="test data">
<system_message level="3" line="1" names="indirect1" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect1| replace:: |indirect2|
<system_message level="3" line="2" names="indirect2" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |indirect2| replace:: |Sub|
<system_message level="3" line="3" names="sub" source="test data" type="ERROR">
<paragraph>
Circular substitution definition detected:
<literal_block xml:space="preserve">
.. |sub| replace:: |indirect1|
<paragraph>
Use \n\
<problematic ids="id8" refid="id7">
and \n\
<problematic ids="id2" refid="id1">
|indirect1|
and \n\
<problematic ids="id4" refid="id3">
|sub|
again (and \n\
<problematic ids="id6" refid="id5">
|sub|
one more time).
<system_message backrefs="id2" ids="id1" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "indirect1".
<system_message backrefs="id4" ids="id3" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "sub".
<system_message backrefs="id6" ids="id5" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "sub".
<system_message backrefs="id8" ids="id7" level="3" source="test data" type="ERROR">
<paragraph>
Circular substitution definition referenced: "Sub".
"""],
])
totest['unicode'] = ((Substitutions,), [
["""\
Insert an em-dash (|mdash|), a copyright symbol (|copy|), a non-breaking
space (|nbsp|), a backwards-not-equals (|bne|), and a captial omega (|Omega|).
.. |mdash| unicode:: 0x02014
.. |copy| unicode:: \\u00A9
.. |nbsp| unicode::  
.. |bne| unicode:: U0003D U020E5
.. |Omega| unicode:: U+003A9
""",
u"""\
<document source="test data">
<paragraph>
Insert an em-dash (
\u2014
), a copyright symbol (
\u00a9
), a non-breaking
space (
\u00a0
), a backwards-not-equals (
=
\u20e5
), and a captial omega (
\u03a9
).
<substitution_definition names="mdash">
\u2014
<substitution_definition names="copy">
\u00a9
<substitution_definition names="nbsp">
\u00a0
<substitution_definition names="bne">
=
\u20e5
<substitution_definition names="Omega">
\u03a9
"""],
["""
Testing comments and extra text.
Copyright |copy| 2003, |BogusMegaCorp (TM)|.
.. |copy| unicode:: 0xA9 .. copyright sign
.. |BogusMegaCorp (TM)| unicode:: BogusMegaCorp U+2122
.. with trademark sign
""",
u"""\
<document source="test data">
<paragraph>
Testing comments and extra text.
<paragraph>
Copyright \n\
\u00a9
2003, \n\
BogusMegaCorp
\u2122
.
<substitution_definition names="copy">
\u00a9
<substitution_definition names="BogusMegaCorp\ (TM)">
BogusMegaCorp
\u2122
"""],
["""\
Insert an em-dash |---| automatically trimming whitespace.
Some substitutions |TM| only need trimming on one side.
.. |---| unicode:: U+02014
:trim:
.. |TM| unicode:: U+02122
:ltrim:
""",
u"""\
<document source="test data">
<paragraph>
Insert an em-dash
\u2014
automatically trimming whitespace.
Some substitutions
\u2122
only need trimming on one side.
<substitution_definition ltrim="1" names="---" rtrim="1">
\u2014
<substitution_definition ltrim="1" names="TM">
\u2122
"""],
["""\
Substitution definition with an illegal element:
.. |target| replace:: _`target`
Make sure this substitution definition is not registered: |target|
""",
"""\
<document source="test data">
<paragraph>
Substitution definition with an illegal element:
<system_message level="3" line="3" source="test data" type="ERROR">
<paragraph>
Substitution definition contains illegal element:
<literal_block xml:space="preserve">
<target ids="target" names="target">
target
<literal_block xml:space="preserve">
.. |target| replace:: _`target`
<paragraph>
Make sure this substitution definition is not registered: \n\
<problematic ids="id2" refid="id1">
|target|
<system_message backrefs="id2" ids="id1" level="3" line="5" source="test data" type="ERROR">
<paragraph>
Undefined substitution referenced: "target".
"""],
])
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
|
alon/polinax
|
libs/external_libs/docutils-0.4/test/test_transforms/test_substitutions.py
|
Python
|
gpl-2.0
| 9,979
|
# Copyright 2013 OpenStack Foundation.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from glanceclient.tests import utils
from glanceclient.v2 import tasks
_OWNED_TASK_ID = 'a4963502-acc7-42ba-ad60-5aa0962b7faf'
_OWNER_ID = '6bd473f0-79ae-40ad-a927-e07ec37b642f'
_FAKE_OWNER_ID = '63e7f218-29de-4477-abdc-8db7c9533188'
_PENDING_ID = '3a4560a1-e585-443e-9b39-553b46ec92d1'
_PROCESSING_ID = '6f99bf80-2ee6-47cf-acfe-1f1fabb7e810'
fixtures = {
'/v2/tasks?limit=%d' % tasks.DEFAULT_PAGE_SIZE: {
'GET': (
{},
{'tasks': [
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
},
{
'id': _PROCESSING_ID,
'type': 'import',
'status': 'processing',
},
]},
),
},
'/v2/tasks?limit=1': {
'GET': (
{},
{
'tasks': [
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
},
],
'next': ('/v2/tasks?limit=1&'
'marker=3a4560a1-e585-443e-9b39-553b46ec92d1'),
},
),
},
('/v2/tasks?limit=1&marker=3a4560a1-e585-443e-9b39-553b46ec92d1'): {
'GET': (
{},
{'tasks': [
{
'id': _PROCESSING_ID,
'type': 'import',
'status': 'pending',
},
]},
),
},
'/v2/tasks/3a4560a1-e585-443e-9b39-553b46ec92d1': {
'GET': (
{},
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
},
),
'PATCH': (
{},
'',
),
},
'/v2/tasks/e7e59ff6-fa2e-4075-87d3-1a1398a07dc3': {
'GET': (
{},
{
'id': 'e7e59ff6-fa2e-4075-87d3-1a1398a07dc3',
'type': 'import',
'status': 'pending',
},
),
'PATCH': (
{},
'',
),
},
'/v2/tasks': {
'POST': (
{},
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
'input': '{"import_from": "file:///", '
'"import_from_format": "qcow2"}'
},
),
},
'/v2/tasks?limit=%d&owner=%s' % (tasks.DEFAULT_PAGE_SIZE, _OWNER_ID): {
'GET': (
{},
{'tasks': [
{
'id': _OWNED_TASK_ID,
},
]},
),
},
'/v2/tasks?limit=%d&status=processing' % (tasks.DEFAULT_PAGE_SIZE): {
'GET': (
{},
{'tasks': [
{
'id': _OWNED_TASK_ID,
},
]},
),
},
'/v2/tasks?limit=%d&type=import' % (tasks.DEFAULT_PAGE_SIZE): {
'GET': (
{},
{'tasks': [
{
'id': _OWNED_TASK_ID,
},
]},
),
},
'/v2/tasks?limit=%d&type=fake' % (tasks.DEFAULT_PAGE_SIZE): {
'GET': (
{},
{'tasks': [
]},
),
},
'/v2/tasks?limit=%d&status=fake' % (tasks.DEFAULT_PAGE_SIZE): {
'GET': (
{},
{'tasks': [
]},
),
},
'/v2/tasks?limit=%d&type=import' % (tasks.DEFAULT_PAGE_SIZE): {
'GET': (
{},
{'tasks': [
{
'id': _OWNED_TASK_ID,
},
]},
),
},
'/v2/tasks?limit=%d&owner=%s' % (tasks.DEFAULT_PAGE_SIZE, _FAKE_OWNER_ID):
{
'GET': ({},
{'tasks': []},
),
},
'/v2/tasks?limit=%d&sort_key=type' % tasks.DEFAULT_PAGE_SIZE: {
'GET': (
{},
{'tasks': [
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
},
{
'id': _PROCESSING_ID,
'type': 'import',
'status': 'processing',
},
]},
),
},
'/v2/tasks?limit=%d&sort_dir=asc&sort_key=id' % tasks.DEFAULT_PAGE_SIZE: {
'GET': (
{},
{'tasks': [
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
},
{
'id': _PROCESSING_ID,
'type': 'import',
'status': 'processing',
},
]},
),
},
'/v2/tasks?limit=%d&sort_dir=desc&sort_key=id' % tasks.DEFAULT_PAGE_SIZE: {
'GET': (
{},
{'tasks': [
{
'id': _PROCESSING_ID,
'type': 'import',
'status': 'processing',
},
{
'id': _PENDING_ID,
'type': 'import',
'status': 'pending',
},
]},
),
},
}
schema_fixtures = {
'task': {
'GET': (
{},
{
'name': 'task',
'properties': {
'id': {},
'type': {},
'status': {},
'input': {},
'result': {},
'message': {},
},
'additionalProperties': False,
}
)
}
}
class TestController(testtools.TestCase):
    """Exercise tasks.Controller against the canned fixtures above.

    Every test drives the controller through FakeAPI/FakeSchemaAPI, so the
    expected values are exactly whatever the fixture for the corresponding
    URL/method returns.
    """

    def setUp(self):
        super(TestController, self).setUp()
        self.api = utils.FakeAPI(fixtures)
        self.schema_api = utils.FakeSchemaAPI(schema_fixtures)
        self.controller = tasks.Controller(self.api, self.schema_api)

    def test_list_tasks(self):
        # NOTE(flwang): cast to list since the controller returns a generator
        tasks = list(self.controller.list())
        self.assertEqual(tasks[0].id, _PENDING_ID)
        self.assertEqual(tasks[0].type, 'import')
        self.assertEqual(tasks[0].status, 'pending')
        self.assertEqual(tasks[1].id, _PROCESSING_ID)
        self.assertEqual(tasks[1].type, 'import')
        self.assertEqual(tasks[1].status, 'processing')

    def test_list_tasks_paginated(self):
        # NOTE(flwang): cast to list since the controller returns a generator
        tasks = list(self.controller.list(page_size=1))
        self.assertEqual(tasks[0].id, _PENDING_ID)
        self.assertEqual(tasks[0].type, 'import')
        self.assertEqual(tasks[1].id, _PROCESSING_ID)
        self.assertEqual(tasks[1].type, 'import')

    def test_list_tasks_with_status(self):
        filters = {'filters': {'status': 'processing'}}
        tasks = list(self.controller.list(**filters))
        self.assertEqual(tasks[0].id, _OWNED_TASK_ID)

    def test_list_tasks_with_wrong_status(self):
        # An unknown status filter matches no tasks.
        filters = {'filters': {'status': 'fake'}}
        tasks = list(self.controller.list(**filters))
        self.assertEqual(len(tasks), 0)

    def test_list_tasks_with_type(self):
        filters = {'filters': {'type': 'import'}}
        tasks = list(self.controller.list(**filters))
        self.assertEqual(tasks[0].id, _OWNED_TASK_ID)

    def test_list_tasks_with_wrong_type(self):
        # An unknown type filter matches no tasks.
        filters = {'filters': {'type': 'fake'}}
        tasks = list(self.controller.list(**filters))
        self.assertEqual(len(tasks), 0)

    def test_list_tasks_for_owner(self):
        filters = {'filters': {'owner': _OWNER_ID}}
        tasks = list(self.controller.list(**filters))
        self.assertEqual(tasks[0].id, _OWNED_TASK_ID)

    def test_list_tasks_for_fake_owner(self):
        filters = {'filters': {'owner': _FAKE_OWNER_ID}}
        tasks = list(self.controller.list(**filters))
        self.assertEqual(tasks, [])

    def test_list_tasks_filters_encoding(self):
        # Non-ASCII filter values must be UTF-8 encoded in place.
        filters = {"owner": u"ni\xf1o"}
        try:
            list(self.controller.list(filters=filters))
        except KeyError:
            # NOTE(flaper87): It raises KeyError because there's
            # no fixture supporting this query:
            # /v2/tasks?owner=ni%C3%B1o&limit=20
            # We just want to make sure filters are correctly encoded.
            pass
        self.assertEqual(b"ni\xc3\xb1o", filters["owner"])

    def test_list_tasks_with_marker(self):
        tasks = list(self.controller.list(marker=_PENDING_ID, page_size=1))
        self.assertEqual(1, len(tasks))
        self.assertEqual(_PROCESSING_ID, tasks[0]['id'])

    def test_list_tasks_with_single_sort_key(self):
        tasks = list(self.controller.list(sort_key='type'))
        self.assertEqual(2, len(tasks))
        self.assertEqual(_PENDING_ID, tasks[0].id)

    def test_list_tasks_with_invalid_sort_key(self):
        self.assertRaises(ValueError,
                          list,
                          self.controller.list(sort_key='invalid'))

    def test_list_tasks_with_desc_sort_dir(self):
        tasks = list(self.controller.list(sort_key='id', sort_dir='desc'))
        self.assertEqual(2, len(tasks))
        self.assertEqual(_PENDING_ID, tasks[1].id)

    def test_list_tasks_with_asc_sort_dir(self):
        tasks = list(self.controller.list(sort_key='id', sort_dir='asc'))
        self.assertEqual(2, len(tasks))
        self.assertEqual(_PENDING_ID, tasks[0].id)

    def test_list_tasks_with_invalid_sort_dir(self):
        self.assertRaises(ValueError,
                          list,
                          self.controller.list(sort_dir='invalid'))

    def test_get_task(self):
        task = self.controller.get(_PENDING_ID)
        self.assertEqual(task.id, _PENDING_ID)
        self.assertEqual(task.type, 'import')

    def test_create_task(self):
        properties = {
            'type': 'import',
            'input': {'import_from_format': 'ovf', 'import_from':
                      'swift://cloud.foo/myaccount/mycontainer/path'},
        }
        task = self.controller.create(**properties)
        self.assertEqual(task.id, _PENDING_ID)
        self.assertEqual(task.type, 'import')

    def test_create_task_invalid_property(self):
        # 'bad_prop' violates the task schema (additionalProperties: False).
        properties = {
            'type': 'import',
            'bad_prop': 'value',
        }
        self.assertRaises(TypeError, self.controller.create, **properties)
|
sjsucohort6/openstack
|
python/venv/lib/python2.7/site-packages/glanceclient/tests/unit/v2/test_tasks.py
|
Python
|
mit
| 11,379
|
from SuperDiffer import app, db
from SuperDiffer.id import controllers as ID
from flask import Flask, render_template, request, abort, jsonify
import json,base64,pdb
"""Routes to allow clients to add left and right base64 encoded on JSON values and fetch their diff"""
#References: https://blog.miguelgrinberg.com/post/designing-a-restful-api-with-python-and-flask
@app.route('/v1/diff/<int:id>', methods=['GET'])
def diff_right_left(id):
    """Calculate the diff between left and right descriptors of a given ID
    and remove those descriptors if they're found (even if the data length
    is not the same and no diff is made)."""
    all_diff_data = ID.diff(id, ["left","right"])
    if not all_diff_data or not all_diff_data["left_right"]:
        # Missing descriptor(s) or empty diff result: client error.
        abort(400)
    ID.remove_all(id, ["left","right"])
    return jsonify(all_diff_data["left_right"])
@app.route('/v1/diff/<int:id>/left', methods=['POST'])
def add_left_to_id(id):
    """Add a JSON base64 value (in the format: {"data":"base64value"}) to the left descriptor of a given ID"""
    # Thin wrapper: all validation lives in _add_data_to_id_description.
    return _add_data_to_id_description(id, "left", request.json)
@app.route('/v1/diff/<int:id>/right', methods=['POST'])
def add_right_to_id(id):
    """Add a JSON base64 value (in the format: {"data":"base64value"}) to the right descriptor of a given ID"""
    # Thin wrapper: all validation lives in _add_data_to_id_description.
    return _add_data_to_id_description(id, "right", request.json)
def _is_base64(value):
"""Returns true only if value only has base64 chars (A-Z,a-z,0-9,+ or /)"""
#http://stackoverflow.com/questions/12315398/verify-is-a-string-is-encoded-in-base64-python
try:
enc = base64.b64encode(base64.b64decode(value)).strip()
return enc == value
except TypeError:
return False
def _add_data_to_id_description(id, descriptor, request_json):
    """Add a base64 value obtained from a JSON in the format
    {"data":"base64value"} to the given descriptor of a given ID.

    Aborts with 400 on any malformed input or storage failure; returns
    ("Created", 201) on success.
    """
    if "data" not in request_json:  # no data key on json ? abort !
        abort(400)
    try:
        # Arrays or other objects without an encode method must be
        # rejected: only strings can carry the base64 payload.  The
        # original bare "except:" also swallowed unrelated errors.
        no_unicode_data = request_json["data"].encode("utf-8")
    except (AttributeError, TypeError, UnicodeError):
        abort(400)
    if not _is_base64(no_unicode_data):  # no base64 value on data key ? abort !
        abort(400)
    if not ID.add(id, descriptor, no_unicode_data):  # add failed due to some database problem ? yeah, abort !
        abort(400)
    return "Created", 201  # yey!
@app.errorhandler(404)
def not_found(error):
    """Render the custom 404 page for unknown routes."""
    return render_template('404.html'), 404
|
gpaOliveira/SuperDiffer
|
SuperDiffer/routes.py
|
Python
|
mit
| 2,533
|
from BarTable import BarTable
import BigWorld
from gui.shared.gui_items.Vehicle import VEHICLE_CLASS_NAME
from gui.shared.gui_items.Vehicle import VEHICLE_TYPES_ORDER
from gui.battle_control import g_sessionProvider
from plugins.Engine.ModUtils import BattleUtils
import re
from StarsBar import StarsBar
import GUI
class BattleLoadingBarTable(BarTable):
    """Battle-loading-screen table of per-vehicle-class ally/enemy bars.

    Python 2 World of Tanks mod code: counts vehicles per class and team,
    renders one colored bar per class plus an optional star rating of the
    player's tier relative to the battle's average tier.
    """

    def __init__(self,texture=''):
        super(BattleLoadingBarTable,self).__init__(texture)

    def getColor(self,code):
        # Turn e.g. '#00ff00' into the game's inline color escape '\c00FF00FF;'
        # by stripping non-alphanumerics and appending full alpha.
        return '\\c' + re.sub('[^A-Za-z0-9]+', '', code) + 'FF;'

    def addBars(self,config):
        amounts,tiers,currentTier = self.getVehicleTypeAmount()
        for vehicleType,amount in amounts.iteritems():
            if amount['enemy'] == 0:
                # Avoid division by zero: no enemies of this class == 100% ally.
                perc = 1
            else:
                perc = amount['ally']*1.0 /(amount['ally'] + amount['enemy'])
            config['percentage'] = perc
            if config['show_label']:
                config['label'] = config[vehicleType]
            else:
                config['label'] = ''
            amount['ally_color'] = self.getColor('#00ff00')
            amount['enemy_color'] = self.getColor('#ff0000')
            amount['sep_color'] = self.getColor('#ffffff')
            amount['tank_type'] = config[vehicleType]
            if config['show_perc']:
                config['percentage_text'] = config['table_bars_label'].format(**amount)
            else:
                config['percentage_text'] = ''
            self.addBar(config)
        # NOTE(review): trailing blank bar — presumably a separator emitted
        # once after the per-class loop; confirm against upstream source.
        config['texture'] = ''
        config['percentage_text'] = ''
        config['label'] = ''
        self.addBar(config)
        self.addStars(tiers,currentTier)

    def addStars(self,tiers,currentTier):
        from Statistics import Statistics
        if not Statistics.myConf['stars_enable']:
            return
        x, y = GUI.screenResolution()
        n = 5
        maxTierDiff = 2
        # NOTE(review): eval() on a config value — trusted local config only.
        startPosition = eval(Statistics.myConf['stars_position'])
        delta = Statistics.myConf['stars_delta']
        size = Statistics.myConf['stars_size']
        activePath = Statistics.myConf['stars_activePath']
        inactivePath = Statistics.myConf['stars_inactivePath']
        st = StarsBar(n,startPosition,delta,size,activePath,inactivePath)
        averageTier = sum(tiers) / float(len(tiers))
        # More stars the higher the player's tier is above the battle average.
        starsAmount = round(currentTier-averageTier) + n - maxTierDiff
        st.add(starsAmount)
        self.bars.append(st)

    def getVehicleTypeAmount(self):
        """Return (amounts, tiers, currentTier): per-class ally/enemy counts,
        the list of known vehicle tiers and the player's own tier."""
        from Statistics import Statistics
        player = BigWorld.player()
        vehicles = player.arena.vehicles
        if player.playerVehicleID not in vehicles:
            return
        curVeh = vehicles[player.playerVehicleID]
        Statistics.getInfos(curVeh['accountDBID'])
        vehicles[player.playerVehicleID]['team']
        amounts = {VEHICLE_CLASS_NAME.HEAVY_TANK:{'ally':0,'enemy':0},
                   VEHICLE_CLASS_NAME.MEDIUM_TANK:{'ally':0,'enemy':0},
                   VEHICLE_CLASS_NAME.LIGHT_TANK:{'ally':0,'enemy':0},
                   VEHICLE_CLASS_NAME.AT_SPG:{'ally':0,'enemy':0},
                   VEHICLE_CLASS_NAME.SPG:{'ally':0,'enemy':0}}
        tiers = []
        for accountDBID,entityObj in Statistics.getEmo().getAll().iteritems():
            vID = g_sessionProvider.getCtx().getVehIDByAccDBID(accountDBID)
            if vID in vehicles:
                v_info = vehicles[vID]
                tiers.append(v_info['vehicleType'].level)
                if not BattleUtils.isMyTeam(v_info['team']):
                    tag = 'enemy'
                else:
                    tag = 'ally'
                for vehicleType in VEHICLE_TYPES_ORDER:
                    if vehicleType in v_info['vehicleType'].type.tags:
                        amounts[vehicleType][tag] += 1
        currentTier = curVeh['vehicleType'].level
        return (amounts,tiers,currentTier)
|
jstar88/wotmods
|
files/uncompyled/wot_folder/res_mods/0.9.10/scripts/client/plugins/Statistics_plugin/BattleLoadingBarTable.py
|
Python
|
gpl-2.0
| 3,899
|
#!/usr/bin/env python
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
__author__="mcanuto"
__date__ ="$Feb 13, 2014 6:03:13 PM$"
from domain_info import domainsVM, VMobject
from ConfigParser import ConfigParser
from countersMetrics import CountersMetrics
from powerMetrics import PowerMetrics
from wattsUp import WattsupPowerMetrics
from odroidXUE import OdroidXUPowerMetrics
from rawCountersMetrics import RawCountersMetrics
from extraVMmetrics import ExtraVMmetrics
from temperatureMetrics import TemperatureMetrics
from cpuCoresUsage import CoreUsage
from threading import Thread
from subprocess import Popen, PIPE
from threading import Thread
from time import sleep
from gmetric import GmetricConf
from logging import handlers
import gmetric
import signal
import sys
import guestfs
import os
import errno
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# create a file handler
handler = handlers.RotatingFileHandler('extraMetrics.log', maxBytes=1024*1024*10) #max size = 10 MB
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(handler)
pid_file = 'extra_monitoring.pid'
log_file = 'logging.out'
sflow_conf_path = '/etc/hsflowd.auto'
protocol = 'udp'
slope = 'both'
counters_directory = 'counters_data/'
def readScriptConfig():
    """Parse extraMetrics.conf into a dict of stripped key/value strings.

    Blank lines and lines starting with '#' are ignored; every other line
    is treated as "key = value" (split on '=', first two fields kept).
    """
    settings = {}
    with open("extraMetrics.conf", 'r') as config_file:
        for raw_line in config_file:
            if raw_line == '\n' or raw_line.startswith('#'):
                continue
            pieces = raw_line.split("=")
            settings[pieces[0].strip()] = pieces[1].strip()
    return settings
def readMetricConf(confFile):
    """Parse the metrics configuration file with ConfigParser.

    Returns the parser's raw section mapping (the private ``_sections``
    attribute), i.e. an ordered dict of {section: {option: value}}.
    """
    config = ConfigParser()
    fp = open(confFile)
    # NOTE(review): readfp() is the Python 2 API (deprecated, removed in
    # Python 3.12); this module is Python 2 code throughout.
    config.readfp(fp)
    fp.close()
    return config._sections
def sFlowConf():
    """Parse the hsflowd auto-config file into a key/value dict.

    Comment lines are ignored.  Keys keep their original spelling from the
    file; only values are stripped.
    """
    sflow_dict = {}
    with open(sflow_conf_path, 'r') as f:
        for line in f:
            li = line.strip()
            # Skip comments, blank lines and anything without '=': the
            # original indexed items[1] unconditionally and crashed with
            # IndexError on a blank or malformed line.
            if li.startswith("#") or "=" not in li:
                continue
            items = line.split("=")
            key, values = items[0], items[1].strip()
            sflow_dict[key] = values
    return sflow_dict
def sendHostMetrics(mconf, sflow, g):
    """Read extra host metrics and push each configured one through gmetric.

    NOTE(review): ExtraHostMetrics is not imported anywhere in this module,
    so calling this function raises NameError — confirm whether it is dead
    code or a missing import.  spoof_ip/spoof_hostname/slope are module or
    __main__ globals.
    """
    host_m = ExtraHostMetrics()
    #for keymet, valuemet in host_m.readMetrics().items():
    for keymet, valuemet in host_m.readMetrics().items():
        # Only send metrics that have an entry in the metrics config.
        if keymet in mconf:
            #logger.info('sendHostMetrics -', keymet, valuemet)
            if mconf[keymet]["spoof"].lower() == "yes":
                spoof = spoof_ip +":"+ spoof_hostname
                g.send(keymet , valuemet, mconf[keymet]["type"], mconf[keymet]["units"], slope, mconf[keymet]["tmax"], mconf[keymet]["dmax"], mconf[keymet]["group"], spoof)
            else:
                g.send(keymet , valuemet, mconf[keymet]["type"], mconf[keymet]["units"], slope, mconf[keymet]["tmax"], mconf[keymet]["dmax"], mconf[keymet]["group"])
def is_process_running():
    """Return True if the process whose id is stored in pid_file is alive."""
    if not os.path.isfile(pid_file):
        return False
    with open(pid_file) as handle:
        process_id = int(handle.readline())
    try:
        # Signal 0 only checks for existence/permission, it delivers nothing.
        os.kill(process_id, 0)
    except OSError:
        logger.info('PID file existed but process ' + str(process_id) + ' was not found')
        return False
    return True
def terminate_process():
try:
with open(pid_file, "r") as f:
pid = f.readline()
os.kill(int(pid), signal.SIGTERM)
os.remove(pid_file)
logger.info('Process terminated')
print "Process terminated"
except IOError:
os.remove(pid_file)
except IOError, io:
logger.error('Terminating process', io)
print "Process not running: "
# Entry point: "start" spawns one collector thread per enabled metric family,
# "stop" terminates a previously started instance via its PID file.
if __name__ == '__main__':
    #redirect output to log file
    #old = os.dup(1)
    #os.close(1)
    #os.open(log_file, os.O_WRONLY|os.O_CREAT)
    if os.geteuid() != 0:
        exit("You need to have root privileges to run this script.\nPlease try again, this time using 'sudo'. Exiting.")
    if (len(sys.argv) == 2 and sys.argv[1] == 'stop'):
        terminate_process()
    elif (len(sys.argv) == 2 and sys.argv[1] == 'start'):
        if is_process_running():
            logger.info('Process already running')
        else:
            # Read script configuration
            sconf = readScriptConfig()
            # get vm metrics - yes/no
            vm_metrics = sconf['vm_metrics'].strip()
            # get performance counters of the vm pid - yes/no
            vm_counters = sconf['vm_counters'].strip()
            # get performance raw counters of the vm pid - yes/no
            vm_raw_counters = sconf['vm_raw_counters'].strip()
            # get host performance counters - yes/no
            host_counters = sconf['host_counters'].strip()
            # get host performance raw counters - yes/no
            host_raw_counters = sconf['host_raw_counters'].strip()
            # get temperature metrics - yes/no
            temperature_metrics = sconf['temperature_metrics'].strip()
            # get power metrics - yes/no
            power_metrics = sconf['power_metrics'].strip()
            #get other configuration parameters
            counters_interval = sconf['counters_interval'].strip()
            counters_list = sconf['counters_list'].strip()
            power_interval = sconf['power_interval'].strip()
            power_sensors = sconf['power_sensors'].strip().split(',')
            raw_counters_list = sconf['raw_counters_list'].strip()
            vm_metrics_interval = sconf["vm_metrics_interval"].strip()
            metrics_config_path = sconf["metrics_config_path"].strip()
            vm_file_path = sconf["vm_file_path"].strip()
            temperature_interval = sconf['temperature_interval'].strip()
            core_usage = sconf['core_usage'].strip()
            #print (counters_interval, vm_metrics_interval, extra_host_metrics_interval, metrics_config_path)
            # Read sFlow configuration
            sflow = sFlowConf()
            # Read metrics configuration
            mconf = readMetricConf(metrics_config_path)
            spoof_ip = sflow["agentIP"]
            spoof_hostname = sflow["hostname"]
            spoof = spoof_ip +":"+ spoof_hostname
            items = sflow["collector"].split(" ")
            host = items[0]
            # Map the sFlow collector port onto the gmond port range
            # (6343 is the sFlow base port, 8649 the gmond default).
            port = int(items[1])-6343+8649
            # save PID of main process
            with open(pid_file, "w") as myfile:
                myfile.write(str(os.getpid()))
            # create directory for counter metrics
            if not os.path.exists(counters_directory):
                os.makedirs(counters_directory)
            #read and send metrics
            try:
                # gmetric configuration
                gconf = GmetricConf(host, port, protocol, slope, spoof)
                # VM metrics + VM counters (vm pid in pysichal host) + VM raw counters
                # The three boolean tail arguments of ExtraVMmetrics select
                # (GET_VM_METRICS, GET_VM_COUNTERS, GET_VM_RAW_COUNTERS).
                if vm_counters == 'yes':
                    if vm_metrics == 'yes':
                        if vm_raw_counters == 'yes':
                            # VM raw counters (vm pid in pysichal host)
                            logger.info("Start collecting VM metrics, VM Counters and VM Raw Counters metrics")
                            # ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, counters_list, raw_counters_list GET_VM_METRICS, GET_VM_COUNTERS, GET_VM_RAW_COUNTERS)
                            extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, counters_list, raw_counters_list, True, True, True)
                            thread_VM = Thread(target = extra_vm.collectVMmetrics)
                            thread_VM.start()
                        else:
                            logger.info("Start collecting VM metrics and VM Counters")
                            extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, counters_list, None, True, True, False)
                            thread_VM = Thread(target = extra_vm.collectVMmetrics)
                            thread_VM.start()
                    else:
                        if vm_raw_counters == 'yes':
                            logger.info("Start collecting VM Counters and VM Raw Counters metrics")
                            # ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_interval, counters_list, GET_VM_METRICS, GET_VM_COUNTERS, GET_VM_RAW_COUNTERS)
                            extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, counters_list, raw_counters_list, False, True, True)
                            thread_VM = Thread(target = extra_vm.collectVMmetrics)
                            thread_VM.start()
                        else:
                            logger.info("Start collecting VM Counters metrics")
                            extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, counters_list, None, False, True, False)
                            thread_VM = Thread(target = extra_vm.collectVMmetrics)
                            thread_VM.start()
                #
                elif vm_metrics == 'yes':
                    if vm_raw_counters == 'yes':
                        # VM raw counters (vm pid in pysichal host)
                        logger.info("Start collecting VM metrics and VM Raw Counters metrics")
                        extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, counters_list, raw_counters_list, True, False, True)
                        thread_VM = Thread(target = extra_vm.collectVMmetrics)
                        thread_VM.start()
                    else:
                        logger.info("Start collecting VM metrics")
                        extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, None, None, None, None, True, False, False)
                        thread_VM = Thread(target = extra_vm.collectVMmetrics)
                        thread_VM.start()
                else:
                    if vm_raw_counters == 'yes':
                        # VM raw counters (vm pid in pysichal host)
                        logger.info("Start collecting VM metrics and VM Raw Counters metrics")
                        extra_vm = ExtraVMmetrics(vm_file_path, vm_metrics_interval, mconf, gconf, counters_directory, counters_interval, None, raw_counters_list, False, True, True)
                        thread_VM = Thread(target = extra_vm.collectVMmetrics)
                        thread_VM.start()
                # Standard counters metrics
                if host_counters == 'yes' and counters_list:
                    logger.info("Start collecting Host Counters metrics")
                    counters_metric = CountersMetrics(counters_directory, counters_interval, counters_list, mconf, gconf, None, None)
                    threadCounters = Thread(target = counters_metric.collectCountersMetrics)
                    threadCounters.start()
                # Raw counters metrics
                if host_raw_counters == 'yes' and raw_counters_list:
                    logger.info("Start collecting Host Raw Counters metrics")
                    raw_counters_metric = RawCountersMetrics(counters_directory, counters_interval, raw_counters_list, mconf, gconf, None, None)
                    threadRawCounters = Thread(target = raw_counters_metric.collectCountersMetrics)
                    threadRawCounters.start()
                # Temperature metrics
                if temperature_metrics == 'yes':
                    logger.info("Start collecting Temperature metrics")
                    temperature_metric = TemperatureMetrics(temperature_interval, mconf, gconf)
                    threadTemperature = Thread(target = temperature_metric.collectTemperatureMetrics)
                    threadTemperature.start()
                # Power metrics
                if power_metrics == 'yes':
                    logger.info("Start collecting Power metrics")
                    # One collector thread per configured power sensor type.
                    for power_sensor in power_sensors:
                        power_metric = None
                        if power_sensor == "bscgrid":
                            power_metric = PowerMetrics(power_interval, mconf, gconf)
                        elif power_sensor == "wattsup":
                            power_metrics_list = sconf['power_metrics_list'].strip()
                            wattsup_path = sconf['wattsup_path'].strip()
                            wattsup_device = sconf['wattsup_device'].strip()
                            connected_node = sconf['connected_node'].strip()
                            power_metric = WattsupPowerMetrics(power_interval, power_metrics_list, wattsup_path, wattsup_device, connected_node, mconf, gconf)
                        elif power_sensor == "odroidxue":
                            power_metric = OdroidXUPowerMetrics(power_interval, mconf, gconf)
                        else:
                            logger.error("Error, sensor " + power_sensor + " not supported")
                        if power_metric != None:
                            threadPower= Thread(target = power_metric.collectPowerMetrics)
                            threadPower.start()
                # Cpu Cores Usage metrics
                if core_usage == 'yes':
                    logger.info("Start collecting Core usage metrics")
                    core_usage = CoreUsage(counters_interval, mconf, gconf)
                    threadCoreUsage= Thread(target = core_usage.collectCoreMetrics)
                    threadCoreUsage.start()
            except (KeyboardInterrupt, SystemExit):
                # NOTE(review): extra_vm may be unbound here when no VM
                # collection branch ran — would raise NameError; confirm.
                extra_vm.stopThreads()
                sys.exit(0)
            logger.info("Threads created...")
    else:
        print "Usage: "+sys.argv[0]+" start or "+sys.argv[0]+" stop"
|
bsc-renewit/d2.2
|
monitoringFramework/init_parallel.py
|
Python
|
apache-2.0
| 12,800
|
#
# DBus interface for the interactive partitioning module
#
# Copyright (C) 2019 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from dasbus.server.interface import dbus_interface
from pyanaconda.modules.common.constants.objects import INTERACTIVE_PARTITIONING
from pyanaconda.modules.storage.partitioning.base_interface import PartitioningInterface
@dbus_interface(INTERACTIVE_PARTITIONING.interface_name)
class InteractivePartitioningInterface(PartitioningInterface):
    """DBus interface for the interactive partitioning module.

    All behavior is inherited from PartitioningInterface; this subclass
    only publishes it under the INTERACTIVE_PARTITIONING interface name.
    """
|
jkonecny12/anaconda
|
pyanaconda/modules/storage/partitioning/interactive/interactive_interface.py
|
Python
|
gpl-2.0
| 1,424
|
# -*- coding: UTF-8 -*-
import __future__
import os
import sys
import traceback
import site
import tempfile
from drawBot.misc import getDefault
class StdOutput(object):
    """File-like object used to replace sys.stdout/sys.stderr while a
    script runs.

    Written text is either appended live to *outputView* (when given) or
    collected into the *output* list as (text, isError) tuples.
    """

    def __init__(self, output, isError=False, outputView=None):
        self.data = output
        self.isError = isError
        self.outputView = outputView

    def write(self, data):
        if isinstance(data, str):
            # Python 2: decode byte strings to unicode, falling back to a
            # visible repr marker when the bytes are not valid UTF-8.
            try:
                data = unicode(data, "utf-8", "replace")
            except UnicodeDecodeError:
                data = "XXX " + repr(data)
        if self.outputView is not None:
            self.outputView.append(data, self.isError)
            self.outputView.forceUpdate()
            self.outputView.scrollToEnd()
        else:
            self.data.append((data, self.isError))

    def flush(self):
        # File-protocol no-op.
        pass

    def close(self):
        # File-protocol no-op.
        pass
def _makeEnviron():
env = dict(os.environ)
kill = ["ARGVZERO", "EXECUTABLEPATH", "PYTHONHOME", "PYTHONPATH", "RESOURCEPATH"]
for key in kill:
if key in env:
del env[key]
return env
def _execute(cmds):
    """Run *cmds* through the shell, capturing output via temp files.

    Returns a (stderr, stdout) tuple with the captured text; the temp
    files are deleted before returning.
    """
    import subprocess
    errPath = tempfile.mkstemp()[1]
    outPath = tempfile.mkstemp()[1]
    errFile = open(errPath, "w")
    outFile = open(outPath, "w")
    # One escaped command line, executed with a cleaned-up environment.
    commandLine = subprocess.list2cmdline(cmds)
    process = subprocess.Popen(commandLine, stderr=errFile, stdout=outFile,
                               env=_makeEnviron(), shell=True)
    process.wait()
    errFile.close()
    outFile.close()
    # Read back what the child wrote.
    with open(errPath, "r") as capture:
        stderr = capture.read()
    with open(outPath, "r") as capture:
        stdout = capture.read()
    # trash the temp files
    os.remove(errPath)
    os.remove(outPath)
    return stderr, stdout
# Python 2 snippet executed by an external "python" process to print that
# interpreter's site-packages directory (note the py2 print statement).
localSitePackagesCode = u"""
from distutils import sysconfig
_site_packages_path = sysconfig.get_python_lib()
print _site_packages_path
"""


def getLocalCurrentPythonVersionDirName():
    """Run the snippet above in the system "python" and return the reported
    site-packages path, or False when it does not exist / cannot be found."""
    tempFile = tempfile.mkstemp(".py")[1]
    f = open(tempFile, "w")
    f.write(localSitePackagesCode)
    f.close()
    # [1] of _execute's (stderr, stdout): we only care about stdout.
    log = _execute(["python", tempFile])[1]
    sitePackages = log.split("\n")[0]
    os.remove(tempFile)
    if os.path.exists(sitePackages):
        return sitePackages
    else:
        return False


# Resolved once at import time; False when no usable path was found.
localSitePackagesPath = getLocalCurrentPythonVersionDirName()
class DrawBotNamespace(dict):
    """Script namespace that resolves selected names on a drawing context.

    Lookups for names listed in *variables* are redirected to attributes
    of *context*; every other key behaves like a normal dict entry.
    """

    def __init__(self, context, variables):
        self._context = context
        self._variables = variables

    def __getitem__(self, item):
        if item not in self._variables:
            return super(DrawBotNamespace, self).__getitem__(item)
        return getattr(self._context, item)
class _Helper(object):
"""
Define the builtin 'help'.
This is a wrapper around pydoc.help (with a twist).
"""
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)
class ScriptRunner(object):
    """Compile and execute a DrawBot script (Python 2) in a controlled
    namespace, temporarily redirecting stdout/stderr/argv/cwd/sys.path and
    restoring them afterwards.

    Either *text* (source string) or *path* (file to read) must be given.
    With checkSyntaxOnly=True the source is only compiled, not executed.
    """

    def __init__(self, text=None, path=None, stdout=None, stderr=None, namespace=None, checkSyntaxOnly=False):
        from threading import Thread
        if path:
            if isinstance(path, unicode):
                path = path.encode("utf-8")
            curDir, fileName = os.path.split(path)
        else:
            # No file: run from $HOME with a placeholder name.
            curDir = os.getenv("HOME")
            fileName = '<untitled>'
        # save up the important bits
        saveStdout = sys.stdout
        saveStderr = sys.stderr
        saveArgv = sys.argv
        try:
            saveDir = os.getcwd()
        except:
            saveDir = None
        # set up the name space
        if namespace is None:
            namespace = dict()
        namespace["__file__"] = path
        namespace["__name__"] = "__main__"
        namespace["help"] = _Helper()
        if stdout:
            sys.stdout = stdout
        if stderr:
            sys.stderr = stderr
        sys.argv = [fileName]
        os.chdir(curDir)
        sys.path.insert(0, curDir)
        if localSitePackagesPath and localSitePackagesPath not in sys.path:
            site.addsitedir(localSitePackagesPath)
        # here we go
        if text is None:
            f = open(path, 'rb')
            text = f.read()
            f.close()
        # Normalize all line endings to \n before compiling.
        source = text.replace('\r\n', '\n').replace('\r', '\n')
        compileFlags = 0
        if getDefault("DrawBotUseFutureDivision", True):
            compileFlags |= __future__.CO_FUTURE_DIVISION
        try:
            try:
                code = compile(source + '\n\n', fileName, "exec", compileFlags)
            except:
                # Syntax error: print it (limit=0 keeps only the error itself).
                traceback.print_exc(0)
            else:
                if not checkSyntaxOnly:
                    self._scriptDone = False
                    try:
                        exec code in namespace
                    except KeyboardInterrupt:
                        pass
                    except:
                        # Strip the runner's own frame from the traceback
                        # before reporting the user's error.
                        etype, value, tb = sys.exc_info()
                        if tb.tb_next is not None:
                            tb = tb.tb_next
                        traceback.print_exception(etype, value, tb)
                        etype = value = tb = None
        finally:
            # reset the important bits
            self._scriptDone = True
            sys.stdout = saveStdout
            sys.stderr = saveStderr
            sys.argv = saveArgv
            if saveDir:
                os.chdir(saveDir)
            sys.path.remove(curDir)
def CallbackRunner(callback, stdout=None, stderr=None, args=None, kwargs=None, fallbackResult=None):
    """Run *callback* with optional stdout/stderr redirection.

    Returns the callback's result, or *fallbackResult* when it raises;
    the traceback (with this runner's own frame stripped) is printed
    instead of propagating.  Backward compatible: args/kwargs default to
    empty — but as None sentinels instead of the original mutable
    default arguments ([] / {}).
    """
    if args is None:
        args = []
    if kwargs is None:
        kwargs = {}
    result = fallbackResult
    saveStdout = sys.stdout
    saveStderr = sys.stderr
    if stdout:
        sys.stdout = stdout
    if stderr:
        sys.stderr = stderr
    try:
        result = callback(*args, **kwargs)
    except:
        # Deliberately broad: any failure in user code falls back to
        # fallbackResult after reporting the user's own traceback frame.
        etype, value, tb = sys.exc_info()
        if tb.tb_next is not None:
            tb = tb.tb_next
        traceback.print_exception(etype, value, tb)
        etype = value = tb = None
    finally:
        sys.stdout = saveStdout
        sys.stderr = saveStderr
    return result
|
bitforks/drawbot
|
drawBot/scriptTools.py
|
Python
|
bsd-2-clause
| 6,306
|
# coding: utf-8
from fabkit import task
from fablib.openstack import Nova, Neutron
# Service helpers bound to the 'compute' role.
nova = Nova('compute')
neutron = Neutron('compute')


@task
def setup():
    """Run the Nova and Neutron setup routines on a compute node."""
    nova.setup()
    neutron.setup()
    return {'status': 1}


@task
def restart():
    """Restart the Nova and Neutron services."""
    nova.restart_services()
    neutron.restart_services()
|
syunkitada/fabkit-repo
|
fabscript/openstack/compute.py
|
Python
|
mit
| 303
|
#
# Thierry Parmentelat - INRIA
#
from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Auth import Auth
from PLC.Sites import Sites
from PLC.Nodes import Nodes
from PLC.Interfaces import Interface, Interfaces
from PLC.TagTypes import TagType, TagTypes
from PLC.InterfaceTags import InterfaceTag, InterfaceTags
# need to import so the core classes get decorated with caller_may_write_tag
from PLC.AuthorizeHelpers import AuthorizeHelpers
class AddInterfaceTag(Method):
    """
    Sets the specified setting for the specified interface
    to the specified value.

    Admins have full access. Non-admins need to
    (1) have at least one of the roles attached to the tagtype,
    and (2) belong in the same site as the tagged subject.

    Returns the new interface_tag_id (> 0) if successful, faults
    otherwise.
    """

    roles = ['admin', 'pi', 'tech', 'user']

    accepts = [
        Auth(),
        # no other way to refer to a interface
        InterfaceTag.fields['interface_id'],
        Mixed(TagType.fields['tag_type_id'],
              TagType.fields['tagname']),
        InterfaceTag.fields['value'],
    ]

    returns = Parameter(int, 'New interface_tag_id (> 0) if successful')

    def call(self, auth, interface_id, tag_type_id_or_name, value):
        # Resolve the interface or fault.
        interfaces = Interfaces(self.api, [interface_id])
        if not interfaces:
            raise PLCInvalidArgument("No such interface %r"%interface_id)
        interface = interfaces[0]

        # Resolve the tag type, by numeric id or by name.
        tag_types = TagTypes(self.api, [tag_type_id_or_name])
        if not tag_types:
            raise PLCInvalidArgument("No such tag type %r"%tag_type_id_or_name)
        tag_type = tag_types[0]

        # checks for existence - does not allow several different settings
        conflicts = InterfaceTags(self.api,
                                  {'interface_id':interface['interface_id'],
                                   'tag_type_id':tag_type['tag_type_id']})
        if len(conflicts) :
            raise PLCInvalidArgument("Interface %d already has setting %d"%(interface['interface_id'],
                                                                            tag_type['tag_type_id']))

        # check authorizations
        interface.caller_may_write_tag(self.api,self.caller,tag_type)

        # Create and persist the new tag.
        interface_tag = InterfaceTag(self.api)
        interface_tag['interface_id'] = interface['interface_id']
        interface_tag['tag_type_id'] = tag_type['tag_type_id']
        interface_tag['value'] = value
        interface_tag.sync()

        # Record the new object id (used by the event logging machinery).
        self.object_ids = [interface_tag['interface_tag_id']]
        return interface_tag['interface_tag_id']
|
dreibh/planetlab-lxc-plcapi
|
PLC/Methods/AddInterfaceTag.py
|
Python
|
bsd-3-clause
| 2,707
|
r'''
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
|
google/nogotofail
|
nogotofail/clients/linux/__init__.py
|
Python
|
apache-2.0
| 583
|
# -*- coding: utf-8 -*-
# Copyright (C) 2016, Maximilian Köhl <mail@koehlma.de>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals, division, absolute_import
import gc
import weakref
import common
import uv
@common.skip_interpreter('pypy')
class TestGC(common.TestCase):
    """Verify which uv objects stay alive / get collected: automatic GC is
    disabled during each test so collection only happens on gc.collect().

    Skipped on PyPy, whose GC is not refcount-based.
    """

    def set_up(self):
        gc.disable()
        gc.collect()

    def tear_down(self):
        gc.enable()

    def test_gc_loop(self):
        # A loop is kept alive by the thread-local "current loop" slot even
        # after the last user reference is dropped.
        loop = uv.Loop()
        weak_loop = weakref.ref(loop)
        base_loop = weakref.ref(loop.base_loop)
        del loop
        gc.collect()
        self.assert_not_equal(weak_loop(), None)
        self.assert_not_equal(base_loop(), None)
        uv.Loop._thread_locals.loop = None
        gc.collect()
        self.assert_equal(weak_loop(), None)
        self.assert_equal(base_loop(), None)

    def test_gc_handle(self):
        # An unstarted handle is collectable; a started one is kept alive.
        weak_handle = weakref.ref(uv.Prepare())
        gc.collect()
        self.assert_equal(weak_handle(), None)
        prepare = uv.Prepare()
        prepare.start()
        weak_handle = weakref.ref(prepare)
        del prepare
        gc.collect()
        self.assert_not_equal(weak_handle(), None)

    def test_gc_pending(self):
        # A handle on a non-current loop is collectable even with pending work.
        loop = uv.Loop()
        client = uv.Pipe(loop=loop)
        client.connect(common.TEST_PIPE1)
        client.write(b'hello')
        weak_client = weakref.ref(client)
        self.loop.make_current()
        del loop
        del client
        gc.collect()
        self.assert_is(weak_client(), None)

    def test_gc_loop_close(self):
        # Clearing pending requests makes the handle collectable and lets
        # the loop close cleanly.
        client = uv.Pipe()
        client.connect(common.TEST_PIPE1).clear_pending()
        client.clear_pending()
        weak_client = weakref.ref(client)
        del client
        gc.collect()
        self.assert_is(weak_client(), None)
        self.loop.close()
|
koehlma/uv
|
tests/test_gc.py
|
Python
|
lgpl-3.0
| 2,404
|
__author__ = "Ryan Dale"
__copyright__ = "Copyright 2016, Ryan Dale"
__email__ = "dalerr@niddk.nih.gov"
__license__ = "MIT"
import os
from snakemake.shell import shell
from snakemake.utils import makedirs
# fastqc creates a zip file and an html file but the filename is hard-coded by
# replacing fastq|fastq.gz|fq|fq.gz|bam with _fastqc.zip|_fastqc.html in the
# input file's basename.
#
# So we identify that file and move it to the expected output after fastqc is
# done.
outfile = os.path.basename(snakemake.input[0])
outdir = os.path.dirname(snakemake.output.html)
if outdir == '':
    # fastqc requires an explicit --outdir; '' means the working directory.
    outdir = '.'
# NOTE(review): replace() strips these substrings anywhere in the basename,
# not only as a trailing extension — confirm inputs never contain them
# mid-name (e.g. "sample.gzipped.fastq.gz" would lose the interior ".gz").
strip = ['.fastq', '.fq', '.gz', '.bam']
for s in strip:
    outfile = outfile.replace(s, '')
# The names fastqc will actually produce inside outdir.
out_zip = os.path.join(outdir, outfile + '_fastqc.zip')
out_html = os.path.join(outdir, outfile + '_fastqc.html')
extra = snakemake.params.get('extra', '')
log = snakemake.log_fmt_shell()
shell(
    'fastqc '
    '--threads {snakemake.threads} '
    '--noextract '
    '--quiet '
    '--outdir {outdir} '
    '{extra} '
    '{snakemake.input} '
    '{log} '
)
def same_file(x, y):
    """Return True when both paths resolve to the same absolute location."""
    left = os.path.abspath(x)
    right = os.path.abspath(y)
    return left == right
# fastqc wrote to its hard-coded names; move them onto the declared rule
# outputs unless they already coincide (mv would fail on identical paths).
if not same_file(out_zip,snakemake.output.zip):
    shell('mv {out_zip} {snakemake.output.zip}')
if not same_file(out_html, snakemake.output.html):
    shell('mv {out_html} {snakemake.output.html}')
|
lcdb/lcdb-wrapper-tests
|
wrappers/fastqc/wrapper.py
|
Python
|
mit
| 1,337
|
"""
Unit test for the parser.
"""
import unittest
from six import StringIO
from lesscpy.lessc.parser import LessParser
class TestLessParser(unittest.TestCase):
    """
    Unit tests for LessParser.
    """

    def setUp(self):
        # Fresh parser per test so no state leaks between cases.
        self.parser = LessParser()

    def test_parse_stream(self):
        """
        It can parse input from a file stream.
        """
        stream = StringIO("""
        @nice-blue: #5B83AD;
        """)
        self.parser.parse(file=stream)
        # A single object is parsed which is the expected variable.
        self.assertEqual(1, len(self.parser.result))
        # This is a stream without a name so it sets default name.
        self.assertEqual('(stream)', self.parser.target)
        variable = self.parser.result[0]
        # Name is kept verbatim; the hex color is normalized to lowercase.
        self.assertEqual('@nice-blue', variable.name)
        self.assertEqual(['#5b83ad'], variable.value)
|
joequery/lesscpy
|
test/test_parser.py
|
Python
|
mit
| 888
|
from .. import register_backend
from ..elf import ELF
from ...patched_stream import PatchedStream
# First 16 ident bytes of a regular 32-bit little-endian ELF, used to disguise
# a CGC binary as ELF so the stock ELF parser accepts it.
# (Python 2 idiom: str.decode('hex') — not valid on Python 3.)
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
# The original CGC ident bytes ("\x7fCGC..."), written back into loaded memory
# after parsing so the image matches the on-disk CGC format again.
CGC_HEADER = "7f43 4743 0101 0143 014d 6572 696e 6f00".replace(" ","").decode('hex')
class CGC(ELF):
    """
    Backend to support the CGC elf format used by the Cyber Grand Challenge competition.
    See : https://github.com/CyberGrandChallenge/libcgcef/blob/master/cgc_executable_format.md
    """

    def __init__(self, binary, *args, **kwargs):
        # Accept either an already-open stream or a file path.  Either way,
        # the first bytes are patched to a standard ELF ident (via
        # PatchedStream) so the parent ELF loader can parse the file.
        if hasattr(binary, 'seek'):
            filename = None
            stream = PatchedStream(binary, [(0, ELF_HEADER)])
        else:
            filename = binary
            stream = PatchedStream(open(binary, 'rb'), [(0, ELF_HEADER)])
        kwargs['filename'] = filename
        super(CGC, self).__init__(stream, *args, **kwargs)
        self.memory.write_bytes(0, CGC_HEADER) # repair CGC header
        self.os = 'cgc'
        self.execstack = True # the stack is always executable in CGC

    @staticmethod
    def is_compatible(stream):
        # Sniff the "\x7fCGC" magic without disturbing the caller's position.
        stream.seek(0)
        identstring = stream.read(0x1000)
        stream.seek(0)
        if identstring.startswith('\x7fCGC'):
            return True
        return False

    def _load_segment(self, seg):
        # Skip zero-size segments; only map segments that occupy memory.
        if seg.header.p_memsz > 0:
            super(CGC, self)._load_segment(seg)

    # NOTE(review): indentation was lost in extraction; in the upstream cle
    # codebase this is a class attribute of CGC — confirm placement.
    supported_filetypes = ['cgc']
# Make this backend discoverable by cle under the 'cgc' name.
register_backend('cgc', CGC)
|
Ruide/angr-dev
|
cle/cle/backends/cgc/cgc.py
|
Python
|
bsd-2-clause
| 1,452
|
#!/bin/python
# Simple script for shutting down the raspberry Pi at the press of a button.
# by Inderpreet Singh
import RPi.GPIO as GPIO
import time
import os

# Watch BCM pin 18 with the internal pull-up enabled: the button pulls the
# line to ground when pressed.
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.IN, pull_up_down=GPIO.PUD_UP)


def Shutdown(channel):
    """Event callback: halt the Raspberry Pi immediately."""
    os.system("sudo shutdown -h now")


# A falling edge means the button was pressed; the 2-second bouncetime keeps
# one physical press from firing the callback repeatedly.
GPIO.add_event_detect(18, GPIO.FALLING, callback=Shutdown, bouncetime=2000)

# Idle forever; the GPIO event thread does the real work.
while True:
    time.sleep(1)
|
mikestebbins/openapsdev
|
Scripts/shutdown_pi.py
|
Python
|
mit
| 691
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for QueryContextLineageSubgraph
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1beta1_MetadataService_QueryContextLineageSubgraph_async]
from google.cloud import aiplatform_v1beta1
async def sample_query_context_lineage_subgraph():
    """Illustrative sample: fetch the lineage subgraph of one Metadata context.

    Generated sample code (see the DO NOT EDIT notice above); "context_value"
    is a placeholder to be replaced with a real context resource name.
    """
    # Create a client
    client = aiplatform_v1beta1.MetadataServiceAsyncClient()

    # Initialize request argument(s)
    request = aiplatform_v1beta1.QueryContextLineageSubgraphRequest(
        context="context_value",
    )

    # Make the request
    response = await client.query_context_lineage_subgraph(request=request)

    # Handle the response
    print(response)
# [END aiplatform_generated_aiplatform_v1beta1_MetadataService_QueryContextLineageSubgraph_async]
|
googleapis/python-aiplatform
|
samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_metadata_service_query_context_lineage_subgraph_async.py
|
Python
|
apache-2.0
| 1,624
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.