| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
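# Inferred record layout (an assumption -- the source carries no documentation):
#     <Item><n> = [[stat values...], u'<item name>+<meld>+<meld>', <count>, <item id>]
# Each slot list (Arm, Head, Body, ...) collects the meld permutations
# considered for that gear slot.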
Midan_Metal_Musketoon0 = [[148, 0, 110, 0, 123, 162, 73, 2.64], u'Midan Metal Musketoon+CRIT12+CRIT12', 1, 14581]
Midan_Metal_Musketoon1 = [[148, 12, 98, 0, 123, 162, 73, 2.64], u'Midan Metal Musketoon+CRIT12+ACC12', 1, 14581]
Midan_Metal_Musketoon2 = [[148, 24, 86, 0, 123, 162, 73, 2.64], u'Midan Metal Musketoon+ACC12+ACC12', 1, 14581]
Augmented_Torrent_Musketoon0 = [[141, 0, 120, 80, 24, 154, 72, 2.64], u'Augmented Torrent Musketoon+SS12+SS12', 1, 14395]
Augmented_Torrent_Musketoon1 = [[141, 12, 120, 80, 12, 154, 72, 2.64], u'Augmented Torrent Musketoon+SS12+ACC12', 1, 14395]
Augmented_Torrent_Musketoon2 = [[141, 24, 120, 80, 0, 154, 72, 2.64], u'Augmented Torrent Musketoon+ACC12+ACC12', 1, 14395]
Arm = [ Midan_Metal_Musketoon0, Midan_Metal_Musketoon1, Midan_Metal_Musketoon2, Augmented_Torrent_Musketoon0, Augmented_Torrent_Musketoon1, Augmented_Torrent_Musketoon2, ]
Midan_Goggles_of_Aiming0 = [[81, 48, 69, 0, 24, 88, 0, 0], u'Midan Goggles of Aiming+SS12+SS12', 1, 14591]
Midan_Goggles_of_Aiming1 = [[81, 60, 69, 0, 12, 88, 0, 0], u'Midan Goggles of Aiming+SS12+ACC12', 1, 14591]
Midan_Goggles_of_Aiming2 = [[81, 69, 69, 0, 0, 88, 0, 0], u'Midan Goggles of Aiming+ACC12+ACC9', 1, 14591]
Augmented_Torrent_Mask_of_Aiming0 = [[81, 69, 24, 0, 48, 88, 0, 0], u'Augmented Torrent Mask of Aiming+CRIT12+CRIT12', 1, 14405]
Head = [ Midan_Goggles_of_Aiming0, Midan_Goggles_of_Aiming1, Midan_Goggles_of_Aiming2, Augmented_Torrent_Mask_of_Aiming0, ]
Midan_Coat_of_Aiming0 = [[131, 0, 102, 0, 112, 143, 0, 0], u'Midan Coat of Aiming+CRIT12+CRIT12', 1, 14598]
Midan_Coat_of_Aiming1 = [[131, 12, 90, 0, 112, 143, 0, 0], u'Midan Coat of Aiming+CRIT12+ACC12', 1, 14598]
Midan_Coat_of_Aiming2 = [[131, 24, 78, 0, 112, 143, 0, 0], u'Midan Coat of Aiming+ACC12+ACC12', 1, 14598]
Augmented_Torrent_Tabard_of_Aiming0 = [[131, 112, 24, 75, 0, 143, 0, 0], u'Augmented Torrent Tabard of Aiming+CRIT12+CRIT12', 1, 14412]
Body = [ Midan_Coat_of_Aiming0, Midan_Coat_of_Aiming1, Midan_Coat_of_Aiming2, Augmented_Torrent_Tabard_of_Aiming0, ]
Midan_Gloves_of_Aiming0 = [[81, 0, 24, 66, 48, 88, 0, 0], u'Midan Gloves of Aiming+CRIT12+CRIT12', 1, 14605]
Midan_Gloves_of_Aiming1 = [[81, 12, 12, 66, 48, 88, 0, 0], u'Midan Gloves of Aiming+CRIT12+ACC12', 1, 14605]
Midan_Gloves_of_Aiming2 = [[81, 24, 0, 66, 48, 88, 0, 0], u'Midan Gloves of Aiming+ACC12+ACC12', 1, 14605]
Augmented_Torrent_Armguards_of_Aiming0 = [[81, 48, 69, 0, 24, 88, 0, 0], u'Augmented Torrent Armguards of Aiming+SS12+SS12', 1, 14419]
Augmented_Torrent_Armguards_of_Aiming1 = [[81, 60, 69, 0, 12, 88, 0, 0], u'Augmented Torrent Armguards of Aiming+SS12+ACC12', 1, 14419]
Augmented_Torrent_Armguards_of_Aiming2 = [[81, 69, 69, 0, 0, 88, 0, 0], u'Augmented Torrent Armguards of Aiming+ACC12+ACC9', 1, 14419]
Hands = [ Midan_Gloves_of_Aiming0, Midan_Gloves_of_Aiming1, Midan_Gloves_of_Aiming2, Augmented_Torrent_Armguards_of_Aiming0, Augmented_Torrent_Armguards_of_Aiming1, Augmented_Torrent_Armguards_of_Aiming2, ]
Midan_Belt_of_Aiming0 = [[61, 0, 51, 34, 12, 66, 0, 0], u'Midan Belt of Aiming+SS12', 1, 14612]
Midan_Belt_of_Aiming1 = [[61, 12, 51, 34, 0, 66, 0, 0], u'Midan Belt of Aiming+ACC12', 1, 14612]
Augmented_Torrent_Belt_of_Aiming0 = [[61, 36, 12, 0, 51, 66, 0, 0], u'Augmented Torrent Belt of Aiming+CRIT12', 1, 14426]
Augmented_Torrent_Belt_of_Aiming1 = [[61, 48, 0, 0, 51, 66, 0, 0], u'Augmented Torrent Belt of Aiming+ACC12', 1, 14426]
Waist = [ Midan_Belt_of_Aiming0, Midan_Belt_of_Aiming1, Augmented_Torrent_Belt_of_Aiming0, Augmented_Torrent_Belt_of_Aiming1, ]
Midan_Poleyns_of_Aiming0 = [[131, 112, 24, 75, 0, 143, 0, 0], u'Midan Poleyns of Aiming+CRIT12+CRIT12', 1, 14619]
Augmented_Torrent_Tights_of_Aiming0 = [[131, 0, 102, 0, 112, 143, 0, 0], u'Augmented Torrent Tights of Aiming+CRIT12+CRIT12', 1, 14433]
Augmented_Torrent_Tights_of_Aiming1 = [[131, 12, 90, 0, 112, 143, 0, 0], u'Augmented Torrent Tights of Aiming+CRIT12+ACC12', 1, 14433]
Augmented_Torrent_Tights_of_Aiming2 = [[131, 24, 78, 0, 112, 143, 0, 0], u'Augmented Torrent Tights of Aiming+ACC12+ACC12', 1, 14433]
Legs = [ Midan_Poleyns_of_Aiming0, Augmented_Torrent_Tights_of_Aiming0, Augmented_Torrent_Tights_of_Aiming1, Augmented_Torrent_Tights_of_Aiming2, ]
Midan_Boots_of_Aiming0 = [[81, 69, 24, 0, 48, 88, 0, 0], u'Midan Boots of Aiming+CRIT12+CRIT12', 1, 14626]
Augmented_Torrent_Boots_of_Aiming0 = [[81, 0, 69, 66, 0, 88, 0, 0], u'Augmented Torrent Boots of Aiming+CRIT12+CRIT9', 1, 14440]
Augmented_Torrent_Boots_of_Aiming1 = [[81, 12, 60, 66, 0, 88, 0, 0], u'Augmented Torrent Boots of Aiming+CRIT12+ACC12', 1, 14440]
Augmented_Torrent_Boots_of_Aiming2 = [[81, 24, 48, 66, 0, 88, 0, 0], u'Augmented Torrent Boots of Aiming+ACC12+ACC12', 1, 14440]
Feet = [ Midan_Boots_of_Aiming0, Augmented_Torrent_Boots_of_Aiming0, Augmented_Torrent_Boots_of_Aiming1, Augmented_Torrent_Boots_of_Aiming2, ]
Midan_Neckband_of_Aiming0 = [[61, 36, 51, 0, 12, 0, 0, 0], u'Midan Neckband of Aiming+SS12', 1, 14637]
Midan_Neckband_of_Aiming1 = [[61, 48, 51, 0, 0, 0, 0, 0], u'Midan Neckband of Aiming+ACC12', 1, 14637]
Augmented_Primal_Choker_of_Aiming0 = [[61, 51, 12, 0, 36, 0, 0, 0], u'Augmented Primal Choker of Aiming+CRIT12', 1, 14451]
Necklace = [ Midan_Neckband_of_Aiming0, Midan_Neckband_of_Aiming1, Augmented_Primal_Choker_of_Aiming0, ]
Midan_Earrings_of_Aiming0 = [[61, 51, 12, 0, 36, 0, 0, 0], u'Midan Earrings of Aiming+CRIT12', 1, 14632]
Augmented_Primal_Earrings_of_Aiming0 = [[61, 0, 51, 34, 12, 0, 0, 0], u'Augmented Primal Earrings of Aiming+SS12', 1, 14446]
Augmented_Primal_Earrings_of_Aiming1 = [[61, 12, 51, 34, 0, 0, 0, 0], u'Augmented Primal Earrings of Aiming+ACC12', 1, 14446]
Earrings = [ Midan_Earrings_of_Aiming0, Augmented_Primal_Earrings_of_Aiming0, Augmented_Primal_Earrings_of_Aiming1, ]
Midan_Bracelets_of_Aiming0 = [[61, 0, 12, 49, 36, 0, 0, 0], u'Midan Bracelets of Aiming+CRIT12', 1, 14642]
Midan_Bracelets_of_Aiming1 = [[61, 12, 0, 49, 36, 0, 0, 0], u'Midan Bracelets of Aiming+ACC12', 1, 14642]
Augmented_Primal_Bracelet_of_Aiming0 = [[61, 51, 48, 0, 0, 0, 0, 0], u'Augmented Primal Bracelet of Aiming+CRIT12', 1, 14456]
Bracelets = [ Midan_Bracelets_of_Aiming0, Midan_Bracelets_of_Aiming1, Augmented_Primal_Bracelet_of_Aiming0, ]
Midan_Ring_of_Aiming0 = [[61, 36, 12, 0, 51, 0, 0, 0], u'Midan Ring of Aiming+CRIT12', 1, 14647]
Midan_Ring_of_Aiming1 = [[61, 48, 0, 0, 51, 0, 0, 0], u'Midan Ring of Aiming+ACC12', 1, 14647]
Augmented_Primal_Ring_of_Aiming0 = [[61, 0, 48, 49, 0, 0, 0, 0], u'Augmented Primal Ring of Aiming+CRIT12', 1, 14461]
Augmented_Primal_Ring_of_Aiming1 = [[61, 12, 36, 49, 0, 0, 0, 0], u'Augmented Primal Ring of Aiming+ACC12', 1, 14461]
Ring = [ Midan_Ring_of_Aiming0, Midan_Ring_of_Aiming1, Augmented_Primal_Ring_of_Aiming0, Augmented_Primal_Ring_of_Aiming1, ]
| jrlusby/xiv-bard-calc | inventories/mch240fullmeta.py | Python | mit | 6,767 |
"""
The Kullback-Leibler divergence.
"""
import numpy as np
from .cross_entropy import get_pmfs_like
from ..helpers import normalize_rvs
from ..utils import flatten
from .kullback_leibler_divergence import kullback_leibler_divergence
__all__ = ('double_power_sum',
           'hellinger_sum',
           'alpha_divergence',
           'f_divergence',
           'hellinger_divergence',
           'renyi_divergence',
           'tsallis_divergence',
           )
### References for Divergence Formulas ###
## http://arxiv.org/pdf/1105.3259v1.pdf
## http://arxiv.org/pdf/1206.2459.pdf
## http://mitran-lab.amath.unc.edu:8082/subversion/grants/Proposals/2013/DOE-DataCentric/biblio/LieseVajdaDivergencesInforTheory.pdf
## Crooks: http://threeplusone.com/on_information.pdf
def double_power_sum(dist1, dist2, exp1=1, exp2=1, rvs=None, crvs=None,
rv_mode=None):
"""
A common generalization of the sums needed to compute the Hellinger
and alpha divergences below.
Parameters
----------
    dist1 : Distribution
        The first distribution in the sum.
    dist2 : Distribution
        The second distribution in the sum.
exp1 : float, 1
First exponent used in the power sum.
exp2 : float, 1
Second exponent used in the power sum.
    rvs : list, None
        The indexes of the random variable used to calculate the sum.
        If None, then the sum is calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The specified sum between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
rvs, crvs, rv_mode = normalize_rvs(dist1, rvs, crvs, rv_mode)
rvs, crvs = list(flatten(rvs)), list(flatten(crvs))
normalize_rvs(dist2, rvs, crvs, rv_mode)
p1s, q1s = get_pmfs_like(dist1, dist2, rvs+crvs, rv_mode)
div = np.nansum(np.power(p1s, exp1) * np.power(q1s, exp2))
    if crvs:
        # Condition by dividing out the matching power sum computed over
        # the conditioned variables alone.
        p2s, q2s = get_pmfs_like(dist1, dist2, crvs, rv_mode)
        div /= np.nansum(np.power(p2s, exp1) * np.power(q2s, exp2))
return div
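# In symbols, double_power_sum(d1, d2, a, b) computes sum_x p(x)**a * q(x)**b
# over the pmfs of `rvs` + `crvs`, divided by the matching sum over `crvs`
# alone when conditioning; hellinger_sum below is the special case
# (a, b) = (alpha, 1 - alpha).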
def hellinger_sum(dist1, dist2, alpha=1., rvs=None, crvs=None, rv_mode=None):
"""
The Hellinger sum/integral of `dist1` and `dist2`, used to define other
divergences.
Parameters
----------
    dist1 : Distribution
        The first distribution in the sum.
    dist2 : Distribution
        The second distribution in the sum.
alpha : float, 1
Exponent used in the sum.
    rvs : list, None
        The indexes of the random variable used to calculate the sum.
        If None, then the sum is calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The Hellinger sum between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
return double_power_sum(dist1, dist2, alpha, 1.-alpha,
rvs=rvs, crvs=crvs, rv_mode=rv_mode)
def hellinger_divergence(dist1, dist2, alpha=1., rvs=None, crvs=None,
rv_mode=None):
# http://mitran-lab.amath.unc.edu:8082/subversion/grants/Proposals/2013/DOE-DataCentric/biblio/LieseVajdaDivergencesInforTheory.pdf
"""
The Hellinger divergence of `dist1` and `dist2`.
Parameters
----------
dist1 : Distribution
The first distribution in the Hellinger divergence.
dist2 : Distribution
The second distribution in the Hellinger divergence.
alpha : float, 1
The divergence is a one parameter family in alpha.
    rvs : list, None
        The indexes of the random variable used to calculate the
        Hellinger divergence between. If None, then the Hellinger
        divergence is calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The Hellinger divergence between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
if alpha == 1:
return kullback_leibler_divergence(dist1, dist2, rvs=rvs,
crvs=crvs, rv_mode=rv_mode)
s = hellinger_sum(dist1, dist2, rvs=rvs, alpha=alpha,
crvs=crvs, rv_mode=rv_mode)
return (s-1.)/(alpha-1.)
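# For alpha != 1 this is (S_alpha - 1) / (alpha - 1), where S_alpha is the
# Hellinger sum above; the alpha -> 1 limit of that expression is the
# Kullback-Leibler divergence, hence the special case.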
def tsallis_divergence(dist1, dist2, alpha=1., rvs=None, crvs=None,
rv_mode=None):
"""
The Tsallis divergence of `dist1` and `dist2`.
Parameters
----------
dist1 : Distribution
The first distribution in the Tsallis divergence.
dist2 : Distribution
The second distribution in the Tsallis divergence.
alpha : float, 1
The divergence is a one parameter family in alpha.
    rvs : list, None
        The indexes of the random variable used to calculate the
        Tsallis divergence between. If None, then the Tsallis
        divergence is calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The Tsallis divergence between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
    # D_T = (S_alpha - 1) / (alpha - 1), where S_alpha is the Hellinger sum
if alpha == 1:
div = kullback_leibler_divergence(dist1, dist2, rvs=rvs,
crvs=crvs, rv_mode=rv_mode)
else:
s = hellinger_sum(dist1, dist2, rvs=rvs, alpha=alpha,
crvs=crvs, rv_mode=rv_mode)
        div = (s - 1.) / (alpha - 1.)
return div
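# Note that, in this parameterization, tsallis_divergence and
# hellinger_divergence evaluate the same expression (S_alpha - 1)/(alpha - 1);
# the two names reflect the different literatures they come from.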
def renyi_divergence(dist1, dist2, alpha=1., rvs=None, crvs=None, rv_mode=None):
"""
The Renyi divergence of `dist1` and `dist2`.
Parameters
----------
dist1 : Distribution
The first distribution in the Renyi divergence.
dist2 : Distribution
The second distribution in the Renyi divergence.
alpha : float, 1
The divergence is a one parameter family in alpha.
    rvs : list, None
        The indexes of the random variable used to calculate the
        Renyi divergence between. If None, then the Renyi
        divergence is calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The Renyi divergence between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
    # D_R = log2(S_alpha) / (alpha - 1), where S_alpha is the Hellinger sum
if alpha == 1:
div = kullback_leibler_divergence(dist1, dist2, rvs=rvs,
crvs=crvs, rv_mode=rv_mode)
else:
s = hellinger_sum(dist1, dist2, rvs=rvs,
alpha=alpha, crvs=crvs, rv_mode=rv_mode)
div = np.log2(s) / (alpha - 1.)
return div
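# The Renyi and Tsallis divergences are monotone transforms of one another:
# D_R = log2(1 + (alpha - 1) * D_T) / (alpha - 1), since both are built from
# the same Hellinger sum.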
def alpha_divergence(dist1, dist2, alpha=1., rvs=None, crvs=None, rv_mode=None):
"""
    The alpha divergence of `dist1` and `dist2`, as used in Information
    Geometry. Note that there are several inequivalent definitions of
    "alpha divergence" in the literature; this one comes from
    http://en.wikipedia.org/wiki/Information_geometry .
Parameters
----------
dist1 : Distribution
The first distribution in the alpha divergence.
dist2 : Distribution
The second distribution in the alpha divergence.
alpha : float, 1
The divergence is a one parameter family in alpha.
    rvs : list, None
        The indexes of the random variable used to calculate the
        alpha divergence between. If None, then the alpha
        divergence is calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The alpha divergence between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
if alpha == 1:
return kullback_leibler_divergence(dist1, dist2, rvs=rvs,
crvs=crvs, rv_mode=rv_mode)
if alpha == -1:
return kullback_leibler_divergence(dist2, dist1, rvs=rvs,
crvs=crvs, rv_mode=rv_mode)
s = double_power_sum(dist1, dist2, (1.-alpha)/2, (1.+alpha)/2,
rvs=rvs, crvs=crvs, rv_mode=rv_mode)
return 4*(1.-s)/(1.-alpha*alpha)
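# A hedged usage sketch (assumes dit's Distribution constructor):
#
#     >>> from dit import Distribution
#     >>> d1 = Distribution(['0', '1'], [0.5, 0.5])
#     >>> d2 = Distribution(['0', '1'], [0.25, 0.75])
#     >>> alpha_divergence(d1, d2, alpha=1)    # equals KL(d1 || d2)
#     >>> alpha_divergence(d1, d2, alpha=-1)   # equals KL(d2 || d1)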
def f_divergence(dist1, dist2, f, rvs=None, crvs=None, rv_mode=None):
"""
The Csiszar f-divergence of `dist1` and `dist2`. Note that it is typically
more accurate to use a specialized divergence function when available
due to roundoff errors and small probability effects.
Parameters
----------
dist1 : Distribution
The first distribution in the f-divergence.
dist2 : Distribution
The second distribution in the f-divergence.
    f : function
        The auxiliary convex function defining the f-divergence.
    rvs : list, None
        The indexes of the random variable used to calculate the
        f-divergence between. If None, then the f-divergence is
        calculated over all random variables.
    crvs : list, None
        The indexes of the random variables to condition on.
rv_mode : str, None
Specifies how to interpret `rvs` and `crvs`. Valid options are:
{'indices', 'names'}. If equal to 'indices', then the elements of
`crvs` and `rvs` are interpreted as random variable indices. If equal
        to 'names', then the elements are interpreted as random variable names.
If `None`, then the value of `dist._rv_mode` is consulted, which
defaults to 'indices'.
Returns
-------
    div : float
The f-divergence between `dist1` and `dist2`.
Raises
------
ditException
Raised if either `dist1` or `dist2` doesn't have `rvs` or, if `rvs` is
None, if `dist2` has an outcome length different than `dist1`.
"""
rvs, crvs, rv_mode = normalize_rvs(dist1, rvs, crvs, rv_mode)
rvs, crvs = list(flatten(rvs)), list(flatten(crvs))
normalize_rvs(dist2, rvs, crvs, rv_mode)
p1s, q1s = get_pmfs_like(dist1, dist2, rvs+crvs, rv_mode)
vfunc = np.vectorize(f)
div = np.nansum(vfunc(np.divide(p1s, q1s)) * q1s)
    if crvs:
        # Subtract the same sum computed over the conditioned variables;
        # with f(t) = t*log2(t) this reproduces the conditional KL
        # divergence decomposition.
        p2s, q2s = get_pmfs_like(dist1, dist2, crvs, rv_mode)
        div -= np.nansum(vfunc(np.divide(p2s, q2s)) * q2s)
return div
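# A hedged usage sketch: with f(t) = t * log2(t) (convex, with f(1) == 0) the
# f-divergence reduces to the Kullback-Leibler divergence (d1, d2 as in the
# sketch above):
#
#     >>> import numpy as np
#     >>> f_divergence(d1, d2, lambda t: t * np.log2(t))   # ~= KL(d1 || d2)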
| chebee7i/dit | dit/divergences/generalized_divergences.py | Python | bsd-3-clause | 13,249 |
#
# Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#
#
# This is an InputStream that is loaded from stdin all at once
# when you construct the object.
#
import codecs
import sys
from InputStream import InputStream
class StdinStream(InputStream):

    def __init__(self, encoding='ascii'):
        # Read all of stdin eagerly and decode it before handing the text
        # to the base InputStream.
        raw = sys.stdin.read()
        data = codecs.decode(raw, encoding)
        # Name the class explicitly; super(type(self), self) would recurse
        # forever if this class were ever subclassed.
        super(StdinStream, self).__init__(data)
| kuckaogh/csclpp | csclpp/src/csclpp/dts/antlr4/StdinStream.py | Python | gpl-3.0 | 539 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from six import moves
import sys
import testtools
from bileanclient.common import utils
from bileanclient import exc
class ShellTest(testtools.TestCase):
def test_format_parameter_none(self):
self.assertEqual({}, utils.format_parameters(None))
def test_format_parameters(self):
p = utils.format_parameters(['name=bilean_user;status=ACTIVE'])
self.assertEqual({'name': 'bilean_user',
'status': 'ACTIVE'}, p)
def test_format_parameters_split(self):
p = utils.format_parameters([
'name=bilean_user',
'status=ACTIVE'])
self.assertEqual({'name': 'bilean_user',
'status': 'ACTIVE'}, p)
def test_format_parameters_multiple_semicolon_values(self):
p = utils.format_parameters([
'status=ACTIVE',
'name=bilean;user'])
self.assertEqual({'name': 'bilean;user',
'status': 'ACTIVE'}, p)
def test_format_parameters_parse_semicolon_false(self):
p = utils.format_parameters(
['name=bilean;a=b'],
parse_semicolon=False)
self.assertEqual({'name': 'bilean;a=b'}, p)
    def test_format_parameters_multiple_values_per_parameter(self):
p = utils.format_parameters([
'status=ACTIVE',
'status=FREE'])
self.assertIn('status', p)
self.assertIn('ACTIVE', p['status'])
self.assertIn('FREE', p['status'])
def test_format_parameter_bad_parameter(self):
params = ['name=bilean_user;statusACTIVE']
ex = self.assertRaises(exc.CommandError,
utils.format_parameters, params)
self.assertEqual('Malformed parameter(statusACTIVE). '
'Use the key=value format.', str(ex))
def test_format_multiple_bad_parameter(self):
params = ['name=bilean_user', 'statusACTIVE']
ex = self.assertRaises(exc.CommandError,
utils.format_parameters, params)
self.assertEqual('Malformed parameter(statusACTIVE). '
'Use the key=value format.', str(ex))
def test_link_formatter(self):
self.assertEqual('', utils.link_formatter(None))
self.assertEqual('', utils.link_formatter([]))
self.assertEqual(
'http://foo.example.com\nhttp://bar.example.com',
utils.link_formatter([
{'href': 'http://foo.example.com'},
{'href': 'http://bar.example.com'}]))
self.assertEqual(
'http://foo.example.com (a)\nhttp://bar.example.com (b)',
utils.link_formatter([
{'href': 'http://foo.example.com', 'rel': 'a'},
{'href': 'http://bar.example.com', 'rel': 'b'}]))
self.assertEqual(
'\n',
utils.link_formatter([
{'hrf': 'http://foo.example.com'},
{}]))
def test_json_formatter(self):
self.assertEqual('null', utils.json_formatter(None))
self.assertEqual('{}', utils.json_formatter({}))
self.assertEqual('{\n "foo": "bar"\n}',
utils.json_formatter({"foo": "bar"}))
self.assertEqual(u'{\n "Uni": "test\u2665"\n}',
utils.json_formatter({"Uni": u"test\u2665"}))
def test_yaml_formatter(self):
self.assertEqual('null\n...\n', utils.yaml_formatter(None))
self.assertEqual('{}\n', utils.yaml_formatter({}))
self.assertEqual('foo: bar\n',
utils.yaml_formatter({"foo": "bar"}))
def test_text_wrap_formatter(self):
self.assertEqual('', utils.text_wrap_formatter(None))
self.assertEqual('', utils.text_wrap_formatter(''))
self.assertEqual('one two three',
utils.text_wrap_formatter('one two three'))
self.assertEqual(
'one two three four five six seven eight nine ten eleven\ntwelve',
utils.text_wrap_formatter(
('one two three four five six seven '
'eight nine ten eleven twelve')))
def test_newline_list_formatter(self):
self.assertEqual('', utils.newline_list_formatter(None))
self.assertEqual('', utils.newline_list_formatter([]))
self.assertEqual('one\ntwo',
utils.newline_list_formatter(['one', 'two']))
class CaptureStdout(object):
"""Context manager for capturing stdout from statements in its block."""
def __enter__(self):
self.real_stdout = sys.stdout
self.stringio = moves.StringIO()
sys.stdout = self.stringio
return self
def __exit__(self, *args):
sys.stdout = self.real_stdout
self.stringio.seek(0)
self.read = self.stringio.read
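# Minimal usage sketch:
#
#     with CaptureStdout() as cso:
#         print("hello")
#     assert cso.read() == "hello\n"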
class PrintListTestCase(testtools.TestCase):
def test_print_list_with_list(self):
Row = collections.namedtuple('Row', ['foo', 'bar'])
to_print = [Row(foo='fake_foo1', bar='fake_bar2'),
Row(foo='fake_foo2', bar='fake_bar1')]
with CaptureStdout() as cso:
utils.print_list(to_print, ['foo', 'bar'])
# Output should be sorted by the first key (foo)
self.assertEqual("""\
+-----------+-----------+
| foo | bar |
+-----------+-----------+
| fake_foo1 | fake_bar2 |
| fake_foo2 | fake_bar1 |
+-----------+-----------+
""", cso.read())
def test_print_list_with_None_data(self):
Row = collections.namedtuple('Row', ['foo', 'bar'])
to_print = [Row(foo='fake_foo1', bar='None'),
Row(foo='fake_foo2', bar='fake_bar1')]
with CaptureStdout() as cso:
utils.print_list(to_print, ['foo', 'bar'])
# Output should be sorted by the first key (foo)
self.assertEqual("""\
+-----------+-----------+
| foo | bar |
+-----------+-----------+
| fake_foo1 | None |
| fake_foo2 | fake_bar1 |
+-----------+-----------+
""", cso.read())
def test_print_list_with_list_sortby(self):
Row = collections.namedtuple('Row', ['foo', 'bar'])
to_print = [Row(foo='fake_foo1', bar='fake_bar2'),
Row(foo='fake_foo2', bar='fake_bar1')]
with CaptureStdout() as cso:
utils.print_list(to_print, ['foo', 'bar'], sortby_index=1)
        # Output should be sorted by the second column (bar)
self.assertEqual("""\
+-----------+-----------+
| foo | bar |
+-----------+-----------+
| fake_foo2 | fake_bar1 |
| fake_foo1 | fake_bar2 |
+-----------+-----------+
""", cso.read())
def test_print_list_with_list_no_sort(self):
Row = collections.namedtuple('Row', ['foo', 'bar'])
to_print = [Row(foo='fake_foo2', bar='fake_bar1'),
Row(foo='fake_foo1', bar='fake_bar2')]
with CaptureStdout() as cso:
utils.print_list(to_print, ['foo', 'bar'], sortby_index=None)
# Output should be in the order given
self.assertEqual("""\
+-----------+-----------+
| foo | bar |
+-----------+-----------+
| fake_foo2 | fake_bar1 |
| fake_foo1 | fake_bar2 |
+-----------+-----------+
""", cso.read())
def test_print_list_with_generator(self):
Row = collections.namedtuple('Row', ['foo', 'bar'])
def gen_rows():
for row in [Row(foo='fake_foo1', bar='fake_bar2'),
Row(foo='fake_foo2', bar='fake_bar1')]:
yield row
with CaptureStdout() as cso:
utils.print_list(gen_rows(), ['foo', 'bar'])
self.assertEqual("""\
+-----------+-----------+
| foo | bar |
+-----------+-----------+
| fake_foo1 | fake_bar2 |
| fake_foo2 | fake_bar1 |
+-----------+-----------+
""", cso.read())
class PrintDictTestCase(testtools.TestCase):
def test_print_dict(self):
data = {'foo': 'fake_foo', 'bar': 'fake_bar'}
with CaptureStdout() as cso:
utils.print_dict(data)
# Output should be sorted by the Property
self.assertEqual("""\
+----------+----------+
| Property | Value |
+----------+----------+
| bar | fake_bar |
| foo | fake_foo |
+----------+----------+
""", cso.read())
| lvdongbing/python-bileanclient | bileanclient/tests/unit/test_utils.py | Python | apache-2.0 | 8,900 |
import dork_compose.plugin
import os
import time
from docker.api.client import APIClient
from docker.errors import APIError
import logging
log = logging.getLogger(__name__)
class Plugin(dork_compose.plugin.Plugin):
def __init__(self, env, name, command):
dork_compose.plugin.Plugin.__init__(self, env, name, command)
self.__mkdir(self.snapshots)
self.__mkdir(self.volumes)
@property
def volumes(self):
return os.path.expanduser(self.env.get('DORK_FILESYSTEM_VOLUME_PATH', '%s/volumes' % self.datadir))
@property
def volume(self):
return '%s/%s/%s' % (
os.path.expanduser(self.volumes),
self.project,
self.instance
)
@property
def snapshots(self):
return os.path.expanduser(self.env.get('DORK_FILESYSTEM_SNAPSHOT_PATH', '%s/snapshots' % self.datadir))
@property
def snapshot(self):
return '%s/%s' % (os.path.expanduser(self.snapshots), self.project)
    def __mkdir(self, directory):
        # Create the directory on first use.
        if not os.path.exists(directory):
            os.makedirs(directory)
def snapshot_save(self, snapshots=(), volumes=()):
client = APIClient()
for name in snapshots:
snapshot = '%s/%s' % (self.snapshot, name)
for v in volumes:
log.info("Saving volume %s to %s/%s." % (v, snapshot, v))
try:
client.inspect_image('iamdork/rsync')
except APIError:
client.pull('iamdork/rsync')
sync = client.create_container(
image='iamdork/rsync',
volumes=['/destination', '/source'],
cpu_shares=256,
host_config=client.create_host_config(binds=[
'%s/%s:/destination' % (snapshot, v),
'%s:/source' % volumes[v].full_name
]),
)
try:
client.start(sync)
while client.inspect_container(sync)['State']['Running']:
time.sleep(0.5)
finally:
client.remove_container(sync)
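    # Design note: each save/load runs a throwaway `iamdork/rsync` container
    # that bind-mounts the named volume on one side and the snapshot
    # directory on the other, then polls every 0.5s until the copy exits.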
def snapshot_load(self, snapshots=(), volumes=()):
options = list(set(self.snapshot_ls()) & set(snapshots))
client = APIClient()
try:
client.inspect_image('iamdork/rsync')
except APIError:
client.pull('iamdork/rsync')
if len(options):
name = options[-1]
snapshot = '%s/%s' % (self.snapshot, name)
if not os.path.isdir(snapshot):
log.error("Snapshot %s of project %s doesn't exist." % (name, self.project))
return
for v in volumes:
log.info("Restoring volume %s from %s/%s." % (v, snapshot, v))
sync = client.create_container(
image='iamdork/rsync',
volumes=['/destination', '/source'],
host_config=client.create_host_config(binds=[
'%s/%s:/source' % (snapshot, v),
'%s:/destination' % volumes[v].full_name
]),
)
try:
client.start(sync)
while client.inspect_container(sync)['State']['Running']:
time.sleep(0.5)
finally:
client.remove_container(sync)
return name
return None
def snapshot_rm(self, snapshots=()):
client = APIClient()
try:
client.inspect_image('alpine:3.4')
except APIError:
client.pull('alpine:3.4')
for name in snapshots:
snapshot = '%s/%s' % (self.snapshot, name)
if not os.path.isdir(snapshot):
log.error("Snapshot %s of project %s doesn't exist." % (name, self.project))
continue
container = client.create_container(
command='rm -rf /snapshots/%s' % name,
image='alpine:3.4',
volumes=['/snapshots'],
host_config=client.create_host_config(binds=[
'%s:/snapshots' % self.snapshot,
]),
)
try:
client.start(container)
while client.inspect_container(container)['State']['Running']:
time.sleep(0.5)
finally:
client.remove_container(container)
yield name
    def snapshot_ls(self):
        return os.listdir(self.snapshot)
| iamdork/compose | dork_compose/plugins/filesystem.py | Python | mit | 4,662 |
""" PGPy :: Pretty Good Privacy for Python
"""
from ._author import *
from .pgp import PGPKey
from .pgp import PGPKeyring
from .pgp import PGPMessage
from .pgp import PGPSignature
from .pgp import PGPDetachedSignature
from .pgp import PGPUID
__all__ = ['__author__',
'__copyright__',
'__license__',
'__version__',
'constants',
'errors',
'PGPKey',
'PGPKeyring',
'PGPMessage',
'PGPSignature',
'PGPDetachedSignature',
'PGPUID', ]
| J08nY/PGPy | pgpy/__init__.py | Python | bsd-3-clause | 550 |
# -*- coding: utf-8 -*-
# Natural Language Toolkit: An Crubadan N-grams Reader
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Avital Pekker <avital.pekker@utoronto.ca>
#
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
An NLTK interface for the n-gram statistics gathered from
the corpora for each language using An Crubadan.
There are multiple potential applications for the data but
this reader was created with the goal of using it in the
context of language identification.
For details about An Crubadan, this data, and its potential uses, see:
http://borel.slu.edu/crubadan/index.html
"""
from __future__ import print_function, unicode_literals
import re
from nltk.compat import PY3
from os import path
from nltk.corpus.reader import CorpusReader
from nltk.probability import FreqDist
from nltk.data import ZipFilePathPointer
class CrubadanCorpusReader(CorpusReader):
"""
A corpus reader used to access language An Crubadan n-gram files.
"""
_LANG_MAPPER_FILE = 'table.txt'
_all_lang_freq = {}
def __init__(self, root, fileids, encoding='utf8', tagset=None):
super(CrubadanCorpusReader, self).__init__(root, fileids, encoding='utf8')
self._lang_mapping_data = []
self._load_lang_mapping_data()
def lang_freq(self, lang):
''' Return n-gram FreqDist for a specific language
given ISO 639-3 language code '''
if lang not in self._all_lang_freq:
self._all_lang_freq[lang] = self._load_lang_ngrams(lang)
return self._all_lang_freq[lang]
def langs(self):
''' Return a list of supported languages as ISO 639-3 codes '''
return [row[1] for row in self._lang_mapping_data]
def iso_to_crubadan(self, lang):
''' Return internal Crubadan code based on ISO 639-3 code '''
for i in self._lang_mapping_data:
if i[1].lower() == lang.lower():
return i[0]
def crubadan_to_iso(self, lang):
''' Return ISO 639-3 code given internal Crubadan code '''
for i in self._lang_mapping_data:
if i[0].lower() == lang.lower():
return i[1]
def _load_lang_mapping_data(self):
''' Load language mappings between codes and description from table.txt '''
if isinstance(self.root, ZipFilePathPointer):
raise RuntimeError("Please install the 'crubadan' corpus first, use nltk.download()")
mapper_file = path.join(self.root, self._LANG_MAPPER_FILE)
if self._LANG_MAPPER_FILE not in self.fileids():
raise RuntimeError("Could not find language mapper file: " + mapper_file)
        if PY3:
            with open(mapper_file, 'r', encoding='utf-8') as f:
                raw = f.read().strip()
        else:
            with open(mapper_file, 'rU') as f:
                raw = f.read().decode('utf-8').strip()
self._lang_mapping_data = [row.split('\t') for row in raw.split('\n')]
def _load_lang_ngrams(self, lang):
''' Load single n-gram language file given the ISO 639-3 language code
and return its FreqDist '''
if lang not in self.langs():
raise RuntimeError("Unsupported language.")
crubadan_code = self.iso_to_crubadan(lang)
ngram_file = path.join(self.root, crubadan_code + '-3grams.txt')
if not path.isfile(ngram_file):
            raise RuntimeError("No N-gram file found for requested language.")
counts = FreqDist()
if PY3:
f = open(ngram_file, 'r', encoding='utf-8')
else:
f = open(ngram_file, 'rU')
for line in f:
if PY3:
data = line.split(' ')
else:
data = line.decode('utf8').split(' ')
ngram = data[1].strip('\n')
freq = int(data[0])
counts[ngram] = freq
        f.close()
        return counts
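# Usage sketch (requires the 'crubadan' corpus, e.g. via
# nltk.download('crubadan')):
#
#     from nltk.corpus import crubadan
#     fd = crubadan.lang_freq('eng')   # trigram FreqDist for English
#     fd.most_common(5)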
| Reagankm/KnockKnock | venv/lib/python3.4/site-packages/nltk/corpus/reader/crubadan.py | Python | gpl-2.0 | 3,946 |
#!/usr/bin/env python3
import sys
import os
import argparse
from pylint import lint
# Use the current development version of SkoolKit
SKOOLKIT_HOME = os.environ.get('SKOOLKIT_HOME')
if not SKOOLKIT_HOME:
sys.stderr.write('SKOOLKIT_HOME is not set; aborting\n')
sys.exit(1)
if not os.path.isdir(SKOOLKIT_HOME):
sys.stderr.write('SKOOLKIT_HOME={}; directory not found\n'.format(SKOOLKIT_HOME))
sys.exit(1)
os.chdir(SKOOLKIT_HOME)
# Messages to ignore by default
IGNORE_MSG_IDS = [
'C0103', # Invalid name for variable, constant, class
'C0111', # Missing docstring
'C0301', # Line too long
'C0302', # Too many lines in module
'C0325', # Unnecessary parens after 'print' keyword
'C1001', # Old-style class
'E0601', # Variable used before assignment
'E0611', # No such name in module
'E1101', # No such member in instance
'F0401', # Import error
'R0201', # Method could be a function
'R0902', # Too many instance attributes
'R0903', # Too few public methods
'R0904', # Too many public methods
'R0911', # Too many return statements
'R0912', # Too many branches
'R0913', # Too many arguments
'R0914', # Too many local variables
'R0915', # Too many statements
'R1710', # Inconsistent return statements
'W0142', # Used * or ** magic
'W0201', # Attribute defined outside __init__
'W0232', # Class has no __init__ method
'W0601', # Global variable undefined at the module level
'W0603', # Using the global statement
'W0631', # Using possibly undefined loop variable
'W0632', # Possible unbalanced tuple unpacking
'W0633', # Attempting to unpack a non-sequence
'W1401', # Anomalous backslash in string
]
# pylint options
OPTIONS = (
"--msg-template='{path}:{line}: [{msg_id}:{symbol}:{obj}] {msg}'",
)
###############################################################################
# Begin
###############################################################################
parser = argparse.ArgumentParser(
usage='lint.py [options]',
description="Run pylint on the SkoolKit code or test modules.",
add_help=False
)
group = parser.add_argument_group('Options')
group.add_argument('-d', dest='message_ids', metavar='LIST',
help='Disable the messages in this comma-separated list (in addition to those disabled by default)')
group.add_argument('-i', dest='ignore_indentation', action='store_true',
help='Ignore messages about hanging indentation (C0330)')
group.add_argument('-s', dest='ignore_similarities', action='store_true',
help='Ignore messages about code similarities (R0801)')
group.add_argument('-t', dest='test', action='store_true',
help='Run pylint on the SkoolKit test modules')
group.add_argument('-u', dest='ignore_unused', action='store_true',
help='Ignore messages about unused variables (W0612) and arguments (W0613)')
namespace, unknown_args = parser.parse_known_args()
if unknown_args:
parser.exit(2, parser.format_help())
extra_ignores = []
if namespace.message_ids:
extra_ignores.extend(namespace.message_ids.split(','))
if namespace.ignore_indentation:
extra_ignores.append('C0330')
if namespace.ignore_similarities:
extra_ignores.append('R0801')
if namespace.ignore_unused:
extra_ignores.append('W0612')
extra_ignores.append('W0613')
if namespace.test:
os.chdir('tests')
args = [m for m in os.listdir('.') if m.endswith('.py')]
extra_ignores.append('F0401') # Unable to import 'skoolkit.*'
else:
args = ['skoolkit']
IGNORE_MSG_IDS.extend(extra_ignores)
args.extend(OPTIONS)
args.extend(('-d', ','.join(IGNORE_MSG_IDS)))
print('{0}\npylint {1}\n{0}'.format('-' * 80, ' '.join(args)))
lint.Run(args)
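# Example invocations (sketch; SKOOLKIT_HOME must point at a SkoolKit clone):
#
#     $ ./lint.py         # lint the skoolkit package
#     $ ./lint.py -t -u   # lint the test modules, ignoring unused variables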
| skoolkid/skoolkit | tools/lint.py | Python | gpl-3.0 | 3,797 |
# -*- coding: utf-8 -*-
#
# -----------------------------------------------------------------------------------
# Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------------
import sys
from client.enum import LoginProvider
sys.path.append("..")
reload(sys)
sys.setdefaultencoding('utf-8')
import time
from datetime import datetime, timedelta
import re
import json
import requests
import markdown
from flask_login import login_required, login_user, LoginManager, current_user
from flask import Response, render_template, request, g, redirect, make_response, session, url_for, abort
from client import app, Context
from client.constants import LOGIN_PROVIDER
from client.user.login_manager import login_manager_helper
from client.functions import get_config, safe_get_config, get_now
from client.log import log
API_HACKATHON = "/api/hackathon"
API_HACKATHON_LIST = "/api/hackathon/list"
API_HACKATHON_TEMPLATE = "/api/hackathon/template"
API_HACKATHON_REGISTRATION = "/api/user/registration"
API_TEAM_MEMBER_LIST = "/api/team/member/list"
API_TEAM_USER = "/api/user/team/list"
API_TEAM = "/api/team"
API_MY_TEAM = "/api/team/my"
login_manager = LoginManager()
login_manager.init_app(app)
def __oauth_meta_content():
return {
LOGIN_PROVIDER.WEIBO: get_config('login.weibo.meta_content'),
LOGIN_PROVIDER.QQ: get_config('login.qq.meta_content')
}
def __oauth_api_key():
return {
LOGIN_PROVIDER.WEIBO: get_config('login.weibo.client_id'),
LOGIN_PROVIDER.QQ: get_config('login.qq.client_id'),
LOGIN_PROVIDER.LIVE: get_config('login.live.client_id'),
LOGIN_PROVIDER.WECHAT: get_config("login.wechat.client_id"),
LOGIN_PROVIDER.GITHUB: get_config('login.github.client_id')
}
def render(template_name_or_list, **context):
ui = ""
if current_user and current_user.is_authenticated():
ui = " for user '%s'" % current_user.name
log.debug("rendering template '%s' %s" % (template_name_or_list, ui))
return render_template(template_name_or_list,
meta_content=__oauth_meta_content(),
oauth_api_key=__oauth_api_key(),
**context)
def __login_failed(provider):
if provider == "mysql":
error = "Login failed. username or password invalid."
return render("/superadmin.html", error=error)
return redirect("/")
def __login(provider):
try:
user_with_token = login_manager_helper.login(provider)
if user_with_token is None:
return __login_failed(provider)
log.info("login successfully:" + repr(user_with_token))
token = user_with_token["token"]
login_user(user_with_token["user"])
session["token"] = token
if session.get("return_url") is not None:
resp = make_response(redirect(session["return_url"]))
session["return_url"] = None
else:
resp = make_response(redirect(url_for("index")))
resp.set_cookie('token', token)
return resp
except Exception as ex:
log.error(ex)
return __login_failed(provider)
def __date_serializer(date):
return long((date - datetime(1970, 1, 1)).total_seconds() * 1000)
def __get_api(url, headers=None, **kwargs):
default_headers = {"content-type": "application/json"}
if headers is not None and isinstance(headers, dict):
default_headers.update(headers)
try:
req = requests.get(get_config("hackathon-api.endpoint") + url, headers=default_headers, **kwargs)
resp = req.content
return json.loads(resp)
except Exception:
        abort(500, 'API service is not available')
@app.context_processor
def utility_processor():
def get_now_serialized():
return __date_serializer(get_now())
def activity_progress(starttime, endtime):
return ((int(time.time() * 1e3) - starttime) * 1.0 / (endtime - starttime) * 1.0) * 100
def get_provides(value):
prs = []
if value is None:
return ""
else:
value = int(value)
if value == 255:
return ""
else:
if value & LoginProvider.live == LoginProvider.live:
prs.append("live")
if value & LoginProvider.github == LoginProvider.github:
prs.append("github")
if value & LoginProvider.weibo == LoginProvider.weibo:
prs.append("weibo")
if value & LoginProvider.qq == LoginProvider.qq:
prs.append("qq")
if value & LoginProvider.wechat == LoginProvider.wechat:
prs.append("wechat")
if value & LoginProvider.alauda == LoginProvider.alauda:
prs.append("alauda")
return ",".join(prs)
return dict(get_now=get_now_serialized, activity_progress=activity_progress, get_provides=get_provides)
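# Note: get_provides() above decodes a LoginProvider bitmask -- 255 (all bits
# set) renders as the empty string, anything else as a comma-separated list of
# provider names for the templates.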
@app.template_filter('mkHTML')
def to_markdown_html(text):
if text is None:
text = ""
return markdown.markdown(text)
@app.template_filter('stripTags')
def strip_tags(html):
if html is None:
html = ""
return re.sub(r"</?\w+[^>]*>", "", html)
@app.template_filter('limitTo')
def limit_to(text, limit=100):
if text is None:
text = ""
text = unicode(text)
return text[0:limit]
@app.template_filter('deadline')
def deadline(endtime):
end_date = datetime.fromtimestamp(endtime / 1e3)
if end_date > datetime.now():
return (end_date - datetime.now()).days
else:
return "--"
week = ['周一', '周二', '周三', '周四', '周五', '周六', '周日']
@app.template_filter('date')
def to_datetime(datelong, fmt=''):
if fmt:
date = datetime.fromtimestamp(datelong / 1e3)
fmt = re.compile('%a', re.I).sub(week[date.weekday()], fmt)
return date.strftime(fmt)
else:
return datetime.fromtimestamp(datelong / 1e3).strftime("%y/%m/%d")
@login_manager.user_loader
def load_user(id):
return login_manager_helper.load_user(id)
@login_manager.unauthorized_handler
def unauthorized_log():
return render("/login.html",
error=None,
providers=safe_get_config("login.provider_enabled",
["github", "qq", "wechat", "weibo", "live", "alauda"]))
@app.before_request
def make_session_permanent():
g.user = current_user
session.permanent = True
app.permanent_session_lifetime = timedelta(minutes=safe_get_config("login.session_valid_time_minutes", 60))
@app.errorhandler(401)
def custom_401(e):
return render("/login.html", error=None)
@app.errorhandler(404)
def page_not_found(e):
return render('/404.html'), 404
@app.errorhandler(500)
def server_error(e):
return render('error.html', error=e), 500
# js config
@app.route('/config.js')
def js_config():
resp = Response(response="var CONFIG=%s" % json.dumps(get_config("javascript")),
status=200,
mimetype="application/javascript")
return resp
@app.route('/github')
def github_login():
return __login(LOGIN_PROVIDER.GITHUB)
@app.route('/weibo')
def weibo_login():
return __login(LOGIN_PROVIDER.WEIBO)
@app.route('/wechat')
def wechat_login():
return __login(LOGIN_PROVIDER.WECHAT)
@app.route('/qq')
def qq_login():
return __login(LOGIN_PROVIDER.QQ)
@app.route('/live')
def live_login():
return __login(LOGIN_PROVIDER.LIVE)
@app.route('/alauda')
def alauda_login():
return __login(LOGIN_PROVIDER.ALAUDA)
@app.route('/')
@app.route('/index')
def index():
landing_page_visited = request.cookies.get('ohplpv')
if not landing_page_visited:
return redirect("/landing")
empty_items = {
"items": []
}
newest_hackathons = __get_api(API_HACKATHON_LIST, {"token": session.get("token")},
params={"page": 1, "per_page": 6, "order_by": "create_time", "status": 1})
hot_hackathons = __get_api(API_HACKATHON_LIST, {"token": session.get("token")},
params={"page": 1, "per_page": 6, "order_by": "registered_users_num", "status": 1})
soon_hackathon = __get_api(API_HACKATHON_LIST, {"token": session.get("token")},
params={"page": 1, "per_page": 6, "order_by": "event_start_time", "status": 1})
newest_hackathons = empty_items if "error" in newest_hackathons else newest_hackathons
hot_hackathons = empty_items if "error" in hot_hackathons else hot_hackathons
soon_hackathon = empty_items if "error" in soon_hackathon else soon_hackathon
return render('/home.html', newest_hackathons=newest_hackathons, hot_hackathons=hot_hackathons,
soon_hackathon=soon_hackathon, sc=False)
@app.route('/shuangchuang')
def shuangchuang():
empty_items = {
"items": []
}
newest_hackathons = __get_api(API_HACKATHON_LIST, {"token": session.get("token")},
params={"page": 1, "per_page": 3, "order_by": "create_time", "status": 1})
hot_hackathons = __get_api(API_HACKATHON_LIST, {"token": session.get("token")},
params={"page": 1, "per_page": 3, "order_by": "registered_users_num", "status": 1})
soon_hackathon = __get_api(API_HACKATHON_LIST, {"token": session.get("token")},
params={"page": 1, "per_page": 3, "order_by": "event_start_time", "status": 1})
newest_hackathons = empty_items if "error" in newest_hackathons else newest_hackathons
hot_hackathons = empty_items if "error" in hot_hackathons else hot_hackathons
soon_hackathon = empty_items if "error" in soon_hackathon else soon_hackathon
return render('/home.html', newest_hackathons=newest_hackathons, hot_hackathons=hot_hackathons,
soon_hackathon=soon_hackathon, sc=True)
@app.route('/help')
def help():
return render('/help.html')
@app.route('/about')
def about():
return render('/about.html')
@app.route("/logout")
@login_required
def logout():
login_manager_helper.logout(session.get("token"))
return_url = request.args.get("return_url", "/")
if "manage/" in return_url:
return_url = "/"
resp = redirect(return_url)
resp.set_cookie('token', '', expires=0)
return resp
@app.route("/login")
def login():
session["return_url"] = request.args.get("return_url")
provider = request.args.get("provides")
prs = ["github", "qq", "wechat", "weibo", "live", "alauda"]
if provider is None:
provider = safe_get_config("login.provider_enabled", prs)
else:
provider = provider.split(',')
return render("/login.html",
error=None,
providers=provider)
@app.route("/site/<hackathon_name>")
def hackathon(hackathon_name):
headers = {"hackathon_name": hackathon_name, "token": session.get("token")}
data = __get_api(API_HACKATHON, headers)
data = Context.from_object(data)
reg = Context.from_object(__get_api(API_HACKATHON_REGISTRATION, headers))
if data.get('error') is not None:
return render("/404.html")
else:
return render("/site/hackathon.html",
hackathon_name=hackathon_name,
hackathon=data.get("hackathon", data),
user=data.get("user"),
registration=data.get("registration"),
team=data.get("team"),
experiment=reg.get("experiment"))
@app.route("/site/<hackathon_name>/workspace")
@login_required
def workspace(hackathon_name):
headers = {"hackathon_name": hackathon_name, "token": session.get("token")}
reg = Context.from_object(__get_api(API_HACKATHON_REGISTRATION, headers))
if reg.get('registration') is not None:
if reg.registration.status == 1 or reg.registration.status == 3:
return render("/site/workspace.html", hackathon_name=hackathon_name,
workspace=True,
asset=reg.get("asset"),
hackathon=reg.get("hackathon"),
experiment=reg.get('experiment', {id: 0}))
else:
return redirect(url_for('hackathon', hackathon_name=hackathon_name))
else:
return redirect(url_for('hackathon', hackathon_name=hackathon_name))
@app.route("/site/<hackathon_name>/settings")
@login_required
def temp_settings(hackathon_name):
headers = {"hackathon_name": hackathon_name, "token": session.get("token")}
reg = Context.from_object(
__get_api(API_HACKATHON, {"hackathon_name": hackathon_name, "token": session.get("token")}))
if reg.get('registration') is not None:
if reg.get('experiment') is not None:
return redirect(url_for('workspace', hackathon_name=hackathon_name))
elif reg.registration.status == 1 or reg.registration.status == 3:
templates = Context.from_object(__get_api(API_HACKATHON_TEMPLATE, headers))
return render("/site/settings.html", hackathon_name=hackathon_name, templates=templates)
else:
return redirect(url_for('hackathon', hackathon_name=hackathon_name))
else:
return redirect(url_for('hackathon', hackathon_name=hackathon_name))
@app.route("/site/<hackathon_name>/team")
@login_required
def my_team(hackathon_name):
headers = {"hackathon_name": hackathon_name, "token": session.get("token")}
team = Context.from_object(__get_api(API_MY_TEAM, headers))
return render_team_page(hackathon_name, team)
@app.route("/site/<hackathon_name>/team/<tid>")
def create_join_team(hackathon_name, tid):
headers = {"hackathon_name": hackathon_name, "token": session.get("token")}
team = Context.from_object(__get_api(API_TEAM, headers, params={"id": tid}))
return render_team_page(hackathon_name, team)
def render_team_page(hackathon_name, team):
if team.get('error') is not None:
return redirect(url_for('hackathon', hackathon_name=hackathon_name))
else:
role = team.get('is_admin') and 4 or 0
role += team.get('is_leader') and 2 or 0
role += team.get('is_member') and 1 or 0
return render("/site/team.html", hackathon_name=hackathon_name, team=team, role=role)
@app.route("/signin", methods=['GET', 'POST'])
@app.route("/admin", methods=['GET', 'POST'])
def superadmin():
if request.method == 'POST':
return __login(LOGIN_PROVIDER.DB)
return render("/superadmin.html")
@app.route("/landing")
def landing():
return render("/landing.html")
@app.route("/events")
def events():
return render("/events.html")
from route_manage import *
from route_template import *
from route_user import *
from route_team import *
| lclchen/open-hackathon | open-hackathon-client/src/client/views/__init__.py | Python | mit | 16,081 |
#!/usr/bin/env python
#
#CustomScript extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from Utils import HandlerUtil
from common import CommonVariables
import base64
import json
class ParameterParser(object):
def __init__(self, protected_settings, public_settings):
"""
TODO: we should validate the parameter first
"""
self.blobs = []
self.backup_metadata = None
self.public_config_obj = None
self.private_config_obj = None
self.blobs = None
"""
get the public configuration
"""
self.commandToExecute = public_settings.get(CommonVariables.command_to_execute)
self.taskId = public_settings.get(CommonVariables.task_id)
self.locale = public_settings.get(CommonVariables.locale)
self.publicObjectStr = public_settings.get(CommonVariables.object_str)
if(self.publicObjectStr is not None and self.publicObjectStr != ""):
decoded_public_obj_string = base64.standard_b64decode(self.publicObjectStr)
decoded_public_obj_string = decoded_public_obj_string.strip()
decoded_public_obj_string = decoded_public_obj_string.strip('\'')
self.public_config_obj = json.loads(decoded_public_obj_string)
self.backup_metadata = self.public_config_obj['backupMetadata']
"""
first get the protected configuration
"""
self.logsBlobUri = protected_settings.get(CommonVariables.logs_blob_uri)
self.privateObjectStr = protected_settings.get(CommonVariables.object_str)
        if(self.privateObjectStr is not None and self.privateObjectStr != ""):
decoded_private_obj_string = base64.standard_b64decode(self.privateObjectStr)
decoded_private_obj_string = decoded_private_obj_string.strip()
decoded_private_obj_string = decoded_private_obj_string.strip('\'')
self.private_config_obj = json.loads(decoded_private_obj_string)
self.blobs = self.private_config_obj['blobSASUri']
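# Inferred settings shape (field names come from CommonVariables; this is a
# reading of the accesses above, not authoritative documentation):
#
#     public settings:    commandToExecute, taskId, locale, and a base64 JSON
#                         objectStr carrying 'backupMetadata'
#     protected settings: logsBlobUri and a base64 JSON objectStr carrying
#                         'blobSASUri'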
| Snesha/azure-linux-extensions | VMBackup/main/parameterparser.py | Python | apache-2.0 | 2,609 |
# -*- coding: utf-8 -*-
#
# Copyright 2008-2009 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""tests for storage base classes"""
from __future__ import print_function
import os
import six
import warnings
from io import BytesIO
from translate.misc.multistring import multistring
from translate.storage import base, factory
from translate.storage.placeables import general, parse as rich_parse
def headerless_len(units):
"""return count of translatable (non header) units"""
return len(list(filter(lambda x: not x.isheader(), units)))
def first_translatable(store):
"""returns first translatable unit, skipping header if present"""
if store.units[0].isheader() and len(store.units) > 1:
return store.units[1]
else:
return store.units[0]
class TestTranslationUnit:
"""Tests a TranslationUnit.
Derived classes can reuse these tests by pointing UnitClass to a derived Unit"""
UnitClass = base.TranslationUnit
def setup_method(self, method):
self.unit = self.UnitClass("Test String")
def test_isfuzzy(self):
"""Test that we can call isfuzzy() on a unit.
The default return value for isfuzzy() should be False.
"""
assert not self.unit.isfuzzy()
def test_create(self):
"""tests a simple creation with a source string"""
unit = self.unit
print('unit.source:', unit.source)
assert unit.source == "Test String"
def test_eq(self):
"""tests equality comparison"""
unit1 = self.unit
unit2 = self.UnitClass("Test String")
unit3 = self.UnitClass("Test String")
unit4 = self.UnitClass("Blessed String")
unit5 = self.UnitClass("Blessed String")
unit6 = self.UnitClass("Blessed String")
assert unit1 == unit1
assert unit1 == unit2
assert unit1 != unit4
unit1.target = "Stressed Ting"
unit2.target = "Stressed Ting"
unit5.target = "Stressed Bling"
unit6.target = "Stressed Ting"
assert unit1 == unit2
assert unit1 != unit3
assert unit4 != unit5
if unit1.__class__.__name__ in ('RESXUnit', 'dtdunit', 'TxtUnit',
'JsonUnit', 'l20nunit', 'YAMLUnit',
'WebExtensionJsonUnit'):
# unit1 will generally equal unit6 for monolingual formats (resx, dtd, txt, l20n)
# with the default comparison method which compare units by their
# target and source properties only.
# For other monolingual formats:
            # - AndroidResourceUnit is comparing units by their full xml serialization (overridden __eq__)
# - phpunit and propunit (properties) can have different source/target
# and are reunited when serializing through `self.translation or self.value`
assert unit1 == unit6
assert not(unit1 != unit6)
else:
assert unit1 != unit6
assert not(unit1 == unit6)
def test_target(self):
unit = self.unit
assert not unit.target
unit.target = "Stressed Ting"
assert unit.target == "Stressed Ting"
unit.target = "Stressed Bling"
assert unit.target == "Stressed Bling"
unit.target = ""
assert unit.target == ""
def test_escapes(self):
"""Test all sorts of characters that might go wrong in a quoting and
escaping roundtrip."""
unit = self.unit
specials = ['Fish & chips', 'five < six', 'six > five', 'five < six',
'Use ', 'Use &nbsp;', 'Use &amp;nbsp;',
'A "solution"', "skop 'n bal", '"""', "'''", u'µ',
'\n', '\t', '\r', '\r\n', '\\r', '\\', '\\\r']
for special in specials:
unit.source = special
print("unit.source:", repr(unit.source))
print("special:", repr(special))
assert unit.source == special
def test_difficult_escapes(self):
"""Test difficult characters that might go wrong in a quoting and
escaping roundtrip."""
unit = self.unit
specials = ['\\n', '\\t', '\\"', '\\ ',
'\\\n', '\\\t', '\\\\n', '\\\\t', '\\\\r', '\\\\"',
'\\r\\n', '\\\\r\\n', '\\r\\\\n', '\\\\n\\\\r']
for special in specials:
unit.source = special
print("unit.source:", repr(unit.source) + '|')
print("special:", repr(special) + '|')
assert unit.source == special
def test_note_sanity(self):
"""Tests that all subclasses of the base behaves consistently with regards to notes."""
unit = self.unit
unit.addnote(u"Test note 1", origin="translator")
unit.addnote(u"Test note 2", origin="translator")
unit.addnote(u"Test note 3", origin="translator")
expected_notes = u"Test note 1\nTest note 2\nTest note 3"
actual_notes = unit.getnotes(origin="translator")
assert actual_notes == expected_notes
# Test with no origin.
unit.removenotes()
assert not unit.getnotes()
unit.addnote(u"Test note 1")
unit.addnote(u"Test note 2")
unit.addnote(u"Test note 3")
expected_notes = u"Test note 1\nTest note 2\nTest note 3"
actual_notes = unit.getnotes()
assert actual_notes == expected_notes
def test_rich_get(self):
"""Basic test for converting from multistrings to StringElem trees."""
target_mstr = multistring([u'tėst', u'<b>string</b>'])
unit = self.UnitClass(multistring([u'a', u'b']))
unit.rich_parsers = general.parsers
unit.target = target_mstr
elems = unit.rich_target
if unit.hasplural():
assert len(elems) == 2
assert len(elems[0].sub) == 1
assert len(elems[1].sub) == 3
assert six.text_type(elems[0]) == target_mstr.strings[0]
assert six.text_type(elems[1]) == target_mstr.strings[1]
assert six.text_type(elems[1].sub[0]) == u'<b>'
assert six.text_type(elems[1].sub[1]) == u'string'
assert six.text_type(elems[1].sub[2]) == u'</b>'
else:
assert len(elems[0].sub) == 1
assert six.text_type(elems[0]) == target_mstr.strings[0]
def test_rich_set(self):
"""Basic test for converting from multistrings to StringElem trees."""
elems = [
rich_parse(u'Tëst <x>string</x>', general.parsers),
rich_parse(u'Another test string.', general.parsers),
]
unit = self.UnitClass(multistring([u'a', u'b']))
unit.rich_target = elems
if unit.hasplural():
assert unit.target.strings[0] == u'Tëst <x>string</x>'
assert unit.target.strings[1] == u'Another test string.'
else:
assert unit.target == u'Tëst <x>string</x>'
class TestTranslationStore(object):
"""Tests a TranslationStore.
Derived classes can reuse these tests by pointing StoreClass to a derived Store"""
StoreClass = base.TranslationStore
def setup_method(self, method):
"""Allocates a unique self.filename for the method, making sure it doesn't exist"""
self.filename = "%s_%s.test" % (self.__class__.__name__, method.__name__)
if os.path.exists(self.filename):
os.remove(self.filename)
warnings.resetwarnings()
def teardown_method(self, method):
"""Makes sure that if self.filename was created by the method, it is cleaned up"""
if os.path.exists(self.filename):
os.remove(self.filename)
warnings.resetwarnings()
def test_create_blank(self):
"""Tests creating a new blank store"""
store = self.StoreClass()
assert headerless_len(store.units) == 0
def test_add(self):
"""Tests adding a new unit with a source string"""
store = self.StoreClass()
unit = store.addsourceunit("Test String")
print(str(unit))
print(bytes(store))
assert headerless_len(store.units) == 1
assert unit.source == "Test String"
def test_find(self):
"""Tests searching for a given source string"""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit2 = store.addsourceunit("Blessed String")
assert store.findunit("Test String") == unit1
assert store.findunit("Blessed String") == unit2
assert store.findunit("Nest String") is None
def test_translate(self):
"""Tests the translate method and non-ascii characters."""
store = self.StoreClass()
unit = store.addsourceunit("scissor")
unit.target = u"skêr"
unit = store.addsourceunit(u"Beziér curve")
unit.target = u"Beziér-kurwe"
assert store.translate("scissor") == u"skêr"
assert store.translate(u"Beziér curve") == u"Beziér-kurwe"
def reparse(self, store):
"""converts the store to a string and back to a store again"""
storestring = bytes(store)
newstore = self.StoreClass.parsestring(storestring)
return newstore
def check_equality(self, store1, store2):
"""asserts that store1 and store2 are the same"""
assert headerless_len(store1.units) == headerless_len(store2.units)
for n, store1unit in enumerate(store1.units):
store2unit = store2.units[n]
match = store1unit == store2unit
if not match:
print("match failed between elements %d of %d" % ((n + 1), headerless_len(store1.units)))
print("store1:")
print(bytes(store1))
print("store2:")
print(bytes(store2))
print("store1.units[%d].__dict__:" % n, store1unit.__dict__)
print("store2.units[%d].__dict__:" % n, store2unit.__dict__)
assert store1unit == store2unit
def test_parse(self):
"""Tests converting to a string and parsing the resulting string"""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit1.target = "Test String"
unit2 = store.addsourceunit("Test String 2")
unit2.target = "Test String 2"
newstore = self.reparse(store)
self.check_equality(store, newstore)
def test_files(self):
"""Tests saving to and loading from files"""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit1.target = "Test String"
unit2 = store.addsourceunit("Test String 2")
unit2.target = "Test String 2"
store.savefile(self.filename)
newstore = self.StoreClass.parsefile(self.filename)
self.check_equality(store, newstore)
def test_save(self):
"""Tests that we can save directly back to the original file."""
store = self.StoreClass()
unit1 = store.addsourceunit("Test String")
unit1.target = "Test String"
unit2 = store.addsourceunit("Test String 2")
unit2.target = "Test String 2"
store.savefile(self.filename)
store.save()
newstore = self.StoreClass.parsefile(self.filename)
self.check_equality(store, newstore)
def test_markup(self):
"""Tests that markup survives the roundtrip. Most usefull for xml types."""
store = self.StoreClass()
unit = store.addsourceunit("<vark@hok.org> %d keer %2$s")
assert unit.source == "<vark@hok.org> %d keer %2$s"
unit.target = "bla"
assert store.translate("<vark@hok.org> %d keer %2$s") == "bla"
def test_nonascii(self):
store = self.StoreClass()
unit = store.addsourceunit(u"Beziér curve")
unit.target = u"Beziér-kurwe"
answer = store.translate(u"Beziér curve")
if isinstance(answer, bytes):
answer = answer.decode("utf-8")
assert answer == u"Beziér-kurwe"
        # Just test that serialization doesn't raise an exception:
src = store.serialize(BytesIO())
def test_extensions(self):
"""Test that the factory knows the extensions for this class."""
supported = factory.supported_files()
supported_dict = dict([(name, (extensions, mimetypes)) for name, extensions, mimetypes in supported])
if not (self.StoreClass.Name and self.StoreClass.Name in supported_dict):
return
detail = supported_dict[self.StoreClass.Name] # will start to get problematic once translated
print("Factory:", detail[0])
print("StoreClass:", self.StoreClass.Extensions)
for ext in detail[0]:
assert ext in self.StoreClass.Extensions
for ext in self.StoreClass.Extensions:
assert ext in detail[0]
def test_mimetypes(self):
"""Test that the factory knows the mimetypes for this class."""
supported = factory.supported_files()
supported_dict = dict([(name, (extensions, mimetypes)) for name, extensions, mimetypes in supported])
if not (self.StoreClass.Name and self.StoreClass.Name in supported_dict):
return
detail = supported_dict[self.StoreClass.Name] # will start to get problematic once translated
print("Factory:", detail[1])
print("StoreClass:", self.StoreClass.Mimetypes)
for ext in detail[1]:
assert ext in self.StoreClass.Mimetypes
for ext in self.StoreClass.Mimetypes:
assert ext in detail[1]
| diorcety/translate | translate/storage/test_base.py | Python | gpl-2.0 | 14,264 |
from models.usuarios import Usuario
import utils
__author__ = 'alexandreferreira'
from Crypto import Random
class Token:
def __init__(self):
self._id = None
self.token = None
self.usuario = None
self.expired = False
@staticmethod
def generate_token():
rndfile = Random.new()
return rndfile.read(40).encode("hex")
def expired_token(self):
self.expired = True
@staticmethod
def get_usuario_from_token(token):
usuario_dict = Token.collection().find_one({'token': token})
if usuario_dict:
usuario = Usuario()
usuario = Usuario.make_usuario_from_dict(usuario, usuario_dict)
return usuario
else:
return None
@staticmethod
def validate_token(token):
if token:
u = Token.collection().find_one({'token': token})
if u:
return True
else:
return False
else:
return False
def make_dict(self):
token_dict = {}
if self._id:
token_dict['_id'] = self._id
if self.token:
token_dict['token'] = self.token
if self.usuario:
token_dict['usuario'] = self.usuario
if self.expired:
token_dict['expired'] = self.expired
return token_dict
def make_from_dict(self, token_dict):
if token_dict.get('_id'):
self._id = token_dict.get('_id')
if token_dict.get('token'):
self.token = token_dict.get('token')
if token_dict.get('usuario'):
self.usuario = token_dict.get('usuario')
if token_dict.get('expired'):
self.expired = token_dict.get('expired')
return self
@staticmethod
def collection():
db = utils.connect_mongo()
return db.tokens
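# Minimal usage sketch (hypothetical; assumes a reachable MongoDB behind
# utils.connect_mongo()):
#
#     t = Token()
#     t.token = Token.generate_token()  # 40 random bytes as 80 hex chars
#     Token.collection().insert(t.make_dict())
#     assert Token.validate_token(t.token)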
| alexandreferreira/tata_library | models/token.py | Python | apache-2.0 | 1,887 |
from math import sqrt
def euclidean_distance(p1, p2):
"""
    Compute the Euclidean distance between two points.
    :param p1: first point as an (x, y) pair
    :param p2: second point as an (x, y) pair
    :return: straight-line distance between p1 and p2
"""
dx, dy = p2[0] - p1[0], p2[1] - p1[1]
    # Magnitude of the displacement vector (the Euclidean norm).
    return sqrt(dx ** 2 + dy ** 2)
| dsaldana/roomba_sensor_network | roomba_sensor/src/roomba_sensor/util/geo.py | Python | gpl-3.0 | 268 |
from a10sdk.common.A10BaseClass import A10BaseClass
class VlanGlobal(A10BaseClass):
"""Class Description::
Configure global options for vlan.
Class vlan-global supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the "PARENT" class for this module.
:param l3_vlan_fwd_disable: {"default": 0, "optional": true, "type": "number", "description": "Disable L3 forwarding between VLANs", "format": "flag"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param enable_def_vlan_l2_forwarding: {"default": 0, "optional": true, "type": "number", "description": "Enable layer 2 forwarding on default vlan", "format": "flag"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/network/vlan-global`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "vlan-global"
self.a10_url="/axapi/v3/network/vlan-global"
self.DeviceProxy = ""
self.l3_vlan_fwd_disable = ""
self.uuid = ""
self.enable_def_vlan_l2_forwarding = ""
        for key, value in kwargs.items():
            setattr(self, key, value)
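# Minimal usage sketch (hypothetical; a DeviceProxy configured as described
# in common/device_proxy.py is assumed to be attached elsewhere):
#
#     vlan = VlanGlobal(l3_vlan_fwd_disable=1)
#     # vlan.DeviceProxy = device_proxy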
| amwelch/a10sdk-python | a10sdk/core/network/network_vlan_global.py | Python | apache-2.0 | 1,434 |
# Copyright (C) 2014 Science and Technology Facilities Council.
# Copyright (C) 2015-2021 East Asian Observatory.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function, \
unicode_literals
import logging
import os
from flask import Flask
from jinja2_orderblocks import OrderBlocks
from ..config import get_config, get_database, get_facilities, get_home
from ..type.enum import GroupType, MessageThreadType, SiteGroupType
from .template_util import register_template_utils
from .util import check_session_expiry, \
make_enum_converter, register_error_handlers
from .blueprint.admin import create_admin_blueprint
from .blueprint.facility import create_facility_blueprint
from .blueprint.help import create_help_blueprint
from .blueprint.home import create_home_blueprint
from .blueprint.oauth import create_oauth_blueprint
from .blueprint.people import create_people_blueprint
from .blueprint.query import create_query_blueprint
def create_web_app(db=None, facility_spec=None, auto_reload_templates=False,
without_logger=False, _test_return_extra=False):
"""
Function to prepare the Flask web application.
:param db: database access object to use. If `None` then one will be
constructed based on the configuration (and `facility_spec`).
:param facility_spec: facility specification used to construct the
facility list (via :func:`hedwig.config.get_config`) and database
object (if not given, via :func:`hedwig.config.get_database`).
:param auto_reload_templates: Configure whether Jinja2 should automatically
reload template files. (Only applies with Flask 0.11 or later.)
:param without_logger: if `True`, do not configure the application's
logger. (Otherwise it is configured to log to the file specified
in the configuration file.)
:param _test_return_extra: if true, instead of just returning the
application object, return a dictionary of values useful for
debugging. (Currently just returns the output of `locals()`.)
"""
home = get_home()
config = get_config()
if db is None:
db = get_database(facility_spec=facility_spec)
# Load facilities as specified in the configuration.
facilities = get_facilities(db=db, facility_spec=facility_spec)
# Configure the web application.
application_name = config.get('application', 'name')
app = Flask(
__name__,
static_folder=os.path.join(home, 'data', 'web', 'static'),
template_folder=os.path.join(home, 'data', 'web', 'template'),
)
app.jinja_options['extensions'].append(OrderBlocks)
if not without_logger:
log_file = config.get('application', 'log_file')
if log_file:
file_handler = logging.FileHandler(log_file)
file_handler.setLevel(logging.WARNING)
file_handler.setFormatter(logging.Formatter(
fmt='%(asctime)s %(levelname)s %(name)s %(message)s',
datefmt='%Y-%m-%dT%H:%M:%S'))
app.logger.addHandler(file_handler)
# Determine maximum upload size: check all upload limits from the
# configuration file.
max_upload_size = max(
int(upload_val) for (upload_key, upload_val) in config.items('upload')
if upload_key.startswith('max_') and upload_key.endswith('_size'))
app.config['MAX_CONTENT_LENGTH'] = max_upload_size * 1024 * 1024
# Try to read the secret key from the configuration file, but if
# there isn't one, generate a temporary key.
secret_key = config.get('application', 'secret_key')
if not secret_key:
app.logger.warning('Generating temporary secret key')
secret_key = os.urandom(32)
app.secret_key = secret_key
    # Configure template reloading.  Note we must do this before we set up
    # custom template filters.  (Otherwise we could rely on Flask doing this
    # automatically when we run the application with debug=True.)
app.config['TEMPLATES_AUTO_RELOAD'] = auto_reload_templates
# Set up routing converters.
app.url_map.converters['hedwig_group'] = make_enum_converter(GroupType)
app.url_map.converters['hedwig_thread'] = \
make_enum_converter(MessageThreadType)
app.url_map.converters['hedwig_site_group'] = \
make_enum_converter(SiteGroupType)
for facility in facilities.values():
app.url_map.converters['hedwig_call_type_{}'.format(facility.code)] = \
make_enum_converter(facility.view.get_call_types())
app.url_map.converters['hedwig_review_{}'.format(facility.code)] = \
make_enum_converter(facility.view.get_reviewer_roles())
app.url_map.converters['hedwig_text_{}'.format(facility.code)] = \
make_enum_converter(facility.view.get_text_roles())
for filter_ in facility.view.get_custom_filters():
app.add_template_filter(
filter_, '{}_{}'.format(facility.code, filter_.__name__))
app.register_blueprint(create_home_blueprint(db, facilities))
app.register_blueprint(create_admin_blueprint(db, facilities),
url_prefix='/admin')
app.register_blueprint(create_people_blueprint(db, facilities))
app.register_blueprint(create_help_blueprint(db), url_prefix='/help')
app.register_blueprint(create_query_blueprint(db), url_prefix='/query')
app.register_blueprint(create_oauth_blueprint(db, app), url_prefix='/user/oauth')
# Register blueprints for each facility.
for facility in facilities.values():
app.register_blueprint(create_facility_blueprint(db, facility.view),
url_prefix='/' + facility.code)
# Add beginning of request function to check session expiry.
app.before_request(check_session_expiry)
@app.context_processor
def add_to_context():
return {
'application_name': application_name,
}
register_error_handlers(app)
register_template_utils(app)
if _test_return_extra:
return locals()
return app
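# Minimal usage sketch (assumes a valid hedwig configuration file and home
# directory; the host/port values are illustrative):
#
#     app = create_web_app(auto_reload_templates=True)
#     app.run(host='127.0.0.1', port=5000)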
| eaobservatory/hedwig | lib/hedwig/web/app.py | Python | gpl-3.0 | 6,738 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" An crawler leverage asyncrhonized IO.
File: __init__.py
Author: SpaceLis
Email: Wen.Li@tudelft.nl
GitHub: http://github.com/spacelis
Description:
    To achieve fully asynchronous crawling.
"""
import gzip
from itertools import cycle
from pykka.actor import ActorRef
class Message(dict):
""" Messages class that used for type match. """
_next_id = 0
__slots__ = ['sender', 'identifier', 'kind']
def __init__(self, sender, identifier=None):
"""@todo: Docstring for __init__.
:id: @todo
:sender: @todo
:returns: @todo
"""
super(Message, self).__init__()
if isinstance(sender, ActorRef):
self.sender = sender
elif sender is None:
self.sender = None
else:
self.sender = sender.actor_ref
if identifier:
self.identifier = identifier
else:
self.identifier = Message._next_id
Message._next_id += 1
self.kind = self.__class__.__name__
def match(self, handler):
"""@todo: Docstring for match.
:handler: @todo
:returns: @todo
"""
cname = self.kind
if cname in handler:
return handler[cname](self)
elif '_' in handler:
return handler['_'](self)
else:
raise ValueError('Unable to find a match for "%s"' % (cname))
@classmethod
def N(cls):
""" Return the class name.
:returns: @todo
"""
return cls.__name__
def __str__(self):
""" Return a string
:returns: @todo
"""
return "%s from %s [%s]" % (self.N(),
self.sender.actor_class.__name__,
self.sender.actor_urn)
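# Example of the dispatch style Message.match() supports (the handler table
# below is hypothetical; '_' is the catch-all key):
#
#     handlers = {
#         Task.N(): lambda msg: do_work(msg.payload),
#         '_': lambda msg: print('unhandled', msg),
#     }
#     message.match(handlers)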
class Task(Message):
""" A message loaded with a task. """
__slots__ = ['payload']
def __init__(self, sender, payload):
super(Task, self).__init__(sender)
self.payload = payload
class NoMoreTask(Message):
""" No more task. """
def __init__(self, sender):
super(NoMoreTask, self).__init__(sender)
class Done(Message):
"""A message saying a task is done."""
def __init__(self, sender, task):
super(Done, self).__init__(sender, task.identifier)
class TaskRequest(Message):
""" A message requesting a new task. """
def __init__(self, sender):
super(TaskRequest, self).__init__(sender)
class Record(Message):
""" A message conveying the data to be written down. """
__slots__ = ['payload']
def __init__(self, sender, payload):
super(Record, self).__init__(sender)
self.payload = payload
class Report(Message):
""" A message indicating an unusual event. """
__slots__ = ['content']
def __init__(self, sender, content):
super(Report, self).__init__(sender)
self.content = content
class NonFatalFailure(Message):
""" A notice of unhandled failure. """
__slots__ = ['err']
def __init__(self, sender, err):
super(NonFatalFailure, self).__init__(sender)
self.err = err
def __str__(self):
"""@todo: Docstring for __str__.
:returns: @todo
"""
return "%s : %s" % (super(NonFatalFailure, self).__str__(),
self.err)
class Resignition(Message):
""" A notice that the crawler is hit by sever problems. """
__slots__ = ['err', 'stack', 'conf']
def __init__(self, sender, err, stack, conf):
"""@todo: to be defined1. """
super(Resignition, self).__init__(sender)
self.err = err
self.stack = stack
self.conf = conf
def __str__(self):
"""@todo: Docstring for __str__.
:returns: @todo
"""
return "%s: %s\n%s\n%s" % (super(Resignition, self).__str__(),
self.conf, self.err, self.stack)
class Retire(Message):
""" A notice that the crawler is hit by sever problems. """
def __str__(self):
"""@todo: Docstring for __str__.
:returns: @todo
"""
return "Retired: %s" % (self.sender.actor_urn)
class RecoverableError(Exception):
""" Tell controller that there is a non-fatal error. """
__slots__ = ['err', 'retry_in']
def __init__(self, err, retry_in=10):
super(RecoverableError, self).__init__()
self.retry_in = retry_in
self.err = err
def __str__(self):
"""@todo: Docstring for __str__.
:returns: @todo
"""
return "Retry in %s because %s" % (self.retry_in, str(self.err))
class IgnorableError(Exception):
""" Tell controller that there is a non-fatal error. """
__slots__ = ['err']
def __init__(self, err):
super(IgnorableError, self).__init__()
self.err = err
def __str__(self):
"""@todo: Docstring for __str__.
:returns: @todo
"""
return str(self.err)
| spacelis/crawler.kka | crawler/__init__.py | Python | mit | 5,059 |
import pytest
import pickle
import numpy as np
from velox import VeloxObject, register_object
from velox.tools import (fullname, import_from_qualified_name,
obtain_padding_bytes, obtain_qualified_name,
VELOX_NEW_FILE_SIGNATURE,
VELOX_NEW_FILE_EXTRAS_LENGTH)
from velox_test_utils import create_class
def test_proper_fullname():
x = np.random.normal(0, 1, (10, ))
assert fullname(x) == 'numpy.ndarray'
m = create_class('foo')()
assert fullname(m) == 'velox_test_utils._Model'
def test_import_from_qualified_name():
clf = import_from_qualified_name('sklearn.linear_model.SGDRegressor')()
assert clf
from sklearn.linear_model import SGDRegressor
assert isinstance(clf, SGDRegressor)
def test_obtain_padding_bytes():
x = np.random.normal(0, 1, (10, ))
b = obtain_padding_bytes(x).decode()
assert len(b) == VELOX_NEW_FILE_EXTRAS_LENGTH
assert VELOX_NEW_FILE_SIGNATURE in b
assert obtain_qualified_name(b) == 'numpy.ndarray'
| lukedeo/Velox | test/test_tools.py | Python | apache-2.0 | 1,058 |
from django.contrib import admin
from event.models import Event
class EventArtistsInline(admin.TabularInline):
model = Event.artists.through
verbose_name_plural = 'Artists'
extra = 1
class EventInline(admin.StackedInline):
model = Event
extra = 1
class EventAdmin(admin.ModelAdmin):
inlines = [EventArtistsInline]
list_display = ['name', 'start', 'venue']
list_filter = ['name', 'start', 'venue__name']
search_fields = ['name', 'start', 'venue__name']
admin.site.register(Event, EventAdmin)
| FedorSelitsky/eventrack | event/admin.py | Python | mit | 536 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from __future__ import annotations  # keeps backward compatibility with Python 3.8 and earlier
from inspect import isfunction
import bpy
import blf
import bgl
from bpy.types import SpaceNodeEditor
from sverchok.utils.sv_stethoscope_helper import draw_text_data, draw_graphical_data
from sverchok.utils.logging import debug
callback_dict = {}
point_dict = {}
def tag_redraw_all_nodeviews():
for window in bpy.context.window_manager.windows:
for area in window.screen.areas:
if area.type == 'NODE_EDITOR':
for region in area.regions:
if region.type == 'WINDOW':
region.tag_redraw()
def callback_enable(*args, overlay='POST_VIEW'):
n_id = args[0]
# global callback_dict
if n_id in callback_dict:
return
handle_pixel = SpaceNodeEditor.draw_handler_add(draw_callback_px, args, 'WINDOW', overlay)
callback_dict[n_id] = handle_pixel
tag_redraw_all_nodeviews()
def draw_text(node, text: str, draw_id=None, color=(1, 1, 1, 1), scale=1., align="RIGHT", dynamic_location=True):
"""Draw any text nearby a node, use together with callback_disable
align = {"RIGHT", "UP", "DOWN"} todo replace with typing.Literal"""
draw_id = draw_id or node.node_id
if draw_id in callback_dict:
callback_disable(draw_id)
color = color if len(color) == 4 else (*color, 1)
text_location = None if dynamic_location else _get_text_location(node, align)
handle_pixel = SpaceNodeEditor.draw_handler_add(
_draw_text_handler,
(node.id_data.tree_id, node.node_id, text, color, scale, align, text_location),
'WINDOW',
'POST_VIEW')
callback_dict[draw_id] = handle_pixel
tag_redraw_all_nodeviews()
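# Minimal usage sketch for draw_text (hypothetical; called from a node's
# drawing code and paired with callback_disable(node.node_id) when the node
# is freed):
#
#     draw_text(node, "len: 3\nmax: 1.0", color=(1, 1, 0.3, 1), align="UP")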
def callback_disable(n_id):
# global callback_dict
handle_pixel = callback_dict.get(n_id, None)
if not handle_pixel:
return
SpaceNodeEditor.draw_handler_remove(handle_pixel, 'WINDOW')
del callback_dict[n_id]
tag_redraw_all_nodeviews()
def callback_disable_all():
# global callback_dict
temp_list = list(callback_dict.keys())
for n_id in temp_list:
if n_id:
callback_disable(n_id)
def callback_disable_filtered(pattern):
temp_list = list(callback_dict.keys())
for ident in temp_list:
if ident.endswith(pattern):
callback_disable(ident)
def restore_opengl_defaults():
bgl.glLineWidth(1)
bgl.glDisable(bgl.GL_BLEND)
# bgl.glColor4f(0.0, 0.0, 0.0, 1.0) # doesn't exist anymore ..
def get_xy_from_data(data):
location = data.get('loc')
if isfunction(location):
x, y = get_sane_xy(data)
elif isinstance(location, (tuple, list)):
x, y = location
else:
x, y = 20, 20
return x, y
def get_sane_xy(data):
return_value = (120, 120)
location_function = data.get('loc')
if location_function:
ng = bpy.data.node_groups.get(data['tree_name'])
if ng:
node = ng.nodes.get(data['node_name'])
if node:
return_value = location_function(node)
return return_value
def draw_callback_px(n_id, data):
space = bpy.context.space_data
ng_view = space.edit_tree
# ng_view can be None
if not ng_view:
return
ng_name = space.edit_tree.name
if not (data['tree_name'] == ng_name):
return
if not ng_view.bl_idname in {"SverchCustomTreeType", 'SvGroupTree'}:
return
if data.get('mode', 'text-based') == 'text-based':
draw_text_data(data)
elif data.get('mode') == "graphical":
draw_graphical_data(data)
restore_opengl_defaults()
elif data.get('mode') == 'custom_function':
drawing_func = data.get('custom_function')
x, y = get_xy_from_data(data)
args = data.get('args', (None,))
drawing_func(x, y, args)
restore_opengl_defaults()
elif data.get('mode') == 'LEAN_AND_MEAN':
drawing_func = data.get('custom_function')
args = data.get('args', (None,))
drawing_func(*args)
restore_opengl_defaults()
elif data.get('mode') == 'custom_function_context':
'''
0) this mode is useful for custom shader inside 2d drawing context, like nodeview
1) you will supply this function with args, and args will contain (geom, config)
2) your passing function might look something like
config = lambda: None
config.shader_data = ...
geom = lambda: None
geom.stuff = ..
draw_data = {
'loc': function_returning_xy,
'mode': 'custom_function_context',
'tree_name': self.id_data.name[:],
'node_name': self.name[:],
'custom_function': advanced_grid_xy,
'args': (geom, config)
}
nvBGL.callback_enable(self.n_id, draw_data)
'''
x, y = get_xy_from_data(data)
# bgl.glEnable(bgl.GL_DEPTH_TEST)
drawing_func = data.get('custom_function')
args = data.get('args', (None,))
drawing_func(bpy.context, args, (x, y))
restore_opengl_defaults()
# bgl.glDisable(bgl.GL_DEPTH_TEST)
def _draw_text_handler(tree_id, node_id, text: str, color=(1, 1, 1, 1), scale=1.0, align='RIGHT',
text_coordinates=None):
"""Draw the text in a node tree editor nearby the given node"""
editor = bpy.context.space_data
if editor.type != 'NODE_EDITOR':
return
if editor.tree_type not in {"SverchCustomTreeType", 'SvGroupTree'}:
return
if not editor.edit_tree or editor.edit_tree.tree_id != tree_id:
return
# this is less efficient because it requires search of the node each redraw call
if not text_coordinates:
if not any(n for n in editor.edit_tree.nodes if n.node_id == node_id):
            debug(f'It looks like a node was removed without removing its bgl drawing, text: {text}')
return
# find node location
node = next(n for n in editor.edit_tree.nodes if n.node_id == node_id)
(x, y), z = _get_text_location(node, align), 0
# put static coordinates if there are a lot of nodes with text to draw (does not react on the node movements)
else:
(x, y), z = text_coordinates, 0
# https://github.com/nortikin/sverchok/issues/4247
ui_scale = bpy.context.preferences.system.ui_scale
x, y = x * ui_scale, y * ui_scale
# todo add scale from the preferences
text_height = int(15 * scale * ui_scale)
line_height = int(18 * scale * ui_scale)
font_id = 0
dpi = 72
blf.size(font_id, text_height, dpi)
blf.color(font_id, *color)
for line in text.split('\n'):
blf.position(font_id, x, y, z)
blf.draw(font_id, line)
y -= line_height
def _get_text_location(node, align='RIGHT') -> tuple[int, int]:
"""Find location for a text nearby give node"""
(x, y) = node.absolute_location
gap = 10
# some nodes override standard attributes
try:
dx, dy = node.dimensions
except (TypeError, ValueError):
dx, dy = 1, 1 # todo would be nice to have something more sensible here
# find text location
if align == "RIGHT":
x, y = int(x + dx + gap), int(y)
elif align == "UP":
if node.hide: # when the node is hidden its location moves slightly upper
max_sock_num = max(len([s for s in node.inputs if not s.hide]),
len([s for s in node.outputs if not s.hide]))
gap += (max_sock_num * 0.3) * max_sock_num
x, y = int(x), int(y + gap)
elif align == "DOWN":
x, y = int(x), int(y - dy - gap)
else:
        debug(f'A node is drawing text with an unsupported align value: {align}')
return x, y
def unregister():
callback_disable_all()
| nortikin/sverchok | ui/bgl_callback_nodeview.py | Python | gpl-3.0 | 8,768 |
# Django settings for example_project project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'linkslist.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = PROJECT_ROOT + '/media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'eg&#u^%axtc$7)gk=*vy8$+!sfjv00%sk(7vxv$o#eg+ktb64r'
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'example_project.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_ROOT + '/templates/default/'
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'south',
'linkslist',
)
| MidwestCommunications/django-linkslist | example_project/settings.py | Python | mit | 3,221 |
# awl.tests.test_contextproc.py
from django.test import TestCase
from awl.context_processors import extra_context
from awl.waelsteng import FakeRequest
# ============================================================================
class ContextProcessorTest(TestCase):
def test_extra_context(self):
request = FakeRequest()
context = extra_context(request)
self.assertEqual(request, context['request'])
self.assertEqual('test_host', context['HOST'])
self.assertFalse(context['IN_ADMIN'])
| cltrudeau/django-awl | awl/tests/test_contextproc.py | Python | mit | 536 |
'''
Created on Dec 10, 2014
@author: msuliga
'''
from django.contrib import admin
from .models import Person
class PersonAdmin( admin.ModelAdmin ):
search_fields = ['name']
list_display = ( 'name', 'address', 'no_index_tag', 'personal_code', 'id' )
list_editable = ('no_index_tag',)
fields = [ 'no_index_tag']
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
admin.site.register( Person, PersonAdmin )
| tigeorgia/CorpSearch | apps/person/admin.py | Python | gpl-2.0 | 531 |
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import csv
import pickle
import rasmodel
from indra import trips, reach
from indra.literature import id_lookup
from indra.util import read_unicode_csv
from assembly_eval import have_file, run_assembly
if __name__ == '__main__':
pmc_ids = [s.strip() for s in open('pmcids.txt', 'rt').readlines()]
# Load the REACH reading output
with open('reach/reach_stmts_batch_4_eval.pkl', 'rb') as f:
reach_stmts = pickle.load(f)
# Load the PMID to PMCID map
pmcid_to_pmid = {}
csvreader = read_unicode_csv('pmc_batch_4_id_map.txt', delimiter='\t')
for row in csvreader:
pmcid_to_pmid[row[0]] = row[1]
for pmcid in pmc_ids:
print('Processing %s...' % pmcid)
# Process TRIPS
trips_fname = 'trips/' + pmcid + '.ekb'
tp = trips.process_xml(open(trips_fname).read())
# Get REACH statements
reach_stmts_for_pmcid = reach_stmts.get(pmcid_to_pmid[pmcid], [])
if not reach_stmts_for_pmcid:
print("No REACH statements for %s" % pmcid)
# Get prior statements
rasmodel_stmts = rasmodel.get_statements()
# Combine all statements
all_statements = tp.statements + reach_stmts_for_pmcid
# Run assembly
run_assembly(all_statements, 'combined', pmcid,
background_assertions=rasmodel_stmts)
| jmuhlich/indra | indra/benchmarks/assembly_eval/batch4/run_combined.py | Python | bsd-2-clause | 1,453 |
import urllib2
import base64
import json as json_module
class PreemptiveBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
'''Preemptive basic auth.
    Instead of waiting for a 401 challenge and then retrying with the
    credentials, send the credentials up front if the url is handled by the
    password manager.
Note: please use realm=None when calling add_password.'''
def http_request(self, req):
url = req.get_full_url()
realm = None
# this is very similar to the code from retry_http_basic_auth()
# but returns a request object.
user, pw = self.passwd.find_user_password(realm, url)
if pw:
raw = "%s:%s" % (user, pw)
auth = 'Basic %s' % base64.b64encode(raw).strip()
req.add_unredirected_header(self.auth_header, auth)
return req
https_request = http_request
class RequestError(Exception):
"""A generic Exception while processing the request."""
response = None
request = None
def __init__(self, *args, **kwargs):
self.response = kwargs.pop('response', None)
self.request = kwargs.pop('request', None)
if self.response is not None and self.request is None:
if hasattr(self.response, 'request'):
self.request = self.response.request
super(RequestError, self).__init__(*args, **kwargs)
def __str__(self):
msg = "RequestError"
if self.request:
msg = msg + " request="+str(self.request)
if self.response:
msg = msg + " response="+str(self.response)
return msg
class HTTPError(RequestError):
"""An HTTP error occurred."""
pass
class Response(object):
def __init__(self, method=None, url=None, headers=None,
data=None, params=None, auth=None, json=None):
# defaults
method = method.upper()
headers = {} if headers is None else headers
params = {} if params is None else params
body = None
content_type = None
if not data and json is not None:
content_type = 'application/json'
data = json_module.dumps(json)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in headers):
headers['Content-Type'] = content_type
req = urllib2.Request(url, headers=headers)
if data:
req.add_data(data)
if auth is not None:
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, url, auth[0], auth[1])
auth_manager = PreemptiveBasicAuthHandler(password_manager)
opener = urllib2.build_opener(auth_manager).open
else:
opener = urllib2.urlopen
self.request = req
self._opener = opener
self._code = None
self._data = None
self._is_error = False
self._handler = None
self._raise_on_http_error = False
def raise_on_http_error(self):
self._raise_on_http_error = True
def code(self):
if self._code:
return self._code
self.read()
return self._code
def header(self, name):
return self._handler.headers.getheader(name)
def read(self):
if self._is_error:
return None
if self._data is None:
try:
self._handler = self._opener(self.request)
self._data = self._handler.read()
self._code = self._handler.getcode()
except urllib2.HTTPError as e:
self._code = e.code
self._is_error = True
raise HTTPError(response=self)
except:
self._is_error = True
raise
if self._raise_on_http_error and self._handler.getcode() != 200:
raise HTTPError(response=self)
return self._data
def json(self):
return json_module.loads(self.read())
def data(self):
return self.read()
def GET(url, **kwargs):
return Response('get', url, **kwargs)
def POST(url, data=None, json=None, **kwargs):
return Response('post', url, data=data, json=json, **kwargs)
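# Minimal usage sketch (hypothetical endpoint and credentials):
#
#     resp = GET('http://example.com/api/items', auth=('user', 'secret'))
#     resp.raise_on_http_error()
#     print(resp.code(), resp.json())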
| ManocLabs/manoc-agent-dhcpd | manoc_agents/common/requests.py | Python | artistic-2.0 | 4,377 |
import fruit
print locals()
print fruit.utils.pathformat.prefix("hello/good")
print fruit.utils.pathformat.prefix("good")
print fruit.utils.pathformat.suffix("hello/good")
print fruit.utils.pathformat.suffix("good")
a = 'hello my name is zhaomr'
| zhaomr13/fruit | tests/test_this_module.py | Python | gpl-2.0 | 247 |
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Rds20140815CreateDBInstanceRequest(RestApi):
def __init__(self,domain='rds.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.ConnectionMode = None
self.DBInstanceClass = None
self.DBInstanceDescription = None
self.DBInstanceNetType = None
self.DBInstanceStorage = None
self.Engine = None
self.EngineVersion = None
self.InstanceNetworkType = None
self.PayType = None
self.PrivateIpAddress = None
self.RegionId = None
self.SecurityIPList = None
self.VPCId = None
self.VSwitchId = None
self.ZoneId = None
def getapiname(self):
return 'rds.aliyuncs.com.CreateDBInstance.2014-08-15'
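# Minimal usage sketch (hypothetical parameter values; signing and
# submission are handled by the RestApi base class):
#
#     req = Rds20140815CreateDBInstanceRequest()
#     req.RegionId = 'cn-hangzhou'
#     req.Engine = 'MySQL'
#     req.EngineVersion = '5.6'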
| wanghe4096/website | aliyun/api/rest/Rds20140815CreateDBInstanceRequest.py | Python | bsd-2-clause | 766 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django_mysql.models
class Migration(migrations.Migration):
dependencies = [
('base', '0008_auto_20160331_1405'),
]
operations = [
migrations.AddField(
model_name='snippet',
name='client_options',
field=django_mysql.models.DynamicField(default=None),
),
]
| akatsoulas/snippets-service | snippets/base/migrations/0009_snippet_client_options.py | Python | mpl-2.0 | 448 |
# -*- coding: utf-8 -*-
"""CIFAR-100 DeepOBS dataset."""
import os
import tensorflow as tf
from . import dataset
from .. import config
class cifar100(dataset.DataSet):
"""DeepOBS data set class for the `CIFAR-100\
<https://www.cs.toronto.edu/~kriz/cifar.html>`_ data set.
Args:
batch_size (int): The mini-batch size to use. Note that, if ``batch_size``
          is not a divisor of the dataset size (``50 000`` for train, ``10 000``
for test) the remainder is dropped in each epoch (after shuffling).
data_augmentation (bool): If ``True`` some data augmentation operations
(random crop window, horizontal flipping, lighting augmentation) are
applied to the training data (but not the test data).
train_eval_size (int): Size of the train eval data set.
Defaults to ``10 000`` the size of the test set.
Attributes:
batch: A tuple ``(x, y)`` of tensors, yielding batches of CIFAR-100 images
(``x`` with shape ``(batch_size, 32, 32, 3)``) and corresponding one-hot
label vectors (``y`` with shape ``(batch_size, 100)``). Executing these
tensors raises a ``tf.errors.OutOfRangeError`` after one epoch.
train_init_op: A tensorflow operation initializing the dataset for the
training phase.
train_eval_init_op: A tensorflow operation initializing the testproblem for
evaluating on training data.
test_init_op: A tensorflow operation initializing the testproblem for
evaluating on test data.
phase: A string-value tf.Variable that is set to ``train``, ``train_eval``
or ``test``, depending on the current phase. This can be used by testproblems
to adapt their behavior to this phase.
"""
def __init__(self,
batch_size,
data_augmentation=True,
train_eval_size=10000):
"""Creates a new CIFAR-100 instance.
Args:
batch_size (int): The mini-batch size to use. Note that, if ``batch_size``
          is not a divisor of the dataset size (``50 000`` for train, ``10 000``
for test) the remainder is dropped in each epoch (after shuffling).
data_augmentation (bool): If ``True`` some data augmentation operations
(random crop window, horizontal flipping, lighting augmentation) are
applied to the training data (but not the test data).
train_eval_size (int): Size of the train eval data set.
Defaults to ``10 000`` the size of the test set.
"""
self._name = "cifar100"
self._data_augmentation = data_augmentation
self._train_eval_size = train_eval_size
super(cifar100, self).__init__(batch_size)
def _make_dataset(self,
binaries_fname_pattern,
data_augmentation=False,
shuffle=True):
"""Creates a CIFAR-100 data set (helper used by ``.make_*_datset`` below).
Args:
binaries_fname_pattern (str): Pattern of the ``.bin`` files from which
to load images and labels (e.g. ``some/path/data_batch_*.bin``).
data_augmentation (bool): Whether to apply data augmentation operations.
shuffle (bool): Switch to turn on or off shuffling of the data set.
Defaults to ``True``.
Returns:
A tf.data.Dataset yielding batches of CIFAR-100 data.
"""
# Set number of bytes to read.
label_bytes = 1
label_offset = 1
num_classes = 100
depth = 3
image_size = 32
image_bytes = image_size * image_size * depth
record_bytes = label_bytes + label_offset + image_bytes
def parse_func(raw_record):
"""Function parsing data from raw binary records."""
# Decode raw_record.
record = tf.reshape(
tf.decode_raw(raw_record, tf.uint8), [record_bytes])
label = tf.cast(
tf.slice(record, [label_offset], [label_bytes]), tf.int32)
depth_major = tf.reshape(
tf.slice(record, [label_bytes], [image_bytes]),
[depth, image_size, image_size])
image = tf.cast(tf.transpose(depth_major, [1, 2, 0]), tf.float32)
# Add image pre-processing.
if data_augmentation:
image = tf.image.resize_image_with_crop_or_pad(
image, image_size + 4, image_size + 4)
image = tf.random_crop(image, [32, 32, 3])
image = tf.image.random_flip_left_right(image)
image = tf.image.random_brightness(image, max_delta=63. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_contrast(image, lower=0.2, upper=1.8)
else:
image = tf.image.resize_image_with_crop_or_pad(image, 32, 32)
image = tf.image.per_image_standardization(image)
label = tf.squeeze(tf.one_hot(label, depth=num_classes))
return image, label
with tf.name_scope(self._name):
with tf.device('/cpu:0'):
filenames = tf.matching_files(binaries_fname_pattern)
filenames = tf.random_shuffle(filenames)
data = tf.data.FixedLengthRecordDataset(
filenames=filenames, record_bytes=record_bytes)
data = data.map(
parse_func,
num_parallel_calls=(8 if data_augmentation else 4))
if shuffle:
data = data.shuffle(
buffer_size=20000)
data = data.batch(self._batch_size, drop_remainder=True)
data = data.prefetch(
buffer_size=4)
return data
def _make_train_dataset(self):
"""Creates the CIFAR-100 training dataset.
Returns:
A tf.data.Dataset instance with batches of training data.
"""
pattern = os.path.join(config.get_data_dir(), "cifar-100", "train.bin")
return self._make_dataset(
pattern, data_augmentation=self._data_augmentation, shuffle=True)
def _make_train_eval_dataset(self):
"""Creates the CIFAR-100 train eval dataset.
Returns:
A tf.data.Dataset instance with batches of training eval data.
"""
return self._train_dataset.take(
self._train_eval_size // self._batch_size)
def _make_test_dataset(self):
"""Creates the CIFAR-100 test dataset.
Returns:
A tf.data.Dataset instance with batches of test data.
"""
pattern = os.path.join(config.get_data_dir(), "cifar-100", "test.bin")
return self._make_dataset(
pattern, data_augmentation=False, shuffle=False)
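# Minimal usage sketch (assumes the CIFAR-100 binaries are present under the
# configured DeepOBS data directory):
#
#     data = cifar100(batch_size=128)
#     with tf.Session() as sess:
#         sess.run(data.train_init_op)
#         images, labels = sess.run(data.batch)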
| fsschneider/DeepOBS | deepobs/tensorflow/datasets/cifar100.py | Python | mit | 6,777 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Problem 1.8: How do you perform calculations on a dictionary of data
# (e.g. finding the minimum or maximum value, sorting, and so on)?
# Solution: use zip() together with functions such as sorted(), min() and max()
prices = {
'ACME': 45.23,
'APL': 612.78,
'IBM': 205.55,
'HPQ': 37.20,
'FB': 10.75
}
# If you perform ordinary operations on a dictionary, you will find that
# they only act on the keys, not the values
# returns 'ACME'
print(min(prices))
# returns 'IBM'
print(max(prices))
# Supply a key function to min() and max() to find the key whose value is smallest or largest
print(min(prices, key=lambda k: prices[k]))
print(max(prices, key=lambda k: prices[k]))
# smallest stock price together with its symbol
# zip() accepts one or more iterable arguments.
# It pairs the i-th element of each argument sequence into the i-th tuple of
# the result, truncating at the shortest sequence; with a single sequence it
# yields 1-tuples, and with no arguments it is empty. (In Python 3, zip()
# returns an iterator rather than a list.)
min_price = min(zip(prices.values(), prices.keys()))
print(min_price)
# largest stock price together with its symbol
max_price = max(zip(prices.values(), prices.keys()))
print(max_price)
# sort (price, symbol) pairs in ascending order
prices_sorted = sorted(zip(prices.values(), prices.keys()))
print(prices_sorted)
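# Added example: when two entries share the same value, the tuple comparison
# falls back to the dictionary keys.
prices_tie = {'AAA': 45.23, 'ZZZ': 45.23}
print(min(zip(prices_tie.values(), prices_tie.keys())))  # (45.23, 'AAA')
print(max(zip(prices_tie.values(), prices_tie.keys())))  # (45.23, 'ZZZ')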
| felix9064/python | Demo/cookbook/operate_dict.py | Python | mit | 1,379 |
from __future__ import print_function
from pprint import pprint
import sys
sys.path.append( 'external/pycparser' )
from pycparser import c_parser, c_ast, parse_file
# Portable cpp path for Windows and Linux/Unix
CPPPATH = '../utils/cpp.exe' if sys.platform == 'win32' else 'cpp'
class IdVisitor(c_ast.NodeVisitor):
def __init__(self):
self.idList_ = []
def visit_ID(self, node):
self.idList_.append( node.name )
def idDefs(filename):
ast = parse_file(
filename,
use_cpp=True,
cpp_path=CPPPATH,
cpp_args=[ "-nostdinc" ]
)
# c.f. http://stackoverflow.com/questions/10353902/any-way-to-get-the-c-preproccessor-to-ignore-all-includes
v = IdVisitor()
v.visit(ast)
print( v.idList_ )
if __name__ == "__main__":
if len(sys.argv) > 1:
filename = sys.argv[1]
else:
filename = 'c_files/hash.c'
idDefs(filename)
| arunksaha/casescore | pycparser_id.py | Python | bsd-3-clause | 916 |
# -*- coding: utf-8 -*-
# @CreateTime: 2017/8/4 14:25
# @CreateBy: Alvin
# @File: xHarvester.py
# @UpdateTime:
# @UpdateBy:
import os
import sys
import getopt
from theHarvester import usage
from theHarvester import start
def x_start(arg):
if len(sys.argv) < 4:
usage()
sys.exit()
try:
opts, args = getopt.getopt(arg, "l:d:b:s:vf:nhcte:")
except getopt.GetoptError:
usage()
sys.exit()
opt_keys = [opt[0] for opt in opts]
if ('-d' in opt_keys) and ('-f' in opt_keys):
if not os.path.exists('output'):
os.mkdir('output')
f_index = opt_keys.index('-f')
d_index = opt_keys.index('-d')
d_file = opts[d_index][1]
if (not os.path.exists(d_file)) or not os.path.isfile(d_file):
sys.exit()
with open(d_file) as f:
for line_num, line in enumerate(f):
d_value = line[:-1]
f_value = os.path.join('output', str(line_num) + '_' + line[:-1] + '.html')
opts[d_index] = ('-d', d_value)
opts[f_index] = ('-f', f_value)
print opts
try:
start(opts)
except:
continue
if __name__ == "__main__":
try:
x_start(sys.argv[1:])
except KeyboardInterrupt:
print "Search interrupted by user.."
except:
sys.exit()
| cative0/TheHarvester | xHarvester.py | Python | gpl-2.0 | 1,498 |
"""
Link extractor based on lxml.html
"""
import six
from six.moves.urllib.parse import urljoin
import lxml.etree as etree
from w3lib.html import strip_html5_whitespace
from w3lib.url import canonicalize_url
from scrapy.link import Link
from scrapy.utils.misc import arg_to_iter, rel_has_nofollow
from scrapy.utils.python import unique as unique_list, to_native_str
from scrapy.utils.response import get_base_url
from scrapy.linkextractors import FilteringLinkExtractor
# from lxml/src/lxml/html/__init__.py
XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml"
_collect_string_content = etree.XPath("string()")
def _nons(tag):
if isinstance(tag, six.string_types):
if tag[0] == '{' and tag[1:len(XHTML_NAMESPACE)+1] == XHTML_NAMESPACE:
return tag.split('}')[-1]
return tag
class LxmlParserLinkExtractor(object):
def __init__(self, tag="a", attr="href", process=None, unique=False,
strip=True, canonicalized=False):
self.scan_tag = tag if callable(tag) else lambda t: t == tag
self.scan_attr = attr if callable(attr) else lambda a: a == attr
self.process_attr = process if callable(process) else lambda v: v
self.unique = unique
self.strip = strip
if canonicalized:
self.link_key = lambda link: link.url
else:
self.link_key = lambda link: canonicalize_url(link.url,
keep_fragments=True)
def _iter_links(self, document):
for el in document.iter(etree.Element):
if not self.scan_tag(_nons(el.tag)):
continue
attribs = el.attrib
for attrib in attribs:
if not self.scan_attr(attrib):
continue
yield (el, attrib, attribs[attrib])
def _extract_links(self, selector, response_url, response_encoding, base_url):
links = []
# hacky way to get the underlying lxml parsed document
for el, attr, attr_val in self._iter_links(selector.root):
# pseudo lxml.html.HtmlElement.make_links_absolute(base_url)
try:
if self.strip:
attr_val = strip_html5_whitespace(attr_val)
attr_val = urljoin(base_url, attr_val)
except ValueError:
continue # skipping bogus links
else:
url = self.process_attr(attr_val)
if url is None:
continue
url = to_native_str(url, encoding=response_encoding)
# to fix relative links after process_value
url = urljoin(response_url, url)
link = Link(url, _collect_string_content(el) or u'',
nofollow=rel_has_nofollow(el.get('rel')))
links.append(link)
return self._deduplicate_if_needed(links)
def extract_links(self, response):
base_url = get_base_url(response)
return self._extract_links(response.selector, response.url, response.encoding, base_url)
def _process_links(self, links):
""" Normalize and filter extracted links
        The subclass should override it if necessary
"""
return self._deduplicate_if_needed(links)
def _deduplicate_if_needed(self, links):
if self.unique:
return unique_list(links, key=self.link_key)
return links
class LxmlLinkExtractor(FilteringLinkExtractor):
def __init__(self, allow=(), deny=(), allow_domains=(), deny_domains=(), restrict_xpaths=(),
tags=('a', 'area'), attrs=('href',), canonicalize=False,
unique=True, process_value=None, deny_extensions=None, restrict_css=(),
strip=True):
tags, attrs = set(arg_to_iter(tags)), set(arg_to_iter(attrs))
tag_func = lambda x: x in tags
attr_func = lambda x: x in attrs
lx = LxmlParserLinkExtractor(
tag=tag_func,
attr=attr_func,
unique=unique,
process=process_value,
strip=strip,
canonicalized=canonicalize
)
super(LxmlLinkExtractor, self).__init__(lx, allow=allow, deny=deny,
allow_domains=allow_domains, deny_domains=deny_domains,
restrict_xpaths=restrict_xpaths, restrict_css=restrict_css,
canonicalize=canonicalize, deny_extensions=deny_extensions)
def extract_links(self, response):
base_url = get_base_url(response)
if self.restrict_xpaths:
docs = [subdoc
for x in self.restrict_xpaths
for subdoc in response.xpath(x)]
else:
docs = [response.selector]
all_links = []
for doc in docs:
links = self._extract_links(doc, response.url, response.encoding, base_url)
all_links.extend(self._process_links(links))
return unique_list(all_links)
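# Minimal usage sketch (hypothetical spider callback; `response` is the
# scrapy Response being parsed):
#
#     extractor = LxmlLinkExtractor(allow=r'/category/\d+', unique=True)
#     for link in extractor.extract_links(response):
#         yield scrapy.Request(link.url)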
| rolando-contrib/scrapy | scrapy/linkextractors/lxmlhtml.py | Python | bsd-3-clause | 4,962 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
import eventlet
from flask import request, Flask
from st2reactor.sensor.base import Sensor
from st2common.util import isotime
__all__ = [
'ServiceNotificationsSensor'
]
eventlet.monkey_patch(
os=True,
select=True,
socket=True,
thread=False if '--use-debugger' in sys.argv else True,
time=True)
SUPPORTED_SERVICES_EVENT_KEYS = [
's3'
]
class ServiceNotificationsSensor(Sensor):
def __init__(self, sensor_service, config=None):
super(ServiceNotificationsSensor, self).__init__(sensor_service=sensor_service,
config=config)
self._config = self._config.get('service_notifications_sensor', {})
self._host = self._config.get('host', 'localhost')
self._port = self._config.get('port', 12345)
self._path = self._config.get('path', None)
if not self._path:
raise ValueError('path setting not configured')
self._log = self._sensor_service.get_logger(__name__)
self._app = Flask(__name__)
def setup(self):
pass
def run(self):
@self._app.route(self._path, methods=['POST'])
def handle_notification_webhook():
return self._handle_notification_webhook()
self._log.info('Listening for webhooks on http://%s:%s%s' %
(self._host, self._port, self._path))
self._app.run(host=self._host, port=self._port)
def cleanup(self):
pass
def add_trigger(self, trigger):
pass
def update_trigger(self, trigger):
pass
def remove_trigger(self, trigger):
pass
def _handle_notification_webhook(self):
self._log.debug('Received webhook, data=%s' % (request.data))
try:
data = json.loads(request.data)
except ValueError as e:
self._log.debug('Failed to parse body as JSON')
return ''
message = data.get('Message', None)
if not message:
            self._log.debug('Payload contains no "Message" attribute, skipping')
return ''
try:
message = json.loads(message)
except ValueError as e:
self._log.info('Failed to parse message as JSON: %s (message=%s)' %
(str(e), message))
# log
return ''
self._process_message(message=message)
return ''
def _process_message(self, message):
records = message.get('Records', [])
for record in records:
self._dispatch_trigger_for_record(record=record)
def _dispatch_trigger_for_record(self, record):
trigger = 'aws.service_notification'
timestamp_datetime = isotime.parse(record['eventTime'])
timestamp = int(timestamp_datetime.strftime('%s')) # pylint: disable=no-member
source = record.get('eventSource', 'unknown')
region = record.get('awsRegion', 'unknown')
name = record.get('eventName', 'unknown')
request_parameters = record['requestParameters']
response_elements = record['responseElements']
# Build event specified payload object
event_payload = None
for event_key in SUPPORTED_SERVICES_EVENT_KEYS:
value = record.get(event_key, None)
if value:
event_payload = value
break
if not event_payload:
# Unsupported service
return
payload = {
# Common attributes for all the AWS services events
'source': source,
'region': region,
'name': name,
'timestamp': timestamp,
'request_parameters': request_parameters,
'response_elements': response_elements,
# Service and event specific payload
'payload': event_payload
}
self._sensor_service.dispatch(trigger=trigger, payload=payload)
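# ---------------------------------------------------------------------------
# Illustrative only (not part of the original pack): a minimal sketch of the
# payload shape _handle_notification_webhook() expects. The endpoint URL, the
# path and all field values below are assumptions for demonstration, and the
# sketch assumes the third-party "requests" package is installed.
if __name__ == '__main__':
    import requests
    record = {
        'eventTime': '2015-01-01T12:00:00.000Z',
        'eventSource': 'aws:s3',
        'awsRegion': 'us-east-1',
        'eventName': 'ObjectCreated:Put',
        'requestParameters': {},
        'responseElements': {},
        # one key from SUPPORTED_SERVICES_EVENT_KEYS must be present
        's3': {'bucket': {'name': 'example-bucket'}},
    }
    # The sensor expects an SNS-style envelope whose "Message" value is
    # itself a JSON-encoded string containing the records.
    payload = {'Message': json.dumps({'Records': [record]})}
    requests.post('http://localhost:12345/webhook', data=json.dumps(payload))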
| armab/st2contrib | packs/aws/sensors/service_notifications_sensor.py | Python | apache-2.0 | 4,755 |
#!/usr/bin/python
#
# linearize-data.py: Construct a linear, no-fork version of the chain.
#
# Copyright (c) 2013-2014 The Stardust Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from __future__ import print_function, division
import json
import struct
import re
import os
import os.path
import base64
import httplib
import sys
import hashlib
import datetime
import time
from collections import namedtuple
settings = {}
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
def calc_hdr_hash(blk_hdr):
hash1 = hashlib.sha256()
hash1.update(blk_hdr)
hash1_o = hash1.digest()
hash2 = hashlib.sha256()
hash2.update(hash1_o)
hash2_o = hash2.digest()
return hash2_o
def calc_hash_str(blk_hdr):
hash = calc_hdr_hash(blk_hdr)
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
return hash_str
def get_blk_dt(blk_hdr):
members = struct.unpack("<I", blk_hdr[68:68+4])
nTime = members[0]
dt = datetime.datetime.fromtimestamp(nTime)
dt_ym = datetime.datetime(dt.year, dt.month, 1)
return (dt_ym, nTime)
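# Illustrative only (not part of the original script): get_blk_dt() reads the
# little-endian uint32 timestamp stored at bytes 68..71 of the 80-byte block
# header. The header below is fabricated for demonstration.
def _example_get_blk_dt():
    fake_hdr = b'\x00' * 68 + struct.pack("<I", 1408893517) + b'\x00' * 8
    dt_ym, nTime = get_blk_dt(fake_hdr)
    assert nTime == 1408893517
    return dt_ym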
def get_block_hashes(settings):
blkindex = []
f = open(settings['hashlist'], "r")
for line in f:
line = line.rstrip()
blkindex.append(line)
print("Read " + str(len(blkindex)) + " hashes")
return blkindex
def mkblockmap(blkindex):
blkmap = {}
for height,hash in enumerate(blkindex):
blkmap[hash] = height
return blkmap
# Block header and extent on disk
BlockExtent = namedtuple('BlockExtent', ['fn', 'offset', 'inhdr', 'blkhdr', 'size'])
class BlockDataCopier:
def __init__(self, settings, blkindex, blkmap):
self.settings = settings
self.blkindex = blkindex
self.blkmap = blkmap
self.inFn = 0
self.inF = None
self.outFn = 0
self.outsz = 0
self.outF = None
self.outFname = None
self.blkCountIn = 0
self.blkCountOut = 0
self.lastDate = datetime.datetime(2000, 1, 1)
self.highTS = 1408893517 - 315360000
self.timestampSplit = False
self.fileOutput = True
self.setFileTime = False
self.maxOutSz = settings['max_out_sz']
if 'output' in settings:
self.fileOutput = False
if settings['file_timestamp'] != 0:
self.setFileTime = True
if settings['split_timestamp'] != 0:
self.timestampSplit = True
# Extents and cache for out-of-order blocks
self.blockExtents = {}
self.outOfOrderData = {}
self.outOfOrderSize = 0 # running total size for items in outOfOrderData
def writeBlock(self, inhdr, blk_hdr, rawblock):
blockSizeOnDisk = len(inhdr) + len(blk_hdr) + len(rawblock)
if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz):
self.outF.close()
if self.setFileTime:
                os.utime(self.outFname, (int(time.time()), self.highTS))
self.outF = None
self.outFname = None
self.outFn = self.outFn + 1
self.outsz = 0
(blkDate, blkTS) = get_blk_dt(blk_hdr)
        if self.timestampSplit and (blkDate > self.lastDate):
            print("New month " + blkDate.strftime("%Y-%m") + " @ " + calc_hash_str(blk_hdr))
            self.lastDate = blkDate
            if self.outF:
                self.outF.close()
                if self.setFileTime:
                    os.utime(self.outFname, (int(time.time()), self.highTS))
                self.outF = None
                self.outFname = None
                self.outFn = self.outFn + 1
                self.outsz = 0
        if not self.outF:
            if self.fileOutput:
                self.outFname = self.settings['output_file']
            else:
                self.outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn)
            print("Output file " + self.outFname)
            self.outF = open(self.outFname, "wb")
self.outF.write(inhdr)
self.outF.write(blk_hdr)
self.outF.write(rawblock)
self.outsz = self.outsz + len(inhdr) + len(blk_hdr) + len(rawblock)
self.blkCountOut = self.blkCountOut + 1
if blkTS > self.highTS:
self.highTS = blkTS
if (self.blkCountOut % 1000) == 0:
print('%i blocks scanned, %i blocks written (of %i, %.1f%% complete)' %
(self.blkCountIn, self.blkCountOut, len(self.blkindex), 100.0 * self.blkCountOut / len(self.blkindex)))
def inFileName(self, fn):
return os.path.join(self.settings['input'], "blk%05d.dat" % fn)
def fetchBlock(self, extent):
'''Fetch block contents from disk given extents'''
with open(self.inFileName(extent.fn), "rb") as f:
f.seek(extent.offset)
return f.read(extent.size)
def copyOneBlock(self):
'''Find the next block to be written in the input, and copy it to the output.'''
extent = self.blockExtents.pop(self.blkCountOut)
if self.blkCountOut in self.outOfOrderData:
# If the data is cached, use it from memory and remove from the cache
rawblock = self.outOfOrderData.pop(self.blkCountOut)
self.outOfOrderSize -= len(rawblock)
else: # Otherwise look up data on disk
rawblock = self.fetchBlock(extent)
self.writeBlock(extent.inhdr, extent.blkhdr, rawblock)
def run(self):
while self.blkCountOut < len(self.blkindex):
if not self.inF:
fname = self.inFileName(self.inFn)
print("Input file " + fname)
try:
self.inF = open(fname, "rb")
except IOError:
print("Premature end of block data")
return
inhdr = self.inF.read(8)
if (not inhdr or (inhdr[0] == "\0")):
self.inF.close()
self.inF = None
self.inFn = self.inFn + 1
continue
inMagic = inhdr[:4]
if (inMagic != self.settings['netmagic']):
print("Invalid magic: " + inMagic.encode('hex'))
return
inLenLE = inhdr[4:]
su = struct.unpack("<I", inLenLE)
inLen = su[0] - 80 # length without header
blk_hdr = self.inF.read(80)
inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen)
hash_str = calc_hash_str(blk_hdr)
            if hash_str not in self.blkmap:
print("Skipping unknown block " + hash_str)
self.inF.seek(inLen, os.SEEK_CUR)
continue
blkHeight = self.blkmap[hash_str]
self.blkCountIn += 1
if self.blkCountOut == blkHeight:
# If in-order block, just copy
rawblock = self.inF.read(inLen)
self.writeBlock(inhdr, blk_hdr, rawblock)
# See if we can catch up to prior out-of-order blocks
while self.blkCountOut in self.blockExtents:
self.copyOneBlock()
else: # If out-of-order, skip over block data for now
self.blockExtents[blkHeight] = inExtent
if self.outOfOrderSize < self.settings['out_of_order_cache_sz']:
# If there is space in the cache, read the data
# Reading the data in file sequence instead of seeking and fetching it later is preferred,
# but we don't want to fill up memory
self.outOfOrderData[blkHeight] = self.inF.read(inLen)
self.outOfOrderSize += inLen
else: # If no space in cache, seek forward
self.inF.seek(inLen, os.SEEK_CUR)
print("Done (%i blocks written)" % (self.blkCountOut))
if __name__ == '__main__':
if len(sys.argv) != 2:
print("Usage: linearize-data.py CONFIG-FILE")
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
        m = re.search(r'^\s*#', line)
if m:
continue
# parse key=value lines
        m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'netmagic' not in settings:
settings['netmagic'] = 'f9beb4d9'
if 'genesis' not in settings:
settings['genesis'] = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
if 'input' not in settings:
settings['input'] = 'input'
if 'hashlist' not in settings:
settings['hashlist'] = 'hashlist.txt'
if 'file_timestamp' not in settings:
settings['file_timestamp'] = 0
if 'split_timestamp' not in settings:
settings['split_timestamp'] = 0
if 'max_out_sz' not in settings:
settings['max_out_sz'] = 1000L * 1000 * 1000
if 'out_of_order_cache_sz' not in settings:
settings['out_of_order_cache_sz'] = 100 * 1000 * 1000
settings['max_out_sz'] = long(settings['max_out_sz'])
settings['split_timestamp'] = int(settings['split_timestamp'])
settings['file_timestamp'] = int(settings['file_timestamp'])
settings['netmagic'] = settings['netmagic'].decode('hex')
settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz'])
if 'output_file' not in settings and 'output' not in settings:
print("Missing output file / directory")
sys.exit(1)
blkindex = get_block_hashes(settings)
blkmap = mkblockmap(blkindex)
if not settings['genesis'] in blkmap:
print("Genesis block not found in hashlist")
else:
BlockDataCopier(settings, blkindex, blkmap).run()
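# Illustrative only (not part of the original script): a minimal sketch of a
# CONFIG-FILE this script can consume. The keys mirror the defaults applied
# above; the filesystem paths are placeholders.
#
#   netmagic=f9beb4d9
#   genesis=000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
#   input=/path/to/blocks
#   hashlist=hashlist.txt
#   output_file=/path/to/bootstrap.dat
#   max_out_sz=1000000000
#   split_timestamp=0
#   file_timestamp=0
#   out_of_order_cache_sz=100000000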
| ctwiz/stardust | contrib/linearize/linearize-data.py | Python | mit | 8,828 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
# Note: need to install PyGTK on Windows
import pygtk
pygtk.require("2.0") #Essa linha define a versão do pygtk a ser importado
import gtk
from bet_calculator.bet_calculator import Bet_Calculator
from decimal import Decimal
class Vision(object):
def __init__(self):
builder = gtk.Builder()
builder.add_from_file("calculadorApostas.glade")
self.calculador_de_aposta = Bet_Calculator()
builder.connect_signals({
"gtk_main_quit": gtk.main_quit,
"on_txTime1_activate": self.time1_preenchido,
"on_txTime1_focus_out_event": self.time1_preenchido,
"on_txTime2_activate": self.time2_preenchido,
"on_txTime2_focus_out_event": self.time2_preenchido,
"on_txCasa1_activate": self.casa1_preenchido,
"on_txCasa1_focus_out_event": self.casa1_preenchido,
"on_txCasa2_activate": self.casa2_preenchido,
"on_txCasa2_focus_out_event": self.casa2_preenchido,
"on_txCotacaoTime1Casa1_activate": self.cotacao_time1_casa1_preenchido,
"on_txCotacaoTime1Casa1_focus_out_event": self.cotacao_time1_casa1_preenchido,
"on_txCotacaoTime1Casa2_activate": self.cotacao_time1_casa2_preenchido,
"on_txCotacaoTime1Casa2_focus_out_event": self.cotacao_time1_casa2_preenchido,
"on_txCotacaoTime2Casa1_activate": self.cotacao_time2_casa1_preenchido,
"on_txCotacaoTime2Casa1_focus_out_event": self.cotacao_time2_casa1_preenchido,
"on_txCotacaoTime2Casa2_activate": self.cotacao_time2_casa2_preenchido,
"on_txCotacaoTime2Casa2_focus_out_event": self.cotacao_time2_casa2_preenchido,
"on_txValorTotal_activate": self.valor_total_preenchido,
"on_txValorTotal_focus_out_event": self.valor_total_preenchido,
"on_hsTimes_move_slider": self.slider_modificado,
"on_hsTimes_value_changed": self.slider_modificado,
"on_rbTime1Casa1_toggled": self.bet_team1_house1_ativado,
"on_rbTime1Casa2_toggled": self.aposta_time1_casa2_ativado,
})
self.Time1 = "Time 1"
self.Time2 = "Time 2"
self.Casa1 = "Casa 1"
self.Casa2 = "Casa 2"
        self.frmprincipal = builder.get_object("frmPrincipal")  # Main form, it will only be displayed
        self.txTime1 = builder.get_object("txTime1")  # Text entry that captures team 1
        self.txTime2 = builder.get_object("txTime2")  # Text entry that captures team 2
        self.txCasa1 = builder.get_object("txCasa1")  # Text entry that captures house 1
        self.txCasa2 = builder.get_object("txCasa2")  # Text entry that captures house 2
self.lbNumeroTime1 = builder.get_object("lbNumeroTime1")
self.lbNumeroTime2 = builder.get_object("lbNumeroTime2")
self.lbNumeroCasa1 = builder.get_object("lbNumeroCasa1")
self.lbNumeroCasa2 = builder.get_object("lbNumeroCasa2")
self.rbTime1Casa1 = builder.get_object("rbTime1Casa1")
self.rbTime1Casa2 = builder.get_object("rbTime1Casa2")
self.lbResultadoTime1 = builder.get_object("lbResultadoTime1")
self.lbResultadoTime2 = builder.get_object("lbResultadoTime2")
self.lbVencedorTime1 = builder.get_object("lbVencedorTime1")
self.lbVencedorTime2 = builder.get_object("lbVencedorTime2")
self.lbAvisaNumeros = builder.get_object("lbAvisaNumeros")
self.txCotacaoTime1Casa1 = builder.get_object("txCotacaoTime1Casa1")
self.txCotacaoTime2Casa1 = builder.get_object("txCotacaoTime2Casa1")
self.txCotacaoTime1Casa2 = builder.get_object("txCotacaoTime1Casa2")
self.txCotacaoTime2Casa2 = builder.get_object("txCotacaoTime2Casa2")
self.hsTimesValores = builder.get_object("hsTimesValores")
self.txValorTime1 = builder.get_object("txValorTime1")
self.txValorTime2 = builder.get_object("txValorTime2")
self.txValorTotal = builder.get_object("txValorTotal")
self.lbLucroTime1Vencedor = builder.get_object("lbLucroTime1Vencedor")
self.lbLucroTime2Vencedor = builder.get_object("lbLucroTime2Vencedor")
self.hsTimes = builder.get_object("hsTimes")
self.frmprincipal.show()
def time1_preenchido(self, widget, never_used=''):
time1 = self.txTime1.get_text()
if time1 != '':
self.Time1 = time1
else:
self.Time1 = 'Time 1'
self.preenche_dados_time1()
def time2_preenchido(self, widget, never_used=''):
time2 = self.txTime2.get_text()
if time2 != '':
self.Time2 = time2
else:
self.Time2 = 'Time 2'
self.preenche_dados_time2()
def casa1_preenchido(self, widget, never_used=''):
casa1 = self.txCasa1.get_text()
if casa1 != '':
self.Casa1 = casa1
else:
self.Casa1 = 'Casa 1'
self.preenche_dados_casa1()
def casa2_preenchido(self, widget, never_used=''):
casa2 = self.txCasa2.get_text()
if casa2 != '':
self.Casa2 = casa2
else:
self.Casa2 = 'Casa 2'
self.preenche_dados_casa2()
def cotacao_time1_casa1_preenchido(self, widget, never_used=''):
try:
self.calculador_de_aposta.decimal_team1_house1 = self.txCotacaoTime1Casa1.get_text()
except:
self.calculador_de_aposta.decimal_team1_house1 = 0
self.atualiza_informacoes_apostas()
def cotacao_time1_casa2_preenchido(self, widget, never_used=''):
try:
self.calculador_de_aposta.decimal_team1_house2 = self.txCotacaoTime1Casa2.get_text()
except:
self.calculador_de_aposta.decimal_team1_house2 = 0
self.atualiza_informacoes_apostas()
def cotacao_time2_casa1_preenchido(self, widget, never_used=''):
try:
self.calculador_de_aposta.decimal_team2_house1 = self.txCotacaoTime2Casa1.get_text()
except:
self.calculador_de_aposta.decimal_team2_house1 = 0
self.atualiza_informacoes_apostas()
def cotacao_time2_casa2_preenchido(self, widget, never_used=''):
try:
self.calculador_de_aposta.decimal_team2_house2 = self.txCotacaoTime2Casa2.get_text()
except:
self.calculador_de_aposta.decimal_team2_house2 = 0
self.atualiza_informacoes_apostas()
def valor_total_preenchido(self, widget, never_used=''):
try:
self.calculador_de_aposta.cash_to_bet = self.txValorTotal.get_text()
except:
self.calculador_de_aposta.cash_to_bet = 0
self.atualiza_informacoes_apostas()
def slider_modificado(self, widget, never_used=''):
self.atualiza_informacoes_de_lucro()
def bet_team1_house1_ativado(self, widget, never_used=''):
if self.rbTime1Casa1.get_active() and (not self.rbTime1Casa1.get_property("inconsistent") ):
self.hsTimes.set_value(0)
self.atualiza_informacoes_de_lucro()
else:
if (not self.rbTime1Casa2.get_property("inconsistent") ):
self.rbTime1Casa2.set_active(True)
def aposta_time1_casa2_ativado(self, widget, never_used=''):
if self.rbTime1Casa2.get_active() and (not self.rbTime1Casa2.get_property("inconsistent") ):
self.hsTimes.set_value(0)
self.atualiza_informacoes_de_lucro()
else:
if (not self.rbTime1Casa1.get_property("inconsistent") ):
self.rbTime1Casa1.set_active(True)
def preenche_dados_time1(self):
self.lbNumeroTime1.set_text(self.Time1)
self.atualiza_radio_buttons()
self.lbResultadoTime1.set_text(self.Time1)
self.lbVencedorTime1.set_text("Se " + self.Time1 + " ganhar")
def preenche_dados_time2(self):
self.lbNumeroTime2.set_text(self.Time2)
self.atualiza_radio_buttons()
self.lbResultadoTime2.set_text(self.Time2)
self.lbVencedorTime2.set_text("Se " + self.Time2 + " ganhar")
def preenche_dados_casa1(self):
self.lbNumeroCasa1.set_text(self.Casa1)
self.atualiza_radio_buttons()
def preenche_dados_casa2(self):
self.lbNumeroCasa2.set_text(self.Casa2)
self.atualiza_radio_buttons()
def atualiza_radio_buttons(self):
self.rbTime1Casa1.set_label(
self.preencheDadosCasaTime(self.Time1, self.Casa1, self.Time2, self.Casa2)
)
self.rbTime1Casa2.set_label(
self.preencheDadosCasaTime(self.Time1, self.Casa2, self.Time2, self.Casa1)
)
def preencheDadosCasaTime(self, time1, casa1, time2, casa2):
return time1 + " => " + casa1 + " / " + time2 +" => " + casa2
def atualiza_informacoes_de_lucro(self):
valor_time1 = Decimal('0.0')
valor_time2 = Decimal('0.0')
        proporcao = Decimal(self.hsTimes.get_value())/Decimal('100')
if self.rbTime1Casa1.get_active() and (not self.rbTime1Casa1.get_property("inconsistent") ):
maximo_valor_time1 = self.calculador_de_aposta.biggest_possible_value_team1(bet_team1_house1 = True)
minimo_valor_time1 = self.calculador_de_aposta.least_possible_value_team1(bet_team1_house1 = True)
diferenca = maximo_valor_time1 - minimo_valor_time1
valor_proporcao = proporcao*diferenca
valor_time1 = minimo_valor_time1+valor_proporcao
valor_time2 = self.calculador_de_aposta.cash_to_bet - valor_time1
self.txValorTime1.set_text(("%.2f" % valor_time1))
self.txValorTime2.set_text(("%.2f" % valor_time2))
elif self.rbTime1Casa2.get_active() and (not self.rbTime1Casa2.get_property("inconsistent") ):
maximo_valor_time1 = self.calculador_de_aposta.biggest_possible_value_team1(bet_team1_house1 = False)
minimo_valor_time1 = self.calculador_de_aposta.least_possible_value_team1(bet_team1_house1 = False)
diferenca = maximo_valor_time1 - minimo_valor_time1
valor_proporcao = proporcao*diferenca
valor_time1 = minimo_valor_time1+valor_proporcao
valor_time2 = self.calculador_de_aposta.cash_to_bet - valor_time1
self.txValorTime1.set_text(("%.2f" % valor_time1))
self.txValorTime2.set_text(("%.2f" % valor_time2))
if valor_time1 > 0:
lucroTime1 = 0.0
lucroTime2 = 0.0
if self.rbTime1Casa1.get_active() and (not self.rbTime1Casa1.get_property("inconsistent") ):
lucroTime1 = self.calculador_de_aposta.profit_if_team1_wins(valor_time1, bet_team1_house1 = True)
lucroTime2 = self.calculador_de_aposta.profit_if_team2_wins(valor_time2, bet_team1_house1 = True)
elif self.rbTime1Casa2.get_active() and (not self.rbTime1Casa2.get_property("inconsistent") ):
lucroTime1 = self.calculador_de_aposta.profit_if_team1_wins(valor_time1, bet_team1_house1 = False)
lucroTime2 = self.calculador_de_aposta.profit_if_team2_wins(valor_time2, bet_team1_house1 = False)
self.lbLucroTime1Vencedor.set_text(("%.2f" % lucroTime1))
self.lbLucroTime2Vencedor.set_text(("%.2f" % lucroTime2))
else:
self.lbLucroTime1Vencedor.set_text("0.0")
self.lbLucroTime2Vencedor.set_text("0.0")
def atualiza_informacoes_apostas(self):
str_final = ""
if self.calculador_de_aposta.can_bet_team1_house1:
str_final += ("É possível fazer apostas\ncom o " +
self.Time1 + " no site\n" + self.Casa1 + " e o " +
self.Time2 + "\n" + "no site " + self.Casa2) +"\n"
self.rbTime1Casa1.set_property("inconsistent", False)
else:
self.rbTime1Casa1.set_property("inconsistent", True)
self.rbTime1Casa2.set_active(True)
if self.calculador_de_aposta.can_bet_team1_house2:
str_final += ("É possível fazer apostas\ncom o " +
self.Time1 + " no site\n" + self.Casa2 + " e o " +
self.Time2 + "\n" + "no site " + self.Casa1) +"\n"
self.rbTime1Casa2.set_property("inconsistent", False)
else:
self.rbTime1Casa2.set_property("inconsistent", True)
self.hsTimes.set_value(0)
self.atualiza_informacoes_de_lucro()
self.lbAvisaNumeros.set_text(str_final)
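# Illustrative only (not part of the original GUI): a minimal sketch of the
# Bet_Calculator calls exercised by Vision above. The attribute semantics and
# the concrete odds/stake values are assumptions inferred from this file.
def _example_bet_calculator():
    calc = Bet_Calculator()
    calc.decimal_team1_house1 = '2.10'
    calc.decimal_team2_house1 = '1.70'
    calc.decimal_team1_house2 = '1.80'
    calc.decimal_team2_house2 = '2.20'
    calc.cash_to_bet = '100'
    if calc.can_bet_team1_house1:
        stake_team1 = calc.biggest_possible_value_team1(bet_team1_house1=True)
        stake_team2 = calc.cash_to_bet - stake_team1
        profit_team1 = calc.profit_if_team1_wins(stake_team1, bet_team1_house1=True)
        profit_team2 = calc.profit_if_team2_wins(stake_team2, bet_team1_house1=True)
        return profit_team1, profit_team2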
if __name__ == "__main__":
app = Vision()
gtk.main() | felipeAraujo/Bet-Calculator | main.py | Python | mit | 12,618 |
from csat.acquisition import models
class UploadConfig(models.DataCollectorConfig):
pass
| GaretJax/csat | csat/collectors/upload/models.py | Python | mit | 95 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/requests/messages/incense_encounter_message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/networking/requests/messages/incense_encounter_message.proto',
package='pogoprotos.networking.requests.messages',
syntax='proto3',
serialized_pb=_b('\nGpogoprotos/networking/requests/messages/incense_encounter_message.proto\x12\'pogoprotos.networking.requests.messages\"K\n\x17IncenseEncounterMessage\x12\x14\n\x0c\x65ncounter_id\x18\x01 \x01(\x04\x12\x1a\n\x12\x65ncounter_location\x18\x02 \x01(\tb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_INCENSEENCOUNTERMESSAGE = _descriptor.Descriptor(
name='IncenseEncounterMessage',
full_name='pogoprotos.networking.requests.messages.IncenseEncounterMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='encounter_id', full_name='pogoprotos.networking.requests.messages.IncenseEncounterMessage.encounter_id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='encounter_location', full_name='pogoprotos.networking.requests.messages.IncenseEncounterMessage.encounter_location', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=116,
serialized_end=191,
)
DESCRIPTOR.message_types_by_name['IncenseEncounterMessage'] = _INCENSEENCOUNTERMESSAGE
IncenseEncounterMessage = _reflection.GeneratedProtocolMessageType('IncenseEncounterMessage', (_message.Message,), dict(
DESCRIPTOR = _INCENSEENCOUNTERMESSAGE,
__module__ = 'pogoprotos.networking.requests.messages.incense_encounter_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.messages.IncenseEncounterMessage)
))
_sym_db.RegisterMessage(IncenseEncounterMessage)
# @@protoc_insertion_point(module_scope)
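# Illustrative only (appended for this document, not emitted by protoc): a
# minimal round-trip sketch using the generated message class. Field values
# are made up for demonstration.
if __name__ == '__main__':
    msg = IncenseEncounterMessage(encounter_id=123, encounter_location='42.0,-71.0')
    parsed = IncenseEncounterMessage()
    parsed.ParseFromString(msg.SerializeToString())
    assert parsed.encounter_id == 123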
| bellowsj/aiopogo | aiopogo/pogoprotos/networking/requests/messages/incense_encounter_message_pb2.py | Python | mit | 2,882 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
COMP_ID = 'compulink_mobile'
| nextgis/nextgisweb_compulink | nextgisweb_compulink/compulink_mobile/ident.py | Python | gpl-2.0 | 94 |
# Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# identity.py - program identity
#
# Copyright Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
prog = "hg"
product = "Mercurial"
longproduct = "Mercurial Distributed SCM"
templatemap = {"@prog@": prog, "@Product@": product, "@LongProduct@": longproduct}
def replace(s):
"""Replace template instances in the given string"""
if s is not None:
for template, replacement in templatemap.items():
s = s.replace(template, replacement)
return s
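# Illustrative only (not part of the original module): a minimal check of how
# replace() expands the templates defined in templatemap.
if __name__ == "__main__":
    assert replace("@prog@ version") == "hg version"
    assert replace("@LongProduct@") == "Mercurial Distributed SCM"
    assert replace(None) is None  # None passes through unchanged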
| facebookexperimental/eden | eden/scm/edenscm/mercurial/identity.py | Python | gpl-2.0 | 783 |
# vi: ts=8 sts=4 sw=4 et
#
# test_format.py: unit tests for draco2.util.locale
#
# This file is part of Draco2. Draco2 is free software and is made available
# under the MIT license. Consult the file "LICENSE" that is distributed
# together with this file for the exact licensing terms.
#
# Draco2 is copyright (c) 1999-2007 by the Draco2 authors. See the file
# "AUTHORS" for a complete overview.
#
# $Revision: 1187 $
import py.test
import datetime
from draco2.locale import format
class TestFormatNumeric(object):
conv_basic = {
'grouping': [3, 0],
'thousands_sep': ',',
'decimal_point': '.'
}
data_basic = (
(123, '%s', '123'),
(-123, '%s', '-123'),
(123456, '%s', '123,456'),
(-123456, '%s', '-123,456'),
(1234567, '%s', '1,234,567'),
(-1234567, '%s', '-1,234,567'),
(123456789123456789, '%s', '123,456,789,123,456,789'),
(-123456789123456789, '%s', '-123,456,789,123,456,789'),
(123.456, '%.3f', '123.456'),
(-123.456, '%.3f', '-123.456'),
(1234.567, '%.3f', '1,234.567'),
(-1234.567, '%.3f', '-1,234.567'),
(123456789123.456, '%.3f', '123,456,789,123.456'),
(-123456789123.456, '%.3f', '-123,456,789,123.456'),
)
def test_basic(self):
for number,fmt,string in self.data_basic:
assert format.format_numeric(number, fmt,
self.conv_basic) == string
conv_grouping = {
'grouping': [3, 3, None],
'thousands_sep': ',',
'decimal_point': '.'
}
data_grouping = (
(1234567, '%s', '1,234,567'),
(-1234567, '%s', '-1,234,567'),
(123456789123456789, '%s', '123456789123,456,789'),
(-123456789123456789, '%s', '-123456789123,456,789')
)
def test_grouping(self):
for number,fmt,string in self.data_grouping:
assert format.format_numeric(number, fmt,
self.conv_grouping) == string
conv_illegal = {
'grouping': [3, 3],
'thousands_sep': ',',
'decimal_point': '.'
}
data_illegal = (
(123, '%s', '123'),
)
def test_illegal(self):
for number,fmt,string in self.data_illegal:
py.test.raises(ValueError, format.format_numeric,
number, fmt, self.conv_illegal)
class TestFormatMonetary(object):
data = {
'italy': (
{
'mon_grouping': [3, 0],
'mon_thousands_sep': '.',
'mon_decimal_point': '',
'positive_sign': '',
'negative_sign': '-',
'p_sep_by_space': 0,
'p_cs_precedes': 1,
'p_sign_posn': 1,
'n_sep_by_space': 0,
'n_cs_precedes': 1,
'n_sign_posn': 1,
'currency_symbol': 'L.',
'frac_digits': 0,
'int_curr_symbol': 'ITL.',
'int_frac_digits': 0
},
(1230, 'L.1.230', '-L.1.230', 'ITL.1.230')),
'netherlands': (
{
'mon_grouping': [3, 0],
'mon_thousands_sep': '.',
'mon_decimal_point': ',',
'positive_sign': '',
'negative_sign': '-',
'p_sep_by_space': 1,
'p_cs_precedes': 1,
'p_sign_posn': 1,
'n_sep_by_space': 1,
'n_cs_precedes': 1,
'n_sign_posn': 4,
'currency_symbol': 'F',
'frac_digits': 2,
'int_curr_symbol': 'NLG',
'int_frac_digits': 2
},
(1234.56, 'F 1.234,56', 'F -1.234,56', 'NLG 1.234,56')),
'norway': (
{
'mon_grouping': [3, 0],
'mon_thousands_sep': '.',
'mon_decimal_point': ',',
'positive_sign': '',
'negative_sign': '-',
'p_sep_by_space': 0,
'p_cs_precedes': 1,
'p_sign_posn': 1,
'n_sep_by_space': 0,
'n_cs_precedes': 1,
'n_sign_posn': 2,
'currency_symbol': 'kr',
'frac_digits': 2,
'int_curr_symbol': 'NOK ',
'int_frac_digits': 2
},
(1234.56, 'kr1.234,56', 'kr1.234,56-', 'NOK 1.234,56')),
'switzerland': (
{
'mon_grouping': [3, 0],
'mon_thousands_sep': ',',
'mon_decimal_point': '.',
'positive_sign': '',
'negative_sign': 'C',
'p_sep_by_space': 0,
'p_cs_precedes': 1,
'p_sign_posn': 1,
'n_sep_by_space': 0,
'n_cs_precedes': 1,
'n_sign_posn': 2,
'currency_symbol': 'SFrs.',
'frac_digits': 2,
'int_curr_symbol': 'CHF ',
'int_frac_digits': 2
},
(1234.56, 'SFrs.1,234.56', 'SFrs.1,234.56C', 'CHF 1,234.56'))
}
def test_basic(self):
for country, tuple in self.data.items():
conv, data = tuple
number, pos, neg, intl = data
fmt = '%s'
assert format.format_monetary(number, fmt, conv) == pos
assert format.format_monetary(-number, fmt, conv) == neg
assert format.format_monetary(number, fmt, conv,
international=True) == intl
class TestFormatDate(object):
# Dutch langinfo
langinfo = \
{
'abday_1': 'zo',
'abday_2': 'ma',
'abday_3': 'di',
'abday_4': 'wo',
'abday_5': 'do',
'abday_6': 'vr',
'abday_7': 'za',
'day_1': 'zondag',
'day_2': 'maandag',
'day_3': 'dinsdag',
'day_4': 'woensdag',
'day_5': 'donderdag',
'day_6': 'vrijdag',
'day_7': 'zaterdag',
'abmon_1': 'jan',
'abmon_2': 'feb',
'abmon_3': 'mrt',
'abmon_4': 'apr',
'abmon_5': 'mei',
'abmon_6': 'jun',
'abmon_7': 'jul',
'abmon_8': 'aug',
'abmon_9': 'sep',
'abmon_10': 'okt',
'abmon_11': 'nov',
'abmon_12': 'dec',
'mon_1': 'januari',
'mon_2': 'februari',
'mon_3': 'maart',
'mon_4': 'april',
'mon_5': 'mei',
'mon_6': 'juni',
'mon_7': 'juli',
'mon_8': 'augustus',
'mon_9': 'september',
'mon_10': 'oktober',
'mon_11': 'november',
'mon_12': 'december',
'am_str': 'am',
'pm_str': 'pm',
'd_fmt': '%d-%m-%y',
't_fmt': '%H:%M:%S',
'd_t_fmt': '%a %d %b %Y %H:%M:%S %Z'
}
data = (
(datetime.date(2005, 10, 8), '%x', '08-10-05'),
(datetime.time(17, 0, 1), '%X', '17:00:01'),
(datetime.datetime(2005, 10, 8, 17, 0, 1), '%c', 'za 08 okt 2005 17:00:01 ')
)
def test_basic(self):
for object,fmt,result in self.data:
if isinstance(object, datetime.date):
assert format.format_date(object, fmt,
self.langinfo) == result
elif isinstance(object, datetime.time):
assert format.format_time(object, fmt,
self.langinfo) == result
elif isinstance(object, datetime.datetime):
assert format.format_datetime(object, fmt,
self.langinfo) == result
def test_extended(self):
fmt = lambda x: format.format_datetime(x, '%q', self.langinfo)
dt = datetime.datetime(2005, 1, 1)
assert fmt(dt) == '1'
dt = datetime.datetime(2005, 3, 1)
assert fmt(dt) == '1'
dt = datetime.datetime(2005, 4, 1)
assert fmt(dt) == '2'
def test_custom(self):
self.langinfo['d_t_fmt_1'] = 'Q%q %Y'
fmt = lambda x: format.format_datetime(x, '%1', self.langinfo)
dt = datetime.datetime(2005, 1, 1)
assert fmt(dt) == 'Q1 2005'
dt = datetime.datetime(2005, 3, 1)
assert fmt(dt) == 'Q1 2005'
dt = datetime.datetime(2005, 4, 1)
assert fmt(dt) == 'Q2 2005'
| geertj/draco2 | draco2/locale/test/test_format.py | Python | mit | 8,425 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from logging import getLogger
log = getLogger(__name__)
# test integer types
| kalefranz/auxlib | tests/test_type_coercion.py | Python | isc | 169 |
# -*- coding: utf-8 -*-
import logging
import sys
import time
class StackTraceFilter(logging.Filter):
def filter(self, record):
"""
Filter log records with stack traces.
Multi-line messages must be processed differently than those
of standard application logs.
Parameters
----------
record : logging.LogRecord
Log record.
Returns
-------
int
If the handler should include the log record, 1 is
returned. If the handler should exclude the log record, 0
is returned.
"""
if record.exc_info:
is_included = 0
else:
is_included = 1
return is_included
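# Illustrative only (not part of the original module): a minimal sketch of
# attaching StackTraceFilter to a handler so that records carrying exc_info
# (e.g. from logger.exception) are dropped. All names are assumptions.
def _example_stack_trace_filter():
    handler = logging.StreamHandler()
    handler.addFilter(StackTraceFilter())
    logger = logging.getLogger('example')
    logger.addHandler(handler)
    logger.error('plain record')  # included: no stack trace attached
    try:
        1 / 0
    except ZeroDivisionError:
        logger.exception('record with traceback')  # excluded by the filter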
# This object relies on sys._getframe() to collect runtime data. While
# a protected function, numerous modules from the standard library such
# as inspect, logging, and traceback all ultimately rely it as well. A
# handful of top-rated Stack Overflow answers concur [1].
# sys._getframe() as such is idiomatic, stable, and arguably canonical.
#
# See Also
# --------
# inspect.stack()
# logging.Logger.findCaller()
# traceback.format_stack()
#
# References
# ----------
# .. [1] Albert Vonpupp, "How to get a function name as a string in
# Python?", http://stackoverflow.com/a/13514318/6754214
class Tracer:
def __init__(self, next_frame_name):
"""
Context manager for collecting runtime data.
See the README section on Examples for more details.
Parameters
----------
next_frame_name : str
This generally refers to the name of the method or
function being called within the context manager.
"""
self._next_frame_name = next_frame_name
# A depth of 0 returns the frame at the top of the call stack.
# An offset is therefore required to account for calling the
# Tracer itself.
self._current_frame = sys._getframe(1)
self._previous_frame = sys._getframe(2)
self._start_time = None
self._stop_time = None
@property
def message(self):
message = 'Traced the call from {current_frame_name} to {next_frame_name}.'
return message.format(**self.to_json())
def to_json(self):
"""
Convert the object into a serializable primitive.
Returns
-------
dict
"""
data = {
'next_frame_name': self._next_frame_name,
'current_frame_file_path': self._current_frame.f_code.co_filename,
'current_frame_line_number': self._current_frame.f_lineno,
'current_frame_name': self._current_frame.f_code.co_name,
'previous_frame_file_path': self._previous_frame.f_code.co_filename,
'previous_frame_line_number': self._previous_frame.f_lineno,
'previous_frame_name': self._previous_frame.f_code.co_name,
'start_time': self._start_time,
'stop_time': self._stop_time}
return data
def __enter__(self):
self._start_time = time.time()
return self
# Upon exiting the context, this method is passed the exception
# type, value, and stacktrace if they exist.
def __exit__(self, *args, **kwargs):
self._stop_time = time.time()
def __repr__(self):
repr_ = '{}(next_frame_name="{}")'
return repr_.format(self.__class__.__name__, self._next_frame_name)
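# Illustrative only (not part of the original module): a minimal sketch of the
# Tracer context manager; the README's Examples section has the canonical
# usage. The function names here are assumptions.
def _example_tracer():
    def fetch_rows():
        time.sleep(0.01)  # stand-in for real work

    with Tracer(next_frame_name='fetch_rows') as tracer:
        fetch_rows()
    # e.g. "Traced the call from _example_tracer to fetch_rows."
    print(tracer.message)
    print(tracer.to_json())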
| dnguyen0304/tuxedo-mask | tuxedo_mask/utilities.py | Python | mit | 3,479 |
"""@file ideal_ratio_processor.py
contains the IdealRatioProcessor class"""
import os
import subprocess
import StringIO
import scipy.io.wavfile as wav
import numpy as np
import processor
from nabu.processing.feature_computers import feature_computer_factory
class IdealRatioProcessor(processor.Processor):
"""a processor for audio files, this will compute the ideal ratio masks"""
def __init__(self, conf, segment_lengths):
"""IdealRatioProcessor constructor
Args:
conf: IdealRatioProcessor configuration as a dict of strings
segment_lengths: A list containing the desired lengths of segments.
Possibly multiple segment lengths"""
# create the feature computer
if 'pow' not in conf['feature']:
raise Exception('expecting feature to be in power domain')
self.comp = feature_computer_factory.factory(conf['feature'])(conf)
# set the length of the segments. Possibly multiple segment lengths
self.segment_lengths = segment_lengths
# initialize the metadata
self.dim = self.comp.get_dim()
self.nontime_dims = [self.dim]
super(IdealRatioProcessor, self).__init__(conf)
print ('WARNING: untested script')
raise Exception(
'Speaker signals are summed before energy is calculated. This is wrong. '
'Look at ideal_ratio_multimic_processor.py')
def __call__(self, dataline):
"""process the data in dataline
Args:
dataline: either a path to a wav file or a command to read and pipe
an audio file
Returns:
segmented_data: The segmented info on bins to be used for scoring as a list of numpy arrays per segment length
utt_info: some info on the utterance"""
utt_info = dict()
splitdatalines = dataline.strip().split(' ')
nrS = len(splitdatalines) - 1
speaker_rate = None
speaker_utt = None
# Add speaker signals
for s in range(nrS):
rate, utt = _read_wav(splitdatalines[s])
if speaker_rate is None:
speaker_rate = rate
speaker_utt = utt
else:
if speaker_rate != rate:
raise Exception('Unequal sampling rates!')
if len(speaker_utt) != len(utt):
raise Exception('Unequal length')
speaker_utt = speaker_utt + utt
speaker_features = self.comp(speaker_utt, speaker_rate)
ref_rate, ref_utt = _read_wav(splitdatalines[-1])
ref_features = self.comp(ref_utt, ref_rate)
# calculate ideal ratio mask
targets = speaker_features/(speaker_features + ref_features + 1e-48)
targets = np.sqrt(targets)
segmented_data = self.segment_data(targets)
return segmented_data, utt_info
def write_metadata(self, datadir):
"""write the processor metadata to disk
Args:
datadir: the directory where the metadata should be written"""
for i, seg_length in enumerate(self.segment_lengths):
seg_dir = os.path.join(datadir, seg_length)
with open(os.path.join(seg_dir, 'dim'), 'w') as fid:
fid.write(str(self.dim))
with open(os.path.join(seg_dir, 'nontime_dims'), 'w') as fid:
fid.write(str(self.nontime_dims)[1:-1])
def _read_wav(wavfile):
"""
read a wav file
Args:
wavfile: either a path to a wav file or a command to read and pipe
an audio file
Returns:
- the sampling rate
- the utterance as a numpy array
"""
if os.path.exists(wavfile):
# its a file
(rate, utterance) = wav.read(wavfile)
elif wavfile[-1] == '|':
# its a command
# read the audio file
pid = subprocess.Popen(wavfile + ' tee', shell=True, stdout=subprocess.PIPE)
output, _ = pid.communicate()
output_buffer = StringIO.StringIO(output)
(rate, utterance) = wav.read(output_buffer)
else:
# its a segment of an utterance
split = wavfile.split(' ')
begin = float(split[-2])
end = float(split[-1])
unsegmented = ' '.join(split[:-2])
rate, full_utterance = _read_wav(unsegmented)
utterance = full_utterance[int(begin*rate):int(end*rate)]
return rate, utterance
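# Illustrative only (not part of the original processor): a minimal numpy
# sketch of the ideal ratio mask computed in __call__ above, applied to two
# made-up power spectrograms.
def _example_ideal_ratio_mask():
    speaker_pow = np.array([[4.0, 1.0], [9.0, 1e-48]])
    noise_pow = np.array([[1.0, 1.0], [1.0, 1.0]])
    mask = np.sqrt(speaker_pow / (speaker_pow + noise_pow + 1e-48))
    return mask  # values in [0, 1]; close to 1 where the speaker dominates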
| JeroenZegers/Nabu-MSSS | nabu/processing/processors/ideal_ratio_processor.py | Python | mit | 3,843 |
#~~~~~~~~Authentication for apps.twitter.com~~~~~~~~~#
#~~~~~~~~DO NOT UPLOAD THIS WITH YOUR TOKEN INFO TO ANYWHERE PUBLIC~~~~~~~~~#
CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_TOKEN = ""
ACCESS_TOKEN_SECRET = ""
| WeiseGuy/WeiseBot | src/covert.py | Python | mit | 223 |
"""
Derivation and Elementary Trees live here.
"""
from __future__ import print_function
from baal.structures import Entry, ConstituencyTree, consts
from baal.semantics import Predicate, Expression
from collections import deque
from copy import copy, deepcopy
from math import floor, ceil
try:
input = raw_input
except:
pass
def prn_pairs(phead, thead):
pairs = [("-LRB-", "-RRB-"), ("-RSB-", "-RSB-"), ("-LCB-", "-RCB-"),
("--", "--"), (",", ",")]
return any([left.lower()==phead.lower() and right.lower()==thead.lower() for left,right in pairs])
class AttachmentPoint(object):
def __init__(self, free, pos_symbol, gorn, type, seq_index):
self.free = free
self.pos_symbol = pos_symbol
self.gorn = gorn
self.type = type
self.seq_index = seq_index
self.hlf_symbol = None
self.frontier_increment = 0.01
self.frontier = (-1,0)
def __repr__(self):
return "{}@{}".format(self.pos_symbol,self.gorn)
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
@classmethod
def from_tree(cls, tree, address, seq_index, tree_type):
new_point = cls(True, tree.symbol, address, tree_type, seq_index)
if tree.spine_index >= 0:
new_point.frontier = (tree.spine_index, tree.spine_index)
return new_point
@property
def left_frontier(self):
l, r = self.frontier
self.frontier = (l-self.frontier_increment, r)
assert self.frontier[0] > floor(self.frontier[0])
return self.frontier[0]
@property
def right_frontier(self):
l, r = self.frontier
self.frontier = (l, r+self.frontier_increment)
assert self.frontier[1] < ceil(self.frontier[1])
return self.frontier[1]
def sibling_increment(self, left=True):
l, r = self.frontier
if left:
self.frontier = (ceil(l) - 1.0, r)
else:
self.frontier = (l, floor(r) + 1.0)
def match(self, op):
pos_match = self.pos_symbol == op.target['pos_symbol']
gorn_match = ((self.gorn == op.target['target_gorn'])
or op.target['target_gorn'] is None)
hlf_match = self.hlf_symbol == op.target['target_hlf']
type_match = self.type == op.type
fail = []
if not pos_match:
f = "failure because pos:"
f += "self: {}; op: {}".format(str(self.pos_symbol),
str(op.target['pos_symbol']))
fail.append(f)
if not gorn_match:
f = "failure because gorn:"
f += "self: {}; op: {}".format(str(self.gorn),
str(op.target['target_gorn']))
fail.append(f)
if not hlf_match:
f = "failure because hlf:"
f += "self: {}; op: {}".format(str(self.hlf_symbol),
str(op.target['target_hlf']))
fail.append(f)
#if len(fail) > 0:
# print(" & \n".join(fail))
#else:
# print("Success!")
return self.free and pos_match and gorn_match and hlf_match and type_match
def set_path_features(self, hlf_symbol):
self.hlf_symbol = hlf_symbol
def clone(self):
ret = AttachmentPoint(self.free, self.pos_symbol, self.gorn,
self.type, self.seq_index)
ret.hlf_symbol = self.hlf_symbol
ret.frontier = self.frontier
return ret
class AttachmentOperation(object):
"""Represents an elementary tree operation
Used by DerivationTrees when trying to find where an elementary tree should attach
There are two modes to the operation:
1. Use it as a general attachment. In this case it needs to know
the permissable attachments via the pos_symbol (and direction if insertion)
2. Use it in specific attachment. In this case it needs to know
identifying information about the tree it should be attaching to.
Current ideas: hlf_symbol, tree_id, argument_number, gorn_address
Thoughts: gorn_address won't work (for obvious reasons as the tree grows)
tree_id won't work because there might be duplicates
hlf_symbol could work, as long as this semantic form remains
argument_number requires planning, which CSG and others might handle
"""
def __init__(self, target, type):
"""Pass in the already made parameters to make the operation.
Args:
target: dict with keys 'pos_symbol' and 'parameter'
'pos_symbol' is the part of speech this operation looks for
'parameter' is direction for insertions, and argument number
for substitutions
type: the type of operation this is: consts.INSERTION or consts.SUBSTITUTION
Notes:
insertion direction: left means it inserts on the left side
e.g. (NP* (DT a)) inserts left.
the asterisk denotes the attachment point
right means it inserts on the right side
e.g. (*S (. .)) inserts right
the asterisk denotes the attachment point
"""
self.target = target
self.type = type
@property
def is_insertion(self):
return self.type == consts.INSERTION
@property
def direction(self):
if not self.is_insertion:
raise Exception("Not an insertion tree")
else:
return self.target['attach_direction']
def clone(self):
return AttachmentOperation(self.target, self.type)
def set_path_features(self, target_gorn, target_hlf):
if target_hlf is not None:
self.target['target_hlf'] = target_hlf
if target_gorn is not None:
self.target['target_gorn'] = tuple(target_gorn)
@classmethod
def from_tree(cls, tree):
"""Calculate the parameters for the operation from a parse tree
Args:
tree: A ConstituencyParse instance
"""
if tree.adjunct:
target = {'pos_symbol': tree.symbol, 'attach_direction': tree.direction,
'target_gorn': None, 'target_hlf': None}
type = consts.INSERTION
else:
target = {'pos_symbol': tree.symbol, 'attach_direction': "up",
'target_gorn': None, 'target_hlf': None}
type = consts.SUBSTITUTION
return cls(target, type)
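# Illustrative only (not part of the original module): a minimal sketch of
# constructing an AttachmentOperation by hand rather than via from_tree().
# The concrete symbols below are assumptions for demonstration.
def _example_attachment_operation():
    target = {'pos_symbol': 'NP', 'attach_direction': 'left',
              'target_gorn': None, 'target_hlf': None}
    op = AttachmentOperation(target, consts.INSERTION)
    assert op.is_insertion and op.direction == 'left'
    return op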
return cls(root_op, "", (0,), None, "(ROOT)",
[root_subpoint], [], hlf_symbol="g-1")
class ElementaryTree(object):
"""represent a tree fragment, its operations, and its internal addresses
"""
def __init__(self, op, head, head_address, head_symbol, bracketed_string,
substitution_points, insertion_points,
hlf_symbol=None, tree_id=None, last_type=None, last_index=-1):
self.tree_operation = op
self.head = head
self.head_address = head_address
self.substitution_points = substitution_points
self.insertion_points = insertion_points
self.address = (0,)
self.last_type = last_type
self.last_index = last_index
self.hlf_symbol = hlf_symbol
self.bracketed_string = bracketed_string
self.tree_id = tree_id
self.head_symbol = head_symbol
@classmethod
def from_full_parse_tree(cls, parse_tree):
if parse_tree.symbol == "" and len(parse_tree.children) == 1:
parse_tree.symbol = "ROOT"
_, addressbook = parse_tree.clone()
@classmethod
def from_single_parse_tree(cls, parse_tree):
if parse_tree.save_str().upper() == "(ROOT ROOT)":
return cls.root_tree()
_, addressbook = parse_tree.clone()
head = None
head_address = None
substitution_points = list()
insertion_points = list()
sorted_book = sorted(addressbook.items())
_, root = sorted_book[0]
root_sym = root.symbol
for address, tree in sorted_book:
#if tree.symbol == "ROOT":
# head = "ROOT"
# new_point = AttachmentPoint.from_tree(tree, address, 0, consts.SUBSTITUTION)
# substitution_points.append(new_point)
if tree.lexical:
if head is None:
head = tree.symbol
head_address = address
head_parent = tree.parent
else:
assert prn_pairs(head, tree.symbol)
elif tree.complement:
new_point = AttachmentPoint.from_tree(tree,
address,
len(substitution_points),
consts.SUBSTITUTION)
substitution_points.append(new_point)
elif tree.spine_index >= 0:
new_point = AttachmentPoint.from_tree(tree,
address,
len(insertion_points),
consts.INSERTION)
insertion_points.append(new_point)
else:
print(address, tree)
print("Then what is it?")
op = AttachmentOperation.from_tree(parse_tree)
        assert (head is not None and head_address is not None) or head == "ROOT"
return cls(op, head, head_address, head_parent, parse_tree.save_str(),
substitution_points, insertion_points)
@classmethod
def from_bracketed_string(cls, bracketed_string):
parse_tree, _ = ConstituencyTree.make(bracketed_string=bracketed_string)
return cls.from_single_parse_tree(parse_tree)
@classmethod
def root_tree(cls):
root_op = AttachmentOperation({'pos_symbol': 'ROOT', 'attach_direction': None,
'target_gorn': None, 'target_hlf':None},
consts.SUBSTITUTION)
root_subpoint = AttachmentPoint(True, 'ROOT', (0,), consts.SUBSTITUTION, 0)
root_subpoint.hlf_symbol = "g-1"
return cls(root_op, "", (0,), None, "(ROOT)",
[root_subpoint], [], hlf_symbol="g-1")
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
################### INSERTION OPERATION
########################################
def insert(self, op_tree):
new_tree = deepcopy(self)#.clone()
address = new_tree.mark_insertion(op_tree.tree_operation)
op_tree = deepcopy(op_tree)#.clone()
op_tree.address = address
return new_tree, op_tree
def mark_insertion(self, op):
assert self.last_match is not None
assert self.last_match.match(op)
if op.target['attach_direction'] == "left":
op_index = self.last_match.left_frontier
else:
op_index = self.last_match.right_frontier
return self.last_match.gorn + (op_index,)
def matches_inspoint(self, op):
self.last_type = None
self.last_index = -1
for index, point in enumerate(self.insertion_points):
if point.match(op):
self.last_index = index
self.last_type = consts.INSERTION
return True
return False
################### SUBSTITUTION OPERATION
###########################################
def substitute(self, op_tree):
"""update open substitution spots.
Args:
op_tree: an ElementaryTree instance
Notes:
accepts an op_tree that needs to substitute here.
raises an Exception if it can't
"""
new_tree = deepcopy(self)#self.clone()
address = new_tree.mark_substituted(op_tree.tree_operation)
op_tree = deepcopy(op_tree)#.clone()
op_tree.address = address
return new_tree, op_tree
def mark_substituted(self, op):
assert self.last_match is not None
assert self.last_match.match(op)
self.last_match.free = False
match_gorn = self.last_match.gorn
if self.hlf_symbol == 'g-1':
return match_gorn
is_left = match_gorn < self.head_address
for point in self.insertion_points:
if point.gorn == match_gorn[:-1]:
point.sibling_increment(is_left)
return match_gorn
def matches_subpoint(self, op):
"""check to see if operation matches anything on this tree
Args:
op: AttachmentOperation instance
Returns:
True, False
"""
self.last_type = None
self.last_index = -1
for index, point in enumerate(self.substitution_points):
if point.match(op):
self.last_type = consts.SUBSTITUTION
self.last_index = index
return True
return False
##################### UTILITY METHODS
#####################################
def point_iterator(self, ignore_taken=False):
for pt_type, points in zip(['SUB', 'INS'], [self.sub_points, self.ins_points]):
for point in points:
if ignore_taken and not point.free:
continue
yield pt_type, point
@property
def ins_points(self):
return self.insertion_points
@property
def sub_points(self):
return self.substitution_points
@property
def root_pos(self):
return self.tree_operation.target['pos_symbol']
@property
def last_match(self):
if self.last_index < 0:
return None
elif self.last_type == consts.SUBSTITUTION:
return self.substitution_points[self.last_index]
else:
return self.insertion_points[self.last_index]
@property
def is_insertion(self):
return self.tree_operation.is_insertion
@property
def pos_symbol(self):
return self.tree_operation.target['pos_symbol']
def set_path_features(self, target_gorn=None, target_hlf=None,
self_hlf=None, tree_id=None):
"""Set the variables needed to reconstruct paths.
        Args:
target_gorn: the gorn address of the target operation node
target_hlf: the target hlf symbol of the target operation tree
self_hlf: this tree's hlf symbol
Notes:
The gorn address will identify where in the target tree
The target_hlf will identify which tree; especially important for duplicates
"""
if self_hlf:
for point in self.substitution_points + self.insertion_points:
point.set_path_features(self_hlf)
self.hlf_symbol = self_hlf
if target_gorn or target_hlf:
self.tree_operation.set_path_features(target_gorn, target_hlf)
if tree_id:
self.tree_id = tree_id
def expand_address(self, incoming):
self.expanded_address = incoming
for _, point in self.point_iterator():
point.expanded_address = incoming + point.gorn[1:]
""" a soft deletion to see if i can get rid of this code
def refresh_points(self):
self.tree_operation = self.tree_operation.clone()
self.substitution_points = [sub.clone() for sub in self.substitution_points]
self.insertion_points = [ins.clone() for ins in self.insertion_points]
def clone(self):
new_tree = ElementaryTree(self.tree_operation, self.head,
self.head_address, self.bracketed_string,
self.substitution_points,
self.insertion_points)
new_tree.refresh_points()
if self.last_match:
new_tree.last_type = self.last_type
new_tree.last_index = self.last_index
if self.hlf_symbol:
new_tree.hlf_symbol = self.hlf_symbol
new_tree.address = self.address
new_tree.tree_id = self.tree_id
return new_tree
"""
def __str__(self):
return self.bracketed_string
def __repr__(self):
substr = ", ".join("{}{}@{}".format(sub.pos_symbol,
"-FREE" if sub.free else "-FILLED",
sub.gorn)
for sub in sorted(self.substitution_points,
key=lambda x: x.gorn))
instr = ", ".join("{}@{}".format(ins.pos_symbol, ins.gorn)
for ins in sorted(self.insertion_points,
key=lambda x: x.gorn))
if self.tree_operation.is_insertion:
typestr = "{}*" if self.tree_operation.direction == "left" else "*{}"
else:
typestr = "^{}^"
typestr = typestr.format(self.head)
return "<{}; sub=[{}], ins=[{}]>".format(typestr, substr, instr)
class DerivationTree(object):
"""represent a tree of ElementaryTrees and their attachment addresses.
"""
def __init__(self, elem_tree, children, predicate=None, suppress_predicate=False):
self.elem_tree = elem_tree
self.children = children
self.predicate = predicate
if not suppress_predicate and predicate is None:
self.predicate = self.instantiate_semantics()
@classmethod
def root_tree(cls):
E = ElementaryTree.root_tree()
P = Predicate(name='ROOT', valence=1, hlf_symbol='g-1')
return cls(E, [], P)
@classmethod
def from_single_parse_tree(cls, tree):
elem_tree = ElementaryTree.from_single_parse_tree(tree)
return cls(elem_tree, [])
@classmethod
def from_bracketed(cls, bracketed_string, **kwargs):
elem_tree = ElementaryTree.from_bracketed_string(bracketed_string)
#parse_tree, _ = ConstituencyTree.make(bracketed_string=bracketed_string)
return cls(elem_tree, [], **kwargs)
@property
def E(self):
""" shortcut alias for shorter lines """
return self.elem_tree
@property
def is_insertion(self):
return self.elem_tree.is_insertion
@property
def direction(self):
if self.is_insertion:
return self.E.tree_operation.target['attach_direction']
else:
return "up"
@property
def tree_op(self):
return self.E.tree_operation
@property
def bracketed(self):
return self.E.bracketed_string
@property
def head(self):
return self.E.head
@property
def supertag(self):
return (self.E.root_pos, self.E.head_symbol, self.direction)
@property
def superindex(self):
return (self.head, self.supertag)
@property
def is_root(self):
return "ROOT" in self.E.bracketed_string
@property
def num_children(self):
return sum([child.num_children+1 for child in self.children])
@property
def lexical(self):
out = [self.E.head]
for child in self.children:
out.extend(child.lexical)
return out
def target_gorn(self, adjust_insertion=True):
gorn = self.tree_op.target['target_gorn']
direction = self.tree_op.target['attach_direction']
if self.is_insertion and adjust_insertion:
gorn += ((-100 if direction == "left" else 100), )
return gorn
def accepts_op(self, other_tree):
other_target = other_tree.E.tree_operation.target['pos_symbol']
if other_tree.is_insertion:
points = self.E.insertion_points
else:
points = self.E.substitution_points
for point in points:
if point.pos_symbol == other_target:
return True
return False
def expand_address(self, incoming=None):
incoming = incoming or (0,)
self.E.expand_address(incoming)
self.expanded_address = incoming
for child in self.children:
child_address = incoming + child.E.address[1:]
child.expand_address(child_address)
def all_points(self):
points = list(self.E.point_iterator())
for child in self.children:
            points.extend(child.all_points())
return points
def get_spine(self):
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
        not_lex = lambda t: not t.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
return spine
def roll_features(self, parent_head="ROOT"):
"""assumes 1 head.. more thought needed for other forms"""
spine = self.get_spine()
out_ch = [child.head for child in self.children]
out = [(self.head, parent_head, self.bracketed, spine, out_ch)]
for child in self.children:
out.extend(child.roll_features(self.head))
return out
def modo_roll_features(self, parent_head="ROOT", parent_spine=None):
"""v2. mother-daughter roll features
roll up the tree; get the mother-daughter quadruples
"""
parent_spine = parent_spine or ((("ROOT", "SUB"),),)
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
safety = 0
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
filter_ch = lambda c: c.E.head_symbol in [",", ":", ".", "``","''", "--"]
        not_lex = lambda t: not t.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
safety += 1
if safety == 100:
raise Exception("loop issue")
out = [(self.head, parent_head, self.bracketed, spine, parent_spine)]
for child in self.children:
out.extend(child.modo_roll_features(self.head, spine))
return out
def dcontext_roll_features(self):
"""v3. mother-daughter roll features
roll up the trees; get the node+daughter head context
"""
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
filter_ch = lambda c: c.E.head_symbol in [",", ":", ".", "``","''", "--"]
        not_lex = lambda t: not t.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
hlf_info = (self.E.hlf_symbol, self.E.tree_operation.target['target_hlf'])
child_heads = [child.head for child in self.children]
out = [(self.head, spine, child_heads, self.bracketed, hlf_info)]
for child in self.children:
out.extend(child.dcontext_roll_features())
return out
def learning_features_july2016(self):
'''sequential choice model with a horizon and RTTN
'''
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
annotate = lambda t: (t.symbol, ("SUB" if t.complement
else ("INS" if t.adjunct
else "SPINE")))
        not_lex = lambda t: not t.lexical
spine = [[(tree.symbol, self.direction)]]
while not_lex(tree):
if len(tree.children) == 1 and tree.children[0].lexical:
break
spine.append([annotate(c) for c in tree.children if not_lex(c)])
tree = tree.children[tree.spine_index]
return self.head, spine
def to_constituency(self):
raise Exception("dont use this yet")
import pdb
#pdb.set_trace()
tree, _ = ConstituencyTree.make(bracketed_string=self.bracketed)
for child in sorted(self.children, key=lambda c: c.E.address):
print("*******\n**********")
print("starting child {}".format(child.supertag))
ct = child.to_constituency()
print("----------------------------")
print("finished to constituency for ct")
print("tree is currently {}".format(tree))
print("child's ct: {}".format(ct))
print("-------------------")
print(self.bracketed)
print(child.E.address)
print(str(child))
print("attaching {} to {}".format(child.bracketed, self.bracketed))
self.attach_at(tree, ct, list(child.E.address)[1:])
return tree
def attach_at(self, node, op, address):
raise Exception("dont use this yet")
while len(address) > 1:
node = node.children[address.pop(0)]
if not hasattr(node, "bookkeeper"):
node.bookkeeper = {}
opid = address.pop(0)
assert len(address) == 0
if isinstance(opid, int):
node.children[opid].__dict__.update(op.__dict__)
elif isinstance(opid, float):
if opid > 0:
node.children.extend(op.children)
else:
node.children = op.children + node.children
node.spine_index += len(op.children)
else:
raise Exception("sanity check")
def __str__(self):
if self.E.bracketed_string == "(ROOT)" and len(self.children) == 0:
return "<empty root>"
lexical = self.in_order_lexical()
return " ".join(lexical)
def __repr__(self):
if self.E.bracketed_string == "(ROOT)" and len(self.children) == 0:
return "<empty root>"
descs = self.in_order_descriptive()
return " ".join(descs)
def _check_heads(self, child_prep, next_word, stk_idx, sf_stk, avail_pos):
for (head,hlf), child in child_prep.items():
if head == next_word:
import pdb
#pdb.set_trace()
w_size = child.num_children + 1
low,high = stk_idx, stk_idx+w_size
while high >= stk_idx and low >= 0:
possible = sf_stk[low:high]
if sorted(possible) == sorted(child.lexical):
child_prep.pop((head, hlf))
pos = avail_pos.pop()
return child, pos, low
else:
low -= 1
high -= 1
return None, None, None
def _sort_by_surface_form(self, sf_list, children, positions, left=True):
"""assign spine-out indices that agrees with surface form list (sf_list)
positions start from 0 and go negative when left, positive when right
we want to associate things closer to 0 with words closer to head
"""
#my_possible_positions = [i for i,x in enumerate(sf_list) if x==self.E.head]
#if "down" in [c.E.head for c in children]:
# import pdb
# pdb.set_trace()
#for possible_position in my_possible_positions:
#print("===")
child_prep = {(child.E.head,child.E.hlf_symbol):child for child in children}
pairing = []
avail_pos = sorted(positions)
sf_stk = sf_list[:]
if not left:
avail_pos = avail_pos[::-1]
sf_stk = sf_stk[::-1]
# if the position is so bad that it cuts off the words, just skip it
if not all([(word in sf_stk) for c in children for word in c.lexical]):
raise Exception()
stk_idx = len(sf_stk) - 1
#print("xxx")
domain = set([w for child in children for w in child.lexical])
import pdb
#pdb.set_trace()
while len(avail_pos) > 0 and stk_idx >= 0:
#while len(sf_stk) > 0 and len(pairing)<len(children):
#print("---", possible_position, child_prep.keys(), sf_stk, stk_idx)
next_word = sf_stk[stk_idx]
if next_word not in domain:
#print("trashpop", next_word)
sf_stk.pop()
else:
child, pos, low = self._check_heads(child_prep, next_word, stk_idx, sf_stk, avail_pos)
if child is not None:
stk_idx = low
sf_stk = sf_stk[:low]
pairing.append((child,pos))
stk_idx -= 1
        try:
            assert len(avail_pos) == 0
            yield pairing
        except AssertionError:
            # leftover debug path (previously guarded by a commented-out
            # check on my_possible_positions); it now only runs when the
            # positions were not exhausted, instead of after every yield
            print("available positions weren't exhausted. why?")
            print("I thought i had it figured out; multiple of this head word")
            print("it partitions string too much.. but i was wrong?")
            print("debugging. inspect now.")
            import pdb
            pdb.set_trace()
            raise Exception()
def sort_by_surface_form(self, sf_list, children, positions, left=True):
#import pdb
#pdb.set_trace()
#try:
#if self.E.head == "iii":
# import pdb
# pdb.set_trace()
all_pairings = list(self._sort_by_surface_form(sf_list, children, positions, left))
#except IndexError as e:
# print("tried to pop from an empty list... what should I do")
# import pdb
# pdb.set_trace()
if len(all_pairings) == 1:
return all_pairings[0]
else:
#try:
key = lambda item: (item[1], (item[0].E.head, item[0].E.hlf_symbol))
same = lambda p1, p2: tuple(map(key,p1))==tuple(map(key,p2))
if all([same(p1,p2) for p1 in all_pairings for p2 in all_pairings]):
#print("all same anyway, returning")
return all_pairings[0]
else:
dt_check = lambda diffs: any([item[0].E.head_symbol == "DT" for pair in diffs for item in pair])
dt_key = lambda pairing: sum([abs(p) for c,p in pairing if c.E.head_symbol=="DT"])
differences = [(p1,p2) for i,p1 in enumerate(all_pairings)
for j,p2 in enumerate(all_pairings)
if not same(p1,p2) and i<j]
differences = [(x,y) for diff_item in differences for x,y in zip(*diff_item) if x!=y]
if len(differences) == 2 and dt_check(differences):
#print("shortcutting")
out_pairing = max(all_pairings, key=dt_key)
#print("hopefully works: ", out_pairing)
return out_pairing
#return all_pairings[0]
print("Not sure what to do. not all pairings are the same. inspect please")
import pdb
pdb.set_trace()
#except Exception as e:
# print("not exactly sure what is breaking")
# import pdb
# pdb.set_trace()
def surface_index(self, sf_list, num_left):
for i,w in enumerate(sf_list):
if w == self.E.head and i >= num_left:
return i
return -1
def align_gorn_to_surface(self, surface_form):
if len(self.children) == 0:
return
sf_list = surface_form.split(" ")
if self.E.head == "as" and "much" in sf_list:
import pdb
#pdb.set_trace()
left_of = lambda x,me: x.elem_tree.address < me.elem_tree.head_address
left_children = [child for child in self.children if left_of(child, self)]
organizer = {}
num_left = sum([child.num_children+1 for child in left_children])
boundary = max(num_left, self.surface_index(sf_list, num_left))
left_form = " ".join(sf_list[:boundary])
right_form = " ".join(sf_list[boundary+1:])
#### LEFT CHILDREN
for child in left_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
children, positions = [x[0] for x in items], [x[1] for x in items]
pairing = self.sort_by_surface_form(sf_list[:boundary], children, positions, True)
for child,position in pairing:
assert child.E.address[:-1] == level
child.E.address = child.E.address[:-1] + (position,)
#### RIGHT CHILDREN
organizer = {}
right_children = [child for child in self.children if not left_of(child, self)]
for child in right_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
children, positions = [x[0] for x in items], [x[1] for x in items]
pairing = self.sort_by_surface_form(sf_list[boundary+1:], children, positions, False)
for child,position in pairing:
assert child.E.address[:-1] == level
child.E.address = child.E.address[:-1] + (position,)
for child in left_children:
child.align_gorn_to_surface(left_form)
for child in right_children:
child.align_gorn_to_surface(right_form)
def align_gorn_to_surface_deprecated_march30(self, surface_form):
left_of = lambda x,me: x.elem_tree.address < me.elem_tree.head_address
surface_index = lambda child: surface_form.find(child.elem_tree.head)
left_children = [child for child in self.children if left_of(child, self)]
organizer = {}
#### LEFT CHILDREN
for child in left_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
child_list = sorted([c for c,p in items], key=surface_index)
pop_q = deque(sorted([p for c,p in items]))
assert [x!=y for x in pop_q for y in pop_q]
for child in child_list:
addr = child.elem_tree.address
child.elem_tree.address = addr[:-1] + (pop_q.popleft(), )
#### RIGHT CHILDREN
organizer = {}
right_children = [child for child in self.children if not left_of(child, self)]
for child in right_children:
addr = child.elem_tree.address
level, position = addr[:-1], addr[-1]
organizer.setdefault(level, []).append((child, position))
for level, items in organizer.items():
if len(items) == 1:
continue
child_list = sorted([c for c,p in items], key=surface_index)
pop_q = deque(sorted([p for c,p in items]))
for child in child_list:
addr = child.elem_tree.address
child.elem_tree.address = addr[:-1] + (pop_q.popleft(), )
for child in self.children:
child.align_gorn_to_surface(surface_form)
def align_gorn_to_surface_old(self, surface_form):
ins_children = [child for child in self.children if child.is_insertion]
sub_children = [child for child in self.children if not child.is_insertion]
surface_index = lambda child: surface_form.find(child.elem_tree.head)
organizer = {}
for child in ins_children:
addr = child.elem_tree.address
new_addr = addr[:-1] + ((1,) if addr[-1] > 0 else (-1,))
organizer.setdefault(addr, []).append(child)
for proxy_addr, child_list in organizer.items():
if len(child_list) == 1:
continue
offset = min([c.elem_tree.address[-1] for c in child_list])
for i, child in enumerate(sorted(child_list, key=surface_index),0):
last_bit = i+offset
child.elem_tree.address = proxy_addr[:-1] +(last_bit,)
for child in self.children:
child.align_gorn_to_surface(surface_form)
#left_ins = [child for child in ins_children if child.elem_tree.address[-1]<0]
#right_ins = [child for child in ins_children if child.elem_tree.address[-1]>0]
#surface_index = lambda child: surface_form.find(child.elem_tree.head)
#sort_key = lambda ch: ch.elem_tree.address[:-1]+()
def gorn_in_order(self, include_empty=False):
items = [(child.elem_tree.address, child) for child in self.children]
if len(self.E.head) > 0:
items.append((self.elem_tree.head_address, self))
if include_empty:
for point in self.elem_tree.substitution_points:
if all([addr!=point.gorn for addr, _ in items]):
items.append((point.gorn, None))
sorted_items = sorted(items)
return sorted_items
def gorn_pre_order(self, merged=True):
"""Return children sorted by gorn. Use for pre-order walks.
Will also return from inside out.
"""
left_of = lambda x,me: x.elem_tree.address < me.elem_tree.head_address
left_children = [child for child in self.children if left_of(child, self)]
right_children = [child for child in self.children if not left_of(child, self)]
sorted_left = sorted(left_children, key=lambda x: x.elem_tree.address, reverse=True)
#for i,left in enumerate(sorted_left):
# print(i,left.elem_tree.bracketed_string)
# print(i,left.elem_tree.address)
sorted_right = sorted(right_children, key=lambda x: x.elem_tree.address)
#for i,right in enumerate(sorted_right):
# print(i,right.elem_tree.bracketed_string)
# print(i,right.elem_tree.address)
#sorted_children = sorted(self.children, key=lambda x: x.elem_tree.address)
if merged:
return sorted_left + sorted_right
else:
return sorted_left, sorted_right
def learning_features(self, *args):
"""make learning features. currently for dual attender model.
output: features and annotations for pairs (parent, child)
"""
feature_output = []
f1 = "head={}".format(self.E.head)
f2 = "template={}".format(self.E.bracketed_string.replace(self.E.head, ""))
if self.is_root:
my_feats = (f2,)
else:
my_feats = (f1, f2)
for child_type, side in zip(self.gorn_pre_order(False), ("left", "right")):
for i, child in enumerate(child_type):
anno = []
anno.append("dist-from-spine: {}".format(i))
anno.append("dist-from-frontier: {}".format(len(child_type)-i-1))
anno.append("spine-side: {}".format(side))
if child.is_insertion:
anno.append("type=ins")
else:
anno.append("type=sub")
for j, pt in enumerate(self.E.substitution_points):
if pt.gorn == child.E.address:
anno.append("argument-{}".format(j))
child_feats, pairs_below = child.learning_features()
feature_output.extend(pairs_below)
feature_output.append((my_feats, child_feats, tuple(anno)))
return my_feats, feature_output
def _old_learning_features(self, flat=False):
raise Exception("don't use this function anymore")
f1 = "head={}".format(self.elem_tree.head)
f2 = "template={}".format(self.elem_tree.bracketed_string.replace(self.elem_tree.head, ""))
#f4 = "surface=[{}]".format(str(self))
#fulllex = self.in_order_lexical(True)
#f5 = "surface_with_empties=[{}]".format(fulllex)
myfeats = {"f1":f1,"f2":f2,"f3": []}
#"f4":f4,"f5":f5}
allfeats = [myfeats]
first_ins = lambda child: (child.E.address < self.E.head_address and
all([child.E.address < other_child.E.address
for other_child in self.children
if other_child.E.address != child.E.address]))
last_ins = lambda child: (child.E.address > self.E.head_address and
all([child.E.address > other_child.E.address
for other_child in self.children
if other_child.E.address != child.E.address]))
for child in self.children:
# if child is insertion, find out whether it's furthest left or furthest right
            # if child is substitution, find out which of the substitution points it corresponds to
if first_ins(child):
pass
arrow = "<-" if child.is_insertion else "->"
f3 = "{}{}{}".format(self.elem_tree.head, arrow, child.elem_tree.head)
myfeats['f3'].append(f3)
allfeats.extend(child.learning_features())
if flat:
final_list = []
for featset in allfeats:
for featval in featset.values():
if isinstance(featval, list):
final_list.extend(featval)
else:
final_list.append(featval)
return final_list
return allfeats
def path_reconstruction_features(self):
return (self.E.bracketed_string, self.E.hlf_symbol,
self.E.tree_operation.target['target_hlf'],
self.E.tree_operation.target['target_gorn'])
#return (self.elem_tree.tree_id, self.elem_tree.head)
def pre_order_features(self):
feat_list = [self.path_reconstruction_features()]# for now, just id
for child in self.gorn_pre_order():
feat_list.extend(child.pre_order_features())
return tuple(feat_list)
def pre_order_descriptive(self):
descs = [str(self.elem_tree)]
sorted_children = sorted(self.children, key=lambda x: x.elem_tree.address)
for tree in sorted_children:
descs.extend(tree.pre_order_descriptive())
return descs
def in_order_descriptive(self):
descs = []
for address, tree in self.gorn_in_order():
if tree == self:
descs.append(str(self.elem_tree))
else:
descs.extend(tree.in_order_descriptive())
return descs
def in_order_treeids(self):
treeids = []
for address, tree in self.gorn_in_order():
if tree == self:
treeids.append(tree.elem_tree.tree_id)
else:
treeids.extend(tree.in_order_treeids())
return treeids
def pre_order_lexical(self):
pass
def in_order_lexical(self, include_empties=False):
lexical = []
for address, tree in self.gorn_in_order(include_empties):
if include_empties and tree is None:
lexical.append("<open-sub-point>")
elif tree.elem_tree.head is None:
continue
elif tree == self:
lexical.append(self.elem_tree.head)
else:
lexical.extend(tree.in_order_lexical())
return lexical
def expanded_by_hlf(self, book=None):
if book is None:
self.expand_address()
book = {}
book[self.E.hlf_symbol] = self.expanded_address
for child in self.children:
book = child.expanded_by_hlf(book)
return book
def make_expression(self, top=True):
expr = []
for i, (address, tree) in enumerate(self.gorn_in_order()):
if tree == self:
expr.append(self.predicate)
else:
expr.extend(tree.make_expression(False))
if top:
return Expression.from_iter(expr)
return expr
def lookup_insert(self, index):
return self.elem_tree.insertion_points[index].gorn
def lookup_sub(self, index):
return self.elem_tree.substitution_points[index].gorn
def set_path_features(self, instantiate_semantics=True, *args, **kwargs):
self.elem_tree.set_path_features(*args, **kwargs)
if instantiate_semantics:
self.predicate = self.instantiate_semantics()
def set_insertion_argument(self, arg):
if not self.is_insertion:
raise Exception("Don't call this if it's not insertion..")
self.predicate.substitute(arg, 0)
def instantiate_semantics(self):
num_arguments = len(self.elem_tree.substitution_points)
if self.is_insertion:
num_arguments += 1
predicate = Predicate(self.elem_tree.head,
num_arguments,
self.elem_tree.hlf_symbol)
if self.elem_tree.hlf_symbol is None:
self.elem_tree.set_path_features(self_hlf=predicate.hlf_symbol)
return predicate
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
def clone(self):
children = [child.clone() for child in self.children]
pred = self.predicate.clone()
return self.__class__(self.elem_tree.clone(), children)
def handle_insertion(self, operative, in_place):
"""Check if my elementary tree is the insertion point; if not, recurse
Args:
op_tree: ElementaryTree instance
"""
ThisClass = self.__class__
op_tree = operative.elem_tree
op = op_tree.tree_operation
if self.elem_tree.matches_inspoint(op):
# do the insertting; making new elem tree copies; updating addresses
new_elem_tree, new_op_tree = self.elem_tree.insert(op_tree)
# start making the new composed tree
# create a new clone of the op dtree
if in_place:
new_operative = operative
new_operative.elem_tree = new_op_tree
new_children = self.children
else:
#new_children = [child.clone() for child in self.children]
new_children = deepcopy(self.children)
new_operative = ThisClass.replicate(operative, new_op_tree)
# since it's an insertion, this pred is an argument to the op
new_pred = deepcopy(self.predicate)
# put the predicate into the op
new_operative.set_insertion_argument(new_pred)
# finish off the children
new_children.append(new_operative)
else:
new_elem_tree = deepcopy(self.elem_tree)
new_children = [child.operate(operative, in_place) for child in self.children]
new_pred = deepcopy(self.predicate)
if in_place:
self.elem_tree = new_elem_tree
self.children = new_children
self.predicate = new_pred
return self
else:
return ThisClass(new_elem_tree, new_children)
def handle_substitution(self, operative, in_place=False):
"""Check if my elementary tree is the subpoint; if not, recurse on children
Args:
op_tree: ElementaryTree instance
"""
ThisClass = self.__class__
op_tree = operative.elem_tree
op = op_tree.tree_operation
if self.elem_tree.matches_subpoint(op):
# the purpose of the substitute is to give the op_tree an address
# that adddress is the location of its substituion
# this is important for when we want to order our derived children via gorn
new_elem_tree, new_op_tree = self.elem_tree.substitute(op_tree)
##### HANDLE IN-PLACE-TYPE VS FACTORY-TYPE OPERATION
# the thing coming in is copied
if in_place:
new_operative = operative
new_operative.elem_tree = new_op_tree
new_children = self.children
else:
new_children = deepcopy(self.children)#[child.clone() for child in self.children]
new_operative = ThisClass.replicate(operative, new_op_tree)
new_children.append(new_operative)
##### HANDLE LOGIC STUFF
new_pred = deepcopy(self.predicate)#.clone()
# we put it into its correct spot
if self.is_insertion:
pred_arg_index = new_elem_tree.last_index + 1
else:
pred_arg_index = new_elem_tree.last_index
            # terminology overload: `substitute` here is a logic substitution
            # (filling a predicate argument slot), not a tree substitution
new_pred.substitute(new_operative.predicate, pred_arg_index)
else:
new_elem_tree = deepcopy(self.elem_tree)#.clone()
new_pred = deepcopy(self.predicate)#.clone()
new_children = [child.operate(operative, in_place) for child in self.children]
if in_place:
self.elem_tree = new_elem_tree
self.children = new_children
self.predicate = new_pred
return self
else:
return ThisClass(new_elem_tree, new_children)
def operate(self, operative, in_place=False):
"""handle the possible operations incoming to this derived tree.
Args:
operative: a DerivationTree instance
Returns:
a new DerivationTree that results from operation
Notes:
An intended operation would know what tree it wants to operate on
and where it wants to do it.
E.G:
(NP* (DT a)) knows it wants to attach to the tree (NP (NN dog))
which is substituted into (S (NP) (VP finds) (NP))
The DerivationTree should know that (NP (NN dog)) was substituted into
the first substitution spot.
Temp QUD:
what is the best way to represent this intended operation?
we could have the DT tree know it wants to attach to tree id X
but that tree id X could be in the tree twice (either NP)
it could know the predicate then?
"""
if operative.elem_tree.tree_operation.type == consts.INSERTION:
return self.handle_insertion(operative, in_place)
elif operative.elem_tree.tree_operation.type == consts.SUBSTITUTION:
return self.handle_substitution(operative, in_place)
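    # Usage sketch (hypothetical trees; a complete setup also assigns the
    # operation targets, as in test() at the bottom of this module):
    #
    #     dog = DerivationTree.from_bracketed("(NP (NN dog))")
    #     det = DerivationTree.from_bracketed("(NP* (DT a))")
    #     a_dog = dog.operate(det)            # insertion of (DT a)
    #     sentence = s_tree.operate(a_dog)    # substitution into (S ...)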
@classmethod
def replicate(cls, old_inst, new_elem_tree=None, new_children=None, new_pred=None):
""" this is basically clone but allows etrees, childre, and preds rather than just straight cloning """
new_elem_tree = new_elem_tree or deepcopy(old_inst.elem_tree)#.clone()
new_children = new_children or deepcopy(old_inst.children) #[child.clone() for child in old_inst.children]
new_pred = new_pred or deepcopy(old_inst.predicate)#.clone()
return cls(new_elem_tree, new_children)
def test():
parse = """(ROOT(S(NP(NP (DT The) (NN boy))(VP (VBG laying)(S(VP (VB face)(PRT (RP down))(PP (IN on)(NP (DT a) (NN skateboard)))))))(VP (VBZ is)(VP (VBG being)(VP (VBN pushed)(PP (IN along)(NP (DT the) (NN ground)))(PP (IN by)(NP (DT another) (NN boy))))))(. .)))"""
tree_cuts = tree_enrichment.string2cuts(parse)
tree_strings = [cut.save_str() for cut in tree_cuts]
derived_trees = [DerivationTree.from_bracketed(tree_string) for tree_string in tree_strings]
derived_trees[2].elem_tree.insertion_points[0].hlf_symbol = 'g0'
derived_trees[1].elem_tree.tree_operation.target['target_hlf'] = 'g0'
derived_trees[1].elem_tree.tree_operation.target['target_gorn'] = (0,)
#derived_two = [DerivationTree.from_parse_tree(tree) for tree in tree_cuts]
return derived_trees
if __name__ == "__main__":
test()
| braingineer/baal | baal/structures/gist_trees.py | Python | mit | 54,149 |
# encoding: utf-8
from __future__ import absolute_import
import os
START_MONITORING_THREAD = False
SAVE_STAT = True
DISABLE_DATABASE = True
# for tests we want only a half-second timeout instead of the normal 10s
INSTANCE_TIMEOUT = int(os.environ.get('CUSTOM_INSTANCE_TIMEOUT', 500))
STAT_CIRCUIT_BREAKER_MAX_FAIL = int(os.getenv('JORMUNGANDR_STAT_CIRCUIT_BREAKER_MAX_FAIL', 1000))
STAT_CIRCUIT_BREAKER_TIMEOUT_S = int(os.getenv('JORMUNGANDR_STAT_CIRCUIT_BREAKER_TIMEOUT_S', 1))
# do not authenticate for tests
PUBLIC = True
LOGGER = {
'version': 1,
'disable_existing_loggers': False,
'formatters':{
'default': {
'format': '[%(asctime)s] [%(levelname)5s] [%(process)5s] [%(name)10s] %(message)s',
},
},
'handlers': {
'default': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'default',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'INFO',
'propagate': True
},
'navitiacommon.default_values': {
'handlers': ['default'],
'level': 'ERROR',
'propagate': True
},
}
}
CACHE_CONFIGURATION = {
'CACHE_TYPE': 'null'
}
# List of enabled modules
MODULES = {
'v1': { # API v1 of Navitia
'import_path': 'jormungandr.modules.v1_routing.v1_routing',
'class_name': 'V1Routing'
}
}
# circuit breaker parameters, for the tests by default we don't want the circuit breaker
CIRCUIT_BREAKER_MAX_INSTANCE_FAIL = 99999
CIRCUIT_BREAKER_INSTANCE_TIMEOUT_S = 1
GRAPHICAL_ISOCHRONE = True
HEAT_MAP = True
PATCH_WITH_GEVENT_SOCKET = True
| antoine-de/navitia | source/jormungandr/tests/integration_tests_settings.py | Python | agpl-3.0 | 1,696 |
# -*- coding: utf-8 -*-
"""
sphinx.application
~~~~~~~~~~~~~~~~~~
Sphinx application object.
Gracefully adapted from the TextPress system by Armin.
:copyright: 2008 by Georg Brandl, Armin Ronacher.
:license: BSD.
"""
import sys
import posixpath
from docutils import nodes
from docutils.parsers.rst import directives, roles
import sphinx
from sphinx.roles import xfileref_role, innernodetypes
from sphinx.config import Config
from sphinx.builder import builtin_builders, StandaloneHTMLBuilder
from sphinx.directives import desc_directive, target_directive, additional_xref_types
from sphinx.environment import SphinxStandaloneReader
from sphinx.util.console import bold
class SphinxError(Exception):
"""
Base class for Sphinx errors that are shown to the user in a nicer
way than normal exceptions.
"""
category = 'Sphinx error'
class ExtensionError(SphinxError):
"""Raised if something's wrong with the configuration."""
category = 'Extension error'
def __init__(self, message, orig_exc=None):
self.message = message
self.orig_exc = orig_exc
def __repr__(self):
if self.orig_exc:
return '%s(%r, %r)' % (self.__class__.__name__,
self.message, self.orig_exc)
return '%s(%r)' % (self.__class__.__name__, self.message)
def __str__(self):
if self.orig_exc:
return '%s (exception: %s)' % (self.message, self.orig_exc)
return self.message
# List of all known core events. Maps name to arguments description.
events = {
'builder-inited': '',
'doctree-read': 'the doctree before being pickled',
'missing-reference': 'env, node, contnode',
'doctree-resolved': 'doctree, docname',
'env-updated': 'env',
'html-page-context': 'pagename, context, doctree or None',
'build-finished': 'exception',
}
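# Usage sketch (hypothetical callback name): an extension subscribes to one
# of these events through Sphinx.connect below, e.g.
#
#     def setup(app):
#         app.connect('doctree-read', on_doctree_read)
#
# and the callback is invoked as on_doctree_read(app, doctree): the
# application first, then the arguments described above.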
CONFIG_FILENAME = 'conf.py'
class Sphinx(object):
def __init__(self, srcdir, confdir, outdir, doctreedir, buildername,
confoverrides, status, warning=sys.stderr, freshenv=False):
self.next_listener_id = 0
self._listeners = {}
self.builderclasses = builtin_builders.copy()
self.builder = None
self.srcdir = srcdir
self.confdir = confdir
self.outdir = outdir
self.doctreedir = doctreedir
self._status = status
self._warning = warning
self._warncount = 0
self._events = events.copy()
# read config
self.config = Config(confdir, CONFIG_FILENAME, confoverrides)
# load all extension modules
for extension in self.config.extensions:
self.setup_extension(extension)
# the config file itself can be an extension
if self.config.setup:
self.config.setup(self)
# now that we know all config values, collect them from conf.py
self.config.init_values()
if buildername is None:
print >>status, 'No builder selected, using default: html'
buildername = 'html'
if buildername not in self.builderclasses:
raise SphinxError('Builder name %s not registered' % buildername)
self.info(bold('Sphinx v%s, building %s' % (sphinx.__version__, buildername)))
builderclass = self.builderclasses[buildername]
self.builder = builderclass(self, freshenv=freshenv)
self.emit('builder-inited')
def build(self, all_files, filenames):
try:
if all_files:
self.builder.build_all()
elif filenames:
self.builder.build_specific(filenames)
else:
self.builder.build_update()
except Exception, err:
self.emit('build-finished', err)
raise
else:
self.emit('build-finished', None)
def warn(self, message):
self._warncount += 1
self._warning.write('WARNING: %s\n' % message)
def info(self, message='', nonl=False):
if nonl:
self._status.write(message)
else:
self._status.write(message + '\n')
self._status.flush()
# general extensibility interface
def setup_extension(self, extension):
"""Import and setup a Sphinx extension module."""
try:
mod = __import__(extension, None, None, ['setup'])
except ImportError, err:
raise ExtensionError('Could not import extension %s' % extension, err)
if hasattr(mod, 'setup'):
mod.setup(self)
def import_object(self, objname, source=None):
"""Import an object from a 'module.name' string."""
try:
module, name = objname.rsplit('.', 1)
except ValueError, err:
raise ExtensionError('Invalid full object name %s' % objname +
(source and ' (needed for %s)' % source or ''), err)
try:
return getattr(__import__(module, None, None, [name]), name)
except ImportError, err:
raise ExtensionError('Could not import %s' % module +
(source and ' (needed for %s)' % source or ''), err)
except AttributeError, err:
raise ExtensionError('Could not find %s' % objname +
(source and ' (needed for %s)' % source or ''), err)
# event interface
def _validate_event(self, event):
event = intern(event)
if event not in self._events:
raise ExtensionError('Unknown event name: %s' % event)
def connect(self, event, callback):
self._validate_event(event)
listener_id = self.next_listener_id
if event not in self._listeners:
self._listeners[event] = {listener_id: callback}
else:
self._listeners[event][listener_id] = callback
self.next_listener_id += 1
return listener_id
def disconnect(self, listener_id):
for event in self._listeners.itervalues():
event.pop(listener_id, None)
def emit(self, event, *args):
result = []
if event in self._listeners:
for _, callback in self._listeners[event].iteritems():
result.append(callback(self, *args))
return result
def emit_firstresult(self, event, *args):
for result in self.emit(event, *args):
if result is not None:
return result
return None
# registering addon parts
def add_builder(self, builder):
if not hasattr(builder, 'name'):
raise ExtensionError('Builder class %s has no "name" attribute' % builder)
if builder.name in self.builderclasses:
raise ExtensionError('Builder %r already exists (in module %s)' % (
builder.name, self.builderclasses[builder.name].__module__))
self.builderclasses[builder.name] = builder
def add_config_value(self, name, default, rebuild_env):
if name in self.config.values:
raise ExtensionError('Config value %r already present' % name)
self.config.values[name] = (default, rebuild_env)
def add_event(self, name):
if name in self._events:
raise ExtensionError('Event %r already present' % name)
self._events[name] = ''
def add_node(self, node, **kwds):
nodes._add_node_class_names([node.__name__])
for key, val in kwds.iteritems():
try:
visit, depart = val
except ValueError:
raise ExtensionError('Value for key %r must be a (visit, depart) '
'function tuple' % key)
if key == 'html':
from sphinx.htmlwriter import HTMLTranslator as translator
elif key == 'latex':
from sphinx.latexwriter import LaTeXTranslator as translator
elif key == 'text':
from sphinx.textwriter import TextTranslator as translator
else:
# ignore invalid keys for compatibility
continue
setattr(translator, 'visit_'+node.__name__, visit)
if depart:
setattr(translator, 'depart_'+node.__name__, depart)
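    # Example (sketch): register a node together with per-format handlers,
    #     app.add_node(my_node, html=(visit_my_node, depart_my_node))
    # where the depart member of a tuple may be None if the node needs no
    # closing markup.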
def add_directive(self, name, func, content, arguments, **options):
func.content = content
func.arguments = arguments
func.options = options
directives.register_directive(name, func)
def add_role(self, name, role):
roles.register_canonical_role(name, role)
def add_description_unit(self, directivename, rolename, indextemplate='',
parse_node=None, ref_nodeclass=None):
additional_xref_types[directivename] = (rolename, indextemplate, parse_node)
directives.register_directive(directivename, desc_directive)
roles.register_canonical_role(rolename, xfileref_role)
if ref_nodeclass is not None:
innernodetypes[rolename] = ref_nodeclass
def add_crossref_type(self, directivename, rolename, indextemplate='',
ref_nodeclass=None):
additional_xref_types[directivename] = (rolename, indextemplate, None)
directives.register_directive(directivename, target_directive)
roles.register_canonical_role(rolename, xfileref_role)
if ref_nodeclass is not None:
innernodetypes[rolename] = ref_nodeclass
def add_transform(self, transform):
SphinxStandaloneReader.transforms.append(transform)
def add_javascript(self, filename):
StandaloneHTMLBuilder.script_files.append(
posixpath.join('_static', filename))
class TemplateBridge(object):
"""
This class defines the interface for a "template bridge", that is, a class
that renders templates given a template name and a context.
"""
def init(self, builder):
"""
Called by the builder to initialize the template system. *builder*
is the builder object; you'll probably want to look at the value of
``builder.config.templates_path``.
"""
raise NotImplementedError('must be implemented in subclasses')
def newest_template_mtime(self):
"""
Called by the builder to determine if output files are outdated
because of template changes. Return the mtime of the newest template
file that was changed. The default implementation returns ``0``.
"""
return 0
def render(self, template, context):
"""
Called by the builder to render a *template* with a specified
context (a Python dictionary).
"""
raise NotImplementedError('must be implemented in subclasses')
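# Minimal sketch of a concrete bridge (hypothetical; assumes a Jinja-style
# template environment and a make_environment helper, neither defined here):
#
#     class DirectoryBridge(TemplateBridge):
#         def init(self, builder):
#             self.env = make_environment(builder.config.templates_path)
#         def render(self, template, context):
#             return self.env.get_template(template).render(context)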
| BackupGGCode/sphinx | sphinx/application.py | Python | bsd-3-clause | 10,788 |
###
# Copyright (c) 2003-2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
class InternetTestCase(PluginTestCase):
plugins = ('Internet',)
if network:
def testDns(self):
self.assertNotError('dns slashdot.org')
self.assertResponse('dns alsdkjfaslkdfjaslkdfj.com',
'Host not found.')
def testWhois(self):
self.assertNotError('internet whois ohio-state.edu')
self.assertError('internet whois www.ohio-state.edu')
self.assertNotError('internet whois kuro5hin.org')
self.assertError('internet whois www.kuro5hin.org')
self.assertNotError('internet whois microsoft.com')
self.assertNotError('internet whois inria.fr')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| tecan/xchat-rt | plugins/scripts/Supybot-0.83.4.1-bitcoinotc-bot/plugins/Internet/test.py | Python | gpl-2.0 | 2,370 |
# -*- coding: utf-8 -*-
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
A Neurongrouper
"""
#<DefineAugmentation>
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Specials.Simulaters.Simulater"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer"
SYS.setSubModule(globals())
#</DefineAugmentation>
#<ImportSpecificModules>
from ShareYourSystem.Specials.Simulaters import Synapser
#</ImportSpecificModules>
#<DefineLocals>
NeurongroupPreTeamKeyStr='Pres'
NeurongroupPostTeamKeyStr='Posts'
NeurongroupSpikeTeamKeyStr='Spikes'
NeurongroupStateTeamKeyStr='States'
#</DefineLocals>
#<DefineClass>
@DecorationClass()
class NeurongrouperClass(BaseClass):
def default_init(self,
_NeurongroupDeriveBrianerVariable=None,
_NeurongroupingBrianKwargDict=None,
_NeurongroupingVariableStrToGetStrDict=None,
_NeurongroupedPostModelInsertStrsList=None,
_NeurongroupedPostModelAddDict=None,
_NeurongroupedEquationStrsList=None,
_NeurongroupedBrianVariable=None,
_NeurongroupedSpikeMonitorsList=None,
_NeurongroupedStateMonitorsList=None,
**_KwargVariablesDict
):
#Call the parent __init__ method
BaseClass.__init__(self,**_KwargVariablesDict)
#team
map(
lambda __KeyStr:
self.team(__KeyStr),
[
NeurongroupPreTeamKeyStr,
NeurongroupPostTeamKeyStr,
NeurongroupSpikeTeamKeyStr,
NeurongroupStateTeamKeyStr
]
)
def do_neurongroup(
self
):
#debug
'''
self.debug(('self.',self,[
'NeurongroupingBrianKwargDict'
]))
'''
#maybe should import
from brian2 import NeuronGroup,SpikeMonitor,StateMonitor
#Check
if 'N' not in self.NeurongroupingBrianKwargDict:
self.NeurongroupingBrianKwargDict['N']=self.SimulatingUnitsInt
#add the synaptic model strs
'''
self.debug(('self.',self,['CollectionsOrderedDict']))
'''
#map
self.NeurongroupedPostModelInsertStrsList=list(
set(
SYS.flat(
map(
lambda __PreConnecter:
__PreConnecter.PostModelInsertStrsList,
self.TeamDict[NeurongroupPostTeamKeyStr].ManagementDict.values()
)
)
)
)
#map
'''
self.debug(
[
'self.PreConnectersCollectionOrderedDict.keys() is ',
self.PreConnectersCollectionOrderedDict.keys(),
'self.PostConnectersCollectionOrderedDict.keys() is ',
self.PostConnectersCollectionOrderedDict.keys(),
]
)
'''
#map
map(
lambda __PreConnecter:
map(
lambda __ItemTuple:
self.NeurongroupedPostModelAddDict.__setitem__(
__ItemTuple[0],
list(
set(
(self.NeurongroupedPostModelAddDict[__ItemTuple[0]]
if __ItemTuple[0] in self.NeurongroupedPostModelAddDict
else [])+__ItemTuple[1]
)
)
),
__PreConnecter.PostModelAddDict.items()
),
self.TeamDict[NeurongroupPreTeamKeyStr].ManagementDict.values()
)
#debug
'''
self.debug(('self.',self,[
'NeurongroupedPostModelInsertStrsList',
'NeurongroupedPostModelAddDict'
]))
'''
#Check
if 'model' not in self.NeurongroupingBrianKwargDict:
self.NeurongroupingBrianKwargDict['model']=''
#add synaptic model variables
map(
lambda __NeurongroupedPostModelInsertStr:
self.NeurongroupingBrianKwargDict.__setitem__(
'model',
self.NeurongroupingBrianKwargDict['model'
]+'\n'+__NeurongroupedPostModelInsertStr
),
self.NeurongroupedPostModelInsertStrsList
)
#map
self.NeurongroupedEquationStrsList=map(
lambda __KeyStr:
SYS.chunk(
['d'+__KeyStr+'/dt',')/'],
self.NeurongroupingBrianKwargDict['model'],
)[0],
self.NeurongroupedPostModelAddDict.keys()
)
#map
map(
lambda __NeurongroupedEquationStr,__AddStrsList:
self.NeurongroupingBrianKwargDict.__setitem__(
'model',
self.NeurongroupingBrianKwargDict['model'].replace(
__NeurongroupedEquationStr,
__NeurongroupedEquationStr+'+'+'+'.join(__AddStrsList)
)
),
self.NeurongroupedEquationStrsList,
self.NeurongroupedPostModelAddDict.values()
)
#debug
'''
self.debug(('self.',self,[
'NeurongroupedEquationStrsList',
'NeurongroupingBrianKwargDict'
]))
'''
#init
self.NeurongroupedBrianVariable=NeuronGroup(
**self.NeurongroupingBrianKwargDict
)
#debug
'''
self.debug(('self.',self,['NeurongroupedBrianVariable']))
'''
#update variables
map(
lambda __ItemTuple:
setattr(
self.NeurongroupedBrianVariable,
__ItemTuple[0],
self[__ItemTuple[1]]
),
self.NeurongroupingVariableStrToGetStrDict.items()
)
#debug
'''
self.debug(('self.',self,['NeurongroupedBrianVariable']))
'''
#map
self.NeurongroupedSpikeMonitorsList=map(
lambda __DeriveMoniter:
__DeriveMoniter.__setitem__(
'SpikeMonitor',
SpikeMonitor(
self.NeurongroupedBrianVariable
)
).SpikeMonitor,
self.TeamDict[NeurongroupSpikeTeamKeyStr].ManagementDict.values()
)
#debug
'''
self.debug(
[
('self.',self,[
'NeurongroupedSpikeMonitorsList'
])
]
)
'''
#map
self.NeurongroupedStateMonitorsList=map(
lambda __DeriveMoniter:
__DeriveMoniter.__setitem__(
'StateMonitor',
StateMonitor(
self.NeurongroupedBrianVariable,
__DeriveMoniter.MoniteringVariableStr,
__DeriveMoniter.MoniteringRecordTimeIndexIntsArray
)
).StateMonitor,
self.TeamDict[NeurongroupStateTeamKeyStr].ManagementDict.values()
)
#debug
'''
self.debug(('self.',self,['NeurongroupedStateMonitorsList']))
'''
"""
def propertize_setWatchAfterParentWithParenterBool(self,_SettingValueVariable):
#debug
self.debug(
[
'We have grand parents',
'map(type,self.ParentedDeriveTeamersList) is '+str(
map(type,self.ParentedDeriveTeamersList))
]
)
#Check
if type(self.ParentTopDeriveTeamerVariable)==SYS.BrianerClass:
#alias
self.NeurongroupDeriveBrianerVariable=self.ParentTopDeriveTeamerVariable
else:
#index
self.NeurongroupDeriveBrianerVariable=self.ParentedDeriveTeamersList[
map(
type,
self.ParentedDeriveTeamersList
).index(SYS.BrianerClass)
]
#manage self
self.NeurongroupDeriveBrianerVariable.TeamDict[
self.ParentTopDeriveTeamerVariable.Module.BrianPopulationTeamKeyStr
].manage(self)
#call the base method
BaseClass.propertize_setWatchAfterParentWithParenterBool(self,_SettingValueVariable)
"""
#</DefineClass>
#</DefinePrint>
NeurongrouperClass.PrintingClassSkipKeyStrsList.extend(
[
'NeurongroupingBrianKwargDict',
'NeurongroupingVariableStrToGetStrDict',
'NeurongroupedPostModelInsertStrsList',
'NeurongroupedPostModelAddDict',
'NeurongroupedEquationStrsList',
'NeurongroupedBrianVariable',
'NeurongroupedSpikeMonitorsList',
'NeurongroupedStateMonitorsList'
]
)
#<DefinePrint>
| Ledoux/ShareYourSystem | Pythonlogy/draft/Simulaters/Neurongrouper/draft/__init__ copy.py | Python | mit | 6,951 |
#Modify your code so that it normalizes the output for
#the function sense. This means that the entries in q
#should sum to one.
p=[0.2, 0.2, 0.2, 0.2, 0.2]
world=['green', 'red', 'red', 'green', 'green']
Z = 'red'
pHit = 0.6
pMiss = 0.2
def sense(p, Z):
q=[]
for i in range(len(p)):
hit = (Z == world[i])
q.append(p[i] * (hit * pHit + (1-hit) * pMiss))
mySum = sum(q)
for i in range(len(q)):
q[i] = q[i]/mySum
return q
print sense(p,Z)
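# Sanity check (illustrative): after normalization the posterior sums to one.
# print sum(sense(p, Z))   # -> 1.0, up to floating-point rounding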
| martinggww/lucasenlights | MachineLearning/AIRobots/sensefunctionWithNormalization.py | Python | cc0-1.0 | 488 |
#!/usr/bin/python
"""
This module contains an envelope class (MGE) which drives the rest of the package classes.
"""
# Importing the most important modules
# This MGE module requires numpy
from pygme.init_partMGE import nbodyMGE
__version__ = '4.0.4 (03/07/2012)'
DEBUG = 1
# Version 4.0.4: Small change replacing savedir by saveMGE
# Version 4.0.3: Cleaning and redistribution in modules for release pygme
# Version 4.0.2: Small bug with an expression on sigthetagas
# Version 4.0.1: Small bug when only reading projected quantities
# Version 4.0.0: Cleaning - using kwargs, removing all unnecessary parameters
# Version 3.9.1: Add softening and savefile for each model
# Version 3.9.0: Add the mge_to_ramses to simplify things
# Homegeneisation of nBody to nPart
# Adding the possibility not to realise ALL bodies
# Adding the Dynamic Groups
# Adding the possibility to only realise part of the particles
# Debug factor on Mbh calculation
# Version 3.8.0: Add the mge_to_gadget to simplify things
# Version 3.7.9: Put the right sigmaR in QToomre
# Version 3.7.8: Included an external function with Dij for anisotropy variables
# Version 3.7.7: Change of float to floatMGE for exp
# Version 3.7.6: Small bug with FirstHalo stars - Lablanche
# Version 3.7.5: Small bug with TGroupMass
# Version 3.7.4: Add the Black Hole as a Dark Matter particle (last particle)
# and change opNumber into comp_particles within addparam
# Version 3.7.3: Add opNumber option to get just the numbers
# Version 3.7.2: Major change to have single Gaussians with their own sigma
# Version 3.7.1: Add some None to imax and imin in the function calls
# Version 3.7.0: Add a Spin to be able to make counter-rotating components
# Version 3.6.1: Major debug when multiple components
# Version 3.6.0: Major rewriting of truncation of gaussians
# Version 3.5.5: Changed method to sample the position (Sphere / Cube)
# Version 3.5.4: Fixed distrib in mgetosnap
# Version 3.5.3: Fixed parameters to add imin/imax in rhop
# Version 3.5.2: Fixed parameters to include Halo particles
# Version 3.5.1: Fixed bug in derivation of theta (init_nbody)
# Version 3.5.0: Included Groups for the Gaussians, and cleaning a little
# Version 3.4.4: Added import floatMGE
# Version 3.4.3: Added specific MGE float to solve pb when Mbh is not 0 with exp
# Version 3.4.2: BUG! Already corrected??? Gas MUST BE BEFORE STARS in pmass
# Version 3.4.1: Added function set_minmax
# Version 3.4.0: Major change: using systematically imin, imax for functions
# Version 3.3.4: Debug the betaeps option
# Version 3.3.3: Debug the Gas mass when there is a Halo and some gas options
# Version 3.3.2: Added the option betaeps in the init_nbody to force beta(eps)
# Version 3.3.1: Added the option betaeps in the init_nbody to force beta(eps)
# Version 3.3.0: Added the epicycle approximation for the init of nbody
# Version 3.2.0: Changed anisotropy kRZ, KRTheta and added options in init_body
# Version 3.1.0: Added the derivation of kappa, Omega, and QToomre
# Version 3.0.1: Small bug in face-on projection
# Version 3.0.0: Reshuffling of all modules, with new snapshot
# Version 2.6.0: BUG. Mass of Gas was after Stars = INCONSISTENT!!
# Version 2.5.1: Changed default sigma for the gas
# Version 2.5.0: Added kR, kZ, kTheta
# Version 2.4.5: Debug: in weightGas, only take the right gaussians
# Version 2.4.4: Added the option of different kSatoh for the components
# Version 2.4.3: Changed rho to non 0 value and gamma to Ideal gas value
# Version 2.4.2: Added some more initialisation for snapshots
# Version 2.4.1: Minor changes to initialise self.axi
# Version 2.4.0: Introduced asarray instead of nfloat to convert arrays
# Version 2.3.0: Introduced the Halo Gaussians
# Version 2.2.2: Changed a few float32
# Version 2.2.1: Changed the name of pmsphpy a pmsphsf v1.6.0
# Version 2.2.0: Solved many bugs due to memory allocation
# Version 2.1.0: Added comp_Ep and comp_Ec modules
# Version 2.0.2: Added projection option
# Version 2.0.1: Changed value of G very slightly from Remco
# Version 2.0: Adding the gas and Black Holes!
# Version 1.9: Add a cut-off with Vescape
# Version 1.8: Debug rhoint which was wrong
# Version 1.7: Added the transformation to pmsph
# Version 1.6: Adding some comments
# Version 1.5: Adding of photometry deprojection
# Version 1.4: Adding of projected Jeans
# Version 1.1: Bug of biased distribution solved : R ==> x,y
# Version 1.0: first draft of the module
################################################################################
# MGE Functions - Specific Class and associated functions
################################################################################
class MGE(nbodyMGE):
""" Class MGE: includes a rather large structure describing the MGE
Multi-Gaussian Expansion model for a specific galaxy. Basic variables are:
- the gaussian parameters (Intensity, Sigma, Axis ratio, PA)
- the M/L for each gaussian
- the total mass and flux for each gaussian
- parameters for the galaxy: distance, inclination...
- and of course the number of particles per Group
The MGE class is not inheriting from any pref-defined class.
It is however, initialising the upper nbodyMGE class, which itself inherit
from dynMGE (dynamics-related functions/methods), and then from photMGE
(photometric-related functions/methods), and then from paramMGE (basic
MGE parameters).
"""
def __init__(self, infilename=None, indir=None, saveMGE=None, **kwargs) :
"""Wrapper around the top dynMGE class including the
photometric and dynamic modules.
        :param infilename: str, name of the MGE input file
"""
nbodyMGE.__init__(self, infilename=infilename, indir=indir, saveMGE=saveMGE, **kwargs)
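# Typical use (sketch; the file name is hypothetical):
#     mge = MGE(infilename="myGalaxy.mge")
# which reads the MGE parameter file and initialises the photometric and
# dynamical machinery inherited from nbodyMGE.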
| emsellem/pygme | pygme/mge.py | Python | bsd-3-clause | 5,913 |
#!/usr/bin/env python
import os, os.path
import re
import sys
placeholder_pat = re.compile("%\((.+?)\)(\w)")
def extract_placeholders(s):
return set(placeholder_pat.findall(s))
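# e.g. (illustrative): extract_placeholders("%(name)s: %(count)d files")
# returns set([("name", "s"), ("count", "d")])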
def check_file(fn):
msgid = ''
msgstr = ''
workingon = 'msgid'
mismatches = []
for line in open(fn):
if line.startswith('#'): continue
text = ''
line = line.rstrip()
if line.startswith('msg'):
workingon, text = line.split(' ',1)
if workingon == 'msgid':
if msgid and msgstr and len(msgstr.strip()) > 0:
id_placeholders = extract_placeholders(msgid)
str_placeholders = extract_placeholders(msgstr)
if len(id_placeholders) != len(str_placeholders) or (len(id_placeholders.difference(str_placeholders)) != 0):
mismatches.append((msgid,msgstr))
msgid = msgstr = ''
else:
text = line
text = text.strip('"')
if text:
if workingon == 'msgid':
msgid += text
else:
msgstr += text
    # also validate the final entry; the loop only checks a pair once the
    # next msgid begins, so the last pair in the file is otherwise skipped
    if msgid and msgstr and len(msgstr.strip()) > 0:
        id_placeholders = extract_placeholders(msgid)
        str_placeholders = extract_placeholders(msgstr)
        if len(id_placeholders) != len(str_placeholders) or (len(id_placeholders.difference(str_placeholders)) != 0):
            mismatches.append((msgid, msgstr))
    if mismatches:
print "WARNING: %i mismatches in %s" % (len(mismatches),fn)
for msgid, msgstr in mismatches:
print 'msgid:' + msgid
print 'msgstr:' + msgstr
print
if __name__ == '__main__':
try:
start_dir = sys.argv[1]
except:
start_dir = '../locale'
for path, dirs, files in os.walk(start_dir):
for f in files:
if f.endswith('.po'):
check_file(os.path.join(path,f))
| PalmBeachPost/panda | scripts/check_po.py | Python | mit | 1,630 |
# -*- coding: utf-8 -*-
#
#
# Author: Nicolas Bessi, Guewen Baconnier, Yannick Vaucher
# Copyright 2013-2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{'name': 'Split picking',
'summary': 'Split a picking in two unconfirmed pickings',
'version': '8.0.1.0.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'maintainer': 'Camptocamp',
'license': 'AGPL-3',
'category': 'Warehouse Management',
'complexity': "normal", # easy, normal, expert
'depends': ['stock'],
'website': 'http://www.camptocamp.com',
'data': ['view/stock_partial_picking.xml'],
'demo': [],
'test': ['test/test_picking_split.yml',
'test/test_assigned_picking_split.yml',
'test/test_picking_split_two_move_lines.yml'],
'installable': True,
'auto_install': False,
}
| akretion/stock-logistics-workflow | stock_split_picking/__openerp__.py | Python | agpl-3.0 | 1,444 |
# Globals for the directions
# Change the values as you see fit
EAST = None
NORTH = None
WEST = None
SOUTH = None
class Robot:
def __init__(self, direction=NORTH, x_pos=0, y_pos=0):
pass
| jmluy/xpython | exercises/practice/robot-simulator/robot_simulator.py | Python | mit | 201 |
# -*- coding: utf-8 -*-
#
from .contextualize import Contextualize, contextualize
__all__ = ('Contextualize', 'contextualize')
| gonrin/gatco | gatco/cors/spf/plugins/__init__.py | Python | mit | 127 |
# -*- coding: utf-8 -*-
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from parler.utils.context import switch_language
from aldryn_newsblog.models import Article
class Command(BaseCommand):
can_import_settings = True
base_options = (
make_option(
"-l",
"--language",
action="append",
dest="languages",
default=None,
),
)
option_list = BaseCommand.option_list + base_options
def handle(self, *args, **options):
languages = options.get('languages')
if languages is None:
languages = [language[0] for language in settings.LANGUAGES]
# ArticleTranslation
translation_model = Article._parler_meta.root_model
for article in Article.objects.published():
translations = article.translations.filter(
language_code__in=languages
)
# build internal parler cache
parler_cache = dict(
(trans.language_code, trans) for trans in translations)
# set internal parler cache
# to avoid parler hitting db for every language
article._translations_cache[translation_model] = parler_cache
for translation in translations:
language = translation.language_code
with switch_language(article, language_code=language):
translation.search_data = article.get_search_data()
# make sure to only update the search_data field
translation.save(update_fields=["search_data"])
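# Usage sketch: rebuild the search data for selected languages, e.g.
#     python manage.py rebuild_article_search_data -l en -l de
# or for every configured language when no -l option is given.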
| Venturi/cms | env/lib/python2.7/site-packages/aldryn_newsblog/management/commands/rebuild_article_search_data.py | Python | gpl-2.0 | 1,694 |
from django.core.management.base import BaseCommand
from corehq.sql_db.shard_data_management import get_count_of_unmatched_models_by_shard
from corehq.sql_db.util import get_db_aliases_for_partitioned_query, get_all_sharded_models
class Command(BaseCommand):
help = "Print out all shard data that exists in databases that don't contain the associated shards."
def handle(self, **options):
sharded_models = list(get_all_sharded_models())
for database in get_db_aliases_for_partitioned_query():
for model in sharded_models:
invalid_data = get_count_of_unmatched_models_by_shard(database, model)
if invalid_data:
for shard_id, count in invalid_data:
print('found {} unexpected {}s in {} (shard {}).'.format(
count, model.__name__, database, shard_id)
)
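# Usage sketch: run through Django's manage.py, e.g.
#     python manage.py locate_invalid_shard_data
# which prints one line per unexpected (model, shard) pair it finds.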
| dimagi/commcare-hq | corehq/sql_db/management/commands/locate_invalid_shard_data.py | Python | bsd-3-clause | 917 |
import csv
import sys
from django.core.management.base import BaseCommand
from django.db.models import Prefetch
from course.models import CourseModule
from deviations.models import DeadlineRuleDeviation, MaxSubmissionsRuleDeviation
from ...models import BaseExercise, Submission
from userprofile.models import UserProfile
class Command(BaseCommand):
help = 'Exports submission and exercise data from given course instances into CSV files'
def add_arguments(self, parser):
parser.add_argument(
'course_instance_id',
nargs='+',
type=int,
help='Course instance id (from model CourseInstance) whose data is exported',
)
parser.add_argument(
'-e',
'--exercise-output-file',
help='Exercises are written to this CSV file. The file is created or overwritten. '
'If you do not set any value, then no exercise file is written.',
)
parser.add_argument(
'-s',
'--submission-output-file',
help='Submissions are written to this CSV file. The file is created or overwritten. '
'If you do not set any value, then no submission file is written.',
)
parser.add_argument(
'-b',
'--limit-submissions-start',
type=int,
help='Limit the number of submissions that are written to the CSV file. '
'This is the start index of the submissions. '
'By default, all submissions starting from index zero are included.',
)
parser.add_argument(
'-l',
'--limit-submissions-end',
type=int,
help='Limit the number of submissions that are written to the CSV file. '
'This is the end index of the submissions. '
'By default, all submissions up to the last index are included.',
)
parser.add_argument(
'-d',
'--include-deadline-deviations',
action='store_true',
help="If set, students' personal deadline deviations are included in the submissions CSV file.",
)
parser.add_argument(
'-m',
'--include-max-submission-deviations',
action='store_true',
help="If set, students' personal max submission attempt deviations are included in the submissions CSV file.",
)
parser.add_argument(
'-i',
'--include-student-ids',
action='store_true',
help="If set, submitters' student ids are included in the submissions CSV file.",
)
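    # Usage sketch (the management command name is taken from this file's
    # name, which is not shown here; the instance id and file names are
    # hypothetical):
    #     python manage.py <command> 123 -e exercises.csv -s submissions.csv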
def handle(self, *args, **options):
course_instance_ids = options['course_instance_id']
exercise_file_path = options['exercise_output_file']
submission_file_path = options['submission_output_file']
if not exercise_file_path and not submission_file_path:
self.stderr.write(
'At least one of the options "--exercise-output-file" or '
'"--submission-output-file" must be given in order to output anything.'
)
self.stderr.write('Aborting...')
sys.exit(2)
limit_submissions_start = options['limit_submissions_start']
limit_submissions_end = options['limit_submissions_end']
if limit_submissions_start is not None and limit_submissions_start < 0:
self.stderr.write("--limit-submissions-start must be a non-negative integer.")
sys.exit(2)
if limit_submissions_end is not None and limit_submissions_end < 0:
self.stderr.write("--limit-submissions-end must be a non-negative integer.")
sys.exit(2)
submissions_slice = slice(limit_submissions_start, limit_submissions_end)
# Fetch all exercises from the given course instances.
exercises = BaseExercise.objects.filter(
course_module__course_instance__pk__in=course_instance_ids,
).select_related(
'category',
).prefetch_related(
Prefetch(
'course_module',
queryset=CourseModule.objects.select_related(
'course_instance',
).only(
'order',
'points_to_pass',
'closing_time',
'late_submissions_allowed',
'late_submission_deadline',
'late_submission_penalty',
'course_module__course_instance__instance_name',
'course_module__course_instance__url',
),
),
).defer(
'description',
'service_url',
'exercise_info',
'model_answers',
'templates',
'content',
'category__status',
'category__description',
'category__course_instance',
).order_by(
'id',
)
# Fetch all submissions for the exercises.
if submission_file_path:
user_fields = ['user__id']
if options['include_student_ids']:
user_fields.append('student_id')
submissions = Submission.objects.filter(
exercise__in=exercises,
).prefetch_related(
Prefetch(
'submitters',
queryset=UserProfile.objects.select_related('user').only(*user_fields),
to_attr='submitter_userprofiles',
),
).defer(
'hash',
'grader',
'feedback',
'assistant_feedback',
'submission_data',
'grading_data',
'meta_data',
).order_by(
'id',
)[submissions_slice]
all_deadline_deviations = {}
if options['include_deadline_deviations']:
# Fetch all deadline deviations in the course instances.
all_deadline_deviations_queryset = DeadlineRuleDeviation.objects.filter(
exercise__course_module__course_instance__id__in=course_instance_ids,
).prefetch_related(
Prefetch(
'exercise',
queryset=BaseExercise.objects.select_related(
'course_module',
).only(
'id',
'course_module__id',
'course_module__closing_time',
'course_module__course_instance__id',
'course_module__course_instance__course__id',
),
),
Prefetch(
'submitter',
queryset=UserProfile.objects.select_related('user').only('user__id'),
),
).only(
'exercise__id',
'exercise__course_module__closing_time',
'exercise__course_module__course_instance__id',
'submitter__user__id',
'extra_minutes',
)
for dl_dev in all_deadline_deviations_queryset:
all_deadline_deviations.setdefault(dl_dev.exercise.id, {})[dl_dev.submitter.user.id] = dl_dev.get_new_deadline()
all_max_submissions_deviations = {}
if options['include_max_submission_deviations']:
# Fetch all max submissions deviations in the course instances.
all_max_submissions_deviations_queryset = MaxSubmissionsRuleDeviation.objects.filter(
exercise__course_module__course_instance__id__in=course_instance_ids,
).select_related(
'exercise',
).prefetch_related(
Prefetch(
'submitter',
queryset=UserProfile.objects.select_related('user').only('user__id'),
),
).only(
'exercise__id',
'exercise__max_submissions',
'submitter__user__id',
'extra_submissions',
)
for sbms_dev in all_max_submissions_deviations_queryset:
all_max_submissions_deviations.setdefault(
sbms_dev.exercise.id,
{},
)[sbms_dev.submitter.user.id] = sbms_dev.exercise.max_submissions + sbms_dev.extra_submissions
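            # Resulting shape: {exercise_id: {user_id: base max_submissions + extra_submissions}}.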
# Create the CSV output files.
# One CSV file for all exercises.
if exercise_file_path:
self.write_exercise_csv(exercise_file_path, exercises)
self.stdout.write("Created the exercise file: " + exercise_file_path)
# One CSV file for all submissions.
if submission_file_path:
self.write_submission_csv(
submission_file_path,
submissions,
all_deadline_deviations,
all_max_submissions_deviations,
options['include_deadline_deviations'],
options['include_max_submission_deviations'],
options['include_student_ids'],
)
self.stdout.write("Created the submission file: " + submission_file_path)
def write_exercise_csv(self, exercise_file_path, exercises):
with open(exercise_file_path, 'w', newline='') as f:
writer = csv.DictWriter(f, fieldnames=(
'id',
'name',
'course_instance',
'deadline',
'late_submissions_allowed',
'late_submission_deadline',
'late_submission_penalty',
'max_points',
'max_submissions',
'category', # the name of the category
'difficulty',
'points_to_pass',
'status',
))
writer.writeheader()
for exercise in exercises:
writer.writerow({
'id': exercise.pk,
                    'name': exercise.name, # NB: this does not include the hierarchical numbering (e.g. "1.2.3")
'course_instance': exercise.course_module.course_instance.instance_name,
'deadline': exercise.course_module.closing_time,
'late_submissions_allowed': exercise.course_module.late_submissions_allowed,
'late_submission_deadline': exercise.course_module.late_submission_deadline,
'late_submission_penalty': exercise.course_module.late_submission_penalty,
'max_points': exercise.max_points,
'max_submissions': exercise.max_submissions,
'category': exercise.category.name,
'difficulty': exercise.difficulty,
'points_to_pass': exercise.points_to_pass,
'status': exercise.status,
})
def write_submission_csv(
self,
submission_file_path,
submissions,
all_deadline_deviations,
all_max_submissions_deviations,
include_deadline_deviations=False,
include_max_submission_deviations=False,
include_student_ids=False,
):
fieldnames = [
'submission_id',
'submitter_user_ids',
'exercise_id',
'submission_time',
'grade',
'service_points',
'service_max_points',
'status',
'late_penalty_applied',
'grading_time',
]
if include_max_submission_deviations:
fieldnames.insert(4, 'personal_max_submissions')
if include_deadline_deviations:
fieldnames.insert(4, 'personal_deadline')
if include_student_ids:
fieldnames.insert(2, 'student_ids')
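        # Worked example: with all three include flags enabled, the inserts above
        # produce the column order: submission_id, submitter_user_ids, student_ids,
        # exercise_id, submission_time, personal_deadline, personal_max_submissions,
        # grade, service_points, service_max_points, status, late_penalty_applied,
        # grading_time.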
with open(submission_file_path, 'w', newline='') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
for submission in submissions:
d = {
'submission_id': submission.pk,
'submitter_user_ids': '-'.join([str(profile.user.id) for profile in submission.submitter_userprofiles]),
'exercise_id': submission.exercise.pk,
'submission_time': submission.submission_time,
'grade': submission.grade,
'service_points': submission.service_points,
'service_max_points': submission.service_max_points,
'status': submission.status,
'late_penalty_applied': submission.late_penalty_applied,
'grading_time': submission.grading_time,
}
if include_student_ids:
d['student_ids'] = '-'.join([str(profile.student_id) for profile in submission.submitter_userprofiles])
if include_deadline_deviations:
dl_deviations = all_deadline_deviations.get(submission.exercise.pk, {})
personal_deadline = None
for profile in submission.submitter_userprofiles:
dl = dl_deviations.get(profile.user.id, None)
if dl is not None and (
personal_deadline is None
or dl > personal_deadline
):
personal_deadline = dl
d['personal_deadline'] = personal_deadline
if include_max_submission_deviations:
max_submissions_deviations = all_max_submissions_deviations.get(submission.exercise.pk, {})
personal_max_submissions = None
for profile in submission.submitter_userprofiles:
max_sbms = max_submissions_deviations.get(profile.user.id, None)
if max_sbms is not None and (
personal_max_submissions is None
or max_sbms > personal_max_submissions
):
personal_max_submissions = max_sbms
d['personal_max_submissions'] = personal_max_submissions
writer.writerow(d)
| teemulehtinen/a-plus | exercise/management/commands/export_submissions.py | Python | gpl-3.0 | 14,660 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_subnetwork
description:
- A VPC network is a virtual version of the traditional physical networks that exist
within and between physical data centers. A VPC network provides connectivity for
your Compute Engine virtual machine (VM) instances, Container Engine containers,
App Engine Flex services, and other network-related resources.
- Each GCP project contains one or more VPC networks. Each VPC network is a global
entity spanning all GCP regions. This global VPC network allows VM instances and
other resources to communicate with each other via internal, private IP addresses.
- Each VPC network is subdivided into subnets, and each subnet is contained within
a single region. You can have more than one subnet in a region for a given VPC network.
Each subnet has a contiguous private RFC1918 IP space. You create instances, containers,
and the like in these subnets.
- When you create an instance, you must create it in a subnet, and the instance draws
its internal IP address from that subnet.
- Virtual machine (VM) instances in a VPC network can communicate with instances in
all other subnets of the same VPC network, regardless of region, using their RFC1918
private IP addresses. You can isolate portions of the network, even entire subnets,
using firewall rules.
short_description: Creates a GCP Subnetwork
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
description:
description:
- An optional description of this resource. Provide this property when you create
the resource. This field can be set only at resource creation time.
required: false
ip_cidr_range:
description:
- The range of internal addresses that are owned by this subnetwork.
- Provide this property when you create the subnetwork. For example, 10.0.0.0/8
or 192.168.0.0/16. Ranges must be unique and non-overlapping within a network.
Only IPv4 is supported.
required: true
name:
description:
- The name of the resource, provided by the client when initially creating the
resource. The name must be 1-63 characters long, and comply with RFC1035. Specifically,
the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
network:
description:
- The network this subnet belongs to.
- Only networks that are in the distributed mode can have subnetworks.
- 'This field represents a link to a Network resource in GCP. It can be specified
    in two ways. First, you can place the selfLink of the resource here as a
    string. Alternatively, you can add `register: name-of-resource` to a gcp_compute_network
task and then set this network field to "{{ name-of-resource }}"'
required: true
enable_flow_logs:
description:
- Whether to enable flow logging for this subnetwork.
required: false
type: bool
version_added: 2.8
secondary_ip_ranges:
description:
- An array of configurations for secondary IP ranges for VM instances contained
in this subnetwork. The primary IP of such VM must belong to the primary ipCidrRange
of the subnetwork. The alias IPs may belong to either primary or secondary ranges.
required: false
version_added: 2.8
suboptions:
range_name:
description:
- The name associated with this subnetwork secondary range, used when adding
an alias IP range to a VM instance. The name must be 1-63 characters long,
and comply with RFC1035. The name must be unique within the subnetwork.
required: true
ip_cidr_range:
description:
- The range of IP addresses belonging to this subnetwork secondary range.
Provide this property when you create the subnetwork.
- Ranges must be unique and non-overlapping with all primary and secondary
IP ranges within a network. Only IPv4 is supported.
required: true
private_ip_google_access:
description:
- Whether the VMs in this subnet can access Google services without assigned external
IP addresses.
required: false
type: bool
region:
description:
- URL of the GCP region for this subnetwork.
required: true
extends_documentation_fragment: gcp
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/rest/beta/subnetworks)'
- 'Private Google Access: U(https://cloud.google.com/vpc/docs/configure-private-google-access)'
- 'Cloud Networking: U(https://cloud.google.com/vpc/docs/using-vpc)'
'''
EXAMPLES = '''
- name: create a network
gcp_compute_network:
name: "network-subnetwork"
auto_create_subnetworks: true
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: network
- name: create a subnetwork
gcp_compute_subnetwork:
name: ansiblenet
region: us-west1
network: "{{ network }}"
ip_cidr_range: 172.16.0.0/16
project: "test_project"
auth_kind: "serviceaccount"
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource. Provide this property when you create
the resource. This field can be set only at resource creation time.
returned: success
type: str
gatewayAddress:
description:
- The gateway address for default routes to reach destination addresses outside
this subnetwork.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
ipCidrRange:
description:
- The range of internal addresses that are owned by this subnetwork.
- Provide this property when you create the subnetwork. For example, 10.0.0.0/8
or 192.168.0.0/16. Ranges must be unique and non-overlapping within a network.
Only IPv4 is supported.
returned: success
type: str
name:
description:
- The name of the resource, provided by the client when initially creating the resource.
The name must be 1-63 characters long, and comply with RFC1035. Specifically,
the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
network:
description:
- The network this subnet belongs to.
- Only networks that are in the distributed mode can have subnetworks.
returned: success
type: str
enableFlowLogs:
description:
- Whether to enable flow logging for this subnetwork.
returned: success
type: bool
fingerprint:
description:
- Fingerprint of this resource. This field is used internally during updates of
this resource.
returned: success
type: str
secondaryIpRanges:
description:
- An array of configurations for secondary IP ranges for VM instances contained
in this subnetwork. The primary IP of such VM must belong to the primary ipCidrRange
of the subnetwork. The alias IPs may belong to either primary or secondary ranges.
returned: success
type: complex
contains:
rangeName:
description:
- The name associated with this subnetwork secondary range, used when adding
an alias IP range to a VM instance. The name must be 1-63 characters long,
and comply with RFC1035. The name must be unique within the subnetwork.
returned: success
type: str
ipCidrRange:
description:
- The range of IP addresses belonging to this subnetwork secondary range. Provide
this property when you create the subnetwork.
- Ranges must be unique and non-overlapping with all primary and secondary IP
ranges within a network. Only IPv4 is supported.
returned: success
type: str
privateIpGoogleAccess:
description:
- Whether the VMs in this subnet can access Google services without assigned external
IP addresses.
returned: success
type: bool
region:
description:
- URL of the GCP region for this subnetwork.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
description=dict(type='str'),
ip_cidr_range=dict(required=True, type='str'),
name=dict(required=True, type='str'),
network=dict(required=True),
enable_flow_logs=dict(type='bool'),
secondary_ip_ranges=dict(
type='list', elements='dict', options=dict(range_name=dict(required=True, type='str'), ip_cidr_range=dict(required=True, type='str'))
),
private_ip_google_access=dict(type='bool'),
region=dict(required=True, type='str'),
)
)
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/compute']
state = module.params['state']
kind = 'compute#subnetwork'
fetch = fetch_resource(module, self_link(module), kind)
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
update(module, self_link(module), kind, fetch)
fetch = fetch_resource(module, self_link(module), kind)
changed = True
else:
delete(module, self_link(module), kind)
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module), kind)
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.post(link, resource_to_request(module)))
def update(module, link, kind, fetch):
update_fields(module, resource_to_request(module), response_to_hash(module, fetch))
return fetch_resource(module, self_link(module), kind)
def update_fields(module, request, response):
if response.get('ipCidrRange') != request.get('ipCidrRange'):
ip_cidr_range_update(module, request, response)
if response.get('enableFlowLogs') != request.get('enableFlowLogs') or response.get('secondaryIpRanges') != request.get('secondaryIpRanges'):
enable_flow_logs_update(module, request, response)
if response.get('privateIpGoogleAccess') != request.get('privateIpGoogleAccess'):
private_ip_google_access_update(module, request, response)
def ip_cidr_range_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.post(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/regions/{region}/subnetworks/{name}/expandIpCidrRange"]).format(**module.params),
{u'ipCidrRange': module.params.get('ip_cidr_range')},
)
def enable_flow_logs_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.patch(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/regions/{region}/subnetworks/{name}"]).format(**module.params),
{
u'enableFlowLogs': module.params.get('enable_flow_logs'),
u'fingerprint': response.get('fingerprint'),
u'secondaryIpRanges': SubnetworkSecondaryiprangesArray(module.params.get('secondary_ip_ranges', []), module).to_request(),
},
)
def private_ip_google_access_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.post(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/regions/{region}/subnetworks/{name}/setPrivateIpGoogleAccess"]).format(
**module.params
),
{u'privateIpGoogleAccess': module.params.get('private_ip_google_access')},
)
def delete(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.delete(link))
def resource_to_request(module):
request = {
u'kind': 'compute#subnetwork',
u'description': module.params.get('description'),
u'ipCidrRange': module.params.get('ip_cidr_range'),
u'name': module.params.get('name'),
u'network': replace_resource_dict(module.params.get(u'network', {}), 'selfLink'),
u'enableFlowLogs': module.params.get('enable_flow_logs'),
u'secondaryIpRanges': SubnetworkSecondaryiprangesArray(module.params.get('secondary_ip_ranges', []), module).to_request(),
u'privateIpGoogleAccess': module.params.get('private_ip_google_access'),
u'region': module.params.get('region'),
}
return_vals = {}
for k, v in request.items():
if v or v is False:
return_vals[k] = v
return return_vals
def fetch_resource(module, link, kind, allow_not_found=True):
auth = GcpSession(module, 'compute')
return return_if_object(module, auth.get(link), kind, allow_not_found)
def self_link(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/subnetworks/{name}".format(**module.params)
def collection(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/subnetworks".format(**module.params)
def return_if_object(module, response, kind, allow_not_found=False):
# If not found, return nothing.
if allow_not_found and response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError):
module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'creationTimestamp': response.get(u'creationTimestamp'),
u'description': response.get(u'description'),
u'gatewayAddress': response.get(u'gatewayAddress'),
u'id': response.get(u'id'),
u'ipCidrRange': response.get(u'ipCidrRange'),
u'name': response.get(u'name'),
u'network': replace_resource_dict(module.params.get(u'network', {}), 'selfLink'),
u'enableFlowLogs': response.get(u'enableFlowLogs'),
u'fingerprint': response.get(u'fingerprint'),
u'secondaryIpRanges': SubnetworkSecondaryiprangesArray(response.get(u'secondaryIpRanges', []), module).from_response(),
u'privateIpGoogleAccess': response.get(u'privateIpGoogleAccess'),
u'region': module.params.get('region'),
}
def async_op_url(module, extra_data=None):
if extra_data is None:
extra_data = {}
url = "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/operations/{op_id}"
combined = extra_data.copy()
combined.update(module.params)
return url.format(**combined)
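# Illustrative expansion (values are placeholders): with project='my-proj',
# region='us-west1' and extra_data={'op_id': 'operation-123'}, this returns
# "https://www.googleapis.com/compute/v1/projects/my-proj/regions/us-west1/operations/operation-123".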
def wait_for_operation(module, response):
op_result = return_if_object(module, response, 'compute#operation')
if op_result is None:
return {}
status = navigate_hash(op_result, ['status'])
wait_done = wait_for_completion(status, op_result, module)
return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#subnetwork')
def wait_for_completion(status, op_result, module):
op_id = navigate_hash(op_result, ['name'])
op_uri = async_op_url(module, {'op_id': op_id})
while status != 'DONE':
raise_if_errors(op_result, ['error', 'errors'], module)
time.sleep(1.0)
op_result = fetch_resource(module, op_uri, 'compute#operation')
status = navigate_hash(op_result, ['status'])
return op_result
def raise_if_errors(response, err_path, module):
errors = navigate_hash(response, err_path)
if errors is not None:
module.fail_json(msg=errors)
class SubnetworkSecondaryiprangesArray(object):
def __init__(self, request, module):
self.module = module
if request:
self.request = request
else:
self.request = []
def to_request(self):
items = []
for item in self.request:
items.append(self._request_for_item(item))
return items
def from_response(self):
items = []
for item in self.request:
items.append(self._response_from_item(item))
return items
def _request_for_item(self, item):
return remove_nones_from_dict({u'rangeName': item.get('range_name'), u'ipCidrRange': item.get('ip_cidr_range')})
def _response_from_item(self, item):
return remove_nones_from_dict({u'rangeName': item.get(u'rangeName'), u'ipCidrRange': item.get(u'ipCidrRange')})
if __name__ == '__main__':
main()
| valentin-krasontovitsch/ansible | lib/ansible/modules/cloud/google/gcp_compute_subnetwork.py | Python | gpl-3.0 | 19,851 |
from __future__ import absolute_import
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, transaction
from django.db.transaction import commit_on_success, commit_manually, TransactionManagementError
from django.test import TransactionTestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils.unittest import skipIf
from .models import Mod, M2mA, M2mB
class TestTransactionClosing(TransactionTestCase):
"""
Tests to make sure that transactions are properly closed
when they should be, and aren't left pending after operations
have been performed in them. Refs #9964.
"""
def test_raw_committed_on_success(self):
"""
Make sure a transaction consisting of raw SQL execution gets
committed by the commit_on_success decorator.
"""
@commit_on_success
def raw_sql():
"Write a record using raw sql under a commit_on_success decorator"
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (id,fld) values (17,18)")
raw_sql()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
try:
# Check that the record is in the DB
obj = Mod.objects.get(pk=17)
self.assertEqual(obj.fld, 18)
except Mod.DoesNotExist:
self.fail("transaction with raw sql not committed")
def test_commit_manually_enforced(self):
"""
Make sure that under commit_manually, even "read-only" transaction require closure
(commit or rollback), and a transaction left pending is treated as an error.
"""
@commit_manually
        def non_committer():
            "Execute a managed transaction with read-only operations and fail to commit"
            _ = Mod.objects.count()
        self.assertRaises(TransactionManagementError, non_committer)
def test_commit_manually_commit_ok(self):
"""
Test that under commit_manually, a committed transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def committer():
"""
Perform a database query, then commit the transaction
"""
_ = Mod.objects.count()
transaction.commit()
try:
committer()
except TransactionManagementError:
self.fail("Commit did not clear the transaction state")
def test_commit_manually_rollback_ok(self):
"""
Test that under commit_manually, a rolled-back transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def roller_back():
"""
Perform a database query, then rollback the transaction
"""
_ = Mod.objects.count()
transaction.rollback()
try:
roller_back()
except TransactionManagementError:
self.fail("Rollback did not clear the transaction state")
def test_commit_manually_enforced_after_commit(self):
"""
Test that under commit_manually, if a transaction is committed and an operation is
performed later, we still require the new transaction to be closed
"""
@commit_manually
def fake_committer():
"Query, commit, then query again, leaving with a pending transaction"
_ = Mod.objects.count()
transaction.commit()
_ = Mod.objects.count()
self.assertRaises(TransactionManagementError, fake_committer)
@skipUnlessDBFeature('supports_transactions')
def test_reuse_cursor_reference(self):
"""
Make sure transaction closure is enforced even when the queries are performed
through a single cursor reference retrieved in the beginning
(this is to show why it is wrong to set the transaction dirty only when a cursor
is fetched from the connection).
"""
@commit_on_success
def reuse_cursor_ref():
"""
Fetch a cursor, perform an query, rollback to close the transaction,
then write a record (in a new transaction) using the same cursor object
(reference). All this under commit_on_success, so the second insert should
be committed.
"""
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (id,fld) values (1,2)")
transaction.rollback()
cursor.execute("INSERT into transactions_regress_mod (id,fld) values (1,2)")
reuse_cursor_ref()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
try:
# Check that the record is in the DB
obj = Mod.objects.get(pk=1)
self.assertEqual(obj.fld, 2)
except Mod.DoesNotExist:
self.fail("After ending a transaction, cursor use no longer sets dirty")
def test_failing_query_transaction_closed(self):
"""
Make sure that under commit_on_success, a transaction is rolled back even if
the first database-modifying operation fails.
This is prompted by http://code.djangoproject.com/ticket/6669 (and based on sample
code posted there to exemplify the problem): Before Django 1.3,
transactions were only marked "dirty" by the save() function after it successfully
wrote the object to the database.
"""
from django.contrib.auth.models import User
@transaction.commit_on_success
def create_system_user():
"Create a user in a transaction"
user = User.objects.create_user(username='system', password='iamr00t', email='root@SITENAME.com')
# Redundant, just makes sure the user id was read back from DB
Mod.objects.create(fld=user.id)
# Create a user
create_system_user()
try:
# The second call to create_system_user should fail for violating a unique constraint
# (it's trying to re-create the same user)
create_system_user()
except:
pass
else:
raise ImproperlyConfigured('Unique constraint not enforced on django.contrib.auth.models.User')
try:
# Try to read the database. If the last transaction was indeed closed,
# this should cause no problems
_ = User.objects.all()[0]
except:
self.fail("A transaction consisting of a failed operation was not closed.")
@override_settings(DEBUG=True)
def test_failing_query_transaction_closed_debug(self):
"""
Regression for #6669. Same test as above, with DEBUG=True.
"""
self.test_failing_query_transaction_closed()
class TestManyToManyAddTransaction(TransactionTestCase):
def test_manyrelated_add_commit(self):
"Test for https://code.djangoproject.com/ticket/16818"
a = M2mA.objects.create()
b = M2mB.objects.create(fld=10)
a.others.add(b)
# We're in a TransactionTestCase and have not changed transaction
# behavior from default of "autocommit", so this rollback should not
# actually do anything. If it does in fact undo our add, that's a bug
# that the bulk insert was not auto-committed.
transaction.rollback()
self.assertEqual(a.others.count(), 1)
class SavepointTest(TransactionTestCase):
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_commit(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
mod1 = Mod.objects.filter(pk=pk).update(fld=10)
transaction.savepoint_commit(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 10)
work()
    @skipIf(connection.vendor == 'mysql' and
            connection.features._mysql_storage_engine == 'MyISAM',
            "MyISAM MySQL storage engine doesn't support savepoints")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_rollback(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
mod1 = Mod.objects.filter(pk=pk).update(fld=20)
transaction.savepoint_rollback(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 1)
work()
| cobalys/django | tests/regressiontests/transactions_regress/tests.py | Python | bsd-3-clause | 8,777 |
#!/usr/bin/env python
import math, numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import hrl_lib.viz as hv
import hrl_lib.util as ut
import rospy
import pickle
def plotdata():
plot_data = ut.load_pickle('skin_data_2.pkl')
fig = plt.figure()
ax = Axes3D(fig)
ax.plot3D(plot_data[4],plot_data[1],plot_data[2])
ax.set_xlabel('Fy')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
fig.add_axes(ax)
plt.show()
if __name__ == '__main__':
plotdata()
| tapomayukh/projects_in_python | sandbox_tapo/src/skin_related/Misc/plotdata.py | Python | mit | 582 |
"""Lannouncer platform for notify component."""
import logging
import socket
from urllib.parse import urlencode
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import (
ATTR_DATA,
PLATFORM_SCHEMA,
BaseNotificationService,
)
ATTR_METHOD = "method"
ATTR_METHOD_DEFAULT = "speak"
ATTR_METHOD_ALLOWED = ["speak", "alarm"]
DEFAULT_PORT = 1035
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the Lannouncer notification service."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
return LannouncerNotificationService(hass, host, port)
class LannouncerNotificationService(BaseNotificationService):
"""Implementation of a notification service for Lannouncer."""
def __init__(self, hass, host, port):
"""Initialize the service."""
self._hass = hass
self._host = host
self._port = port
def send_message(self, message="", **kwargs):
"""Send a message to Lannouncer."""
data = kwargs.get(ATTR_DATA)
if data is not None and ATTR_METHOD in data:
method = data.get(ATTR_METHOD)
else:
method = ATTR_METHOD_DEFAULT
if method not in ATTR_METHOD_ALLOWED:
_LOGGER.error("Unknown method %s", method)
return
cmd = urlencode({method: message})
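        # e.g. urlencode({'speak': 'Hello world'}) -> 'speak=Hello+world'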
try:
# Open socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(10)
sock.connect((self._host, self._port))
# Send message
_LOGGER.debug("Sending message: %s", cmd)
sock.sendall(cmd.encode())
sock.sendall("&@DONE@\n".encode())
# Check response
buffer = sock.recv(1024)
if buffer != b"LANnouncer: OK":
_LOGGER.error("Error sending data to Lannnouncer: %s", buffer.decode())
# Close socket
sock.close()
except socket.gaierror:
_LOGGER.error("Unable to connect to host %s", self._host)
except socket.error:
            _LOGGER.exception("Failed to send data to Lannouncer")
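# Example configuration.yaml entry (a sketch based on PLATFORM_SCHEMA above;
# host and port values are placeholders):
#
# notify:
#   - platform: lannouncer
#     host: 192.168.1.50
#     port: 1035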
| fbradyirl/home-assistant | homeassistant/components/lannouncer/notify.py | Python | apache-2.0 | 2,459 |
#!/usr/bin/env python
from saml2.sigver import _get_xmlsec_cryptobackend, SecurityContext
from saml2.httpbase import HTTPBase
from saml2 import saml
from saml2 import md
from saml2.attribute_converter import ac_factory
from saml2.extension import dri
from saml2.extension import idpdisc
from saml2.extension import mdattr
from saml2.extension import mdrpi
from saml2.extension import mdui
from saml2.extension import shibmd
from saml2.extension import ui
import xmldsig
import xmlenc
import argparse
from saml2.mdstore import MetaDataFile, MetaDataExtern
__author__ = 'rolandh'
"""
A script that imports and verifies metadata and then dumps it in a basic
dictionary format.
"""
ONTS = {
saml.NAMESPACE: saml,
mdui.NAMESPACE: mdui,
mdattr.NAMESPACE: mdattr,
mdrpi.NAMESPACE: mdrpi,
dri.NAMESPACE: dri,
ui.NAMESPACE: ui,
idpdisc.NAMESPACE: idpdisc,
md.NAMESPACE: md,
xmldsig.NAMESPACE: xmldsig,
xmlenc.NAMESPACE: xmlenc,
shibmd.NAMESPACE: shibmd
}
parser = argparse.ArgumentParser()
parser.add_argument('-t', dest='type')
parser.add_argument('-u', dest='url')
parser.add_argument('-c', dest='cert')
parser.add_argument('-a', dest='attrsmap')
parser.add_argument('-o', dest='output')
parser.add_argument('-x', dest='xmlsec')
parser.add_argument(dest="item")
args = parser.parse_args()
metad = None
if args.type == "local":
metad = MetaDataFile(ONTS.values(), args.item, args.item)
elif args.type == "external":
ATTRCONV = ac_factory(args.attrsmap)
httpc = HTTPBase()
crypto = _get_xmlsec_cryptobackend(args.xmlsec)
sc = SecurityContext(crypto)
metad = MetaDataExtern(ONTS.values(), ATTRCONV, args.url,
sc, cert=args.cert, http=httpc)
if metad:
metad.load()
txt = metad.dumps()
if args.output:
f = open(args.output, "w")
f.write(txt)
f.close()
else:
print txt
| tpazderka/pysaml2 | tools/mdexport.py | Python | bsd-2-clause | 1,912 |
#!/usr/bin/env python
from ConfigParser import ConfigParser
from ordereddict import OrderedDict
import sys
def make_parser():
parser = ConfigParser(dict_type=OrderedDict)
parser.optionxform = str
return parser
def transform(sectionName):
sectionName = sectionName.replace(",Dialog=", ", Dialog=")
if sectionName.startswith("View="):
if sectionName.endswith("Viewer"):
return "Type=Viewer, " + sectionName.split(", ")[0]
else:
parts = sectionName.split(",")
parts.reverse()
if len(parts) == 1:
parts.insert(0, "Type=View")
return ", ".join(parts)
else:
return sectionName
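# Worked examples of the renaming rule above:
#   transform("View=LogViewer") -> "Type=Viewer, View=LogViewer"
#   transform("View=Editor")    -> "Type=View, View=Editor"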
if __name__ == "__main__":
fileName = sys.argv[1]
parser = make_parser()
parser.read([ fileName ])
newParser = make_parser()
for section in parser.sections():
newSection = transform(section)
newParser.add_section(newSection)
for option, value in parser.items(section):
newParser.set(newSection, option, value)
newParser.write(open(fileName + ".tmp", "w"))
| emilybache/texttest-runner | src/main/python/storytext/bin/migrate_uimap.py | Python | mit | 1,119 |
# author: Zhongyuan Sun
# Precomputing the loop numbers improves the runtime percentile from beating 0.63% to beating 93.93% of submissions
class Solution(object):
def isHappy(self, n):
"""
:type n: int
:rtype: bool
"""
if n <= 0:
return False
x = self.cal(n)
# loop_numbers = self.all_loop_numbers()
loop_numbers = [2, 3, 4, 5, 6, 8, 9,
11, 12, 14, 15, 16, 17, 18,
20, 21, 22, 24, 25, 26, 27, 29,
30, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 45, 46, 47, 48,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
60, 61, 62, 63, 64, 65, 66, 67, 69,
71, 72, 73, 74, 75, 76, 77, 78,
80, 81, 83, 84, 85, 87, 88, 89,
90, 92, 93, 95, 96, 98, 99,
106, 113, 117, 128, 145, 162]
while x != 1 and x not in loop_numbers:
x = self.cal(x)
return x == 1
def cal(self, n):
res = 0
while n:
res += (n % 10) ** 2
n /= 10
return res
# return sum([int(c)**2 for c in str(n)])
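    # Worked example: cal(19) == 9**2 + 1**2 == 82, and the happy chain for 19
    # is 19 -> 82 -> 68 -> 100 -> 1.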
def all_loop_numbers(self):
res = []
for i in range(1, 100):
tmp = [i]
next = self.cal(i)
while next not in res and next not in tmp and next != 1:
tmp.append(next)
next = self.cal(next)
if next != 1:
res += tmp
res = list(set(res))
return res
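# Illustrative usage (not executed here):
#   sol = Solution()
#   assert sol.isHappy(19)      # 19 -> 82 -> 68 -> 100 -> 1
#   assert not sol.isHappy(2)   # cal(2) == 4, and 4 is a known loop number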
| danielsunzhongyuan/my_leetcode_in_python | happy_number_202.py | Python | apache-2.0 | 1,602 |
# -*- coding: utf-8 -*-
import gevent.monkey;gevent.monkey.patch_socket()
from gevent.pool import Pool
import codecs
import requests
from redis import Redis
from rq import Queue
from pyquery import PyQuery as pq
from bs4 import BeautifulSoup
from model import OneIssue
q = Queue(connection=Redis())
# q.enqueue_call(func=OneIssue.create,
# args=(1,[]),
# timeout=30)
# 177 - 432
# url="http://hanhan.qq.com/hanhan/one/one177m.htm"
def fetch(i):
url = "http://hanhan.qq.com/hanhan/one/one%sm.htm"%i
r = requests.get(url)
if r.status_code == 200:
d = BeautifulSoup(r.content.decode("gb2312", "ignore"), "html5lib")
q.enqueue_call( func=OneIssue.create,
args=(i,[str(one) for one in d.find_all(class_="ones")])
)
pool = Pool(50)
pool.map(fetch, xrange(177,432+1))
| zhy0216/another-one | fetch_new.py | Python | mit | 876 |
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
'''
* Where the property/indexer is defined
- interface
* Property/Indexer type
- Same as described in the "field"
* How the property is defined
- Static / instance
- Read-write, read-only, write-only properties
* Derivation scenario: base / derived type each has different accessor.
- As interface implementation (interface-type . identifier)
* How the indexer is defined
- Static / instance
- this[parameter-list], or VB style, name[parameter-list]
* signature again: ref, params
* overloads
- As interface implementation (interface-type.this[xxx], interface-type.Foo[xxx])
* set/get via Type|object (dot) (Static|Intance) Property|Indexer
* Set value with different type to the property/indexer, try None
* How Indexer choose the overload?
- Incorrect argument number, type
- (python) Overloaded index properties in general: foo['xyz'], foo['xyz', 'def'], (foo[] ?)
* repeating from the derived class or its instance
* (python) __set__/__get__...
* Negative scenario: property as By-ref argument
* Able to call the actual underlying methods
* Other operations against them
- Call, dot,
''' | tempbottle/ironpython3 | Tests/interop/net/property/__init__.py | Python | apache-2.0 | 1,852 |
from bika.lims import bikaMessageFactory as _
from bika.lims.browser.bika_listing import BikaListingView
from bika.lims.permissions import *
from Products.CMFCore.utils import getToolByName
class ClientSamplePointsView(BikaListingView):
"""This is displayed in the "Sample Points" tab on each client
"""
def __init__(self, context, request):
super(ClientSamplePointsView, self).__init__(context, request)
self.catalog = "bika_setup_catalog"
self.contentFilter = {
'portal_type': 'SamplePoint',
'sort_on': 'sortable_title',
'path': {
"query": "/".join(self.context.getPhysicalPath()),
"level": 0},
}
self.show_sort_column = False
self.show_select_row = False
self.show_select_column = True
self.pagesize = 50
self.form_id = "SamplePoints"
self.icon = self.portal_url + \
"/++resource++bika.lims.images/samplepoint_big.png"
self.title = self.context.translate(_("Sample Points"))
self.description = ""
self.columns = {
'title': {'title': _('Title'),
'index': 'sortable_title'},
'Description': {'title': _('Description'),
'index': 'description'},
}
self.review_states = [
{'id': 'default',
'title': _('Active'),
'contentFilter': {'inactive_state': 'active'},
'transitions': [{'id': 'deactivate'}, ],
'columns': ['title', 'Description']},
{'id': 'inactive',
'title': _('Dormant'),
'contentFilter': {'inactive_state': 'inactive'},
'transitions': [{'id': 'activate'}, ],
'columns': ['title', 'Description']},
{'id': 'all',
'title': _('All'),
'contentFilter': {},
'columns': ['title', 'Description']},
]
def __call__(self):
mtool = getToolByName(self.context, 'portal_membership')
checkPermission = mtool.checkPermission
if checkPermission(AddSamplePoint, self.context):
self.context_actions[_('Add')] = \
{'url': 'createObject?type_name=SamplePoint',
'icon': '++resource++bika.lims.images/add.png'}
return super(ClientSamplePointsView, self).__call__()
def folderitems(self):
items = BikaListingView.folderitems(self)
for x in range(len(items)):
if not items[x].has_key('obj'):
continue
obj = items[x]['obj']
items[x]['title'] = obj.Title()
items[x]['replace']['title'] = \
"<a href='%s'>%s</a>" % (items[x]['url'], items[x]['title'])
return items
| labsanmartin/Bika-LIMS | bika/lims/browser/client/views/samplepoints.py | Python | agpl-3.0 | 2,819 |
import _plotly_utils.basevalidators
class StartarrowsizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name="startarrowsize",
parent_name="layout.scene.annotation",
**kwargs
):
super(StartarrowsizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 0.3),
role=kwargs.pop("role", "style"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/layout/scene/annotation/_startarrowsize.py | Python | mit | 557 |
"""
Methods responsible for managing a user's session and login/logout
"""
import logging
from ctypes import cast, c_ulong, byref
from .cryptoki import CK_ULONG, CA_TimeSync, CA_InitAudit, CK_SLOT_ID, CA_GetTime, CK_CHAR_PTR
from .exceptions import make_error_handle_function
logger = logging.getLogger(__name__)
def ca_init_audit(slot, audit_pin, audit_label):
"""
:param slot:
:param audit_pin:
:param audit_label:
"""
if audit_pin == "":
ret = CA_InitAudit(CK_SLOT_ID(slot), None, CK_ULONG(0), cast(audit_label, CK_CHAR_PTR))
else:
ret = CA_InitAudit(
CK_SLOT_ID(slot),
cast(audit_pin, CK_CHAR_PTR),
CK_ULONG(len(audit_pin)),
cast(audit_label, CK_CHAR_PTR),
)
return ret
ca_init_audit_ex = make_error_handle_function(ca_init_audit)
def ca_time_sync(h_session, ultime):
"""
:param int h_session: Session handle
:param ultime:
"""
ret = CA_TimeSync(h_session, CK_ULONG(ultime))
return ret
ca_time_sync_ex = make_error_handle_function(ca_time_sync)
def ca_get_time(h_session):
"""
:param int h_session: Session handle
"""
hsm_time = c_ulong()
ret = CA_GetTime(h_session, byref(hsm_time))
return ret, hsm_time
ca_get_time_ex = make_error_handle_function(ca_get_time)
| gemalto/pycryptoki | pycryptoki/audit_handling.py | Python | apache-2.0 | 1,343 |
# Copyright 2015
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#======================================================================
#
# PILO Header Format
#
# src - 6 bytes
# dst - 6 bytes
# seq - 4 bytes
# ack - 4 bytes
# partial_acks - 4 bytes
# flags - 2 bytes
# ttl - 2 bytes
#
#======================================================================
# TODO: Need to figure out options/flags - do we need an ack field? Any other options?
import struct
from packet_utils import *
from socket import htons
from socket import htonl
from packet_base import packet_base
from pox.lib.addresses import *
import logging
log = logging.getLogger('packet')
ETHER_ANY = EthAddr(b"\x00\x00\x00\x00\x00\x00")
ETHER_BROADCAST = EthAddr(b"\xff\xff\xff\xff\xff\xff")
class pilo(packet_base):
"PILO packet struct"
MIN_LEN = 28
  TTL_INIT = 12 # TODO: this default probably needs more testing
ACK_flag = 0x01
SYN_flag = 0x02
FIN_flag = 0x04
HRB_flag = 0x08
@property
def ACK (self): return True if self.flags & self.ACK_flag else False
@property
def SYN (self): return True if self.flags & self.SYN_flag else False
@property
def FIN (self): return True if self.flags & self.FIN_flag else False
@property
def HRB (self): return True if self.flags & self.HRB_flag else False
@ACK.setter
def ACK (self, value): self._setflag(self.ACK_flag, value)
@SYN.setter
def SYN (self, value): self._setflag(self.SYN_flag, value)
@FIN.setter
def FIN (self, value): self._setflag(self.FIN_flag, value)
@HRB.setter
def HRB (self, value): self._setflag(self.HRB_flag, value)
def _setflag (self, flag, value):
self.flags = (self.flags & ~flag) | (flag if value else 0)
@classmethod
def get_flag_options (cls):
return ['ACK', 'SYN', 'FIN', 'HRB']
def __init__(self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
    self.src_address = 0 # 48 bit (6-byte Ethernet address)
    self.dst_address = 0 # 48 bit (6-byte Ethernet address)
self.seq = 0 # 32 bit
self.ack = 0 # 32 bit
self.partial_acks = 0 # 32 bit
self.flags = 0 # flags 16 bits
self.ttl = self.TTL_INIT # ttl 16 bits
self.next = b''
if raw is not None:
self.parse(raw)
self._init(kw)
def __str__(self):
f = ''
if self.ACK: f += 'A'
if self.SYN: f += 'S'
if self.FIN: f += 'F'
if self.HRB: f += 'H'
p_ack = ', '.join(str(ack) for ack in self.get_partial_acks())
s = '[PILO %s>%s seq:%s ack:%s p_ack:%s f:%s ttl:%s len:%s]' % (self.src_address,
self.dst_address, self.seq, self.ack, p_ack, f, self.ttl, len(self.pack()))
return s
def parse(self, raw):
assert isinstance(raw, bytes)
self.next = None # In case of unfinished parsing
self.raw = raw
dlen = len(raw)
if dlen < pilo.MIN_LEN:
self.msg('(pilo parse) warning PILO packet data too short to parse header: data len %u' % (dlen,))
return
self.src_address = EthAddr(raw[:6])
self.dst_address= EthAddr(raw[6:12])
(self.seq, self.ack, self.partial_acks, self.flags, self.ttl) \
= struct.unpack('!IIIHH', raw[12:pilo.MIN_LEN])
self.hdr_len = pilo.MIN_LEN ## TODO: should this be dynamic or will we have fixed header size?
self.payload_len = dlen - self.hdr_len
self.next = raw[self.hdr_len:]
self.parsed = True
def hdr(self, payload):
dst = self.dst_address
src = self.src_address
if type(dst) is EthAddr:
dst = dst.toRaw()
if type(src) is EthAddr:
src = src.toRaw()
header = struct.pack('!6s6sIIIHH', src, dst,
self.seq, self.ack, self.partial_acks, self.flags, self.ttl)
return header
def set_partial_acks (self, ack_array):
max_size = 8
self.partial_acks = 0
for ack in ack_array:
if ack > self.ack and ack - self.ack < max_size:
self.partial_acks = self.partial_acks | (1 << (ack - 1 - self.ack))
else:
        log.warn(ack)
        log.warn(ack_array)
        log.warn('partial ack is invalid')
def get_partial_ack_holes (self):
holes_array = []
if self.partial_acks:
tmp_ack = self.partial_acks
ack_counter = 1
while tmp_ack > 0:
if tmp_ack & 1 != 1:
holes_array.append(ack_counter + self.ack)
tmp_ack >>= 1
ack_counter += 1
return holes_array
def get_partial_acks (self):
ack_array = []
if self.partial_acks:
tmp_ack = self.partial_acks
ack_counter = 1
while tmp_ack > 0:
if tmp_ack & 1 == 1:
ack_array.append(ack_counter + self.ack)
tmp_ack >>= 1
ack_counter += 1
return ack_array
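  # Worked example of the bitfield above: with self.ack == 4,
  # set_partial_acks([5, 7]) sets bits 0 and 2, so partial_acks == 0b101;
  # get_partial_acks() then returns [5, 7] and get_partial_ack_holes()
  # returns [6].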
def equals (self, other, match_ttl=False):
return (self.src_address == other.src_address and
self.dst_address == other.dst_address and
self.seq == other.seq and
self.ack == other.ack and
self.flags == other.flags and
self.raw == other.raw and
( self.ttl == other.ttl or match_ttl))
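# Minimal round-trip sketch (illustrative, not part of the original module;
# addresses and field values are arbitrary):
if __name__ == '__main__':
  p = pilo()
  p.src_address = EthAddr('00:11:22:33:44:55')
  p.dst_address = EthAddr('66:77:88:99:aa:bb')
  p.seq = 1
  p.SYN = True
  q = pilo(raw=p.hdr(b''))
  assert q.SYN and q.seq == 1 and q.src_address == p.src_address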
| denovogroup/pox | pox/lib/packet/pilo.py | Python | apache-2.0 | 6,007 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Nonce'
db.create_table('oauth_provider_nonce', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('token_key', self.gf('django.db.models.fields.CharField')(max_length=32)),
('consumer_key', self.gf('django.db.models.fields.CharField')(max_length=256)),
('key', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('oauth_provider', ['Nonce'])
# Adding model 'Resource'
db.create_table('oauth_provider_resource', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('url', self.gf('django.db.models.fields.TextField')(max_length=2083)),
('is_readonly', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('oauth_provider', ['Resource'])
# Adding model 'Consumer'
db.create_table('oauth_provider_consumer', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('description', self.gf('django.db.models.fields.TextField')()),
('key', self.gf('django.db.models.fields.CharField')(max_length=256)),
('secret', self.gf('django.db.models.fields.CharField')(max_length=16, blank=True)),
('status', self.gf('django.db.models.fields.SmallIntegerField')(default=1)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
))
db.send_create_signal('oauth_provider', ['Consumer'])
# Adding model 'Token'
db.create_table('oauth_provider_token', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('key', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
('secret', self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True)),
('token_type', self.gf('django.db.models.fields.SmallIntegerField')()),
('timestamp', self.gf('django.db.models.fields.IntegerField')(default=1327884735L)),
('is_approved', self.gf('django.db.models.fields.BooleanField')(default=False)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='tokens', null=True, to=orm['auth.User'])),
('consumer', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauth_provider.Consumer'])),
('resource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauth_provider.Resource'])),
('verifier', self.gf('django.db.models.fields.CharField')(max_length=10)),
('callback', self.gf('django.db.models.fields.CharField')(max_length=2083, null=True, blank=True)),
('callback_confirmed', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('oauth_provider', ['Token'])
def backwards(self, orm):
# Deleting model 'Nonce'
db.delete_table('oauth_provider_nonce')
# Deleting model 'Resource'
db.delete_table('oauth_provider_resource')
# Deleting model 'Consumer'
db.delete_table('oauth_provider_consumer')
# Deleting model 'Token'
db.delete_table('oauth_provider_token')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'oauth_provider.consumer': {
'Meta': {'object_name': 'Consumer'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'oauth_provider.nonce': {
'Meta': {'object_name': 'Nonce'},
'consumer_key': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'token_key': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'oauth_provider.resource': {
'Meta': {'object_name': 'Resource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_readonly': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.TextField', [], {'max_length': '2083'})
},
'oauth_provider.token': {
'Meta': {'object_name': 'Token'},
'callback': ('django.db.models.fields.CharField', [], {'max_length': '2083', 'null': 'True', 'blank': 'True'}),
'callback_confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'consumer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oauth_provider.Consumer']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oauth_provider.Resource']"}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.IntegerField', [], {'default': '1327884735L'}),
'token_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tokens'", 'null': 'True', 'to': "orm['auth.User']"}),
'verifier': ('django.db.models.fields.CharField', [], {'max_length': '10'})
}
}
complete_apps = ['oauth_provider']
| amrox/django-oauth-plus | oauth_provider/migrations/0001_initial.py | Python | bsd-3-clause | 9,975 |
import numpy as np
from sys import stdin, stdout, argv
# Given a tree and a node, return all the leaves descending from that node.
def get_leaves(tree, node):
# if node is already a leaf, return it
if node not in tree.keys():
return [node]
# otherwise explore the tree to get all leaves from the node
open_list = tree[node]
idx = 0
while idx < len(open_list):
if open_list[idx] in tree.keys(): # not a leaf
open_list.extend(tree[open_list[idx]])
del open_list[idx]
else:
idx += 1
return open_list
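# Sanity check (hypothetical tree): with tree = {3: [0, 1], 4: [3, 2]},
# get_leaves(tree, 4) expands node 3 in place and returns [2, 0, 1].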
# This function builds a hierarchical clustering tree.
# It starts with each point as a leaf and at each step joins the two closest
# points into a parent node holding their weighted average.
def HierarchicalClustering(data, n):
next_node = n
tree = {}
# tracks the node of each column in matrix
data_header = range(n)
# weights for averaging (number of leaves of each node)
weights = [1]*n
while len(data_header) > 1:
# get the two closest nodes (row and col)
        # TODO: find a better way than setting the diagonal to Inf and back to 0
np.fill_diagonal(data, float('inf')) # fill diagonal with Inf
idx = np.argmin(data)
np.fill_diagonal(data, 0.0) # put the diagonals back to 0
row, col = np.unravel_index(idx, data.shape)
#create a parent node in the tree
tree[next_node] = [data_header[row], data_header[col]]
# we can use min or weighted average
#new_node = np.min((data[:,row],data[:,col]),0)
new_node = 1.0 * (data[:,row]*weights[row] + data[:,col]*weights[col]) \
/ (weights[row] + weights[col])
# update the node row
data[:,row] = new_node
data[row,:] = new_node.T
data_header[row] = next_node
weights[row] += weights[col]
next_node += 1
# delete the node col
data = np.delete(data, col, 0) # delete row
data = np.delete(data, col, 1) # delete column
del data_header[col]
del weights[col]
return tree
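# Minimal example (hypothetical input): with n = 2 and
# data = np.matrix([[0., 1.], [1., 0.]]), the single merge joins leaves 0 and 1
# under new node 2, so the function returns {2: [0, 1]}.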
def read_input(filein):
with open(filein, 'r') as fin:
# handle first line
first_line = fin.readline().rstrip()
if first_line == 'Input':
n = int(fin.readline().rstrip())
else:
n = int(first_line)
data = []
for line in fin:
if line.rstrip() == 'Output':
break
data.append(map(float,line.rstrip().split()))
data = np.matrix(data)
return n, data
# Expects a list of lists: inner lists are written space-separated,
# one per line (i.e. separated by '\n').
def write_output(data_out, fileout):
with open(fileout, 'w') as fout:
for i in data_out:
fout.write(' '.join(map(str,i)) + '\n')
if __name__ == '__main__':
# handle input
if len(argv) != 2:
print "Error, incorrect number of arguments"
quit()
fileout = argv[1] + '.out'
n, data = read_input(argv[1])
tree = HierarchicalClustering(data, n)
data_out = []
for i in range(n, np.max(tree.keys())+1):
leaves = get_leaves(tree, i)
leaves = [x+1 for x in leaves] # start numerating at 1
data_out.append(leaves)
write_output(data_out, fileout)
| SembeiNorimaki/Bioinformatics | Bio_V/HierarchicalClustering.py | Python | mit | 3,386 |
# from https://hg.python.org/cpython/file/default/Lib/collections/__init__.py
from collections import MutableMapping
class ChainMap(MutableMapping):
""" A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
The underlying mappings are stored in a list. That list is public and can
be accessed or updated using the *maps* attribute. There is no other
state.
Lookups search the underlying mappings successively until a key is found.
In contrast, writes, updates, and deletions only operate on the first
mapping.
"""
def __init__(self, *maps):
"""Initialize a ChainMap by setting *maps* to the given mappings.
If no mappings are provided, a single empty dictionary is used.
"""
self.maps = list(maps) or [{}] # always at least one map
def __missing__(self, key):
raise KeyError(key)
def __getitem__(self, key):
for mapping in self.maps:
try:
return mapping[key] # can't use 'key in mapping' with defaultdict
except KeyError:
pass
return self.__missing__(key) # support subclasses that define __missing__
def get(self, key, default=None):
return self[key] if key in self else default
def __len__(self):
return len(set().union(*self.maps)) # reuses stored hash values if possible
def __iter__(self):
return iter(set().union(*self.maps))
def __contains__(self, key):
return any(key in m for m in self.maps)
def __bool__(self):
return any(self.maps)
def __repr__(self):
return "{0.__class__.__name__}({1})".format(
self, ", ".join(map(repr, self.maps))
)
@classmethod
def fromkeys(cls, iterable, *args):
"""Create a ChainMap with a single dict created from the iterable."""
return cls(dict.fromkeys(iterable, *args))
def copy(self):
"""New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]"""
return self.__class__(self.maps[0].copy(), *self.maps[1:])
__copy__ = copy
def new_child(self, m=None): # like Django's Context.push()
"""New ChainMap with a new map followed by all previous maps.
If no map is provided, an empty dict is used.
"""
if m is None:
m = {}
return self.__class__(m, *self.maps)
@property
def parents(self): # like Django's Context.pop()
"""New ChainMap from maps[1:]."""
return self.__class__(*self.maps[1:])
def __setitem__(self, key, value):
self.maps[0][key] = value
def __delitem__(self, key):
try:
del self.maps[0][key]
except KeyError:
raise KeyError("Key not found in the first mapping: {!r}".format(key))
def popitem(self):
"""Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty."""
try:
return self.maps[0].popitem()
except KeyError:
raise KeyError("No keys found in the first mapping.")
def pop(self, key, *args):
"""Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0]."""
try:
return self.maps[0].pop(key, *args)
except KeyError:
raise KeyError("Key not found in the first mapping: {!r}".format(key))
def clear(self):
"""Clear maps[0], leaving maps[1:] intact."""
self.maps[0].clear()
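# Usage sketch (hypothetical values):
#   cm = ChainMap({'a': 1}, {'a': 2, 'b': 3})
#   cm['a'] -> 1 (the first mapping wins), cm['b'] -> 3 (found in a later map)
#   cm['c'] = 4   # writes always go to maps[0]
#   cm.new_child().parents.maps == cm.maps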
| skoslowski/gnuradio | grc/core/utils/backports/chainmap.py | Python | gpl-3.0 | 3,530 |
from django import forms
from django.conf import settings
from django.contrib.admin.util import flatten_fieldsets, lookup_field
from django.contrib.admin.util import display_for_field, label_for_field
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.fields.related import ManyToManyRel
from django.forms.util import flatatt
from django.template.defaultfilters import capfirst
from django.utils.encoding import force_unicode, smart_unicode
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
ACTION_CHECKBOX_NAME = '_selected_action'
class ActionForm(forms.Form):
action = forms.ChoiceField(label=_('Action:'))
select_across = forms.BooleanField(label='', required=False, initial=0,
widget=forms.HiddenInput({'class': 'select-across'}))
checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)
class AdminForm(object):
def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None):
self.form, self.fieldsets = form, normalize_fieldsets(fieldsets)
self.prepopulated_fields = [{
'field': form[field_name],
'dependencies': [form[f] for f in dependencies]
} for field_name, dependencies in prepopulated_fields.items()]
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
def __iter__(self):
for name, options in self.fieldsets:
yield Fieldset(self.form, name,
readonly_fields=self.readonly_fields,
model_admin=self.model_admin,
**options
)
def first_field(self):
try:
fieldset_name, fieldset_options = self.fieldsets[0]
field_name = fieldset_options['fields'][0]
if not isinstance(field_name, basestring):
field_name = field_name[0]
return self.form[field_name]
except (KeyError, IndexError):
pass
try:
return iter(self.form).next()
except StopIteration:
return None
def _media(self):
media = self.form.media
for fs in self:
media = media + fs.media
return media
media = property(_media)
class Fieldset(object):
def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(),
description=None, model_admin=None):
self.form = form
self.name, self.fields = name, fields
self.classes = u' '.join(classes)
self.description = description
self.model_admin = model_admin
self.readonly_fields = readonly_fields
def _media(self):
if 'collapse' in self.classes:
js = ['js/jquery.min.js', 'js/jquery.init.js', 'js/collapse.min.js']
return forms.Media(js=['%s%s' % (settings.ADMIN_MEDIA_PREFIX, url) for url in js])
return forms.Media()
media = property(_media)
def __iter__(self):
for field in self.fields:
yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
class Fieldline(object):
def __init__(self, form, field, readonly_fields=None, model_admin=None):
self.form = form # A django.forms.Form instance
if not hasattr(field, "__iter__"):
self.fields = [field]
else:
self.fields = field
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
def __iter__(self):
for i, field in enumerate(self.fields):
if field in self.readonly_fields:
yield AdminReadonlyField(self.form, field, is_first=(i == 0),
model_admin=self.model_admin)
else:
yield AdminField(self.form, field, is_first=(i == 0))
def errors(self):
return mark_safe(u'\n'.join([self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields]).strip('\n'))
class AdminField(object):
def __init__(self, form, field, is_first):
self.field = form[field] # A django.forms.BoundField instance
self.is_first = is_first # Whether this field is first on the line
self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput)
def label_tag(self):
classes = []
if self.is_checkbox:
classes.append(u'vCheckboxLabel')
contents = force_unicode(escape(self.field.label))
else:
contents = force_unicode(escape(self.field.label)) + u':'
if self.field.field.required:
classes.append(u'required')
if not self.is_first:
classes.append(u'inline')
attrs = classes and {'class': u' '.join(classes)} or {}
return self.field.label_tag(contents=contents, attrs=attrs)
def errors(self):
return mark_safe(self.field.errors.as_ul())
class AdminReadonlyField(object):
def __init__(self, form, field, is_first, model_admin=None):
label = label_for_field(field, form._meta.model, model_admin)
# Make self.field look a little bit like a field. This means that
# {{ field.name }} must be a useful class name to identify the field.
# For convenience, store other field-related data here too.
if callable(field):
class_name = field.__name__ != '<lambda>' and field.__name__ or ''
else:
class_name = field
self.field = {
'name': class_name,
'label': label,
'field': field,
}
self.form = form
self.model_admin = model_admin
self.is_first = is_first
self.is_checkbox = False
self.is_readonly = True
def label_tag(self):
attrs = {}
if not self.is_first:
attrs["class"] = "inline"
label = self.field['label']
contents = capfirst(force_unicode(escape(label))) + u":"
return mark_safe('<label%(attrs)s>%(contents)s</label>' % {
"attrs": flatatt(attrs),
"contents": contents,
})
def contents(self):
from django.contrib.admin.templatetags.admin_list import _boolean_icon
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin
try:
f, attr, value = lookup_field(field, obj, model_admin)
except (AttributeError, ValueError, ObjectDoesNotExist):
result_repr = EMPTY_CHANGELIST_VALUE
else:
if f is None:
boolean = getattr(attr, "boolean", False)
if boolean:
result_repr = _boolean_icon(value)
else:
result_repr = smart_unicode(value)
if getattr(attr, "allow_tags", False):
result_repr = mark_safe(result_repr)
else:
if value is None:
result_repr = EMPTY_CHANGELIST_VALUE
elif isinstance(f.rel, ManyToManyRel):
result_repr = ", ".join(map(unicode, value.all()))
else:
result_repr = display_for_field(value, f)
return conditional_escape(result_repr)
class InlineAdminFormSet(object):
"""
A wrapper around an inline formset for use in the admin system.
"""
def __init__(self, inline, formset, fieldsets, readonly_fields=None, model_admin=None):
self.opts = inline
self.formset = formset
self.fieldsets = fieldsets
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
def __iter__(self):
for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()):
yield InlineAdminForm(self.formset, form, self.fieldsets,
self.opts.prepopulated_fields, original, self.readonly_fields,
model_admin=self.model_admin)
for form in self.formset.extra_forms:
yield InlineAdminForm(self.formset, form, self.fieldsets,
self.opts.prepopulated_fields, None, self.readonly_fields,
model_admin=self.model_admin)
yield InlineAdminForm(self.formset, self.formset.empty_form,
self.fieldsets, self.opts.prepopulated_fields, None,
self.readonly_fields, model_admin=self.model_admin)
def fields(self):
fk = getattr(self.formset, "fk", None)
for i, field in enumerate(flatten_fieldsets(self.fieldsets)):
if fk and fk.name == field:
continue
if field in self.readonly_fields:
yield {
'label': label_for_field(field, self.opts.model, self.model_admin),
'widget': {
'is_hidden': False
},
'required': False
}
else:
yield self.formset.form.base_fields[field]
def _media(self):
media = self.opts.media + self.formset.media
for fs in self:
media = media + fs.media
return media
media = property(_media)
class InlineAdminForm(AdminForm):
"""
A wrapper around an inline form for use in the admin system.
"""
def __init__(self, formset, form, fieldsets, prepopulated_fields, original,
readonly_fields=None, model_admin=None):
self.formset = formset
self.model_admin = model_admin
self.original = original
if original is not None:
self.original_content_type_id = ContentType.objects.get_for_model(original).pk
self.show_url = original and hasattr(original, 'get_absolute_url')
super(InlineAdminForm, self).__init__(form, fieldsets, prepopulated_fields,
readonly_fields, model_admin)
def __iter__(self):
for name, options in self.fieldsets:
yield InlineFieldset(self.formset, self.form, name,
self.readonly_fields, model_admin=self.model_admin, **options)
def has_auto_field(self):
if self.form._meta.model._meta.has_auto_field:
return True
# Also search any parents for an auto field.
for parent in self.form._meta.model._meta.get_parent_list():
if parent._meta.has_auto_field:
return True
return False
def field_count(self):
# tabular.html uses this function for colspan value.
num_of_fields = 0
if self.has_auto_field():
num_of_fields += 1
num_of_fields += len(self.fieldsets[0][1]["fields"])
if self.formset.can_order:
num_of_fields += 1
if self.formset.can_delete:
num_of_fields += 1
return num_of_fields
def pk_field(self):
return AdminField(self.form, self.formset._pk_field.name, False)
def fk_field(self):
fk = getattr(self.formset, "fk", None)
if fk:
return AdminField(self.form, fk.name, False)
else:
return ""
def deletion_field(self):
from django.forms.formsets import DELETION_FIELD_NAME
return AdminField(self.form, DELETION_FIELD_NAME, False)
def ordering_field(self):
from django.forms.formsets import ORDERING_FIELD_NAME
return AdminField(self.form, ORDERING_FIELD_NAME, False)
class InlineFieldset(Fieldset):
def __init__(self, formset, *args, **kwargs):
self.formset = formset
super(InlineFieldset, self).__init__(*args, **kwargs)
def __iter__(self):
fk = getattr(self.formset, "fk", None)
for field in self.fields:
if fk and fk.name == field:
continue
yield Fieldline(self.form, field, self.readonly_fields,
model_admin=self.model_admin)
class AdminErrorList(forms.util.ErrorList):
"""
Stores all errors for the form/formsets in an add/change stage view.
"""
def __init__(self, form, inline_formsets):
if form.is_bound:
self.extend(form.errors.values())
for inline_formset in inline_formsets:
self.extend(inline_formset.non_form_errors())
for errors_in_inline_form in inline_formset.errors:
self.extend(errors_in_inline_form.values())
def normalize_fieldsets(fieldsets):
"""
Make sure the keys in fieldset dictionaries are strings. Returns the
normalized data.
"""
result = []
for name, options in fieldsets:
result.append((name, normalize_dictionary(options)))
return result
def normalize_dictionary(data_dict):
"""
Converts all the keys in "data_dict" to strings. The keys must be
convertible using str().
"""
for key, value in data_dict.items():
if not isinstance(key, str):
del data_dict[key]
data_dict[str(key)] = value
return data_dict
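# Illustration (hypothetical input): normalize_dictionary({u'classes': ('wide',)})
# returns {'classes': ('wide',)} with a plain-str key, so the options can later
# be expanded safely as **kwargs (Python 2 rejects unicode keyword names).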
| rimbalinux/MSISDNArea | django/contrib/admin/helpers.py | Python | bsd-3-clause | 13,667 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Fasttree(Package):
"""FastTree infers approximately-maximum-likelihood phylogenetic
trees from alignments of nucleotide or protein sequences.
FastTree can handle alignments with up to a million of sequences
in a reasonable amount of time and memory."""
homepage = "http://www.microbesonline.org/fasttree"
url = "http://www.microbesonline.org/fasttree/FastTree-2.1.10.c"
version('2.1.10', sha256='54cb89fc1728a974a59eae7a7ee6309cdd3cddda9a4c55b700a71219fc6e926d', expand=False, url='http://www.microbesonline.org/fasttree/FastTree-2.1.10.c')
phases = ['build', 'install']
def build(self, spec, prefix):
cc = Executable(spack_cc)
cc('-O3', self.compiler.openmp_flag,
'-DOPENMP', '-finline-functions', '-funroll-loops', '-Wall',
'-oFastTreeMP', 'FastTree-' + format(spec.version.dotted) + '.c',
'-lm')
def install(self, spec, prefix):
mkdir(prefix.bin)
install('FastTreeMP', prefix.bin)
| LLNL/spack | var/spack/repos/builtin/packages/fasttree/package.py | Python | lgpl-2.1 | 1,234 |
from django.conf.urls import patterns, url
from tests.testapp.echo.views import (
SimpleCBV, SimpleDecoratedCBV,
SimpleCBVWithParam, SimpleCBVMulti
)
urlpatterns = patterns('',
url(r'^simpleview/$', 'tests.testapp.echo.views.simpleview'),
url(r'^simpleview_required/$', 'tests.testapp.echo.views.simpleview_with_required_arg'),
url(r'^simpleview/(?P<pid>\d+)/$', 'tests.testapp.echo.views.simpleview_with_param'),
url(r'^simplecbvview/$', SimpleCBV.as_view()),
url(r'^simplecbvview/(?P<pid>\d+)/$', SimpleCBVWithParam.as_view()),
url(r'^decoratedview/', 'tests.testapp.echo.views.decoratedview'),
url(r'^decoratedcbv/', SimpleDecoratedCBV.as_view()),
url(r'^cookieview/$', 'tests.testapp.echo.views.cookieview'),
url(r'^simpleview_multi/$', 'tests.testapp.echo.views.simpleview_multi'),
url(r'^simplecbv_multi/$', SimpleCBVMulti.as_view())
)
| jmcarp/webargs | tests/testapp/testapp/urls.py | Python | mit | 892 |
from sqlalchemy import Column, create_engine, orm, types
from sqlalchemy.ext.declarative import declarative_base
from django.http import Http404
from django.test import SimpleTestCase
from rest_framework.test import APIRequestFactory
from rest_witchcraft import serializers, viewsets
factory = APIRequestFactory()
engine = create_engine("sqlite://")
session = orm.scoped_session(orm.sessionmaker(bind=engine))
Base = declarative_base()
Base.query = session.query_property()
class RouterTestModel(Base):
__tablename__ = "routertest"
id = Column(types.Integer(), default=3, primary_key=True)
text = Column(types.String(length=200))
Base.metadata.create_all(engine)
class RouterTestModelSerializer(serializers.ModelSerializer):
class Meta:
model = RouterTestModel
session = session
fields = "__all__"
class TestModelRoutes(SimpleTestCase):
def test_get_model_using_queryset(self):
class RouterTestViewSet(viewsets.ModelViewSet):
queryset = RouterTestModel.query
serializer_class = RouterTestModelSerializer
model = RouterTestViewSet.get_model()
self.assertEqual(model, RouterTestModel)
def test_get_model_using_serializer(self):
class RouterTestViewSet(viewsets.ModelViewSet):
serializer_class = RouterTestModelSerializer
model = RouterTestViewSet.get_model()
self.assertEqual(model, RouterTestModel)
def test_get_model_fails_with_assert_error(self):
class RouterTestViewSet(viewsets.ModelViewSet):
pass
with self.assertRaises(AssertionError):
RouterTestViewSet.get_model()
def test_get_object_raises_404(self):
class RouterTestViewSet(viewsets.ModelViewSet):
queryset = RouterTestModel.query
serializer_class = RouterTestModelSerializer
lookup_field = "id"
lookup_url_kwarg = "pk"
viewset = RouterTestViewSet()
viewset.kwargs = {"pk": 1}
with self.assertRaises(Http404):
viewset.get_object()
| shosca/django-rest-witchcraft | tests/test_generics.py | Python | mit | 2,086 |
# $Id$
#
# pjsua Python GUI Demo
#
# Copyright (C)2013 Teluu Inc. (http://www.teluu.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import sys
if sys.version_info[0] >= 3: # Python 3
import tkinter as tk
from tkinter import ttk
else:
import Tkinter as tk
import ttk
import buddy
import call
import chatgui as gui
import endpoint as ep
import pjsua2 as pj
import re
SipUriRegex = re.compile('(sip|sips):([^:;>\@]*)@?([^:;>]*):?([^:;>]*)')
ConfIdx = 1
write=sys.stdout.write
# Simple SIP uri parser, input URI must have been validated
def ParseSipUri(sip_uri_str):
m = SipUriRegex.search(sip_uri_str)
if not m:
assert(0)
return None
scheme = m.group(1)
user = m.group(2)
host = m.group(3)
port = m.group(4)
if host == '':
host = user
user = ''
return SipUri(scheme.lower(), user, host.lower(), port)
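# Examples (hypothetical URIs):
#   ParseSipUri('sip:alice@example.com:5060') -> SipUri('sip', 'alice', 'example.com', '5060')
#   ParseSipUri('sip:example.com') has no user part, so the parsed user is
#   shifted into host: SipUri('sip', '', 'example.com', '')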
class SipUri:
def __init__(self, scheme, user, host, port):
self.scheme = scheme
self.user = user
self.host = host
self.port = port
    def __cmp__(self, sip_uri):
        if self.scheme == sip_uri.scheme and self.user == sip_uri.user and self.host == sip_uri.host:
            # don't check port, at least for now
            return 0
        return -1
    def __eq__(self, sip_uri):
        # Python 3 ignores __cmp__; 'in' and list.index() need __eq__ there
        return self.__cmp__(sip_uri) == 0
def __str__(self):
s = self.scheme + ':'
if self.user: s += self.user + '@'
s += self.host
if self.port: s+= ':' + self.port
return s
class Chat(gui.ChatObserver):
def __init__(self, app, acc, uri, call_inst=None):
self._app = app
self._acc = acc
self.title = ''
global ConfIdx
self.confIdx = ConfIdx
ConfIdx += 1
        # each participant's call/buddy instance is stored in the call list
        # and buddy list at the same index as in the participant list
self._participantList = [] # list of SipUri
self._callList = [] # list of Call
self._buddyList = [] # list of Buddy
self._gui = gui.ChatFrame(self)
self.addParticipant(uri, call_inst)
def _updateGui(self):
if self.isPrivate():
self.title = str(self._participantList[0])
else:
self.title = 'Conference #%d (%d participants)' % (self.confIdx, len(self._participantList))
self._gui.title(self.title)
self._app.updateWindowMenu()
def _getCallFromUriStr(self, uri_str, op = ''):
uri = ParseSipUri(uri_str)
if uri not in self._participantList:
write("=== "+ op +" cannot find participant with URI '" + uri_str + "'\r\n")
return None
idx = self._participantList.index(uri)
if idx < len(self._callList):
return self._callList[idx]
return None
def _getActiveMediaIdx(self, thecall):
ci = thecall.getInfo()
for mi in ci.media:
if mi.type == pj.PJMEDIA_TYPE_AUDIO and \
(mi.status != pj.PJSUA_CALL_MEDIA_NONE and \
mi.status != pj.PJSUA_CALL_MEDIA_ERROR):
return mi.index
return -1
def _getAudioMediaFromUriStr(self, uri_str):
c = self._getCallFromUriStr(uri_str)
if not c: return None
idx = self._getActiveMediaIdx(c)
if idx < 0: return None
m = c.getMedia(idx)
am = pj.AudioMedia.typecastFromMedia(m)
return am
def _sendTypingIndication(self, is_typing, sender_uri_str=''):
sender_uri = ParseSipUri(sender_uri_str) if sender_uri_str else None
type_ind_param = pj.SendTypingIndicationParam()
type_ind_param.isTyping = is_typing
for idx, p in enumerate(self._participantList):
# don't echo back to the original sender
if sender_uri and p == sender_uri:
continue
# send via call, if any, or buddy
target = None
if self._callList[idx] and self._callList[idx].connected:
target = self._callList[idx]
else:
target = self._buddyList[idx]
assert(target)
try:
target.sendTypingIndication(type_ind_param)
except:
pass
def _sendInstantMessage(self, msg, sender_uri_str=''):
sender_uri = ParseSipUri(sender_uri_str) if sender_uri_str else None
send_im_param = pj.SendInstantMessageParam()
send_im_param.content = str(msg)
for idx, p in enumerate(self._participantList):
# don't echo back to the original sender
if sender_uri and p == sender_uri:
continue
# send via call, if any, or buddy
target = None
if self._callList[idx] and self._callList[idx].connected:
target = self._callList[idx]
else:
target = self._buddyList[idx]
assert(target)
try:
target.sendInstantMessage(send_im_param)
except:
# error will be handled via Account::onInstantMessageStatus()
pass
def isPrivate(self):
return len(self._participantList) <= 1
def isUriParticipant(self, uri):
return uri in self._participantList
def registerCall(self, uri_str, call_inst):
uri = ParseSipUri(uri_str)
try:
idx = self._participantList.index(uri)
bud = self._buddyList[idx]
self._callList[idx] = call_inst
call_inst.chat = self
call_inst.peerUri = bud.cfg.uri
except:
assert(0) # idx must be found!
def showWindow(self, show_text_chat = False):
self._gui.bringToFront()
if show_text_chat:
self._gui.textShowHide(True)
def addParticipant(self, uri, call_inst=None):
# avoid duplication
if self.isUriParticipant(uri): return
uri_str = str(uri)
        # find buddy, create one if not found (e.g. for IM/typing indication);
        # it is a temporary one and not really registered to the account
bud = None
try:
bud = self._acc.findBuddy(uri_str)
except:
bud = buddy.Buddy(None)
bud_cfg = pj.BuddyConfig()
bud_cfg.uri = uri_str
bud_cfg.subscribe = False
bud.create(self._acc, bud_cfg)
bud.cfg = bud_cfg
bud.account = self._acc
# update URI from buddy URI
uri = ParseSipUri(bud.cfg.uri)
# add it
self._participantList.append(uri)
self._callList.append(call_inst)
self._buddyList.append(bud)
self._gui.addParticipant(str(uri))
self._updateGui()
def kickParticipant(self, uri):
if (not uri) or (uri not in self._participantList):
assert(0)
return
idx = self._participantList.index(uri)
del self._participantList[idx]
del self._callList[idx]
del self._buddyList[idx]
self._gui.delParticipant(str(uri))
if self._participantList:
self._updateGui()
else:
self.onCloseWindow()
def addMessage(self, from_uri_str, msg):
if from_uri_str:
# print message on GUI
msg = from_uri_str + ': ' + msg
self._gui.textAddMessage(msg)
# now relay to all participants
self._sendInstantMessage(msg, from_uri_str)
else:
self._gui.textAddMessage(msg, False)
def setTypingIndication(self, from_uri_str, is_typing):
# notify GUI
self._gui.textSetTypingIndication(from_uri_str, is_typing)
# now relay to all participants
self._sendTypingIndication(is_typing, from_uri_str)
def startCall(self):
self._gui.enableAudio()
call_param = pj.CallOpParam()
call_param.opt.audioCount = 1
call_param.opt.videoCount = 0
fails = []
for idx, p in enumerate(self._participantList):
# just skip if call is instantiated
if self._callList[idx]:
continue
uri_str = str(p)
c = call.Call(self._acc, uri_str, self)
self._callList[idx] = c
self._gui.audioUpdateState(uri_str, gui.AudioState.INITIALIZING)
try:
c.makeCall(uri_str, call_param)
except:
self._callList[idx] = None
self._gui.audioUpdateState(uri_str, gui.AudioState.FAILED)
fails.append(p)
for p in fails:
# kick participants with call failure, but spare the last (avoid zombie chat)
if not self.isPrivate():
self.kickParticipant(p)
def stopCall(self):
for idx, p in enumerate(self._participantList):
self._gui.audioUpdateState(str(p), gui.AudioState.DISCONNECTED)
c = self._callList[idx]
if c:
c.hangup(pj.CallOpParam())
def updateCallState(self, thecall, info = None):
# info is optional here, just to avoid calling getInfo() twice (in the caller and here)
if not info: info = thecall.getInfo()
if info.state < pj.PJSIP_INV_STATE_CONFIRMED:
self._gui.audioUpdateState(thecall.peerUri, gui.AudioState.INITIALIZING)
elif info.state == pj.PJSIP_INV_STATE_CONFIRMED:
self._gui.audioUpdateState(thecall.peerUri, gui.AudioState.CONNECTED)
if not self.isPrivate():
# inform peer about conference participants
conf_welcome_str = '\n---\n'
conf_welcome_str += 'Welcome to the conference, participants:\n'
conf_welcome_str += '%s (host)\n' % (self._acc.cfg.idUri)
for p in self._participantList:
conf_welcome_str += '%s\n' % (str(p))
conf_welcome_str += '---\n'
send_im_param = pj.SendInstantMessageParam()
send_im_param.content = conf_welcome_str
try:
thecall.sendInstantMessage(send_im_param)
except:
pass
# inform others, including self
msg = "[Conf manager] %s has joined" % (thecall.peerUri)
self.addMessage(None, msg)
self._sendInstantMessage(msg, thecall.peerUri)
elif info.state == pj.PJSIP_INV_STATE_DISCONNECTED:
if info.lastStatusCode/100 != 2:
self._gui.audioUpdateState(thecall.peerUri, gui.AudioState.FAILED)
else:
self._gui.audioUpdateState(thecall.peerUri, gui.AudioState.DISCONNECTED)
# reset entry in the callList
try:
idx = self._callList.index(thecall)
if idx >= 0: self._callList[idx] = None
except:
pass
self.addMessage(None, "Call to '%s' disconnected: %s" % (thecall.peerUri, info.lastReason))
# kick the disconnected participant, but the last (avoid zombie chat)
if not self.isPrivate():
self.kickParticipant(ParseSipUri(thecall.peerUri))
# inform others, including self
msg = "[Conf manager] %s has left" % (thecall.peerUri)
self.addMessage(None, msg)
self._sendInstantMessage(msg, thecall.peerUri)
def updateCallMediaState(self, thecall, info = None):
# info is optional here, just to avoid calling getInfo() twice (in the caller and here)
if not info: info = thecall.getInfo()
med_idx = self._getActiveMediaIdx(thecall)
if (med_idx < 0):
self._gui.audioSetStatsText(thecall.peerUri, 'No active media')
return
si = thecall.getStreamInfo(med_idx)
dir_str = ''
if si.dir == 0:
dir_str = 'inactive'
else:
if si.dir & pj.PJMEDIA_DIR_ENCODING:
dir_str += 'send '
if si.dir & pj.PJMEDIA_DIR_DECODING:
dir_str += 'receive '
stats_str = "Direction : %s\n" % (dir_str)
stats_str += "Audio codec : %s (%sHz)" % (si.codecName, si.codecClockRate)
self._gui.audioSetStatsText(thecall.peerUri, stats_str)
m = pj.AudioMedia.typecastFromMedia(thecall.getMedia(med_idx))
# make conference
for c in self._callList:
if c == thecall:
continue
med_idx = self._getActiveMediaIdx(c)
if med_idx < 0:
continue
mm = pj.AudioMedia.typecastFromMedia(c.getMedia(med_idx))
m.startTransmit(mm)
mm.startTransmit(m)
# ** callbacks from GUI (ChatObserver implementation) **
# Text
def onSendMessage(self, msg):
self._sendInstantMessage(msg)
def onStartTyping(self):
self._sendTypingIndication(True)
def onStopTyping(self):
self._sendTypingIndication(False)
# Audio
def onHangup(self, peer_uri_str):
c = self._getCallFromUriStr(peer_uri_str, "onHangup()")
if not c: return
call_param = pj.CallOpParam()
c.hangup(call_param)
def onHold(self, peer_uri_str):
c = self._getCallFromUriStr(peer_uri_str, "onHold()")
if not c: return
call_param = pj.CallOpParam()
c.setHold(call_param)
def onUnhold(self, peer_uri_str):
c = self._getCallFromUriStr(peer_uri_str, "onUnhold()")
if not c: return
call_param = pj.CallOpParam()
call_param.opt.audioCount = 1
call_param.opt.videoCount = 0
call_param.opt.flag |= pj.PJSUA_CALL_UNHOLD
c.reinvite(call_param)
def onRxMute(self, peer_uri_str, mute):
am = self._getAudioMediaFromUriStr(peer_uri_str)
if not am: return
if mute:
am.stopTransmit(ep.Endpoint.instance.audDevManager().getPlaybackDevMedia())
self.addMessage(None, "Muted audio from '%s'" % (peer_uri_str))
else:
am.startTransmit(ep.Endpoint.instance.audDevManager().getPlaybackDevMedia())
self.addMessage(None, "Unmuted audio from '%s'" % (peer_uri_str))
def onRxVol(self, peer_uri_str, vol_pct):
am = self._getAudioMediaFromUriStr(peer_uri_str)
if not am: return
# pjsua volume range = 0:mute, 1:no adjustment, 2:100% louder
am.adjustRxLevel(vol_pct/50.0)
self.addMessage(None, "Adjusted volume level audio from '%s'" % (peer_uri_str))
def onTxMute(self, peer_uri_str, mute):
am = self._getAudioMediaFromUriStr(peer_uri_str)
if not am: return
if mute:
ep.Endpoint.instance.audDevManager().getCaptureDevMedia().stopTransmit(am)
self.addMessage(None, "Muted audio to '%s'" % (peer_uri_str))
else:
ep.Endpoint.instance.audDevManager().getCaptureDevMedia().startTransmit(am)
self.addMessage(None, "Unmuted audio to '%s'" % (peer_uri_str))
# Chat room
def onAddParticipant(self):
buds = []
dlg = AddParticipantDlg(None, self._app, buds)
if dlg.doModal():
for bud in buds:
uri = ParseSipUri(bud.cfg.uri)
self.addParticipant(uri)
if not self.isPrivate():
self.startCall()
def onStartAudio(self):
self.startCall()
def onStopAudio(self):
self.stopCall()
def onCloseWindow(self):
self.stopCall()
        # removing the entry from the account's chat list eventually destroys this chat
if self in self._acc.chatList: self._acc.chatList.remove(self)
self._app.updateWindowMenu()
# destroy GUI
self._gui.destroy()
class AddParticipantDlg(tk.Toplevel):
"""
List of buddies
"""
def __init__(self, parent, app, bud_list):
tk.Toplevel.__init__(self, parent)
self.title('Add participants..')
self.transient(parent)
self.parent = parent
self._app = app
self.buddyList = bud_list
self.isOk = False
self.createWidgets()
def doModal(self):
if self.parent:
self.parent.wait_window(self)
else:
self.wait_window(self)
return self.isOk
def createWidgets(self):
# buddy list
list_frame = ttk.Frame(self)
list_frame.pack(side=tk.TOP, fill=tk.BOTH, expand=1, padx=20, pady=20)
#scrl = ttk.Scrollbar(self, orient=tk.VERTICAL, command=list_frame.yview)
#list_frame.config(yscrollcommand=scrl.set)
#scrl.pack(side=tk.RIGHT, fill=tk.Y)
# draw buddy list
self.buddies = []
for acc in self._app.accList:
self.buddies.append((0, acc.cfg.idUri))
for bud in acc.buddyList:
self.buddies.append((1, bud))
self.bud_var = []
for idx,(flag,bud) in enumerate(self.buddies):
self.bud_var.append(tk.IntVar())
if flag==0:
s = ttk.Separator(list_frame, orient=tk.HORIZONTAL)
s.pack(fill=tk.X)
l = tk.Label(list_frame, anchor=tk.W, text="Account '%s':" % (bud))
l.pack(fill=tk.X)
else:
c = tk.Checkbutton(list_frame, anchor=tk.W, text=bud.cfg.uri, variable=self.bud_var[idx])
c.pack(fill=tk.X)
s = ttk.Separator(list_frame, orient=tk.HORIZONTAL)
s.pack(fill=tk.X)
# Ok/cancel buttons
tail_frame = ttk.Frame(self)
tail_frame.pack(side=tk.BOTTOM, fill=tk.BOTH, expand=1)
btnOk = ttk.Button(tail_frame, text='Ok', default=tk.ACTIVE, command=self.onOk)
btnOk.pack(side=tk.LEFT, padx=20, pady=10)
btnCancel = ttk.Button(tail_frame, text='Cancel', command=self.onCancel)
btnCancel.pack(side=tk.RIGHT, padx=20, pady=10)
def onOk(self):
self.buddyList[:] = []
for idx,(flag,bud) in enumerate(self.buddies):
if not flag: continue
if self.bud_var[idx].get() and not (bud in self.buddyList):
self.buddyList.append(bud)
self.isOk = True
self.destroy()
def onCancel(self):
self.destroy()
| svn2github/pjsip | pjsip-apps/src/pygui/chat.py | Python | gpl-2.0 | 18,871 |
# -*- coding: utf-8 -*-
__author__ = """Audrey Roy Greenfeld"""
__email__ = 'aroy@alum.mit.edu'
__version__ = '0.1.0'
from _python_boilerplate import run, val
| waynenilsen/cookiecutter-python-rust-proto | python_boilerplate/__init__.py | Python | mit | 160 |
from __future__ import absolute_import
from sentry import tsdb, ratelimits
from sentry.api.serializers import serialize
from sentry.plugins.base import Plugin
from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.status import PluginStatus
class DataForwardingPlugin(Plugin):
status = PluginStatus.BETA
def configure(self, project, request):
return react_plugin_config(self, project, request)
def has_project_conf(self):
return True
def get_rate_limit(self):
# number of requests, number of seconds (window)
return (50, 1)
    def forward_event(self, event, payload):
"""
Forward the event and return a boolean if it was successful.
"""
raise NotImplementedError
def get_event_payload(self, event):
return serialize(event)
def get_plugin_type(self):
return "data-forwarding"
def post_process(self, event, **kwargs):
rl_key = u"{}:{}".format(self.conf_key, event.project.organization_id)
# limit segment to 50 requests/second
limit, window = self.get_rate_limit()
if limit and window and ratelimits.is_limited(rl_key, limit=limit, window=window):
return
payload = self.get_event_payload(event)
success = self.forward_event(event, payload)
if success is False:
# TODO(dcramer): record failure
pass
tsdb.incr(tsdb.models.project_total_forwarded, event.project.id, count=1)
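# A minimal subclass sketch; the class name, conf_key and deliver_json helper
# are hypothetical stand-ins, not part of this module:
#
#     class JsonForwardingPlugin(DataForwardingPlugin):
#         conf_key = 'json-forwarding'
#         def forward_event(self, event, payload):
#             # return a boolean so post_process can record failures
#             return deliver_json('https://example.invalid/ingest', payload)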
| mvaled/sentry | src/sentry/plugins/bases/data_forwarding.py | Python | bsd-3-clause | 1,514 |
"""Extreme Discovery Protocol."""
import dpkt
import sys
class EDP(dpkt.Packet):
__hdr__ = (
('v', 'B', 1),
('res', 'B', 0),
('hlen', 'H', 0),
('sum', 'H', 0),
('seq', 'H', 0),
('mid', 'H', 0),
('mac', '6s', '')
)
def __str__(self):
if not self.sum:
self.sum = dpkt.in_cksum(dpkt.Packet.__str__(self))
return dpkt.Packet.__str__(self)
| hexcap/dpkt | dpkt/edp.py | Python | bsd-3-clause | 442 |
from cli import *
def local_print_disassemble_line(cpu, address, type, print_cpu, name):
return 0
def local_pregs(processor, all):
return 0
| iniverno/RnR-LLC | simics-3.0-install/simics-3.0.31/amd64-linux/lib/python/clock_commands.py | Python | gpl-2.0 | 150 |
import weakref
class ExpensiveObject:
def __init__(self, name):
self.name = name
def __del__(self):
print('(Deleting {})'.format(self))
obj = ExpensiveObject('My Object')
r = weakref.ref(obj)
p = weakref.proxy(obj)
print('via obj:', obj.name)
print('via ref:', r().name)
print('via proxy:', p.name)
del obj
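# The referent is gone; the next proxy access raises ReferenceError, which is
# the point of this demonstration.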
print('via proxy:', p.name)
| jasonwee/asus-rt-n14uhp-mrtg | src/lesson_data_structures/weakref_proxy.py | Python | apache-2.0 | 366 |
"""
Created on June 9, 2015
@author: shiruilu
Adaptive Luminance Enhancement from AINDANE
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
from math import sqrt, pi
IMG_DIR = '../resources/images/'
BM_DIR = './benchmarks/'
eps = 1e-6 # eliminate divide by zero error in I_conv/I
def ale(I_bgr):
"""ale algorithm in 3.1 of the paper"""
    # grayscale conversion (equation 1); cv2 uses the same luma weights as NTSC
I = cv2.cvtColor(I_bgr, cv2.COLOR_BGR2GRAY)
In = I/255.0 #2d array, equation 2
hist = cv2.calcHist([I],[0],None,[256],[0,256])
cdf = hist.cumsum()
L = np.searchsorted(cdf, 0.1*I.shape[0]*I.shape[1], side='right')
L_as_array = np.array([L]) # L as array, for np.piecewise
z = np.piecewise( L_as_array,
[ L_as_array<=50,
L_as_array>50 and L_as_array<=150,
L_as_array>150
],
[ 0, (L-50)/100.0, 1 ]
)
In_prime = (In**(0.75*z+0.25) + (1-In)*0.4*(1-z) + In**(2-z)) /2.0
return I, In_prime
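# Worked example of the z mapping (hypothetical frame): if 10% of pixels fall at
# or below L = 100, then z = (100-50)/100 = 0.5, halfway between the maximum
# low-light lift (z = 0) and the identity curve (z = 1, where In' reduces to In).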
def ace(I, In_prime, c=5):
"""ace algo in 3.2"""
sigma = sqrt(c**2 /2)
img_freq = np.fft.fft2(I)
img_freq_shift = np.fft.fftshift(img_freq)
    # Gaussian kernel size: 3*sigma covers ~99.7% of the mass; cast to int for cv2
_gaussian_x = cv2.getGaussianKernel(int(round(sigma*3)), int(round(sigma)))
gaussian = (_gaussian_x * _gaussian_x.T) \
/ np.sum(_gaussian_x * _gaussian_x.T) # normalize
##gaussian kernel padded with 0, extend to image.shape
gaussian_freq_shift = np.fft.fftshift( np.fft.fft2(gaussian, I.shape) )
image_fm = img_freq_shift * gaussian_freq_shift # element wise multiplication
I_conv = np.real( np.fft.ifft2( np.fft.ifftshift(image_fm) ) ) # equation 6
sigma_I = np.array( [np.std(I)] ) # std of I,to an array, for np.piecewise
P = np.piecewise(sigma_I,
[ sigma_I<=3,
sigma_I>3 and sigma_I<10,
sigma_I>=10
],
[ 3, 1.0*(27-2*sigma_I)/7, 1 ]
)
E = ((I_conv+eps) / (I+eps)) ** P
S = 255 * np.power(In_prime, E)
return S
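# The contrast exponent P is continuous in the global std: sigma_I = 3 gives
# P = (27-6)/7 = 3, sigma_I = 6.5 gives P = 2, and sigma_I >= 10 gives P = 1,
# so low-contrast images receive the strongest local enhancement.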
def color_restoration(I_bgr, I, S, lambdaa):
S_restore = np.zeros(I_bgr.shape)
for j in range(3): # b,g,r
S_restore[...,j] = S * ( 1.0* I_bgr[...,j] / (I + eps) ) * lambdaa[j]
return S_restore
def _test_ale():
    ale(cv2.imread(IMG_DIR+'pdbuse.png'))
    return 0
def _test_ace():
    I_bgr = cv2.imread(IMG_DIR+'input_teaser.png')
    I, In_prime = ale(I_bgr)
    ace(I, In_prime)
    return 0
def _test_color_restoration():
    I_bgr = cv2.imread(IMG_DIR+'input_teaser.png')
    I, In_prime = ale(I_bgr)
    S = ace(I, In_prime)
    color_restoration(I_bgr, I, S, [1, 1, 1])
def _test_all():
I_bgr = cv2.imread(IMG_DIR+'input_teaser.png')
I, In_prime = ale(I_bgr)
S = ace(I, In_prime, c=240)
# restore using color_restoration (aindane paper)
S_restore = color_restoration(I_bgr, I, S, [1,1,1]) #default lambda as all 1s
S_display = cv2.cvtColor( np.clip(S_restore, 0, 255).astype('uint8')
, cv2.COLOR_BGR2RGB)
I_rgb = cv2.cvtColor( I_bgr, cv2.COLOR_BGR2RGB)
plt.imshow( np.hstack([I_rgb, S_display]) )
plt.show()
def aindane(I_bgr):
I, In_prime = ale(I_bgr)
S = ace(I, In_prime, c=240)
S_restore = color_restoration(I_bgr, I, S, [1,1,1]) #default lambda as all 1s
S_bgr = np.clip(S_restore, 0, 255).astype('uint8')
return S_bgr
if __name__ == '__main__':
    _test_all()
| shiruilu/CAPE | aindane/aindane.py | Python | mit | 3,499 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('replays', '0030_auto_20160307_2205'),
]
operations = [
migrations.AlterField(
model_name='replay',
name='playlist',
field=models.PositiveIntegerField(choices=[(12, 'RankedSoloStandard'), (2, 'UnrankedDoubles'), (11, 'RankedDoubles'), (4, 'UnrankedChaos'), (13, 'RankedStandard'), (16, 'RocketLabs'), (10, 'RankedDuels'), (3, 'UnrankedStandard'), (1, 'UnrankedDuels')], null=True, blank=True, default=0),
),
migrations.AlterUniqueTogether(
name='player',
unique_together=set([('unique_id', 'replay')]),
),
]
| rocket-league-replays/rocket-league-replays | rocket_league/apps/replays/migrations/0031_auto_20160307_2222.py | Python | gpl-3.0 | 791 |
"""
sentry.rules.conditions.tagged_event
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from collections import OrderedDict
from django import forms
from sentry.models import TagKey
from sentry.rules.conditions.base import EventCondition
class MatchType(object):
EQUAL = 'eq'
NOT_EQUAL = 'ne'
STARTS_WITH = 'sw'
ENDS_WITH = 'ew'
CONTAINS = 'co'
NOT_CONTAINS = 'nc'
MATCH_CHOICES = OrderedDict([
(MatchType.EQUAL, 'equals'),
(MatchType.NOT_EQUAL, 'does not equal'),
(MatchType.STARTS_WITH, 'starts with'),
(MatchType.ENDS_WITH, 'ends with'),
(MatchType.CONTAINS, 'contains'),
(MatchType.NOT_CONTAINS, 'does not contain'),
])
class TaggedEventForm(forms.Form):
key = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'key'}))
match = forms.ChoiceField(MATCH_CHOICES.items(), widget=forms.Select(
attrs={'style': 'width:150px'},
))
value = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'value'}))
class TaggedEventCondition(EventCondition):
form_cls = TaggedEventForm
    label = u"An event's tags match {key} {match} {value}"
def passes(self, event, state, **kwargs):
key = self.get_option('key')
match = self.get_option('match')
value = self.get_option('value')
if not (key and match and value):
return False
value = value.lower()
key = key.lower()
tags = (v.lower() for k, v in event.get_tags() if k.lower() == key or TagKey.get_standardized_key(k) == key)
if match == MatchType.EQUAL:
for t_value in tags:
if t_value == value:
return True
return False
elif match == MatchType.NOT_EQUAL:
for t_value in tags:
if t_value == value:
return False
return True
elif match == MatchType.STARTS_WITH:
for t_value in tags:
if t_value.startswith(value):
return True
return False
elif match == MatchType.ENDS_WITH:
for t_value in tags:
if t_value.endswith(value):
return True
return False
elif match == MatchType.CONTAINS:
for t_value in tags:
if value in t_value:
return True
return False
elif match == MatchType.NOT_CONTAINS:
for t_value in tags:
if value in t_value:
return False
return True
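# Worked example (hypothetical event): with key='browser', match='sw' and
# value='Chrome', an event tagged browser='chrome 75' passes, since both sides
# are lowercased before the startswith() comparison.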
| mitsuhiko/sentry | src/sentry/rules/conditions/tagged_event.py | Python | bsd-3-clause | 2,740 |
import threading
from html.parser import HTMLParser
import django
# A global lock, showing whether linkcheck is busy
update_lock = threading.Lock()
if django.VERSION <= (3, 2):
default_app_config = 'linkcheck.apps.LinkcheckConfig'
class Lister(HTMLParser):
def reset(self):
HTMLParser.reset(self)
self.urls = []
class URLLister(Lister):
def __init__(self):
self.in_a = False
self.text = ''
self.url = ''
HTMLParser.__init__(self)
def handle_starttag(self, tag, attrs):
if tag == 'a':
href = [v for k, v in attrs if k == 'href']
if href:
self.in_a = True
self.url = href[0]
elif tag == 'img' and self.in_a:
src = [v for k, v in attrs if k == 'src']
if src:
self.text += ' [image:%s] ' % src[0]
def handle_endtag(self, tag):
if tag == 'a' and self.in_a:
self.urls.append((self.text[:256], self.url))
self.in_a = False
self.text = ''
self.url = ''
def handle_data(self, data):
if self.in_a:
self.text += data
class ImageLister(Lister):
def handle_starttag(self, tag, attrs):
if tag == 'img':
src = [v for k, v in attrs if k=='src']
if src:
self.urls.append(('', src[0]))
class AnchorLister(HTMLParser):
def __init__(self):
self.names = []
HTMLParser.__init__(self)
def reset(self):
HTMLParser.reset(self)
self.names = []
def handle_starttag(self, tag, attributes):
name = [v for k, v in attributes if k=='id']
if name:
self.names.append(name[0])
if tag == 'a':
name = [v for k, v in attributes if k=='name']
if name:
self.names.append(name[0])
def parse(obj, field, parser):
html = getattr(obj,field)
if html:
parser.feed(html)
parser.close()
return parser.urls
else:
return []
def parse_urls(obj, field):
parser = URLLister()
return parse(obj, field, parser)
def parse_images(obj, field):
parser = ImageLister()
return parse(obj, field, parser)
def parse_anchors(content):
parser = AnchorLister()
if not isinstance(content, str):
content = str(content)
parser.feed(content)
parser.close()
return parser.names
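# Example (hypothetical markup): parse_anchors('<h2 id="intro">Hi</h2><a name="top"></a>')
# returns ['intro', 'top']; both id attributes and legacy <a name=...> anchors count.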
class Linklist:
html_fields = []
url_fields = []
ignore_empty = []
image_fields = []
# You can override object_filter and object_exclude in a linklist class.
# Just provide a dictionary to be used as a Django lookup filter.
# Only objects that pass the filter will be queried for links.
# This doesn't affect whether an object is regarded as a valid link target. Only as a link source.
# Example usage in your linklists.py:
# object_filter = {'active': True} - Would only check active objects for links
object_filter = None
object_exclude = None
def __get(self, name, obj, default=None):
try:
attr = getattr(self, name)
except AttributeError:
return default
if callable(attr):
return attr(obj)
return attr
@staticmethod
def extract_url_from_field(obj, field_name):
val = getattr(obj, field_name)
try:
try:
url = val.url # FileField and ImageField have a url property
except ValueError: # And it throws an exception for empty fields
url = ''
except AttributeError:
url = val # Assume the field returns the url directly
return url or '' # Coerce None to ''
def get_urls_from_field_list(self, obj, field_list):
urls = []
for field_name in field_list:
url = self.extract_url_from_field(obj, field_name)
if field_name in self.ignore_empty and not url:
continue
urls.append((field_name, '', url))
return urls
def urls(self, obj):
urls = []
# Look for HREFS in HTML fields
for field_name in self.html_fields:
urls += [(field_name, text, url) for text, url in parse_urls(obj, field_name)]
# Now add in the URL fields
urls += self.get_urls_from_field_list(obj, self.url_fields)
return urls
def images(self, obj):
urls = []
# Look for IMGs in HTML fields
for field_name in self.html_fields:
urls += [(field_name, text, url) for text, url in parse_images(obj, field_name)]
# hostname_length = settings.MEDIA_URL[:-1].rfind('/')
# url[hostname_length:]
# Now add in the image fields
urls += self.get_urls_from_field_list(obj, self.image_fields)
return urls
@classmethod
def objects(cls):
objects = cls.model.objects.all()
if cls.object_filter:
objects = objects.filter(**cls.object_filter).distinct()
if cls.object_exclude:
objects = objects.exclude(**cls.object_exclude).distinct()
return objects
def get_linklist(self, extra_filter=None):
extra_filter = extra_filter or {}
linklist = []
objects = self.objects()
if extra_filter:
objects = objects.filter(**extra_filter)
for obj in objects:
linklist.append({
'object': obj,
'urls': self.urls(obj),
'images': self.images(obj),
})
return linklist
@classmethod
def content_type(cls):
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(cls.model)
| DjangoAdminHackers/django-linkcheck | linkcheck/__init__.py | Python | bsd-3-clause | 5,795 |
"""
Example of training survival model with Dask on CPU
===================================================
"""
import xgboost as xgb
import os
from xgboost.dask import DaskDMatrix
import dask.dataframe as dd
from dask.distributed import Client
from dask.distributed import LocalCluster
def main(client):
# Load an example survival data from CSV into a Dask data frame.
# The Veterans' Administration Lung Cancer Trial
# The Statistical Analysis of Failure Time Data by Kalbfleisch J. and Prentice R (1980)
CURRENT_DIR = os.path.dirname(__file__)
df = dd.read_csv(os.path.join(CURRENT_DIR, os.pardir, 'data', 'veterans_lung_cancer.csv'))
    # DaskDMatrix acts like a normal DMatrix and works as a proxy for local
    # DMatrices scattered across the workers.
# For AFT survival, you'd need to extract the lower and upper bounds for the label
# and pass them as arguments to DaskDMatrix.
y_lower_bound = df['Survival_label_lower_bound']
y_upper_bound = df['Survival_label_upper_bound']
X = df.drop(['Survival_label_lower_bound',
'Survival_label_upper_bound'], axis=1)
dtrain = DaskDMatrix(client, X, label_lower_bound=y_lower_bound,
label_upper_bound=y_upper_bound)
# Use train method from xgboost.dask instead of xgboost. This
# distributed version of train returns a dictionary containing the
# resulting booster and evaluation history obtained from
# evaluation metrics.
params = {'verbosity': 1,
'objective': 'survival:aft',
'eval_metric': 'aft-nloglik',
'learning_rate': 0.05,
'aft_loss_distribution_scale': 1.20,
'aft_loss_distribution': 'normal',
'max_depth': 6,
'lambda': 0.01,
'alpha': 0.02}
output = xgb.dask.train(client,
params,
dtrain,
num_boost_round=100,
evals=[(dtrain, 'train')])
bst = output['booster']
history = output['history']
# you can pass output directly into `predict` too.
prediction = xgb.dask.predict(client, bst, dtrain)
print('Evaluation history: ', history)
# Uncomment the following line to save the model to the disk
# bst.save_model('survival_model.json')
return prediction
if __name__ == '__main__':
# or use other clusters for scaling
with LocalCluster(n_workers=7, threads_per_worker=4) as cluster:
with Client(cluster) as client:
main(client)
| dmlc/xgboost | demo/dask/cpu_survival.py | Python | apache-2.0 | 2,574 |
#objects.py
from buckPasser.sqlTable import SQLTable, StagedSqlTable
from buckPasser.inventory import PassiveInventory
from . import userInput
import os
from buckPasser.menus import ObjectMenu
def objectFactory(db, code, stage):
obj = Objects(db)
obj.setCode(code)
obj.readFromDB(stage)
obj.menu.title = obj.objName.value.title()
obj.menu.description = obj.shortDescrip.value
obj.menu.longDescrip = obj.descrip.value
obj.menu.cursor = "{} > ".format(obj.objName.value)
obj.inventory = PassiveInventory(db)
obj.inventory.setCode(obj.inventoryCode.value)
obj.inventory.menu.title = obj.objName.value.title()
obj.inventory.readFromDB()
obj.menu.commands.update({obj.useAlias.value.lower(): userInput.Command(func=obj.use, descrip = obj.useDescrip.value, takesArgs=False)})
return obj
class Objects(StagedSqlTable):
'''
    Objects is the base class for all interactable objects in the game. Each object defines its
    own commands, so you can type 'flush' for the toilet, 'logon' for the computer, 'read' for the
    magazine, and so on. Typing 'inspect' opens the object list menu.
'''
def __init__(self, db, title = "Object"):
StagedSqlTable.__init__(self, db)
self.code = None
self.stage = self.elementTable.addElement(title = 'Game Stage', name = 'stage', value = None, elementType = 'INT')
self.objName = self.elementTable.addElement(title = 'Objects Name', name = 'objName', value = None, elementType = 'STRING', updatable = False)
self.descrip = self.elementTable.addElement(title = 'Object Description', name = 'descrip', value = None, elementType = 'STRING', updatable = False)
self.shortDescrip = self.elementTable.addElement(title = 'Short Description', name = 'shortDescrip', value = None, elementType = 'STRING', updatable = False)
self.useAlias = self.elementTable.addElement(title = 'Object alias for the use method', name = 'useAlias', value = None, elementType = 'STRING')
self.useDescrip = self.elementTable.addElement(title = 'Object use method description', name = 'useDescrip', value = None, elementType = 'STRING')
self.usePrint = self.elementTable.addElement(title = 'What to print on use', name = 'usePrint', value = None, elementType = 'STRING')
self.inventoryCode = self.elementTable.addElement(title = 'Items in Object', name = 'inventoryCode', value = None, elementType = 'INT')
self.interactedFlag = self.elementTable.addElement(title = 'Object interacted with this stage', name = 'interactedFlag', value = None, elementType= 'INT')
self.inventory = None
self.table = 'objects'
self.codeName = 'objCode'
self.menu = ObjectMenu(db = db)
self.commands = {
'search':userInput.Command(func=self.search, descrip = "Search for items",takesArgs=False, hide = False),
'inspect': userInput.Command(func=self.inspect, takesArgs=False, hide = False),
'describe':userInput.Command(func=self.describe, takesArgs=False, hide = True)
}
self.menuCommands = {
'search':userInput.Command(func=self.search, descrip = "Search for items",takesArgs=False, hide = False),
'describe':userInput.Command(func=self.describe, takesArgs=False, hide = True)
#'use':userInput.Command(func=self.use, takesArgs=False, hide = True)
}
self.menu.commands.update(self.menuCommands)
def inspect(self):
self.menu.runMenu()
def describe(self):
userInput.printToScreen("\n{0.objName.value}\n-------------------\n{0.descrip.value}".format(self))
def search(self):
self.inventory.runMenu()
def use(self):
if self.usePrint.value in ['', 'NULL','None', None]:
userInput.printToScreen('That doesn\'t serve a purpose, just like your sorry ass.')
else:
userInput.printToScreen(self.usePrint.value)
self.interactedFlag.value = True
#userInput.printToScreen('Who would visit this website? Why does this dirt bag have it set as his home screen? Some questions are not meant to be answered.')
| snhobbs/DetectiveBuckPasser | buckPasser/objects.py | Python | unlicense | 3,863 |
from __future__ import absolute_import, division, print_function, unicode_literals
from importlib import import_module
def import_function(classpath):
# Try to import a function out of a module.
parts = classpath.split('.')
function = parts.pop()
return getattr(import_module('.'.join(parts)), function)
def importer(classpath):
try:
# Try to import a module.
return import_module(classpath)
except ImportError:
return import_function(classpath)
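# Examples (standard library): importer('os.path') imports and returns the
# module, while importer('os.path.join') falls back to import_function and
# returns the join callable itself.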
| rec/led | led/Importer.py | Python | artistic-2.0 | 498 |
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.simple_tag
def upload_js():
return mark_safe("""
<!-- The template to display files available for upload -->
<script id="template-upload" type="text/x-tmpl">
{% for (var i=0, file; file=o.files[i]; i++) { %}
<tr class="template-upload fade">
<td>
<span class="preview"></span>
</td>
<td>
<p class="name">{%=file.name%}</p>
{% if (file.error) { %}
<div><span class="label label-important">{%=locale.fileupload.error%}</span> {%=file.error%}</div>
{% } %}
</td>
<td>
<p class="size">{%=o.formatFileSize(file.size)%}</p>
{% if (!o.files.error) { %}
<div class="progress progress-striped active" role="progressbar" aria-valuemin="0" aria-valuemax="100" aria-valuenow="0"><div class="progress-bar progress-bar-success" style="width:0%;"></div></div>
{% } %}
</td>
<td>
{% if (!o.files.error && !i && !o.options.autoUpload) { %}
<button class="btn btn-primary start">
<i class="glyphicon glyphicon-upload"></i>
<span>{%=locale.fileupload.start%}</span>
</button>
{% } %}
{% if (!i) { %}
<button class="btn btn-warning cancel">
<i class="glyphicon glyphicon-ban-circle"></i>
<span>{%=locale.fileupload.cancel%}</span>
</button>
{% } %}
</td>
</tr>
{% } %}
</script>
<!-- The template to display files available for download -->
<script id="template-download" type="text/x-tmpl">
{% for (var i=0, file; file=o.files[i]; i++) { %}
<tr class="template-download fade">
<td>
<span class="preview">
{% if (file.thumbnailUrl) { %}
<a href="{%=file.url%}" title="{%=file.name%}" download="{%=file.name%}" data-gallery><img src="{%=file.thumbnailUrl%}"></a>
{% } %}
</span>
</td>
<td>
<p class="name">
<a href="{%=file.url%}" title="{%=file.name%}" download="{%=file.name%}" {%=file.thumbnailUrl?'data-gallery':''%}>{%=file.name%}</a>
</p>
{% if (file.error) { %}
<div><span class="label label-important">{%=locale.fileupload.error%}</span> {%=file.error%}</div>
{% } %}
</td>
<td>
<span class="size">{%=o.formatFileSize(file.size)%}</span>
</td>
<td>
<button class="btn btn-danger delete" data-type="{%=file.deleteType%}" data-url="{%=file.deleteUrl%}"{% if (file.deleteWithCredentials) { %} data-xhr-fields='{"withCredentials":true}'{% } %}>
<i class="glyphicon glyphicon-trash"></i>
<span>{%=locale.fileupload.destroy%}</span>
</button>
<input type="checkbox" name="delete" value="1" class="toggle">
</td>
</tr>
{% } %}
</script>
""")
| raonyguimaraes/mendelmd | individuals/templatetags/upload_tags.py | Python | bsd-3-clause | 3,137 |
import json
from contextlib import contextmanager
from pathlib import Path
from unittest import mock
import pytest
from django_dynamic_fixture import get
from readthedocs.builds.storage import BuildMediaFileSystemStorage
from readthedocs.projects.constants import MKDOCS, SPHINX
from readthedocs.projects.models import HTMLFile, Project, Feature
data_path = Path(__file__).parent.resolve() / 'data'
@pytest.mark.django_db
@pytest.mark.search
class TestParsers:
def setup_method(self):
self.feature = get(
Feature,
feature_id=Feature.INDEX_FROM_HTML_FILES,
)
self.project = get(
Project,
slug='test',
main_language_project=None,
)
self.version = self.project.versions.first()
def _mock_open(self, content):
@contextmanager
def f(*args, **kwargs):
read_mock = mock.MagicMock()
read_mock.read.return_value = content
yield read_mock
return f
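# Sketch of how this mock is consumed (illustrative, mirroring the tests
# below): setting it as side_effect makes storage.open() usable as a context
# manager that yields the canned content:
#   storage_open.side_effect = self._mock_open('payload')
#   with storage.open('any/path') as f:
#       f.read()  # -> 'payload'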
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs(self, storage_open, storage_exists):
json_file = data_path / 'mkdocs/in/search_index.json'
storage_open.side_effect = self._mock_open(
json_file.open().read()
)
storage_exists.return_value = True
self.version.documentation_type = MKDOCS
self.version.save()
index_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='index.html',
)
versions_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='versions/index.html',
)
no_title_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='no-title/index.html',
)
parsed_json = [
index_file.processed_json,
versions_file.processed_json,
no_title_file.processed_json,
]
expected_json = json.load(open(data_path / 'mkdocs/out/search_index.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs_default_theme(self, storage_open, storage_exists):
local_path = data_path / 'mkdocs/in/mkdocs-1.1/'
storage_exists.return_value = True
self.project.feature_set.add(self.feature)
self.version.documentation_type = MKDOCS
self.version.save()
parsed_json = []
all_files = [
'index.html',
'404.html',
'configuration.html',
'no-title.html',
'no-main-header.html',
]
for file_name in all_files:
file = local_path / file_name
storage_open.reset_mock()
storage_open.side_effect = self._mock_open(file.open().read())
file = get(
HTMLFile,
project=self.project,
version=self.version,
path=file_name,
)
parsed_json.append(file.processed_json)
expected_json = json.load(open(data_path / 'mkdocs/out/mkdocs-1.1.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs_gitbook_theme(self, storage_open, storage_exists):
file = data_path / 'mkdocs/in/gitbook/index.html'
storage_exists.return_value = True
self.project.feature_set.add(self.feature)
self.version.documentation_type = MKDOCS
self.version.save()
storage_open.side_effect = self._mock_open(file.open().read())
file = get(
HTMLFile,
project=self.project,
version=self.version,
path='index.html',
)
parsed_json = [file.processed_json]
expected_json = json.load(open(data_path / 'mkdocs/out/gitbook.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs_material_theme(self, storage_open, storage_exists):
file = data_path / 'mkdocs/in/material/index.html'
storage_exists.return_value = True
self.project.feature_set.add(self.feature)
self.version.documentation_type = MKDOCS
self.version.save()
storage_open.side_effect = self._mock_open(file.open().read())
file = get(
HTMLFile,
project=self.project,
version=self.version,
path='index.html',
)
parsed_json = [file.processed_json]
expected_json = json.load(open(data_path / 'mkdocs/out/material.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs_windmill_theme(self, storage_open, storage_exists):
file = data_path / 'mkdocs/in/windmill/index.html'
storage_exists.return_value = True
self.project.feature_set.add(self.feature)
self.version.documentation_type = MKDOCS
self.version.save()
storage_open.side_effect = self._mock_open(file.open().read())
file = get(
HTMLFile,
project=self.project,
version=self.version,
path='index.html',
)
parsed_json = [file.processed_json]
expected_json = json.load(open(data_path / 'mkdocs/out/windmill.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs_readthedocs_theme(self, storage_open, storage_exists):
self.project.feature_set.add(self.feature)
storage_exists.return_value = True
self.version.documentation_type = MKDOCS
self.version.save()
local_path = data_path / 'mkdocs/in/readthedocs-1.1/'
parsed_json = []
for file_name in ['index.html', '404.html', 'versions.html']:
file = local_path / file_name
storage_open.reset_mock()
storage_open.side_effect = self._mock_open(file.open().read())
file = get(
HTMLFile,
project=self.project,
version=self.version,
path=file_name,
)
parsed_json.append(file.processed_json)
expected_json = json.load(open(data_path / 'mkdocs/out/readthedocs-1.1.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_mkdocs_old_version(self, storage_open, storage_exists):
json_file = data_path / 'mkdocs/in/search_index_old.json'
storage_open.side_effect = self._mock_open(
json_file.open().read()
)
storage_exists.return_value = True
self.version.documentation_type = MKDOCS
self.version.save()
index_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='index.html',
)
versions_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='versions/index.html',
)
parsed_json = [
index_file.processed_json,
versions_file.processed_json,
]
expected_json = json.load(open(data_path / 'mkdocs/out/search_index_old.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_sphinx(self, storage_open, storage_exists):
json_file = data_path / 'sphinx/in/page.json'
html_content = data_path / 'sphinx/in/page.html'
json_content = json.load(json_file.open())
json_content['body'] = html_content.open().read()
storage_open.side_effect = self._mock_open(
json.dumps(json_content)
)
storage_exists.return_value = True
self.version.documentation_type = SPHINX
self.version.save()
page_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='page.html',
)
parsed_json = page_file.processed_json
expected_json = json.load(open(data_path / 'sphinx/out/page.json'))
assert parsed_json == expected_json
@mock.patch.object(BuildMediaFileSystemStorage, 'exists')
@mock.patch.object(BuildMediaFileSystemStorage, 'open')
def test_sphinx_page_without_title(self, storage_open, storage_exists):
json_file = data_path / 'sphinx/in/no-title.json'
html_content = data_path / 'sphinx/in/no-title.html'
json_content = json.load(json_file.open())
json_content['body'] = html_content.open().read()
storage_open.side_effect = self._mock_open(
json.dumps(json_content)
)
storage_exists.return_value = True
self.version.documentation_type = SPHINX
self.version.save()
page_file = get(
HTMLFile,
project=self.project,
version=self.version,
path='no-title.html',
)
parsed_json = page_file.processed_json
expected_json = json.load(open(data_path / 'sphinx/out/no-title.json'))
assert parsed_json == expected_json
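# Invocation sketch (assumed): the class is tagged with custom pytest markers
# above, so these tests can be selected with e.g.
#   pytest -m search readthedocs/search/tests/test_parsers.py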
| rtfd/readthedocs.org | readthedocs/search/tests/test_parsers.py | Python | mit | 9,892 |
# -*- coding: utf-8 -*-
"""
GridCal
# Copyright (C) 2022 Santiago Peñate Vera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from GridCal.Engine.Core.multi_circuit import MultiCircuit
from GridCal.Engine.Devices import *
import math
import numpy as np
import pandas as pd
from numpy import array
from pandas import DataFrame as df
from warnings import warn
# def get_transformer_impedances(Uhv, Ulv, Sn, Pcu, Pfe, I0, Usc, GR_hv1=0.5, GX_hv1=0.5):
# """
# Get the transformer series and shunt equivalent impedances from the short circuit values
# @param Uhv: Nominal voltage at the high side (kV)
# @param Ulv: Nominal voltage at the low side (kV)
# @param Sn: Nominal power (MVA)
# @param Pcu: Copper losses (kW) (Losses due to the Joule effect)
# @param Pfe: Iron-losses (kW) (Losses in the magnetic circuit)
# @param I0: No-load current (%)
# @param Usc: Short-circuit voltage (%)
# @param GR_hv1: Resistive short circuit contribution to the HV side. It is a value from 0 to 1.
# @param GX_hv1: Reactive short circuit contribution to the HV side. It is a value from 0 to 1.
# @return:
# """
#
# # Nominal impedance HV (Ohm)
# Zn_hv = Uhv * Uhv / Sn
#
# # Nominal impedance LV (Ohm)
# Zn_lv = Ulv * Ulv / Sn
#
# # Short circuit impedance (p.u.)
# zsc = Usc / 100.0
#
# # Short circuit resistance (p.u.)
# rsc = (Pcu / 1000.0) / Sn
#
# # Short circuit reactance (p.u.)
# xsc = np.sqrt(zsc * zsc - rsc * rsc)
#
# # HV resistance (p.u.)
# rcu_hv = rsc * GR_hv1
#
# # LV resistance (p.u.)
# rcu_lv = rsc * (1.0 - GR_hv1)
#
# # HV shunt reactance (p.u.)
# xs_hv = xsc * GX_hv1
#
# # LV shunt reactance (p.u.)
# xs_lv = xsc * (1.0 - GX_hv1)
#
# # Shunt resistance (p.u.)
# if Pfe > 0:
# rfe = Sn / (Pfe / 1000.0)
# else:
# rfe = 1e-20
#
# # Magnetization impedance (p.u.)
# if I0 > 0:
# zm = 1.0 / (I0 / 100.0)
# else:
# zm = 1e-20
#
# # Magnetization reactance (p.u.)
# xm = 0.0
# if rfe > zm:
# xm = 1.0 / np.sqrt(1.0 / (zm * zm) - 1.0 / (rfe * rfe))
# else:
# xm = 0.0 # the square root cannot be computed
#
# # Calculated parameters in per unit
# # leakage_impedance = rsc + 1j * xsc
# # magnetizing_impedance = rfe + 1j * xm
#
# leakage_impedance = (rcu_hv + rcu_lv) + 1j * (xs_hv + xs_lv)
# magnetizing_impedance = rfe + 1j * xm
#
# return leakage_impedance, magnetizing_impedance
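# Worked example for the commented formulas above (illustrative numbers):
# with Sn = 50 MVA, Usc = 10 % and Pcu = 200 kW,
#   zsc = 10 / 100 = 0.1 p.u.
#   rsc = (200 / 1000) / 50 = 0.004 p.u.
#   xsc = sqrt(0.1**2 - 0.004**2) ~= 0.0999 p.u.
# i.e. the short-circuit impedance is almost purely reactive, as is typical
# for power transformers.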
def read_DGS(filename):
"""
Read a DIgSILENT PowerFactory .dgs file and return its contents
Args:
filename: File name or path
Returns: Tuple (data, pos_dict), where data maps each object type name to a
DataFrame of that type's objects and pos_dict maps object IDs to their
(x, y) graphical positions
"""
###############################################################################
# Read the file
###############################################################################
f = open(filename, errors='replace')
lines = f.readlines()
f.close()
###############################################################################
# Process the data
###############################################################################
data = dict()
"""
Numpy types:
'b' boolean
'i' (signed) integer
'u' unsigned integer
'f' floating-point
'c' complex-floating point
'O' (Python) objects
'S', 'a' (byte-)string
'U' Unicode
'V' raw data (void)
"""
"""
DGS types
a
p
i
r
"""
types_dict = dict()
types_dict["a"] = "|S32"
types_dict["p"] = "|S32"
types_dict["i"] = "<i4"
types_dict["r"] = "<f4"
types_dict["d"] = "<f4"
types_dict2 = dict()
current_type = None
data_types = None
header = None
Headers = dict()
# parse the file lines
for line in lines:
if line.startswith("$$"):
line = line[2:]
chnks = line.split(";")
current_type = chnks[0]
data[current_type] = list()
# print(current_type)
# analyze types
data_types = list()
header = list()
for i in range(1, len(chnks)):
token = chnks[i].split("(")
name = token[0]
tpe = token[1][:-1]
data_types.append((name, types_dict[tpe[0]]))
header.append(name)
types_dict2[current_type] = data_types
Headers[current_type] = header
elif line.startswith("*"):
pass
elif line.startswith(" "):
if current_type is not None:
line = line.strip()
chnks = line.split(";")
chnks = ["0" if x == "" else x for x in chnks]
data[current_type].append(array(tuple(chnks)))
# format keys
for key in data.keys():
# print("Converting " + str(key))
table = array([tuple(x) for x in data[key]], dtype=types_dict2[key])
table = array([list(x) for x in table], dtype=np.object)
header = Headers[key]
data[key] = df(data=table, columns=header)
# positions dictionary
obj_id = data['IntGrf']['pDataObj'].values
x_vec = data['IntGrf']['rCenterX'].values
y_vec = data['IntGrf']['rCenterY'].values
pos_dict = dict()
for i in range(len(obj_id)):
pos_dict[obj_id[i]] = (x_vec[i], y_vec[i])
return data, pos_dict
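# Format sketch (illustrative .dgs fragment, matching the parsing rules
# above): a '$$' line declares a table and its typed columns, and indented
# lines that follow are its rows:
#   $$ElmTerm;ID(a:40);loc_name(a:40);uknom(r)
#     1;Bus 1;20.0
# read_DGS exposes this as data['ElmTerm'], a DataFrame with columns
# ['ID', 'loc_name', 'uknom'], where type code 'a' maps to strings and
# 'r' to float32.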
def data_to_grid_object(data, pos_dict, codification="utf-8") -> MultiCircuit:
"""
Turns the read data dictionary into a GridCal MultiCircuit object
Args:
data: Dictionary of data read from a DGS file
pos_dict: Dictionary of objects and their positions read from a DGS file
codification: Text encoding used to decode object names (default "utf-8")
Returns: GridCal MultiCircuit object
"""
###############################################################################
# Refactor data into classes
###############################################################################
# store tables for easy reference
'''
###############################################################################
* Line
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypLne,TypTow,TypGeo,TypCabsys
* chr_name: Characteristic Name
* dline: Parameters: Length of Line in km
* fline: Parameters: Derating Factor
* outserv: Out of Service
* pStoch: Failures: Element model in StoTyplne
'''
if "ElmLne" in data.keys():
lines = data["ElmLne"]
else:
lines = np.zeros((0, 20))
'''
###############################################################################
* Line Type
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* Ithr: Rated Short-Time (1s) Current (Conductor) in kA
* aohl_: Cable / OHL
* cline: Parameters per Length 1,2-Sequence: Capacitance C' in uF/km
* cline0: Parameters per Length Zero Sequence: Capacitance C0' in uF/km
* nlnph: Phases:1:2:3
* nneutral: Number of Neutrals:0:1
* rline: Parameters per Length 1,2-Sequence: AC-Resistance R'(20°C) in Ohm/km
* rline0: Parameters per Length Zero Sequence: AC-Resistance R0' in Ohm/km
* rtemp: Max. End Temperature in degC
* sline: Rated Current in kA
* uline: Rated Voltage in kV
* xline: Parameters per Length 1,2-Sequence: Reactance X' in Ohm/km
* xline0: Parameters per Length Zero Sequence: Reactance X0' in Ohm/km
'''
if "TypLne" in data.keys():
lines_types = data["TypLne"]
else:
lines_types = np.zeros((0, 20))
'''
###############################################################################
* 2-Winding Transformer
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypTr2
* chr_name: Characteristic Name
* sernum: Serial Number
* constr: Year of Construction
* cgnd_h: Internal Grounding Impedance, HV Side: Star Point:Connected:Not connected
* cgnd_l: Internal Grounding Impedance, LV Side: Star Point:Connected:Not connected
* i_auto: Auto Transformer
* nntap: Tap Changer 1: Tap Position
* ntrcn: Controller, Tap Changer 1: Automatic Tap Changing
* outserv: Out of Service
* ratfac: Rating Factor
'''
if "ElmTr2" in data.keys():
transformers = data["ElmTr2"]
else:
transformers = np.zeros((0, 20))
'''
###############################################################################
* 2-Winding Transformer Type
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* curmg: Magnetising Impedance: No Load Current in %
* dutap: Tap Changer 1: Additional Voltage per Tap in %
* frnom: Nominal Frequency in Hz
* manuf: Manufacturer
* nntap0: Tap Changer 1: Neutral Position
* nt2ag: Vector Group: Phase Shift in *30deg
* ntpmn: Tap Changer 1: Minimum Position
* ntpmx: Tap Changer 1: Maximum Position
* pcutr: Positive Sequence Impedance: Copper Losses in kW
* pfe: Magnetising Impedance: No Load Losses in kW
* phitr: Tap Changer 1: Phase of du in deg
* strn: Rated Power in MVA
* tap_side: Tap Changer 1: at Side:HV:LV
* tr2cn_h: Vector Group: HV-Side:Y :YN:Z :ZN:D
* tr2cn_l: Vector Group: LV-Side:Y :YN:Z :ZN:D
* uk0tr: Zero Sequence Impedance: Short-Circuit Voltage uk0 in %
* uktr: Positive Sequence Impedance: Short-Circuit Voltage uk in %
* ur0tr: Zero Sequence Impedance: SHC-Voltage (Re(uk0)) uk0r in %
* utrn_h: Rated Voltage: HV-Side in kV
* utrn_l: Rated Voltage: LV-Side in kV
* zx0hl_n: Zero Sequence Magnetising Impedance: Mag. Impedance/uk0
'''
if "TypTr2" in data.keys():
transformers_types = data["TypTr2"]
else:
transformers_types = np.zeros((0, 20))
'''
###############################################################################
* Terminal
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypBar
* chr_name: Characteristic Name
* iUsage: Usage:Busbar:Junction Node:Internal Node
* outserv: Out of Service
* phtech: Phase Technology:ABC:ABC-N:BI:BI-N:2PH:2PH-N:1PH:1PH-N:N
* uknom: Nominal Voltage: Line-Line in kV
'''
if "ElmTerm" in data.keys():
buses = data["ElmTerm"]
else:
buses = np.zeros((0, 20))
'''
###############################################################################
* Cubicle
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* obj_bus: Bus Index
* obj_id: Connected with in Elm*
'''
if "StaCubic" in data.keys():
cubicles = data["StaCubic"]
else:
cubicles = np.zeros((0, 20))
'''
###############################################################################
* General Load
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypLod,TypLodind
* chr_name: Characteristic Name
* outserv: Out of Service
* plini: Operating Point: Active Power in MW
* qlini: Operating Point: Reactive Power in Mvar
* scale0: Operating Point: Scaling Factor
'''
if "ElmLod" in data.keys():
loads = data["ElmLod"]
else:
loads = np.zeros((0, 20))
'''
###############################################################################
* External Grid
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* bustp: Bus Type:PQ:PV:SL
* cgnd: Internal Grounding Impedance: Star Point:Connected:Not connected
* iintgnd: Neutral Conductor: N-Connection:None:At terminal (ABC-N):Separate terminal
* ikssmin: Min. Values: Short-Circuit Current Ik''min in kA
* r0tx0: Max. Values Impedance Ratio: R0/X0 max.
* r0tx0min: Min. Values Impedance Ratio: R0/X0 min.
* rntxn: Max. Values: R/X Ratio (max.)
* rntxnmin: Min. Values: R/X Ratio (min.)
* snss: Max. Values: Short-Circuit Power Sk''max in MVA
* snssmin: Min. Values: Short-Circuit Power Sk''min in MVA
'''
if "ElmXnet" in data.keys():
external = data["ElmXnet"]
else:
external = np.zeros((0, 20))
'''
###############################################################################
* Grid
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* frnom: Nominal Frequency in Hz
'''
if "ElmNet" in data.keys():
grid = data["ElmNet"]
else:
grid = np.zeros((0, 20))
'''
###############################################################################
'''
if "ElmGenstat" in data.keys():
static_generators = data["ElmGenstat"]
else:
static_generators = np.zeros((0, 20))
'''
###############################################################################
* Synchronous Machine
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypSym
* chr_name: Characteristic Name
* i_mot: Generator/Motor
* iv_mode: Local Controller
* ngnum: Number of: parallel Machines
* outserv: Out of Service
* pgini: Dispatch: Active Power in MW
* q_max: Reactive Power Operational Limits: Max. in p.u.
* q_min: Reactive Power Operational Limits: Min. in p.u.
* qgini: Dispatch: Reactive Power in Mvar
* usetp: Dispatch: Voltage in p.u.
'''
if "ElmSym" in data.keys():
synchronous_machine = data["ElmSym"]
else:
synchronous_machine = np.zeros((0, 20))
'''
###############################################################################
* Synchronous Machine Type
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* cosn: Power Factor
* rstr: Stator Resistance: rstr in p.u.
* satur: For single fed short-circuit: Machine Type IEC909/IEC60909
* sgn: Nominal Apparent Power in MVA
* ugn: Nominal Voltage in kV
* xd: Synchronous Reactances: xd in p.u.
* xdsat: For single fed short-circuit: Reciprocal of short-circuit ratio (xdsat) in p.u.
* xdsss: Subtransient Reactance: saturated value xd''sat in p.u.
* xq: Synchronous Reactances: xq in p.u.
'''
if "TypSym" in data.keys():
synchronous_machine_type = data["TypSym"]
else:
synchronous_machine_type = np.zeros((0, 20))
'''
###############################################################################
* Asynchronous Machine
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypAsm*,TypAsmo*,TypAsm1*
* chr_name: Characteristic Name
* i_mot: Generator/Motor
* ngnum: Number of: parallel Machines
* outserv: Out of Service
* pgini: Dispatch: Active Power in MW
'''
if "ElmAsm" in data.keys():
asynchronous_machine = data["ElmAsm"]
else:
asynchronous_machine = np.zeros((0, 20))
'''
###############################################################################
* Synchronous Machine Type
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* i_mode: Input Mode
* aiazn: Consider Transient Parameter: Locked Rotor Current (Ilr/In) in p.u.
* amazn: Locked Rotor Torque in p.u.
* amkzn: Torque at Stalling Point in p.u.
* anend: Nominal Speed in rpm
* cosn: Rated Power Factor
* effic: Efficiency at nominal Operation in %
* frequ: Nominal Frequency in Hz
* i_cage: Rotor
* nppol: No of Pole Pairs
* pgn: Power Rating: Rated Mechanical Power in kW
* ugn: Rated Voltage in kV
* xmrtr: Rotor Leakage Reac. Xrm in p.u.
* xstr: Stator Reactance Xs in p.u.
'''
if "TypAsmo" in data.keys():
asynchronous_machine_type = data["TypAsmo"]
else:
asynchronous_machine_type = np.zeros((0, 20))
'''
###############################################################################
* Shunt/Filter
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* ctech: Technology
* fres: Design Parameter (per Step): Resonance Frequency in Hz
* greaf0: Design Parameter (per Step): Quality Factor (at fr)
* iswitch: Controller: Switchable
* ncapa: Controller: Act.No. of Step
* ncapx: Controller: Max. No. of Steps
* outserv: Out of Service
* qtotn: Design Parameter (per Step): Rated Reactive Power, L-C in Mvar
* shtype: Shunt Type
* ushnm: Nominal Voltage in kV
'''
if "ElmShnt" in data.keys():
shunts = data["ElmShnt"]
else:
shunts = np.zeros((0, 20))
'''
###############################################################################
* Breaker/Switch
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypSwitch
* chr_name: Characteristic Name
* aUsage: Switch Type
* nneutral: No. of Neutrals:0:1
* nphase: No. of Phases:1:2:3
* on_off: Closed
'''
if "ElmCoup" in data.keys():
switches = data["ElmCoup"]
else:
switches = np.zeros((0, 20))
###############################################################################
# Post process the data
###############################################################################
# put the tables that connect to a terminal in a list
classes = [lines, transformers, loads, external, static_generators, shunts,
synchronous_machine, asynchronous_machine]
# construct the terminals dictionary
'''
$$StaCubic;ID(a:40);loc_name(a:40);fold_id(p);chr_name(a:20);obj_bus(i);obj_id(p)
********************************************************************************
* Cubicle
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* obj_bus: Bus Index
* obj_id: Connected with in Elm*
********************************************************************************
'''
terminals_dict = dict() # dictionary to store the terminal IDs associated with an object ID
cub_obj_idx = cubicles['obj_id'].values
cub_term_idx = cubicles['fold_id'].values
# for i, elm_id in enumerate(cub_obj_idx):
# elm_idx = cub_term_idx[i]
# terminals_dict[elm_id] = elm_idx
ID_idx = 0
for cla in classes:
if cla.__len__() > 0:
for ID in cla['ID'].values:
idx = np.where(cubicles == ID)[0]
terminals_dict[ID] = cub_term_idx[idx]
###############################################################################
# Generate GridCal data
###############################################################################
# general values
baseMVA = 100
frequency = grid['frnom'][0]
w = 2.0 * math.pi * frequency
circuit = MultiCircuit()
####################################################################################################################
# Terminals (nodes)
####################################################################################################################
'''
********************************************************************************
* Terminal
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypBar
* iUsage: Usage:Busbar:Junction Node:Internal Node
* uknom: Nominal Voltage: Line-Line in kV
* chr_name: Characteristic Name
* outserv: Out of Service
********************************************************************************
'''
# print('Parsing terminals')
buses_dict = dict()
for i in range(len(buses)):
ID = buses['ID'][i]
x, y = pos_dict[ID]
buses_dict[ID] = i
bus_name = buses['loc_name'][i].decode(codification) # BUS_Name
vnom = buses['uknom'][i]
bus = Bus(name=bus_name, vnom=vnom, vmin=0.9, vmax=1.1, xpos=x, ypos=-y, active=True)
circuit.add_bus(bus)
####################################################################################################################
# External grids (slacks)
####################################################################################################################
'''
###############################################################################
********************************************************************************
* External Grid
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* outserv: Out of Service
* snss: Max. Values: Short-Circuit Power Sk''max in MVA
* rntxn: Max. Values: R/X Ratio (max.)
* z2tz1: Max. Values Impedance Ratio: Z2/Z1 max.
* snssmin: Min. Values: Short-Circuit Power Sk''min in MVA
* rntxnmin: Min. Values: R/X Ratio (min.)
* z2tz1min: Min. Values Impedance Ratio: Z2/Z1 min.
* chr_name: Characteristic Name
* bustp: Bus Type:PQ:PV:SL
* pgini: Operation Point: Active Power in MW
* qgini: Operation Point: Reactive Power in Mvar
* phiini: Operation Point: Angle in deg
* usetp: Operation Point: Voltage Setpoint in p.u.
********************************************************************************
'''
for i in range(len(external)):
ID = external['ID'][i]
if 'phiini' in external.columns.values:
va = external['phiini'][i]
vm = external['usetp'][i]
else:
va = 0
vm = 1
buses = terminals_dict[ID] # array with the ID of the connection Buses
bus1 = buses_dict[buses[0]] # index of the bus
bus_obj = circuit.buses[bus1]
# apply the slack values to the buses structure if the element is marked as slack
if external['bustp'].values[i] == b'SL':
# create the slack entry on buses
bus_obj.is_slack = True
# BUSES[bus1, bd.BUS_TYPE] = 3
# BUSES[bus1, bd.VA] = va
# BUSES[bus1, bd.VM] = vm
#
# # create the slack entry on generators (add the slack generator)
# gen_ = gen_line.copy()
# gen_[gd.GEN_BUS] = bus1
# gen_[gd.MBASE] = baseMVA
# gen_[gd.VG] = vm
# gen_[gd.GEN_STATUS] = 1
# gen_[gd.PG] += external['pgini'].values[i]
#
# GEN.append(gen_)
# GEN_NAMES.append(external['loc_name'][i])
elif external['bustp'].values[i] == b'PV':
if 'pgini' in external.columns.values:
p = external['pgini'].values[i]
else:
p = 0
# add a generator to the bus
gen = Generator(name=external['loc_name'][i].decode(codification),
active_power=p,
voltage_module=vm, Qmin=-9999, Qmax=9999, Snom=9999,
power_prof=None, vset_prof=None)
circuit.add_generator(bus_obj, gen)
# # mark the bus as pv
# BUSES[bus1, bd.BUS_TYPE] = 2
# BUSES[bus1, bd.VA] = 0.0
# BUSES[bus1, bd.VM] = vm
# # add the PV entry on generators
# gen_ = gen_line.copy()
# gen_[gd.GEN_BUS] = bus1
# gen_[gd.MBASE] = baseMVA
# gen_[gd.VG] = vm
# gen_[gd.GEN_STATUS] = 1
# gen_[gd.PG] += external['pgini'].values[i]
#
# GEN.append(gen_)
# GEN_NAMES.append(external['loc_name'][i])
elif external['bustp'].values[i] == b'PQ':
# Add a load to the bus
load = Load(name=external['loc_name'][i].decode(codification),
P=external['pgini'].values[i],
Q=external['qgini'].values[i])
circuit.add_load(bus_obj, load)
# BUSES[bus1, bd.BUS_TYPE] = 1
# BUSES[bus1, bd.VA] = va
# BUSES[bus1, bd.VM] = vm
# BUSES[bus1, bd.PD] += external['pgini'].values[i]
# BUSES[bus1, bd.QD] += external['qgini'].values[i]
####################################################################################################################
# Lines (branches)
####################################################################################################################
# print('Parsing lines')
if lines_types.__len__() > 0:
lines_ID = lines['ID'].values
lines_type_id = lines['typ_id'].values
line_types_ID = lines_types['ID'].values
lines_length = lines['dline'].values
if 'outserv' in lines.keys():
lines_enables = lines['outserv']
else:
lines_enables = np.ones(len(lines_ID))
lines_R = lines_types['rline'].values
lines_L = lines_types['xline'].values
lines_C = lines_types['cline'].values
lines_rate = lines_types['sline'].values
lines_voltage = lines_types['uline'].values
for i in range(len(lines)):
# line_ = branch_line.copy()
ID = lines_ID[i]
ID_Type = lines_type_id[i]
type_idx = np.where(line_types_ID == ID_Type)[0][0]
buses = terminals_dict[ID] # array with the ID of the connection Buses
bus1 = buses_dict[buses[0]]
bus2 = buses_dict[buses[1]]
bus_from = circuit.buses[bus1]
bus_to = circuit.buses[bus2]
status = lines_enables[i]
# impedances
length = np.double(lines_length[i])
R = np.double(lines_R[type_idx]) * length # Ohm
L = np.double(lines_L[type_idx]) * length # Ohm
C = np.double(lines_C[type_idx]) * length * w * 1e-6 # S (siemens)
# pass impedance to per unit
vbase = np.double(lines_voltage[type_idx]) # kV
zbase = vbase**2 / baseMVA # Ohm
ybase = 1.0 / zbase # S
r = R / zbase # pu
l = L / zbase # pu
b = C / ybase # pu
# rated power
Irated = np.double(lines_rate[type_idx]) # kA
Smax = Irated * vbase # MVA
line = Branch(bus_from=bus_from, bus_to=bus_to,
name=lines['loc_name'][i].decode(codification),
r=r,
x=l,
g=1e-20,
b=b,
rate=Smax,
tap=1,
shift_angle=0,
active=status, mttf=0, mttr=0)
circuit.add_branch(line)
# # put all in the correct column
# line_[brd.F_BUS] = bus1
# line_[brd.T_BUS] = bus2
# line_[brd.BR_R] = r
# line_[brd.BR_X] = l
# line_[brd.BR_B] = c
# line_[brd.RATE_A] = Smax
# line_[brd.BR_STATUS] = status
# BRANCHES.append(line_)
#
# name_ = lines['loc_name'][i] # line_Name
# BRANCH_NAMES.append(name_)
#
# # add edge to graph
# g.add_edge(bus1, bus2)
else:
warn('Line types are empty')
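# Worked example for the per-unit conversion above (illustrative numbers):
# a 10 km line of a 20 kV type with rline = 0.5 Ohm/km gives R = 5 Ohm;
# zbase = 20**2 / 100 = 4 Ohm, so r = 5 / 4 = 1.25 p.u., and with
# sline = 0.4 kA the rating is Smax = 0.4 * 20 = 8 MVA (per the code's
# Smax = Irated * vbase convention).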
####################################################################################################################
# Transformers (Branches)
####################################################################################################################
# print('Parsing transformers')
'''
********************************************************************************
* 2-Winding Transformer
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypTr2
* outserv: Out of Service
* nntap: Tap Changer 1: Tap Position
* sernum: Serial Number
* constr: Year of Construction
* chr_name: Characteristic Name
********************************************************************************
'''
if len(transformers_types) > 0:
'''
********************************************************************************
* 2-Winding Transformer Type
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* strn: Rated Power in MVA
* frnom: Nominal Frequency in Hz
* utrn_h: Rated Voltage: HV-Side in kV
* utrn_l: Rated Voltage: LV-Side in kV
* uktr: Positive Sequence Impedance: Short-Circuit Voltage uk in %
* pcutr: Positive Sequence Impedance: Copper Losses in kW
* uk0tr: Zero Sequence Impedance: Short-Circuit Voltage uk0 in %
* ur0tr: Zero Sequence Impedance: SHC-Voltage (Re(uk0)) uk0r in %
* tr2cn_h: Vector Group: HV-Side:Y :YN:Z :ZN:D
* tr2cn_l: Vector Group: LV-Side:Y :YN:Z :ZN:D
* nt2ag: Vector Group: Phase Shift in *30deg
* curmg: Magnetizing Impedance: No Load Current in %
* pfe: Magnetizing Impedance: No Load Losses in kW
* zx0hl_n: Zero Sequence Magnetizing Impedance: Mag. Impedance/uk0
* tap_side: Tap Changer 1: at Side:HV:LV
* dutap: Tap Changer 1: Additional Voltage per Tap in %
* phitr: Tap Changer 1: Phase of du in deg
* nntap0: Tap Changer 1: Neutral Position
* ntpmn: Tap Changer 1: Minimum Position
* ntpmx: Tap Changer 1: Maximum Position
* manuf: Manufacturer
* chr_name: Characteristic Name
********************************************************************************
'''
type_ID = transformers_types['ID'].values
HV_nominal_voltage = transformers_types['utrn_h'].values
LV_nominal_voltage = transformers_types['utrn_l'].values
Nominal_power = transformers_types['strn'].values
Copper_losses = transformers_types['pcutr'].values
Iron_losses = transformers_types['pfe'].values
No_load_current = transformers_types['curmg'].values
Short_circuit_voltage = transformers_types['uktr'].values
# GR_hv1 = transformers_types['ID']
# GX_hv1 = transformers_types['ID']
for i in range(len(transformers)):
# line_ = branch_line.copy()
ID = transformers['ID'][i]
ID_Type = transformers['typ_id'][i]
if ID_Type in type_ID:
type_idx = np.where(type_ID == ID_Type)[0][0]
buses = terminals_dict[ID] # array with the ID of the connection Buses
bus1 = buses_dict[buses[0]]
bus2 = buses_dict[buses[1]]
bus_from = circuit.buses[bus1]
bus_to = circuit.buses[bus2]
Smax = Nominal_power[type_idx]
# Uhv, Ulv, Sn, Pcu, Pfe, I0, Usc
tpe = TransformerType(hv_nominal_voltage=HV_nominal_voltage[type_idx],
lv_nominal_voltage=LV_nominal_voltage[type_idx],
nominal_power=Smax,
copper_losses=Copper_losses[type_idx],
iron_losses=Iron_losses[type_idx],
no_load_current=No_load_current[type_idx],
short_circuit_voltage=Short_circuit_voltage[type_idx],
gr_hv1=0.5,
gx_hv1=0.5)
Zs, Zsh = tpe.get_impedances(VH=HV_nominal_voltage[type_idx],
VL=LV_nominal_voltage[type_idx],
Sbase=baseMVA)
if Zsh != 0:
Ysh = 1.0 / Zsh
else:
Ysh = 0j
status = 1 - transformers['outserv'][i]
trafo = Branch(bus_from=bus_from,
bus_to=bus_to,
name=transformers['loc_name'][i].decode(codification),
r=Zs.real,
x=Zs.imag,
g=Ysh.real,
b=Ysh.imag,
rate=Smax,
tap=1.0,
shift_angle=0.0,
active=status,
mttf=0,
mttr=0,
branch_type=BranchType.Transformer)
circuit.add_branch(trafo)
else:
warn('Transformer type not found!')
else:
warn('Transformer types are empty')
####################################################################################################################
# Loads (nodes)
####################################################################################################################
'''
********************************************************************************
* General Load
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypLod,TypLodind
* chr_name: Characteristic Name
* plini: Operating Point: Active Power in MW
* qlini: Operating Point: Reactive Power in Mvar
* scale0: Operating Point: Scaling Factor
********************************************************************************
'''
# print('Parsing Loads')
if len(loads) > 0:
loads_ID = loads['ID']
loads_P = loads['plini']
loads_Q = loads['qlini']
scale = loads['scale0']
for i in range(len(loads)):
ID = loads_ID[i]
bus_idx = buses_dict[(terminals_dict[ID][0])]
bus_obj = circuit.buses[bus_idx]
p = loads_P[i] * scale[i] # in MW
q = loads_Q[i] * scale[i] # in MVA
load = Load(name=loads['loc_name'][i].decode(codification),
P=p,
Q=q)
circuit.add_load(bus_obj, load)
# BUSES[elm_idx, 2] += p
# BUSES[elm_idx, 3] += q
else:
warn('There are no loads')
####################################################################################################################
# Shunts
####################################################################################################################
'''
********************************************************************************
* Shunt/Filter
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* chr_name: Characteristic Name
* shtype: Shunt Type
* ushnm: Nominal Voltage in kV
* qcapn: Design Parameter (per Step): Rated Reactive Power, C in Mvar
* ncapx: Controller: Max. No. of Steps
* ncapa: Controller: Act.No. of Step
* outserv: Out of Service
********************************************************************************
'''
for i in range(len(shunts)):
ID = shunts['ID'][i]
buses = terminals_dict[ID] # array with the ID of the connection Buses
bus1 = buses_dict[buses[0]]
bus_obj = circuit.buses[bus1]
name = shunts['loc_name'][i].decode(codification)
if 'qcapn' in shunts.columns.values:
b = shunts['ushnm'][i] / shunts['qcapn'][i]
elif 'qtotn' in shunts.columns.values:
b = shunts['ushnm'][i] / shunts['qtotn'][i]
else:
b = 1e-20
shunt = Shunt(name=name, B=b)
circuit.add_shunt(bus_obj, shunt)
####################################################################################################################
# Static generators (Gen)
####################################################################################################################
'''
********************************************************************************
* Static Generator
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* bus1: Terminal in StaCubic
* outserv: Out of Service
* sgn: Ratings: Nominal Apparent Power in MVA
* cosn: Ratings: Power Factor
* ngnum: Number of: parallel Machines
* pgini: Dispatch: Active Power in MW
* qgini: Dispatch: Reactive Power in Mvar
* av_mode: Local Controller
* ip_ctrl: Reference Machine
********************************************************************************
'''
for i in range(len(static_generators)):
ID = static_generators['ID'][i]
buses = terminals_dict[ID] # array with the ID of the connection Buses
bus1 = buses_dict[buses[0]]
bus_obj = circuit.buses[bus1]
mode = static_generators['av_mode'][i]
num_machines = static_generators['ngnum'][i]
gen = StaticGenerator(name=static_generators['loc_name'][i].decode(codification),
P=static_generators['pgini'][i] * num_machines,
Q=static_generators['qgini'][i] * num_machines)
circuit.add_static_generator(bus_obj, gen)
####################################################################################################################
# Synchronous Machine (Gen)
####################################################################################################################
'''
********************************************************************************
* Synchronous Machine
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* typ_id: Type in TypSym
* ngnum: Number of: parallel Machines
* i_mot: Generator/Motor
* chr_name: Characteristic Name
* outserv: Out of Service
* pgini: Dispatch: Active Power in MW
* qgini: Dispatch: Reactive Power in Mvar
* usetp: Dispatch: Voltage in p.u.
* iv_mode: Mode of Local Voltage Controller
* q_min: Reactive Power Operational Limits: Min. in p.u.
* q_max: Reactive Power Operational Limits: Max. in p.u.
********************************************************************************
'''
for i in range(len(synchronous_machine)):
ID = synchronous_machine['ID'][i]
buses = terminals_dict[ID] # array with the ID of the connection Buses
bus1 = buses_dict[buses[0]]
bus_obj = circuit.buses[bus1]
num_machines = synchronous_machine['ngnum'][i]
# Get the type element
'''
********************************************************************************
* Synchronous Machine Type
*
* ID: Unique identifier for DGS file
* loc_name: Name
* fold_id: In Folder
* sgn: Nominal Apparent Power in MVA
* ugn: Nominal Voltage in kV
* cosn: Power Factor
* xd: Synchronous Reactances: xd in p.u.
* xq: Synchronous Reactances: xq in p.u.
* xdsss: Subtransient Reactance: saturated value xd''sat in p.u.
* rstr: Stator Resistance: rstr in p.u.
* xdsat: For single fed short-circuit: Reciprocal of short-circuit ratio (xdsat) in p.u.
* satur: For single fed short-circuit: Machine Type IEC909/IEC60909
********************************************************************************
'''
typ = synchronous_machine_type[synchronous_machine_type.ID == synchronous_machine['typ_id'][i]]
snom = typ['sgn'].values[0]
vnom = synchronous_machine['usetp'][i]
name = synchronous_machine['loc_name'][i].decode(codification)
gen = Generator(name=name,
active_power=synchronous_machine['pgini'][i] * num_machines,
voltage_module=vnom,
Qmin=synchronous_machine['q_min'][i] * num_machines * snom,
Qmax=synchronous_machine['q_max'][i] * num_machines * snom,
Snom=snom,
power_prof=None,
vset_prof=None)
circuit.add_generator(bus_obj, gen)
# if synchronous_machine['pgini'][i] != 0:
# # gen = StaticGenerator(name=name, power=complex(0, synchronous_machine['pgini'][i]))
# gen = Generator(name=name, active_power=synchronous_machine['pgini'][i])
# circuit.add_static_generator(bus_obj, gen)
return circuit
def dgs_to_circuit(filename) -> MultiCircuit:
data, pos_dict = read_DGS(filename)
return data_to_grid_object(data, pos_dict)
if __name__ == "__main__":
# fname = 'Example1.dgs'
fname = '/home/santi/Documentos/GitHub/GridCal/Grids_and_profiles/grids/IEEE_14.dgs'
# fname = 'PLATOS grid 3.dgs'
# fname = 'Example4.dgs'
circuit = dgs_to_circuit(fname)
circuit.compile_snapshot()
# print(BUS_NAMES, '\n')
# print(BUSES)
#
# print(BRANCH_NAMES, '\n')
# print(BRANCHES)
#
# print(GEN_NAMES, '\n')
# print(GEN)
#
# print(graph)
# print('Plotting grid...')
# nx.draw(circuit.graph)
# from matplotlib import pyplot as plt
# plt.show()
# print('done')
| SanPen/GridCal | src/GridCal/Engine/IO/dgs_parser.py | Python | lgpl-3.0 | 42,978 |
"""
Defines the function approximators
"""
import numpy as np
import theano.tensor as T
# from theano.tensor.signal import downsample
from blocks.bricks import Activation, MLP, Initializable, application, Identity
from blocks.bricks.conv import ConvolutionalActivation
from blocks.initialization import IsotropicGaussian, Constant, Orthogonal
# TODO IsotropicGaussian init will be wrong scale for some layers
class LeakyRelu(Activation):
@application(inputs=['input_'], outputs=['output'])
def apply(self, input_):
return T.switch(input_ > 0, input_, 0.05*input_)
dense_nonlinearity = LeakyRelu()
# dense_nonlinearity = Tanh()
conv_nonlinearity = LeakyRelu()
class MultiScaleConvolution(Initializable):
def __init__(self, num_channels, num_filters, spatial_width, num_scales, filter_size, downsample_method='meanout', name=""):
"""
A brick implementing a single layer in a multi-scale convolutional network.
"""
super(MultiScaleConvolution, self).__init__()
self.num_scales = num_scales
self.filter_size = filter_size
self.num_filters = num_filters
self.spatial_width = spatial_width
self.downsample_method = downsample_method
self.children = []
print "adding MultiScaleConvolution layer"
# for scale in range(self.num_scales-1, -1, -1):
for scale in range(self.num_scales):
print "scale %d"%scale
conv_layer = ConvolutionalActivation(activation=conv_nonlinearity.apply,
filter_size=(filter_size,filter_size), num_filters=num_filters,
num_channels=num_channels, image_size=(spatial_width/2**scale, spatial_width/2**scale),
# assume images are spatially smooth -- in which case output magnitude scales with
# # filter pixels rather than square root of # filter pixels, so initialize
# accordingly.
weights_init=IsotropicGaussian(std=np.sqrt(1./(num_filters))/filter_size**2),
biases_init=Constant(0), border_mode='full', name=name+"scale%d"%scale)
self.children.append(conv_layer)
def downsample(self, imgs_in, scale):
"""
Downsample an image by a factor of 2**scale
"""
imgs = imgs_in.copy()
if scale == 0:
return imgs
# if self.downsample_method == 'maxout':
# print "maxout",
# imgs_maxout = downsample.max_pool_2d(imgs.copy(), (2**scale, 2**scale), ignore_border=False)
# else:
# print "meanout",
# imgs_maxout = self.downsample_mean_pool_2d(imgs.copy(), (2**scale, 2**scale))
num_imgs = imgs.shape[0].astype('int16')
num_layers = imgs.shape[1].astype('int16')
nlx0 = imgs.shape[2].astype('int16')
nlx1 = imgs.shape[3].astype('int16')
scalepow = np.int16(2**scale)
# downsample
imgs = imgs.reshape((num_imgs, num_layers, nlx0/scalepow, scalepow, nlx1/scalepow, scalepow))
imgs = T.mean(imgs, axis=5)
imgs = T.mean(imgs, axis=3)
return imgs
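# Numpy analogue of the reshape-based mean pooling above (a sketch, not part
# of the original code), for scale=1, i.e. a factor-2 downsample:
#   a = np.arange(16.).reshape(1, 1, 4, 4)
#   a.reshape(1, 1, 2, 2, 2, 2).mean(axis=5).mean(axis=3)  # -> (1, 1, 2, 2)
# Each output pixel is the mean of a 2x2 block of the input.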
@application
def apply(self, X):
print "MultiScaleConvolution apply"
nsamp = X.shape[0].astype('int16')
Z = 0
overshoot = (self.filter_size - 1)/2
imgs_accum = 0 # accumulate the output image
for scale in range(self.num_scales-1, -1, -1):
# downsample image to appropriate scale
imgs_down = self.downsample(X, scale)
# do a convolutional transformation on it
conv_layer = self.children[scale]
# NOTE this is different than described in the paper, since each conv_layer
# includes a nonlinearity -- it's not just one nonlinearity at the end
imgs_down_conv = conv_layer.apply(imgs_down)
# crop the edge so it's the same size as the input at that scale
imgs_down_conv_cropped = imgs_down_conv[:,:,overshoot:-overshoot,overshoot:-overshoot]
imgs_accum += imgs_down_conv_cropped
if scale > 0:
# scale up by factor of 2
layer_width = self.spatial_width/2**scale
imgs_accum = imgs_accum.reshape((nsamp, self.num_filters, layer_width, 1, layer_width, 1))
imgs_accum = T.concatenate((imgs_accum, imgs_accum), axis=5)
imgs_accum = T.concatenate((imgs_accum, imgs_accum), axis=3)
imgs_accum = imgs_accum.reshape((nsamp, self.num_filters, layer_width*2, layer_width*2))
return imgs_accum/self.num_scales
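# Numpy analogue of the upsampling trick above (a sketch, not original code):
# inserting singleton axes and concatenating duplicates each pixel into a 2x2
# block, i.e. nearest-neighbour upsampling by a factor of 2:
#   b = a.reshape(n, f, w, 1, w, 1)
#   b = np.concatenate((b, b), axis=5)
#   b = np.concatenate((b, b), axis=3)
#   b = b.reshape(n, f, 2 * w, 2 * w)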
class MultiLayerConvolution(Initializable):
def __init__(self, n_layers, n_hidden, spatial_width, n_colors, n_scales, filter_size=3):
"""
A brick implementing a multi-layer convolutional network.
TODO make this multi-scale multi-layer convolution
"""
super(MultiLayerConvolution, self).__init__()
self.children = []
num_channels = n_colors
for ii in xrange(n_layers):
conv_layer = MultiScaleConvolution(num_channels, n_hidden, spatial_width, n_scales, filter_size, name="layer%d_"%ii)
self.children.append(conv_layer)
num_channels = n_hidden
@application
def apply(self, X):
Z = X
for conv_layer in self.children:
Z = conv_layer.apply(Z)
return Z
class MLP_conv_dense(Initializable):
def __init__(self, n_layers_conv, n_layers_dense_lower, n_layers_dense_upper,
n_hidden_conv, n_hidden_dense_lower, n_hidden_dense_lower_output, n_hidden_dense_upper,
spatial_width, n_colors, n_scales, n_temporal_basis):
"""
The multilayer perceptron that provides temporal weighting coefficients for mu and sigma
images. This consists of a lower segment with a convolutional MLP, and optionally with a
dense MLP in parallel. The upper segment then consists of a per-pixel dense MLP
(convolutional MLP with 1x1 kernel).
"""
super(MLP_conv_dense, self).__init__()
self.n_colors = n_colors
self.spatial_width = spatial_width
self.n_hidden_dense_lower = n_hidden_dense_lower
self.n_hidden_dense_lower_output = n_hidden_dense_lower_output
self.n_hidden_conv = n_hidden_conv
## the lower layers
self.mlp_conv = MultiLayerConvolution(n_layers_conv, n_hidden_conv, spatial_width, n_colors, n_scales)
self.children = [self.mlp_conv]
if n_hidden_dense_lower > 0 and n_layers_dense_lower > 0:
n_input = n_colors*spatial_width**2
n_output = n_hidden_dense_lower_output*spatial_width**2
self.mlp_dense_lower = MLP([dense_nonlinearity] * n_layers_conv,
[n_input] + [n_hidden_dense_lower] * (n_layers_conv-1) + [n_output],
name='MLP dense lower', weights_init=Orthogonal(), biases_init=Constant(0))
self.children.append(self.mlp_dense_lower)
else:
n_hidden_dense_lower_output = 0
## the upper layers (applied to each pixel independently)
n_output = n_colors*n_temporal_basis*2 # "*2" for both mu and sigma
self.mlp_dense_upper = MLP([dense_nonlinearity] * (n_layers_dense_upper-1) + [Identity()],
[n_hidden_conv+n_hidden_dense_lower_output] +
[n_hidden_dense_upper] * (n_layers_dense_upper-1) + [n_output],
name='MLP dense upper', weights_init=Orthogonal(), biases_init=Constant(0))
self.children.append(self.mlp_dense_upper)
@application
def apply(self, X):
"""
Take in noisy input image and output temporal coefficients for mu and sigma.
"""
Y = self.mlp_conv.apply(X)
Y = Y.dimshuffle(0,2,3,1)
if self.n_hidden_dense_lower > 0:
n_images = X.shape[0].astype('int16')
X = X.reshape((n_images, self.n_colors*self.spatial_width**2))
Y_dense = self.mlp_dense_lower.apply(X)
Y_dense = Y_dense.reshape((n_images, self.spatial_width, self.spatial_width,
self.n_hidden_dense_lower_output))
Y = T.concatenate([Y/T.sqrt(self.n_hidden_conv),
Y_dense/T.sqrt(self.n_hidden_dense_lower_output)], axis=3)
Z = self.mlp_dense_upper.apply(Y)
return Z
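# Shape-flow sketch for apply() above: X (n, colors, w, w) passes through the
# conv stack to Y (n, w, w, n_hidden_conv); the optional dense branch maps the
# flattened image to (n, w, w, n_hidden_dense_lower_output); the two are
# rescaled, concatenated on the channel axis, and fed per-pixel to the upper
# MLP, giving Z of shape (n, w, w, 2 * n_colors * n_temporal_basis).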
| alexanderganderson/Diffusion-Probabilistic-Models | regression.py | Python | mit | 8,365 |
# Copyright (c) 2010 Arek Korbik
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
import time
from twisted.python import failure
from twisted.trial import unittest
from twimp.crypto import handshake as chandshake
from twimp.crypto.handshake import CryptoHandshaker
from twimp.handshake import HandshakeFailedError
from twimp.utils import GeneratorWrapperProtocol
from helpers import StringTransport
class TestHandshakeProtocol(GeneratorWrapperProtocol):
def __init__(self, *a, **kw):
GeneratorWrapperProtocol.__init__(self, *a, **kw)
self.handshake_status = None
def handshakeSucceeded(self, init_ts, hs_delay):
self.handshake_status = 'ok'
def handshakeFailed(self):
self.handshake_status = 'fail'
class TestHandshaker(unittest.TestCase):
protocol_version = 3
handshake_length = 1536
def test_complete_exchange_with_cli_ver_9_0_115_0(self):
class GeneratedHandshaker(CryptoHandshaker):
client_compat_version = (9,0,115,0)
return self._test_complete_exchange(GeneratedHandshaker)
def test_complete_exchange_with_cli_ver_10_0_32_2(self):
class GeneratedHandshaker(CryptoHandshaker):
client_compat_version = (10,0,32,2)
return self._test_complete_exchange(GeneratedHandshaker)
def test_complete_exchange_with_cli_ver_0(self):
class GeneratedHandshaker(CryptoHandshaker):
client_compat_version = (0, 0, 0, 0)
return self._test_complete_exchange(GeneratedHandshaker)
def test_complete_exchange_with_cli_ver_too_low(self):
# here a class imitating a client whose version is lower than
# the earliest one supporting encrypted handshakes, but still
# claiming some version and performing a valid crypto
# handshake (with the newest offset scheme)
class GeneratedHandshaker(CryptoHandshaker):
def __init__(self, *a, **kw):
CryptoHandshaker.__init__(self, *a, **kw)
self.compat_version = (1, 1, 1, 1)
self._digest_offset_extractor = chandshake.schemes[-1][0]
return self._test_complete_exchange(GeneratedHandshaker)
def test_complete_exchange_with_cli_ver_too_high(self):
# here a class imitating a client whose version is higher than
# any that we know support encrypted handshakes, but performing
# a valid crypto handshake (just with the oldest offset scheme)
class GeneratedHandshaker(CryptoHandshaker):
def __init__(self, *a, **kw):
CryptoHandshaker.__init__(self, *a, **kw)
self.compat_version = (255, 0, 3, 2)
self._digest_offset_extractor = chandshake.schemes[0][0]
return self._test_complete_exchange(GeneratedHandshaker)
def test_complete_exchange_with_cli_unknown_scheme(self):
# here a class imitating a client with an offset scheme that
# we don't know about
class GeneratedHandshaker(CryptoHandshaker):
def __init__(self, *a, **kw):
CryptoHandshaker.__init__(self, *a, **kw)
self.compat_version = (255, 254, 253, 252)
x = chandshake._make_offset_extractor(22, 6, 728, 20)
self._digest_offset_extractor = x
return self._test_complete_exchange(GeneratedHandshaker,
client_failure=True,
server_failure=True)
def test_complete_exchange_with_cli_unknown_key_1(self):
# here a class imitating a client who generates digests with an
# unknown key
_fp_key = 'Haxored Adobe Flash Player 777'
_full_fp_key = _fp_key + chandshake._shared_key_suffix
class GeneratedHandshaker(CryptoHandshaker):
def select_own_key_short(self):
if self.is_client:
return _fp_key
return CryptoHandshaker.select_own_key_short(self)
def select_own_key(self):
if self.is_client:
return _full_fp_key
return CryptoHandshaker.select_own_key(self)
return self._test_complete_exchange(GeneratedHandshaker,
client_failure=True,
server_failure=True)
def test_complete_exchange_with_cli_unknown_key_2(self):
# here a class imitating a client who generates digests with a
# key that has unknown shared part
_full_fp_key = chandshake._fp_key + (' ' * 32)
class GeneratedHandshaker(CryptoHandshaker):
def select_own_key(self):
if self.is_client:
return _full_fp_key
return CryptoHandshaker.select_own_key(self)
return self._test_complete_exchange(GeneratedHandshaker,
server_failure=True)
def _test_complete_exchange(self, client_handshaker_class,
client_failure=False, server_failure=False):
tcli = StringTransport()
tsrv = StringTransport()
p_cli = TestHandshakeProtocol()
p_srv = TestHandshakeProtocol()
now = time.time()
hs_cli = client_handshaker_class(p_cli, now - 0.001, is_client=True)
hs_srv = CryptoHandshaker(p_srv, now - 0.042)
p_srv.init_handler(hs_srv.gen_handler())
p_cli.init_handler(hs_cli.gen_handler(), do_init=False)
p_srv.makeConnection(tsrv)
self.assertEquals(tsrv.value(), '')
p_cli.makeConnection(tcli)
self.assertEquals(tcli.value(), '')
p_cli.init_handler()
# self.assertEquals(len(tcli.value()), 1 + 1536)
err = p_srv.dataReceived(tcli.value())
tcli.clear()
# self.assertEquals(len(tsrv.value()), 1 + 1536 + 1536)
self.assertIdentical(err, None)
self.assertEquals(p_cli.handshake_status, None)
err = p_cli.dataReceived(tsrv.value() + '\x00')
tsrv.clear()
if client_failure:
self.assertIsInstance(err, failure.Failure)
self.assertIsInstance(err.value, HandshakeFailedError)
self.assertFalse(tcli.disconnecting, 'should not be disconnecting')
self.assertEquals(p_cli.handshake_status, 'fail')
self.assertEquals(p_srv.handshake_status, None)
else:
self.assertIdentical(err, None)
self.assertFalse(tcli.disconnecting, 'disconnecting')
self.assertEquals(p_cli.handshake_status, 'ok')
self.assertEquals(p_srv.handshake_status, None)
err = p_srv.dataReceived(tcli.value() + '\x00')
tcli.clear()
if server_failure:
self.assertIsInstance(err, failure.Failure)
self.assertIsInstance(err.value, HandshakeFailedError)
self.assertFalse(tsrv.disconnecting, 'should not be disconnecting')
self.assertEquals(p_srv.handshake_status, 'fail')
else:
self.assertIdentical(err, None)
self.assertFalse(tsrv.disconnecting, 'disconnecting')
self.assertEquals(p_srv.handshake_status, 'ok')
def test_server_side_ok(self):
t = StringTransport()
p = TestHandshakeProtocol()
now = time.time()
hs = CryptoHandshaker(p, now)
p.init_handler(hs.gen_handler())
p.makeConnection(t)
        # the server shouldn't respond before being spoken to
self.assertEquals(t.value(), '')
ver_pkt = struct.pack('B', self.protocol_version)
err = p.dataReceived(ver_pkt)
self.assertIdentical(err, None)
        # the server may wait until it receives the handshake packet
        # before responding, so we assert nothing here
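        # handshake packet layout used here: a 4-byte big-endian timestamp,
        # 4 zero bytes, then random filler up to handshake_length (this
        # matches the classic RTMP C1 packet, which the scheme appears to
        # be modelled on)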
hs_rand_data = '.' * (self.handshake_length - 8)
timestamp = 42
hs_pkt = struct.pack('>LL', timestamp, 0) + hs_rand_data
err = p.dataReceived(hs_pkt)
self.assertIdentical(err, None)
        # ... and by this point the server may have sent all its packets
        # already; our implementation should, so that's what we test
        self.assert_(len(t.value()) == 1 + self.handshake_length * 2,
                     'Unexpected response length (%d)' % len(t.value()))
self.assertEquals(struct.unpack_from('B', t.value(), 0)[0],
self.protocol_version)
srv_hs_pkt = t.value()[1:1+self.handshake_length]
hs_pkt_echo = t.value()[1+self.handshake_length:
1+self.handshake_length*2]
self.assertEquals(struct.unpack_from('>L', hs_pkt_echo, 0)[0],
timestamp)
self.assertEquals(hs_pkt_echo[8:], hs_rand_data)
        # server response is ok; let's finish the server side and
        # pretend the client starts sending more data
err = p.dataReceived(srv_hs_pkt + '\x00')
self.assertIdentical(err, None)
self.assertFalse(t.disconnecting, 'disconnecting')
self.assertEquals(p.handshake_status, 'ok')
def test_server_side_invalid_1(self):
t = StringTransport()
p = TestHandshakeProtocol()
now = time.time()
hs = CryptoHandshaker(p, now)
p.init_handler(hs.gen_handler())
p.makeConnection(t)
self.assertEquals(t.value(), '')
        # send a version different from the server's
ver_pkt = struct.pack('B', self.protocol_version + 1)
err = p.dataReceived(ver_pkt)
self.assertIsInstance(err, failure.Failure)
self.assertIsInstance(err.value, HandshakeFailedError)
self.assertFalse(t.disconnecting, 'should not be disconnecting')
self.assertEquals(p.handshake_status, 'fail')
def test_server_side_invalid_2(self):
t = StringTransport()
p = TestHandshakeProtocol()
now = time.time()
hs = CryptoHandshaker(p, now)
p.init_handler(hs.gen_handler())
p.makeConnection(t)
self.assertEquals(t.value(), '')
ver_pkt = struct.pack('B', self.protocol_version)
invalid_hs_data = (('abcdefg' * int(self.handshake_length * 2 / 7)) +
'abcdefg'[:int(self.handshake_length * 2 % 7)])
# version is ok
err = p.dataReceived(ver_pkt)
self.assertIdentical(err, None)
# ... but the rest is just (the right amount of) rubbish
err = p.dataReceived(invalid_hs_data)
self.assertIsInstance(err, failure.Failure)
self.assertIsInstance(err.value, HandshakeFailedError)
self.assertFalse(t.disconnecting, 'should not be disconnecting')
self.assertEquals(p.handshake_status, 'fail')
        # more tests, especially client-side ones, would be nice, but
        # duplicating the crypto code here seems to make little sense
| arkadini/twimp | test/test_crypto_handshake.py | Python | apache-2.0 | 11,395 |
import graphene
from graphene_django.types import DjangoObjectType
from org.models import Organization
class OrganizationType(DjangoObjectType):
class Meta:
model = Organization
class Query(object):
organizations = graphene.List(OrganizationType)
def resolve_organizations(self, info, **kwargs):
return Organization.objects.all()
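# A minimal example query this schema can answer once Query is mixed into
# the project's root schema (the 'id' field is assumed from the defaults
# graphene-django derives for a Django model):
#
#     {
#       organizations {
#         id
#       }
#     }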
| arturfelipe/condobus | backend/org/schema.py | Python | mit | 364 |
from django import template
from django.template import Context, loader
import logging
register = template.Library()
@register.inclusion_tag('licensing/setup.html')
def licensing_style(klass='openwebicon'):
'''Usage:
{% licensing_style 'foobar' %}
Render a <style> block with the (optional) class
'''
return {
'class': klass,
}
@register.simple_tag(takes_context=True)
def show_license(context, license_instance, template='licensing/symbols.html'):
'''
Usage:
{% show_license license template="path/to/template" %}
The template argument can be omitted, a default template is included.
'''
ctx = {
'symbols': license_instance.symbols,
}
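    # Newer Django versions expose the active engine via context.template;
    # when that attribute is absent, fall back to the default template
    # loader (presumably for compatibility with older Django releases).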
    t = (context.template.engine.get_template(template)
         if hasattr(context, 'template')
         else loader.get_template(template))
return t.render(Context(ctx))
| editorsnotes/django-licensing | licensing/templatetags/licensing_tags.py | Python | unlicense | 878 |
# Copyright 2012 Pinterest.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import socket
import six
from pymemcache.client import (Client, MemcacheClientError,
MemcacheUnknownCommandError)
from pymemcache.client import MemcacheIllegalInputError
from nose import tools
def get_set_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.get('key')
tools.assert_equal(result, None)
client.set(b'key', b'value', noreply=False)
result = client.get(b'key')
tools.assert_equal(result, b'value')
client.set(b'key2', b'value2', noreply=True)
result = client.get(b'key2')
tools.assert_equal(result, b'value2')
result = client.get_many([b'key', b'key2'])
tools.assert_equal(result, {b'key': b'value', b'key2': b'value2'})
result = client.get_many([])
tools.assert_equal(result, {})
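# add() stores only when the key is absent, replace() only when it already
# exists; the assertions below exercise both the success and failure sides.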
def add_replace_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.add(b'key', b'value', noreply=False)
tools.assert_equal(result, True)
result = client.get(b'key')
tools.assert_equal(result, b'value')
result = client.add(b'key', b'value2', noreply=False)
tools.assert_equal(result, False)
result = client.get(b'key')
tools.assert_equal(result, b'value')
result = client.replace(b'key1', b'value1', noreply=False)
tools.assert_equal(result, False)
result = client.get(b'key1')
tools.assert_equal(result, None)
result = client.replace(b'key', b'value2', noreply=False)
tools.assert_equal(result, True)
result = client.get(b'key')
tools.assert_equal(result, b'value2')
def append_prepend_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.append(b'key', b'value', noreply=False)
tools.assert_equal(result, False)
result = client.get(b'key')
tools.assert_equal(result, None)
result = client.set(b'key', b'value', noreply=False)
tools.assert_equal(result, True)
result = client.append(b'key', b'after', noreply=False)
tools.assert_equal(result, True)
result = client.get(b'key')
tools.assert_equal(result, b'valueafter')
result = client.prepend(b'key1', b'value', noreply=False)
tools.assert_equal(result, False)
result = client.get(b'key1')
tools.assert_equal(result, None)
result = client.prepend(b'key', b'before', noreply=False)
tools.assert_equal(result, True)
result = client.get(b'key')
tools.assert_equal(result, b'beforevalueafter')
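# cas (check-and-set) succeeds only while the token returned by gets() is
# still current; once the value changes, a retry with a stale token fails.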
def cas_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.cas(b'key', b'value', b'1', noreply=False)
tools.assert_equal(result, None)
result = client.set(b'key', b'value', noreply=False)
tools.assert_equal(result, True)
result = client.cas(b'key', b'value', b'1', noreply=False)
tools.assert_equal(result, False)
result, cas = client.gets(b'key')
tools.assert_equal(result, b'value')
result = client.cas(b'key', b'value1', cas, noreply=False)
tools.assert_equal(result, True)
result = client.cas(b'key', b'value2', cas, noreply=False)
tools.assert_equal(result, False)
def gets_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.gets(b'key')
tools.assert_equal(result, (None, None))
result = client.set(b'key', b'value', noreply=False)
tools.assert_equal(result, True)
result = client.gets(b'key')
tools.assert_equal(result[0], b'value')
def delete_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.delete(b'key', noreply=False)
tools.assert_equal(result, False)
result = client.get(b'key')
tools.assert_equal(result, None)
result = client.set(b'key', b'value', noreply=False)
tools.assert_equal(result, True)
result = client.delete(b'key', noreply=False)
tools.assert_equal(result, True)
result = client.get(b'key')
tools.assert_equal(result, None)
def incr_decr_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
result = client.incr(b'key', 1, noreply=False)
tools.assert_equal(result, None)
result = client.set(b'key', b'0', noreply=False)
tools.assert_equal(result, True)
result = client.incr(b'key', 1, noreply=False)
tools.assert_equal(result, 1)
def _bad_int():
client.incr(b'key', b'foobar')
tools.assert_raises(MemcacheClientError, _bad_int)
result = client.decr(b'key1', 1, noreply=False)
tools.assert_equal(result, None)
result = client.decr(b'key', 1, noreply=False)
tools.assert_equal(result, 0)
result = client.get(b'key')
tools.assert_equal(result, b'0')
def misc_test(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
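# Round-trips a dict through a JSON serializer/deserializer pair; the flags
# value (1) tags JSON-encoded entries so the deserializer knows to decode them.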
def test_serialization_deserialization(host, port, socket_module):
def _ser(key, value):
return json.dumps(value).encode('ascii'), 1
def _des(key, value, flags):
if flags == 1:
return json.loads(value.decode('ascii'))
return value
client = Client((host, port), serializer=_ser, deserializer=_des,
socket_module=socket_module)
client.flush_all()
value = {'a': 'b', 'c': ['d']}
client.set(b'key', value)
result = client.get(b'key')
tools.assert_equal(result, value)
def test_errors(host, port, socket_module):
client = Client((host, port), socket_module=socket_module)
client.flush_all()
def _key_with_ws():
client.set(b'key with spaces', b'value', noreply=False)
tools.assert_raises(MemcacheIllegalInputError, _key_with_ws)
def _key_too_long():
client.set(b'x' * 1024, b'value', noreply=False)
tools.assert_raises(MemcacheClientError, _key_too_long)
def _unicode_key_in_set():
client.set(six.u('\u0FFF'), b'value', noreply=False)
tools.assert_raises(MemcacheClientError, _unicode_key_in_set)
def _unicode_key_in_get():
client.get(six.u('\u0FFF'))
tools.assert_raises(MemcacheClientError, _unicode_key_in_get)
def _unicode_value_in_set():
client.set(b'key', six.u('\u0FFF'), noreply=False)
tools.assert_raises(MemcacheClientError, _unicode_value_in_set)
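# Entry point for running the suite against a live memcached instance; an
# illustrative invocation (host and port are whatever your server uses):
#
#     python pymemcache/test/integration.py -s localhost -p 11211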
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--server',
metavar='HOST',
required=True)
parser.add_argument('-p', '--port',
metavar='PORT',
type=int,
required=True)
args = parser.parse_args()
socket_modules = [socket]
try:
from gevent import socket as gevent_socket
except ImportError:
print("Skipping gevent (not installed)")
else:
socket_modules.append(gevent_socket)
for socket_module in socket_modules:
print("Testing with socket module:", socket_module.__name__)
print("Testing get and set...")
get_set_test(args.server, args.port, socket_module)
print("Testing add and replace...")
add_replace_test(args.server, args.port, socket_module)
print("Testing append and prepend...")
append_prepend_test(args.server, args.port, socket_module)
print("Testing cas...")
cas_test(args.server, args.port, socket_module)
print("Testing gets...")
gets_test(args.server, args.port, socket_module)
print("Testing delete...")
delete_test(args.server, args.port, socket_module)
print("Testing incr and decr...")
incr_decr_test(args.server, args.port, socket_module)
print("Testing flush_all...")
misc_test(args.server, args.port, socket_module)
print("Testing serialization and deserialization...")
test_serialization_deserialization(args.server, args.port,
socket_module)
print("Testing error cases...")
test_errors(args.server, args.port, socket_module)
if __name__ == '__main__':
main()
| sontek/pymemcache | pymemcache/test/integration.py | Python | apache-2.0 | 9,000 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import time
import pytest
import numpy as np
from . import FitsTestCase
from .test_table import comparerecords
from ....io import fits
class TestGroupsFunctions(FitsTestCase):
def test_open(self):
with fits.open(self.data('random_groups.fits')) as hdul:
assert isinstance(hdul[0], fits.GroupsHDU)
naxes = (3, 1, 128, 1, 1)
parameters = ['UU', 'VV', 'WW', 'BASELINE', 'DATE']
info = [(0, 'PRIMARY', 1, 'GroupsHDU', 147, naxes, 'float32',
'3 Groups 5 Parameters')]
assert hdul.info(output=False) == info
ghdu = hdul[0]
assert ghdu.parnames == parameters
assert list(ghdu.data.dtype.names) == parameters + ['DATA']
assert isinstance(ghdu.data, fits.GroupData)
# The data should be equal to the number of groups
assert ghdu.header['GCOUNT'] == len(ghdu.data)
assert ghdu.data.data.shape == (len(ghdu.data),) + naxes[::-1]
assert ghdu.data.parnames == parameters
assert isinstance(ghdu.data[0], fits.Group)
assert len(ghdu.data[0]) == len(parameters) + 1
assert ghdu.data[0].data.shape == naxes[::-1]
assert ghdu.data[0].parnames == parameters
def test_open_groups_in_update_mode(self):
"""
Test that opening a file containing a groups HDU in update mode and
then immediately closing it does not result in any unnecessary file
modifications.
Similar to
test_image.TestImageFunctions.test_open_scaled_in_update_mode().
"""
# Copy the original file before making any possible changes to it
self.copy_file('random_groups.fits')
mtime = os.stat(self.temp('random_groups.fits')).st_mtime
time.sleep(1)
fits.open(self.temp('random_groups.fits'), mode='update',
memmap=False).close()
# Ensure that no changes were made to the file merely by immediately
# opening and closing it.
assert mtime == os.stat(self.temp('random_groups.fits')).st_mtime
def test_random_groups_data_update(self):
"""
Regression test for https://github.com/astropy/astropy/issues/3730 and
for https://github.com/spacetelescope/PyFITS/issues/102
"""
self.copy_file('random_groups.fits')
with fits.open(self.temp('random_groups.fits'), mode='update') as h:
h[0].data['UU'] = 0.42
with fits.open(self.temp('random_groups.fits'), mode='update') as h:
assert np.all(h[0].data['UU'] == 0.42)
def test_parnames_round_trip(self):
"""
Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/130
Ensures that opening a random groups file in update mode or writing it
to a new file does not cause any change to the parameter names.
"""
        # Because this test tries to update the random_groups.fits file, let's
        # make a copy of it first (so that the file doesn't actually get
        # modified in the off chance that the test fails)
self.copy_file('random_groups.fits')
parameters = ['UU', 'VV', 'WW', 'BASELINE', 'DATE']
with fits.open(self.temp('random_groups.fits'), mode='update') as h:
assert h[0].parnames == parameters
h.flush()
# Open again just in read-only mode to ensure the parnames didn't
# change
with fits.open(self.temp('random_groups.fits')) as h:
assert h[0].parnames == parameters
h.writeto(self.temp('test.fits'))
with fits.open(self.temp('test.fits')) as h:
assert h[0].parnames == parameters
def test_groupdata_slice(self):
"""
A simple test to ensure that slicing GroupData returns a new, smaller
GroupData object, as is the case with a normal FITS_rec. This is a
        regression test for an as-yet-unreported issue where slicing
GroupData returned a single Group record.
"""
with fits.open(self.data('random_groups.fits')) as hdul:
s = hdul[0].data[1:]
assert isinstance(s, fits.GroupData)
assert len(s) == 2
assert hdul[0].data.parnames == s.parnames
def test_group_slice(self):
"""
Tests basic slicing a single group record.
"""
# A very basic slice test
with fits.open(self.data('random_groups.fits')) as hdul:
g = hdul[0].data[0]
s = g[2:4]
assert len(s) == 2
assert s[0] == g[2]
assert s[-1] == g[-3]
s = g[::-1]
assert len(s) == 6
assert (s[0] == g[-1]).all()
assert s[-1] == g[0]
s = g[::2]
assert len(s) == 3
assert s[0] == g[0]
assert s[1] == g[2]
assert s[2] == g[4]
def test_create_groupdata(self):
"""
Basic test for creating GroupData from scratch.
"""
imdata = np.arange(100.0)
imdata.shape = (10, 1, 1, 2, 5)
pdata1 = np.arange(10, dtype=np.float32) + 0.1
pdata2 = 42.0
x = fits.hdu.groups.GroupData(imdata, parnames=['abc', 'xyz'],
pardata=[pdata1, pdata2], bitpix=-32)
assert x.parnames == ['abc', 'xyz']
assert (x.par('abc') == pdata1).all()
assert (x.par('xyz') == ([pdata2] * len(x))).all()
assert (x.data == imdata).all()
# Test putting the data into a GroupsHDU and round-tripping it
ghdu = fits.GroupsHDU(data=x)
ghdu.writeto(self.temp('test.fits'))
with fits.open(self.temp('test.fits')) as h:
hdr = h[0].header
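            # random-groups HDUs are flagged by NAXIS1 == 0; the remaining
            # NAXISn keywords mirror the per-group data shape in reverse,
            # FITS axis order being the reverse of numpy's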
assert hdr['GCOUNT'] == 10
assert hdr['PCOUNT'] == 2
assert hdr['NAXIS'] == 5
assert hdr['NAXIS1'] == 0
assert hdr['NAXIS2'] == 5
assert hdr['NAXIS3'] == 2
assert hdr['NAXIS4'] == 1
assert hdr['NAXIS5'] == 1
assert h[0].data.parnames == ['abc', 'xyz']
assert comparerecords(h[0].data, x)
def test_duplicate_parameter(self):
"""
Tests support for multiple parameters of the same name, and ensures
that the data in duplicate parameters are returned as a single summed
value.
"""
imdata = np.arange(100.0)
imdata.shape = (10, 1, 1, 2, 5)
pdata1 = np.arange(10, dtype=np.float32) + 1
pdata2 = 42.0
x = fits.hdu.groups.GroupData(imdata, parnames=['abc', 'xyz', 'abc'],
pardata=[pdata1, pdata2, pdata1],
bitpix=-32)
assert x.parnames == ['abc', 'xyz', 'abc']
assert (x.par('abc') == pdata1 * 2).all()
assert x[0].par('abc') == 2
# Test setting a parameter
x[0].setpar(0, 2)
assert x[0].par('abc') == 3
pytest.raises(ValueError, x[0].setpar, 'abc', 2)
x[0].setpar('abc', (2, 3))
assert x[0].par('abc') == 5
assert x.par('abc')[0] == 5
assert (x.par('abc')[1:] == pdata1[1:] * 2).all()
# Test round-trip
ghdu = fits.GroupsHDU(data=x)
ghdu.writeto(self.temp('test.fits'))
with fits.open(self.temp('test.fits')) as h:
hdr = h[0].header
assert hdr['PCOUNT'] == 3
assert hdr['PTYPE1'] == 'abc'
assert hdr['PTYPE2'] == 'xyz'
assert hdr['PTYPE3'] == 'abc'
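            # the duplicate parameter survives the round trip: the record
            # dtype disambiguates the second 'abc' as '_abc', while
            # par('abc') still sums across the duplicated columns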
assert x.parnames == ['abc', 'xyz', 'abc']
assert x.dtype.names == ('abc', 'xyz', '_abc', 'DATA')
assert x.par('abc')[0] == 5
assert (x.par('abc')[1:] == pdata1[1:] * 2).all()
| funbaker/astropy | astropy/io/fits/tests/test_groups.py | Python | bsd-3-clause | 7,893 |