partition
stringclasses 3
values | func_name
stringlengths 1
134
| docstring
stringlengths 1
46.9k
| path
stringlengths 4
223
| original_string
stringlengths 75
104k
| code
stringlengths 75
104k
| docstring_tokens
listlengths 1
1.97k
| repo
stringlengths 7
55
| language
stringclasses 1
value | url
stringlengths 87
315
| code_tokens
listlengths 19
28.4k
| sha
stringlengths 40
40
|
|---|---|---|---|---|---|---|---|---|---|---|---|
test
|
noise_despike
|
Apply standard deviation filter to remove anomalous values.
Parameters
----------
win : int
The window used to calculate rolling statistics.
nlim : float
The number of standard deviations above the rolling
mean above which data are considered outliers.
Returns
-------
None
|
latools/processes/despiking.py
|
def noise_despike(sig, win=3, nlim=24., maxiter=4):
"""
Apply standard deviation filter to remove anomalous values.
Parameters
----------
win : int
The window used to calculate rolling statistics.
nlim : float
The number of standard deviations above the rolling
mean above which data are considered outliers.
Returns
-------
None
"""
if win % 2 != 1:
win += 1 # win must be odd
kernel = np.ones(win) / win # make convolution kernel
over = np.ones(len(sig), dtype=bool) # initialize bool array
# pad edges to avoid edge-effects
npad = int((win - 1) / 2)
over[:npad] = False
over[-npad:] = False
# set up monitoring
nloops = 0
# do the despiking
while any(over) and (nloops < maxiter):
rmean = np.convolve(sig, kernel, 'valid') # mean by convolution
rstd = rmean**0.5 # std = sqrt(signal), because count statistics
# identify where signal > mean + std * nlim (OR signa < mean - std *
# nlim)
# | (sig[npad:-npad] < rmean - nlim * rstd)
over[npad:-npad] = (sig[npad:-npad] > rmean + nlim * rstd)
# if any are over, replace them with mean of neighbours
if any(over):
# replace with values either side
# sig[over] = sig[np.roll(over, -1) | np.roll(over, 1)].reshape((sum(over), 2)).mean(1)
# replace with mean
sig[npad:-npad][over[npad:-npad]] = rmean[over[npad:-npad]]
nloops += 1
# repeat until no more removed.
return sig
|
def noise_despike(sig, win=3, nlim=24., maxiter=4):
"""
Apply standard deviation filter to remove anomalous values.
Parameters
----------
win : int
The window used to calculate rolling statistics.
nlim : float
The number of standard deviations above the rolling
mean above which data are considered outliers.
Returns
-------
None
"""
if win % 2 != 1:
win += 1 # win must be odd
kernel = np.ones(win) / win # make convolution kernel
over = np.ones(len(sig), dtype=bool) # initialize bool array
# pad edges to avoid edge-effects
npad = int((win - 1) / 2)
over[:npad] = False
over[-npad:] = False
# set up monitoring
nloops = 0
# do the despiking
while any(over) and (nloops < maxiter):
rmean = np.convolve(sig, kernel, 'valid') # mean by convolution
rstd = rmean**0.5 # std = sqrt(signal), because count statistics
# identify where signal > mean + std * nlim (OR signa < mean - std *
# nlim)
# | (sig[npad:-npad] < rmean - nlim * rstd)
over[npad:-npad] = (sig[npad:-npad] > rmean + nlim * rstd)
# if any are over, replace them with mean of neighbours
if any(over):
# replace with values either side
# sig[over] = sig[np.roll(over, -1) | np.roll(over, 1)].reshape((sum(over), 2)).mean(1)
# replace with mean
sig[npad:-npad][over[npad:-npad]] = rmean[over[npad:-npad]]
nloops += 1
# repeat until no more removed.
return sig
|
[
"Apply",
"standard",
"deviation",
"filter",
"to",
"remove",
"anomalous",
"values",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/processes/despiking.py#L4-L47
|
[
"def",
"noise_despike",
"(",
"sig",
",",
"win",
"=",
"3",
",",
"nlim",
"=",
"24.",
",",
"maxiter",
"=",
"4",
")",
":",
"if",
"win",
"%",
"2",
"!=",
"1",
":",
"win",
"+=",
"1",
"# win must be odd",
"kernel",
"=",
"np",
".",
"ones",
"(",
"win",
")",
"/",
"win",
"# make convolution kernel",
"over",
"=",
"np",
".",
"ones",
"(",
"len",
"(",
"sig",
")",
",",
"dtype",
"=",
"bool",
")",
"# initialize bool array",
"# pad edges to avoid edge-effects",
"npad",
"=",
"int",
"(",
"(",
"win",
"-",
"1",
")",
"/",
"2",
")",
"over",
"[",
":",
"npad",
"]",
"=",
"False",
"over",
"[",
"-",
"npad",
":",
"]",
"=",
"False",
"# set up monitoring",
"nloops",
"=",
"0",
"# do the despiking",
"while",
"any",
"(",
"over",
")",
"and",
"(",
"nloops",
"<",
"maxiter",
")",
":",
"rmean",
"=",
"np",
".",
"convolve",
"(",
"sig",
",",
"kernel",
",",
"'valid'",
")",
"# mean by convolution",
"rstd",
"=",
"rmean",
"**",
"0.5",
"# std = sqrt(signal), because count statistics",
"# identify where signal > mean + std * nlim (OR signa < mean - std *",
"# nlim)",
"# | (sig[npad:-npad] < rmean - nlim * rstd)",
"over",
"[",
"npad",
":",
"-",
"npad",
"]",
"=",
"(",
"sig",
"[",
"npad",
":",
"-",
"npad",
"]",
">",
"rmean",
"+",
"nlim",
"*",
"rstd",
")",
"# if any are over, replace them with mean of neighbours",
"if",
"any",
"(",
"over",
")",
":",
"# replace with values either side",
"# sig[over] = sig[np.roll(over, -1) | np.roll(over, 1)].reshape((sum(over), 2)).mean(1)",
"# replace with mean",
"sig",
"[",
"npad",
":",
"-",
"npad",
"]",
"[",
"over",
"[",
"npad",
":",
"-",
"npad",
"]",
"]",
"=",
"rmean",
"[",
"over",
"[",
"npad",
":",
"-",
"npad",
"]",
"]",
"nloops",
"+=",
"1",
"# repeat until no more removed.",
"return",
"sig"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
expdecay_despike
|
Apply exponential decay filter to remove physically impossible data based on instrumental washout.
The filter is re-applied until no more points are removed, or maxiter is reached.
Parameters
----------
exponent : float
Exponent used in filter
tstep : float
The time increment between data points.
maxiter : int
The maximum number of times the filter should be applied.
Returns
-------
None
|
latools/processes/despiking.py
|
def expdecay_despike(sig, expdecay_coef, tstep, maxiter=3):
"""
Apply exponential decay filter to remove physically impossible data based on instrumental washout.
The filter is re-applied until no more points are removed, or maxiter is reached.
Parameters
----------
exponent : float
Exponent used in filter
tstep : float
The time increment between data points.
maxiter : int
The maximum number of times the filter should be applied.
Returns
-------
None
"""
# determine rms noise of data
noise = np.std(sig[:5]) # initially, calculated based on first 5 points
# expand the selection up to 50 points, unless it dramatically increases
# the std (i.e. catches the 'laser on' region)
for i in [10, 20, 30, 50]:
inoise = np.std(sig[:i])
if inoise < 1.5 * noise:
noise = inoise
rms_noise3 = 3 * noise
i = 0
f = True
while (i < maxiter) and f:
# calculate low and high possibles values based on exponential decay
siglo = np.roll(sig * np.exp(tstep * expdecay_coef), 1)
sighi = np.roll(sig * np.exp(-tstep * expdecay_coef), -1)
# identify points that are outside these limits, beyond what might be explained
# by noise in the data
loind = (sig < siglo - rms_noise3) & (sig < np.roll(sig, -1) - rms_noise3)
hiind = (sig > sighi + rms_noise3) & (sig > np.roll(sig, 1) + rms_noise3)
# replace all such values with their preceding
sig[loind] = sig[np.roll(loind, -1)]
sig[hiind] = sig[np.roll(hiind, -1)]
f = any(np.concatenate([loind, hiind]))
i += 1
return sig
|
def expdecay_despike(sig, expdecay_coef, tstep, maxiter=3):
"""
Apply exponential decay filter to remove physically impossible data based on instrumental washout.
The filter is re-applied until no more points are removed, or maxiter is reached.
Parameters
----------
exponent : float
Exponent used in filter
tstep : float
The time increment between data points.
maxiter : int
The maximum number of times the filter should be applied.
Returns
-------
None
"""
# determine rms noise of data
noise = np.std(sig[:5]) # initially, calculated based on first 5 points
# expand the selection up to 50 points, unless it dramatically increases
# the std (i.e. catches the 'laser on' region)
for i in [10, 20, 30, 50]:
inoise = np.std(sig[:i])
if inoise < 1.5 * noise:
noise = inoise
rms_noise3 = 3 * noise
i = 0
f = True
while (i < maxiter) and f:
# calculate low and high possibles values based on exponential decay
siglo = np.roll(sig * np.exp(tstep * expdecay_coef), 1)
sighi = np.roll(sig * np.exp(-tstep * expdecay_coef), -1)
# identify points that are outside these limits, beyond what might be explained
# by noise in the data
loind = (sig < siglo - rms_noise3) & (sig < np.roll(sig, -1) - rms_noise3)
hiind = (sig > sighi + rms_noise3) & (sig > np.roll(sig, 1) + rms_noise3)
# replace all such values with their preceding
sig[loind] = sig[np.roll(loind, -1)]
sig[hiind] = sig[np.roll(hiind, -1)]
f = any(np.concatenate([loind, hiind]))
i += 1
return sig
|
[
"Apply",
"exponential",
"decay",
"filter",
"to",
"remove",
"physically",
"impossible",
"data",
"based",
"on",
"instrumental",
"washout",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/processes/despiking.py#L50-L98
|
[
"def",
"expdecay_despike",
"(",
"sig",
",",
"expdecay_coef",
",",
"tstep",
",",
"maxiter",
"=",
"3",
")",
":",
"# determine rms noise of data",
"noise",
"=",
"np",
".",
"std",
"(",
"sig",
"[",
":",
"5",
"]",
")",
"# initially, calculated based on first 5 points",
"# expand the selection up to 50 points, unless it dramatically increases ",
"# the std (i.e. catches the 'laser on' region)",
"for",
"i",
"in",
"[",
"10",
",",
"20",
",",
"30",
",",
"50",
"]",
":",
"inoise",
"=",
"np",
".",
"std",
"(",
"sig",
"[",
":",
"i",
"]",
")",
"if",
"inoise",
"<",
"1.5",
"*",
"noise",
":",
"noise",
"=",
"inoise",
"rms_noise3",
"=",
"3",
"*",
"noise",
"i",
"=",
"0",
"f",
"=",
"True",
"while",
"(",
"i",
"<",
"maxiter",
")",
"and",
"f",
":",
"# calculate low and high possibles values based on exponential decay",
"siglo",
"=",
"np",
".",
"roll",
"(",
"sig",
"*",
"np",
".",
"exp",
"(",
"tstep",
"*",
"expdecay_coef",
")",
",",
"1",
")",
"sighi",
"=",
"np",
".",
"roll",
"(",
"sig",
"*",
"np",
".",
"exp",
"(",
"-",
"tstep",
"*",
"expdecay_coef",
")",
",",
"-",
"1",
")",
"# identify points that are outside these limits, beyond what might be explained",
"# by noise in the data",
"loind",
"=",
"(",
"sig",
"<",
"siglo",
"-",
"rms_noise3",
")",
"&",
"(",
"sig",
"<",
"np",
".",
"roll",
"(",
"sig",
",",
"-",
"1",
")",
"-",
"rms_noise3",
")",
"hiind",
"=",
"(",
"sig",
">",
"sighi",
"+",
"rms_noise3",
")",
"&",
"(",
"sig",
">",
"np",
".",
"roll",
"(",
"sig",
",",
"1",
")",
"+",
"rms_noise3",
")",
"# replace all such values with their preceding",
"sig",
"[",
"loind",
"]",
"=",
"sig",
"[",
"np",
".",
"roll",
"(",
"loind",
",",
"-",
"1",
")",
"]",
"sig",
"[",
"hiind",
"]",
"=",
"sig",
"[",
"np",
".",
"roll",
"(",
"hiind",
",",
"-",
"1",
")",
"]",
"f",
"=",
"any",
"(",
"np",
".",
"concatenate",
"(",
"[",
"loind",
",",
"hiind",
"]",
")",
")",
"i",
"+=",
"1",
"return",
"sig"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
Eff._flat_map
|
**f** must return the same stack type as **self.value** has.
Iterates over the effects, sequences the inner instance
successively to the top and joins with the outer instance.
Example:
List(Right(Just(1))) => List(Right(Just(List(Right(Just(5))))))
=> List(List(Right(Just(Right(Just(5))))))
=> List(Right(Just(Right(Just(5)))))
=> List(Right(Right(Just(Just(5)))))
=> List(Right(Just(Just(5))))
=> List(Right(Just(5)))
Note: IO works only as outermost effect, as it cannot sequence
|
amino/eff.py
|
def _flat_map(self, f: Callable):
''' **f** must return the same stack type as **self.value** has.
Iterates over the effects, sequences the inner instance
successively to the top and joins with the outer instance.
Example:
List(Right(Just(1))) => List(Right(Just(List(Right(Just(5))))))
=> List(List(Right(Just(Right(Just(5))))))
=> List(Right(Just(Right(Just(5)))))
=> List(Right(Right(Just(Just(5)))))
=> List(Right(Just(Just(5))))
=> List(Right(Just(5)))
Note: IO works only as outermost effect, as it cannot sequence
'''
index = List.range(self.depth + 1)
g = index.fold_left(f)(lambda z, i: lambda a: a.map(z))
nested = g(self.value)
def sequence_level(z, depth, tpe):
nesting = lambda z, i: lambda a: a.map(z).sequence(tpe)
lifter = List.range(depth).fold_left(I)(nesting)
return z // lifter
def sequence_type(z, data):
return lambda a: sequence_level(a, *data).map(z)
h = self.all_effects.reversed.with_index.fold_left(I)(sequence_type)
return h(nested)
|
def _flat_map(self, f: Callable):
''' **f** must return the same stack type as **self.value** has.
Iterates over the effects, sequences the inner instance
successively to the top and joins with the outer instance.
Example:
List(Right(Just(1))) => List(Right(Just(List(Right(Just(5))))))
=> List(List(Right(Just(Right(Just(5))))))
=> List(Right(Just(Right(Just(5)))))
=> List(Right(Right(Just(Just(5)))))
=> List(Right(Just(Just(5))))
=> List(Right(Just(5)))
Note: IO works only as outermost effect, as it cannot sequence
'''
index = List.range(self.depth + 1)
g = index.fold_left(f)(lambda z, i: lambda a: a.map(z))
nested = g(self.value)
def sequence_level(z, depth, tpe):
nesting = lambda z, i: lambda a: a.map(z).sequence(tpe)
lifter = List.range(depth).fold_left(I)(nesting)
return z // lifter
def sequence_type(z, data):
return lambda a: sequence_level(a, *data).map(z)
h = self.all_effects.reversed.with_index.fold_left(I)(sequence_type)
return h(nested)
|
[
"**",
"f",
"**",
"must",
"return",
"the",
"same",
"stack",
"type",
"as",
"**",
"self",
".",
"value",
"**",
"has",
".",
"Iterates",
"over",
"the",
"effects",
"sequences",
"the",
"inner",
"instance",
"successively",
"to",
"the",
"top",
"and",
"joins",
"with",
"the",
"outer",
"instance",
".",
"Example",
":",
"List",
"(",
"Right",
"(",
"Just",
"(",
"1",
")))",
"=",
">",
"List",
"(",
"Right",
"(",
"Just",
"(",
"List",
"(",
"Right",
"(",
"Just",
"(",
"5",
"))))))",
"=",
">",
"List",
"(",
"List",
"(",
"Right",
"(",
"Just",
"(",
"Right",
"(",
"Just",
"(",
"5",
"))))))",
"=",
">",
"List",
"(",
"Right",
"(",
"Just",
"(",
"Right",
"(",
"Just",
"(",
"5",
")))))",
"=",
">",
"List",
"(",
"Right",
"(",
"Right",
"(",
"Just",
"(",
"Just",
"(",
"5",
")))))",
"=",
">",
"List",
"(",
"Right",
"(",
"Just",
"(",
"Just",
"(",
"5",
"))))",
"=",
">",
"List",
"(",
"Right",
"(",
"Just",
"(",
"5",
")))",
"Note",
":",
"IO",
"works",
"only",
"as",
"outermost",
"effect",
"as",
"it",
"cannot",
"sequence"
] |
tek/amino
|
python
|
https://github.com/tek/amino/blob/51b314933e047a45587a24ecff02c836706d27ff/amino/eff.py#L45-L68
|
[
"def",
"_flat_map",
"(",
"self",
",",
"f",
":",
"Callable",
")",
":",
"index",
"=",
"List",
".",
"range",
"(",
"self",
".",
"depth",
"+",
"1",
")",
"g",
"=",
"index",
".",
"fold_left",
"(",
"f",
")",
"(",
"lambda",
"z",
",",
"i",
":",
"lambda",
"a",
":",
"a",
".",
"map",
"(",
"z",
")",
")",
"nested",
"=",
"g",
"(",
"self",
".",
"value",
")",
"def",
"sequence_level",
"(",
"z",
",",
"depth",
",",
"tpe",
")",
":",
"nesting",
"=",
"lambda",
"z",
",",
"i",
":",
"lambda",
"a",
":",
"a",
".",
"map",
"(",
"z",
")",
".",
"sequence",
"(",
"tpe",
")",
"lifter",
"=",
"List",
".",
"range",
"(",
"depth",
")",
".",
"fold_left",
"(",
"I",
")",
"(",
"nesting",
")",
"return",
"z",
"//",
"lifter",
"def",
"sequence_type",
"(",
"z",
",",
"data",
")",
":",
"return",
"lambda",
"a",
":",
"sequence_level",
"(",
"a",
",",
"*",
"data",
")",
".",
"map",
"(",
"z",
")",
"h",
"=",
"self",
".",
"all_effects",
".",
"reversed",
".",
"with_index",
".",
"fold_left",
"(",
"I",
")",
"(",
"sequence_type",
")",
"return",
"h",
"(",
"nested",
")"
] |
51b314933e047a45587a24ecff02c836706d27ff
|
test
|
filt.add
|
Add filter.
Parameters
----------
name : str
filter name
filt : array_like
boolean filter array
info : str
informative description of the filter
params : tuple
parameters used to make the filter
Returns
-------
None
|
latools/filtering/filt_obj.py
|
def add(self, name, filt, info='', params=(), setn=None):
"""
Add filter.
Parameters
----------
name : str
filter name
filt : array_like
boolean filter array
info : str
informative description of the filter
params : tuple
parameters used to make the filter
Returns
-------
None
"""
iname = '{:.0f}_'.format(self.n) + name
self.index[self.n] = iname
if setn is None:
setn = self.maxset + 1
self.maxset = setn
if setn not in self.sets.keys():
self.sets[setn] = [iname]
else:
self.sets[setn].append(iname)
# self.keys is not added to?
self.components[iname] = filt
self.info[iname] = info
self.params[iname] = params
for a in self.analytes:
self.switches[a][iname] = False
self.n += 1
return
|
def add(self, name, filt, info='', params=(), setn=None):
"""
Add filter.
Parameters
----------
name : str
filter name
filt : array_like
boolean filter array
info : str
informative description of the filter
params : tuple
parameters used to make the filter
Returns
-------
None
"""
iname = '{:.0f}_'.format(self.n) + name
self.index[self.n] = iname
if setn is None:
setn = self.maxset + 1
self.maxset = setn
if setn not in self.sets.keys():
self.sets[setn] = [iname]
else:
self.sets[setn].append(iname)
# self.keys is not added to?
self.components[iname] = filt
self.info[iname] = info
self.params[iname] = params
for a in self.analytes:
self.switches[a][iname] = False
self.n += 1
return
|
[
"Add",
"filter",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L90-L129
|
[
"def",
"add",
"(",
"self",
",",
"name",
",",
"filt",
",",
"info",
"=",
"''",
",",
"params",
"=",
"(",
")",
",",
"setn",
"=",
"None",
")",
":",
"iname",
"=",
"'{:.0f}_'",
".",
"format",
"(",
"self",
".",
"n",
")",
"+",
"name",
"self",
".",
"index",
"[",
"self",
".",
"n",
"]",
"=",
"iname",
"if",
"setn",
"is",
"None",
":",
"setn",
"=",
"self",
".",
"maxset",
"+",
"1",
"self",
".",
"maxset",
"=",
"setn",
"if",
"setn",
"not",
"in",
"self",
".",
"sets",
".",
"keys",
"(",
")",
":",
"self",
".",
"sets",
"[",
"setn",
"]",
"=",
"[",
"iname",
"]",
"else",
":",
"self",
".",
"sets",
"[",
"setn",
"]",
".",
"append",
"(",
"iname",
")",
"# self.keys is not added to?",
"self",
".",
"components",
"[",
"iname",
"]",
"=",
"filt",
"self",
".",
"info",
"[",
"iname",
"]",
"=",
"info",
"self",
".",
"params",
"[",
"iname",
"]",
"=",
"params",
"for",
"a",
"in",
"self",
".",
"analytes",
":",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"iname",
"]",
"=",
"False",
"self",
".",
"n",
"+=",
"1",
"return"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.remove
|
Remove filter.
Parameters
----------
name : str
name of the filter to remove
setn : int or True
int: number of set to remove
True: remove all filters in set that 'name' belongs to
Returns
-------
None
|
latools/filtering/filt_obj.py
|
def remove(self, name=None, setn=None):
"""
Remove filter.
Parameters
----------
name : str
name of the filter to remove
setn : int or True
int: number of set to remove
True: remove all filters in set that 'name' belongs to
Returns
-------
None
"""
if isinstance(name, int):
name = self.index[name]
if setn is not None:
name = self.sets[setn]
del self.sets[setn]
elif isinstance(name, (int, str)):
name = [name]
if setn is True:
for n in name:
for k, v in self.sets.items():
if n in v:
name.append([m for m in v if m != n])
for n in name:
for k, v in self.sets.items():
if n in v:
self.sets[k] = [m for m in v if n != m]
del self.components[n]
del self.info[n]
del self.params[n]
del self.keys[n]
for a in self.analytes:
del self.switches[a][n]
return
|
def remove(self, name=None, setn=None):
"""
Remove filter.
Parameters
----------
name : str
name of the filter to remove
setn : int or True
int: number of set to remove
True: remove all filters in set that 'name' belongs to
Returns
-------
None
"""
if isinstance(name, int):
name = self.index[name]
if setn is not None:
name = self.sets[setn]
del self.sets[setn]
elif isinstance(name, (int, str)):
name = [name]
if setn is True:
for n in name:
for k, v in self.sets.items():
if n in v:
name.append([m for m in v if m != n])
for n in name:
for k, v in self.sets.items():
if n in v:
self.sets[k] = [m for m in v if n != m]
del self.components[n]
del self.info[n]
del self.params[n]
del self.keys[n]
for a in self.analytes:
del self.switches[a][n]
return
|
[
"Remove",
"filter",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L131-L172
|
[
"def",
"remove",
"(",
"self",
",",
"name",
"=",
"None",
",",
"setn",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"name",
",",
"int",
")",
":",
"name",
"=",
"self",
".",
"index",
"[",
"name",
"]",
"if",
"setn",
"is",
"not",
"None",
":",
"name",
"=",
"self",
".",
"sets",
"[",
"setn",
"]",
"del",
"self",
".",
"sets",
"[",
"setn",
"]",
"elif",
"isinstance",
"(",
"name",
",",
"(",
"int",
",",
"str",
")",
")",
":",
"name",
"=",
"[",
"name",
"]",
"if",
"setn",
"is",
"True",
":",
"for",
"n",
"in",
"name",
":",
"for",
"k",
",",
"v",
"in",
"self",
".",
"sets",
".",
"items",
"(",
")",
":",
"if",
"n",
"in",
"v",
":",
"name",
".",
"append",
"(",
"[",
"m",
"for",
"m",
"in",
"v",
"if",
"m",
"!=",
"n",
"]",
")",
"for",
"n",
"in",
"name",
":",
"for",
"k",
",",
"v",
"in",
"self",
".",
"sets",
".",
"items",
"(",
")",
":",
"if",
"n",
"in",
"v",
":",
"self",
".",
"sets",
"[",
"k",
"]",
"=",
"[",
"m",
"for",
"m",
"in",
"v",
"if",
"n",
"!=",
"m",
"]",
"del",
"self",
".",
"components",
"[",
"n",
"]",
"del",
"self",
".",
"info",
"[",
"n",
"]",
"del",
"self",
".",
"params",
"[",
"n",
"]",
"del",
"self",
".",
"keys",
"[",
"n",
"]",
"for",
"a",
"in",
"self",
".",
"analytes",
":",
"del",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"n",
"]",
"return"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.clear
|
Clear all filters.
|
latools/filtering/filt_obj.py
|
def clear(self):
"""
Clear all filters.
"""
self.components = {}
self.info = {}
self.params = {}
self.switches = {}
self.keys = {}
self.index = {}
self.sets = {}
self.maxset = -1
self.n = 0
for a in self.analytes:
self.switches[a] = {}
return
|
def clear(self):
"""
Clear all filters.
"""
self.components = {}
self.info = {}
self.params = {}
self.switches = {}
self.keys = {}
self.index = {}
self.sets = {}
self.maxset = -1
self.n = 0
for a in self.analytes:
self.switches[a] = {}
return
|
[
"Clear",
"all",
"filters",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L174-L189
|
[
"def",
"clear",
"(",
"self",
")",
":",
"self",
".",
"components",
"=",
"{",
"}",
"self",
".",
"info",
"=",
"{",
"}",
"self",
".",
"params",
"=",
"{",
"}",
"self",
".",
"switches",
"=",
"{",
"}",
"self",
".",
"keys",
"=",
"{",
"}",
"self",
".",
"index",
"=",
"{",
"}",
"self",
".",
"sets",
"=",
"{",
"}",
"self",
".",
"maxset",
"=",
"-",
"1",
"self",
".",
"n",
"=",
"0",
"for",
"a",
"in",
"self",
".",
"analytes",
":",
"self",
".",
"switches",
"[",
"a",
"]",
"=",
"{",
"}",
"return"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.clean
|
Remove unused filters.
|
latools/filtering/filt_obj.py
|
def clean(self):
"""
Remove unused filters.
"""
for f in sorted(self.components.keys()):
unused = not any(self.switches[a][f] for a in self.analytes)
if unused:
self.remove(f)
|
def clean(self):
"""
Remove unused filters.
"""
for f in sorted(self.components.keys()):
unused = not any(self.switches[a][f] for a in self.analytes)
if unused:
self.remove(f)
|
[
"Remove",
"unused",
"filters",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L191-L198
|
[
"def",
"clean",
"(",
"self",
")",
":",
"for",
"f",
"in",
"sorted",
"(",
"self",
".",
"components",
".",
"keys",
"(",
")",
")",
":",
"unused",
"=",
"not",
"any",
"(",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"f",
"]",
"for",
"a",
"in",
"self",
".",
"analytes",
")",
"if",
"unused",
":",
"self",
".",
"remove",
"(",
"f",
")"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.on
|
Turn on specified filter(s) for specified analyte(s).
Parameters
----------
analyte : optional, str or array_like
Name or list of names of analytes.
Defaults to all analytes.
filt : optional. int, str or array_like
Name/number or iterable names/numbers of filters.
Returns
-------
None
|
latools/filtering/filt_obj.py
|
def on(self, analyte=None, filt=None):
"""
Turn on specified filter(s) for specified analyte(s).
Parameters
----------
analyte : optional, str or array_like
Name or list of names of analytes.
Defaults to all analytes.
filt : optional. int, str or array_like
Name/number or iterable names/numbers of filters.
Returns
-------
None
"""
if isinstance(analyte, str):
analyte = [analyte]
if isinstance(filt, (int, float)):
filt = [filt]
elif isinstance(filt, str):
filt = self.fuzzmatch(filt, multi=True)
if analyte is None:
analyte = self.analytes
if filt is None:
filt = list(self.index.values())
for a in analyte:
for f in filt:
if isinstance(f, (int, float)):
f = self.index[int(f)]
try:
self.switches[a][f] = True
except KeyError:
f = self.fuzzmatch(f, multi=False)
self.switches[a][f] = True
# for k in self.switches[a].keys():
# if f in k:
# self.switches[a][k] = True
return
|
def on(self, analyte=None, filt=None):
"""
Turn on specified filter(s) for specified analyte(s).
Parameters
----------
analyte : optional, str or array_like
Name or list of names of analytes.
Defaults to all analytes.
filt : optional. int, str or array_like
Name/number or iterable names/numbers of filters.
Returns
-------
None
"""
if isinstance(analyte, str):
analyte = [analyte]
if isinstance(filt, (int, float)):
filt = [filt]
elif isinstance(filt, str):
filt = self.fuzzmatch(filt, multi=True)
if analyte is None:
analyte = self.analytes
if filt is None:
filt = list(self.index.values())
for a in analyte:
for f in filt:
if isinstance(f, (int, float)):
f = self.index[int(f)]
try:
self.switches[a][f] = True
except KeyError:
f = self.fuzzmatch(f, multi=False)
self.switches[a][f] = True
# for k in self.switches[a].keys():
# if f in k:
# self.switches[a][k] = True
return
|
[
"Turn",
"on",
"specified",
"filter",
"(",
"s",
")",
"for",
"specified",
"analyte",
"(",
"s",
")",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L200-L242
|
[
"def",
"on",
"(",
"self",
",",
"analyte",
"=",
"None",
",",
"filt",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"analyte",
",",
"str",
")",
":",
"analyte",
"=",
"[",
"analyte",
"]",
"if",
"isinstance",
"(",
"filt",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"filt",
"=",
"[",
"filt",
"]",
"elif",
"isinstance",
"(",
"filt",
",",
"str",
")",
":",
"filt",
"=",
"self",
".",
"fuzzmatch",
"(",
"filt",
",",
"multi",
"=",
"True",
")",
"if",
"analyte",
"is",
"None",
":",
"analyte",
"=",
"self",
".",
"analytes",
"if",
"filt",
"is",
"None",
":",
"filt",
"=",
"list",
"(",
"self",
".",
"index",
".",
"values",
"(",
")",
")",
"for",
"a",
"in",
"analyte",
":",
"for",
"f",
"in",
"filt",
":",
"if",
"isinstance",
"(",
"f",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"f",
"=",
"self",
".",
"index",
"[",
"int",
"(",
"f",
")",
"]",
"try",
":",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"f",
"]",
"=",
"True",
"except",
"KeyError",
":",
"f",
"=",
"self",
".",
"fuzzmatch",
"(",
"f",
",",
"multi",
"=",
"False",
")",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"f",
"]",
"=",
"True",
"# for k in self.switches[a].keys():",
"# if f in k:",
"# self.switches[a][k] = True",
"return"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.make
|
Make filter for specified analyte(s).
Filter specified in filt.switches.
Parameters
----------
analyte : str or array_like
Name or list of names of analytes.
Returns
-------
array_like
boolean filter
|
latools/filtering/filt_obj.py
|
def make(self, analyte):
"""
Make filter for specified analyte(s).
Filter specified in filt.switches.
Parameters
----------
analyte : str or array_like
Name or list of names of analytes.
Returns
-------
array_like
boolean filter
"""
if analyte is None:
analyte = self.analytes
elif isinstance(analyte, str):
analyte = [analyte]
out = []
for f in self.components.keys():
for a in analyte:
if self.switches[a][f]:
out.append(f)
key = ' & '.join(sorted(out))
for a in analyte:
self.keys[a] = key
return self.make_fromkey(key)
|
def make(self, analyte):
"""
Make filter for specified analyte(s).
Filter specified in filt.switches.
Parameters
----------
analyte : str or array_like
Name or list of names of analytes.
Returns
-------
array_like
boolean filter
"""
if analyte is None:
analyte = self.analytes
elif isinstance(analyte, str):
analyte = [analyte]
out = []
for f in self.components.keys():
for a in analyte:
if self.switches[a][f]:
out.append(f)
key = ' & '.join(sorted(out))
for a in analyte:
self.keys[a] = key
return self.make_fromkey(key)
|
[
"Make",
"filter",
"for",
"specified",
"analyte",
"(",
"s",
")",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L288-L317
|
[
"def",
"make",
"(",
"self",
",",
"analyte",
")",
":",
"if",
"analyte",
"is",
"None",
":",
"analyte",
"=",
"self",
".",
"analytes",
"elif",
"isinstance",
"(",
"analyte",
",",
"str",
")",
":",
"analyte",
"=",
"[",
"analyte",
"]",
"out",
"=",
"[",
"]",
"for",
"f",
"in",
"self",
".",
"components",
".",
"keys",
"(",
")",
":",
"for",
"a",
"in",
"analyte",
":",
"if",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"f",
"]",
":",
"out",
".",
"append",
"(",
"f",
")",
"key",
"=",
"' & '",
".",
"join",
"(",
"sorted",
"(",
"out",
")",
")",
"for",
"a",
"in",
"analyte",
":",
"self",
".",
"keys",
"[",
"a",
"]",
"=",
"key",
"return",
"self",
".",
"make_fromkey",
"(",
"key",
")"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.fuzzmatch
|
Identify a filter by fuzzy string matching.
Partial ('fuzzy') matching performed by `fuzzywuzzy.fuzzy.ratio`
Parameters
----------
fuzzkey : str
A string that partially matches one filter name more than the others.
Returns
-------
The name of the most closely matched filter. : str
|
latools/filtering/filt_obj.py
|
def fuzzmatch(self, fuzzkey, multi=False):
"""
Identify a filter by fuzzy string matching.
Partial ('fuzzy') matching performed by `fuzzywuzzy.fuzzy.ratio`
Parameters
----------
fuzzkey : str
A string that partially matches one filter name more than the others.
Returns
-------
The name of the most closely matched filter. : str
"""
keys, ratios = np.array([(f, seqm(None, fuzzkey, f).ratio()) for f in self.components.keys()]).T
mratio = max(ratios)
if multi:
return keys[ratios == mratio]
else:
if sum(ratios == mratio) == 1:
return keys[ratios == mratio][0]
else:
raise ValueError("\nThe filter key provided ('{:}') matches two or more filter names equally well:\n".format(fuzzkey) + ', '.join(keys[ratios == mratio]) + "\nPlease be more specific!")
|
def fuzzmatch(self, fuzzkey, multi=False):
"""
Identify a filter by fuzzy string matching.
Partial ('fuzzy') matching performed by `fuzzywuzzy.fuzzy.ratio`
Parameters
----------
fuzzkey : str
A string that partially matches one filter name more than the others.
Returns
-------
The name of the most closely matched filter. : str
"""
keys, ratios = np.array([(f, seqm(None, fuzzkey, f).ratio()) for f in self.components.keys()]).T
mratio = max(ratios)
if multi:
return keys[ratios == mratio]
else:
if sum(ratios == mratio) == 1:
return keys[ratios == mratio][0]
else:
raise ValueError("\nThe filter key provided ('{:}') matches two or more filter names equally well:\n".format(fuzzkey) + ', '.join(keys[ratios == mratio]) + "\nPlease be more specific!")
|
[
"Identify",
"a",
"filter",
"by",
"fuzzy",
"string",
"matching",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L319-L344
|
[
"def",
"fuzzmatch",
"(",
"self",
",",
"fuzzkey",
",",
"multi",
"=",
"False",
")",
":",
"keys",
",",
"ratios",
"=",
"np",
".",
"array",
"(",
"[",
"(",
"f",
",",
"seqm",
"(",
"None",
",",
"fuzzkey",
",",
"f",
")",
".",
"ratio",
"(",
")",
")",
"for",
"f",
"in",
"self",
".",
"components",
".",
"keys",
"(",
")",
"]",
")",
".",
"T",
"mratio",
"=",
"max",
"(",
"ratios",
")",
"if",
"multi",
":",
"return",
"keys",
"[",
"ratios",
"==",
"mratio",
"]",
"else",
":",
"if",
"sum",
"(",
"ratios",
"==",
"mratio",
")",
"==",
"1",
":",
"return",
"keys",
"[",
"ratios",
"==",
"mratio",
"]",
"[",
"0",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"\\nThe filter key provided ('{:}') matches two or more filter names equally well:\\n\"",
".",
"format",
"(",
"fuzzkey",
")",
"+",
"', '",
".",
"join",
"(",
"keys",
"[",
"ratios",
"==",
"mratio",
"]",
")",
"+",
"\"\\nPlease be more specific!\"",
")"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.make_fromkey
|
Make filter from logical expression.
Takes a logical expression as an input, and returns a filter. Used for advanced
filtering, where combinations of nested and/or filters are desired. Filter names must
exactly match the names listed by print(filt).
Example: ``key = '(Filter_1 | Filter_2) & Filter_3'``
is equivalent to:
``(Filter_1 OR Filter_2) AND Filter_3``
statements in parentheses are evaluated first.
Parameters
----------
key : str
logical expression describing filter construction.
Returns
-------
array_like
boolean filter
|
latools/filtering/filt_obj.py
|
def make_fromkey(self, key):
"""
Make filter from logical expression.
Takes a logical expression as an input, and returns a filter. Used for advanced
filtering, where combinations of nested and/or filters are desired. Filter names must
exactly match the names listed by print(filt).
Example: ``key = '(Filter_1 | Filter_2) & Filter_3'``
is equivalent to:
``(Filter_1 OR Filter_2) AND Filter_3``
statements in parentheses are evaluated first.
Parameters
----------
key : str
logical expression describing filter construction.
Returns
-------
array_like
boolean filter
"""
if key != '':
def make_runable(match):
return "self.components['" + self.fuzzmatch(match.group(0)) + "']"
runable = re.sub('[^\(\)|& ]+', make_runable, key)
return eval(runable)
else:
return ~np.zeros(self.size, dtype=bool)
|
def make_fromkey(self, key):
"""
Make filter from logical expression.
Takes a logical expression as an input, and returns a filter. Used for advanced
filtering, where combinations of nested and/or filters are desired. Filter names must
exactly match the names listed by print(filt).
Example: ``key = '(Filter_1 | Filter_2) & Filter_3'``
is equivalent to:
``(Filter_1 OR Filter_2) AND Filter_3``
statements in parentheses are evaluated first.
Parameters
----------
key : str
logical expression describing filter construction.
Returns
-------
array_like
boolean filter
"""
if key != '':
def make_runable(match):
return "self.components['" + self.fuzzmatch(match.group(0)) + "']"
runable = re.sub('[^\(\)|& ]+', make_runable, key)
return eval(runable)
else:
return ~np.zeros(self.size, dtype=bool)
|
[
"Make",
"filter",
"from",
"logical",
"expression",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L346-L377
|
[
"def",
"make_fromkey",
"(",
"self",
",",
"key",
")",
":",
"if",
"key",
"!=",
"''",
":",
"def",
"make_runable",
"(",
"match",
")",
":",
"return",
"\"self.components['\"",
"+",
"self",
".",
"fuzzmatch",
"(",
"match",
".",
"group",
"(",
"0",
")",
")",
"+",
"\"']\"",
"runable",
"=",
"re",
".",
"sub",
"(",
"'[^\\(\\)|& ]+'",
",",
"make_runable",
",",
"key",
")",
"return",
"eval",
"(",
"runable",
")",
"else",
":",
"return",
"~",
"np",
".",
"zeros",
"(",
"self",
".",
"size",
",",
"dtype",
"=",
"bool",
")"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.make_keydict
|
Make logical expressions describing the filter(s) for specified analyte(s).
Parameters
----------
analyte : optional, str or array_like
Name or list of names of analytes.
Defaults to all analytes.
Returns
-------
dict
containing the logical filter expression for each analyte.
|
latools/filtering/filt_obj.py
|
def make_keydict(self, analyte=None):
"""
Make logical expressions describing the filter(s) for specified analyte(s).
Parameters
----------
analyte : optional, str or array_like
Name or list of names of analytes.
Defaults to all analytes.
Returns
-------
dict
containing the logical filter expression for each analyte.
"""
if analyte is None:
analyte = self.analytes
elif isinstance(analyte, str):
analyte = [analyte]
out = {}
for a in analyte:
key = []
for f in self.components.keys():
if self.switches[a][f]:
key.append(f)
out[a] = ' & '.join(sorted(key))
self.keydict = out
return out
|
def make_keydict(self, analyte=None):
"""
Make logical expressions describing the filter(s) for specified analyte(s).
Parameters
----------
analyte : optional, str or array_like
Name or list of names of analytes.
Defaults to all analytes.
Returns
-------
dict
containing the logical filter expression for each analyte.
"""
if analyte is None:
analyte = self.analytes
elif isinstance(analyte, str):
analyte = [analyte]
out = {}
for a in analyte:
key = []
for f in self.components.keys():
if self.switches[a][f]:
key.append(f)
out[a] = ' & '.join(sorted(key))
self.keydict = out
return out
|
[
"Make",
"logical",
"expressions",
"describing",
"the",
"filter",
"(",
"s",
")",
"for",
"specified",
"analyte",
"(",
"s",
")",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L379-L407
|
[
"def",
"make_keydict",
"(",
"self",
",",
"analyte",
"=",
"None",
")",
":",
"if",
"analyte",
"is",
"None",
":",
"analyte",
"=",
"self",
".",
"analytes",
"elif",
"isinstance",
"(",
"analyte",
",",
"str",
")",
":",
"analyte",
"=",
"[",
"analyte",
"]",
"out",
"=",
"{",
"}",
"for",
"a",
"in",
"analyte",
":",
"key",
"=",
"[",
"]",
"for",
"f",
"in",
"self",
".",
"components",
".",
"keys",
"(",
")",
":",
"if",
"self",
".",
"switches",
"[",
"a",
"]",
"[",
"f",
"]",
":",
"key",
".",
"append",
"(",
"f",
")",
"out",
"[",
"a",
"]",
"=",
"' & '",
".",
"join",
"(",
"sorted",
"(",
"key",
")",
")",
"self",
".",
"keydict",
"=",
"out",
"return",
"out"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.grab_filt
|
Flexible access to specific filter using any key format.
Parameters
----------
f : str, dict or bool
either logical filter expression, dict of expressions,
or a boolean
analyte : str
name of analyte the filter is for.
Returns
-------
array_like
boolean filter
|
latools/filtering/filt_obj.py
|
def grab_filt(self, filt, analyte=None):
"""
Flexible access to specific filter using any key format.
Parameters
----------
f : str, dict or bool
either logical filter expression, dict of expressions,
or a boolean
analyte : str
name of analyte the filter is for.
Returns
-------
array_like
boolean filter
"""
if isinstance(filt, str):
if filt in self.components:
if analyte is None:
return self.components[filt]
else:
if self.switches[analyte][filt]:
return self.components[filt]
else:
try:
ind = self.make_fromkey(filt)
except KeyError:
print(("\n\n***Filter key invalid. Please consult "
"manual and try again."))
elif isinstance(filt, dict):
try:
ind = self.make_fromkey(filt[analyte])
except ValueError:
print(("\n\n***Filter key invalid. Please consult manual "
"and try again.\nOR\nAnalyte missing from filter "
"key dict."))
elif filt:
ind = self.make(analyte)
else:
ind = ~np.zeros(self.size, dtype=bool)
return ind
|
def grab_filt(self, filt, analyte=None):
"""
Flexible access to specific filter using any key format.
Parameters
----------
f : str, dict or bool
either logical filter expression, dict of expressions,
or a boolean
analyte : str
name of analyte the filter is for.
Returns
-------
array_like
boolean filter
"""
if isinstance(filt, str):
if filt in self.components:
if analyte is None:
return self.components[filt]
else:
if self.switches[analyte][filt]:
return self.components[filt]
else:
try:
ind = self.make_fromkey(filt)
except KeyError:
print(("\n\n***Filter key invalid. Please consult "
"manual and try again."))
elif isinstance(filt, dict):
try:
ind = self.make_fromkey(filt[analyte])
except ValueError:
print(("\n\n***Filter key invalid. Please consult manual "
"and try again.\nOR\nAnalyte missing from filter "
"key dict."))
elif filt:
ind = self.make(analyte)
else:
ind = ~np.zeros(self.size, dtype=bool)
return ind
|
[
"Flexible",
"access",
"to",
"specific",
"filter",
"using",
"any",
"key",
"format",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L409-L450
|
[
"def",
"grab_filt",
"(",
"self",
",",
"filt",
",",
"analyte",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"filt",
",",
"str",
")",
":",
"if",
"filt",
"in",
"self",
".",
"components",
":",
"if",
"analyte",
"is",
"None",
":",
"return",
"self",
".",
"components",
"[",
"filt",
"]",
"else",
":",
"if",
"self",
".",
"switches",
"[",
"analyte",
"]",
"[",
"filt",
"]",
":",
"return",
"self",
".",
"components",
"[",
"filt",
"]",
"else",
":",
"try",
":",
"ind",
"=",
"self",
".",
"make_fromkey",
"(",
"filt",
")",
"except",
"KeyError",
":",
"print",
"(",
"(",
"\"\\n\\n***Filter key invalid. Please consult \"",
"\"manual and try again.\"",
")",
")",
"elif",
"isinstance",
"(",
"filt",
",",
"dict",
")",
":",
"try",
":",
"ind",
"=",
"self",
".",
"make_fromkey",
"(",
"filt",
"[",
"analyte",
"]",
")",
"except",
"ValueError",
":",
"print",
"(",
"(",
"\"\\n\\n***Filter key invalid. Please consult manual \"",
"\"and try again.\\nOR\\nAnalyte missing from filter \"",
"\"key dict.\"",
")",
")",
"elif",
"filt",
":",
"ind",
"=",
"self",
".",
"make",
"(",
"analyte",
")",
"else",
":",
"ind",
"=",
"~",
"np",
".",
"zeros",
"(",
"self",
".",
"size",
",",
"dtype",
"=",
"bool",
")",
"return",
"ind"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.get_components
|
Extract filter components for specific analyte(s).
Parameters
----------
key : str
string present in one or more filter names.
e.g. 'Al27' will return all filters with
'Al27' in their names.
analyte : str
name of analyte the filter is for
Returns
-------
boolean filter : array-like
|
latools/filtering/filt_obj.py
|
def get_components(self, key, analyte=None):
"""
Extract filter components for specific analyte(s).
Parameters
----------
key : str
string present in one or more filter names.
e.g. 'Al27' will return all filters with
'Al27' in their names.
analyte : str
name of analyte the filter is for
Returns
-------
boolean filter : array-like
"""
out = {}
for k, v in self.components.items():
if key in k:
if analyte is None:
out[k] = v
elif self.switches[analyte][k]:
out[k] = v
return out
|
def get_components(self, key, analyte=None):
"""
Extract filter components for specific analyte(s).
Parameters
----------
key : str
string present in one or more filter names.
e.g. 'Al27' will return all filters with
'Al27' in their names.
analyte : str
name of analyte the filter is for
Returns
-------
boolean filter : array-like
"""
out = {}
for k, v in self.components.items():
if key in k:
if analyte is None:
out[k] = v
elif self.switches[analyte][k]:
out[k] = v
return out
|
[
"Extract",
"filter",
"components",
"for",
"specific",
"analyte",
"(",
"s",
")",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L452-L476
|
[
"def",
"get_components",
"(",
"self",
",",
"key",
",",
"analyte",
"=",
"None",
")",
":",
"out",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"self",
".",
"components",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"k",
":",
"if",
"analyte",
"is",
"None",
":",
"out",
"[",
"k",
"]",
"=",
"v",
"elif",
"self",
".",
"switches",
"[",
"analyte",
"]",
"[",
"k",
"]",
":",
"out",
"[",
"k",
"]",
"=",
"v",
"return",
"out"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
filt.get_info
|
Get info for all filters.
|
latools/filtering/filt_obj.py
|
def get_info(self):
"""
Get info for all filters.
"""
out = ''
for k in sorted(self.components.keys()):
out += '{:s}: {:s}'.format(k, self.info[k]) + '\n'
return(out)
|
def get_info(self):
"""
Get info for all filters.
"""
out = ''
for k in sorted(self.components.keys()):
out += '{:s}: {:s}'.format(k, self.info[k]) + '\n'
return(out)
|
[
"Get",
"info",
"for",
"all",
"filters",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/filtering/filt_obj.py#L478-L485
|
[
"def",
"get_info",
"(",
"self",
")",
":",
"out",
"=",
"''",
"for",
"k",
"in",
"sorted",
"(",
"self",
".",
"components",
".",
"keys",
"(",
")",
")",
":",
"out",
"+=",
"'{:s}: {:s}'",
".",
"format",
"(",
"k",
",",
"self",
".",
"info",
"[",
"k",
"]",
")",
"+",
"'\\n'",
"return",
"(",
"out",
")"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
read_data
|
Load data_file described by a dataformat dict.
Parameters
----------
data_file : str
Path to data file, including extension.
dataformat : dict
A dataformat dict, see example below.
name_mode : str
How to identyfy sample names. If 'file_names' uses the
input name of the file, stripped of the extension. If
'metadata_names' uses the 'name' attribute of the 'meta'
sub-dictionary in dataformat. If any other str, uses this
str as the sample name.
Example
-------
>>>
{'genfromtext_args': {'delimiter': ',',
'skip_header': 4}, # passed directly to np.genfromtxt
'column_id': {'name_row': 3, # which row contains the column names
'delimiter': ',', # delimeter between column names
'timecolumn': 0, # which column contains the 'time' variable
'pattern': '([A-z]{1,2}[0-9]{1,3})'}, # a regex pattern which captures the column names
'meta_regex': { # a dict of (line_no: ([descriptors], [regexs])) pairs
0: (['path'], '(.*)'),
2: (['date', 'method'], # MUST include date
'([A-Z][a-z]+ [0-9]+ [0-9]{4}[ ]+[0-9:]+ [amp]+).* ([A-z0-9]+\.m)')
}
}
Returns
-------
sample, analytes, data, meta : tuple
|
latools/processes/data_read.py
|
def read_data(data_file, dataformat, name_mode):
"""
Load data_file described by a dataformat dict.
Parameters
----------
data_file : str
Path to data file, including extension.
dataformat : dict
A dataformat dict, see example below.
name_mode : str
How to identyfy sample names. If 'file_names' uses the
input name of the file, stripped of the extension. If
'metadata_names' uses the 'name' attribute of the 'meta'
sub-dictionary in dataformat. If any other str, uses this
str as the sample name.
Example
-------
>>>
{'genfromtext_args': {'delimiter': ',',
'skip_header': 4}, # passed directly to np.genfromtxt
'column_id': {'name_row': 3, # which row contains the column names
'delimiter': ',', # delimeter between column names
'timecolumn': 0, # which column contains the 'time' variable
'pattern': '([A-z]{1,2}[0-9]{1,3})'}, # a regex pattern which captures the column names
'meta_regex': { # a dict of (line_no: ([descriptors], [regexs])) pairs
0: (['path'], '(.*)'),
2: (['date', 'method'], # MUST include date
'([A-Z][a-z]+ [0-9]+ [0-9]{4}[ ]+[0-9:]+ [amp]+).* ([A-z0-9]+\.m)')
}
}
Returns
-------
sample, analytes, data, meta : tuple
"""
with open(data_file) as f:
lines = f.readlines()
if 'meta_regex' in dataformat.keys():
meta = Bunch()
for k, v in dataformat['meta_regex'].items():
try:
out = re.search(v[-1], lines[int(k)]).groups()
except:
raise ValueError('Failed reading metadata when applying:\n regex: {}\nto\n line: {}'.format(v[-1], lines[int(k)]))
for i in np.arange(len(v[0])):
meta[v[0][i]] = out[i]
else:
meta = {}
# sample name
if name_mode == 'file_names':
sample = os.path.basename(data_file).split('.')[0]
elif name_mode == 'metadata_names':
sample = meta['name']
else:
sample = name_mode
# column and analyte names
columns = np.array(lines[dataformat['column_id']['name_row']].strip().split(
dataformat['column_id']['delimiter']))
if 'pattern' in dataformat['column_id'].keys():
pr = re.compile(dataformat['column_id']['pattern'])
analytes = [pr.match(c).groups()[0] for c in columns if pr.match(c)]
# do any required pre-formatting
if 'preformat_replace' in dataformat.keys():
with open(data_file) as f:
fbuffer = f.read()
for k, v in dataformat['preformat_replace'].items():
fbuffer = re.sub(k, v, fbuffer)
# dead data
read_data = np.genfromtxt(BytesIO(fbuffer.encode()),
**dataformat['genfromtext_args']).T
else:
# read data
read_data = np.genfromtxt(data_file,
**dataformat['genfromtext_args']).T
# data dict
dind = np.zeros(read_data.shape[0], dtype=bool)
for a in analytes:
dind[columns == a] = True
data = Bunch()
data['Time'] = read_data[dataformat['column_id']['timecolumn']]
# deal with time units
if 'time_unit' in dataformat['column_id']:
if isinstance(dataformat['column_id']['time_unit'], (float, int)):
time_mult = dataformat['column_id']['time_unit']
elif isinstance(dataformat['column_id']['time_unit'], str):
unit_multipliers = {'ms': 1/1000,
'min': 60/1,
's': 1}
try:
time_mult = unit_multipliers[dataformat['column_id']['time_unit']]
except:
raise ValueError("In dataformat: time_unit must be a number, 'ms', 'min' or 's'")
data['Time'] *= time_mult
# convert raw data into counts
# TODO: Is this correct? Should actually be per-analyte dwell?
# if 'unit' in dataformat:
# if dataformat['unit'] == 'cps':
# tstep = data['Time'][1] - data['Time'][0]
# read_data[dind] *= tstep
# else:
# pass
data['rawdata'] = Bunch(zip(analytes, read_data[dind]))
data['total_counts'] = np.nansum(read_data[dind], 0)
return sample, analytes, data, meta
|
def read_data(data_file, dataformat, name_mode):
"""
Load data_file described by a dataformat dict.
Parameters
----------
data_file : str
Path to data file, including extension.
dataformat : dict
A dataformat dict, see example below.
name_mode : str
How to identyfy sample names. If 'file_names' uses the
input name of the file, stripped of the extension. If
'metadata_names' uses the 'name' attribute of the 'meta'
sub-dictionary in dataformat. If any other str, uses this
str as the sample name.
Example
-------
>>>
{'genfromtext_args': {'delimiter': ',',
'skip_header': 4}, # passed directly to np.genfromtxt
'column_id': {'name_row': 3, # which row contains the column names
'delimiter': ',', # delimeter between column names
'timecolumn': 0, # which column contains the 'time' variable
'pattern': '([A-z]{1,2}[0-9]{1,3})'}, # a regex pattern which captures the column names
'meta_regex': { # a dict of (line_no: ([descriptors], [regexs])) pairs
0: (['path'], '(.*)'),
2: (['date', 'method'], # MUST include date
'([A-Z][a-z]+ [0-9]+ [0-9]{4}[ ]+[0-9:]+ [amp]+).* ([A-z0-9]+\.m)')
}
}
Returns
-------
sample, analytes, data, meta : tuple
"""
with open(data_file) as f:
lines = f.readlines()
if 'meta_regex' in dataformat.keys():
meta = Bunch()
for k, v in dataformat['meta_regex'].items():
try:
out = re.search(v[-1], lines[int(k)]).groups()
except:
raise ValueError('Failed reading metadata when applying:\n regex: {}\nto\n line: {}'.format(v[-1], lines[int(k)]))
for i in np.arange(len(v[0])):
meta[v[0][i]] = out[i]
else:
meta = {}
# sample name
if name_mode == 'file_names':
sample = os.path.basename(data_file).split('.')[0]
elif name_mode == 'metadata_names':
sample = meta['name']
else:
sample = name_mode
# column and analyte names
columns = np.array(lines[dataformat['column_id']['name_row']].strip().split(
dataformat['column_id']['delimiter']))
if 'pattern' in dataformat['column_id'].keys():
pr = re.compile(dataformat['column_id']['pattern'])
analytes = [pr.match(c).groups()[0] for c in columns if pr.match(c)]
# do any required pre-formatting
if 'preformat_replace' in dataformat.keys():
with open(data_file) as f:
fbuffer = f.read()
for k, v in dataformat['preformat_replace'].items():
fbuffer = re.sub(k, v, fbuffer)
# dead data
read_data = np.genfromtxt(BytesIO(fbuffer.encode()),
**dataformat['genfromtext_args']).T
else:
# read data
read_data = np.genfromtxt(data_file,
**dataformat['genfromtext_args']).T
# data dict
dind = np.zeros(read_data.shape[0], dtype=bool)
for a in analytes:
dind[columns == a] = True
data = Bunch()
data['Time'] = read_data[dataformat['column_id']['timecolumn']]
# deal with time units
if 'time_unit' in dataformat['column_id']:
if isinstance(dataformat['column_id']['time_unit'], (float, int)):
time_mult = dataformat['column_id']['time_unit']
elif isinstance(dataformat['column_id']['time_unit'], str):
unit_multipliers = {'ms': 1/1000,
'min': 60/1,
's': 1}
try:
time_mult = unit_multipliers[dataformat['column_id']['time_unit']]
except:
raise ValueError("In dataformat: time_unit must be a number, 'ms', 'min' or 's'")
data['Time'] *= time_mult
# convert raw data into counts
# TODO: Is this correct? Should actually be per-analyte dwell?
# if 'unit' in dataformat:
# if dataformat['unit'] == 'cps':
# tstep = data['Time'][1] - data['Time'][0]
# read_data[dind] *= tstep
# else:
# pass
data['rawdata'] = Bunch(zip(analytes, read_data[dind]))
data['total_counts'] = np.nansum(read_data[dind], 0)
return sample, analytes, data, meta
|
[
"Load",
"data_file",
"described",
"by",
"a",
"dataformat",
"dict",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/processes/data_read.py#L6-L120
|
[
"def",
"read_data",
"(",
"data_file",
",",
"dataformat",
",",
"name_mode",
")",
":",
"with",
"open",
"(",
"data_file",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"if",
"'meta_regex'",
"in",
"dataformat",
".",
"keys",
"(",
")",
":",
"meta",
"=",
"Bunch",
"(",
")",
"for",
"k",
",",
"v",
"in",
"dataformat",
"[",
"'meta_regex'",
"]",
".",
"items",
"(",
")",
":",
"try",
":",
"out",
"=",
"re",
".",
"search",
"(",
"v",
"[",
"-",
"1",
"]",
",",
"lines",
"[",
"int",
"(",
"k",
")",
"]",
")",
".",
"groups",
"(",
")",
"except",
":",
"raise",
"ValueError",
"(",
"'Failed reading metadata when applying:\\n regex: {}\\nto\\n line: {}'",
".",
"format",
"(",
"v",
"[",
"-",
"1",
"]",
",",
"lines",
"[",
"int",
"(",
"k",
")",
"]",
")",
")",
"for",
"i",
"in",
"np",
".",
"arange",
"(",
"len",
"(",
"v",
"[",
"0",
"]",
")",
")",
":",
"meta",
"[",
"v",
"[",
"0",
"]",
"[",
"i",
"]",
"]",
"=",
"out",
"[",
"i",
"]",
"else",
":",
"meta",
"=",
"{",
"}",
"# sample name",
"if",
"name_mode",
"==",
"'file_names'",
":",
"sample",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"data_file",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"elif",
"name_mode",
"==",
"'metadata_names'",
":",
"sample",
"=",
"meta",
"[",
"'name'",
"]",
"else",
":",
"sample",
"=",
"name_mode",
"# column and analyte names",
"columns",
"=",
"np",
".",
"array",
"(",
"lines",
"[",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'name_row'",
"]",
"]",
".",
"strip",
"(",
")",
".",
"split",
"(",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'delimiter'",
"]",
")",
")",
"if",
"'pattern'",
"in",
"dataformat",
"[",
"'column_id'",
"]",
".",
"keys",
"(",
")",
":",
"pr",
"=",
"re",
".",
"compile",
"(",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'pattern'",
"]",
")",
"analytes",
"=",
"[",
"pr",
".",
"match",
"(",
"c",
")",
".",
"groups",
"(",
")",
"[",
"0",
"]",
"for",
"c",
"in",
"columns",
"if",
"pr",
".",
"match",
"(",
"c",
")",
"]",
"# do any required pre-formatting",
"if",
"'preformat_replace'",
"in",
"dataformat",
".",
"keys",
"(",
")",
":",
"with",
"open",
"(",
"data_file",
")",
"as",
"f",
":",
"fbuffer",
"=",
"f",
".",
"read",
"(",
")",
"for",
"k",
",",
"v",
"in",
"dataformat",
"[",
"'preformat_replace'",
"]",
".",
"items",
"(",
")",
":",
"fbuffer",
"=",
"re",
".",
"sub",
"(",
"k",
",",
"v",
",",
"fbuffer",
")",
"# dead data",
"read_data",
"=",
"np",
".",
"genfromtxt",
"(",
"BytesIO",
"(",
"fbuffer",
".",
"encode",
"(",
")",
")",
",",
"*",
"*",
"dataformat",
"[",
"'genfromtext_args'",
"]",
")",
".",
"T",
"else",
":",
"# read data",
"read_data",
"=",
"np",
".",
"genfromtxt",
"(",
"data_file",
",",
"*",
"*",
"dataformat",
"[",
"'genfromtext_args'",
"]",
")",
".",
"T",
"# data dict",
"dind",
"=",
"np",
".",
"zeros",
"(",
"read_data",
".",
"shape",
"[",
"0",
"]",
",",
"dtype",
"=",
"bool",
")",
"for",
"a",
"in",
"analytes",
":",
"dind",
"[",
"columns",
"==",
"a",
"]",
"=",
"True",
"data",
"=",
"Bunch",
"(",
")",
"data",
"[",
"'Time'",
"]",
"=",
"read_data",
"[",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'timecolumn'",
"]",
"]",
"# deal with time units",
"if",
"'time_unit'",
"in",
"dataformat",
"[",
"'column_id'",
"]",
":",
"if",
"isinstance",
"(",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'time_unit'",
"]",
",",
"(",
"float",
",",
"int",
")",
")",
":",
"time_mult",
"=",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'time_unit'",
"]",
"elif",
"isinstance",
"(",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'time_unit'",
"]",
",",
"str",
")",
":",
"unit_multipliers",
"=",
"{",
"'ms'",
":",
"1",
"/",
"1000",
",",
"'min'",
":",
"60",
"/",
"1",
",",
"'s'",
":",
"1",
"}",
"try",
":",
"time_mult",
"=",
"unit_multipliers",
"[",
"dataformat",
"[",
"'column_id'",
"]",
"[",
"'time_unit'",
"]",
"]",
"except",
":",
"raise",
"ValueError",
"(",
"\"In dataformat: time_unit must be a number, 'ms', 'min' or 's'\"",
")",
"data",
"[",
"'Time'",
"]",
"*=",
"time_mult",
"# convert raw data into counts",
"# TODO: Is this correct? Should actually be per-analyte dwell?",
"# if 'unit' in dataformat:",
"# if dataformat['unit'] == 'cps':",
"# tstep = data['Time'][1] - data['Time'][0]",
"# read_data[dind] *= tstep",
"# else:",
"# pass",
"data",
"[",
"'rawdata'",
"]",
"=",
"Bunch",
"(",
"zip",
"(",
"analytes",
",",
"read_data",
"[",
"dind",
"]",
")",
")",
"data",
"[",
"'total_counts'",
"]",
"=",
"np",
".",
"nansum",
"(",
"read_data",
"[",
"dind",
"]",
",",
"0",
")",
"return",
"sample",
",",
"analytes",
",",
"data",
",",
"meta"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
residual_plots
|
Function for plotting Test User and LAtools data comparison.
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
rep_stats : dict
Reproducibility stats of the reference data produced by
`pairwise_reproducibility`
els : list
list of elements (names only) to plot.
|
Supplement/comparison_tools/plots_1sample.py
|
def residual_plots(df, rep_stats=None, els=['Mg', 'Sr', 'Al', 'Mn', 'Fe', 'Cu', 'Zn', 'B']):
"""
Function for plotting Test User and LAtools data comparison.
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
rep_stats : dict
Reproducibility stats of the reference data produced by
`pairwise_reproducibility`
els : list
list of elements (names only) to plot.
"""
# get corresponding analyte and ratio names
As = []
Rs = []
analytes = [c for c in df.columns if ('/' not in c)]
ratios = [c for c in df.columns if ('/' in c)]
for e in els:
if e == 'Sr':
As.append('88Sr')
elif e == 'Mg':
As.append('24Mg')
else:
As.append([a for a in analytes if e in a][0])
Rs.append([r for r in ratios if e in r][0])
fig, axs = plt.subplots(len(els), 2, figsize=(5, len(els) * 2))
for i, (e, a) in enumerate(zip(Rs, As)):
lax, hax = axs[i]
x = df.loc[:, e].values
yl = df.loc[:, a].values
c = element_colour(fmt_el(a))
u = 'mmol/mol'
# calculate residuals
rl = yl - x
# plot residuals
lax.scatter(x, rl, c=c, s=15, lw=0.5, edgecolor='k', alpha=0.5)
# plot PDFs
rl = rl[~np.isnan(rl)]
lims = np.percentile(rl, [99, 1])
lims += lims.ptp() * np.array((-1.25, 1.25))
bins = np.linspace(*lims, 100)
kdl = stats.gaussian_kde(rl, .4)
hax.fill_betweenx(bins, kdl(bins), facecolor=c, alpha=0.7, edgecolor='k', lw=0.5, label='LAtools')
hax.set_xlim([0, hax.get_xlim()[-1]])
# axis labels, annotations and limits
lax.set_ylabel(e + ' ('+ u + ')')
lax.text(.02,.02,fmt_RSS(rl), fontsize=8,
ha='left', va='bottom', transform=lax.transAxes)
xlim = np.percentile(x[~np.isnan(x)], [0, 98])
lax.set_xlim(xlim)
for ax in axs[i]:
ax.set_ylim(lims)
# zero line and 2SD precision
ax.axhline(0, c='k', ls='dashed', alpha=0.6)
if rep_stats is not None:
ax.axhspan(-rep_stats[e][0] * 2, rep_stats[e][0] * 2, color=(0,0,0,0.2), zorder=-1)
if not ax.is_first_col():
ax.set_yticklabels([])
if ax.is_last_row():
hax.set_xlabel('Density')
lax.set_xlabel('Iolite User')
if ax.is_first_row():
lax.set_title('LAtools', loc='left')
fig.tight_layout()
return fig, axs
|
def residual_plots(df, rep_stats=None, els=['Mg', 'Sr', 'Al', 'Mn', 'Fe', 'Cu', 'Zn', 'B']):
"""
Function for plotting Test User and LAtools data comparison.
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
rep_stats : dict
Reproducibility stats of the reference data produced by
`pairwise_reproducibility`
els : list
list of elements (names only) to plot.
"""
# get corresponding analyte and ratio names
As = []
Rs = []
analytes = [c for c in df.columns if ('/' not in c)]
ratios = [c for c in df.columns if ('/' in c)]
for e in els:
if e == 'Sr':
As.append('88Sr')
elif e == 'Mg':
As.append('24Mg')
else:
As.append([a for a in analytes if e in a][0])
Rs.append([r for r in ratios if e in r][0])
fig, axs = plt.subplots(len(els), 2, figsize=(5, len(els) * 2))
for i, (e, a) in enumerate(zip(Rs, As)):
lax, hax = axs[i]
x = df.loc[:, e].values
yl = df.loc[:, a].values
c = element_colour(fmt_el(a))
u = 'mmol/mol'
# calculate residuals
rl = yl - x
# plot residuals
lax.scatter(x, rl, c=c, s=15, lw=0.5, edgecolor='k', alpha=0.5)
# plot PDFs
rl = rl[~np.isnan(rl)]
lims = np.percentile(rl, [99, 1])
lims += lims.ptp() * np.array((-1.25, 1.25))
bins = np.linspace(*lims, 100)
kdl = stats.gaussian_kde(rl, .4)
hax.fill_betweenx(bins, kdl(bins), facecolor=c, alpha=0.7, edgecolor='k', lw=0.5, label='LAtools')
hax.set_xlim([0, hax.get_xlim()[-1]])
# axis labels, annotations and limits
lax.set_ylabel(e + ' ('+ u + ')')
lax.text(.02,.02,fmt_RSS(rl), fontsize=8,
ha='left', va='bottom', transform=lax.transAxes)
xlim = np.percentile(x[~np.isnan(x)], [0, 98])
lax.set_xlim(xlim)
for ax in axs[i]:
ax.set_ylim(lims)
# zero line and 2SD precision
ax.axhline(0, c='k', ls='dashed', alpha=0.6)
if rep_stats is not None:
ax.axhspan(-rep_stats[e][0] * 2, rep_stats[e][0] * 2, color=(0,0,0,0.2), zorder=-1)
if not ax.is_first_col():
ax.set_yticklabels([])
if ax.is_last_row():
hax.set_xlabel('Density')
lax.set_xlabel('Iolite User')
if ax.is_first_row():
lax.set_title('LAtools', loc='left')
fig.tight_layout()
return fig, axs
|
[
"Function",
"for",
"plotting",
"Test",
"User",
"and",
"LAtools",
"data",
"comparison",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/Supplement/comparison_tools/plots_1sample.py#L94-L178
|
[
"def",
"residual_plots",
"(",
"df",
",",
"rep_stats",
"=",
"None",
",",
"els",
"=",
"[",
"'Mg'",
",",
"'Sr'",
",",
"'Al'",
",",
"'Mn'",
",",
"'Fe'",
",",
"'Cu'",
",",
"'Zn'",
",",
"'B'",
"]",
")",
":",
"# get corresponding analyte and ratio names",
"As",
"=",
"[",
"]",
"Rs",
"=",
"[",
"]",
"analytes",
"=",
"[",
"c",
"for",
"c",
"in",
"df",
".",
"columns",
"if",
"(",
"'/'",
"not",
"in",
"c",
")",
"]",
"ratios",
"=",
"[",
"c",
"for",
"c",
"in",
"df",
".",
"columns",
"if",
"(",
"'/'",
"in",
"c",
")",
"]",
"for",
"e",
"in",
"els",
":",
"if",
"e",
"==",
"'Sr'",
":",
"As",
".",
"append",
"(",
"'88Sr'",
")",
"elif",
"e",
"==",
"'Mg'",
":",
"As",
".",
"append",
"(",
"'24Mg'",
")",
"else",
":",
"As",
".",
"append",
"(",
"[",
"a",
"for",
"a",
"in",
"analytes",
"if",
"e",
"in",
"a",
"]",
"[",
"0",
"]",
")",
"Rs",
".",
"append",
"(",
"[",
"r",
"for",
"r",
"in",
"ratios",
"if",
"e",
"in",
"r",
"]",
"[",
"0",
"]",
")",
"fig",
",",
"axs",
"=",
"plt",
".",
"subplots",
"(",
"len",
"(",
"els",
")",
",",
"2",
",",
"figsize",
"=",
"(",
"5",
",",
"len",
"(",
"els",
")",
"*",
"2",
")",
")",
"for",
"i",
",",
"(",
"e",
",",
"a",
")",
"in",
"enumerate",
"(",
"zip",
"(",
"Rs",
",",
"As",
")",
")",
":",
"lax",
",",
"hax",
"=",
"axs",
"[",
"i",
"]",
"x",
"=",
"df",
".",
"loc",
"[",
":",
",",
"e",
"]",
".",
"values",
"yl",
"=",
"df",
".",
"loc",
"[",
":",
",",
"a",
"]",
".",
"values",
"c",
"=",
"element_colour",
"(",
"fmt_el",
"(",
"a",
")",
")",
"u",
"=",
"'mmol/mol'",
"# calculate residuals",
"rl",
"=",
"yl",
"-",
"x",
"# plot residuals",
"lax",
".",
"scatter",
"(",
"x",
",",
"rl",
",",
"c",
"=",
"c",
",",
"s",
"=",
"15",
",",
"lw",
"=",
"0.5",
",",
"edgecolor",
"=",
"'k'",
",",
"alpha",
"=",
"0.5",
")",
"# plot PDFs",
"rl",
"=",
"rl",
"[",
"~",
"np",
".",
"isnan",
"(",
"rl",
")",
"]",
"lims",
"=",
"np",
".",
"percentile",
"(",
"rl",
",",
"[",
"99",
",",
"1",
"]",
")",
"lims",
"+=",
"lims",
".",
"ptp",
"(",
")",
"*",
"np",
".",
"array",
"(",
"(",
"-",
"1.25",
",",
"1.25",
")",
")",
"bins",
"=",
"np",
".",
"linspace",
"(",
"*",
"lims",
",",
"100",
")",
"kdl",
"=",
"stats",
".",
"gaussian_kde",
"(",
"rl",
",",
".4",
")",
"hax",
".",
"fill_betweenx",
"(",
"bins",
",",
"kdl",
"(",
"bins",
")",
",",
"facecolor",
"=",
"c",
",",
"alpha",
"=",
"0.7",
",",
"edgecolor",
"=",
"'k'",
",",
"lw",
"=",
"0.5",
",",
"label",
"=",
"'LAtools'",
")",
"hax",
".",
"set_xlim",
"(",
"[",
"0",
",",
"hax",
".",
"get_xlim",
"(",
")",
"[",
"-",
"1",
"]",
"]",
")",
"# axis labels, annotations and limits",
"lax",
".",
"set_ylabel",
"(",
"e",
"+",
"' ('",
"+",
"u",
"+",
"')'",
")",
"lax",
".",
"text",
"(",
".02",
",",
".02",
",",
"fmt_RSS",
"(",
"rl",
")",
",",
"fontsize",
"=",
"8",
",",
"ha",
"=",
"'left'",
",",
"va",
"=",
"'bottom'",
",",
"transform",
"=",
"lax",
".",
"transAxes",
")",
"xlim",
"=",
"np",
".",
"percentile",
"(",
"x",
"[",
"~",
"np",
".",
"isnan",
"(",
"x",
")",
"]",
",",
"[",
"0",
",",
"98",
"]",
")",
"lax",
".",
"set_xlim",
"(",
"xlim",
")",
"for",
"ax",
"in",
"axs",
"[",
"i",
"]",
":",
"ax",
".",
"set_ylim",
"(",
"lims",
")",
"# zero line and 2SD precision",
"ax",
".",
"axhline",
"(",
"0",
",",
"c",
"=",
"'k'",
",",
"ls",
"=",
"'dashed'",
",",
"alpha",
"=",
"0.6",
")",
"if",
"rep_stats",
"is",
"not",
"None",
":",
"ax",
".",
"axhspan",
"(",
"-",
"rep_stats",
"[",
"e",
"]",
"[",
"0",
"]",
"*",
"2",
",",
"rep_stats",
"[",
"e",
"]",
"[",
"0",
"]",
"*",
"2",
",",
"color",
"=",
"(",
"0",
",",
"0",
",",
"0",
",",
"0.2",
")",
",",
"zorder",
"=",
"-",
"1",
")",
"if",
"not",
"ax",
".",
"is_first_col",
"(",
")",
":",
"ax",
".",
"set_yticklabels",
"(",
"[",
"]",
")",
"if",
"ax",
".",
"is_last_row",
"(",
")",
":",
"hax",
".",
"set_xlabel",
"(",
"'Density'",
")",
"lax",
".",
"set_xlabel",
"(",
"'Iolite User'",
")",
"if",
"ax",
".",
"is_first_row",
"(",
")",
":",
"lax",
".",
"set_title",
"(",
"'LAtools'",
",",
"loc",
"=",
"'left'",
")",
"fig",
".",
"tight_layout",
"(",
")",
"return",
"fig",
",",
"axs"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
comparison_stats
|
Compute comparison stats for test and LAtools data.
Population-level similarity assessed by a Kolmogorov-Smirnov test.
Individual similarity assessed by a pairwise Wilcoxon signed rank test.
Trends in residuals assessed by regression analysis, where significance of
the slope and intercept is determined by t-tests (both relative to zero).
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
els : list
list of elements (names only) to plot.
Returns
-------
pandas.DataFrame
|
Supplement/comparison_tools/stats_zircon.py
|
def comparison_stats(df, els=None):
"""
Compute comparison stats for test and LAtools data.
Population-level similarity assessed by a Kolmogorov-Smirnov test.
Individual similarity assessed by a pairwise Wilcoxon signed rank test.
Trends in residuals assessed by regression analysis, where significance of
the slope and intercept is determined by t-tests (both relative to zero).
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
els : list
list of elements (names only) to plot.
Returns
-------
pandas.DataFrame
"""
if els is None:
els = ['Li', 'Mg', 'Al', 'P', 'Ti', 'Y', 'La', 'Ce', 'Pr', 'Nd', 'Sm',
'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf', 'Pb', 'Th',
'U']
yl_stats = []
for i, e in enumerate(els):
x = df.loc[:, e + '_rd'].values
yl = df.loc[:, e + '_la'].values
yl_stats.append(summary_stats(x, yl, e))
yl_stats = pd.concat(yl_stats).T
return yl_stats.T
|
def comparison_stats(df, els=None):
"""
Compute comparison stats for test and LAtools data.
Population-level similarity assessed by a Kolmogorov-Smirnov test.
Individual similarity assessed by a pairwise Wilcoxon signed rank test.
Trends in residuals assessed by regression analysis, where significance of
the slope and intercept is determined by t-tests (both relative to zero).
Parameters
----------
df : pandas.DataFrame
A dataframe containing reference ('X/Ca_r'), test user
('X/Ca_t') and LAtools ('X123') data.
els : list
list of elements (names only) to plot.
Returns
-------
pandas.DataFrame
"""
if els is None:
els = ['Li', 'Mg', 'Al', 'P', 'Ti', 'Y', 'La', 'Ce', 'Pr', 'Nd', 'Sm',
'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf', 'Pb', 'Th',
'U']
yl_stats = []
for i, e in enumerate(els):
x = df.loc[:, e + '_rd'].values
yl = df.loc[:, e + '_la'].values
yl_stats.append(summary_stats(x, yl, e))
yl_stats = pd.concat(yl_stats).T
return yl_stats.T
|
[
"Compute",
"comparison",
"stats",
"for",
"test",
"and",
"LAtools",
"data",
".",
"Population",
"-",
"level",
"similarity",
"assessed",
"by",
"a",
"Kolmogorov",
"-",
"Smirnov",
"test",
".",
"Individual",
"similarity",
"assessed",
"by",
"a",
"pairwise",
"Wilcoxon",
"signed",
"rank",
"test",
".",
"Trends",
"in",
"residuals",
"assessed",
"by",
"regression",
"analysis",
"where",
"significance",
"of",
"the",
"slope",
"and",
"intercept",
"is",
"determined",
"by",
"t",
"-",
"tests",
"(",
"both",
"relative",
"to",
"zero",
")",
".",
"Parameters",
"----------",
"df",
":",
"pandas",
".",
"DataFrame",
"A",
"dataframe",
"containing",
"reference",
"(",
"X",
"/",
"Ca_r",
")",
"test",
"user",
"(",
"X",
"/",
"Ca_t",
")",
"and",
"LAtools",
"(",
"X123",
")",
"data",
".",
"els",
":",
"list",
"list",
"of",
"elements",
"(",
"names",
"only",
")",
"to",
"plot",
".",
"Returns",
"-------",
"pandas",
".",
"DataFrame"
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/Supplement/comparison_tools/stats_zircon.py#L8-L47
|
[
"def",
"comparison_stats",
"(",
"df",
",",
"els",
"=",
"None",
")",
":",
"if",
"els",
"is",
"None",
":",
"els",
"=",
"[",
"'Li'",
",",
"'Mg'",
",",
"'Al'",
",",
"'P'",
",",
"'Ti'",
",",
"'Y'",
",",
"'La'",
",",
"'Ce'",
",",
"'Pr'",
",",
"'Nd'",
",",
"'Sm'",
",",
"'Eu'",
",",
"'Gd'",
",",
"'Tb'",
",",
"'Dy'",
",",
"'Ho'",
",",
"'Er'",
",",
"'Tm'",
",",
"'Yb'",
",",
"'Lu'",
",",
"'Hf'",
",",
"'Pb'",
",",
"'Th'",
",",
"'U'",
"]",
"yl_stats",
"=",
"[",
"]",
"for",
"i",
",",
"e",
"in",
"enumerate",
"(",
"els",
")",
":",
"x",
"=",
"df",
".",
"loc",
"[",
":",
",",
"e",
"+",
"'_rd'",
"]",
".",
"values",
"yl",
"=",
"df",
".",
"loc",
"[",
":",
",",
"e",
"+",
"'_la'",
"]",
".",
"values",
"yl_stats",
".",
"append",
"(",
"summary_stats",
"(",
"x",
",",
"yl",
",",
"e",
")",
")",
"yl_stats",
"=",
"pd",
".",
"concat",
"(",
"yl_stats",
")",
".",
"T",
"return",
"yl_stats",
".",
"T"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
_log
|
Function for logging method calls and parameters
|
latools/helpers/logging.py
|
def _log(func):
"""
Function for logging method calls and parameters
"""
@wraps(func)
def wrapper(self, *args, **kwargs):
a = func(self, *args, **kwargs)
self.log.append(func.__name__ + ' :: args={} kwargs={}'.format(args, kwargs))
return a
return wrapper
|
def _log(func):
"""
Function for logging method calls and parameters
"""
@wraps(func)
def wrapper(self, *args, **kwargs):
a = func(self, *args, **kwargs)
self.log.append(func.__name__ + ' :: args={} kwargs={}'.format(args, kwargs))
return a
return wrapper
|
[
"Function",
"for",
"logging",
"method",
"calls",
"and",
"parameters"
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/helpers/logging.py#L6-L15
|
[
"def",
"_log",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"a",
"=",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"log",
".",
"append",
"(",
"func",
".",
"__name__",
"+",
"' :: args={} kwargs={}'",
".",
"format",
"(",
"args",
",",
"kwargs",
")",
")",
"return",
"a",
"return",
"wrapper"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
write_logfile
|
Write and analysis log to a file.
Parameters
----------
log : list
latools.analyse analysis log
header : list
File header lines.
file_name : str
Destination file. If no file extension
specified, uses '.lalog'
Returns
-------
None
|
latools/helpers/logging.py
|
def write_logfile(log, header, file_name):
"""
Write and analysis log to a file.
Parameters
----------
log : list
latools.analyse analysis log
header : list
File header lines.
file_name : str
Destination file. If no file extension
specified, uses '.lalog'
Returns
-------
None
"""
path, ext = os.path.splitext(file_name)
if ext == '':
ext = '.lalog'
with open(path + ext, 'w') as f:
f.write('\n'.join(header))
f.write('\n'.join(log))
return path + ext
|
def write_logfile(log, header, file_name):
"""
Write and analysis log to a file.
Parameters
----------
log : list
latools.analyse analysis log
header : list
File header lines.
file_name : str
Destination file. If no file extension
specified, uses '.lalog'
Returns
-------
None
"""
path, ext = os.path.splitext(file_name)
if ext == '':
ext = '.lalog'
with open(path + ext, 'w') as f:
f.write('\n'.join(header))
f.write('\n'.join(log))
return path + ext
|
[
"Write",
"and",
"analysis",
"log",
"to",
"a",
"file",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/helpers/logging.py#L17-L43
|
[
"def",
"write_logfile",
"(",
"log",
",",
"header",
",",
"file_name",
")",
":",
"path",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"file_name",
")",
"if",
"ext",
"==",
"''",
":",
"ext",
"=",
"'.lalog'",
"with",
"open",
"(",
"path",
"+",
"ext",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"header",
")",
")",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"log",
")",
")",
"return",
"path",
"+",
"ext"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
read_logfile
|
Reads an latools analysis.log file, and returns dicts of arguments.
Parameters
----------
log_file : str
Path to an analysis.log file produced by latools.
Returns
-------
runargs, paths : tuple
Two dictionaries. runargs contains all the arguments required to run each step
of analysis in the form (function_name, {'args': (), 'kwargs': {}}). paths contains
the locations of the data directory and the SRM database used for analysis.
|
latools/helpers/logging.py
|
def read_logfile(log_file):
"""
Reads an latools analysis.log file, and returns dicts of arguments.
Parameters
----------
log_file : str
Path to an analysis.log file produced by latools.
Returns
-------
runargs, paths : tuple
Two dictionaries. runargs contains all the arguments required to run each step
of analysis in the form (function_name, {'args': (), 'kwargs': {}}). paths contains
the locations of the data directory and the SRM database used for analysis.
"""
dirname = os.path.dirname(log_file) + '/'
with open(log_file, 'r') as f:
rlog = f.readlines()
hashind = [i for i, n in enumerate(rlog) if '#' in n]
pathread = re.compile('(.*) :: (.*)\n')
paths = (pathread.match(l).groups() for l in rlog[hashind[0] + 1:hashind[-1]] if pathread.match(l))
paths = {k: os.path.join(dirname, v) for k, v in paths}
# paths = {k: os.path.abspath(v) for k, v in paths}
logread = re.compile('([a-z_]+) :: args=(\(.*\)) kwargs=(\{.*\})')
runargs = []
for line in rlog[hashind[1] + 1:]:
fname, args, kwargs = (logread.match(line).groups())
runargs.append((fname ,{'args': eval(args), 'kwargs': eval(kwargs)}))
if fname == '__init__':
runargs[-1][-1]['kwargs']['config'] = 'REPRODUCE'
runargs[-1][-1]['kwargs']['dataformat'] = None
runargs[-1][-1]['kwargs']['data_folder'] = paths['data_folder']
if 'srm_table' in paths:
runargs[-1][-1]['kwargs']['srm_file'] = paths['srm_table']
return runargs, paths
|
def read_logfile(log_file):
"""
Reads an latools analysis.log file, and returns dicts of arguments.
Parameters
----------
log_file : str
Path to an analysis.log file produced by latools.
Returns
-------
runargs, paths : tuple
Two dictionaries. runargs contains all the arguments required to run each step
of analysis in the form (function_name, {'args': (), 'kwargs': {}}). paths contains
the locations of the data directory and the SRM database used for analysis.
"""
dirname = os.path.dirname(log_file) + '/'
with open(log_file, 'r') as f:
rlog = f.readlines()
hashind = [i for i, n in enumerate(rlog) if '#' in n]
pathread = re.compile('(.*) :: (.*)\n')
paths = (pathread.match(l).groups() for l in rlog[hashind[0] + 1:hashind[-1]] if pathread.match(l))
paths = {k: os.path.join(dirname, v) for k, v in paths}
# paths = {k: os.path.abspath(v) for k, v in paths}
logread = re.compile('([a-z_]+) :: args=(\(.*\)) kwargs=(\{.*\})')
runargs = []
for line in rlog[hashind[1] + 1:]:
fname, args, kwargs = (logread.match(line).groups())
runargs.append((fname ,{'args': eval(args), 'kwargs': eval(kwargs)}))
if fname == '__init__':
runargs[-1][-1]['kwargs']['config'] = 'REPRODUCE'
runargs[-1][-1]['kwargs']['dataformat'] = None
runargs[-1][-1]['kwargs']['data_folder'] = paths['data_folder']
if 'srm_table' in paths:
runargs[-1][-1]['kwargs']['srm_file'] = paths['srm_table']
return runargs, paths
|
[
"Reads",
"an",
"latools",
"analysis",
".",
"log",
"file",
"and",
"returns",
"dicts",
"of",
"arguments",
"."
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/helpers/logging.py#L45-L86
|
[
"def",
"read_logfile",
"(",
"log_file",
")",
":",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"log_file",
")",
"+",
"'/'",
"with",
"open",
"(",
"log_file",
",",
"'r'",
")",
"as",
"f",
":",
"rlog",
"=",
"f",
".",
"readlines",
"(",
")",
"hashind",
"=",
"[",
"i",
"for",
"i",
",",
"n",
"in",
"enumerate",
"(",
"rlog",
")",
"if",
"'#'",
"in",
"n",
"]",
"pathread",
"=",
"re",
".",
"compile",
"(",
"'(.*) :: (.*)\\n'",
")",
"paths",
"=",
"(",
"pathread",
".",
"match",
"(",
"l",
")",
".",
"groups",
"(",
")",
"for",
"l",
"in",
"rlog",
"[",
"hashind",
"[",
"0",
"]",
"+",
"1",
":",
"hashind",
"[",
"-",
"1",
"]",
"]",
"if",
"pathread",
".",
"match",
"(",
"l",
")",
")",
"paths",
"=",
"{",
"k",
":",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"paths",
"}",
"# paths = {k: os.path.abspath(v) for k, v in paths}",
"logread",
"=",
"re",
".",
"compile",
"(",
"'([a-z_]+) :: args=(\\(.*\\)) kwargs=(\\{.*\\})'",
")",
"runargs",
"=",
"[",
"]",
"for",
"line",
"in",
"rlog",
"[",
"hashind",
"[",
"1",
"]",
"+",
"1",
":",
"]",
":",
"fname",
",",
"args",
",",
"kwargs",
"=",
"(",
"logread",
".",
"match",
"(",
"line",
")",
".",
"groups",
"(",
")",
")",
"runargs",
".",
"append",
"(",
"(",
"fname",
",",
"{",
"'args'",
":",
"eval",
"(",
"args",
")",
",",
"'kwargs'",
":",
"eval",
"(",
"kwargs",
")",
"}",
")",
")",
"if",
"fname",
"==",
"'__init__'",
":",
"runargs",
"[",
"-",
"1",
"]",
"[",
"-",
"1",
"]",
"[",
"'kwargs'",
"]",
"[",
"'config'",
"]",
"=",
"'REPRODUCE'",
"runargs",
"[",
"-",
"1",
"]",
"[",
"-",
"1",
"]",
"[",
"'kwargs'",
"]",
"[",
"'dataformat'",
"]",
"=",
"None",
"runargs",
"[",
"-",
"1",
"]",
"[",
"-",
"1",
"]",
"[",
"'kwargs'",
"]",
"[",
"'data_folder'",
"]",
"=",
"paths",
"[",
"'data_folder'",
"]",
"if",
"'srm_table'",
"in",
"paths",
":",
"runargs",
"[",
"-",
"1",
"]",
"[",
"-",
"1",
"]",
"[",
"'kwargs'",
"]",
"[",
"'srm_file'",
"]",
"=",
"paths",
"[",
"'srm_table'",
"]",
"return",
"runargs",
",",
"paths"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
zipdir
|
Compresses the target directory, and saves it to ../name.zip
Parameters
----------
directory : str
Path to the directory you want to compress.
Compressed file will be saved at directory/../name.zip
name : str (default=None)
The name of the resulting zip file. If not specified, the
name of the directory to be compressed is used.
delete : bool
If True, the uncompressed directory is deleted after the zip file
has been created. Defaults to False.
Returns
-------
None
|
latools/helpers/utils.py
|
def zipdir(directory, name=None, delete=False):
"""
Compresses the target directory, and saves it to ../name.zip
Parameters
----------
directory : str
Path to the directory you want to compress.
Compressed file will be saved at directory/../name.zip
name : str (default=None)
The name of the resulting zip file. If not specified, the
name of the directory to be compressed is used.
delete : bool
If True, the uncompressed directory is deleted after the zip file
has been created. Defaults to False.
Returns
-------
None
"""
if not os.path.isdir(directory) or not os.path.exists(directory):
raise ValueError('Please provide a valid directory.')
if name is None:
name = directory.split('/')[-1]
savepath = os.path.join(directory, os.path.pardir)
# create zipfile
with zipfile.ZipFile(os.path.join(savepath, name + '.zip'), 'w', zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(directory):
for f in files:
zipf.write(os.path.join(root, f), os.path.join(root.replace(directory, ''), f))
if delete:
shutil.rmtree(directory)
return None
|
def zipdir(directory, name=None, delete=False):
"""
Compresses the target directory, and saves it to ../name.zip
Parameters
----------
directory : str
Path to the directory you want to compress.
Compressed file will be saved at directory/../name.zip
name : str (default=None)
The name of the resulting zip file. If not specified, the
name of the directory to be compressed is used.
delete : bool
If True, the uncompressed directory is deleted after the zip file
has been created. Defaults to False.
Returns
-------
None
"""
if not os.path.isdir(directory) or not os.path.exists(directory):
raise ValueError('Please provide a valid directory.')
if name is None:
name = directory.split('/')[-1]
savepath = os.path.join(directory, os.path.pardir)
# create zipfile
with zipfile.ZipFile(os.path.join(savepath, name + '.zip'), 'w', zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(directory):
for f in files:
zipf.write(os.path.join(root, f), os.path.join(root.replace(directory, ''), f))
if delete:
shutil.rmtree(directory)
return None
|
[
"Compresses",
"the",
"target",
"directory",
"and",
"saves",
"it",
"to",
"..",
"/",
"name",
".",
"zip"
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/helpers/utils.py#L6-L41
|
[
"def",
"zipdir",
"(",
"directory",
",",
"name",
"=",
"None",
",",
"delete",
"=",
"False",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"directory",
")",
"or",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"directory",
")",
":",
"raise",
"ValueError",
"(",
"'Please provide a valid directory.'",
")",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"directory",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"savepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"os",
".",
"path",
".",
"pardir",
")",
"# create zipfile",
"with",
"zipfile",
".",
"ZipFile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"savepath",
",",
"name",
"+",
"'.zip'",
")",
",",
"'w'",
",",
"zipfile",
".",
"ZIP_DEFLATED",
")",
"as",
"zipf",
":",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"directory",
")",
":",
"for",
"f",
"in",
"files",
":",
"zipf",
".",
"write",
"(",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"f",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"root",
".",
"replace",
"(",
"directory",
",",
"''",
")",
",",
"f",
")",
")",
"if",
"delete",
":",
"shutil",
".",
"rmtree",
"(",
"directory",
")",
"return",
"None"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
extract_zipdir
|
Extract contents of zip file into subfolder in parent directory.
Parameters
----------
zip_file : str
Path to zip file
Returns
-------
str : folder where the zip was extracted
|
latools/helpers/utils.py
|
def extract_zipdir(zip_file):
"""
Extract contents of zip file into subfolder in parent directory.
Parameters
----------
zip_file : str
Path to zip file
Returns
-------
str : folder where the zip was extracted
"""
if not os.path.exists(zip_file):
raise ValueError('{} does not exist'.format(zip_file))
directory = os.path.dirname(zip_file)
filename = os.path.basename(zip_file)
dirpath = os.path.join(directory, filename.replace('.zip', ''))
with zipfile.ZipFile(zip_file, 'r', zipfile.ZIP_DEFLATED) as zipf:
zipf.extractall(dirpath)
return dirpath
|
def extract_zipdir(zip_file):
"""
Extract contents of zip file into subfolder in parent directory.
Parameters
----------
zip_file : str
Path to zip file
Returns
-------
str : folder where the zip was extracted
"""
if not os.path.exists(zip_file):
raise ValueError('{} does not exist'.format(zip_file))
directory = os.path.dirname(zip_file)
filename = os.path.basename(zip_file)
dirpath = os.path.join(directory, filename.replace('.zip', ''))
with zipfile.ZipFile(zip_file, 'r', zipfile.ZIP_DEFLATED) as zipf:
zipf.extractall(dirpath)
return dirpath
|
[
"Extract",
"contents",
"of",
"zip",
"file",
"into",
"subfolder",
"in",
"parent",
"directory",
".",
"Parameters",
"----------",
"zip_file",
":",
"str",
"Path",
"to",
"zip",
"file",
"Returns",
"-------",
"str",
":",
"folder",
"where",
"the",
"zip",
"was",
"extracted"
] |
oscarbranson/latools
|
python
|
https://github.com/oscarbranson/latools/blob/cd25a650cfee318152f234d992708511f7047fbe/latools/helpers/utils.py#L43-L65
|
[
"def",
"extract_zipdir",
"(",
"zip_file",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"zip_file",
")",
":",
"raise",
"ValueError",
"(",
"'{} does not exist'",
".",
"format",
"(",
"zip_file",
")",
")",
"directory",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"zip_file",
")",
"filename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"zip_file",
")",
"dirpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"filename",
".",
"replace",
"(",
"'.zip'",
",",
"''",
")",
")",
"with",
"zipfile",
".",
"ZipFile",
"(",
"zip_file",
",",
"'r'",
",",
"zipfile",
".",
"ZIP_DEFLATED",
")",
"as",
"zipf",
":",
"zipf",
".",
"extractall",
"(",
"dirpath",
")",
"return",
"dirpath"
] |
cd25a650cfee318152f234d992708511f7047fbe
|
test
|
autologin
|
Decorator that will try to login and redo an action before failing.
|
eternalegypt/eternalegypt.py
|
def autologin(function, timeout=TIMEOUT):
"""Decorator that will try to login and redo an action before failing."""
@wraps(function)
async def wrapper(self, *args, **kwargs):
"""Wrap a function with timeout."""
try:
async with async_timeout.timeout(timeout):
return await function(self, *args, **kwargs)
except (asyncio.TimeoutError, ClientError, Error):
pass
_LOGGER.debug("autologin")
try:
async with async_timeout.timeout(timeout):
await self.login()
return await function(self, *args, **kwargs)
except (asyncio.TimeoutError, ClientError, Error):
raise Error(str(function))
return wrapper
|
def autologin(function, timeout=TIMEOUT):
"""Decorator that will try to login and redo an action before failing."""
@wraps(function)
async def wrapper(self, *args, **kwargs):
"""Wrap a function with timeout."""
try:
async with async_timeout.timeout(timeout):
return await function(self, *args, **kwargs)
except (asyncio.TimeoutError, ClientError, Error):
pass
_LOGGER.debug("autologin")
try:
async with async_timeout.timeout(timeout):
await self.login()
return await function(self, *args, **kwargs)
except (asyncio.TimeoutError, ClientError, Error):
raise Error(str(function))
return wrapper
|
[
"Decorator",
"that",
"will",
"try",
"to",
"login",
"and",
"redo",
"an",
"action",
"before",
"failing",
"."
] |
amelchio/eternalegypt
|
python
|
https://github.com/amelchio/eternalegypt/blob/895e0b235ceaf7f61458c620237c3ad397780e98/eternalegypt/eternalegypt.py#L52-L71
|
[
"def",
"autologin",
"(",
"function",
",",
"timeout",
"=",
"TIMEOUT",
")",
":",
"@",
"wraps",
"(",
"function",
")",
"async",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Wrap a function with timeout.\"\"\"",
"try",
":",
"async",
"with",
"async_timeout",
".",
"timeout",
"(",
"timeout",
")",
":",
"return",
"await",
"function",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"(",
"asyncio",
".",
"TimeoutError",
",",
"ClientError",
",",
"Error",
")",
":",
"pass",
"_LOGGER",
".",
"debug",
"(",
"\"autologin\"",
")",
"try",
":",
"async",
"with",
"async_timeout",
".",
"timeout",
"(",
"timeout",
")",
":",
"await",
"self",
".",
"login",
"(",
")",
"return",
"await",
"function",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"(",
"asyncio",
".",
"TimeoutError",
",",
"ClientError",
",",
"Error",
")",
":",
"raise",
"Error",
"(",
"str",
"(",
"function",
")",
")",
"return",
"wrapper"
] |
895e0b235ceaf7f61458c620237c3ad397780e98
|
test
|
get_information
|
Example of printing the inbox.
|
examples/inbox.py
|
async def get_information():
"""Example of printing the inbox."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
result = await modem.information()
for sms in result.sms:
pprint.pprint(sms)
await modem.logout()
await websession.close()
|
async def get_information():
"""Example of printing the inbox."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
result = await modem.information()
for sms in result.sms:
pprint.pprint(sms)
await modem.logout()
await websession.close()
|
[
"Example",
"of",
"printing",
"the",
"inbox",
"."
] |
amelchio/eternalegypt
|
python
|
https://github.com/amelchio/eternalegypt/blob/895e0b235ceaf7f61458c620237c3ad397780e98/examples/inbox.py#L16-L29
|
[
"async",
"def",
"get_information",
"(",
")",
":",
"jar",
"=",
"aiohttp",
".",
"CookieJar",
"(",
"unsafe",
"=",
"True",
")",
"websession",
"=",
"aiohttp",
".",
"ClientSession",
"(",
"cookie_jar",
"=",
"jar",
")",
"modem",
"=",
"eternalegypt",
".",
"Modem",
"(",
"hostname",
"=",
"sys",
".",
"argv",
"[",
"1",
"]",
",",
"websession",
"=",
"websession",
")",
"await",
"modem",
".",
"login",
"(",
"password",
"=",
"sys",
".",
"argv",
"[",
"2",
"]",
")",
"result",
"=",
"await",
"modem",
".",
"information",
"(",
")",
"for",
"sms",
"in",
"result",
".",
"sms",
":",
"pprint",
".",
"pprint",
"(",
"sms",
")",
"await",
"modem",
".",
"logout",
"(",
")",
"await",
"websession",
".",
"close",
"(",
")"
] |
895e0b235ceaf7f61458c620237c3ad397780e98
|
test
|
send_message
|
Example of sending a message.
|
examples/sms.py
|
async def send_message():
"""Example of sending a message."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
await modem.sms(phone=sys.argv[3], message=sys.argv[4])
await modem.logout()
await websession.close()
|
async def send_message():
"""Example of sending a message."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
await modem.sms(phone=sys.argv[3], message=sys.argv[4])
await modem.logout()
await websession.close()
|
[
"Example",
"of",
"sending",
"a",
"message",
"."
] |
amelchio/eternalegypt
|
python
|
https://github.com/amelchio/eternalegypt/blob/895e0b235ceaf7f61458c620237c3ad397780e98/examples/sms.py#L15-L26
|
[
"async",
"def",
"send_message",
"(",
")",
":",
"jar",
"=",
"aiohttp",
".",
"CookieJar",
"(",
"unsafe",
"=",
"True",
")",
"websession",
"=",
"aiohttp",
".",
"ClientSession",
"(",
"cookie_jar",
"=",
"jar",
")",
"modem",
"=",
"eternalegypt",
".",
"Modem",
"(",
"hostname",
"=",
"sys",
".",
"argv",
"[",
"1",
"]",
",",
"websession",
"=",
"websession",
")",
"await",
"modem",
".",
"login",
"(",
"password",
"=",
"sys",
".",
"argv",
"[",
"2",
"]",
")",
"await",
"modem",
".",
"sms",
"(",
"phone",
"=",
"sys",
".",
"argv",
"[",
"3",
"]",
",",
"message",
"=",
"sys",
".",
"argv",
"[",
"4",
"]",
")",
"await",
"modem",
".",
"logout",
"(",
")",
"await",
"websession",
".",
"close",
"(",
")"
] |
895e0b235ceaf7f61458c620237c3ad397780e98
|
test
|
get_information
|
Example of printing the current upstream.
|
examples/status.py
|
async def get_information():
"""Example of printing the current upstream."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
try:
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
result = await modem.information()
print("upstream: {}".format(result.upstream))
print("serial_number: {}".format(result.serial_number))
print("wire_connected: {}".format(result.wire_connected))
print("mobile_connected: {}".format(result.mobile_connected))
print("connection_text: {}".format(result.connection_text))
print("connection_type: {}".format(result.connection_type))
print("current_nw_service_type: {}".format(result.current_nw_service_type))
print("current_ps_service_type: {}".format(result.current_ps_service_type))
print("register_network_display: {}".format(result.register_network_display))
print("roaming: {}".format(result.roaming))
print("radio_quality: {}".format(result.radio_quality))
print("rx_level: {}".format(result.rx_level))
print("tx_level: {}".format(result.tx_level))
print("current_band: {}".format(result.current_band))
print("cell_id: {}".format(result.cell_id))
await modem.logout()
except eternalegypt.Error:
print("Could not login")
await websession.close()
|
async def get_information():
"""Example of printing the current upstream."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
try:
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
result = await modem.information()
print("upstream: {}".format(result.upstream))
print("serial_number: {}".format(result.serial_number))
print("wire_connected: {}".format(result.wire_connected))
print("mobile_connected: {}".format(result.mobile_connected))
print("connection_text: {}".format(result.connection_text))
print("connection_type: {}".format(result.connection_type))
print("current_nw_service_type: {}".format(result.current_nw_service_type))
print("current_ps_service_type: {}".format(result.current_ps_service_type))
print("register_network_display: {}".format(result.register_network_display))
print("roaming: {}".format(result.roaming))
print("radio_quality: {}".format(result.radio_quality))
print("rx_level: {}".format(result.rx_level))
print("tx_level: {}".format(result.tx_level))
print("current_band: {}".format(result.current_band))
print("cell_id: {}".format(result.cell_id))
await modem.logout()
except eternalegypt.Error:
print("Could not login")
await websession.close()
|
[
"Example",
"of",
"printing",
"the",
"current",
"upstream",
"."
] |
amelchio/eternalegypt
|
python
|
https://github.com/amelchio/eternalegypt/blob/895e0b235ceaf7f61458c620237c3ad397780e98/examples/status.py#L11-L41
|
[
"async",
"def",
"get_information",
"(",
")",
":",
"jar",
"=",
"aiohttp",
".",
"CookieJar",
"(",
"unsafe",
"=",
"True",
")",
"websession",
"=",
"aiohttp",
".",
"ClientSession",
"(",
"cookie_jar",
"=",
"jar",
")",
"try",
":",
"modem",
"=",
"eternalegypt",
".",
"Modem",
"(",
"hostname",
"=",
"sys",
".",
"argv",
"[",
"1",
"]",
",",
"websession",
"=",
"websession",
")",
"await",
"modem",
".",
"login",
"(",
"password",
"=",
"sys",
".",
"argv",
"[",
"2",
"]",
")",
"result",
"=",
"await",
"modem",
".",
"information",
"(",
")",
"print",
"(",
"\"upstream: {}\"",
".",
"format",
"(",
"result",
".",
"upstream",
")",
")",
"print",
"(",
"\"serial_number: {}\"",
".",
"format",
"(",
"result",
".",
"serial_number",
")",
")",
"print",
"(",
"\"wire_connected: {}\"",
".",
"format",
"(",
"result",
".",
"wire_connected",
")",
")",
"print",
"(",
"\"mobile_connected: {}\"",
".",
"format",
"(",
"result",
".",
"mobile_connected",
")",
")",
"print",
"(",
"\"connection_text: {}\"",
".",
"format",
"(",
"result",
".",
"connection_text",
")",
")",
"print",
"(",
"\"connection_type: {}\"",
".",
"format",
"(",
"result",
".",
"connection_type",
")",
")",
"print",
"(",
"\"current_nw_service_type: {}\"",
".",
"format",
"(",
"result",
".",
"current_nw_service_type",
")",
")",
"print",
"(",
"\"current_ps_service_type: {}\"",
".",
"format",
"(",
"result",
".",
"current_ps_service_type",
")",
")",
"print",
"(",
"\"register_network_display: {}\"",
".",
"format",
"(",
"result",
".",
"register_network_display",
")",
")",
"print",
"(",
"\"roaming: {}\"",
".",
"format",
"(",
"result",
".",
"roaming",
")",
")",
"print",
"(",
"\"radio_quality: {}\"",
".",
"format",
"(",
"result",
".",
"radio_quality",
")",
")",
"print",
"(",
"\"rx_level: {}\"",
".",
"format",
"(",
"result",
".",
"rx_level",
")",
")",
"print",
"(",
"\"tx_level: {}\"",
".",
"format",
"(",
"result",
".",
"tx_level",
")",
")",
"print",
"(",
"\"current_band: {}\"",
".",
"format",
"(",
"result",
".",
"current_band",
")",
")",
"print",
"(",
"\"cell_id: {}\"",
".",
"format",
"(",
"result",
".",
"cell_id",
")",
")",
"await",
"modem",
".",
"logout",
"(",
")",
"except",
"eternalegypt",
".",
"Error",
":",
"print",
"(",
"\"Could not login\"",
")",
"await",
"websession",
".",
"close",
"(",
")"
] |
895e0b235ceaf7f61458c620237c3ad397780e98
|
test
|
set_failover_mode
|
Example of printing the current upstream.
|
examples/failover.py
|
async def set_failover_mode(mode):
"""Example of printing the current upstream."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
try:
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
await modem.set_failover_mode(mode)
await modem.logout()
except eternalegypt.Error:
print("Could not login")
await websession.close()
|
async def set_failover_mode(mode):
"""Example of printing the current upstream."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
try:
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
await modem.set_failover_mode(mode)
await modem.logout()
except eternalegypt.Error:
print("Could not login")
await websession.close()
|
[
"Example",
"of",
"printing",
"the",
"current",
"upstream",
"."
] |
amelchio/eternalegypt
|
python
|
https://github.com/amelchio/eternalegypt/blob/895e0b235ceaf7f61458c620237c3ad397780e98/examples/failover.py#L12-L27
|
[
"async",
"def",
"set_failover_mode",
"(",
"mode",
")",
":",
"jar",
"=",
"aiohttp",
".",
"CookieJar",
"(",
"unsafe",
"=",
"True",
")",
"websession",
"=",
"aiohttp",
".",
"ClientSession",
"(",
"cookie_jar",
"=",
"jar",
")",
"try",
":",
"modem",
"=",
"eternalegypt",
".",
"Modem",
"(",
"hostname",
"=",
"sys",
".",
"argv",
"[",
"1",
"]",
",",
"websession",
"=",
"websession",
")",
"await",
"modem",
".",
"login",
"(",
"password",
"=",
"sys",
".",
"argv",
"[",
"2",
"]",
")",
"await",
"modem",
".",
"set_failover_mode",
"(",
"mode",
")",
"await",
"modem",
".",
"logout",
"(",
")",
"except",
"eternalegypt",
".",
"Error",
":",
"print",
"(",
"\"Could not login\"",
")",
"await",
"websession",
".",
"close",
"(",
")"
] |
895e0b235ceaf7f61458c620237c3ad397780e98
|
test
|
parse
|
Parse a file-like object or string.
Args:
file_or_string (file, str): File-like object or string.
Returns:
ParseResults: instance of pyparsing parse results.
|
mysqlparse/__init__.py
|
def parse(file_or_string):
"""Parse a file-like object or string.
Args:
file_or_string (file, str): File-like object or string.
Returns:
ParseResults: instance of pyparsing parse results.
"""
from mysqlparse.grammar.sql_file import sql_file_syntax
if hasattr(file_or_string, 'read') and hasattr(file_or_string.read, '__call__'):
return sql_file_syntax.parseString(file_or_string.read())
elif isinstance(file_or_string, six.string_types):
return sql_file_syntax.parseString(file_or_string)
else:
raise TypeError("Expected file-like or string object, but got '{type_name}' instead.".format(
type_name=type(file_or_string).__name__,
))
|
def parse(file_or_string):
"""Parse a file-like object or string.
Args:
file_or_string (file, str): File-like object or string.
Returns:
ParseResults: instance of pyparsing parse results.
"""
from mysqlparse.grammar.sql_file import sql_file_syntax
if hasattr(file_or_string, 'read') and hasattr(file_or_string.read, '__call__'):
return sql_file_syntax.parseString(file_or_string.read())
elif isinstance(file_or_string, six.string_types):
return sql_file_syntax.parseString(file_or_string)
else:
raise TypeError("Expected file-like or string object, but got '{type_name}' instead.".format(
type_name=type(file_or_string).__name__,
))
|
[
"Parse",
"a",
"file",
"-",
"like",
"object",
"or",
"string",
"."
] |
seporaitis/mysqlparse
|
python
|
https://github.com/seporaitis/mysqlparse/blob/c327c5a1d8d6d143b67f789be7dc80357a1a5556/mysqlparse/__init__.py#L11-L29
|
[
"def",
"parse",
"(",
"file_or_string",
")",
":",
"from",
"mysqlparse",
".",
"grammar",
".",
"sql_file",
"import",
"sql_file_syntax",
"if",
"hasattr",
"(",
"file_or_string",
",",
"'read'",
")",
"and",
"hasattr",
"(",
"file_or_string",
".",
"read",
",",
"'__call__'",
")",
":",
"return",
"sql_file_syntax",
".",
"parseString",
"(",
"file_or_string",
".",
"read",
"(",
")",
")",
"elif",
"isinstance",
"(",
"file_or_string",
",",
"six",
".",
"string_types",
")",
":",
"return",
"sql_file_syntax",
".",
"parseString",
"(",
"file_or_string",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"Expected file-like or string object, but got '{type_name}' instead.\"",
".",
"format",
"(",
"type_name",
"=",
"type",
"(",
"file_or_string",
")",
".",
"__name__",
",",
")",
")"
] |
c327c5a1d8d6d143b67f789be7dc80357a1a5556
|
test
|
nbviewer_link
|
Return the link to the Jupyter nbviewer for the given notebook url
|
sphinx_nbexamples/__init__.py
|
def nbviewer_link(url):
"""Return the link to the Jupyter nbviewer for the given notebook url"""
if six.PY2:
from urlparse import urlparse as urlsplit
else:
from urllib.parse import urlsplit
info = urlsplit(url)
domain = info.netloc
url_type = 'github' if domain == 'github.com' else 'url'
return 'https://nbviewer.jupyter.org/%s%s' % (url_type, info.path)
|
def nbviewer_link(url):
"""Return the link to the Jupyter nbviewer for the given notebook url"""
if six.PY2:
from urlparse import urlparse as urlsplit
else:
from urllib.parse import urlsplit
info = urlsplit(url)
domain = info.netloc
url_type = 'github' if domain == 'github.com' else 'url'
return 'https://nbviewer.jupyter.org/%s%s' % (url_type, info.path)
|
[
"Return",
"the",
"link",
"to",
"the",
"Jupyter",
"nbviewer",
"for",
"the",
"given",
"notebook",
"url"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L88-L97
|
[
"def",
"nbviewer_link",
"(",
"url",
")",
":",
"if",
"six",
".",
"PY2",
":",
"from",
"urlparse",
"import",
"urlparse",
"as",
"urlsplit",
"else",
":",
"from",
"urllib",
".",
"parse",
"import",
"urlsplit",
"info",
"=",
"urlsplit",
"(",
"url",
")",
"domain",
"=",
"info",
".",
"netloc",
"url_type",
"=",
"'github'",
"if",
"domain",
"==",
"'github.com'",
"else",
"'url'",
"return",
"'https://nbviewer.jupyter.org/%s%s'",
"%",
"(",
"url_type",
",",
"info",
".",
"path",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.thumbnail_div
|
The string for creating the thumbnail of this example
|
sphinx_nbexamples/__init__.py
|
def thumbnail_div(self):
"""The string for creating the thumbnail of this example"""
return self.THUMBNAIL_TEMPLATE.format(
snippet=self.get_description()[1], thumbnail=self.thumb_file,
ref_name=self.reference)
|
def thumbnail_div(self):
"""The string for creating the thumbnail of this example"""
return self.THUMBNAIL_TEMPLATE.format(
snippet=self.get_description()[1], thumbnail=self.thumb_file,
ref_name=self.reference)
|
[
"The",
"string",
"for",
"creating",
"the",
"thumbnail",
"of",
"this",
"example"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L201-L205
|
[
"def",
"thumbnail_div",
"(",
"self",
")",
":",
"return",
"self",
".",
"THUMBNAIL_TEMPLATE",
".",
"format",
"(",
"snippet",
"=",
"self",
".",
"get_description",
"(",
")",
"[",
"1",
"]",
",",
"thumbnail",
"=",
"self",
".",
"thumb_file",
",",
"ref_name",
"=",
"self",
".",
"reference",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.code_div
|
The string for creating a code example for the gallery
|
sphinx_nbexamples/__init__.py
|
def code_div(self):
"""The string for creating a code example for the gallery"""
code_example = self.code_example
if code_example is None:
return None
return self.CODE_TEMPLATE.format(
snippet=self.get_description()[1], code=code_example,
ref_name=self.reference)
|
def code_div(self):
"""The string for creating a code example for the gallery"""
code_example = self.code_example
if code_example is None:
return None
return self.CODE_TEMPLATE.format(
snippet=self.get_description()[1], code=code_example,
ref_name=self.reference)
|
[
"The",
"string",
"for",
"creating",
"a",
"code",
"example",
"for",
"the",
"gallery"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L208-L215
|
[
"def",
"code_div",
"(",
"self",
")",
":",
"code_example",
"=",
"self",
".",
"code_example",
"if",
"code_example",
"is",
"None",
":",
"return",
"None",
"return",
"self",
".",
"CODE_TEMPLATE",
".",
"format",
"(",
"snippet",
"=",
"self",
".",
"get_description",
"(",
")",
"[",
"1",
"]",
",",
"code",
"=",
"code_example",
",",
"ref_name",
"=",
"self",
".",
"reference",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.code_example
|
The code example out of the notebook metadata
|
sphinx_nbexamples/__init__.py
|
def code_example(self):
"""The code example out of the notebook metadata"""
if self._code_example is not None:
return self._code_example
return getattr(self.nb.metadata, 'code_example', None)
|
def code_example(self):
"""The code example out of the notebook metadata"""
if self._code_example is not None:
return self._code_example
return getattr(self.nb.metadata, 'code_example', None)
|
[
"The",
"code",
"example",
"out",
"of",
"the",
"notebook",
"metadata"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L218-L222
|
[
"def",
"code_example",
"(",
"self",
")",
":",
"if",
"self",
".",
"_code_example",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_code_example",
"return",
"getattr",
"(",
"self",
".",
"nb",
".",
"metadata",
",",
"'code_example'",
",",
"None",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.supplementary_files
|
The supplementary files of this notebook
|
sphinx_nbexamples/__init__.py
|
def supplementary_files(self):
"""The supplementary files of this notebook"""
if self._supplementary_files is not None:
return self._supplementary_files
return getattr(self.nb.metadata, 'supplementary_files', None)
|
def supplementary_files(self):
"""The supplementary files of this notebook"""
if self._supplementary_files is not None:
return self._supplementary_files
return getattr(self.nb.metadata, 'supplementary_files', None)
|
[
"The",
"supplementary",
"files",
"of",
"this",
"notebook"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L225-L229
|
[
"def",
"supplementary_files",
"(",
"self",
")",
":",
"if",
"self",
".",
"_supplementary_files",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_supplementary_files",
"return",
"getattr",
"(",
"self",
".",
"nb",
".",
"metadata",
",",
"'supplementary_files'",
",",
"None",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.other_supplementary_files
|
The supplementary files of this notebook
|
sphinx_nbexamples/__init__.py
|
def other_supplementary_files(self):
"""The supplementary files of this notebook"""
if self._other_supplementary_files is not None:
return self._other_supplementary_files
return getattr(self.nb.metadata, 'other_supplementary_files', None)
|
def other_supplementary_files(self):
"""The supplementary files of this notebook"""
if self._other_supplementary_files is not None:
return self._other_supplementary_files
return getattr(self.nb.metadata, 'other_supplementary_files', None)
|
[
"The",
"supplementary",
"files",
"of",
"this",
"notebook"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L232-L236
|
[
"def",
"other_supplementary_files",
"(",
"self",
")",
":",
"if",
"self",
".",
"_other_supplementary_files",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_other_supplementary_files",
"return",
"getattr",
"(",
"self",
".",
"nb",
".",
"metadata",
",",
"'other_supplementary_files'",
",",
"None",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.url
|
The url on jupyter nbviewer for this notebook or None if unknown
|
sphinx_nbexamples/__init__.py
|
def url(self):
"""The url on jupyter nbviewer for this notebook or None if unknown"""
if self._url is not None:
url = self._url
else:
url = getattr(self.nb.metadata, 'url', None)
if url is not None:
return nbviewer_link(url)
|
def url(self):
"""The url on jupyter nbviewer for this notebook or None if unknown"""
if self._url is not None:
url = self._url
else:
url = getattr(self.nb.metadata, 'url', None)
if url is not None:
return nbviewer_link(url)
|
[
"The",
"url",
"on",
"jupyter",
"nbviewer",
"for",
"this",
"notebook",
"or",
"None",
"if",
"unknown"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L244-L251
|
[
"def",
"url",
"(",
"self",
")",
":",
"if",
"self",
".",
"_url",
"is",
"not",
"None",
":",
"url",
"=",
"self",
".",
"_url",
"else",
":",
"url",
"=",
"getattr",
"(",
"self",
".",
"nb",
".",
"metadata",
",",
"'url'",
",",
"None",
")",
"if",
"url",
"is",
"not",
"None",
":",
"return",
"nbviewer_link",
"(",
"url",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.get_out_file
|
get the output file with the specified `ending`
|
sphinx_nbexamples/__init__.py
|
def get_out_file(self, ending='rst'):
"""get the output file with the specified `ending`"""
return os.path.splitext(self.outfile)[0] + os.path.extsep + ending
|
def get_out_file(self, ending='rst'):
"""get the output file with the specified `ending`"""
return os.path.splitext(self.outfile)[0] + os.path.extsep + ending
|
[
"get",
"the",
"output",
"file",
"with",
"the",
"specified",
"ending"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L313-L315
|
[
"def",
"get_out_file",
"(",
"self",
",",
"ending",
"=",
"'rst'",
")",
":",
"return",
"os",
".",
"path",
".",
"splitext",
"(",
"self",
".",
"outfile",
")",
"[",
"0",
"]",
"+",
"os",
".",
"path",
".",
"extsep",
"+",
"ending"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.process_notebook
|
Process the notebook and create all the pictures and files
This method runs the notebook using the :mod:`nbconvert` and
:mod:`nbformat` modules. It creates the :attr:`outfile` notebook,
a python and a rst file
|
sphinx_nbexamples/__init__.py
|
def process_notebook(self, disable_warnings=True):
"""Process the notebook and create all the pictures and files
This method runs the notebook using the :mod:`nbconvert` and
:mod:`nbformat` modules. It creates the :attr:`outfile` notebook,
a python and a rst file"""
infile = self.infile
outfile = self.outfile
in_dir = os.path.dirname(infile) + os.path.sep
odir = os.path.dirname(outfile) + os.path.sep
create_dirs(os.path.join(odir, 'images'))
ep = nbconvert.preprocessors.ExecutePreprocessor(
timeout=300)
cp = nbconvert.preprocessors.ClearOutputPreprocessor(
timeout=300)
self.nb = nb = nbformat.read(infile, nbformat.current_nbformat)
# disable warnings in the rst file
if disable_warnings:
for i, cell in enumerate(nb.cells):
if cell['cell_type'] == 'code':
cell = cell.copy()
break
cell = cell.copy()
cell.source = """
import logging
logging.captureWarnings(True)
logging.getLogger('py.warnings').setLevel(logging.ERROR)
"""
nb.cells.insert(i, cell)
# write and process rst_file
if self.preprocess:
t = dt.datetime.now()
logger.info('Processing %s', self.infile)
try:
ep.preprocess(nb, {'metadata': {'path': in_dir}})
except nbconvert.preprocessors.execute.CellExecutionError:
logger.critical(
'Error while processing %s!', self.infile, exc_info=True)
else:
logger.info('Done. Seconds needed: %i',
(dt.datetime.now() - t).seconds)
if disable_warnings:
nb.cells.pop(i)
self.py_file = self.get_out_file('py')
if self.remove_tags:
tp = nbconvert.preprocessors.TagRemovePreprocessor(timeout=300)
for key, val in self.tag_options.items():
setattr(tp, key, set(val))
nb4rst = deepcopy(nb)
tp.preprocess(nb4rst, {'metadata': {'path': in_dir}})
else:
nb4rst = nb
self.create_rst(nb4rst, in_dir, odir)
if self.clear:
cp.preprocess(nb, {'metadata': {'path': in_dir}})
# write notebook file
nbformat.write(nb, outfile)
self.create_py(nb)
|
def process_notebook(self, disable_warnings=True):
"""Process the notebook and create all the pictures and files
This method runs the notebook using the :mod:`nbconvert` and
:mod:`nbformat` modules. It creates the :attr:`outfile` notebook,
a python and a rst file"""
infile = self.infile
outfile = self.outfile
in_dir = os.path.dirname(infile) + os.path.sep
odir = os.path.dirname(outfile) + os.path.sep
create_dirs(os.path.join(odir, 'images'))
ep = nbconvert.preprocessors.ExecutePreprocessor(
timeout=300)
cp = nbconvert.preprocessors.ClearOutputPreprocessor(
timeout=300)
self.nb = nb = nbformat.read(infile, nbformat.current_nbformat)
# disable warnings in the rst file
if disable_warnings:
for i, cell in enumerate(nb.cells):
if cell['cell_type'] == 'code':
cell = cell.copy()
break
cell = cell.copy()
cell.source = """
import logging
logging.captureWarnings(True)
logging.getLogger('py.warnings').setLevel(logging.ERROR)
"""
nb.cells.insert(i, cell)
# write and process rst_file
if self.preprocess:
t = dt.datetime.now()
logger.info('Processing %s', self.infile)
try:
ep.preprocess(nb, {'metadata': {'path': in_dir}})
except nbconvert.preprocessors.execute.CellExecutionError:
logger.critical(
'Error while processing %s!', self.infile, exc_info=True)
else:
logger.info('Done. Seconds needed: %i',
(dt.datetime.now() - t).seconds)
if disable_warnings:
nb.cells.pop(i)
self.py_file = self.get_out_file('py')
if self.remove_tags:
tp = nbconvert.preprocessors.TagRemovePreprocessor(timeout=300)
for key, val in self.tag_options.items():
setattr(tp, key, set(val))
nb4rst = deepcopy(nb)
tp.preprocess(nb4rst, {'metadata': {'path': in_dir}})
else:
nb4rst = nb
self.create_rst(nb4rst, in_dir, odir)
if self.clear:
cp.preprocess(nb, {'metadata': {'path': in_dir}})
# write notebook file
nbformat.write(nb, outfile)
self.create_py(nb)
|
[
"Process",
"the",
"notebook",
"and",
"create",
"all",
"the",
"pictures",
"and",
"files"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L317-L379
|
[
"def",
"process_notebook",
"(",
"self",
",",
"disable_warnings",
"=",
"True",
")",
":",
"infile",
"=",
"self",
".",
"infile",
"outfile",
"=",
"self",
".",
"outfile",
"in_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"infile",
")",
"+",
"os",
".",
"path",
".",
"sep",
"odir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"outfile",
")",
"+",
"os",
".",
"path",
".",
"sep",
"create_dirs",
"(",
"os",
".",
"path",
".",
"join",
"(",
"odir",
",",
"'images'",
")",
")",
"ep",
"=",
"nbconvert",
".",
"preprocessors",
".",
"ExecutePreprocessor",
"(",
"timeout",
"=",
"300",
")",
"cp",
"=",
"nbconvert",
".",
"preprocessors",
".",
"ClearOutputPreprocessor",
"(",
"timeout",
"=",
"300",
")",
"self",
".",
"nb",
"=",
"nb",
"=",
"nbformat",
".",
"read",
"(",
"infile",
",",
"nbformat",
".",
"current_nbformat",
")",
"# disable warnings in the rst file",
"if",
"disable_warnings",
":",
"for",
"i",
",",
"cell",
"in",
"enumerate",
"(",
"nb",
".",
"cells",
")",
":",
"if",
"cell",
"[",
"'cell_type'",
"]",
"==",
"'code'",
":",
"cell",
"=",
"cell",
".",
"copy",
"(",
")",
"break",
"cell",
"=",
"cell",
".",
"copy",
"(",
")",
"cell",
".",
"source",
"=",
"\"\"\"\nimport logging\nlogging.captureWarnings(True)\nlogging.getLogger('py.warnings').setLevel(logging.ERROR)\n\"\"\"",
"nb",
".",
"cells",
".",
"insert",
"(",
"i",
",",
"cell",
")",
"# write and process rst_file",
"if",
"self",
".",
"preprocess",
":",
"t",
"=",
"dt",
".",
"datetime",
".",
"now",
"(",
")",
"logger",
".",
"info",
"(",
"'Processing %s'",
",",
"self",
".",
"infile",
")",
"try",
":",
"ep",
".",
"preprocess",
"(",
"nb",
",",
"{",
"'metadata'",
":",
"{",
"'path'",
":",
"in_dir",
"}",
"}",
")",
"except",
"nbconvert",
".",
"preprocessors",
".",
"execute",
".",
"CellExecutionError",
":",
"logger",
".",
"critical",
"(",
"'Error while processing %s!'",
",",
"self",
".",
"infile",
",",
"exc_info",
"=",
"True",
")",
"else",
":",
"logger",
".",
"info",
"(",
"'Done. Seconds needed: %i'",
",",
"(",
"dt",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"t",
")",
".",
"seconds",
")",
"if",
"disable_warnings",
":",
"nb",
".",
"cells",
".",
"pop",
"(",
"i",
")",
"self",
".",
"py_file",
"=",
"self",
".",
"get_out_file",
"(",
"'py'",
")",
"if",
"self",
".",
"remove_tags",
":",
"tp",
"=",
"nbconvert",
".",
"preprocessors",
".",
"TagRemovePreprocessor",
"(",
"timeout",
"=",
"300",
")",
"for",
"key",
",",
"val",
"in",
"self",
".",
"tag_options",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"tp",
",",
"key",
",",
"set",
"(",
"val",
")",
")",
"nb4rst",
"=",
"deepcopy",
"(",
"nb",
")",
"tp",
".",
"preprocess",
"(",
"nb4rst",
",",
"{",
"'metadata'",
":",
"{",
"'path'",
":",
"in_dir",
"}",
"}",
")",
"else",
":",
"nb4rst",
"=",
"nb",
"self",
".",
"create_rst",
"(",
"nb4rst",
",",
"in_dir",
",",
"odir",
")",
"if",
"self",
".",
"clear",
":",
"cp",
".",
"preprocess",
"(",
"nb",
",",
"{",
"'metadata'",
":",
"{",
"'path'",
":",
"in_dir",
"}",
"}",
")",
"# write notebook file",
"nbformat",
".",
"write",
"(",
"nb",
",",
"outfile",
")",
"self",
".",
"create_py",
"(",
"nb",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.create_rst
|
Create the rst file from the notebook node
|
sphinx_nbexamples/__init__.py
|
def create_rst(self, nb, in_dir, odir):
"""Create the rst file from the notebook node"""
raw_rst, resources = nbconvert.export_by_name('rst', nb)
# remove ipython magics
rst_content = ''
i0 = 0
m = None
# HACK: we insert the bokeh style sheets here as well, since for some
# themes (e.g. the sphinx_rtd_theme) it is not sufficient to include
# the style sheets only via app.add_stylesheet
bokeh_str = ''
if 'bokeh' in raw_rst and self.insert_bokeh:
bokeh_str += self.BOKEH_TEMPLATE.format(
version=self.insert_bokeh)
if 'bokeh' in raw_rst and self.insert_bokeh_widgets:
bokeh_str += self.BOKEH_WIDGETS_TEMPLATE.format(
version=self.insert_bokeh_widgets)
for m in code_blocks.finditer(raw_rst):
lines = m.group().splitlines(True)
header, content = lines[0], ''.join(lines[1:])
no_magics = magic_patt.sub('\g<1>', content)
# if the code cell only contained magic commands, we skip it
if no_magics.strip():
rst_content += (
raw_rst[i0:m.start()] + bokeh_str + header + no_magics)
bokeh_str = ''
i0 = m.end()
else:
rst_content += raw_rst[i0:m.start()]
i0 = m.end()
if m is not None:
rst_content += bokeh_str + raw_rst[m.end():]
else:
rst_content = raw_rst
rst_content = '.. _%s:\n\n' % self.reference + \
rst_content
url = self.url
if url is not None:
rst_content += self.CODE_DOWNLOAD_NBVIEWER.format(
pyfile=os.path.basename(self.py_file),
nbfile=os.path.basename(self.outfile),
url=url)
else:
rst_content += self.CODE_DOWNLOAD.format(
pyfile=os.path.basename(self.py_file),
nbfile=os.path.basename(self.outfile))
supplementary_files = self.supplementary_files
other_supplementary_files = self.other_supplementary_files
if supplementary_files or other_supplementary_files:
for f in (supplementary_files or []) + (
other_supplementary_files or []):
if not os.path.exists(os.path.join(odir, f)):
copyfile(os.path.join(in_dir, f), os.path.join(odir, f))
if supplementary_files:
rst_content += self.data_download(supplementary_files)
rst_file = self.get_out_file()
outputs = sorted(resources['outputs'], key=rst_content.find)
base = os.path.join('images', os.path.splitext(
os.path.basename(self.infile))[0] + '_%i.png')
out_map = {os.path.basename(original): base % i
for i, original in enumerate(outputs)}
for original, final in six.iteritems(out_map):
rst_content = rst_content.replace(original, final)
with open(rst_file, 'w') \
as f:
f.write(rst_content.rstrip() + '\n')
pictures = []
for original in outputs:
fname = os.path.join(odir, out_map[os.path.basename(original)])
pictures.append(fname)
if six.PY3:
f = open(fname, 'w+b')
else:
f = open(fname, 'w')
f.write(resources['outputs'][original])
f.close()
self.pictures = pictures
|
def create_rst(self, nb, in_dir, odir):
"""Create the rst file from the notebook node"""
raw_rst, resources = nbconvert.export_by_name('rst', nb)
# remove ipython magics
rst_content = ''
i0 = 0
m = None
# HACK: we insert the bokeh style sheets here as well, since for some
# themes (e.g. the sphinx_rtd_theme) it is not sufficient to include
# the style sheets only via app.add_stylesheet
bokeh_str = ''
if 'bokeh' in raw_rst and self.insert_bokeh:
bokeh_str += self.BOKEH_TEMPLATE.format(
version=self.insert_bokeh)
if 'bokeh' in raw_rst and self.insert_bokeh_widgets:
bokeh_str += self.BOKEH_WIDGETS_TEMPLATE.format(
version=self.insert_bokeh_widgets)
for m in code_blocks.finditer(raw_rst):
lines = m.group().splitlines(True)
header, content = lines[0], ''.join(lines[1:])
no_magics = magic_patt.sub('\g<1>', content)
# if the code cell only contained magic commands, we skip it
if no_magics.strip():
rst_content += (
raw_rst[i0:m.start()] + bokeh_str + header + no_magics)
bokeh_str = ''
i0 = m.end()
else:
rst_content += raw_rst[i0:m.start()]
i0 = m.end()
if m is not None:
rst_content += bokeh_str + raw_rst[m.end():]
else:
rst_content = raw_rst
rst_content = '.. _%s:\n\n' % self.reference + \
rst_content
url = self.url
if url is not None:
rst_content += self.CODE_DOWNLOAD_NBVIEWER.format(
pyfile=os.path.basename(self.py_file),
nbfile=os.path.basename(self.outfile),
url=url)
else:
rst_content += self.CODE_DOWNLOAD.format(
pyfile=os.path.basename(self.py_file),
nbfile=os.path.basename(self.outfile))
supplementary_files = self.supplementary_files
other_supplementary_files = self.other_supplementary_files
if supplementary_files or other_supplementary_files:
for f in (supplementary_files or []) + (
other_supplementary_files or []):
if not os.path.exists(os.path.join(odir, f)):
copyfile(os.path.join(in_dir, f), os.path.join(odir, f))
if supplementary_files:
rst_content += self.data_download(supplementary_files)
rst_file = self.get_out_file()
outputs = sorted(resources['outputs'], key=rst_content.find)
base = os.path.join('images', os.path.splitext(
os.path.basename(self.infile))[0] + '_%i.png')
out_map = {os.path.basename(original): base % i
for i, original in enumerate(outputs)}
for original, final in six.iteritems(out_map):
rst_content = rst_content.replace(original, final)
with open(rst_file, 'w') \
as f:
f.write(rst_content.rstrip() + '\n')
pictures = []
for original in outputs:
fname = os.path.join(odir, out_map[os.path.basename(original)])
pictures.append(fname)
if six.PY3:
f = open(fname, 'w+b')
else:
f = open(fname, 'w')
f.write(resources['outputs'][original])
f.close()
self.pictures = pictures
|
[
"Create",
"the",
"rst",
"file",
"from",
"the",
"notebook",
"node"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L381-L458
|
[
"def",
"create_rst",
"(",
"self",
",",
"nb",
",",
"in_dir",
",",
"odir",
")",
":",
"raw_rst",
",",
"resources",
"=",
"nbconvert",
".",
"export_by_name",
"(",
"'rst'",
",",
"nb",
")",
"# remove ipython magics",
"rst_content",
"=",
"''",
"i0",
"=",
"0",
"m",
"=",
"None",
"# HACK: we insert the bokeh style sheets here as well, since for some",
"# themes (e.g. the sphinx_rtd_theme) it is not sufficient to include",
"# the style sheets only via app.add_stylesheet",
"bokeh_str",
"=",
"''",
"if",
"'bokeh'",
"in",
"raw_rst",
"and",
"self",
".",
"insert_bokeh",
":",
"bokeh_str",
"+=",
"self",
".",
"BOKEH_TEMPLATE",
".",
"format",
"(",
"version",
"=",
"self",
".",
"insert_bokeh",
")",
"if",
"'bokeh'",
"in",
"raw_rst",
"and",
"self",
".",
"insert_bokeh_widgets",
":",
"bokeh_str",
"+=",
"self",
".",
"BOKEH_WIDGETS_TEMPLATE",
".",
"format",
"(",
"version",
"=",
"self",
".",
"insert_bokeh_widgets",
")",
"for",
"m",
"in",
"code_blocks",
".",
"finditer",
"(",
"raw_rst",
")",
":",
"lines",
"=",
"m",
".",
"group",
"(",
")",
".",
"splitlines",
"(",
"True",
")",
"header",
",",
"content",
"=",
"lines",
"[",
"0",
"]",
",",
"''",
".",
"join",
"(",
"lines",
"[",
"1",
":",
"]",
")",
"no_magics",
"=",
"magic_patt",
".",
"sub",
"(",
"'\\g<1>'",
",",
"content",
")",
"# if the code cell only contained magic commands, we skip it",
"if",
"no_magics",
".",
"strip",
"(",
")",
":",
"rst_content",
"+=",
"(",
"raw_rst",
"[",
"i0",
":",
"m",
".",
"start",
"(",
")",
"]",
"+",
"bokeh_str",
"+",
"header",
"+",
"no_magics",
")",
"bokeh_str",
"=",
"''",
"i0",
"=",
"m",
".",
"end",
"(",
")",
"else",
":",
"rst_content",
"+=",
"raw_rst",
"[",
"i0",
":",
"m",
".",
"start",
"(",
")",
"]",
"i0",
"=",
"m",
".",
"end",
"(",
")",
"if",
"m",
"is",
"not",
"None",
":",
"rst_content",
"+=",
"bokeh_str",
"+",
"raw_rst",
"[",
"m",
".",
"end",
"(",
")",
":",
"]",
"else",
":",
"rst_content",
"=",
"raw_rst",
"rst_content",
"=",
"'.. _%s:\\n\\n'",
"%",
"self",
".",
"reference",
"+",
"rst_content",
"url",
"=",
"self",
".",
"url",
"if",
"url",
"is",
"not",
"None",
":",
"rst_content",
"+=",
"self",
".",
"CODE_DOWNLOAD_NBVIEWER",
".",
"format",
"(",
"pyfile",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"py_file",
")",
",",
"nbfile",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"outfile",
")",
",",
"url",
"=",
"url",
")",
"else",
":",
"rst_content",
"+=",
"self",
".",
"CODE_DOWNLOAD",
".",
"format",
"(",
"pyfile",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"py_file",
")",
",",
"nbfile",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"outfile",
")",
")",
"supplementary_files",
"=",
"self",
".",
"supplementary_files",
"other_supplementary_files",
"=",
"self",
".",
"other_supplementary_files",
"if",
"supplementary_files",
"or",
"other_supplementary_files",
":",
"for",
"f",
"in",
"(",
"supplementary_files",
"or",
"[",
"]",
")",
"+",
"(",
"other_supplementary_files",
"or",
"[",
"]",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"odir",
",",
"f",
")",
")",
":",
"copyfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"in_dir",
",",
"f",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"odir",
",",
"f",
")",
")",
"if",
"supplementary_files",
":",
"rst_content",
"+=",
"self",
".",
"data_download",
"(",
"supplementary_files",
")",
"rst_file",
"=",
"self",
".",
"get_out_file",
"(",
")",
"outputs",
"=",
"sorted",
"(",
"resources",
"[",
"'outputs'",
"]",
",",
"key",
"=",
"rst_content",
".",
"find",
")",
"base",
"=",
"os",
".",
"path",
".",
"join",
"(",
"'images'",
",",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"infile",
")",
")",
"[",
"0",
"]",
"+",
"'_%i.png'",
")",
"out_map",
"=",
"{",
"os",
".",
"path",
".",
"basename",
"(",
"original",
")",
":",
"base",
"%",
"i",
"for",
"i",
",",
"original",
"in",
"enumerate",
"(",
"outputs",
")",
"}",
"for",
"original",
",",
"final",
"in",
"six",
".",
"iteritems",
"(",
"out_map",
")",
":",
"rst_content",
"=",
"rst_content",
".",
"replace",
"(",
"original",
",",
"final",
")",
"with",
"open",
"(",
"rst_file",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"rst_content",
".",
"rstrip",
"(",
")",
"+",
"'\\n'",
")",
"pictures",
"=",
"[",
"]",
"for",
"original",
"in",
"outputs",
":",
"fname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"odir",
",",
"out_map",
"[",
"os",
".",
"path",
".",
"basename",
"(",
"original",
")",
"]",
")",
"pictures",
".",
"append",
"(",
"fname",
")",
"if",
"six",
".",
"PY3",
":",
"f",
"=",
"open",
"(",
"fname",
",",
"'w+b'",
")",
"else",
":",
"f",
"=",
"open",
"(",
"fname",
",",
"'w'",
")",
"f",
".",
"write",
"(",
"resources",
"[",
"'outputs'",
"]",
"[",
"original",
"]",
")",
"f",
".",
"close",
"(",
")",
"self",
".",
"pictures",
"=",
"pictures"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.create_py
|
Create the python script from the notebook node
|
sphinx_nbexamples/__init__.py
|
def create_py(self, nb, force=False):
"""Create the python script from the notebook node"""
# Although we would love to simply use ``nbconvert.export_python(nb)``
# this causes troubles in other cells processed by the ipython
# directive. Instead of getting something like ``Out [5]:``, we get
# some weird like '[0;31mOut[[1;31m5[0;31m]: [0m' which look like
# color information if we allow the call of nbconvert.export_python
if list(map(int, re.findall('\d+', nbconvert.__version__))) >= [4, 2]:
py_file = os.path.basename(self.py_file)
else:
py_file = self.py_file
try:
level = logger.logger.level
except AttributeError:
level = logger.level
spr.call(['jupyter', 'nbconvert', '--to=python',
'--output=' + py_file, '--log-level=%s' % level,
self.outfile])
with open(self.py_file) as f:
py_content = f.read()
# comment out ipython magics
py_content = re.sub('^\s*get_ipython\(\).magic.*', '# \g<0>',
py_content, flags=re.MULTILINE)
with open(self.py_file, 'w') as f:
f.write(py_content)
|
def create_py(self, nb, force=False):
"""Create the python script from the notebook node"""
# Although we would love to simply use ``nbconvert.export_python(nb)``
# this causes troubles in other cells processed by the ipython
# directive. Instead of getting something like ``Out [5]:``, we get
# some weird like '[0;31mOut[[1;31m5[0;31m]: [0m' which look like
# color information if we allow the call of nbconvert.export_python
if list(map(int, re.findall('\d+', nbconvert.__version__))) >= [4, 2]:
py_file = os.path.basename(self.py_file)
else:
py_file = self.py_file
try:
level = logger.logger.level
except AttributeError:
level = logger.level
spr.call(['jupyter', 'nbconvert', '--to=python',
'--output=' + py_file, '--log-level=%s' % level,
self.outfile])
with open(self.py_file) as f:
py_content = f.read()
# comment out ipython magics
py_content = re.sub('^\s*get_ipython\(\).magic.*', '# \g<0>',
py_content, flags=re.MULTILINE)
with open(self.py_file, 'w') as f:
f.write(py_content)
|
[
"Create",
"the",
"python",
"script",
"from",
"the",
"notebook",
"node"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L460-L484
|
[
"def",
"create_py",
"(",
"self",
",",
"nb",
",",
"force",
"=",
"False",
")",
":",
"# Although we would love to simply use ``nbconvert.export_python(nb)``",
"# this causes troubles in other cells processed by the ipython",
"# directive. Instead of getting something like ``Out [5]:``, we get",
"# some weird like '[0;31mOut[\u001b[1;31m5\u001b[0;31m]: \u001b[0m' which look like",
"# color information if we allow the call of nbconvert.export_python",
"if",
"list",
"(",
"map",
"(",
"int",
",",
"re",
".",
"findall",
"(",
"'\\d+'",
",",
"nbconvert",
".",
"__version__",
")",
")",
")",
">=",
"[",
"4",
",",
"2",
"]",
":",
"py_file",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"py_file",
")",
"else",
":",
"py_file",
"=",
"self",
".",
"py_file",
"try",
":",
"level",
"=",
"logger",
".",
"logger",
".",
"level",
"except",
"AttributeError",
":",
"level",
"=",
"logger",
".",
"level",
"spr",
".",
"call",
"(",
"[",
"'jupyter'",
",",
"'nbconvert'",
",",
"'--to=python'",
",",
"'--output='",
"+",
"py_file",
",",
"'--log-level=%s'",
"%",
"level",
",",
"self",
".",
"outfile",
"]",
")",
"with",
"open",
"(",
"self",
".",
"py_file",
")",
"as",
"f",
":",
"py_content",
"=",
"f",
".",
"read",
"(",
")",
"# comment out ipython magics",
"py_content",
"=",
"re",
".",
"sub",
"(",
"'^\\s*get_ipython\\(\\).magic.*'",
",",
"'# \\g<0>'",
",",
"py_content",
",",
"flags",
"=",
"re",
".",
"MULTILINE",
")",
"with",
"open",
"(",
"self",
".",
"py_file",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"py_content",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.data_download
|
Create the rst string to download supplementary data
|
sphinx_nbexamples/__init__.py
|
def data_download(self, files):
"""Create the rst string to download supplementary data"""
if len(files) > 1:
return self.DATA_DOWNLOAD % (
('\n\n' + ' '*8) + ('\n' + ' '*8).join(
'* :download:`%s`' % f for f in files))
return self.DATA_DOWNLOAD % ':download:`%s`' % files[0]
|
def data_download(self, files):
"""Create the rst string to download supplementary data"""
if len(files) > 1:
return self.DATA_DOWNLOAD % (
('\n\n' + ' '*8) + ('\n' + ' '*8).join(
'* :download:`%s`' % f for f in files))
return self.DATA_DOWNLOAD % ':download:`%s`' % files[0]
|
[
"Create",
"the",
"rst",
"string",
"to",
"download",
"supplementary",
"data"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L486-L492
|
[
"def",
"data_download",
"(",
"self",
",",
"files",
")",
":",
"if",
"len",
"(",
"files",
")",
">",
"1",
":",
"return",
"self",
".",
"DATA_DOWNLOAD",
"%",
"(",
"(",
"'\\n\\n'",
"+",
"' '",
"*",
"8",
")",
"+",
"(",
"'\\n'",
"+",
"' '",
"*",
"8",
")",
".",
"join",
"(",
"'* :download:`%s`'",
"%",
"f",
"for",
"f",
"in",
"files",
")",
")",
"return",
"self",
".",
"DATA_DOWNLOAD",
"%",
"':download:`%s`'",
"%",
"files",
"[",
"0",
"]"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.create_thumb
|
Create the thumbnail for html output
|
sphinx_nbexamples/__init__.py
|
def create_thumb(self):
"""Create the thumbnail for html output"""
thumbnail_figure = self.copy_thumbnail_figure()
if thumbnail_figure is not None:
if isinstance(thumbnail_figure, six.string_types):
pic = thumbnail_figure
else:
pic = self.pictures[thumbnail_figure]
self.save_thumbnail(pic)
else:
for pic in self.pictures[::-1]:
if pic.endswith('png'):
self.save_thumbnail(pic)
return
|
def create_thumb(self):
"""Create the thumbnail for html output"""
thumbnail_figure = self.copy_thumbnail_figure()
if thumbnail_figure is not None:
if isinstance(thumbnail_figure, six.string_types):
pic = thumbnail_figure
else:
pic = self.pictures[thumbnail_figure]
self.save_thumbnail(pic)
else:
for pic in self.pictures[::-1]:
if pic.endswith('png'):
self.save_thumbnail(pic)
return
|
[
"Create",
"the",
"thumbnail",
"for",
"html",
"output"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L494-L507
|
[
"def",
"create_thumb",
"(",
"self",
")",
":",
"thumbnail_figure",
"=",
"self",
".",
"copy_thumbnail_figure",
"(",
")",
"if",
"thumbnail_figure",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"thumbnail_figure",
",",
"six",
".",
"string_types",
")",
":",
"pic",
"=",
"thumbnail_figure",
"else",
":",
"pic",
"=",
"self",
".",
"pictures",
"[",
"thumbnail_figure",
"]",
"self",
".",
"save_thumbnail",
"(",
"pic",
")",
"else",
":",
"for",
"pic",
"in",
"self",
".",
"pictures",
"[",
":",
":",
"-",
"1",
"]",
":",
"if",
"pic",
".",
"endswith",
"(",
"'png'",
")",
":",
"self",
".",
"save_thumbnail",
"(",
"pic",
")",
"return"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.get_description
|
Get summary and description of this notebook
|
sphinx_nbexamples/__init__.py
|
def get_description(self):
"""Get summary and description of this notebook"""
def split_header(s, get_header=True):
s = s.lstrip().rstrip()
parts = s.splitlines()
if parts[0].startswith('#'):
if get_header:
header = re.sub('#+\s*', '', parts.pop(0))
if not parts:
return header, ''
else:
header = ''
rest = '\n'.join(parts).lstrip().split('\n\n')
desc = rest[0].replace('\n', ' ')
return header, desc
else:
if get_header:
if parts[0].startswith(('=', '-')):
parts = parts[1:]
header = parts.pop(0)
if parts and parts[0].startswith(('=', '-')):
parts.pop(0)
if not parts:
return header, ''
else:
header = ''
rest = '\n'.join(parts).lstrip().split('\n\n')
desc = rest[0].replace('\n', ' ')
return header, desc
first_cell = self.nb['cells'][0]
if not first_cell['cell_type'] == 'markdown':
return '', ''
header, desc = split_header(first_cell['source'])
if not desc and len(self.nb['cells']) > 1:
second_cell = self.nb['cells'][1]
if second_cell['cell_type'] == 'markdown':
_, desc = split_header(second_cell['source'], False)
return header, desc
|
def get_description(self):
"""Get summary and description of this notebook"""
def split_header(s, get_header=True):
s = s.lstrip().rstrip()
parts = s.splitlines()
if parts[0].startswith('#'):
if get_header:
header = re.sub('#+\s*', '', parts.pop(0))
if not parts:
return header, ''
else:
header = ''
rest = '\n'.join(parts).lstrip().split('\n\n')
desc = rest[0].replace('\n', ' ')
return header, desc
else:
if get_header:
if parts[0].startswith(('=', '-')):
parts = parts[1:]
header = parts.pop(0)
if parts and parts[0].startswith(('=', '-')):
parts.pop(0)
if not parts:
return header, ''
else:
header = ''
rest = '\n'.join(parts).lstrip().split('\n\n')
desc = rest[0].replace('\n', ' ')
return header, desc
first_cell = self.nb['cells'][0]
if not first_cell['cell_type'] == 'markdown':
return '', ''
header, desc = split_header(first_cell['source'])
if not desc and len(self.nb['cells']) > 1:
second_cell = self.nb['cells'][1]
if second_cell['cell_type'] == 'markdown':
_, desc = split_header(second_cell['source'], False)
return header, desc
|
[
"Get",
"summary",
"and",
"description",
"of",
"this",
"notebook"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L509-L548
|
[
"def",
"get_description",
"(",
"self",
")",
":",
"def",
"split_header",
"(",
"s",
",",
"get_header",
"=",
"True",
")",
":",
"s",
"=",
"s",
".",
"lstrip",
"(",
")",
".",
"rstrip",
"(",
")",
"parts",
"=",
"s",
".",
"splitlines",
"(",
")",
"if",
"parts",
"[",
"0",
"]",
".",
"startswith",
"(",
"'#'",
")",
":",
"if",
"get_header",
":",
"header",
"=",
"re",
".",
"sub",
"(",
"'#+\\s*'",
",",
"''",
",",
"parts",
".",
"pop",
"(",
"0",
")",
")",
"if",
"not",
"parts",
":",
"return",
"header",
",",
"''",
"else",
":",
"header",
"=",
"''",
"rest",
"=",
"'\\n'",
".",
"join",
"(",
"parts",
")",
".",
"lstrip",
"(",
")",
".",
"split",
"(",
"'\\n\\n'",
")",
"desc",
"=",
"rest",
"[",
"0",
"]",
".",
"replace",
"(",
"'\\n'",
",",
"' '",
")",
"return",
"header",
",",
"desc",
"else",
":",
"if",
"get_header",
":",
"if",
"parts",
"[",
"0",
"]",
".",
"startswith",
"(",
"(",
"'='",
",",
"'-'",
")",
")",
":",
"parts",
"=",
"parts",
"[",
"1",
":",
"]",
"header",
"=",
"parts",
".",
"pop",
"(",
"0",
")",
"if",
"parts",
"and",
"parts",
"[",
"0",
"]",
".",
"startswith",
"(",
"(",
"'='",
",",
"'-'",
")",
")",
":",
"parts",
".",
"pop",
"(",
"0",
")",
"if",
"not",
"parts",
":",
"return",
"header",
",",
"''",
"else",
":",
"header",
"=",
"''",
"rest",
"=",
"'\\n'",
".",
"join",
"(",
"parts",
")",
".",
"lstrip",
"(",
")",
".",
"split",
"(",
"'\\n\\n'",
")",
"desc",
"=",
"rest",
"[",
"0",
"]",
".",
"replace",
"(",
"'\\n'",
",",
"' '",
")",
"return",
"header",
",",
"desc",
"first_cell",
"=",
"self",
".",
"nb",
"[",
"'cells'",
"]",
"[",
"0",
"]",
"if",
"not",
"first_cell",
"[",
"'cell_type'",
"]",
"==",
"'markdown'",
":",
"return",
"''",
",",
"''",
"header",
",",
"desc",
"=",
"split_header",
"(",
"first_cell",
"[",
"'source'",
"]",
")",
"if",
"not",
"desc",
"and",
"len",
"(",
"self",
".",
"nb",
"[",
"'cells'",
"]",
")",
">",
"1",
":",
"second_cell",
"=",
"self",
".",
"nb",
"[",
"'cells'",
"]",
"[",
"1",
"]",
"if",
"second_cell",
"[",
"'cell_type'",
"]",
"==",
"'markdown'",
":",
"_",
",",
"desc",
"=",
"split_header",
"(",
"second_cell",
"[",
"'source'",
"]",
",",
"False",
")",
"return",
"header",
",",
"desc"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.scale_image
|
Scales an image with the same aspect ratio centered in an
image with a given max_width and max_height
if in_fname == out_fname the image can only be scaled down
|
sphinx_nbexamples/__init__.py
|
def scale_image(self, in_fname, out_fname, max_width, max_height):
"""Scales an image with the same aspect ratio centered in an
image with a given max_width and max_height
if in_fname == out_fname the image can only be scaled down
"""
# local import to avoid testing dependency on PIL:
try:
from PIL import Image
except ImportError:
import Image
img = Image.open(in_fname)
width_in, height_in = img.size
scale_w = max_width / float(width_in)
scale_h = max_height / float(height_in)
if height_in * scale_w <= max_height:
scale = scale_w
else:
scale = scale_h
if scale >= 1.0 and in_fname == out_fname:
return
width_sc = int(round(scale * width_in))
height_sc = int(round(scale * height_in))
# resize the image
img.thumbnail((width_sc, height_sc), Image.ANTIALIAS)
# insert centered
thumb = Image.new('RGB', (max_width, max_height), (255, 255, 255))
pos_insert = (
(max_width - width_sc) // 2, (max_height - height_sc) // 2)
thumb.paste(img, pos_insert)
thumb.save(out_fname)
|
def scale_image(self, in_fname, out_fname, max_width, max_height):
"""Scales an image with the same aspect ratio centered in an
image with a given max_width and max_height
if in_fname == out_fname the image can only be scaled down
"""
# local import to avoid testing dependency on PIL:
try:
from PIL import Image
except ImportError:
import Image
img = Image.open(in_fname)
width_in, height_in = img.size
scale_w = max_width / float(width_in)
scale_h = max_height / float(height_in)
if height_in * scale_w <= max_height:
scale = scale_w
else:
scale = scale_h
if scale >= 1.0 and in_fname == out_fname:
return
width_sc = int(round(scale * width_in))
height_sc = int(round(scale * height_in))
# resize the image
img.thumbnail((width_sc, height_sc), Image.ANTIALIAS)
# insert centered
thumb = Image.new('RGB', (max_width, max_height), (255, 255, 255))
pos_insert = (
(max_width - width_sc) // 2, (max_height - height_sc) // 2)
thumb.paste(img, pos_insert)
thumb.save(out_fname)
|
[
"Scales",
"an",
"image",
"with",
"the",
"same",
"aspect",
"ratio",
"centered",
"in",
"an",
"image",
"with",
"a",
"given",
"max_width",
"and",
"max_height",
"if",
"in_fname",
"==",
"out_fname",
"the",
"image",
"can",
"only",
"be",
"scaled",
"down"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L550-L585
|
[
"def",
"scale_image",
"(",
"self",
",",
"in_fname",
",",
"out_fname",
",",
"max_width",
",",
"max_height",
")",
":",
"# local import to avoid testing dependency on PIL:",
"try",
":",
"from",
"PIL",
"import",
"Image",
"except",
"ImportError",
":",
"import",
"Image",
"img",
"=",
"Image",
".",
"open",
"(",
"in_fname",
")",
"width_in",
",",
"height_in",
"=",
"img",
".",
"size",
"scale_w",
"=",
"max_width",
"/",
"float",
"(",
"width_in",
")",
"scale_h",
"=",
"max_height",
"/",
"float",
"(",
"height_in",
")",
"if",
"height_in",
"*",
"scale_w",
"<=",
"max_height",
":",
"scale",
"=",
"scale_w",
"else",
":",
"scale",
"=",
"scale_h",
"if",
"scale",
">=",
"1.0",
"and",
"in_fname",
"==",
"out_fname",
":",
"return",
"width_sc",
"=",
"int",
"(",
"round",
"(",
"scale",
"*",
"width_in",
")",
")",
"height_sc",
"=",
"int",
"(",
"round",
"(",
"scale",
"*",
"height_in",
")",
")",
"# resize the image",
"img",
".",
"thumbnail",
"(",
"(",
"width_sc",
",",
"height_sc",
")",
",",
"Image",
".",
"ANTIALIAS",
")",
"# insert centered",
"thumb",
"=",
"Image",
".",
"new",
"(",
"'RGB'",
",",
"(",
"max_width",
",",
"max_height",
")",
",",
"(",
"255",
",",
"255",
",",
"255",
")",
")",
"pos_insert",
"=",
"(",
"(",
"max_width",
"-",
"width_sc",
")",
"//",
"2",
",",
"(",
"max_height",
"-",
"height_sc",
")",
"//",
"2",
")",
"thumb",
".",
"paste",
"(",
"img",
",",
"pos_insert",
")",
"thumb",
".",
"save",
"(",
"out_fname",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.save_thumbnail
|
Save the thumbnail image
|
sphinx_nbexamples/__init__.py
|
def save_thumbnail(self, image_path):
"""Save the thumbnail image"""
thumb_dir = os.path.join(os.path.dirname(image_path), 'thumb')
create_dirs(thumb_dir)
thumb_file = os.path.join(thumb_dir,
'%s_thumb.png' % self.reference)
if os.path.exists(image_path):
logger.info('Scaling %s to thumbnail %s', image_path, thumb_file)
self.scale_image(image_path, thumb_file, 400, 280)
self.thumb_file = thumb_file
|
def save_thumbnail(self, image_path):
"""Save the thumbnail image"""
thumb_dir = os.path.join(os.path.dirname(image_path), 'thumb')
create_dirs(thumb_dir)
thumb_file = os.path.join(thumb_dir,
'%s_thumb.png' % self.reference)
if os.path.exists(image_path):
logger.info('Scaling %s to thumbnail %s', image_path, thumb_file)
self.scale_image(image_path, thumb_file, 400, 280)
self.thumb_file = thumb_file
|
[
"Save",
"the",
"thumbnail",
"image"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L587-L597
|
[
"def",
"save_thumbnail",
"(",
"self",
",",
"image_path",
")",
":",
"thumb_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"image_path",
")",
",",
"'thumb'",
")",
"create_dirs",
"(",
"thumb_dir",
")",
"thumb_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"thumb_dir",
",",
"'%s_thumb.png'",
"%",
"self",
".",
"reference",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"image_path",
")",
":",
"logger",
".",
"info",
"(",
"'Scaling %s to thumbnail %s'",
",",
"image_path",
",",
"thumb_file",
")",
"self",
".",
"scale_image",
"(",
"image_path",
",",
"thumb_file",
",",
"400",
",",
"280",
")",
"self",
".",
"thumb_file",
"=",
"thumb_file"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
NotebookProcessor.copy_thumbnail_figure
|
The integer of the thumbnail figure
|
sphinx_nbexamples/__init__.py
|
def copy_thumbnail_figure(self):
"""The integer of the thumbnail figure"""
ret = None
if self._thumbnail_figure is not None:
if not isstring(self._thumbnail_figure):
ret = self._thumbnail_figure
else:
ret = osp.join(osp.dirname(self.outfile),
osp.basename(self._thumbnail_figure))
copyfile(self._thumbnail_figure, ret)
return ret
elif hasattr(self.nb.metadata, 'thumbnail_figure'):
if not isstring(self.nb.metadata.thumbnail_figure):
ret = self.nb.metadata.thumbnail_figure
else:
ret = osp.join(osp.dirname(self.outfile), 'images',
osp.basename(self.nb.metadata.thumbnail_figure))
copyfile(osp.join(osp.dirname(self.infile),
self.nb.metadata.thumbnail_figure),
ret)
return ret
|
def copy_thumbnail_figure(self):
"""The integer of the thumbnail figure"""
ret = None
if self._thumbnail_figure is not None:
if not isstring(self._thumbnail_figure):
ret = self._thumbnail_figure
else:
ret = osp.join(osp.dirname(self.outfile),
osp.basename(self._thumbnail_figure))
copyfile(self._thumbnail_figure, ret)
return ret
elif hasattr(self.nb.metadata, 'thumbnail_figure'):
if not isstring(self.nb.metadata.thumbnail_figure):
ret = self.nb.metadata.thumbnail_figure
else:
ret = osp.join(osp.dirname(self.outfile), 'images',
osp.basename(self.nb.metadata.thumbnail_figure))
copyfile(osp.join(osp.dirname(self.infile),
self.nb.metadata.thumbnail_figure),
ret)
return ret
|
[
"The",
"integer",
"of",
"the",
"thumbnail",
"figure"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L603-L623
|
[
"def",
"copy_thumbnail_figure",
"(",
"self",
")",
":",
"ret",
"=",
"None",
"if",
"self",
".",
"_thumbnail_figure",
"is",
"not",
"None",
":",
"if",
"not",
"isstring",
"(",
"self",
".",
"_thumbnail_figure",
")",
":",
"ret",
"=",
"self",
".",
"_thumbnail_figure",
"else",
":",
"ret",
"=",
"osp",
".",
"join",
"(",
"osp",
".",
"dirname",
"(",
"self",
".",
"outfile",
")",
",",
"osp",
".",
"basename",
"(",
"self",
".",
"_thumbnail_figure",
")",
")",
"copyfile",
"(",
"self",
".",
"_thumbnail_figure",
",",
"ret",
")",
"return",
"ret",
"elif",
"hasattr",
"(",
"self",
".",
"nb",
".",
"metadata",
",",
"'thumbnail_figure'",
")",
":",
"if",
"not",
"isstring",
"(",
"self",
".",
"nb",
".",
"metadata",
".",
"thumbnail_figure",
")",
":",
"ret",
"=",
"self",
".",
"nb",
".",
"metadata",
".",
"thumbnail_figure",
"else",
":",
"ret",
"=",
"osp",
".",
"join",
"(",
"osp",
".",
"dirname",
"(",
"self",
".",
"outfile",
")",
",",
"'images'",
",",
"osp",
".",
"basename",
"(",
"self",
".",
"nb",
".",
"metadata",
".",
"thumbnail_figure",
")",
")",
"copyfile",
"(",
"osp",
".",
"join",
"(",
"osp",
".",
"dirname",
"(",
"self",
".",
"infile",
")",
",",
"self",
".",
"nb",
".",
"metadata",
".",
"thumbnail_figure",
")",
",",
"ret",
")",
"return",
"ret"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
Gallery.process_directories
|
Create the rst files from the input directories in the
:attr:`in_dir` attribute
|
sphinx_nbexamples/__init__.py
|
def process_directories(self):
"""Create the rst files from the input directories in the
:attr:`in_dir` attribute"""
for i, (base_dir, target_dir, paths) in enumerate(zip(
self.in_dir, self.out_dir, map(os.walk, self.in_dir))):
self._in_dir_count = i
self.recursive_processing(base_dir, target_dir, paths)
|
def process_directories(self):
"""Create the rst files from the input directories in the
:attr:`in_dir` attribute"""
for i, (base_dir, target_dir, paths) in enumerate(zip(
self.in_dir, self.out_dir, map(os.walk, self.in_dir))):
self._in_dir_count = i
self.recursive_processing(base_dir, target_dir, paths)
|
[
"Create",
"the",
"rst",
"files",
"from",
"the",
"input",
"directories",
"in",
"the",
":",
"attr",
":",
"in_dir",
"attribute"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L778-L784
|
[
"def",
"process_directories",
"(",
"self",
")",
":",
"for",
"i",
",",
"(",
"base_dir",
",",
"target_dir",
",",
"paths",
")",
"in",
"enumerate",
"(",
"zip",
"(",
"self",
".",
"in_dir",
",",
"self",
".",
"out_dir",
",",
"map",
"(",
"os",
".",
"walk",
",",
"self",
".",
"in_dir",
")",
")",
")",
":",
"self",
".",
"_in_dir_count",
"=",
"i",
"self",
".",
"recursive_processing",
"(",
"base_dir",
",",
"target_dir",
",",
"paths",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
Gallery.recursive_processing
|
Method to recursivly process the notebooks in the `base_dir`
Parameters
----------
base_dir: str
Path to the base example directory (see the `examples_dir`
parameter for the :class:`Gallery` class)
target_dir: str
Path to the output directory for the rst files (see the
`gallery_dirs` parameter for the :class:`Gallery` class)
it: iterable
The iterator over the subdirectories and files in `base_dir`
generated by the :func:`os.walk` function
|
sphinx_nbexamples/__init__.py
|
def recursive_processing(self, base_dir, target_dir, it):
"""Method to recursivly process the notebooks in the `base_dir`
Parameters
----------
base_dir: str
Path to the base example directory (see the `examples_dir`
parameter for the :class:`Gallery` class)
target_dir: str
Path to the output directory for the rst files (see the
`gallery_dirs` parameter for the :class:`Gallery` class)
it: iterable
The iterator over the subdirectories and files in `base_dir`
generated by the :func:`os.walk` function"""
try:
file_dir, dirs, files = next(it)
except StopIteration:
return '', []
readme_files = {'README.md', 'README.rst', 'README.txt'}
if readme_files.intersection(files):
foutdir = file_dir.replace(base_dir, target_dir)
create_dirs(foutdir)
this_nbps = [
NotebookProcessor(
infile=f,
outfile=os.path.join(foutdir, os.path.basename(f)),
disable_warnings=self.disable_warnings,
preprocess=(
(self.preprocess is True or f in self.preprocess) and
not (self.dont_preprocess is True or
f in self.dont_preprocess)),
clear=((self.clear is True or f in self.clear) and not
(self.dont_clear is True or f in self.dont_clear)),
code_example=self.code_examples.get(f),
supplementary_files=self.supplementary_files.get(f),
other_supplementary_files=self.osf.get(f),
thumbnail_figure=self.thumbnail_figures.get(f),
url=self.get_url(f.replace(base_dir, '')),
**self._nbp_kws)
for f in map(lambda f: os.path.join(file_dir, f),
filter(self.pattern.match, files))]
readme_file = next(iter(readme_files.intersection(files)))
else:
return '', []
labels = OrderedDict()
this_label = 'gallery_' + foutdir.replace(os.path.sep, '_')
if this_label.endswith('_'):
this_label = this_label[:-1]
for d in dirs:
label, nbps = self.recursive_processing(
base_dir, target_dir, it)
if label:
labels[label] = nbps
s = ".. _%s:\n\n" % this_label
with open(os.path.join(file_dir, readme_file)) as f:
s += f.read().rstrip() + '\n\n'
s += "\n\n.. toctree::\n\n"
s += ''.join(' %s\n' % os.path.splitext(os.path.basename(
nbp.get_out_file()))[0] for nbp in this_nbps)
for d in dirs:
findex = os.path.join(d, 'index.rst')
if os.path.exists(os.path.join(foutdir, findex)):
s += ' %s\n' % os.path.splitext(findex)[0]
s += '\n'
for nbp in this_nbps:
code_div = nbp.code_div
if code_div is not None:
s += code_div + '\n'
else:
s += nbp.thumbnail_div + '\n'
s += "\n.. raw:: html\n\n <div style='clear:both'></div>\n"
for label, nbps in labels.items():
s += '\n.. only:: html\n\n .. rubric:: :ref:`%s`\n\n' % (
label)
for nbp in nbps:
code_div = nbp.code_div
if code_div is not None:
s += code_div + '\n'
else:
s += nbp.thumbnail_div + '\n'
s += "\n.. raw:: html\n\n <div style='clear:both'></div>\n"
s += '\n'
with open(os.path.join(foutdir, 'index.rst'), 'w') as f:
f.write(s)
return this_label, list(chain(this_nbps, *labels.values()))
|
def recursive_processing(self, base_dir, target_dir, it):
"""Method to recursivly process the notebooks in the `base_dir`
Parameters
----------
base_dir: str
Path to the base example directory (see the `examples_dir`
parameter for the :class:`Gallery` class)
target_dir: str
Path to the output directory for the rst files (see the
`gallery_dirs` parameter for the :class:`Gallery` class)
it: iterable
The iterator over the subdirectories and files in `base_dir`
generated by the :func:`os.walk` function"""
try:
file_dir, dirs, files = next(it)
except StopIteration:
return '', []
readme_files = {'README.md', 'README.rst', 'README.txt'}
if readme_files.intersection(files):
foutdir = file_dir.replace(base_dir, target_dir)
create_dirs(foutdir)
this_nbps = [
NotebookProcessor(
infile=f,
outfile=os.path.join(foutdir, os.path.basename(f)),
disable_warnings=self.disable_warnings,
preprocess=(
(self.preprocess is True or f in self.preprocess) and
not (self.dont_preprocess is True or
f in self.dont_preprocess)),
clear=((self.clear is True or f in self.clear) and not
(self.dont_clear is True or f in self.dont_clear)),
code_example=self.code_examples.get(f),
supplementary_files=self.supplementary_files.get(f),
other_supplementary_files=self.osf.get(f),
thumbnail_figure=self.thumbnail_figures.get(f),
url=self.get_url(f.replace(base_dir, '')),
**self._nbp_kws)
for f in map(lambda f: os.path.join(file_dir, f),
filter(self.pattern.match, files))]
readme_file = next(iter(readme_files.intersection(files)))
else:
return '', []
labels = OrderedDict()
this_label = 'gallery_' + foutdir.replace(os.path.sep, '_')
if this_label.endswith('_'):
this_label = this_label[:-1]
for d in dirs:
label, nbps = self.recursive_processing(
base_dir, target_dir, it)
if label:
labels[label] = nbps
s = ".. _%s:\n\n" % this_label
with open(os.path.join(file_dir, readme_file)) as f:
s += f.read().rstrip() + '\n\n'
s += "\n\n.. toctree::\n\n"
s += ''.join(' %s\n' % os.path.splitext(os.path.basename(
nbp.get_out_file()))[0] for nbp in this_nbps)
for d in dirs:
findex = os.path.join(d, 'index.rst')
if os.path.exists(os.path.join(foutdir, findex)):
s += ' %s\n' % os.path.splitext(findex)[0]
s += '\n'
for nbp in this_nbps:
code_div = nbp.code_div
if code_div is not None:
s += code_div + '\n'
else:
s += nbp.thumbnail_div + '\n'
s += "\n.. raw:: html\n\n <div style='clear:both'></div>\n"
for label, nbps in labels.items():
s += '\n.. only:: html\n\n .. rubric:: :ref:`%s`\n\n' % (
label)
for nbp in nbps:
code_div = nbp.code_div
if code_div is not None:
s += code_div + '\n'
else:
s += nbp.thumbnail_div + '\n'
s += "\n.. raw:: html\n\n <div style='clear:both'></div>\n"
s += '\n'
with open(os.path.join(foutdir, 'index.rst'), 'w') as f:
f.write(s)
return this_label, list(chain(this_nbps, *labels.values()))
|
[
"Method",
"to",
"recursivly",
"process",
"the",
"notebooks",
"in",
"the",
"base_dir"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L786-L875
|
[
"def",
"recursive_processing",
"(",
"self",
",",
"base_dir",
",",
"target_dir",
",",
"it",
")",
":",
"try",
":",
"file_dir",
",",
"dirs",
",",
"files",
"=",
"next",
"(",
"it",
")",
"except",
"StopIteration",
":",
"return",
"''",
",",
"[",
"]",
"readme_files",
"=",
"{",
"'README.md'",
",",
"'README.rst'",
",",
"'README.txt'",
"}",
"if",
"readme_files",
".",
"intersection",
"(",
"files",
")",
":",
"foutdir",
"=",
"file_dir",
".",
"replace",
"(",
"base_dir",
",",
"target_dir",
")",
"create_dirs",
"(",
"foutdir",
")",
"this_nbps",
"=",
"[",
"NotebookProcessor",
"(",
"infile",
"=",
"f",
",",
"outfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"foutdir",
",",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
")",
",",
"disable_warnings",
"=",
"self",
".",
"disable_warnings",
",",
"preprocess",
"=",
"(",
"(",
"self",
".",
"preprocess",
"is",
"True",
"or",
"f",
"in",
"self",
".",
"preprocess",
")",
"and",
"not",
"(",
"self",
".",
"dont_preprocess",
"is",
"True",
"or",
"f",
"in",
"self",
".",
"dont_preprocess",
")",
")",
",",
"clear",
"=",
"(",
"(",
"self",
".",
"clear",
"is",
"True",
"or",
"f",
"in",
"self",
".",
"clear",
")",
"and",
"not",
"(",
"self",
".",
"dont_clear",
"is",
"True",
"or",
"f",
"in",
"self",
".",
"dont_clear",
")",
")",
",",
"code_example",
"=",
"self",
".",
"code_examples",
".",
"get",
"(",
"f",
")",
",",
"supplementary_files",
"=",
"self",
".",
"supplementary_files",
".",
"get",
"(",
"f",
")",
",",
"other_supplementary_files",
"=",
"self",
".",
"osf",
".",
"get",
"(",
"f",
")",
",",
"thumbnail_figure",
"=",
"self",
".",
"thumbnail_figures",
".",
"get",
"(",
"f",
")",
",",
"url",
"=",
"self",
".",
"get_url",
"(",
"f",
".",
"replace",
"(",
"base_dir",
",",
"''",
")",
")",
",",
"*",
"*",
"self",
".",
"_nbp_kws",
")",
"for",
"f",
"in",
"map",
"(",
"lambda",
"f",
":",
"os",
".",
"path",
".",
"join",
"(",
"file_dir",
",",
"f",
")",
",",
"filter",
"(",
"self",
".",
"pattern",
".",
"match",
",",
"files",
")",
")",
"]",
"readme_file",
"=",
"next",
"(",
"iter",
"(",
"readme_files",
".",
"intersection",
"(",
"files",
")",
")",
")",
"else",
":",
"return",
"''",
",",
"[",
"]",
"labels",
"=",
"OrderedDict",
"(",
")",
"this_label",
"=",
"'gallery_'",
"+",
"foutdir",
".",
"replace",
"(",
"os",
".",
"path",
".",
"sep",
",",
"'_'",
")",
"if",
"this_label",
".",
"endswith",
"(",
"'_'",
")",
":",
"this_label",
"=",
"this_label",
"[",
":",
"-",
"1",
"]",
"for",
"d",
"in",
"dirs",
":",
"label",
",",
"nbps",
"=",
"self",
".",
"recursive_processing",
"(",
"base_dir",
",",
"target_dir",
",",
"it",
")",
"if",
"label",
":",
"labels",
"[",
"label",
"]",
"=",
"nbps",
"s",
"=",
"\".. _%s:\\n\\n\"",
"%",
"this_label",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"file_dir",
",",
"readme_file",
")",
")",
"as",
"f",
":",
"s",
"+=",
"f",
".",
"read",
"(",
")",
".",
"rstrip",
"(",
")",
"+",
"'\\n\\n'",
"s",
"+=",
"\"\\n\\n.. toctree::\\n\\n\"",
"s",
"+=",
"''",
".",
"join",
"(",
"' %s\\n'",
"%",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"nbp",
".",
"get_out_file",
"(",
")",
")",
")",
"[",
"0",
"]",
"for",
"nbp",
"in",
"this_nbps",
")",
"for",
"d",
"in",
"dirs",
":",
"findex",
"=",
"os",
".",
"path",
".",
"join",
"(",
"d",
",",
"'index.rst'",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"foutdir",
",",
"findex",
")",
")",
":",
"s",
"+=",
"' %s\\n'",
"%",
"os",
".",
"path",
".",
"splitext",
"(",
"findex",
")",
"[",
"0",
"]",
"s",
"+=",
"'\\n'",
"for",
"nbp",
"in",
"this_nbps",
":",
"code_div",
"=",
"nbp",
".",
"code_div",
"if",
"code_div",
"is",
"not",
"None",
":",
"s",
"+=",
"code_div",
"+",
"'\\n'",
"else",
":",
"s",
"+=",
"nbp",
".",
"thumbnail_div",
"+",
"'\\n'",
"s",
"+=",
"\"\\n.. raw:: html\\n\\n <div style='clear:both'></div>\\n\"",
"for",
"label",
",",
"nbps",
"in",
"labels",
".",
"items",
"(",
")",
":",
"s",
"+=",
"'\\n.. only:: html\\n\\n .. rubric:: :ref:`%s`\\n\\n'",
"%",
"(",
"label",
")",
"for",
"nbp",
"in",
"nbps",
":",
"code_div",
"=",
"nbp",
".",
"code_div",
"if",
"code_div",
"is",
"not",
"None",
":",
"s",
"+=",
"code_div",
"+",
"'\\n'",
"else",
":",
"s",
"+=",
"nbp",
".",
"thumbnail_div",
"+",
"'\\n'",
"s",
"+=",
"\"\\n.. raw:: html\\n\\n <div style='clear:both'></div>\\n\"",
"s",
"+=",
"'\\n'",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"foutdir",
",",
"'index.rst'",
")",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"s",
")",
"return",
"this_label",
",",
"list",
"(",
"chain",
"(",
"this_nbps",
",",
"*",
"labels",
".",
"values",
"(",
")",
")",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
Gallery.from_sphinx
|
Class method to create a :class:`Gallery` instance from the
configuration of a sphinx application
|
sphinx_nbexamples/__init__.py
|
def from_sphinx(cls, app):
"""Class method to create a :class:`Gallery` instance from the
configuration of a sphinx application"""
app.config.html_static_path.append(os.path.join(
os.path.dirname(__file__), '_static'))
config = app.config.example_gallery_config
insert_bokeh = config.get('insert_bokeh')
if insert_bokeh:
if not isstring(insert_bokeh):
import bokeh
insert_bokeh = bokeh.__version__
app.add_stylesheet(
NotebookProcessor.BOKEH_STYLE_SHEET.format(
version=insert_bokeh))
app.add_javascript(
NotebookProcessor.BOKEH_JS.format(version=insert_bokeh))
insert_bokeh_widgets = config.get('insert_bokeh_widgets')
if insert_bokeh_widgets:
if not isstring(insert_bokeh_widgets):
import bokeh
insert_bokeh_widgets = bokeh.__version__
app.add_stylesheet(
NotebookProcessor.BOKEH_WIDGETS_STYLE_SHEET.format(
version=insert_bokeh_widgets))
app.add_javascript(
NotebookProcessor.BOKEH_WIDGETS_JS.format(
version=insert_bokeh_widgets))
if not app.config.process_examples:
return
cls(**app.config.example_gallery_config).process_directories()
|
def from_sphinx(cls, app):
"""Class method to create a :class:`Gallery` instance from the
configuration of a sphinx application"""
app.config.html_static_path.append(os.path.join(
os.path.dirname(__file__), '_static'))
config = app.config.example_gallery_config
insert_bokeh = config.get('insert_bokeh')
if insert_bokeh:
if not isstring(insert_bokeh):
import bokeh
insert_bokeh = bokeh.__version__
app.add_stylesheet(
NotebookProcessor.BOKEH_STYLE_SHEET.format(
version=insert_bokeh))
app.add_javascript(
NotebookProcessor.BOKEH_JS.format(version=insert_bokeh))
insert_bokeh_widgets = config.get('insert_bokeh_widgets')
if insert_bokeh_widgets:
if not isstring(insert_bokeh_widgets):
import bokeh
insert_bokeh_widgets = bokeh.__version__
app.add_stylesheet(
NotebookProcessor.BOKEH_WIDGETS_STYLE_SHEET.format(
version=insert_bokeh_widgets))
app.add_javascript(
NotebookProcessor.BOKEH_WIDGETS_JS.format(
version=insert_bokeh_widgets))
if not app.config.process_examples:
return
cls(**app.config.example_gallery_config).process_directories()
|
[
"Class",
"method",
"to",
"create",
"a",
":",
"class",
":",
"Gallery",
"instance",
"from",
"the",
"configuration",
"of",
"a",
"sphinx",
"application"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L878-L910
|
[
"def",
"from_sphinx",
"(",
"cls",
",",
"app",
")",
":",
"app",
".",
"config",
".",
"html_static_path",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'_static'",
")",
")",
"config",
"=",
"app",
".",
"config",
".",
"example_gallery_config",
"insert_bokeh",
"=",
"config",
".",
"get",
"(",
"'insert_bokeh'",
")",
"if",
"insert_bokeh",
":",
"if",
"not",
"isstring",
"(",
"insert_bokeh",
")",
":",
"import",
"bokeh",
"insert_bokeh",
"=",
"bokeh",
".",
"__version__",
"app",
".",
"add_stylesheet",
"(",
"NotebookProcessor",
".",
"BOKEH_STYLE_SHEET",
".",
"format",
"(",
"version",
"=",
"insert_bokeh",
")",
")",
"app",
".",
"add_javascript",
"(",
"NotebookProcessor",
".",
"BOKEH_JS",
".",
"format",
"(",
"version",
"=",
"insert_bokeh",
")",
")",
"insert_bokeh_widgets",
"=",
"config",
".",
"get",
"(",
"'insert_bokeh_widgets'",
")",
"if",
"insert_bokeh_widgets",
":",
"if",
"not",
"isstring",
"(",
"insert_bokeh_widgets",
")",
":",
"import",
"bokeh",
"insert_bokeh_widgets",
"=",
"bokeh",
".",
"__version__",
"app",
".",
"add_stylesheet",
"(",
"NotebookProcessor",
".",
"BOKEH_WIDGETS_STYLE_SHEET",
".",
"format",
"(",
"version",
"=",
"insert_bokeh_widgets",
")",
")",
"app",
".",
"add_javascript",
"(",
"NotebookProcessor",
".",
"BOKEH_WIDGETS_JS",
".",
"format",
"(",
"version",
"=",
"insert_bokeh_widgets",
")",
")",
"if",
"not",
"app",
".",
"config",
".",
"process_examples",
":",
"return",
"cls",
"(",
"*",
"*",
"app",
".",
"config",
".",
"example_gallery_config",
")",
".",
"process_directories",
"(",
")"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
Gallery.get_url
|
Return the url corresponding to the given notebook file
Parameters
----------
nbfile: str
The path of the notebook relative to the corresponding
:attr:``in_dir``
Returns
-------
str or None
The url or None if no url has been specified
|
sphinx_nbexamples/__init__.py
|
def get_url(self, nbfile):
"""Return the url corresponding to the given notebook file
Parameters
----------
nbfile: str
The path of the notebook relative to the corresponding
:attr:``in_dir``
Returns
-------
str or None
The url or None if no url has been specified
"""
urls = self.urls
if isinstance(urls, dict):
return urls.get(nbfile)
elif isstring(urls):
if not urls.endswith('/'):
urls += '/'
return urls + nbfile
|
def get_url(self, nbfile):
"""Return the url corresponding to the given notebook file
Parameters
----------
nbfile: str
The path of the notebook relative to the corresponding
:attr:``in_dir``
Returns
-------
str or None
The url or None if no url has been specified
"""
urls = self.urls
if isinstance(urls, dict):
return urls.get(nbfile)
elif isstring(urls):
if not urls.endswith('/'):
urls += '/'
return urls + nbfile
|
[
"Return",
"the",
"url",
"corresponding",
"to",
"the",
"given",
"notebook",
"file"
] |
Chilipp/sphinx-nbexamples
|
python
|
https://github.com/Chilipp/sphinx-nbexamples/blob/08e0319ff3c70f8a931dfa8890caf48add4d0470/sphinx_nbexamples/__init__.py#L912-L932
|
[
"def",
"get_url",
"(",
"self",
",",
"nbfile",
")",
":",
"urls",
"=",
"self",
".",
"urls",
"if",
"isinstance",
"(",
"urls",
",",
"dict",
")",
":",
"return",
"urls",
".",
"get",
"(",
"nbfile",
")",
"elif",
"isstring",
"(",
"urls",
")",
":",
"if",
"not",
"urls",
".",
"endswith",
"(",
"'/'",
")",
":",
"urls",
"+=",
"'/'",
"return",
"urls",
"+",
"nbfile"
] |
08e0319ff3c70f8a931dfa8890caf48add4d0470
|
test
|
Command.handle
|
command execution
|
transmeta/management/commands/sync_transmeta_db.py
|
def handle(self, *args, **options):
""" command execution """
assume_yes = options.get('assume_yes', False)
default_language = options.get('default_language', None)
# set manual transaction management
transaction.commit_unless_managed()
transaction.enter_transaction_management()
transaction.managed(True)
self.cursor = connection.cursor()
self.introspection = connection.introspection
self.default_lang = default_language or mandatory_language()
all_models = get_models()
found_db_change_fields = False
for model in all_models:
if hasattr(model._meta, 'translatable_fields'):
model_full_name = '%s.%s' % (model._meta.app_label, model._meta.module_name)
translatable_fields = get_all_translatable_fields(model, column_in_current_table=True)
db_table = model._meta.db_table
for field_name in translatable_fields:
db_table_fields = self.get_table_fields(db_table)
db_change_langs = list(set(list(self.get_db_change_languages(field_name, db_table_fields)) + [self.default_lang]))
if db_change_langs:
sql_sentences = self.get_sync_sql(field_name, db_change_langs, model, db_table_fields)
if sql_sentences:
found_db_change_fields = True
print_db_change_langs(db_change_langs, field_name, model_full_name)
execute_sql = ask_for_confirmation(sql_sentences, model_full_name, assume_yes)
if execute_sql:
print ('Executing SQL...')
for sentence in sql_sentences:
self.cursor.execute(sentence)
# commit
transaction.commit()
print ('Done')
else:
print ('SQL not executed')
if transaction.is_dirty():
transaction.commit()
transaction.leave_transaction_management()
if not found_db_change_fields:
print ('\nNo new translatable fields detected')
if default_language:
variable = 'TRANSMETA_DEFAULT_LANGUAGE'
has_transmeta_default_language = getattr(settings, variable, False)
if not has_transmeta_default_language:
variable = 'LANGUAGE_CODE'
if getattr(settings, variable) != default_language:
print (('\n\nYou should change in your settings '
'the %s variable to "%s"' % (variable, default_language)))
|
def handle(self, *args, **options):
""" command execution """
assume_yes = options.get('assume_yes', False)
default_language = options.get('default_language', None)
# set manual transaction management
transaction.commit_unless_managed()
transaction.enter_transaction_management()
transaction.managed(True)
self.cursor = connection.cursor()
self.introspection = connection.introspection
self.default_lang = default_language or mandatory_language()
all_models = get_models()
found_db_change_fields = False
for model in all_models:
if hasattr(model._meta, 'translatable_fields'):
model_full_name = '%s.%s' % (model._meta.app_label, model._meta.module_name)
translatable_fields = get_all_translatable_fields(model, column_in_current_table=True)
db_table = model._meta.db_table
for field_name in translatable_fields:
db_table_fields = self.get_table_fields(db_table)
db_change_langs = list(set(list(self.get_db_change_languages(field_name, db_table_fields)) + [self.default_lang]))
if db_change_langs:
sql_sentences = self.get_sync_sql(field_name, db_change_langs, model, db_table_fields)
if sql_sentences:
found_db_change_fields = True
print_db_change_langs(db_change_langs, field_name, model_full_name)
execute_sql = ask_for_confirmation(sql_sentences, model_full_name, assume_yes)
if execute_sql:
print ('Executing SQL...')
for sentence in sql_sentences:
self.cursor.execute(sentence)
# commit
transaction.commit()
print ('Done')
else:
print ('SQL not executed')
if transaction.is_dirty():
transaction.commit()
transaction.leave_transaction_management()
if not found_db_change_fields:
print ('\nNo new translatable fields detected')
if default_language:
variable = 'TRANSMETA_DEFAULT_LANGUAGE'
has_transmeta_default_language = getattr(settings, variable, False)
if not has_transmeta_default_language:
variable = 'LANGUAGE_CODE'
if getattr(settings, variable) != default_language:
print (('\n\nYou should change in your settings '
'the %s variable to "%s"' % (variable, default_language)))
|
[
"command",
"execution"
] |
Yaco-Sistemas/django-transmeta
|
python
|
https://github.com/Yaco-Sistemas/django-transmeta/blob/de070aae27770df046b4ba995f01f654db7ed1a2/transmeta/management/commands/sync_transmeta_db.py#L63-L117
|
[
"def",
"handle",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"assume_yes",
"=",
"options",
".",
"get",
"(",
"'assume_yes'",
",",
"False",
")",
"default_language",
"=",
"options",
".",
"get",
"(",
"'default_language'",
",",
"None",
")",
"# set manual transaction management",
"transaction",
".",
"commit_unless_managed",
"(",
")",
"transaction",
".",
"enter_transaction_management",
"(",
")",
"transaction",
".",
"managed",
"(",
"True",
")",
"self",
".",
"cursor",
"=",
"connection",
".",
"cursor",
"(",
")",
"self",
".",
"introspection",
"=",
"connection",
".",
"introspection",
"self",
".",
"default_lang",
"=",
"default_language",
"or",
"mandatory_language",
"(",
")",
"all_models",
"=",
"get_models",
"(",
")",
"found_db_change_fields",
"=",
"False",
"for",
"model",
"in",
"all_models",
":",
"if",
"hasattr",
"(",
"model",
".",
"_meta",
",",
"'translatable_fields'",
")",
":",
"model_full_name",
"=",
"'%s.%s'",
"%",
"(",
"model",
".",
"_meta",
".",
"app_label",
",",
"model",
".",
"_meta",
".",
"module_name",
")",
"translatable_fields",
"=",
"get_all_translatable_fields",
"(",
"model",
",",
"column_in_current_table",
"=",
"True",
")",
"db_table",
"=",
"model",
".",
"_meta",
".",
"db_table",
"for",
"field_name",
"in",
"translatable_fields",
":",
"db_table_fields",
"=",
"self",
".",
"get_table_fields",
"(",
"db_table",
")",
"db_change_langs",
"=",
"list",
"(",
"set",
"(",
"list",
"(",
"self",
".",
"get_db_change_languages",
"(",
"field_name",
",",
"db_table_fields",
")",
")",
"+",
"[",
"self",
".",
"default_lang",
"]",
")",
")",
"if",
"db_change_langs",
":",
"sql_sentences",
"=",
"self",
".",
"get_sync_sql",
"(",
"field_name",
",",
"db_change_langs",
",",
"model",
",",
"db_table_fields",
")",
"if",
"sql_sentences",
":",
"found_db_change_fields",
"=",
"True",
"print_db_change_langs",
"(",
"db_change_langs",
",",
"field_name",
",",
"model_full_name",
")",
"execute_sql",
"=",
"ask_for_confirmation",
"(",
"sql_sentences",
",",
"model_full_name",
",",
"assume_yes",
")",
"if",
"execute_sql",
":",
"print",
"(",
"'Executing SQL...'",
")",
"for",
"sentence",
"in",
"sql_sentences",
":",
"self",
".",
"cursor",
".",
"execute",
"(",
"sentence",
")",
"# commit",
"transaction",
".",
"commit",
"(",
")",
"print",
"(",
"'Done'",
")",
"else",
":",
"print",
"(",
"'SQL not executed'",
")",
"if",
"transaction",
".",
"is_dirty",
"(",
")",
":",
"transaction",
".",
"commit",
"(",
")",
"transaction",
".",
"leave_transaction_management",
"(",
")",
"if",
"not",
"found_db_change_fields",
":",
"print",
"(",
"'\\nNo new translatable fields detected'",
")",
"if",
"default_language",
":",
"variable",
"=",
"'TRANSMETA_DEFAULT_LANGUAGE'",
"has_transmeta_default_language",
"=",
"getattr",
"(",
"settings",
",",
"variable",
",",
"False",
")",
"if",
"not",
"has_transmeta_default_language",
":",
"variable",
"=",
"'LANGUAGE_CODE'",
"if",
"getattr",
"(",
"settings",
",",
"variable",
")",
"!=",
"default_language",
":",
"print",
"(",
"(",
"'\\n\\nYou should change in your settings '",
"'the %s variable to \"%s\"'",
"%",
"(",
"variable",
",",
"default_language",
")",
")",
")"
] |
de070aae27770df046b4ba995f01f654db7ed1a2
|
test
|
Command.get_db_change_languages
|
get only db changes fields
|
transmeta/management/commands/sync_transmeta_db.py
|
def get_db_change_languages(self, field_name, db_table_fields):
""" get only db changes fields """
for lang_code, lang_name in get_languages():
if get_real_fieldname(field_name, lang_code) not in db_table_fields:
yield lang_code
for db_table_field in db_table_fields:
pattern = re.compile('^%s_(?P<lang>\w{2})$' % field_name)
m = pattern.match(db_table_field)
if not m:
continue
lang = m.group('lang')
yield lang
|
def get_db_change_languages(self, field_name, db_table_fields):
""" get only db changes fields """
for lang_code, lang_name in get_languages():
if get_real_fieldname(field_name, lang_code) not in db_table_fields:
yield lang_code
for db_table_field in db_table_fields:
pattern = re.compile('^%s_(?P<lang>\w{2})$' % field_name)
m = pattern.match(db_table_field)
if not m:
continue
lang = m.group('lang')
yield lang
|
[
"get",
"only",
"db",
"changes",
"fields"
] |
Yaco-Sistemas/django-transmeta
|
python
|
https://github.com/Yaco-Sistemas/django-transmeta/blob/de070aae27770df046b4ba995f01f654db7ed1a2/transmeta/management/commands/sync_transmeta_db.py#L134-L145
|
[
"def",
"get_db_change_languages",
"(",
"self",
",",
"field_name",
",",
"db_table_fields",
")",
":",
"for",
"lang_code",
",",
"lang_name",
"in",
"get_languages",
"(",
")",
":",
"if",
"get_real_fieldname",
"(",
"field_name",
",",
"lang_code",
")",
"not",
"in",
"db_table_fields",
":",
"yield",
"lang_code",
"for",
"db_table_field",
"in",
"db_table_fields",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"'^%s_(?P<lang>\\w{2})$'",
"%",
"field_name",
")",
"m",
"=",
"pattern",
".",
"match",
"(",
"db_table_field",
")",
"if",
"not",
"m",
":",
"continue",
"lang",
"=",
"m",
".",
"group",
"(",
"'lang'",
")",
"yield",
"lang"
] |
de070aae27770df046b4ba995f01f654db7ed1a2
|
test
|
Command.get_sync_sql
|
returns SQL needed for sync schema for a new translatable field
|
transmeta/management/commands/sync_transmeta_db.py
|
def get_sync_sql(self, field_name, db_change_langs, model, db_table_fields):
""" returns SQL needed for sync schema for a new translatable field """
qn = connection.ops.quote_name
style = no_style()
sql_output = []
db_table = model._meta.db_table
was_translatable_before = self.was_translatable_before(field_name, db_table_fields)
default_f = self.get_default_field(field_name, model)
default_f_required = default_f and self.get_field_required_in_db(db_table,
default_f.name,
value_not_implemented=False)
for lang in db_change_langs:
new_field = get_real_fieldname(field_name, lang)
try:
f = model._meta.get_field(new_field)
col_type = self.get_type_of_db_field(field_name, model)
field_column = f.column
except FieldDoesNotExist: # columns in db, removed the settings.LANGUGES
field_column = new_field
col_type = self.get_type_of_db_field(field_name, model)
field_sql = [style.SQL_FIELD(qn(field_column)), style.SQL_COLTYPE(col_type)]
alter_colum_set = 'ALTER COLUMN %s SET' % qn(field_column)
if default_f:
alter_colum_drop = 'ALTER COLUMN %s DROP' % qn(field_column)
not_null = style.SQL_KEYWORD('NOT NULL')
if 'mysql' in backend.__name__:
alter_colum_set = 'MODIFY %s %s' % (qn(field_column), col_type)
not_null = style.SQL_KEYWORD('NULL')
if default_f:
alter_colum_drop = 'MODIFY %s %s' % (qn(field_column), col_type)
# column creation
if not new_field in db_table_fields:
sql_output.append("ALTER TABLE %s ADD COLUMN %s" % (qn(db_table), ' '.join(field_sql)))
if lang == self.default_lang and not was_translatable_before:
# data copy from old field (only for default language)
sql_output.append("UPDATE %s SET %s = %s" % (qn(db_table), \
qn(field_column), qn(field_name)))
if not f.null:
# changing to NOT NULL after having data copied
sql_output.append("ALTER TABLE %s %s %s" % \
(qn(db_table), alter_colum_set, \
style.SQL_KEYWORD('NOT NULL')))
elif default_f and not default_f.null:
if lang == self.default_lang:
f_required = self.get_field_required_in_db(db_table,
field_column,
value_not_implemented=False)
if default_f.name == new_field and default_f_required:
continue
if not f_required:
# data copy from old field (only for default language)
sql_output.append(("UPDATE %(db_table)s SET %(f_colum)s = '%(value_default)s' "
"WHERE %(f_colum)s is %(null)s or %(f_colum)s = '' " %
{'db_table': qn(db_table),
'f_colum': qn(field_column),
'value_default': self.get_value_default(),
'null': style.SQL_KEYWORD('NULL'),
}))
# changing to NOT NULL after having data copied
sql_output.append("ALTER TABLE %s %s %s" % \
(qn(db_table), alter_colum_set, \
style.SQL_KEYWORD('NOT NULL')))
else:
f_required = self.get_field_required_in_db(db_table,
field_column,
value_not_implemented=True)
if f_required:
sql_output.append(("ALTER TABLE %s %s %s" %
(qn(db_table), alter_colum_drop, not_null)))
if not was_translatable_before:
# we drop field only if field was no translatable before
sql_output.append("ALTER TABLE %s DROP COLUMN %s" % (qn(db_table), qn(field_name)))
return sql_output
|
def get_sync_sql(self, field_name, db_change_langs, model, db_table_fields):
""" returns SQL needed for sync schema for a new translatable field """
qn = connection.ops.quote_name
style = no_style()
sql_output = []
db_table = model._meta.db_table
was_translatable_before = self.was_translatable_before(field_name, db_table_fields)
default_f = self.get_default_field(field_name, model)
default_f_required = default_f and self.get_field_required_in_db(db_table,
default_f.name,
value_not_implemented=False)
for lang in db_change_langs:
new_field = get_real_fieldname(field_name, lang)
try:
f = model._meta.get_field(new_field)
col_type = self.get_type_of_db_field(field_name, model)
field_column = f.column
except FieldDoesNotExist: # columns in db, removed the settings.LANGUGES
field_column = new_field
col_type = self.get_type_of_db_field(field_name, model)
field_sql = [style.SQL_FIELD(qn(field_column)), style.SQL_COLTYPE(col_type)]
alter_colum_set = 'ALTER COLUMN %s SET' % qn(field_column)
if default_f:
alter_colum_drop = 'ALTER COLUMN %s DROP' % qn(field_column)
not_null = style.SQL_KEYWORD('NOT NULL')
if 'mysql' in backend.__name__:
alter_colum_set = 'MODIFY %s %s' % (qn(field_column), col_type)
not_null = style.SQL_KEYWORD('NULL')
if default_f:
alter_colum_drop = 'MODIFY %s %s' % (qn(field_column), col_type)
# column creation
if not new_field in db_table_fields:
sql_output.append("ALTER TABLE %s ADD COLUMN %s" % (qn(db_table), ' '.join(field_sql)))
if lang == self.default_lang and not was_translatable_before:
# data copy from old field (only for default language)
sql_output.append("UPDATE %s SET %s = %s" % (qn(db_table), \
qn(field_column), qn(field_name)))
if not f.null:
# changing to NOT NULL after having data copied
sql_output.append("ALTER TABLE %s %s %s" % \
(qn(db_table), alter_colum_set, \
style.SQL_KEYWORD('NOT NULL')))
elif default_f and not default_f.null:
if lang == self.default_lang:
f_required = self.get_field_required_in_db(db_table,
field_column,
value_not_implemented=False)
if default_f.name == new_field and default_f_required:
continue
if not f_required:
# data copy from old field (only for default language)
sql_output.append(("UPDATE %(db_table)s SET %(f_colum)s = '%(value_default)s' "
"WHERE %(f_colum)s is %(null)s or %(f_colum)s = '' " %
{'db_table': qn(db_table),
'f_colum': qn(field_column),
'value_default': self.get_value_default(),
'null': style.SQL_KEYWORD('NULL'),
}))
# changing to NOT NULL after having data copied
sql_output.append("ALTER TABLE %s %s %s" % \
(qn(db_table), alter_colum_set, \
style.SQL_KEYWORD('NOT NULL')))
else:
f_required = self.get_field_required_in_db(db_table,
field_column,
value_not_implemented=True)
if f_required:
sql_output.append(("ALTER TABLE %s %s %s" %
(qn(db_table), alter_colum_drop, not_null)))
if not was_translatable_before:
# we drop field only if field was no translatable before
sql_output.append("ALTER TABLE %s DROP COLUMN %s" % (qn(db_table), qn(field_name)))
return sql_output
|
[
"returns",
"SQL",
"needed",
"for",
"sync",
"schema",
"for",
"a",
"new",
"translatable",
"field"
] |
Yaco-Sistemas/django-transmeta
|
python
|
https://github.com/Yaco-Sistemas/django-transmeta/blob/de070aae27770df046b4ba995f01f654db7ed1a2/transmeta/management/commands/sync_transmeta_db.py#L179-L256
|
[
"def",
"get_sync_sql",
"(",
"self",
",",
"field_name",
",",
"db_change_langs",
",",
"model",
",",
"db_table_fields",
")",
":",
"qn",
"=",
"connection",
".",
"ops",
".",
"quote_name",
"style",
"=",
"no_style",
"(",
")",
"sql_output",
"=",
"[",
"]",
"db_table",
"=",
"model",
".",
"_meta",
".",
"db_table",
"was_translatable_before",
"=",
"self",
".",
"was_translatable_before",
"(",
"field_name",
",",
"db_table_fields",
")",
"default_f",
"=",
"self",
".",
"get_default_field",
"(",
"field_name",
",",
"model",
")",
"default_f_required",
"=",
"default_f",
"and",
"self",
".",
"get_field_required_in_db",
"(",
"db_table",
",",
"default_f",
".",
"name",
",",
"value_not_implemented",
"=",
"False",
")",
"for",
"lang",
"in",
"db_change_langs",
":",
"new_field",
"=",
"get_real_fieldname",
"(",
"field_name",
",",
"lang",
")",
"try",
":",
"f",
"=",
"model",
".",
"_meta",
".",
"get_field",
"(",
"new_field",
")",
"col_type",
"=",
"self",
".",
"get_type_of_db_field",
"(",
"field_name",
",",
"model",
")",
"field_column",
"=",
"f",
".",
"column",
"except",
"FieldDoesNotExist",
":",
"# columns in db, removed the settings.LANGUGES",
"field_column",
"=",
"new_field",
"col_type",
"=",
"self",
".",
"get_type_of_db_field",
"(",
"field_name",
",",
"model",
")",
"field_sql",
"=",
"[",
"style",
".",
"SQL_FIELD",
"(",
"qn",
"(",
"field_column",
")",
")",
",",
"style",
".",
"SQL_COLTYPE",
"(",
"col_type",
")",
"]",
"alter_colum_set",
"=",
"'ALTER COLUMN %s SET'",
"%",
"qn",
"(",
"field_column",
")",
"if",
"default_f",
":",
"alter_colum_drop",
"=",
"'ALTER COLUMN %s DROP'",
"%",
"qn",
"(",
"field_column",
")",
"not_null",
"=",
"style",
".",
"SQL_KEYWORD",
"(",
"'NOT NULL'",
")",
"if",
"'mysql'",
"in",
"backend",
".",
"__name__",
":",
"alter_colum_set",
"=",
"'MODIFY %s %s'",
"%",
"(",
"qn",
"(",
"field_column",
")",
",",
"col_type",
")",
"not_null",
"=",
"style",
".",
"SQL_KEYWORD",
"(",
"'NULL'",
")",
"if",
"default_f",
":",
"alter_colum_drop",
"=",
"'MODIFY %s %s'",
"%",
"(",
"qn",
"(",
"field_column",
")",
",",
"col_type",
")",
"# column creation",
"if",
"not",
"new_field",
"in",
"db_table_fields",
":",
"sql_output",
".",
"append",
"(",
"\"ALTER TABLE %s ADD COLUMN %s\"",
"%",
"(",
"qn",
"(",
"db_table",
")",
",",
"' '",
".",
"join",
"(",
"field_sql",
")",
")",
")",
"if",
"lang",
"==",
"self",
".",
"default_lang",
"and",
"not",
"was_translatable_before",
":",
"# data copy from old field (only for default language)",
"sql_output",
".",
"append",
"(",
"\"UPDATE %s SET %s = %s\"",
"%",
"(",
"qn",
"(",
"db_table",
")",
",",
"qn",
"(",
"field_column",
")",
",",
"qn",
"(",
"field_name",
")",
")",
")",
"if",
"not",
"f",
".",
"null",
":",
"# changing to NOT NULL after having data copied",
"sql_output",
".",
"append",
"(",
"\"ALTER TABLE %s %s %s\"",
"%",
"(",
"qn",
"(",
"db_table",
")",
",",
"alter_colum_set",
",",
"style",
".",
"SQL_KEYWORD",
"(",
"'NOT NULL'",
")",
")",
")",
"elif",
"default_f",
"and",
"not",
"default_f",
".",
"null",
":",
"if",
"lang",
"==",
"self",
".",
"default_lang",
":",
"f_required",
"=",
"self",
".",
"get_field_required_in_db",
"(",
"db_table",
",",
"field_column",
",",
"value_not_implemented",
"=",
"False",
")",
"if",
"default_f",
".",
"name",
"==",
"new_field",
"and",
"default_f_required",
":",
"continue",
"if",
"not",
"f_required",
":",
"# data copy from old field (only for default language)",
"sql_output",
".",
"append",
"(",
"(",
"\"UPDATE %(db_table)s SET %(f_colum)s = '%(value_default)s' \"",
"\"WHERE %(f_colum)s is %(null)s or %(f_colum)s = '' \"",
"%",
"{",
"'db_table'",
":",
"qn",
"(",
"db_table",
")",
",",
"'f_colum'",
":",
"qn",
"(",
"field_column",
")",
",",
"'value_default'",
":",
"self",
".",
"get_value_default",
"(",
")",
",",
"'null'",
":",
"style",
".",
"SQL_KEYWORD",
"(",
"'NULL'",
")",
",",
"}",
")",
")",
"# changing to NOT NULL after having data copied",
"sql_output",
".",
"append",
"(",
"\"ALTER TABLE %s %s %s\"",
"%",
"(",
"qn",
"(",
"db_table",
")",
",",
"alter_colum_set",
",",
"style",
".",
"SQL_KEYWORD",
"(",
"'NOT NULL'",
")",
")",
")",
"else",
":",
"f_required",
"=",
"self",
".",
"get_field_required_in_db",
"(",
"db_table",
",",
"field_column",
",",
"value_not_implemented",
"=",
"True",
")",
"if",
"f_required",
":",
"sql_output",
".",
"append",
"(",
"(",
"\"ALTER TABLE %s %s %s\"",
"%",
"(",
"qn",
"(",
"db_table",
")",
",",
"alter_colum_drop",
",",
"not_null",
")",
")",
")",
"if",
"not",
"was_translatable_before",
":",
"# we drop field only if field was no translatable before",
"sql_output",
".",
"append",
"(",
"\"ALTER TABLE %s DROP COLUMN %s\"",
"%",
"(",
"qn",
"(",
"db_table",
")",
",",
"qn",
"(",
"field_name",
")",
")",
")",
"return",
"sql_output"
] |
de070aae27770df046b4ba995f01f654db7ed1a2
|
test
|
get_all_translatable_fields
|
returns all translatable fields in a model (including superclasses ones)
|
transmeta/__init__.py
|
def get_all_translatable_fields(model, model_trans_fields=None, column_in_current_table=False):
    """Collect every translatable field of ``model``, walking up its base classes.

    Field names declared on ``model._meta.translatable_fields`` are merged with
    those of its superclasses.  When ``column_in_current_table`` is True, only
    abstract parents are followed (concrete parents keep their own table).
    Returns the accumulated field names as a tuple.
    """
    if model_trans_fields is None:
        model_trans_fields = set()
    model_trans_fields.update(getattr(model._meta, 'translatable_fields', []))
    for base in model.__bases__:
        base_meta = getattr(base, '_meta', None)
        if base_meta and (not column_in_current_table or base_meta.abstract):
            get_all_translatable_fields(base, model_trans_fields, column_in_current_table)
    return tuple(model_trans_fields)
|
def get_all_translatable_fields(model, model_trans_fields=None, column_in_current_table=False):
    """Collect every translatable field of ``model``, walking up its base classes.

    Field names declared on ``model._meta.translatable_fields`` are merged with
    those of its superclasses.  When ``column_in_current_table`` is True, only
    abstract parents are followed (concrete parents keep their own table).
    Returns the accumulated field names as a tuple.
    """
    if model_trans_fields is None:
        model_trans_fields = set()
    model_trans_fields.update(getattr(model._meta, 'translatable_fields', []))
    for base in model.__bases__:
        base_meta = getattr(base, '_meta', None)
        if base_meta and (not column_in_current_table or base_meta.abstract):
            get_all_translatable_fields(base, model_trans_fields, column_in_current_table)
    return tuple(model_trans_fields)
|
[
"returns",
"all",
"translatable",
"fields",
"in",
"a",
"model",
"(",
"including",
"superclasses",
"ones",
")"
] |
Yaco-Sistemas/django-transmeta
|
python
|
https://github.com/Yaco-Sistemas/django-transmeta/blob/de070aae27770df046b4ba995f01f654db7ed1a2/transmeta/__init__.py#L59-L67
|
[
"def",
"get_all_translatable_fields",
"(",
"model",
",",
"model_trans_fields",
"=",
"None",
",",
"column_in_current_table",
"=",
"False",
")",
":",
"if",
"model_trans_fields",
"is",
"None",
":",
"model_trans_fields",
"=",
"set",
"(",
")",
"model_trans_fields",
".",
"update",
"(",
"set",
"(",
"getattr",
"(",
"model",
".",
"_meta",
",",
"'translatable_fields'",
",",
"[",
"]",
")",
")",
")",
"for",
"parent",
"in",
"model",
".",
"__bases__",
":",
"if",
"getattr",
"(",
"parent",
",",
"'_meta'",
",",
"None",
")",
"and",
"(",
"not",
"column_in_current_table",
"or",
"parent",
".",
"_meta",
".",
"abstract",
")",
":",
"get_all_translatable_fields",
"(",
"parent",
",",
"model_trans_fields",
",",
"column_in_current_table",
")",
"return",
"tuple",
"(",
"model_trans_fields",
")"
] |
de070aae27770df046b4ba995f01f654db7ed1a2
|
test
|
default_value
|
When accessing to the name of the field itself, the value
in the current language will be returned. Unless it's set,
the value in the default language will be returned.
|
transmeta/__init__.py
|
def default_value(field):
    """Build an accessor that resolves ``field`` in the active language.

    The returned function tries, in order: the full active language code, its
    two-letter prefix, the project's fallback language, and finally
    ``settings.LANGUAGE_CODE``.  The first non-empty value wins.
    """
    def default_value_func(self):
        def value_for(lang):
            # Read the language-suffixed column, e.g. "name_en" for "name"/"en".
            return getattr(self, get_real_fieldname(field, lang), None)

        current = get_language()
        result = value_for(current)
        if result:
            return result
        result = value_for(current[:2])
        if result:
            return result
        result = value_for(fallback_language())
        if result:
            return result
        return value_for(settings.LANGUAGE_CODE)
    return default_value_func
|
def default_value(field):
    """Build an accessor that resolves ``field`` in the active language.

    The returned function tries, in order: the full active language code, its
    two-letter prefix, the project's fallback language, and finally
    ``settings.LANGUAGE_CODE``.  The first non-empty value wins.
    """
    def default_value_func(self):
        def value_for(lang):
            # Read the language-suffixed column, e.g. "name_en" for "name"/"en".
            return getattr(self, get_real_fieldname(field, lang), None)

        current = get_language()
        result = value_for(current)
        if result:
            return result
        result = value_for(current[:2])
        if result:
            return result
        result = value_for(fallback_language())
        if result:
            return result
        return value_for(settings.LANGUAGE_CODE)
    return default_value_func
|
[
"When",
"accessing",
"to",
"the",
"name",
"of",
"the",
"field",
"itself",
"the",
"value",
"in",
"the",
"current",
"language",
"will",
"be",
"returned",
".",
"Unless",
"it",
"s",
"set",
"the",
"value",
"in",
"the",
"default",
"language",
"will",
"be",
"returned",
"."
] |
Yaco-Sistemas/django-transmeta
|
python
|
https://github.com/Yaco-Sistemas/django-transmeta/blob/de070aae27770df046b4ba995f01f654db7ed1a2/transmeta/__init__.py#L70-L92
|
[
"def",
"default_value",
"(",
"field",
")",
":",
"def",
"default_value_func",
"(",
"self",
")",
":",
"attname",
"=",
"lambda",
"x",
":",
"get_real_fieldname",
"(",
"field",
",",
"x",
")",
"if",
"getattr",
"(",
"self",
",",
"attname",
"(",
"get_language",
"(",
")",
")",
",",
"None",
")",
":",
"result",
"=",
"getattr",
"(",
"self",
",",
"attname",
"(",
"get_language",
"(",
")",
")",
")",
"elif",
"getattr",
"(",
"self",
",",
"attname",
"(",
"get_language",
"(",
")",
"[",
":",
"2",
"]",
")",
",",
"None",
")",
":",
"result",
"=",
"getattr",
"(",
"self",
",",
"attname",
"(",
"get_language",
"(",
")",
"[",
":",
"2",
"]",
")",
")",
"else",
":",
"default_language",
"=",
"fallback_language",
"(",
")",
"if",
"getattr",
"(",
"self",
",",
"attname",
"(",
"default_language",
")",
",",
"None",
")",
":",
"result",
"=",
"getattr",
"(",
"self",
",",
"attname",
"(",
"default_language",
")",
",",
"None",
")",
"else",
":",
"result",
"=",
"getattr",
"(",
"self",
",",
"attname",
"(",
"settings",
".",
"LANGUAGE_CODE",
")",
",",
"None",
")",
"return",
"result",
"return",
"default_value_func"
] |
de070aae27770df046b4ba995f01f654db7ed1a2
|
test
|
process
|
Post processors are functions that receive file objects,
perform necessary operations and return the results as file objects.
|
thumbnails/post_processors.py
|
def process(thumbnail_file, size, **kwargs):
    """
    Run ``thumbnail_file`` through every post-processor configured for ``size``.

    Post processors receive a file object, perform their operation, and return
    a file object; they are applied in the order declared under
    ``conf.SIZES[size]['POST_PROCESSORS']``.
    """
    from . import conf
    for entry in conf.SIZES[size]['POST_PROCESSORS']:
        entry['processor'](thumbnail_file, **entry['kwargs'])
    return thumbnail_file
|
def process(thumbnail_file, size, **kwargs):
    """
    Run ``thumbnail_file`` through every post-processor configured for ``size``.

    Post processors receive a file object, perform their operation, and return
    a file object; they are applied in the order declared under
    ``conf.SIZES[size]['POST_PROCESSORS']``.
    """
    from . import conf
    for entry in conf.SIZES[size]['POST_PROCESSORS']:
        entry['processor'](thumbnail_file, **entry['kwargs'])
    return thumbnail_file
|
[
"Post",
"processors",
"are",
"functions",
"that",
"receive",
"file",
"objects",
"performs",
"necessary",
"operations",
"and",
"return",
"the",
"results",
"as",
"file",
"objects",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/post_processors.py#L17-L28
|
[
"def",
"process",
"(",
"thumbnail_file",
",",
"size",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"import",
"conf",
"size_dict",
"=",
"conf",
".",
"SIZES",
"[",
"size",
"]",
"for",
"processor",
"in",
"size_dict",
"[",
"'POST_PROCESSORS'",
"]",
":",
"processor",
"[",
"'processor'",
"]",
"(",
"thumbnail_file",
",",
"*",
"*",
"processor",
"[",
"'kwargs'",
"]",
")",
"return",
"thumbnail_file"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
optimize
|
A post processing function to optimize file size. Accepts commands
to optimize JPG, PNG and GIF images as arguments. Example:
THUMBNAILS = {
# Other options...
'POST_PROCESSORS': [
{
'processor': 'thumbnails.post_processors.optimize',
'png_command': 'optipng -force -o3 "%(filename)s"',
'jpg_command': 'jpegoptim -f --strip-all "%(filename)s"',
},
],
}
Note: using output redirection in commands may cause unpredictable results.
For example 'optipng -force -o3 "%(filename)s" &> /dev/null' may cause
optimize command to fail on some systems.
|
thumbnails/post_processors.py
|
def optimize(thumbnail_file, jpg_command=None, png_command=None,
             gif_command=None):
    """
    A post processing function to optimize file size. Accepts commands
    to optimize JPG, PNG and GIF images as arguments. Example:
    THUMBNAILS = {
        # Other options...
        'POST_PROCESSORS': [
            {
                'processor': 'thumbnails.post_processors.optimize',
                'png_command': 'optipng -force -o3 "%(filename)s"',
                'jpg_command': 'jpegoptim -f --strip-all "%(filename)s"',
            },
        ],
    }
    Note: using output redirection in commands may cause unpredictable results.
    For example 'optipng -force -o3 "%(filename)s" &> /dev/null' may cause
    optimize command to fail on some systems.
    """
    temp_dir = get_or_create_temp_dir()
    thumbnail_filename = os.path.join(temp_dir, "%s" % shortuuid.uuid())
    # Context manager guarantees the handle is closed even if read() raises,
    # so the temp file never leaks an open descriptor.
    with open(thumbnail_filename, 'wb') as f:
        f.write(thumbnail_file.read())

    # Detect filetype. NOTE: imghdr reports JPEGs as "jpeg" (never "jpg");
    # the "jpg" key is kept only as a defensive alias.
    filetype = imghdr.what(thumbnail_filename)

    # Map detected filetype to the matching optimization command.
    commands = {
        'jpg': jpg_command,
        'jpeg': jpg_command,
        'png': png_command,
        'gif': gif_command,
    }
    command = commands.get(filetype)

    # Run the external optimizer in place on the temp file.
    if command:
        command = command % {'filename': thumbnail_filename}
        call(command, shell=True)
    optimized_file = File(open(thumbnail_filename, 'rb'))
    # NOTE(review): removing the file while a handle is still open works on
    # POSIX but would fail on Windows — TODO confirm target platforms.
    os.remove(thumbnail_filename)
    return optimized_file
|
def optimize(thumbnail_file, jpg_command=None, png_command=None,
             gif_command=None):
    """
    A post processing function to optimize file size. Accepts commands
    to optimize JPG, PNG and GIF images as arguments. Example:
    THUMBNAILS = {
        # Other options...
        'POST_PROCESSORS': [
            {
                'processor': 'thumbnails.post_processors.optimize',
                'png_command': 'optipng -force -o3 "%(filename)s"',
                'jpg_command': 'jpegoptim -f --strip-all "%(filename)s"',
            },
        ],
    }
    Note: using output redirection in commands may cause unpredictable results.
    For example 'optipng -force -o3 "%(filename)s" &> /dev/null' may cause
    optimize command to fail on some systems.
    """
    temp_dir = get_or_create_temp_dir()
    thumbnail_filename = os.path.join(temp_dir, "%s" % shortuuid.uuid())
    # Context manager guarantees the handle is closed even if read() raises,
    # so the temp file never leaks an open descriptor.
    with open(thumbnail_filename, 'wb') as f:
        f.write(thumbnail_file.read())

    # Detect filetype. NOTE: imghdr reports JPEGs as "jpeg" (never "jpg");
    # the "jpg" key is kept only as a defensive alias.
    filetype = imghdr.what(thumbnail_filename)

    # Map detected filetype to the matching optimization command.
    commands = {
        'jpg': jpg_command,
        'jpeg': jpg_command,
        'png': png_command,
        'gif': gif_command,
    }
    command = commands.get(filetype)

    # Run the external optimizer in place on the temp file.
    if command:
        command = command % {'filename': thumbnail_filename}
        call(command, shell=True)
    optimized_file = File(open(thumbnail_filename, 'rb'))
    # NOTE(review): removing the file while a handle is still open works on
    # POSIX but would fail on Windows — TODO confirm target platforms.
    os.remove(thumbnail_filename)
    return optimized_file
|
[
"A",
"post",
"processing",
"function",
"to",
"optimize",
"file",
"size",
".",
"Accepts",
"commands",
"to",
"optimize",
"JPG",
"PNG",
"and",
"GIF",
"images",
"as",
"arguments",
".",
"Example",
":"
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/post_processors.py#L31-L79
|
[
"def",
"optimize",
"(",
"thumbnail_file",
",",
"jpg_command",
"=",
"None",
",",
"png_command",
"=",
"None",
",",
"gif_command",
"=",
"None",
")",
":",
"temp_dir",
"=",
"get_or_create_temp_dir",
"(",
")",
"thumbnail_filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"temp_dir",
",",
"\"%s\"",
"%",
"shortuuid",
".",
"uuid",
"(",
")",
")",
"f",
"=",
"open",
"(",
"thumbnail_filename",
",",
"'wb'",
")",
"f",
".",
"write",
"(",
"thumbnail_file",
".",
"read",
"(",
")",
")",
"f",
".",
"close",
"(",
")",
"# Detect filetype",
"filetype",
"=",
"imghdr",
".",
"what",
"(",
"thumbnail_filename",
")",
"# Construct command to optimize image based on filetype",
"command",
"=",
"None",
"if",
"filetype",
"==",
"\"jpg\"",
"or",
"filetype",
"==",
"\"jpeg\"",
":",
"command",
"=",
"jpg_command",
"elif",
"filetype",
"==",
"\"png\"",
":",
"command",
"=",
"png_command",
"elif",
"filetype",
"==",
"\"gif\"",
":",
"command",
"=",
"gif_command",
"# Run Command",
"if",
"command",
":",
"command",
"=",
"command",
"%",
"{",
"'filename'",
":",
"thumbnail_filename",
"}",
"call",
"(",
"command",
",",
"shell",
"=",
"True",
")",
"optimized_file",
"=",
"File",
"(",
"open",
"(",
"thumbnail_filename",
",",
"'rb'",
")",
")",
"os",
".",
"remove",
"(",
"thumbnail_filename",
")",
"return",
"optimized_file"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
import_attribute
|
Return an attribute from a dotted path name (e.g. "path.to.func").
Copied from nvie's rq https://github.com/nvie/rq/blob/master/rq/utils.py
|
thumbnails/utils.py
|
def import_attribute(name):
    """
    Resolve a dotted path such as ``"path.to.func"`` to the attribute itself.

    If ``name`` is already callable it is returned unchanged.
    (Adapted from nvie's rq: https://github.com/nvie/rq/blob/master/rq/utils.py)
    """
    if hasattr(name, '__call__'):
        return name
    module_path, attr_name = name.rsplit('.', 1)
    return getattr(importlib.import_module(module_path), attr_name)
|
def import_attribute(name):
    """
    Resolve a dotted path such as ``"path.to.func"`` to the attribute itself.

    If ``name`` is already callable it is returned unchanged.
    (Adapted from nvie's rq: https://github.com/nvie/rq/blob/master/rq/utils.py)
    """
    if hasattr(name, '__call__'):
        return name
    module_path, attr_name = name.rsplit('.', 1)
    return getattr(importlib.import_module(module_path), attr_name)
|
[
"Return",
"an",
"attribute",
"from",
"a",
"dotted",
"path",
"name",
"(",
"e",
".",
"g",
".",
"path",
".",
"to",
".",
"func",
")",
".",
"Copied",
"from",
"nvie",
"s",
"rq",
"https",
":",
"//",
"github",
".",
"com",
"/",
"nvie",
"/",
"rq",
"/",
"blob",
"/",
"master",
"/",
"rq",
"/",
"utils",
".",
"py"
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/utils.py#L9-L18
|
[
"def",
"import_attribute",
"(",
"name",
")",
":",
"if",
"hasattr",
"(",
"name",
",",
"'__call__'",
")",
":",
"return",
"name",
"module_name",
",",
"attribute",
"=",
"name",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"return",
"getattr",
"(",
"module",
",",
"attribute",
")"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
parse_processors
|
Returns a dictionary that contains the imported processors and
kwargs. For example, passing in:
processors = [
{'processor': 'thumbnails.processors.resize', 'width': 10, 'height': 10},
{'processor': 'thumbnails.processors.crop', 'width': 10, 'height': 10},
]
Would return:
[
{'processor': resize_function, kwargs: {'width': 10, 'height': 10}}
{'processor': crop_function, kwargs: {'width': 10, 'height': 10}}
]
|
thumbnails/utils.py
|
def parse_processors(processor_definition):
    """
    Import each processor in ``processor_definition`` and split off its kwargs.

    Every entry must carry a ``'PATH'`` key (dotted path to the processor
    function); the remaining keys become keyword arguments for that processor.
    For example, passing in:
        processors = [
            {'PATH': 'thumbnails.processors.resize', 'width': 10, 'height': 10},
        ]
    returns:
        [
            {'processor': resize_function, 'kwargs': {'width': 10, 'height': 10}},
        ]
    """
    parsed = []
    for definition in processor_definition:
        # Deep-copy so popping 'PATH' never mutates the caller's settings dict.
        kwargs = deepcopy(definition)
        func = import_attribute(kwargs.pop('PATH'))
        parsed.append({'processor': func, 'kwargs': kwargs})
    return parsed
|
def parse_processors(processor_definition):
    """
    Import each processor in ``processor_definition`` and split off its kwargs.

    Every entry must carry a ``'PATH'`` key (dotted path to the processor
    function); the remaining keys become keyword arguments for that processor.
    For example, passing in:
        processors = [
            {'PATH': 'thumbnails.processors.resize', 'width': 10, 'height': 10},
        ]
    returns:
        [
            {'processor': resize_function, 'kwargs': {'width': 10, 'height': 10}},
        ]
    """
    parsed = []
    for definition in processor_definition:
        # Deep-copy so popping 'PATH' never mutates the caller's settings dict.
        kwargs = deepcopy(definition)
        func = import_attribute(kwargs.pop('PATH'))
        parsed.append({'processor': func, 'kwargs': kwargs})
    return parsed
|
[
"Returns",
"a",
"dictionary",
"that",
"contains",
"the",
"imported",
"processors",
"and",
"kwargs",
".",
"For",
"example",
"passing",
"in",
":"
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/utils.py#L21-L48
|
[
"def",
"parse_processors",
"(",
"processor_definition",
")",
":",
"parsed_processors",
"=",
"[",
"]",
"for",
"processor",
"in",
"processor_definition",
":",
"processor_function",
"=",
"import_attribute",
"(",
"processor",
"[",
"'PATH'",
"]",
")",
"kwargs",
"=",
"deepcopy",
"(",
"processor",
")",
"kwargs",
".",
"pop",
"(",
"'PATH'",
")",
"parsed_processors",
".",
"append",
"(",
"{",
"'processor'",
":",
"processor_function",
",",
"'kwargs'",
":",
"kwargs",
"}",
")",
"return",
"parsed_processors"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
process
|
Process an image through its defined processors
params :file: filename or file-like object
params :size: string for size defined in settings
return a ContentFile
|
thumbnails/processors.py
|
def process(file, size):
    """
    Run an image through every processor configured for ``size``.

    params :file: filename or file-like object holding the source image
    params :size: size key defined in settings
    return a ContentFile with the processed image data
    """
    from . import conf
    # Load the source image into the imaging backend.
    image = images.from_file(file)
    # Apply each configured processor in declaration order.
    for entry in conf.SIZES[size]['PROCESSORS']:
        image = entry['processor'](image, **entry['kwargs'])
    # Serialize the result into an in-memory ContentFile.
    buffer = io.BytesIO()
    image.save(file=buffer)
    return ContentFile(buffer.getvalue())
|
def process(file, size):
    """
    Run an image through every processor configured for ``size``.

    params :file: filename or file-like object holding the source image
    params :size: size key defined in settings
    return a ContentFile with the processed image data
    """
    from . import conf
    # Load the source image into the imaging backend.
    image = images.from_file(file)
    # Apply each configured processor in declaration order.
    for entry in conf.SIZES[size]['PROCESSORS']:
        image = entry['processor'](image, **entry['kwargs'])
    # Serialize the result into an in-memory ContentFile.
    buffer = io.BytesIO()
    image.save(file=buffer)
    return ContentFile(buffer.getvalue())
|
[
"Process",
"an",
"image",
"through",
"its",
"defined",
"processors",
"params",
":",
"file",
":",
"filename",
"or",
"file",
"-",
"like",
"object",
"params",
":",
"size",
":",
"string",
"for",
"size",
"defined",
"in",
"settings",
"return",
"a",
"ContentFile"
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/processors.py#L48-L70
|
[
"def",
"process",
"(",
"file",
",",
"size",
")",
":",
"from",
".",
"import",
"conf",
"# open image in piccaso",
"raw_image",
"=",
"images",
".",
"from_file",
"(",
"file",
")",
"# run through all processors, if defined",
"size_dict",
"=",
"conf",
".",
"SIZES",
"[",
"size",
"]",
"for",
"processor",
"in",
"size_dict",
"[",
"'PROCESSORS'",
"]",
":",
"raw_image",
"=",
"processor",
"[",
"'processor'",
"]",
"(",
"raw_image",
",",
"*",
"*",
"processor",
"[",
"'kwargs'",
"]",
")",
"# write to Content File",
"image_io",
"=",
"io",
".",
"BytesIO",
"(",
")",
"raw_image",
".",
"save",
"(",
"file",
"=",
"image_io",
")",
"image_file",
"=",
"ContentFile",
"(",
"image_io",
".",
"getvalue",
"(",
")",
")",
"#print dir(image_file)",
"return",
"image_file"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
ImageField.pre_save
|
Process the source image through the defined processors.
|
thumbnails/fields.py
|
def pre_save(self, model_instance, add):
    """
    Run the uncommitted source image through the configured processors and
    save it under a fresh unique filename before the model row is written.
    """
    file = getattr(model_instance, self.attname)
    # Nothing to do for an absent or already-committed file.
    if not file or file._committed:
        return file
    image_file = file
    if self.resize_source_to:
        file.seek(0)
        image_file = processors.process(file, self.resize_source_to)
        image_file = post_processors.process(image_file, self.resize_source_to)
    # Unique name keeps the original extension but avoids collisions.
    extension = os.path.splitext(file.name)[1]
    file.save(str(shortuuid.uuid()) + extension, image_file, save=False)
    return file
|
def pre_save(self, model_instance, add):
    """
    Run the uncommitted source image through the configured processors and
    save it under a fresh unique filename before the model row is written.
    """
    file = getattr(model_instance, self.attname)
    # Nothing to do for an absent or already-committed file.
    if not file or file._committed:
        return file
    image_file = file
    if self.resize_source_to:
        file.seek(0)
        image_file = processors.process(file, self.resize_source_to)
        image_file = post_processors.process(image_file, self.resize_source_to)
    # Unique name keeps the original extension but avoids collisions.
    extension = os.path.splitext(file.name)[1]
    file.save(str(shortuuid.uuid()) + extension, image_file, save=False)
    return file
|
[
"Process",
"the",
"source",
"image",
"through",
"the",
"defined",
"processors",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/fields.py#L30-L44
|
[
"def",
"pre_save",
"(",
"self",
",",
"model_instance",
",",
"add",
")",
":",
"file",
"=",
"getattr",
"(",
"model_instance",
",",
"self",
".",
"attname",
")",
"if",
"file",
"and",
"not",
"file",
".",
"_committed",
":",
"image_file",
"=",
"file",
"if",
"self",
".",
"resize_source_to",
":",
"file",
".",
"seek",
"(",
"0",
")",
"image_file",
"=",
"processors",
".",
"process",
"(",
"file",
",",
"self",
".",
"resize_source_to",
")",
"image_file",
"=",
"post_processors",
".",
"process",
"(",
"image_file",
",",
"self",
".",
"resize_source_to",
")",
"filename",
"=",
"str",
"(",
"shortuuid",
".",
"uuid",
"(",
")",
")",
"+",
"os",
".",
"path",
".",
"splitext",
"(",
"file",
".",
"name",
")",
"[",
"1",
"]",
"file",
".",
"save",
"(",
"filename",
",",
"image_file",
",",
"save",
"=",
"False",
")",
"return",
"file"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
ThumbnailManager._refresh_cache
|
Populate self._thumbnails.
|
thumbnails/files.py
|
def _refresh_cache(self):
"""Populate self._thumbnails."""
self._thumbnails = {}
metadatas = self.metadata_backend.get_thumbnails(self.source_image.name)
for metadata in metadatas:
self._thumbnails[metadata.size] = Thumbnail(metadata=metadata, storage=self.storage)
|
def _refresh_cache(self):
"""Populate self._thumbnails."""
self._thumbnails = {}
metadatas = self.metadata_backend.get_thumbnails(self.source_image.name)
for metadata in metadatas:
self._thumbnails[metadata.size] = Thumbnail(metadata=metadata, storage=self.storage)
|
[
"Populate",
"self",
".",
"_thumbnails",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/files.py#L53-L58
|
[
"def",
"_refresh_cache",
"(",
"self",
")",
":",
"self",
".",
"_thumbnails",
"=",
"{",
"}",
"metadatas",
"=",
"self",
".",
"metadata_backend",
".",
"get_thumbnails",
"(",
"self",
".",
"source_image",
".",
"name",
")",
"for",
"metadata",
"in",
"metadatas",
":",
"self",
".",
"_thumbnails",
"[",
"metadata",
".",
"size",
"]",
"=",
"Thumbnail",
"(",
"metadata",
"=",
"metadata",
",",
"storage",
"=",
"self",
".",
"storage",
")"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
ThumbnailManager.all
|
Return all thumbnails in a dict format.
|
thumbnails/files.py
|
def all(self):
    """Return the size -> Thumbnail mapping, populating the cache on first use."""
    if self._thumbnails is None:
        self._refresh_cache()
    return self._thumbnails
|
def all(self):
    """Return the size -> Thumbnail mapping, populating the cache on first use."""
    if self._thumbnails is None:
        self._refresh_cache()
    return self._thumbnails
|
[
"Return",
"all",
"thumbnails",
"in",
"a",
"dict",
"format",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/files.py#L60-L67
|
[
"def",
"all",
"(",
"self",
")",
":",
"if",
"self",
".",
"_thumbnails",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_thumbnails",
"self",
".",
"_refresh_cache",
"(",
")",
"return",
"self",
".",
"_thumbnails"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
ThumbnailManager.get
|
Returns a Thumbnail instance.
First check whether the thumbnail is already cached. If it isn't:
1. Try to fetch the thumbnail
2. Create thumbnail if it's not present
3. Cache the thumbnail for future use
|
thumbnails/files.py
|
def get(self, size, create=True):
    """
    Return the Thumbnail for ``size``.

    Lookup order: the in-memory cache, then the metadata/storage backends,
    and finally a freshly created thumbnail.  Whatever is found is cached
    for subsequent calls.

    NOTE(review): the ``create`` flag is currently never consulted — a
    missing thumbnail is always created regardless of its value.
    """
    if self._thumbnails is None:
        self._refresh_cache()
    cached = self._thumbnails.get(size)
    if cached is not None:
        return cached
    thumbnail = images.get(self.source_image.name, size,
                           self.metadata_backend, self.storage)
    if thumbnail is None:
        thumbnail = self.create(size)
    self._thumbnails[size] = thumbnail
    return thumbnail
|
def get(self, size, create=True):
    """
    Return the Thumbnail for ``size``.

    Lookup order: the in-memory cache, then the metadata/storage backends,
    and finally a freshly created thumbnail.  Whatever is found is cached
    for subsequent calls.

    NOTE(review): the ``create`` flag is currently never consulted — a
    missing thumbnail is always created regardless of its value.
    """
    if self._thumbnails is None:
        self._refresh_cache()
    cached = self._thumbnails.get(size)
    if cached is not None:
        return cached
    thumbnail = images.get(self.source_image.name, size,
                           self.metadata_backend, self.storage)
    if thumbnail is None:
        thumbnail = self.create(size)
    self._thumbnails[size] = thumbnail
    return thumbnail
|
[
"Returns",
"a",
"Thumbnail",
"instance",
".",
"First",
"check",
"whether",
"thumbnail",
"is",
"already",
"cached",
".",
"If",
"it",
"doesn",
"t",
":",
"1",
".",
"Try",
"to",
"fetch",
"the",
"thumbnail",
"2",
".",
"Create",
"thumbnail",
"if",
"it",
"s",
"not",
"present",
"3",
".",
"Cache",
"the",
"thumbnail",
"for",
"future",
"use"
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/files.py#L69-L91
|
[
"def",
"get",
"(",
"self",
",",
"size",
",",
"create",
"=",
"True",
")",
":",
"if",
"self",
".",
"_thumbnails",
"is",
"None",
":",
"self",
".",
"_refresh_cache",
"(",
")",
"thumbnail",
"=",
"self",
".",
"_thumbnails",
".",
"get",
"(",
"size",
")",
"if",
"thumbnail",
"is",
"None",
":",
"thumbnail",
"=",
"images",
".",
"get",
"(",
"self",
".",
"source_image",
".",
"name",
",",
"size",
",",
"self",
".",
"metadata_backend",
",",
"self",
".",
"storage",
")",
"if",
"thumbnail",
"is",
"None",
":",
"thumbnail",
"=",
"self",
".",
"create",
"(",
"size",
")",
"self",
".",
"_thumbnails",
"[",
"size",
"]",
"=",
"thumbnail",
"return",
"thumbnail"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
ThumbnailManager.create
|
Creates and return a thumbnail of a given size.
|
thumbnails/files.py
|
def create(self, size):
    """Create and return a thumbnail of ``size`` for this manager's source image."""
    return images.create(self.source_image.name, size,
                         self.metadata_backend, self.storage)
|
def create(self, size):
    """Create and return a thumbnail of ``size`` for this manager's source image."""
    return images.create(self.source_image.name, size,
                         self.metadata_backend, self.storage)
|
[
"Creates",
"and",
"return",
"a",
"thumbnail",
"of",
"a",
"given",
"size",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/files.py#L93-L99
|
[
"def",
"create",
"(",
"self",
",",
"size",
")",
":",
"thumbnail",
"=",
"images",
".",
"create",
"(",
"self",
".",
"source_image",
".",
"name",
",",
"size",
",",
"self",
".",
"metadata_backend",
",",
"self",
".",
"storage",
")",
"return",
"thumbnail"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
ThumbnailManager.delete
|
Deletes a thumbnail of a given size
|
thumbnails/files.py
|
def delete(self, size):
    """Delete the thumbnail of ``size`` from storage/metadata and drop it from the cache."""
    images.delete(self.source_image.name, size,
                  self.metadata_backend, self.storage)
    del self._thumbnails[size]
|
def delete(self, size):
    """Delete the thumbnail of ``size`` from storage/metadata and drop it from the cache."""
    images.delete(self.source_image.name, size,
                  self.metadata_backend, self.storage)
    del self._thumbnails[size]
|
[
"Deletes",
"a",
"thumbnail",
"of",
"a",
"given",
"size"
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/files.py#L101-L107
|
[
"def",
"delete",
"(",
"self",
",",
"size",
")",
":",
"images",
".",
"delete",
"(",
"self",
".",
"source_image",
".",
"name",
",",
"size",
",",
"self",
".",
"metadata_backend",
",",
"self",
".",
"storage",
")",
"del",
"(",
"self",
".",
"_thumbnails",
"[",
"size",
"]",
")"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
create
|
Creates a thumbnail file and its relevant metadata. Returns a
Thumbnail instance.
|
thumbnails/images.py
|
def create(source_name, size, metadata_backend=None, storage_backend=None):
    """
    Render, store and register a thumbnail for ``source_name`` at ``size``.

    Falls back to the configured default storage/metadata backends when none
    are supplied.  Returns the resulting Thumbnail instance.
    """
    if metadata_backend is None:
        metadata_backend = backends.metadata.get_backend()
    if storage_backend is None:
        storage_backend = backends.storage.get_backend()
    # Process the source image, then run the configured post-processors.
    processed = processors.process(storage_backend.open(source_name), size)
    processed = post_processors.process(processed, size)
    # Persist the rendered file and record its metadata.
    saved_name = storage_backend.save(get_thumbnail_name(source_name, size), processed)
    metadata = metadata_backend.add_thumbnail(source_name, size, saved_name)
    return Thumbnail(metadata=metadata, storage=storage_backend)
|
def create(source_name, size, metadata_backend=None, storage_backend=None):
    """
    Render, store and register a thumbnail for ``source_name`` at ``size``.

    Falls back to the configured default storage/metadata backends when none
    are supplied.  Returns the resulting Thumbnail instance.
    """
    if metadata_backend is None:
        metadata_backend = backends.metadata.get_backend()
    if storage_backend is None:
        storage_backend = backends.storage.get_backend()
    # Process the source image, then run the configured post-processors.
    processed = processors.process(storage_backend.open(source_name), size)
    processed = post_processors.process(processed, size)
    # Persist the rendered file and record its metadata.
    saved_name = storage_backend.save(get_thumbnail_name(source_name, size), processed)
    metadata = metadata_backend.add_thumbnail(source_name, size, saved_name)
    return Thumbnail(metadata=metadata, storage=storage_backend)
|
[
"Creates",
"a",
"thumbnail",
"file",
"and",
"its",
"relevant",
"metadata",
".",
"Returns",
"a",
"Thumbnail",
"instance",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/images.py#L68-L85
|
[
"def",
"create",
"(",
"source_name",
",",
"size",
",",
"metadata_backend",
"=",
"None",
",",
"storage_backend",
"=",
"None",
")",
":",
"if",
"storage_backend",
"is",
"None",
":",
"storage_backend",
"=",
"backends",
".",
"storage",
".",
"get_backend",
"(",
")",
"if",
"metadata_backend",
"is",
"None",
":",
"metadata_backend",
"=",
"backends",
".",
"metadata",
".",
"get_backend",
"(",
")",
"thumbnail_file",
"=",
"processors",
".",
"process",
"(",
"storage_backend",
".",
"open",
"(",
"source_name",
")",
",",
"size",
")",
"thumbnail_file",
"=",
"post_processors",
".",
"process",
"(",
"thumbnail_file",
",",
"size",
")",
"name",
"=",
"get_thumbnail_name",
"(",
"source_name",
",",
"size",
")",
"name",
"=",
"storage_backend",
".",
"save",
"(",
"name",
",",
"thumbnail_file",
")",
"metadata",
"=",
"metadata_backend",
".",
"add_thumbnail",
"(",
"source_name",
",",
"size",
",",
"name",
")",
"return",
"Thumbnail",
"(",
"metadata",
"=",
"metadata",
",",
"storage",
"=",
"storage_backend",
")"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
get
|
Returns a Thumbnail instance, or None if thumbnail does not yet exist.
|
thumbnails/images.py
|
def get(source_name, size, metadata_backend=None, storage_backend=None):
    """
    Look up an existing thumbnail for ``source_name`` at ``size``.

    Returns a Thumbnail instance, or None when no thumbnail has been recorded
    yet.  Falls back to the configured default backends when none are supplied.
    """
    if metadata_backend is None:
        metadata_backend = backends.metadata.get_backend()
    if storage_backend is None:
        storage_backend = backends.storage.get_backend()
    metadata = metadata_backend.get_thumbnail(source_name, size)
    if metadata is None:
        return None
    return Thumbnail(metadata=metadata, storage=storage_backend)
|
def get(source_name, size, metadata_backend=None, storage_backend=None):
"""
Returns a Thumbnail instance, or None if thumbnail does not yet exist.
"""
if storage_backend is None:
storage_backend = backends.storage.get_backend()
if metadata_backend is None:
metadata_backend = backends.metadata.get_backend()
metadata = metadata_backend.get_thumbnail(source_name, size)
if metadata is None:
return None
else:
return Thumbnail(metadata=metadata, storage=storage_backend)
|
[
"Returns",
"a",
"Thumbnail",
"instance",
"or",
"None",
"if",
"thumbnail",
"does",
"not",
"yet",
"exist",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/images.py#L88-L101
|
[
"def",
"get",
"(",
"source_name",
",",
"size",
",",
"metadata_backend",
"=",
"None",
",",
"storage_backend",
"=",
"None",
")",
":",
"if",
"storage_backend",
"is",
"None",
":",
"storage_backend",
"=",
"backends",
".",
"storage",
".",
"get_backend",
"(",
")",
"if",
"metadata_backend",
"is",
"None",
":",
"metadata_backend",
"=",
"backends",
".",
"metadata",
".",
"get_backend",
"(",
")",
"metadata",
"=",
"metadata_backend",
".",
"get_thumbnail",
"(",
"source_name",
",",
"size",
")",
"if",
"metadata",
"is",
"None",
":",
"return",
"None",
"else",
":",
"return",
"Thumbnail",
"(",
"metadata",
"=",
"metadata",
",",
"storage",
"=",
"storage_backend",
")"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
delete
|
Deletes a thumbnail file and its relevant metadata.
|
thumbnails/images.py
|
def delete(source_name, size, metadata_backend=None, storage_backend=None):
"""
Deletes a thumbnail file and its relevant metadata.
"""
if storage_backend is None:
storage_backend = backends.storage.get_backend()
if metadata_backend is None:
metadata_backend = backends.metadata.get_backend()
storage_backend.delete(get_thumbnail_name(source_name, size))
metadata_backend.delete_thumbnail(source_name, size)
|
def delete(source_name, size, metadata_backend=None, storage_backend=None):
"""
Deletes a thumbnail file and its relevant metadata.
"""
if storage_backend is None:
storage_backend = backends.storage.get_backend()
if metadata_backend is None:
metadata_backend = backends.metadata.get_backend()
storage_backend.delete(get_thumbnail_name(source_name, size))
metadata_backend.delete_thumbnail(source_name, size)
|
[
"Deletes",
"a",
"thumbnail",
"file",
"and",
"its",
"relevant",
"metadata",
"."
] |
ui/django-thumbnails
|
python
|
https://github.com/ui/django-thumbnails/blob/5cef55e7f167060458709ed760dd43981124796a/thumbnails/images.py#L104-L113
|
[
"def",
"delete",
"(",
"source_name",
",",
"size",
",",
"metadata_backend",
"=",
"None",
",",
"storage_backend",
"=",
"None",
")",
":",
"if",
"storage_backend",
"is",
"None",
":",
"storage_backend",
"=",
"backends",
".",
"storage",
".",
"get_backend",
"(",
")",
"if",
"metadata_backend",
"is",
"None",
":",
"metadata_backend",
"=",
"backends",
".",
"metadata",
".",
"get_backend",
"(",
")",
"storage_backend",
".",
"delete",
"(",
"get_thumbnail_name",
"(",
"source_name",
",",
"size",
")",
")",
"metadata_backend",
".",
"delete_thumbnail",
"(",
"source_name",
",",
"size",
")"
] |
5cef55e7f167060458709ed760dd43981124796a
|
test
|
LoopbackProvider.received
|
Simulate an incoming message
:type src: str
:param src: Message source
:type boby: str | unicode
:param body: Message body
:rtype: IncomingMessage
|
smsframework/providers/loopback.py
|
def received(self, src, body):
""" Simulate an incoming message
:type src: str
:param src: Message source
:type boby: str | unicode
:param body: Message body
:rtype: IncomingMessage
"""
# Create the message
self._msgid += 1
message = IncomingMessage(src, body, self._msgid)
# Log traffic
self._traffic.append(message)
# Handle it
self._receive_message(message)
# Finish
return message
|
def received(self, src, body):
""" Simulate an incoming message
:type src: str
:param src: Message source
:type boby: str | unicode
:param body: Message body
:rtype: IncomingMessage
"""
# Create the message
self._msgid += 1
message = IncomingMessage(src, body, self._msgid)
# Log traffic
self._traffic.append(message)
# Handle it
self._receive_message(message)
# Finish
return message
|
[
"Simulate",
"an",
"incoming",
"message"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/loopback.py#L46-L66
|
[
"def",
"received",
"(",
"self",
",",
"src",
",",
"body",
")",
":",
"# Create the message",
"self",
".",
"_msgid",
"+=",
"1",
"message",
"=",
"IncomingMessage",
"(",
"src",
",",
"body",
",",
"self",
".",
"_msgid",
")",
"# Log traffic",
"self",
".",
"_traffic",
".",
"append",
"(",
"message",
")",
"# Handle it",
"self",
".",
"_receive_message",
"(",
"message",
")",
"# Finish",
"return",
"message"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
LoopbackProvider.subscribe
|
Register a virtual subscriber which receives messages to the matching number.
:type number: str
:param number: Subscriber phone number
:type callback: callable
:param callback: A callback(OutgoingMessage) which handles the messages directed to the subscriber.
The message object is augmented with the .reply(str) method which allows to send a reply easily!
:rtype: LoopbackProvider
|
smsframework/providers/loopback.py
|
def subscribe(self, number, callback):
""" Register a virtual subscriber which receives messages to the matching number.
:type number: str
:param number: Subscriber phone number
:type callback: callable
:param callback: A callback(OutgoingMessage) which handles the messages directed to the subscriber.
The message object is augmented with the .reply(str) method which allows to send a reply easily!
:rtype: LoopbackProvider
"""
self._subscribers[digits_only(number)] = callback
return self
|
def subscribe(self, number, callback):
""" Register a virtual subscriber which receives messages to the matching number.
:type number: str
:param number: Subscriber phone number
:type callback: callable
:param callback: A callback(OutgoingMessage) which handles the messages directed to the subscriber.
The message object is augmented with the .reply(str) method which allows to send a reply easily!
:rtype: LoopbackProvider
"""
self._subscribers[digits_only(number)] = callback
return self
|
[
"Register",
"a",
"virtual",
"subscriber",
"which",
"receives",
"messages",
"to",
"the",
"matching",
"number",
"."
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/loopback.py#L68-L79
|
[
"def",
"subscribe",
"(",
"self",
",",
"number",
",",
"callback",
")",
":",
"self",
".",
"_subscribers",
"[",
"digits_only",
"(",
"number",
")",
"]",
"=",
"callback",
"return",
"self"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
MessageStatus.states
|
Get the set of states. Mostly used for pretty printing
:rtype: set
:returns: Set of 'accepted', 'delivered', 'expired', 'error'
|
smsframework/data/MessageStatus.py
|
def states(self):
""" Get the set of states. Mostly used for pretty printing
:rtype: set
:returns: Set of 'accepted', 'delivered', 'expired', 'error'
"""
ret = set()
if self.accepted:
ret.add('accepted')
if self.delivered:
ret.add('delivered')
if self.expired:
ret.add('expired')
if self.error:
ret.add('error')
return ret
|
def states(self):
""" Get the set of states. Mostly used for pretty printing
:rtype: set
:returns: Set of 'accepted', 'delivered', 'expired', 'error'
"""
ret = set()
if self.accepted:
ret.add('accepted')
if self.delivered:
ret.add('delivered')
if self.expired:
ret.add('expired')
if self.error:
ret.add('error')
return ret
|
[
"Get",
"the",
"set",
"of",
"states",
".",
"Mostly",
"used",
"for",
"pretty",
"printing"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/data/MessageStatus.py#L54-L69
|
[
"def",
"states",
"(",
"self",
")",
":",
"ret",
"=",
"set",
"(",
")",
"if",
"self",
".",
"accepted",
":",
"ret",
".",
"add",
"(",
"'accepted'",
")",
"if",
"self",
".",
"delivered",
":",
"ret",
".",
"add",
"(",
"'delivered'",
")",
"if",
"self",
".",
"expired",
":",
"ret",
".",
"add",
"(",
"'expired'",
")",
"if",
"self",
".",
"error",
":",
"ret",
".",
"add",
"(",
"'error'",
")",
"return",
"ret"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
Gateway.add_provider
|
Register a provider on the gateway
The first provider defined becomes the default one: used in case the routing function has no better idea.
:type name: str
:param name: Provider name that will be used to uniquely identify it
:type Provider: type
:param Provider: Provider class that inherits from `smsframework.IProvider`
:param config: Provider configuration. Please refer to the Provider documentation.
:rtype: IProvider
:returns: The created provider
|
smsframework/Gateway.py
|
def add_provider(self, name, Provider, **config):
""" Register a provider on the gateway
The first provider defined becomes the default one: used in case the routing function has no better idea.
:type name: str
:param name: Provider name that will be used to uniquely identify it
:type Provider: type
:param Provider: Provider class that inherits from `smsframework.IProvider`
:param config: Provider configuration. Please refer to the Provider documentation.
:rtype: IProvider
:returns: The created provider
"""
assert issubclass(Provider, IProvider), 'Provider does not implement IProvider'
assert isinstance(name, str), 'Provider name must be a string'
# Configure
provider = Provider(self, name, **config)
# Register
assert name not in self._providers, 'Provider is already registered'
self._providers[name] = provider
# If first - set default
if self.default_provider is None:
self.default_provider = name
# Finish
return provider
|
def add_provider(self, name, Provider, **config):
""" Register a provider on the gateway
The first provider defined becomes the default one: used in case the routing function has no better idea.
:type name: str
:param name: Provider name that will be used to uniquely identify it
:type Provider: type
:param Provider: Provider class that inherits from `smsframework.IProvider`
:param config: Provider configuration. Please refer to the Provider documentation.
:rtype: IProvider
:returns: The created provider
"""
assert issubclass(Provider, IProvider), 'Provider does not implement IProvider'
assert isinstance(name, str), 'Provider name must be a string'
# Configure
provider = Provider(self, name, **config)
# Register
assert name not in self._providers, 'Provider is already registered'
self._providers[name] = provider
# If first - set default
if self.default_provider is None:
self.default_provider = name
# Finish
return provider
|
[
"Register",
"a",
"provider",
"on",
"the",
"gateway"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/Gateway.py#L61-L89
|
[
"def",
"add_provider",
"(",
"self",
",",
"name",
",",
"Provider",
",",
"*",
"*",
"config",
")",
":",
"assert",
"issubclass",
"(",
"Provider",
",",
"IProvider",
")",
",",
"'Provider does not implement IProvider'",
"assert",
"isinstance",
"(",
"name",
",",
"str",
")",
",",
"'Provider name must be a string'",
"# Configure",
"provider",
"=",
"Provider",
"(",
"self",
",",
"name",
",",
"*",
"*",
"config",
")",
"# Register",
"assert",
"name",
"not",
"in",
"self",
".",
"_providers",
",",
"'Provider is already registered'",
"self",
".",
"_providers",
"[",
"name",
"]",
"=",
"provider",
"# If first - set default",
"if",
"self",
".",
"default_provider",
"is",
"None",
":",
"self",
".",
"default_provider",
"=",
"name",
"# Finish",
"return",
"provider"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
Gateway.send
|
Send a message object
:type message: data.OutgoingMessage
:param message: The message to send
:rtype: data.OutgoingMessage
:returns: The sent message with populated fields
:raises AssertionError: wrong provider name encountered (returned by the router, or provided to OutgoingMessage)
:raises MessageSendError: generic errors
:raises AuthError: provider authentication failed
:raises LimitsError: sending limits exceeded
:raises CreditError: not enough money on the account
|
smsframework/Gateway.py
|
def send(self, message):
""" Send a message object
:type message: data.OutgoingMessage
:param message: The message to send
:rtype: data.OutgoingMessage
:returns: The sent message with populated fields
:raises AssertionError: wrong provider name encountered (returned by the router, or provided to OutgoingMessage)
:raises MessageSendError: generic errors
:raises AuthError: provider authentication failed
:raises LimitsError: sending limits exceeded
:raises CreditError: not enough money on the account
"""
# Which provider to use?
provider_name = self._default_provider # default
if message.provider is not None:
assert message.provider in self._providers, \
'Unknown provider specified in OutgoingMessage.provideer: {}'.format(provider_name)
provider = self.get_provider(message.provider)
else:
# Apply routing
if message.routing_values is not None: # Use the default provider when no routing values are given
# Routing values are present
provider_name = self.router(message, *message.routing_values) or self._default_provider
assert provider_name in self._providers, \
'Routing function returned an unknown provider name: {}'.format(provider_name)
provider = self.get_provider(provider_name)
# Set message provider name
message.provider = provider.name
# Send the message using the provider
message = provider.send(message)
# Emit the send event
self.onSend(message)
# Finish
return message
|
def send(self, message):
""" Send a message object
:type message: data.OutgoingMessage
:param message: The message to send
:rtype: data.OutgoingMessage
:returns: The sent message with populated fields
:raises AssertionError: wrong provider name encountered (returned by the router, or provided to OutgoingMessage)
:raises MessageSendError: generic errors
:raises AuthError: provider authentication failed
:raises LimitsError: sending limits exceeded
:raises CreditError: not enough money on the account
"""
# Which provider to use?
provider_name = self._default_provider # default
if message.provider is not None:
assert message.provider in self._providers, \
'Unknown provider specified in OutgoingMessage.provideer: {}'.format(provider_name)
provider = self.get_provider(message.provider)
else:
# Apply routing
if message.routing_values is not None: # Use the default provider when no routing values are given
# Routing values are present
provider_name = self.router(message, *message.routing_values) or self._default_provider
assert provider_name in self._providers, \
'Routing function returned an unknown provider name: {}'.format(provider_name)
provider = self.get_provider(provider_name)
# Set message provider name
message.provider = provider.name
# Send the message using the provider
message = provider.send(message)
# Emit the send event
self.onSend(message)
# Finish
return message
|
[
"Send",
"a",
"message",
"object"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/Gateway.py#L139-L177
|
[
"def",
"send",
"(",
"self",
",",
"message",
")",
":",
"# Which provider to use?",
"provider_name",
"=",
"self",
".",
"_default_provider",
"# default",
"if",
"message",
".",
"provider",
"is",
"not",
"None",
":",
"assert",
"message",
".",
"provider",
"in",
"self",
".",
"_providers",
",",
"'Unknown provider specified in OutgoingMessage.provideer: {}'",
".",
"format",
"(",
"provider_name",
")",
"provider",
"=",
"self",
".",
"get_provider",
"(",
"message",
".",
"provider",
")",
"else",
":",
"# Apply routing",
"if",
"message",
".",
"routing_values",
"is",
"not",
"None",
":",
"# Use the default provider when no routing values are given",
"# Routing values are present",
"provider_name",
"=",
"self",
".",
"router",
"(",
"message",
",",
"*",
"message",
".",
"routing_values",
")",
"or",
"self",
".",
"_default_provider",
"assert",
"provider_name",
"in",
"self",
".",
"_providers",
",",
"'Routing function returned an unknown provider name: {}'",
".",
"format",
"(",
"provider_name",
")",
"provider",
"=",
"self",
".",
"get_provider",
"(",
"provider_name",
")",
"# Set message provider name",
"message",
".",
"provider",
"=",
"provider",
".",
"name",
"# Send the message using the provider",
"message",
"=",
"provider",
".",
"send",
"(",
"message",
")",
"# Emit the send event",
"self",
".",
"onSend",
"(",
"message",
")",
"# Finish",
"return",
"message"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
Gateway.receiver_blueprint_for
|
Get a Flask blueprint for the named provider that handles incoming messages & status reports
Note: this requires Flask microframework.
:rtype: flask.blueprints.Blueprint
:returns: Flask Blueprint, fully functional
:raises KeyError: provider not found
:raises NotImplementedError: Provider does not implement a receiver
|
smsframework/Gateway.py
|
def receiver_blueprint_for(self, name):
""" Get a Flask blueprint for the named provider that handles incoming messages & status reports
Note: this requires Flask microframework.
:rtype: flask.blueprints.Blueprint
:returns: Flask Blueprint, fully functional
:raises KeyError: provider not found
:raises NotImplementedError: Provider does not implement a receiver
"""
# Get the provider & blueprint
provider = self.get_provider(name)
bp = provider.make_receiver_blueprint()
# Register a Flask handler that initializes `g.provider`
# This is the only way for the blueprint to get the current IProvider instance
from flask.globals import g # local import as the user is not required to use receivers at all
@bp.before_request
def init_g():
g.provider = provider
# Finish
return bp
|
def receiver_blueprint_for(self, name):
""" Get a Flask blueprint for the named provider that handles incoming messages & status reports
Note: this requires Flask microframework.
:rtype: flask.blueprints.Blueprint
:returns: Flask Blueprint, fully functional
:raises KeyError: provider not found
:raises NotImplementedError: Provider does not implement a receiver
"""
# Get the provider & blueprint
provider = self.get_provider(name)
bp = provider.make_receiver_blueprint()
# Register a Flask handler that initializes `g.provider`
# This is the only way for the blueprint to get the current IProvider instance
from flask.globals import g # local import as the user is not required to use receivers at all
@bp.before_request
def init_g():
g.provider = provider
# Finish
return bp
|
[
"Get",
"a",
"Flask",
"blueprint",
"for",
"the",
"named",
"provider",
"that",
"handles",
"incoming",
"messages",
"&",
"status",
"reports"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/Gateway.py#L184-L207
|
[
"def",
"receiver_blueprint_for",
"(",
"self",
",",
"name",
")",
":",
"# Get the provider & blueprint",
"provider",
"=",
"self",
".",
"get_provider",
"(",
"name",
")",
"bp",
"=",
"provider",
".",
"make_receiver_blueprint",
"(",
")",
"# Register a Flask handler that initializes `g.provider`",
"# This is the only way for the blueprint to get the current IProvider instance",
"from",
"flask",
".",
"globals",
"import",
"g",
"# local import as the user is not required to use receivers at all",
"@",
"bp",
".",
"before_request",
"def",
"init_g",
"(",
")",
":",
"g",
".",
"provider",
"=",
"provider",
"# Finish",
"return",
"bp"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
Gateway.receiver_blueprints
|
Get Flask blueprints for every provider that supports it
Note: this requires Flask microframework.
:rtype: dict
:returns: A dict { provider-name: Blueprint }
|
smsframework/Gateway.py
|
def receiver_blueprints(self):
""" Get Flask blueprints for every provider that supports it
Note: this requires Flask microframework.
:rtype: dict
:returns: A dict { provider-name: Blueprint }
"""
blueprints = {}
for name in self._providers:
try:
blueprints[name] = self.receiver_blueprint_for(name)
except NotImplementedError:
pass # Ignore providers that does not support receivers
return blueprints
|
def receiver_blueprints(self):
""" Get Flask blueprints for every provider that supports it
Note: this requires Flask microframework.
:rtype: dict
:returns: A dict { provider-name: Blueprint }
"""
blueprints = {}
for name in self._providers:
try:
blueprints[name] = self.receiver_blueprint_for(name)
except NotImplementedError:
pass # Ignore providers that does not support receivers
return blueprints
|
[
"Get",
"Flask",
"blueprints",
"for",
"every",
"provider",
"that",
"supports",
"it"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/Gateway.py#L209-L223
|
[
"def",
"receiver_blueprints",
"(",
"self",
")",
":",
"blueprints",
"=",
"{",
"}",
"for",
"name",
"in",
"self",
".",
"_providers",
":",
"try",
":",
"blueprints",
"[",
"name",
"]",
"=",
"self",
".",
"receiver_blueprint_for",
"(",
"name",
")",
"except",
"NotImplementedError",
":",
"pass",
"# Ignore providers that does not support receivers",
"return",
"blueprints"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
Gateway.receiver_blueprints_register
|
Register all provider receivers on the provided Flask application under '/{prefix}/provider-name'
Note: this requires Flask microframework.
:type app: flask.Flask
:param app: Flask app to register the blueprints on
:type prefix: str
:param prefix: URL prefix to hide the receivers under.
You likely want some random stuff here so no stranger can simulate incoming messages.
:rtype: flask.Flask
|
smsframework/Gateway.py
|
def receiver_blueprints_register(self, app, prefix='/'):
""" Register all provider receivers on the provided Flask application under '/{prefix}/provider-name'
Note: this requires Flask microframework.
:type app: flask.Flask
:param app: Flask app to register the blueprints on
:type prefix: str
:param prefix: URL prefix to hide the receivers under.
You likely want some random stuff here so no stranger can simulate incoming messages.
:rtype: flask.Flask
"""
# Register
for name, bp in self.receiver_blueprints().items():
app.register_blueprint(
bp,
url_prefix='{prefix}{name}'.format(
prefix='/'+prefix.strip('/')+'/' if prefix else '/',
name=name
)
)
# Finish
return app
|
def receiver_blueprints_register(self, app, prefix='/'):
""" Register all provider receivers on the provided Flask application under '/{prefix}/provider-name'
Note: this requires Flask microframework.
:type app: flask.Flask
:param app: Flask app to register the blueprints on
:type prefix: str
:param prefix: URL prefix to hide the receivers under.
You likely want some random stuff here so no stranger can simulate incoming messages.
:rtype: flask.Flask
"""
# Register
for name, bp in self.receiver_blueprints().items():
app.register_blueprint(
bp,
url_prefix='{prefix}{name}'.format(
prefix='/'+prefix.strip('/')+'/' if prefix else '/',
name=name
)
)
# Finish
return app
|
[
"Register",
"all",
"provider",
"receivers",
"on",
"the",
"provided",
"Flask",
"application",
"under",
"/",
"{",
"prefix",
"}",
"/",
"provider",
"-",
"name"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/Gateway.py#L225-L248
|
[
"def",
"receiver_blueprints_register",
"(",
"self",
",",
"app",
",",
"prefix",
"=",
"'/'",
")",
":",
"# Register",
"for",
"name",
",",
"bp",
"in",
"self",
".",
"receiver_blueprints",
"(",
")",
".",
"items",
"(",
")",
":",
"app",
".",
"register_blueprint",
"(",
"bp",
",",
"url_prefix",
"=",
"'{prefix}{name}'",
".",
"format",
"(",
"prefix",
"=",
"'/'",
"+",
"prefix",
".",
"strip",
"(",
"'/'",
")",
"+",
"'/'",
"if",
"prefix",
"else",
"'/'",
",",
"name",
"=",
"name",
")",
")",
"# Finish",
"return",
"app"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
IProvider._receive_message
|
Incoming message callback
Calls Gateway.onReceive event hook
Providers are required to:
* Cast phone numbers to digits-only
* Support both ASCII and Unicode messages
* Populate `message.msgid` and `message.meta` fields
* If this method fails with an exception, the provider is required to respond with an error to the service
:type message: IncomingMessage
:param message: The received message
:rtype: IncomingMessage
|
smsframework/IProvider.py
|
def _receive_message(self, message):
""" Incoming message callback
Calls Gateway.onReceive event hook
Providers are required to:
* Cast phone numbers to digits-only
* Support both ASCII and Unicode messages
* Populate `message.msgid` and `message.meta` fields
* If this method fails with an exception, the provider is required to respond with an error to the service
:type message: IncomingMessage
:param message: The received message
:rtype: IncomingMessage
"""
# Populate fields
message.provider = self.name
# Fire the event hook
self.gateway.onReceive(message)
# Finish
return message
|
def _receive_message(self, message):
""" Incoming message callback
Calls Gateway.onReceive event hook
Providers are required to:
* Cast phone numbers to digits-only
* Support both ASCII and Unicode messages
* Populate `message.msgid` and `message.meta` fields
* If this method fails with an exception, the provider is required to respond with an error to the service
:type message: IncomingMessage
:param message: The received message
:rtype: IncomingMessage
"""
# Populate fields
message.provider = self.name
# Fire the event hook
self.gateway.onReceive(message)
# Finish
return message
|
[
"Incoming",
"message",
"callback"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/IProvider.py#L52-L74
|
[
"def",
"_receive_message",
"(",
"self",
",",
"message",
")",
":",
"# Populate fields",
"message",
".",
"provider",
"=",
"self",
".",
"name",
"# Fire the event hook",
"self",
".",
"gateway",
".",
"onReceive",
"(",
"message",
")",
"# Finish",
"return",
"message"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
IProvider._receive_status
|
Incoming status callback
Calls Gateway.onStatus event hook
Providers are required to:
* Cast phone numbers to digits-only
* Use proper MessageStatus subclasses
* Populate `status.msgid` and `status.meta` fields
* If this method fails with an exception, the provider is required to respond with an error to the service
:type status: MessageStatus
:param status: The received status
:rtype: MessageStatus
|
smsframework/IProvider.py
|
def _receive_status(self, status):
""" Incoming status callback
Calls Gateway.onStatus event hook
Providers are required to:
* Cast phone numbers to digits-only
* Use proper MessageStatus subclasses
* Populate `status.msgid` and `status.meta` fields
* If this method fails with an exception, the provider is required to respond with an error to the service
:type status: MessageStatus
:param status: The received status
:rtype: MessageStatus
"""
# Populate fields
status.provider = self.name
# Fire the event hook
self.gateway.onStatus(status)
# Finish
return status
|
def _receive_status(self, status):
""" Incoming status callback
Calls Gateway.onStatus event hook
Providers are required to:
* Cast phone numbers to digits-only
* Use proper MessageStatus subclasses
* Populate `status.msgid` and `status.meta` fields
* If this method fails with an exception, the provider is required to respond with an error to the service
:type status: MessageStatus
:param status: The received status
:rtype: MessageStatus
"""
# Populate fields
status.provider = self.name
# Fire the event hook
self.gateway.onStatus(status)
# Finish
return status
|
[
"Incoming",
"status",
"callback"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/IProvider.py#L76-L98
|
[
"def",
"_receive_status",
"(",
"self",
",",
"status",
")",
":",
"# Populate fields",
"status",
".",
"provider",
"=",
"self",
".",
"name",
"# Fire the event hook",
"self",
".",
"gateway",
".",
"onStatus",
"(",
"status",
")",
"# Finish",
"return",
"status"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
im
|
Incoming message handler: forwarded by ForwardServerProvider
|
smsframework/providers/forward/receiver_client.py
|
def im():
""" Incoming message handler: forwarded by ForwardServerProvider """
req = jsonex_loads(request.get_data())
message = g.provider._receive_message(req['message'])
return {'message': message}
|
def im():
""" Incoming message handler: forwarded by ForwardServerProvider """
req = jsonex_loads(request.get_data())
message = g.provider._receive_message(req['message'])
return {'message': message}
|
[
"Incoming",
"message",
"handler",
":",
"forwarded",
"by",
"ForwardServerProvider"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/receiver_client.py#L11-L15
|
[
"def",
"im",
"(",
")",
":",
"req",
"=",
"jsonex_loads",
"(",
"request",
".",
"get_data",
"(",
")",
")",
"message",
"=",
"g",
".",
"provider",
".",
"_receive_message",
"(",
"req",
"[",
"'message'",
"]",
")",
"return",
"{",
"'message'",
":",
"message",
"}"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
status
|
Incoming status handler: forwarded by ForwardServerProvider
|
smsframework/providers/forward/receiver_client.py
|
def status():
""" Incoming status handler: forwarded by ForwardServerProvider """
req = jsonex_loads(request.get_data())
status = g.provider._receive_status(req['status'])
return {'status': status}
|
def status():
""" Incoming status handler: forwarded by ForwardServerProvider """
req = jsonex_loads(request.get_data())
status = g.provider._receive_status(req['status'])
return {'status': status}
|
[
"Incoming",
"status",
"handler",
":",
"forwarded",
"by",
"ForwardServerProvider"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/receiver_client.py#L20-L24
|
[
"def",
"status",
"(",
")",
":",
"req",
"=",
"jsonex_loads",
"(",
"request",
".",
"get_data",
"(",
")",
")",
"status",
"=",
"g",
".",
"provider",
".",
"_receive_status",
"(",
"req",
"[",
"'status'",
"]",
")",
"return",
"{",
"'status'",
":",
"status",
"}"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
jsonex_loads
|
Unserialize with JsonEx
:rtype: dict
|
smsframework/providers/forward/provider.py
|
def jsonex_loads(s):
""" Unserialize with JsonEx
:rtype: dict
"""
return json.loads(s.decode('utf-8'), cls=JsonExDecoder, classes=classes, exceptions=exceptions)
|
def jsonex_loads(s):
""" Unserialize with JsonEx
:rtype: dict
"""
return json.loads(s.decode('utf-8'), cls=JsonExDecoder, classes=classes, exceptions=exceptions)
|
[
"Unserialize",
"with",
"JsonEx",
":",
"rtype",
":",
"dict"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L51-L55
|
[
"def",
"jsonex_loads",
"(",
"s",
")",
":",
"return",
"json",
".",
"loads",
"(",
"s",
".",
"decode",
"(",
"'utf-8'",
")",
",",
"cls",
"=",
"JsonExDecoder",
",",
"classes",
"=",
"classes",
",",
"exceptions",
"=",
"exceptions",
")"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
jsonex_api
|
View wrapper for JsonEx responses. Catches exceptions as well
|
smsframework/providers/forward/provider.py
|
def jsonex_api(f):
""" View wrapper for JsonEx responses. Catches exceptions as well """
@wraps(f)
def wrapper(*args, **kwargs):
# Call, catch exceptions
try:
code, res = 200, f(*args, **kwargs)
except HTTPException as e:
code, res = e.code, {'error': e}
except Exception as e:
code, res = 500, {'error': e}
logger.exception('Method error')
# Response
response = make_response(jsonex_dumps(res), code)
response.headers['Content-Type'] = 'application/json'
return response
return wrapper
|
def jsonex_api(f):
""" View wrapper for JsonEx responses. Catches exceptions as well """
@wraps(f)
def wrapper(*args, **kwargs):
# Call, catch exceptions
try:
code, res = 200, f(*args, **kwargs)
except HTTPException as e:
code, res = e.code, {'error': e}
except Exception as e:
code, res = 500, {'error': e}
logger.exception('Method error')
# Response
response = make_response(jsonex_dumps(res), code)
response.headers['Content-Type'] = 'application/json'
return response
return wrapper
|
[
"View",
"wrapper",
"for",
"JsonEx",
"responses",
".",
"Catches",
"exceptions",
"as",
"well"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L58-L75
|
[
"def",
"jsonex_api",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Call, catch exceptions",
"try",
":",
"code",
",",
"res",
"=",
"200",
",",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"HTTPException",
"as",
"e",
":",
"code",
",",
"res",
"=",
"e",
".",
"code",
",",
"{",
"'error'",
":",
"e",
"}",
"except",
"Exception",
"as",
"e",
":",
"code",
",",
"res",
"=",
"500",
",",
"{",
"'error'",
":",
"e",
"}",
"logger",
".",
"exception",
"(",
"'Method error'",
")",
"# Response",
"response",
"=",
"make_response",
"(",
"jsonex_dumps",
"(",
"res",
")",
",",
"code",
")",
"response",
".",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"'application/json'",
"return",
"response",
"return",
"wrapper"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
_parse_authentication
|
Parse authentication data from the URL and put it in the `headers` dict. With caching behavior
:param url: URL
:type url: str
:return: (URL without authentication info, headers dict)
:rtype: str, dict
|
smsframework/providers/forward/provider.py
|
def _parse_authentication(url):
""" Parse authentication data from the URL and put it in the `headers` dict. With caching behavior
:param url: URL
:type url: str
:return: (URL without authentication info, headers dict)
:rtype: str, dict
"""
u = url
h = {} # New headers
# Cache?
if url in _parse_authentication._memoize:
u, h = _parse_authentication._memoize[url]
else:
# Parse
p = urlsplit(url, 'http')
if p.username and p.password:
# Prepare header
h['Authorization'] = b'Basic ' + base64.b64encode(p.username.encode() + b':' + p.password.encode())
# Remove authentication info since urllib2.Request() does not understand it
u = urlunsplit((p.scheme, p.netloc.split('@', 1)[1], p.path, p.query, p.fragment))
# Cache
_parse_authentication._memoize[url] = (u, h)
# Finish
return u, h
|
def _parse_authentication(url):
""" Parse authentication data from the URL and put it in the `headers` dict. With caching behavior
:param url: URL
:type url: str
:return: (URL without authentication info, headers dict)
:rtype: str, dict
"""
u = url
h = {} # New headers
# Cache?
if url in _parse_authentication._memoize:
u, h = _parse_authentication._memoize[url]
else:
# Parse
p = urlsplit(url, 'http')
if p.username and p.password:
# Prepare header
h['Authorization'] = b'Basic ' + base64.b64encode(p.username.encode() + b':' + p.password.encode())
# Remove authentication info since urllib2.Request() does not understand it
u = urlunsplit((p.scheme, p.netloc.split('@', 1)[1], p.path, p.query, p.fragment))
# Cache
_parse_authentication._memoize[url] = (u, h)
# Finish
return u, h
|
[
"Parse",
"authentication",
"data",
"from",
"the",
"URL",
"and",
"put",
"it",
"in",
"the",
"headers",
"dict",
".",
"With",
"caching",
"behavior",
":",
"param",
"url",
":",
"URL",
":",
"type",
"url",
":",
"str",
":",
"return",
":",
"(",
"URL",
"without",
"authentication",
"info",
"headers",
"dict",
")",
":",
"rtype",
":",
"str",
"dict"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L78-L103
|
[
"def",
"_parse_authentication",
"(",
"url",
")",
":",
"u",
"=",
"url",
"h",
"=",
"{",
"}",
"# New headers",
"# Cache?",
"if",
"url",
"in",
"_parse_authentication",
".",
"_memoize",
":",
"u",
",",
"h",
"=",
"_parse_authentication",
".",
"_memoize",
"[",
"url",
"]",
"else",
":",
"# Parse",
"p",
"=",
"urlsplit",
"(",
"url",
",",
"'http'",
")",
"if",
"p",
".",
"username",
"and",
"p",
".",
"password",
":",
"# Prepare header",
"h",
"[",
"'Authorization'",
"]",
"=",
"b'Basic '",
"+",
"base64",
".",
"b64encode",
"(",
"p",
".",
"username",
".",
"encode",
"(",
")",
"+",
"b':'",
"+",
"p",
".",
"password",
".",
"encode",
"(",
")",
")",
"# Remove authentication info since urllib2.Request() does not understand it",
"u",
"=",
"urlunsplit",
"(",
"(",
"p",
".",
"scheme",
",",
"p",
".",
"netloc",
".",
"split",
"(",
"'@'",
",",
"1",
")",
"[",
"1",
"]",
",",
"p",
".",
"path",
",",
"p",
".",
"query",
",",
"p",
".",
"fragment",
")",
")",
"# Cache",
"_parse_authentication",
".",
"_memoize",
"[",
"url",
"]",
"=",
"(",
"u",
",",
"h",
")",
"# Finish",
"return",
"u",
",",
"h"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
jsonex_request
|
Make a request with JsonEx
:param url: URL
:type url: str
:param data: Data to POST
:type data: dict
:return: Response
:rtype: dict
:raises exc.ConnectionError: Connection error
:raises exc.ServerError: Remote server error (unknown)
:raises exc.ProviderError: any errors reported by the remote
|
smsframework/providers/forward/provider.py
|
def jsonex_request(url, data, headers=None):
""" Make a request with JsonEx
:param url: URL
:type url: str
:param data: Data to POST
:type data: dict
:return: Response
:rtype: dict
:raises exc.ConnectionError: Connection error
:raises exc.ServerError: Remote server error (unknown)
:raises exc.ProviderError: any errors reported by the remote
"""
# Authentication?
url, headers = _parse_authentication(url)
headers['Content-Type'] = 'application/json'
# Request
try:
req = Request(url, headers=headers)
response = urlopen(req, jsonex_dumps(data))
res_str = response.read()
res = jsonex_loads(res_str)
except HTTPError as e:
if 'Content-Type' in e.headers and e.headers['Content-Type'] == 'application/json':
res = jsonex_loads(e.read())
else:
raise exc.ServerError('Server at "{}" failed: {}'.format(url, e))
except URLError as e:
raise exc.ConnectionError('Connection to "{}" failed: {}'.format(url, e))
# Errors?
if 'error' in res: # Exception object
raise res['error'] # Error raised by the remote side
return res
|
def jsonex_request(url, data, headers=None):
""" Make a request with JsonEx
:param url: URL
:type url: str
:param data: Data to POST
:type data: dict
:return: Response
:rtype: dict
:raises exc.ConnectionError: Connection error
:raises exc.ServerError: Remote server error (unknown)
:raises exc.ProviderError: any errors reported by the remote
"""
# Authentication?
url, headers = _parse_authentication(url)
headers['Content-Type'] = 'application/json'
# Request
try:
req = Request(url, headers=headers)
response = urlopen(req, jsonex_dumps(data))
res_str = response.read()
res = jsonex_loads(res_str)
except HTTPError as e:
if 'Content-Type' in e.headers and e.headers['Content-Type'] == 'application/json':
res = jsonex_loads(e.read())
else:
raise exc.ServerError('Server at "{}" failed: {}'.format(url, e))
except URLError as e:
raise exc.ConnectionError('Connection to "{}" failed: {}'.format(url, e))
# Errors?
if 'error' in res: # Exception object
raise res['error'] # Error raised by the remote side
return res
|
[
"Make",
"a",
"request",
"with",
"JsonEx",
":",
"param",
"url",
":",
"URL",
":",
"type",
"url",
":",
"str",
":",
"param",
"data",
":",
"Data",
"to",
"POST",
":",
"type",
"data",
":",
"dict",
":",
"return",
":",
"Response",
":",
"rtype",
":",
"dict",
":",
"raises",
"exc",
".",
"ConnectionError",
":",
"Connection",
"error",
":",
"raises",
"exc",
".",
"ServerError",
":",
"Remote",
"server",
"error",
"(",
"unknown",
")",
":",
"raises",
"exc",
".",
"ProviderError",
":",
"any",
"errors",
"reported",
"by",
"the",
"remote"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L107-L141
|
[
"def",
"jsonex_request",
"(",
"url",
",",
"data",
",",
"headers",
"=",
"None",
")",
":",
"# Authentication?",
"url",
",",
"headers",
"=",
"_parse_authentication",
"(",
"url",
")",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"'application/json'",
"# Request",
"try",
":",
"req",
"=",
"Request",
"(",
"url",
",",
"headers",
"=",
"headers",
")",
"response",
"=",
"urlopen",
"(",
"req",
",",
"jsonex_dumps",
"(",
"data",
")",
")",
"res_str",
"=",
"response",
".",
"read",
"(",
")",
"res",
"=",
"jsonex_loads",
"(",
"res_str",
")",
"except",
"HTTPError",
"as",
"e",
":",
"if",
"'Content-Type'",
"in",
"e",
".",
"headers",
"and",
"e",
".",
"headers",
"[",
"'Content-Type'",
"]",
"==",
"'application/json'",
":",
"res",
"=",
"jsonex_loads",
"(",
"e",
".",
"read",
"(",
")",
")",
"else",
":",
"raise",
"exc",
".",
"ServerError",
"(",
"'Server at \"{}\" failed: {}'",
".",
"format",
"(",
"url",
",",
"e",
")",
")",
"except",
"URLError",
"as",
"e",
":",
"raise",
"exc",
".",
"ConnectionError",
"(",
"'Connection to \"{}\" failed: {}'",
".",
"format",
"(",
"url",
",",
"e",
")",
")",
"# Errors?",
"if",
"'error'",
"in",
"res",
":",
"# Exception object",
"raise",
"res",
"[",
"'error'",
"]",
"# Error raised by the remote side",
"return",
"res"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
ForwardClientProvider.send
|
Send a message by forwarding it to the server
:param message: Message
:type message: smsframework.data.OutgoingMessage
:rtype: smsframework.data.OutgoingMessage
:raise Exception: any exception reported by the other side
:raise urllib2.URLError: Connection error
|
smsframework/providers/forward/provider.py
|
def send(self, message):
""" Send a message by forwarding it to the server
:param message: Message
:type message: smsframework.data.OutgoingMessage
:rtype: smsframework.data.OutgoingMessage
:raise Exception: any exception reported by the other side
:raise urllib2.URLError: Connection error
"""
res = jsonex_request(self.server_url + '/im'.lstrip('/'), {'message': message})
msg = res['message'] # OutgoingMessage object
# Replace properties in the original object (so it's the same object, like with other providers)
for k, v in msg.__dict__.items():
setattr(message, k, v)
return message
|
def send(self, message):
""" Send a message by forwarding it to the server
:param message: Message
:type message: smsframework.data.OutgoingMessage
:rtype: smsframework.data.OutgoingMessage
:raise Exception: any exception reported by the other side
:raise urllib2.URLError: Connection error
"""
res = jsonex_request(self.server_url + '/im'.lstrip('/'), {'message': message})
msg = res['message'] # OutgoingMessage object
# Replace properties in the original object (so it's the same object, like with other providers)
for k, v in msg.__dict__.items():
setattr(message, k, v)
return message
|
[
"Send",
"a",
"message",
"by",
"forwarding",
"it",
"to",
"the",
"server",
":",
"param",
"message",
":",
"Message",
":",
"type",
"message",
":",
"smsframework",
".",
"data",
".",
"OutgoingMessage",
":",
"rtype",
":",
"smsframework",
".",
"data",
".",
"OutgoingMessage",
":",
"raise",
"Exception",
":",
"any",
"exception",
"reported",
"by",
"the",
"other",
"side",
":",
"raise",
"urllib2",
".",
"URLError",
":",
"Connection",
"error"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L162-L176
|
[
"def",
"send",
"(",
"self",
",",
"message",
")",
":",
"res",
"=",
"jsonex_request",
"(",
"self",
".",
"server_url",
"+",
"'/im'",
".",
"lstrip",
"(",
"'/'",
")",
",",
"{",
"'message'",
":",
"message",
"}",
")",
"msg",
"=",
"res",
"[",
"'message'",
"]",
"# OutgoingMessage object",
"# Replace properties in the original object (so it's the same object, like with other providers)",
"for",
"k",
",",
"v",
"in",
"msg",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"setattr",
"(",
"message",
",",
"k",
",",
"v",
")",
"return",
"message"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
ForwardServerProvider._forward_object_to_client
|
Forward an object to client
:type client: str
:type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:rtype: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:raise Exception: any exception reported by the other side
|
smsframework/providers/forward/provider.py
|
def _forward_object_to_client(self, client, obj):
""" Forward an object to client
:type client: str
:type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:rtype: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:raise Exception: any exception reported by the other side
"""
url, name = ('/im', 'message') if isinstance(obj, IncomingMessage) else ('/status', 'status')
res = jsonex_request(client.rstrip('/') + '/' + url.lstrip('/'), {name: obj})
return res[name]
|
def _forward_object_to_client(self, client, obj):
""" Forward an object to client
:type client: str
:type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:rtype: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:raise Exception: any exception reported by the other side
"""
url, name = ('/im', 'message') if isinstance(obj, IncomingMessage) else ('/status', 'status')
res = jsonex_request(client.rstrip('/') + '/' + url.lstrip('/'), {name: obj})
return res[name]
|
[
"Forward",
"an",
"object",
"to",
"client",
":",
"type",
"client",
":",
"str",
":",
"type",
"obj",
":",
"smsframework",
".",
"data",
".",
"IncomingMessage|smsframework",
".",
"data",
".",
"MessageStatus",
":",
"rtype",
":",
"smsframework",
".",
"data",
".",
"IncomingMessage|smsframework",
".",
"data",
".",
"MessageStatus",
":",
"raise",
"Exception",
":",
"any",
"exception",
"reported",
"by",
"the",
"other",
"side"
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L228-L237
|
[
"def",
"_forward_object_to_client",
"(",
"self",
",",
"client",
",",
"obj",
")",
":",
"url",
",",
"name",
"=",
"(",
"'/im'",
",",
"'message'",
")",
"if",
"isinstance",
"(",
"obj",
",",
"IncomingMessage",
")",
"else",
"(",
"'/status'",
",",
"'status'",
")",
"res",
"=",
"jsonex_request",
"(",
"client",
".",
"rstrip",
"(",
"'/'",
")",
"+",
"'/'",
"+",
"url",
".",
"lstrip",
"(",
"'/'",
")",
",",
"{",
"name",
":",
"obj",
"}",
")",
"return",
"res",
"[",
"name",
"]"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
ForwardServerProvider.forward
|
Forward an object to clients.
:param obj: The object to be forwarded
:type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:raises Exception: if any of the clients failed
|
smsframework/providers/forward/provider.py
|
def forward(self, obj):
""" Forward an object to clients.
:param obj: The object to be forwarded
:type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:raises Exception: if any of the clients failed
"""
assert isinstance(obj, (IncomingMessage, MessageStatus)), 'Tried to forward an object of an unsupported type: {}'.format(obj)
clients = self.choose_clients(obj)
if Parallel:
pll = Parallel(self._forward_object_to_client)
for client in clients:
pll(client, obj)
results, errors = pll.join()
if errors:
raise errors[0]
else:
for client in clients:
self._forward_object_to_client(client, obj)
|
def forward(self, obj):
""" Forward an object to clients.
:param obj: The object to be forwarded
:type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
:raises Exception: if any of the clients failed
"""
assert isinstance(obj, (IncomingMessage, MessageStatus)), 'Tried to forward an object of an unsupported type: {}'.format(obj)
clients = self.choose_clients(obj)
if Parallel:
pll = Parallel(self._forward_object_to_client)
for client in clients:
pll(client, obj)
results, errors = pll.join()
if errors:
raise errors[0]
else:
for client in clients:
self._forward_object_to_client(client, obj)
|
[
"Forward",
"an",
"object",
"to",
"clients",
"."
] |
kolypto/py-smsframework
|
python
|
https://github.com/kolypto/py-smsframework/blob/4f3d812711f5e2e037dc80c4014c815fe2d68a0b/smsframework/providers/forward/provider.py#L239-L258
|
[
"def",
"forward",
"(",
"self",
",",
"obj",
")",
":",
"assert",
"isinstance",
"(",
"obj",
",",
"(",
"IncomingMessage",
",",
"MessageStatus",
")",
")",
",",
"'Tried to forward an object of an unsupported type: {}'",
".",
"format",
"(",
"obj",
")",
"clients",
"=",
"self",
".",
"choose_clients",
"(",
"obj",
")",
"if",
"Parallel",
":",
"pll",
"=",
"Parallel",
"(",
"self",
".",
"_forward_object_to_client",
")",
"for",
"client",
"in",
"clients",
":",
"pll",
"(",
"client",
",",
"obj",
")",
"results",
",",
"errors",
"=",
"pll",
".",
"join",
"(",
")",
"if",
"errors",
":",
"raise",
"errors",
"[",
"0",
"]",
"else",
":",
"for",
"client",
"in",
"clients",
":",
"self",
".",
"_forward_object_to_client",
"(",
"client",
",",
"obj",
")"
] |
4f3d812711f5e2e037dc80c4014c815fe2d68a0b
|
test
|
Pytelemetry.stats
|
Returns a dictionnary of dictionnary that contains critical information
about the transport and protocol behavior, such as:
* amount of received frames
* amount of badly delimited frames
* amount of correctly delimited but still corrupted frames
* etc
|
pytelemetry/pytelemetry.py
|
def stats(self):
"""
Returns a dictionnary of dictionnary that contains critical information
about the transport and protocol behavior, such as:
* amount of received frames
* amount of badly delimited frames
* amount of correctly delimited but still corrupted frames
* etc
"""
d = dict()
d['framing'] = self.api.delimiter.stats()
d['protocol'] = self.api.stats()
return d
|
def stats(self):
"""
Returns a dictionnary of dictionnary that contains critical information
about the transport and protocol behavior, such as:
* amount of received frames
* amount of badly delimited frames
* amount of correctly delimited but still corrupted frames
* etc
"""
d = dict()
d['framing'] = self.api.delimiter.stats()
d['protocol'] = self.api.stats()
return d
|
[
"Returns",
"a",
"dictionnary",
"of",
"dictionnary",
"that",
"contains",
"critical",
"information",
"about",
"the",
"transport",
"and",
"protocol",
"behavior",
"such",
"as",
":",
"*",
"amount",
"of",
"received",
"frames",
"*",
"amount",
"of",
"badly",
"delimited",
"frames",
"*",
"amount",
"of",
"correctly",
"delimited",
"but",
"still",
"corrupted",
"frames",
"*",
"etc"
] |
Overdrivr/pytelemetry
|
python
|
https://github.com/Overdrivr/pytelemetry/blob/791b0129ddffc1832e1a8d90d9b97662422a40f0/pytelemetry/pytelemetry.py#L57-L70
|
[
"def",
"stats",
"(",
"self",
")",
":",
"d",
"=",
"dict",
"(",
")",
"d",
"[",
"'framing'",
"]",
"=",
"self",
".",
"api",
".",
"delimiter",
".",
"stats",
"(",
")",
"d",
"[",
"'protocol'",
"]",
"=",
"self",
".",
"api",
".",
"stats",
"(",
")",
"return",
"d"
] |
791b0129ddffc1832e1a8d90d9b97662422a40f0
|
test
|
Erc20Manager.get_balance
|
Get balance of address for `erc20_address`
:param address: owner address
:param erc20_address: erc20 token address
:return: balance
|
gnosis/eth/ethereum_client.py
|
def get_balance(self, address: str, erc20_address: str) -> int:
"""
Get balance of address for `erc20_address`
:param address: owner address
:param erc20_address: erc20 token address
:return: balance
"""
return get_erc20_contract(self.w3, erc20_address).functions.balanceOf(address).call()
|
def get_balance(self, address: str, erc20_address: str) -> int:
"""
Get balance of address for `erc20_address`
:param address: owner address
:param erc20_address: erc20 token address
:return: balance
"""
return get_erc20_contract(self.w3, erc20_address).functions.balanceOf(address).call()
|
[
"Get",
"balance",
"of",
"address",
"for",
"erc20_address",
":",
"param",
"address",
":",
"owner",
"address",
":",
"param",
"erc20_address",
":",
"erc20",
"token",
"address",
":",
"return",
":",
"balance"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L108-L115
|
[
"def",
"get_balance",
"(",
"self",
",",
"address",
":",
"str",
",",
"erc20_address",
":",
"str",
")",
"->",
"int",
":",
"return",
"get_erc20_contract",
"(",
"self",
".",
"w3",
",",
"erc20_address",
")",
".",
"functions",
".",
"balanceOf",
"(",
"address",
")",
".",
"call",
"(",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
Erc20Manager.get_info
|
Get erc20 information (`name`, `symbol` and `decimals`)
:param erc20_address:
:return: Erc20_Info
|
gnosis/eth/ethereum_client.py
|
def get_info(self, erc20_address: str) -> Erc20_Info:
"""
Get erc20 information (`name`, `symbol` and `decimals`)
:param erc20_address:
:return: Erc20_Info
"""
# We use the `example erc20` as the `erc20 interface` doesn't have `name`, `symbol` nor `decimals`
erc20 = get_example_erc20_contract(self.w3, erc20_address)
name = erc20.functions.name().call()
symbol = erc20.functions.symbol().call()
decimals = erc20.functions.decimals().call()
return Erc20_Info(name, symbol, decimals)
|
def get_info(self, erc20_address: str) -> Erc20_Info:
"""
Get erc20 information (`name`, `symbol` and `decimals`)
:param erc20_address:
:return: Erc20_Info
"""
# We use the `example erc20` as the `erc20 interface` doesn't have `name`, `symbol` nor `decimals`
erc20 = get_example_erc20_contract(self.w3, erc20_address)
name = erc20.functions.name().call()
symbol = erc20.functions.symbol().call()
decimals = erc20.functions.decimals().call()
return Erc20_Info(name, symbol, decimals)
|
[
"Get",
"erc20",
"information",
"(",
"name",
"symbol",
"and",
"decimals",
")",
":",
"param",
"erc20_address",
":",
":",
"return",
":",
"Erc20_Info"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L117-L128
|
[
"def",
"get_info",
"(",
"self",
",",
"erc20_address",
":",
"str",
")",
"->",
"Erc20_Info",
":",
"# We use the `example erc20` as the `erc20 interface` doesn't have `name`, `symbol` nor `decimals`",
"erc20",
"=",
"get_example_erc20_contract",
"(",
"self",
".",
"w3",
",",
"erc20_address",
")",
"name",
"=",
"erc20",
".",
"functions",
".",
"name",
"(",
")",
".",
"call",
"(",
")",
"symbol",
"=",
"erc20",
".",
"functions",
".",
"symbol",
"(",
")",
".",
"call",
"(",
")",
"decimals",
"=",
"erc20",
".",
"functions",
".",
"decimals",
"(",
")",
".",
"call",
"(",
")",
"return",
"Erc20_Info",
"(",
"name",
",",
"symbol",
",",
"decimals",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
Erc20Manager.get_transfer_history
|
Get events for erc20 transfers. At least one of `from_address`, `to_address` or `token_address` must be
defined
An example of event:
{
"args": {
"from": "0x1Ce67Ea59377A163D47DFFc9BaAB99423BE6EcF1",
"to": "0xaE9E15896fd32E59C7d89ce7a95a9352D6ebD70E",
"value": 15000000000000000
},
"event": "Transfer",
"logIndex": 42,
"transactionIndex": 60,
"transactionHash": "0x71d6d83fef3347bad848e83dfa0ab28296e2953de946ee152ea81c6dfb42d2b3",
"address": "0xfecA834E7da9D437645b474450688DA9327112a5",
"blockHash": "0x054de9a496fc7d10303068cbc7ee3e25181a3b26640497859a5e49f0342e7db2",
"blockNumber": 7265022
}
:param from_block: Block to start querying from
:param to_block: Block to stop querying from
:param from_address: Address sending the erc20 transfer
:param to_address: Address receiving the erc20 transfer
:param token_address: Address of the token
:return: List of events
:throws: ReadTimeout
|
gnosis/eth/ethereum_client.py
|
def get_transfer_history(self, from_block: int, to_block: Optional[int] = None,
from_address: Optional[str] = None, to_address: Optional[str] = None,
token_address: Optional[str] = None) -> List[Dict[str, any]]:
"""
Get events for erc20 transfers. At least one of `from_address`, `to_address` or `token_address` must be
defined
An example of event:
{
"args": {
"from": "0x1Ce67Ea59377A163D47DFFc9BaAB99423BE6EcF1",
"to": "0xaE9E15896fd32E59C7d89ce7a95a9352D6ebD70E",
"value": 15000000000000000
},
"event": "Transfer",
"logIndex": 42,
"transactionIndex": 60,
"transactionHash": "0x71d6d83fef3347bad848e83dfa0ab28296e2953de946ee152ea81c6dfb42d2b3",
"address": "0xfecA834E7da9D437645b474450688DA9327112a5",
"blockHash": "0x054de9a496fc7d10303068cbc7ee3e25181a3b26640497859a5e49f0342e7db2",
"blockNumber": 7265022
}
:param from_block: Block to start querying from
:param to_block: Block to stop querying from
:param from_address: Address sending the erc20 transfer
:param to_address: Address receiving the erc20 transfer
:param token_address: Address of the token
:return: List of events
:throws: ReadTimeout
"""
assert from_address or to_address or token_address, 'At least one parameter must be provided'
erc20 = get_erc20_contract(self.w3)
argument_filters = {}
if from_address:
argument_filters['from'] = from_address
if to_address:
argument_filters['to'] = to_address
return erc20.events.Transfer.createFilter(fromBlock=from_block,
toBlock=to_block,
address=token_address,
argument_filters=argument_filters).get_all_entries()
|
def get_transfer_history(self, from_block: int, to_block: Optional[int] = None,
from_address: Optional[str] = None, to_address: Optional[str] = None,
token_address: Optional[str] = None) -> List[Dict[str, any]]:
"""
Get events for erc20 transfers. At least one of `from_address`, `to_address` or `token_address` must be
defined
An example of event:
{
"args": {
"from": "0x1Ce67Ea59377A163D47DFFc9BaAB99423BE6EcF1",
"to": "0xaE9E15896fd32E59C7d89ce7a95a9352D6ebD70E",
"value": 15000000000000000
},
"event": "Transfer",
"logIndex": 42,
"transactionIndex": 60,
"transactionHash": "0x71d6d83fef3347bad848e83dfa0ab28296e2953de946ee152ea81c6dfb42d2b3",
"address": "0xfecA834E7da9D437645b474450688DA9327112a5",
"blockHash": "0x054de9a496fc7d10303068cbc7ee3e25181a3b26640497859a5e49f0342e7db2",
"blockNumber": 7265022
}
:param from_block: Block to start querying from
:param to_block: Block to stop querying from
:param from_address: Address sending the erc20 transfer
:param to_address: Address receiving the erc20 transfer
:param token_address: Address of the token
:return: List of events
:throws: ReadTimeout
"""
assert from_address or to_address or token_address, 'At least one parameter must be provided'
erc20 = get_erc20_contract(self.w3)
argument_filters = {}
if from_address:
argument_filters['from'] = from_address
if to_address:
argument_filters['to'] = to_address
return erc20.events.Transfer.createFilter(fromBlock=from_block,
toBlock=to_block,
address=token_address,
argument_filters=argument_filters).get_all_entries()
|
[
"Get",
"events",
"for",
"erc20",
"transfers",
".",
"At",
"least",
"one",
"of",
"from_address",
"to_address",
"or",
"token_address",
"must",
"be",
"defined",
"An",
"example",
"of",
"event",
":",
"{",
"args",
":",
"{",
"from",
":",
"0x1Ce67Ea59377A163D47DFFc9BaAB99423BE6EcF1",
"to",
":",
"0xaE9E15896fd32E59C7d89ce7a95a9352D6ebD70E",
"value",
":",
"15000000000000000",
"}",
"event",
":",
"Transfer",
"logIndex",
":",
"42",
"transactionIndex",
":",
"60",
"transactionHash",
":",
"0x71d6d83fef3347bad848e83dfa0ab28296e2953de946ee152ea81c6dfb42d2b3",
"address",
":",
"0xfecA834E7da9D437645b474450688DA9327112a5",
"blockHash",
":",
"0x054de9a496fc7d10303068cbc7ee3e25181a3b26640497859a5e49f0342e7db2",
"blockNumber",
":",
"7265022",
"}",
":",
"param",
"from_block",
":",
"Block",
"to",
"start",
"querying",
"from",
":",
"param",
"to_block",
":",
"Block",
"to",
"stop",
"querying",
"from",
":",
"param",
"from_address",
":",
"Address",
"sending",
"the",
"erc20",
"transfer",
":",
"param",
"to_address",
":",
"Address",
"receiving",
"the",
"erc20",
"transfer",
":",
"param",
"token_address",
":",
"Address",
"of",
"the",
"token",
":",
"return",
":",
"List",
"of",
"events",
":",
"throws",
":",
"ReadTimeout"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L130-L172
|
[
"def",
"get_transfer_history",
"(",
"self",
",",
"from_block",
":",
"int",
",",
"to_block",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"from_address",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"to_address",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"token_address",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"List",
"[",
"Dict",
"[",
"str",
",",
"any",
"]",
"]",
":",
"assert",
"from_address",
"or",
"to_address",
"or",
"token_address",
",",
"'At least one parameter must be provided'",
"erc20",
"=",
"get_erc20_contract",
"(",
"self",
".",
"w3",
")",
"argument_filters",
"=",
"{",
"}",
"if",
"from_address",
":",
"argument_filters",
"[",
"'from'",
"]",
"=",
"from_address",
"if",
"to_address",
":",
"argument_filters",
"[",
"'to'",
"]",
"=",
"to_address",
"return",
"erc20",
".",
"events",
".",
"Transfer",
".",
"createFilter",
"(",
"fromBlock",
"=",
"from_block",
",",
"toBlock",
"=",
"to_block",
",",
"address",
"=",
"token_address",
",",
"argument_filters",
"=",
"argument_filters",
")",
".",
"get_all_entries",
"(",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
Erc20Manager.send_tokens
|
Send tokens to address
:param to:
:param amount:
:param erc20_address:
:param private_key:
:return: tx_hash
|
gnosis/eth/ethereum_client.py
|
def send_tokens(self, to: str, amount: int, erc20_address: str, private_key: str) -> bytes:
"""
Send tokens to address
:param to:
:param amount:
:param erc20_address:
:param private_key:
:return: tx_hash
"""
erc20 = get_erc20_contract(self.w3, erc20_address)
account = Account.privateKeyToAccount(private_key)
tx = erc20.functions.transfer(to, amount).buildTransaction({'from': account.address})
return self.ethereum_client.send_unsigned_transaction(tx, private_key=private_key)
|
def send_tokens(self, to: str, amount: int, erc20_address: str, private_key: str) -> bytes:
"""
Send tokens to address
:param to:
:param amount:
:param erc20_address:
:param private_key:
:return: tx_hash
"""
erc20 = get_erc20_contract(self.w3, erc20_address)
account = Account.privateKeyToAccount(private_key)
tx = erc20.functions.transfer(to, amount).buildTransaction({'from': account.address})
return self.ethereum_client.send_unsigned_transaction(tx, private_key=private_key)
|
[
"Send",
"tokens",
"to",
"address",
":",
"param",
"to",
":",
":",
"param",
"amount",
":",
":",
"param",
"erc20_address",
":",
":",
"param",
"private_key",
":",
":",
"return",
":",
"tx_hash"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L174-L186
|
[
"def",
"send_tokens",
"(",
"self",
",",
"to",
":",
"str",
",",
"amount",
":",
"int",
",",
"erc20_address",
":",
"str",
",",
"private_key",
":",
"str",
")",
"->",
"bytes",
":",
"erc20",
"=",
"get_erc20_contract",
"(",
"self",
".",
"w3",
",",
"erc20_address",
")",
"account",
"=",
"Account",
".",
"privateKeyToAccount",
"(",
"private_key",
")",
"tx",
"=",
"erc20",
".",
"functions",
".",
"transfer",
"(",
"to",
",",
"amount",
")",
".",
"buildTransaction",
"(",
"{",
"'from'",
":",
"account",
".",
"address",
"}",
")",
"return",
"self",
".",
"ethereum_client",
".",
"send_unsigned_transaction",
"(",
"tx",
",",
"private_key",
"=",
"private_key",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
ParityManager.trace_filter
|
:param from_block: Quantity or Tag - (optional) From this block. `0` is not working, it needs to be `>= 1`
:param to_block: Quantity or Tag - (optional) To this block.
:param from_address: Array - (optional) Sent from these addresses.
:param to_address: Address - (optional) Sent to these addresses.
:param after: Quantity - (optional) The offset trace number
:param count: Quantity - (optional) Integer number of traces to display in a batch.
:return:
[
{
"action": {
"callType": "call",
"from": "0x32be343b94f860124dc4fee278fdcbd38c102d88",
"gas": "0x4c40d",
"input": "0x",
"to": "0x8bbb73bcb5d553b5a556358d27625323fd781d37",
"value": "0x3f0650ec47fd240000"
},
"blockHash": "0x86df301bcdd8248d982dbf039f09faf792684e1aeee99d5b58b77d620008b80f",
"blockNumber": 3068183,
"result": {
"gasUsed": "0x0",
"output": "0x"
},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x3321a7708b1083130bd78da0d62ead9f6683033231617c9d268e2c7e3fa6c104",
"transactionPosition": 3,
"type": "call"
},
{
"action": {
"from": "0x3b169a0fb55ea0b6bafe54c272b1fe4983742bf7",
"gas": "0x49b0b",
"init": "0x608060405234801561001057600080fd5b5060405161060a38038061060a833981018060405281019080805190602001909291908051820192919060200180519060200190929190805190602001909291908051906020019092919050505084848160008173ffffffffffffffffffffffffffffffffffffffff1614151515610116576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260248152602001807f496e76616c6964206d617374657220636f707920616464726573732070726f7681526020017f696465640000000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000815111156101a35773ffffffffffffffffffffffffffffffffffffffff60005416600080835160208501846127105a03f46040513d6000823e600082141561019f573d81fd5b5050505b5050600081111561036d57600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614156102b7578273ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f1935050505015156102b2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f206574686572000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b61036c565b6102d1828483610377640100000000026401000000009004565b151561036b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f20746f6b656e000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b5b5b5050505050610490565b600060608383604051602401808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040527fa9059cbb000000000000000000000000000
000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505090506000808251602084016000896127105a03f16040513d6000823e3d60008114610473576020811461047b5760009450610485565b829450610485565b8151158315171594505b505050509392505050565b61016b8061049f6000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d0410800290000000000000000000000008942595a2dc5181df0465af0d7be08c8f23c93af00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000d9e09beaeb338d81a7c5688358df0071d498811500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001b15f91a8c35300000000000000000000000000000000000000000000000000000000000001640ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000f763ea5fbb191d47dc4b083dcdc3cdfb586468f8000000000000000000000000ad25c9717d04c0a12086a1d352c1ccf4bf5fcbf80000000000000000000000000da7155692446c80a4e7ad72018e586f2
0fa3bfe000000000000000000000000bce0cc48ce44e0ac9ee38df4d586afbacef191fa0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"value": "0x0"
},
"blockHash": "0x03f9f64dfeb7807b5df608e6957dd4d521fd71685aac5533451d27f0abe03660",
"blockNumber": 3793534,
"result": {
"address": "0x61a7cc907c47c133d5ff5b685407201951fcbd08",
"code": "0x60806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d041080029",
"gasUsed": "0x4683f"
},
"subtraces": 2,
"traceAddress": [],
"transactionHash": "0x6c7e8f8778d33d81b29c4bd7526ee50a4cea340d69eed6c89ada4e6fab731789",
"transactionPosition": 1,
"type": "create"
},
...
]
|
gnosis/eth/ethereum_client.py
|
def trace_filter(self, from_block: int = 1, to_block: Optional[int] = None,
from_address: Optional[List[str]] = None, to_address: Optional[List[str]] = None,
after: Optional[int] = None, count: Optional[int] = None) -> List[Dict[str, any]]:
"""
:param from_block: Quantity or Tag - (optional) From this block. `0` is not working, it needs to be `>= 1`
:param to_block: Quantity or Tag - (optional) To this block.
:param from_address: Array - (optional) Sent from these addresses.
:param to_address: Address - (optional) Sent to these addresses.
:param after: Quantity - (optional) The offset trace number
:param count: Quantity - (optional) Integer number of traces to display in a batch.
:return:
[
{
"action": {
"callType": "call",
"from": "0x32be343b94f860124dc4fee278fdcbd38c102d88",
"gas": "0x4c40d",
"input": "0x",
"to": "0x8bbb73bcb5d553b5a556358d27625323fd781d37",
"value": "0x3f0650ec47fd240000"
},
"blockHash": "0x86df301bcdd8248d982dbf039f09faf792684e1aeee99d5b58b77d620008b80f",
"blockNumber": 3068183,
"result": {
"gasUsed": "0x0",
"output": "0x"
},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x3321a7708b1083130bd78da0d62ead9f6683033231617c9d268e2c7e3fa6c104",
"transactionPosition": 3,
"type": "call"
},
{
"action": {
"from": "0x3b169a0fb55ea0b6bafe54c272b1fe4983742bf7",
"gas": "0x49b0b",
"init": "0x608060405234801561001057600080fd5b5060405161060a38038061060a833981018060405281019080805190602001909291908051820192919060200180519060200190929190805190602001909291908051906020019092919050505084848160008173ffffffffffffffffffffffffffffffffffffffff1614151515610116576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260248152602001807f496e76616c6964206d617374657220636f707920616464726573732070726f7681526020017f696465640000000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000815111156101a35773ffffffffffffffffffffffffffffffffffffffff60005416600080835160208501846127105a03f46040513d6000823e600082141561019f573d81fd5b5050505b5050600081111561036d57600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614156102b7578273ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f1935050505015156102b2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f206574686572000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b61036c565b6102d1828483610377640100000000026401000000009004565b151561036b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f20746f6b656e000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b5b5b5050505050610490565b600060608383604051602401808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040527fa9059cbb000000000000000000000000000
000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505090506000808251602084016000896127105a03f16040513d6000823e3d60008114610473576020811461047b5760009450610485565b829450610485565b8151158315171594505b505050509392505050565b61016b8061049f6000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d0410800290000000000000000000000008942595a2dc5181df0465af0d7be08c8f23c93af00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000d9e09beaeb338d81a7c5688358df0071d498811500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001b15f91a8c35300000000000000000000000000000000000000000000000000000000000001640ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000f763ea5fbb191d47dc4b083dcdc3cdfb586468f8000000000000000000000000ad25c9717d04c0a12086a1d352c1ccf4bf5fcbf80000000000000000000000000da7155692446c80a4e7ad72018e586f2
0fa3bfe000000000000000000000000bce0cc48ce44e0ac9ee38df4d586afbacef191fa0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"value": "0x0"
},
"blockHash": "0x03f9f64dfeb7807b5df608e6957dd4d521fd71685aac5533451d27f0abe03660",
"blockNumber": 3793534,
"result": {
"address": "0x61a7cc907c47c133d5ff5b685407201951fcbd08",
"code": "0x60806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d041080029",
"gasUsed": "0x4683f"
},
"subtraces": 2,
"traceAddress": [],
"transactionHash": "0x6c7e8f8778d33d81b29c4bd7526ee50a4cea340d69eed6c89ada4e6fab731789",
"transactionPosition": 1,
"type": "create"
},
...
]
"""
assert from_address or to_address, 'You must provide at least `from_address` or `to_address`'
parameters = {}
if from_block:
parameters['fromBlock'] = '0x%x' % from_block
if to_block:
parameters['toBlock'] = '0x%x' % to_block
if from_address:
parameters['fromAddress'] = from_address
if to_address:
parameters['toAddress'] = to_address
if after:
parameters['after'] = after
if count:
parameters['count'] = count
try:
return self._decode_traces(self.slow_w3.parity.traceFilter(parameters))
except ParityTraceDecodeException as exc:
logger.warning('Problem decoding trace: %s - Retrying', exc)
return self._decode_traces(self.slow_w3.parity.traceFilter(parameters))
|
def trace_filter(self, from_block: int = 1, to_block: Optional[int] = None,
from_address: Optional[List[str]] = None, to_address: Optional[List[str]] = None,
after: Optional[int] = None, count: Optional[int] = None) -> List[Dict[str, any]]:
"""
:param from_block: Quantity or Tag - (optional) From this block. `0` is not working, it needs to be `>= 1`
:param to_block: Quantity or Tag - (optional) To this block.
:param from_address: Array - (optional) Sent from these addresses.
:param to_address: Address - (optional) Sent to these addresses.
:param after: Quantity - (optional) The offset trace number
:param count: Quantity - (optional) Integer number of traces to display in a batch.
:return:
[
{
"action": {
"callType": "call",
"from": "0x32be343b94f860124dc4fee278fdcbd38c102d88",
"gas": "0x4c40d",
"input": "0x",
"to": "0x8bbb73bcb5d553b5a556358d27625323fd781d37",
"value": "0x3f0650ec47fd240000"
},
"blockHash": "0x86df301bcdd8248d982dbf039f09faf792684e1aeee99d5b58b77d620008b80f",
"blockNumber": 3068183,
"result": {
"gasUsed": "0x0",
"output": "0x"
},
"subtraces": 0,
"traceAddress": [],
"transactionHash": "0x3321a7708b1083130bd78da0d62ead9f6683033231617c9d268e2c7e3fa6c104",
"transactionPosition": 3,
"type": "call"
},
{
"action": {
"from": "0x3b169a0fb55ea0b6bafe54c272b1fe4983742bf7",
"gas": "0x49b0b",
"init": "0x608060405234801561001057600080fd5b5060405161060a38038061060a833981018060405281019080805190602001909291908051820192919060200180519060200190929190805190602001909291908051906020019092919050505084848160008173ffffffffffffffffffffffffffffffffffffffff1614151515610116576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260248152602001807f496e76616c6964206d617374657220636f707920616464726573732070726f7681526020017f696465640000000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000815111156101a35773ffffffffffffffffffffffffffffffffffffffff60005416600080835160208501846127105a03f46040513d6000823e600082141561019f573d81fd5b5050505b5050600081111561036d57600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614156102b7578273ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f1935050505015156102b2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f206574686572000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b61036c565b6102d1828483610377640100000000026401000000009004565b151561036b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f20746f6b656e000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b5b5b5050505050610490565b600060608383604051602401808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040527fa9059cbb000000000000000000000000000
000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505090506000808251602084016000896127105a03f16040513d6000823e3d60008114610473576020811461047b5760009450610485565b829450610485565b8151158315171594505b505050509392505050565b61016b8061049f6000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d0410800290000000000000000000000008942595a2dc5181df0465af0d7be08c8f23c93af00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000d9e09beaeb338d81a7c5688358df0071d498811500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001b15f91a8c35300000000000000000000000000000000000000000000000000000000000001640ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000f763ea5fbb191d47dc4b083dcdc3cdfb586468f8000000000000000000000000ad25c9717d04c0a12086a1d352c1ccf4bf5fcbf80000000000000000000000000da7155692446c80a4e7ad72018e586f2
0fa3bfe000000000000000000000000bce0cc48ce44e0ac9ee38df4d586afbacef191fa0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"value": "0x0"
},
"blockHash": "0x03f9f64dfeb7807b5df608e6957dd4d521fd71685aac5533451d27f0abe03660",
"blockNumber": 3793534,
"result": {
"address": "0x61a7cc907c47c133d5ff5b685407201951fcbd08",
"code": "0x60806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d041080029",
"gasUsed": "0x4683f"
},
"subtraces": 2,
"traceAddress": [],
"transactionHash": "0x6c7e8f8778d33d81b29c4bd7526ee50a4cea340d69eed6c89ada4e6fab731789",
"transactionPosition": 1,
"type": "create"
},
...
]
"""
assert from_address or to_address, 'You must provide at least `from_address` or `to_address`'
parameters = {}
if from_block:
parameters['fromBlock'] = '0x%x' % from_block
if to_block:
parameters['toBlock'] = '0x%x' % to_block
if from_address:
parameters['fromAddress'] = from_address
if to_address:
parameters['toAddress'] = to_address
if after:
parameters['after'] = after
if count:
parameters['count'] = count
try:
return self._decode_traces(self.slow_w3.parity.traceFilter(parameters))
except ParityTraceDecodeException as exc:
logger.warning('Problem decoding trace: %s - Retrying', exc)
return self._decode_traces(self.slow_w3.parity.traceFilter(parameters))
|
[
":",
"param",
"from_block",
":",
"Quantity",
"or",
"Tag",
"-",
"(",
"optional",
")",
"From",
"this",
"block",
".",
"0",
"is",
"not",
"working",
"it",
"needs",
"to",
"be",
">",
"=",
"1",
":",
"param",
"to_block",
":",
"Quantity",
"or",
"Tag",
"-",
"(",
"optional",
")",
"To",
"this",
"block",
".",
":",
"param",
"from_address",
":",
"Array",
"-",
"(",
"optional",
")",
"Sent",
"from",
"these",
"addresses",
".",
":",
"param",
"to_address",
":",
"Address",
"-",
"(",
"optional",
")",
"Sent",
"to",
"these",
"addresses",
".",
":",
"param",
"after",
":",
"Quantity",
"-",
"(",
"optional",
")",
"The",
"offset",
"trace",
"number",
":",
"param",
"count",
":",
"Quantity",
"-",
"(",
"optional",
")",
"Integer",
"number",
"of",
"traces",
"to",
"display",
"in",
"a",
"batch",
".",
":",
"return",
":",
"[",
"{",
"action",
":",
"{",
"callType",
":",
"call",
"from",
":",
"0x32be343b94f860124dc4fee278fdcbd38c102d88",
"gas",
":",
"0x4c40d",
"input",
":",
"0x",
"to",
":",
"0x8bbb73bcb5d553b5a556358d27625323fd781d37",
"value",
":",
"0x3f0650ec47fd240000",
"}",
"blockHash",
":",
"0x86df301bcdd8248d982dbf039f09faf792684e1aeee99d5b58b77d620008b80f",
"blockNumber",
":",
"3068183",
"result",
":",
"{",
"gasUsed",
":",
"0x0",
"output",
":",
"0x",
"}",
"subtraces",
":",
"0",
"traceAddress",
":",
"[]",
"transactionHash",
":",
"0x3321a7708b1083130bd78da0d62ead9f6683033231617c9d268e2c7e3fa6c104",
"transactionPosition",
":",
"3",
"type",
":",
"call",
"}",
"{",
"action",
":",
"{",
"from",
":",
"0x3b169a0fb55ea0b6bafe54c272b1fe4983742bf7",
"gas",
":",
"0x49b0b",
"init",
":",
"0x608060405234801561001057600080fd5b5060405161060a38038061060a833981018060405281019080805190602001909291908051820192919060200180519060200190929190805190602001909291908051906020019092919050505084848160008173ffffffffffffffffffffffffffffffffffffffff1614151515610116576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260248152602001807f496e76616c6964206d617374657220636f707920616464726573732070726f7681526020017f696465640000000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b806000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550506000815111156101a35773ffffffffffffffffffffffffffffffffffffffff60005416600080835160208501846127105a03f46040513d6000823e600082141561019f573d81fd5b5050505b5050600081111561036d57600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614156102b7578273ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f1935050505015156102b2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f206574686572000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b61036c565b6102d1828483610377640100000000026401000000009004565b151561036b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001807f436f756c64206e6f74207061792073616665206372656174696f6e207769746881526020017f20746f6b656e000000000000000000000000000000000000000000000000000081525060400191505060405180910390fd5b5b5b5050505050610490565b600060608383604051602401808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040527fa9059cbb00000000000000000000000000000000000
0000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff838183161783525050505090506000808251602084016000896127105a03f16040513d6000823e3d60008114610473576020811461047b5760009450610485565b829450610485565b8151158315171594505b505050509392505050565b61016b8061049f6000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d0410800290000000000000000000000008942595a2dc5181df0465af0d7be08c8f23c93af00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000d9e09beaeb338d81a7c5688358df0071d498811500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001b15f91a8c35300000000000000000000000000000000000000000000000000000000000001640ec78d9e00000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000f763ea5fbb191d47dc4b083dcdc3cdfb586468f8000000000000000000000000ad25c9717d04c0a12086a1d352c1ccf4bf5fcbf80000000000000000000000000da7155692446c80a4e7ad72018e586f20fa3bfe0
00000000000000000000000bce0cc48ce44e0ac9ee38df4d586afbacef191fa0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"value",
":",
"0x0",
"}",
"blockHash",
":",
"0x03f9f64dfeb7807b5df608e6957dd4d521fd71685aac5533451d27f0abe03660",
"blockNumber",
":",
"3793534",
"result",
":",
"{",
"address",
":",
"0x61a7cc907c47c133d5ff5b685407201951fcbd08",
"code",
":",
"0x60806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680634555d5c91461008b5780635c60da1b146100b6575b73ffffffffffffffffffffffffffffffffffffffff600054163660008037600080366000845af43d6000803e6000811415610086573d6000fd5b3d6000f35b34801561009757600080fd5b506100a061010d565b6040518082815260200191505060405180910390f35b3480156100c257600080fd5b506100cb610116565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b60006002905090565b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050905600a165627a7a7230582007fffd557dfc8c4d2fdf56ba6381a6ce5b65b6260e1492d87f26c6d4f1d041080029",
"gasUsed",
":",
"0x4683f",
"}",
"subtraces",
":",
"2",
"traceAddress",
":",
"[]",
"transactionHash",
":",
"0x6c7e8f8778d33d81b29c4bd7526ee50a4cea340d69eed6c89ada4e6fab731789",
"transactionPosition",
":",
"1",
"type",
":",
"create",
"}",
"...",
"]"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L252-L327
|
[
"def",
"trace_filter",
"(",
"self",
",",
"from_block",
":",
"int",
"=",
"1",
",",
"to_block",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"from_address",
":",
"Optional",
"[",
"List",
"[",
"str",
"]",
"]",
"=",
"None",
",",
"to_address",
":",
"Optional",
"[",
"List",
"[",
"str",
"]",
"]",
"=",
"None",
",",
"after",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"count",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
")",
"->",
"List",
"[",
"Dict",
"[",
"str",
",",
"any",
"]",
"]",
":",
"assert",
"from_address",
"or",
"to_address",
",",
"'You must provide at least `from_address` or `to_address`'",
"parameters",
"=",
"{",
"}",
"if",
"from_block",
":",
"parameters",
"[",
"'fromBlock'",
"]",
"=",
"'0x%x'",
"%",
"from_block",
"if",
"to_block",
":",
"parameters",
"[",
"'toBlock'",
"]",
"=",
"'0x%x'",
"%",
"to_block",
"if",
"from_address",
":",
"parameters",
"[",
"'fromAddress'",
"]",
"=",
"from_address",
"if",
"to_address",
":",
"parameters",
"[",
"'toAddress'",
"]",
"=",
"to_address",
"if",
"after",
":",
"parameters",
"[",
"'after'",
"]",
"=",
"after",
"if",
"count",
":",
"parameters",
"[",
"'count'",
"]",
"=",
"count",
"try",
":",
"return",
"self",
".",
"_decode_traces",
"(",
"self",
".",
"slow_w3",
".",
"parity",
".",
"traceFilter",
"(",
"parameters",
")",
")",
"except",
"ParityTraceDecodeException",
"as",
"exc",
":",
"logger",
".",
"warning",
"(",
"'Problem decoding trace: %s - Retrying'",
",",
"exc",
")",
"return",
"self",
".",
"_decode_traces",
"(",
"self",
".",
"slow_w3",
".",
"parity",
".",
"traceFilter",
"(",
"parameters",
")",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
EthereumClient.get_slow_provider
|
Get web3 provider for slow queries. Default `HTTPProvider` timeouts after 10 seconds
:param provider: Configured Web3 provider
:param timeout: Timeout to configure for internal requests (default is 10)
:return: A new web3 provider with the `slow_provider_timeout`
|
gnosis/eth/ethereum_client.py
|
def get_slow_provider(self, timeout: int):
"""
Get web3 provider for slow queries. Default `HTTPProvider` timeouts after 10 seconds
:param provider: Configured Web3 provider
:param timeout: Timeout to configure for internal requests (default is 10)
:return: A new web3 provider with the `slow_provider_timeout`
"""
if isinstance(self.w3_provider, AutoProvider):
return HTTPProvider(endpoint_uri='http://localhost:8545',
request_kwargs={'timeout': timeout})
elif isinstance(self.w3_provider, HTTPProvider):
return HTTPProvider(endpoint_uri=self.w3_provider.endpoint_uri,
request_kwargs={'timeout': timeout})
else:
return self.w3_provider
|
def get_slow_provider(self, timeout: int):
"""
Get web3 provider for slow queries. Default `HTTPProvider` timeouts after 10 seconds
:param provider: Configured Web3 provider
:param timeout: Timeout to configure for internal requests (default is 10)
:return: A new web3 provider with the `slow_provider_timeout`
"""
if isinstance(self.w3_provider, AutoProvider):
return HTTPProvider(endpoint_uri='http://localhost:8545',
request_kwargs={'timeout': timeout})
elif isinstance(self.w3_provider, HTTPProvider):
return HTTPProvider(endpoint_uri=self.w3_provider.endpoint_uri,
request_kwargs={'timeout': timeout})
else:
return self.w3_provider
|
[
"Get",
"web3",
"provider",
"for",
"slow",
"queries",
".",
"Default",
"HTTPProvider",
"timeouts",
"after",
"10",
"seconds",
":",
"param",
"provider",
":",
"Configured",
"Web3",
"provider",
":",
"param",
"timeout",
":",
"Timeout",
"to",
"configure",
"for",
"internal",
"requests",
"(",
"default",
"is",
"10",
")",
":",
"return",
":",
"A",
"new",
"web3",
"provider",
"with",
"the",
"slow_provider_timeout"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L350-L364
|
[
"def",
"get_slow_provider",
"(",
"self",
",",
"timeout",
":",
"int",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"w3_provider",
",",
"AutoProvider",
")",
":",
"return",
"HTTPProvider",
"(",
"endpoint_uri",
"=",
"'http://localhost:8545'",
",",
"request_kwargs",
"=",
"{",
"'timeout'",
":",
"timeout",
"}",
")",
"elif",
"isinstance",
"(",
"self",
".",
"w3_provider",
",",
"HTTPProvider",
")",
":",
"return",
"HTTPProvider",
"(",
"endpoint_uri",
"=",
"self",
".",
"w3_provider",
".",
"endpoint_uri",
",",
"request_kwargs",
"=",
"{",
"'timeout'",
":",
"timeout",
"}",
")",
"else",
":",
"return",
"self",
".",
"w3_provider"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
EthereumClient.send_unsigned_transaction
|
Send a tx using an unlocked public key in the node or a private key. Both `public_key` and
`private_key` cannot be `None`
:param tx:
:param private_key:
:param public_key:
:param retry: Retry if a problem with nonce is found
:param block_identifier:
:return: tx hash
|
gnosis/eth/ethereum_client.py
|
def send_unsigned_transaction(self, tx: Dict[str, any], private_key: Optional[str] = None,
public_key: Optional[str] = None, retry: bool = False,
block_identifier: Optional[str] = None) -> bytes:
"""
Send a tx using an unlocked public key in the node or a private key. Both `public_key` and
`private_key` cannot be `None`
:param tx:
:param private_key:
:param public_key:
:param retry: Retry if a problem with nonce is found
:param block_identifier:
:return: tx hash
"""
if private_key:
address = self.private_key_to_address(private_key)
elif public_key:
address = public_key
else:
logger.error('No ethereum account provided. Need a public_key or private_key')
raise ValueError("Ethereum account was not configured or unlocked in the node")
if tx.get('nonce') is None:
tx['nonce'] = self.get_nonce_for_account(address, block_identifier=block_identifier)
number_errors = 5
while number_errors >= 0:
try:
if private_key:
signed_tx = self.w3.eth.account.signTransaction(tx, private_key=private_key)
logger.debug('Sending %d wei from %s to %s', tx['value'], address, tx['to'])
try:
return self.send_raw_transaction(signed_tx.rawTransaction)
except TransactionAlreadyImported as e:
# Sometimes Parity 2.2.11 fails with Transaction already imported, even if it's not, but it's
# processed
tx_hash = signed_tx.hash
logger.error('Transaction with tx-hash=%s already imported: %s' % (tx_hash.hex(), str(e)))
return tx_hash
elif public_key:
tx['from'] = address
return self.send_transaction(tx)
except ReplacementTransactionUnderpriced as e:
if not retry or not number_errors:
raise e
logger.error('address=%s Tx with nonce=%d was already sent, retrying with nonce + 1',
address, tx['nonce'])
tx['nonce'] += 1
except InvalidNonce as e:
if not retry or not number_errors:
raise e
logger.error('address=%s Tx with invalid nonce=%d, retrying recovering nonce again',
address, tx['nonce'])
tx['nonce'] = self.get_nonce_for_account(address, block_identifier=block_identifier)
number_errors -= 1
|
def send_unsigned_transaction(self, tx: Dict[str, any], private_key: Optional[str] = None,
public_key: Optional[str] = None, retry: bool = False,
block_identifier: Optional[str] = None) -> bytes:
"""
Send a tx using an unlocked public key in the node or a private key. Both `public_key` and
`private_key` cannot be `None`
:param tx:
:param private_key:
:param public_key:
:param retry: Retry if a problem with nonce is found
:param block_identifier:
:return: tx hash
"""
if private_key:
address = self.private_key_to_address(private_key)
elif public_key:
address = public_key
else:
logger.error('No ethereum account provided. Need a public_key or private_key')
raise ValueError("Ethereum account was not configured or unlocked in the node")
if tx.get('nonce') is None:
tx['nonce'] = self.get_nonce_for_account(address, block_identifier=block_identifier)
number_errors = 5
while number_errors >= 0:
try:
if private_key:
signed_tx = self.w3.eth.account.signTransaction(tx, private_key=private_key)
logger.debug('Sending %d wei from %s to %s', tx['value'], address, tx['to'])
try:
return self.send_raw_transaction(signed_tx.rawTransaction)
except TransactionAlreadyImported as e:
# Sometimes Parity 2.2.11 fails with Transaction already imported, even if it's not, but it's
# processed
tx_hash = signed_tx.hash
logger.error('Transaction with tx-hash=%s already imported: %s' % (tx_hash.hex(), str(e)))
return tx_hash
elif public_key:
tx['from'] = address
return self.send_transaction(tx)
except ReplacementTransactionUnderpriced as e:
if not retry or not number_errors:
raise e
logger.error('address=%s Tx with nonce=%d was already sent, retrying with nonce + 1',
address, tx['nonce'])
tx['nonce'] += 1
except InvalidNonce as e:
if not retry or not number_errors:
raise e
logger.error('address=%s Tx with invalid nonce=%d, retrying recovering nonce again',
address, tx['nonce'])
tx['nonce'] = self.get_nonce_for_account(address, block_identifier=block_identifier)
number_errors -= 1
|
[
"Send",
"a",
"tx",
"using",
"an",
"unlocked",
"public",
"key",
"in",
"the",
"node",
"or",
"a",
"private",
"key",
".",
"Both",
"public_key",
"and",
"private_key",
"cannot",
"be",
"None",
":",
"param",
"tx",
":",
":",
"param",
"private_key",
":",
":",
"param",
"public_key",
":",
":",
"param",
"retry",
":",
"Retry",
"if",
"a",
"problem",
"with",
"nonce",
"is",
"found",
":",
"param",
"block_identifier",
":",
":",
"return",
":",
"tx",
"hash"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L463-L516
|
[
"def",
"send_unsigned_transaction",
"(",
"self",
",",
"tx",
":",
"Dict",
"[",
"str",
",",
"any",
"]",
",",
"private_key",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"public_key",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"retry",
":",
"bool",
"=",
"False",
",",
"block_identifier",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"bytes",
":",
"if",
"private_key",
":",
"address",
"=",
"self",
".",
"private_key_to_address",
"(",
"private_key",
")",
"elif",
"public_key",
":",
"address",
"=",
"public_key",
"else",
":",
"logger",
".",
"error",
"(",
"'No ethereum account provided. Need a public_key or private_key'",
")",
"raise",
"ValueError",
"(",
"\"Ethereum account was not configured or unlocked in the node\"",
")",
"if",
"tx",
".",
"get",
"(",
"'nonce'",
")",
"is",
"None",
":",
"tx",
"[",
"'nonce'",
"]",
"=",
"self",
".",
"get_nonce_for_account",
"(",
"address",
",",
"block_identifier",
"=",
"block_identifier",
")",
"number_errors",
"=",
"5",
"while",
"number_errors",
">=",
"0",
":",
"try",
":",
"if",
"private_key",
":",
"signed_tx",
"=",
"self",
".",
"w3",
".",
"eth",
".",
"account",
".",
"signTransaction",
"(",
"tx",
",",
"private_key",
"=",
"private_key",
")",
"logger",
".",
"debug",
"(",
"'Sending %d wei from %s to %s'",
",",
"tx",
"[",
"'value'",
"]",
",",
"address",
",",
"tx",
"[",
"'to'",
"]",
")",
"try",
":",
"return",
"self",
".",
"send_raw_transaction",
"(",
"signed_tx",
".",
"rawTransaction",
")",
"except",
"TransactionAlreadyImported",
"as",
"e",
":",
"# Sometimes Parity 2.2.11 fails with Transaction already imported, even if it's not, but it's",
"# processed",
"tx_hash",
"=",
"signed_tx",
".",
"hash",
"logger",
".",
"error",
"(",
"'Transaction with tx-hash=%s already imported: %s'",
"%",
"(",
"tx_hash",
".",
"hex",
"(",
")",
",",
"str",
"(",
"e",
")",
")",
")",
"return",
"tx_hash",
"elif",
"public_key",
":",
"tx",
"[",
"'from'",
"]",
"=",
"address",
"return",
"self",
".",
"send_transaction",
"(",
"tx",
")",
"except",
"ReplacementTransactionUnderpriced",
"as",
"e",
":",
"if",
"not",
"retry",
"or",
"not",
"number_errors",
":",
"raise",
"e",
"logger",
".",
"error",
"(",
"'address=%s Tx with nonce=%d was already sent, retrying with nonce + 1'",
",",
"address",
",",
"tx",
"[",
"'nonce'",
"]",
")",
"tx",
"[",
"'nonce'",
"]",
"+=",
"1",
"except",
"InvalidNonce",
"as",
"e",
":",
"if",
"not",
"retry",
"or",
"not",
"number_errors",
":",
"raise",
"e",
"logger",
".",
"error",
"(",
"'address=%s Tx with invalid nonce=%d, retrying recovering nonce again'",
",",
"address",
",",
"tx",
"[",
"'nonce'",
"]",
")",
"tx",
"[",
"'nonce'",
"]",
"=",
"self",
".",
"get_nonce_for_account",
"(",
"address",
",",
"block_identifier",
"=",
"block_identifier",
")",
"number_errors",
"-=",
"1"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
EthereumClient.send_eth_to
|
Send ether using configured account
:param to: to
:param gas_price: gas_price
:param value: value(wei)
:param gas: gas, defaults to 22000
:param retry: Retry if a problem is found
:param block_identifier: None default, 'pending' not confirmed txs
:return: tx_hash
|
gnosis/eth/ethereum_client.py
|
def send_eth_to(self, private_key: str, to: str, gas_price: int, value: int, gas: int=22000,
retry: bool = False, block_identifier=None, max_eth_to_send: int = 0) -> bytes:
"""
Send ether using configured account
:param to: to
:param gas_price: gas_price
:param value: value(wei)
:param gas: gas, defaults to 22000
:param retry: Retry if a problem is found
:param block_identifier: None default, 'pending' not confirmed txs
:return: tx_hash
"""
assert check_checksum(to)
if max_eth_to_send and value > self.w3.toWei(max_eth_to_send, 'ether'):
raise EtherLimitExceeded('%d is bigger than %f' % (value, max_eth_to_send))
tx = {
'to': to,
'value': value,
'gas': gas,
'gasPrice': gas_price,
}
return self.send_unsigned_transaction(tx, private_key=private_key, retry=retry,
block_identifier=block_identifier)
|
def send_eth_to(self, private_key: str, to: str, gas_price: int, value: int, gas: int=22000,
retry: bool = False, block_identifier=None, max_eth_to_send: int = 0) -> bytes:
"""
Send ether using configured account
:param to: to
:param gas_price: gas_price
:param value: value(wei)
:param gas: gas, defaults to 22000
:param retry: Retry if a problem is found
:param block_identifier: None default, 'pending' not confirmed txs
:return: tx_hash
"""
assert check_checksum(to)
if max_eth_to_send and value > self.w3.toWei(max_eth_to_send, 'ether'):
raise EtherLimitExceeded('%d is bigger than %f' % (value, max_eth_to_send))
tx = {
'to': to,
'value': value,
'gas': gas,
'gasPrice': gas_price,
}
return self.send_unsigned_transaction(tx, private_key=private_key, retry=retry,
block_identifier=block_identifier)
|
[
"Send",
"ether",
"using",
"configured",
"account",
":",
"param",
"to",
":",
"to",
":",
"param",
"gas_price",
":",
"gas_price",
":",
"param",
"value",
":",
"value",
"(",
"wei",
")",
":",
"param",
"gas",
":",
"gas",
"defaults",
"to",
"22000",
":",
"param",
"retry",
":",
"Retry",
"if",
"a",
"problem",
"is",
"found",
":",
"param",
"block_identifier",
":",
"None",
"default",
"pending",
"not",
"confirmed",
"txs",
":",
"return",
":",
"tx_hash"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L518-L543
|
[
"def",
"send_eth_to",
"(",
"self",
",",
"private_key",
":",
"str",
",",
"to",
":",
"str",
",",
"gas_price",
":",
"int",
",",
"value",
":",
"int",
",",
"gas",
":",
"int",
"=",
"22000",
",",
"retry",
":",
"bool",
"=",
"False",
",",
"block_identifier",
"=",
"None",
",",
"max_eth_to_send",
":",
"int",
"=",
"0",
")",
"->",
"bytes",
":",
"assert",
"check_checksum",
"(",
"to",
")",
"if",
"max_eth_to_send",
"and",
"value",
">",
"self",
".",
"w3",
".",
"toWei",
"(",
"max_eth_to_send",
",",
"'ether'",
")",
":",
"raise",
"EtherLimitExceeded",
"(",
"'%d is bigger than %f'",
"%",
"(",
"value",
",",
"max_eth_to_send",
")",
")",
"tx",
"=",
"{",
"'to'",
":",
"to",
",",
"'value'",
":",
"value",
",",
"'gas'",
":",
"gas",
",",
"'gasPrice'",
":",
"gas_price",
",",
"}",
"return",
"self",
".",
"send_unsigned_transaction",
"(",
"tx",
",",
"private_key",
"=",
"private_key",
",",
"retry",
"=",
"retry",
",",
"block_identifier",
"=",
"block_identifier",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
EthereumClient.check_tx_with_confirmations
|
Check tx hash and make sure it has the confirmations required
:param w3: Web3 instance
:param tx_hash: Hash of the tx
:param confirmations: Minimum number of confirmations required
:return: True if tx was mined with the number of confirmations required, False otherwise
|
gnosis/eth/ethereum_client.py
|
def check_tx_with_confirmations(self, tx_hash: str, confirmations: int) -> bool:
"""
Check tx hash and make sure it has the confirmations required
:param w3: Web3 instance
:param tx_hash: Hash of the tx
:param confirmations: Minimum number of confirmations required
:return: True if tx was mined with the number of confirmations required, False otherwise
"""
tx_receipt = self.w3.eth.getTransactionReceipt(tx_hash)
if not tx_receipt or tx_receipt['blockNumber'] is None:
# If tx_receipt exists but blockNumber is None, tx is still pending (just Parity)
return False
else:
return (self.w3.eth.blockNumber - tx_receipt['blockNumber']) >= confirmations
|
def check_tx_with_confirmations(self, tx_hash: str, confirmations: int) -> bool:
"""
Check tx hash and make sure it has the confirmations required
:param w3: Web3 instance
:param tx_hash: Hash of the tx
:param confirmations: Minimum number of confirmations required
:return: True if tx was mined with the number of confirmations required, False otherwise
"""
tx_receipt = self.w3.eth.getTransactionReceipt(tx_hash)
if not tx_receipt or tx_receipt['blockNumber'] is None:
# If tx_receipt exists but blockNumber is None, tx is still pending (just Parity)
return False
else:
return (self.w3.eth.blockNumber - tx_receipt['blockNumber']) >= confirmations
|
[
"Check",
"tx",
"hash",
"and",
"make",
"sure",
"it",
"has",
"the",
"confirmations",
"required",
":",
"param",
"w3",
":",
"Web3",
"instance",
":",
"param",
"tx_hash",
":",
"Hash",
"of",
"the",
"tx",
":",
"param",
"confirmations",
":",
"Minimum",
"number",
"of",
"confirmations",
"required",
":",
"return",
":",
"True",
"if",
"tx",
"was",
"mined",
"with",
"the",
"number",
"of",
"confirmations",
"required",
"False",
"otherwise"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L545-L558
|
[
"def",
"check_tx_with_confirmations",
"(",
"self",
",",
"tx_hash",
":",
"str",
",",
"confirmations",
":",
"int",
")",
"->",
"bool",
":",
"tx_receipt",
"=",
"self",
".",
"w3",
".",
"eth",
".",
"getTransactionReceipt",
"(",
"tx_hash",
")",
"if",
"not",
"tx_receipt",
"or",
"tx_receipt",
"[",
"'blockNumber'",
"]",
"is",
"None",
":",
"# If tx_receipt exists but blockNumber is None, tx is still pending (just Parity)",
"return",
"False",
"else",
":",
"return",
"(",
"self",
".",
"w3",
".",
"eth",
".",
"blockNumber",
"-",
"tx_receipt",
"[",
"'blockNumber'",
"]",
")",
">=",
"confirmations"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
EthereumClient.get_signing_address
|
:return: checksum encoded address starting by 0x, for example `0x568c93675A8dEb121700A6FAdDdfE7DFAb66Ae4A`
:rtype: str
|
gnosis/eth/ethereum_client.py
|
def get_signing_address(hash: Union[bytes, str], v: int, r: int, s: int) -> str:
"""
:return: checksum encoded address starting by 0x, for example `0x568c93675A8dEb121700A6FAdDdfE7DFAb66Ae4A`
:rtype: str
"""
encoded_64_address = ecrecover_to_pub(hash, v, r, s)
address_bytes = sha3(encoded_64_address)[-20:]
return checksum_encode(address_bytes)
|
def get_signing_address(hash: Union[bytes, str], v: int, r: int, s: int) -> str:
"""
:return: checksum encoded address starting by 0x, for example `0x568c93675A8dEb121700A6FAdDdfE7DFAb66Ae4A`
:rtype: str
"""
encoded_64_address = ecrecover_to_pub(hash, v, r, s)
address_bytes = sha3(encoded_64_address)[-20:]
return checksum_encode(address_bytes)
|
[
":",
"return",
":",
"checksum",
"encoded",
"address",
"starting",
"by",
"0x",
"for",
"example",
"0x568c93675A8dEb121700A6FAdDdfE7DFAb66Ae4A",
":",
"rtype",
":",
"str"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/ethereum_client.py#L565-L572
|
[
"def",
"get_signing_address",
"(",
"hash",
":",
"Union",
"[",
"bytes",
",",
"str",
"]",
",",
"v",
":",
"int",
",",
"r",
":",
"int",
",",
"s",
":",
"int",
")",
"->",
"str",
":",
"encoded_64_address",
"=",
"ecrecover_to_pub",
"(",
"hash",
",",
"v",
",",
"r",
",",
"s",
")",
"address_bytes",
"=",
"sha3",
"(",
"encoded_64_address",
")",
"[",
"-",
"20",
":",
"]",
"return",
"checksum_encode",
"(",
"address_bytes",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
test
|
generate_address_2
|
Generates an address for a contract created using CREATE2.
:param from_: The address which is creating this new address (need to be 20 bytes)
:param salt: A salt (32 bytes)
:param init_code: A init code of the contract being created
:return: Address of the new contract
|
gnosis/eth/utils.py
|
def generate_address_2(from_: Union[str, bytes], salt: Union[str, bytes], init_code: [str, bytes]) -> str:
"""
Generates an address for a contract created using CREATE2.
:param from_: The address which is creating this new address (need to be 20 bytes)
:param salt: A salt (32 bytes)
:param init_code: A init code of the contract being created
:return: Address of the new contract
"""
from_ = HexBytes(from_)
salt = HexBytes(salt)
init_code = HexBytes(init_code)
assert len(from_) == 20, "Address %s is not valid. Must be 20 bytes" % from_
assert len(salt) == 32, "Salt %s is not valid. Must be 32 bytes" % salt
assert len(init_code) > 0, "Init code %s is not valid" % init_code
init_code_hash = Web3.sha3(init_code)
contract_address = Web3.sha3(HexBytes('ff') + from_ + salt + init_code_hash)
return Web3.toChecksumAddress(contract_address[12:])
|
def generate_address_2(from_: Union[str, bytes], salt: Union[str, bytes], init_code: [str, bytes]) -> str:
"""
Generates an address for a contract created using CREATE2.
:param from_: The address which is creating this new address (need to be 20 bytes)
:param salt: A salt (32 bytes)
:param init_code: A init code of the contract being created
:return: Address of the new contract
"""
from_ = HexBytes(from_)
salt = HexBytes(salt)
init_code = HexBytes(init_code)
assert len(from_) == 20, "Address %s is not valid. Must be 20 bytes" % from_
assert len(salt) == 32, "Salt %s is not valid. Must be 32 bytes" % salt
assert len(init_code) > 0, "Init code %s is not valid" % init_code
init_code_hash = Web3.sha3(init_code)
contract_address = Web3.sha3(HexBytes('ff') + from_ + salt + init_code_hash)
return Web3.toChecksumAddress(contract_address[12:])
|
[
"Generates",
"an",
"address",
"for",
"a",
"contract",
"created",
"using",
"CREATE2",
".",
":",
"param",
"from_",
":",
"The",
"address",
"which",
"is",
"creating",
"this",
"new",
"address",
"(",
"need",
"to",
"be",
"20",
"bytes",
")",
":",
"param",
"salt",
":",
"A",
"salt",
"(",
"32",
"bytes",
")",
":",
"param",
"init_code",
":",
"A",
"init",
"code",
"of",
"the",
"contract",
"being",
"created",
":",
"return",
":",
"Address",
"of",
"the",
"new",
"contract"
] |
gnosis/gnosis-py
|
python
|
https://github.com/gnosis/gnosis-py/blob/2a9a5d75a375fc9813ac04df133e6910c82f9d49/gnosis/eth/utils.py#L25-L44
|
[
"def",
"generate_address_2",
"(",
"from_",
":",
"Union",
"[",
"str",
",",
"bytes",
"]",
",",
"salt",
":",
"Union",
"[",
"str",
",",
"bytes",
"]",
",",
"init_code",
":",
"[",
"str",
",",
"bytes",
"]",
")",
"->",
"str",
":",
"from_",
"=",
"HexBytes",
"(",
"from_",
")",
"salt",
"=",
"HexBytes",
"(",
"salt",
")",
"init_code",
"=",
"HexBytes",
"(",
"init_code",
")",
"assert",
"len",
"(",
"from_",
")",
"==",
"20",
",",
"\"Address %s is not valid. Must be 20 bytes\"",
"%",
"from_",
"assert",
"len",
"(",
"salt",
")",
"==",
"32",
",",
"\"Salt %s is not valid. Must be 32 bytes\"",
"%",
"salt",
"assert",
"len",
"(",
"init_code",
")",
">",
"0",
",",
"\"Init code %s is not valid\"",
"%",
"init_code",
"init_code_hash",
"=",
"Web3",
".",
"sha3",
"(",
"init_code",
")",
"contract_address",
"=",
"Web3",
".",
"sha3",
"(",
"HexBytes",
"(",
"'ff'",
")",
"+",
"from_",
"+",
"salt",
"+",
"init_code_hash",
")",
"return",
"Web3",
".",
"toChecksumAddress",
"(",
"contract_address",
"[",
"12",
":",
"]",
")"
] |
2a9a5d75a375fc9813ac04df133e6910c82f9d49
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.