id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
list | docstring
stringlengths 3
17.3k
| docstring_tokens
list | sha
stringlengths 40
40
| url
stringlengths 87
242
|
|---|---|---|---|---|---|---|---|---|---|---|---|
17,700
|
bids-standard/pybids
|
bids/analysis/hrf.py
|
_hrf_kernel
|
def _hrf_kernel(hrf_model, tr, oversampling=50, fir_delays=None):
""" Given the specification of the hemodynamic model and time parameters,
return the list of matching kernels
Parameters
----------
hrf_model : string or None,
identifier of the hrf model
tr : float
the repetition time in seconds
oversampling : int, optional
temporal oversampling factor to have a smooth hrf
fir_delays : list of floats,
list of delays for finite impulse response models
Returns
-------
hkernel : list of arrays
samples of the hrf (the number depends on the hrf_model used)
"""
acceptable_hrfs = [
'spm', 'spm + derivative', 'spm + derivative + dispersion', 'fir',
'glover', 'glover + derivative', 'glover + derivative + dispersion',
None]
if hrf_model == 'spm':
hkernel = [spm_hrf(tr, oversampling)]
elif hrf_model == 'spm + derivative':
hkernel = [spm_hrf(tr, oversampling),
spm_time_derivative(tr, oversampling)]
elif hrf_model == 'spm + derivative + dispersion':
hkernel = [spm_hrf(tr, oversampling),
spm_time_derivative(tr, oversampling),
spm_dispersion_derivative(tr, oversampling)]
elif hrf_model == 'glover':
hkernel = [glover_hrf(tr, oversampling)]
elif hrf_model == 'glover + derivative':
hkernel = [glover_hrf(tr, oversampling),
glover_time_derivative(tr, oversampling)]
elif hrf_model == 'glover + derivative + dispersion':
hkernel = [glover_hrf(tr, oversampling),
glover_time_derivative(tr, oversampling),
glover_dispersion_derivative(tr, oversampling)]
elif hrf_model == 'fir':
hkernel = [np.hstack((np.zeros(f * oversampling),
np.ones(oversampling)))
for f in fir_delays]
elif hrf_model is None:
hkernel = [np.hstack((1, np.zeros(oversampling - 1)))]
else:
raise ValueError('"{0}" is not a known hrf model. Use one of {1}'.
format(hrf_model, acceptable_hrfs))
return hkernel
|
python
|
def _hrf_kernel(hrf_model, tr, oversampling=50, fir_delays=None):
""" Given the specification of the hemodynamic model and time parameters,
return the list of matching kernels
Parameters
----------
hrf_model : string or None,
identifier of the hrf model
tr : float
the repetition time in seconds
oversampling : int, optional
temporal oversampling factor to have a smooth hrf
fir_delays : list of floats,
list of delays for finite impulse response models
Returns
-------
hkernel : list of arrays
samples of the hrf (the number depends on the hrf_model used)
"""
acceptable_hrfs = [
'spm', 'spm + derivative', 'spm + derivative + dispersion', 'fir',
'glover', 'glover + derivative', 'glover + derivative + dispersion',
None]
if hrf_model == 'spm':
hkernel = [spm_hrf(tr, oversampling)]
elif hrf_model == 'spm + derivative':
hkernel = [spm_hrf(tr, oversampling),
spm_time_derivative(tr, oversampling)]
elif hrf_model == 'spm + derivative + dispersion':
hkernel = [spm_hrf(tr, oversampling),
spm_time_derivative(tr, oversampling),
spm_dispersion_derivative(tr, oversampling)]
elif hrf_model == 'glover':
hkernel = [glover_hrf(tr, oversampling)]
elif hrf_model == 'glover + derivative':
hkernel = [glover_hrf(tr, oversampling),
glover_time_derivative(tr, oversampling)]
elif hrf_model == 'glover + derivative + dispersion':
hkernel = [glover_hrf(tr, oversampling),
glover_time_derivative(tr, oversampling),
glover_dispersion_derivative(tr, oversampling)]
elif hrf_model == 'fir':
hkernel = [np.hstack((np.zeros(f * oversampling),
np.ones(oversampling)))
for f in fir_delays]
elif hrf_model is None:
hkernel = [np.hstack((1, np.zeros(oversampling - 1)))]
else:
raise ValueError('"{0}" is not a known hrf model. Use one of {1}'.
format(hrf_model, acceptable_hrfs))
return hkernel
|
[
"def",
"_hrf_kernel",
"(",
"hrf_model",
",",
"tr",
",",
"oversampling",
"=",
"50",
",",
"fir_delays",
"=",
"None",
")",
":",
"acceptable_hrfs",
"=",
"[",
"'spm'",
",",
"'spm + derivative'",
",",
"'spm + derivative + dispersion'",
",",
"'fir'",
",",
"'glover'",
",",
"'glover + derivative'",
",",
"'glover + derivative + dispersion'",
",",
"None",
"]",
"if",
"hrf_model",
"==",
"'spm'",
":",
"hkernel",
"=",
"[",
"spm_hrf",
"(",
"tr",
",",
"oversampling",
")",
"]",
"elif",
"hrf_model",
"==",
"'spm + derivative'",
":",
"hkernel",
"=",
"[",
"spm_hrf",
"(",
"tr",
",",
"oversampling",
")",
",",
"spm_time_derivative",
"(",
"tr",
",",
"oversampling",
")",
"]",
"elif",
"hrf_model",
"==",
"'spm + derivative + dispersion'",
":",
"hkernel",
"=",
"[",
"spm_hrf",
"(",
"tr",
",",
"oversampling",
")",
",",
"spm_time_derivative",
"(",
"tr",
",",
"oversampling",
")",
",",
"spm_dispersion_derivative",
"(",
"tr",
",",
"oversampling",
")",
"]",
"elif",
"hrf_model",
"==",
"'glover'",
":",
"hkernel",
"=",
"[",
"glover_hrf",
"(",
"tr",
",",
"oversampling",
")",
"]",
"elif",
"hrf_model",
"==",
"'glover + derivative'",
":",
"hkernel",
"=",
"[",
"glover_hrf",
"(",
"tr",
",",
"oversampling",
")",
",",
"glover_time_derivative",
"(",
"tr",
",",
"oversampling",
")",
"]",
"elif",
"hrf_model",
"==",
"'glover + derivative + dispersion'",
":",
"hkernel",
"=",
"[",
"glover_hrf",
"(",
"tr",
",",
"oversampling",
")",
",",
"glover_time_derivative",
"(",
"tr",
",",
"oversampling",
")",
",",
"glover_dispersion_derivative",
"(",
"tr",
",",
"oversampling",
")",
"]",
"elif",
"hrf_model",
"==",
"'fir'",
":",
"hkernel",
"=",
"[",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"zeros",
"(",
"f",
"*",
"oversampling",
")",
",",
"np",
".",
"ones",
"(",
"oversampling",
")",
")",
")",
"for",
"f",
"in",
"fir_delays",
"]",
"elif",
"hrf_model",
"is",
"None",
":",
"hkernel",
"=",
"[",
"np",
".",
"hstack",
"(",
"(",
"1",
",",
"np",
".",
"zeros",
"(",
"oversampling",
"-",
"1",
")",
")",
")",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"'\"{0}\" is not a known hrf model. Use one of {1}'",
".",
"format",
"(",
"hrf_model",
",",
"acceptable_hrfs",
")",
")",
"return",
"hkernel"
] |
Given the specification of the hemodynamic model and time parameters,
return the list of matching kernels
Parameters
----------
hrf_model : string or None,
identifier of the hrf model
tr : float
the repetition time in seconds
oversampling : int, optional
temporal oversampling factor to have a smooth hrf
fir_delays : list of floats,
list of delays for finite impulse response models
Returns
-------
hkernel : list of arrays
samples of the hrf (the number depends on the hrf_model used)
|
[
"Given",
"the",
"specification",
"of",
"the",
"hemodynamic",
"model",
"and",
"time",
"parameters",
"return",
"the",
"list",
"of",
"matching",
"kernels"
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/analysis/hrf.py#L378-L432
|
17,701
|
bids-standard/pybids
|
bids/analysis/hrf.py
|
compute_regressor
|
def compute_regressor(exp_condition, hrf_model, frame_times, con_id='cond',
                      oversampling=50, fir_delays=None, min_onset=-24):
    """Convolve one experimental condition with the requested hrf model.

    Parameters
    ----------
    exp_condition : array-like of shape (3, n_events)
        yields description of events for this condition as a
        (onsets, durations, amplitudes) triplet
    hrf_model : {'spm', 'spm + derivative', 'spm + derivative + dispersion',
        'glover', 'glover + derivative', 'fir', None}
        Name of the hrf model to be used
    frame_times : array of shape (n_scans)
        the desired sampling times
    con_id : string
        optional identifier of the condition
    oversampling : int, optional
        oversampling factor to perform the convolution
    fir_delays : 1D-array-like, optional
        delays (in seconds) used in case of a finite impulse response model
    min_onset : float, optional
        minimal onset relative to frame_times[0] (in seconds);
        events that start before frame_times[0] + min_onset are not
        considered

    Returns
    -------
    computed_regressors : array of shape (n_scans, n_reg)
        computed regressors sampled at frame times
    reg_names : list of strings
        corresponding regressor names

    Notes
    -----
    The 'spm' and 'glover' families optionally add time and dispersion
    derivatives; those derived regressors are orthogonalized with respect
    to the main one. The 'fir' basis is a set of delayed dirac kernels and
    assumes regularly spaced frame times (fixed time of repetition).
    """
    # Average spacing of the frame times; this stands in for the true
    # repetition time and equals it only for a regular grid starting at 0.
    avg_tr = float(frame_times.max()) / (np.size(frame_times) - 1)
    # Step 1: build the condition regressor at high temporal resolution.
    hires_reg, hires_times = _sample_condition(
        exp_condition, frame_times, oversampling, min_onset)
    # Step 2: fetch the kernel(s) implementing the chosen hrf model.
    kernels = _hrf_kernel(hrf_model, avg_tr, oversampling, fir_delays)
    # Step 3: convolve, keeping only the causal part of each result.
    convolved = np.array([np.convolve(hires_reg, kern)[:hires_reg.size]
                          for kern in kernels])
    # Step 4: bring the regressors back onto the acquisition grid.
    regressors = _resample_regressor(convolved, hires_times, frame_times)
    # Step 5: orthogonalize derivative regressors wrt the main one
    # (not meaningful for the FIR basis).
    if hrf_model != 'fir':
        regressors = _orthogonalize(regressors)
    # Step 6: build the matching regressor names.
    names = _regressor_names(con_id, hrf_model, fir_delays=fir_delays)
    return regressors, names
|
python
|
def compute_regressor(exp_condition, hrf_model, frame_times, con_id='cond',
                      oversampling=50, fir_delays=None, min_onset=-24):
    """Convolve one experimental condition with the requested hrf model.

    Parameters
    ----------
    exp_condition : array-like of shape (3, n_events)
        yields description of events for this condition as a
        (onsets, durations, amplitudes) triplet
    hrf_model : {'spm', 'spm + derivative', 'spm + derivative + dispersion',
        'glover', 'glover + derivative', 'fir', None}
        Name of the hrf model to be used
    frame_times : array of shape (n_scans)
        the desired sampling times
    con_id : string
        optional identifier of the condition
    oversampling : int, optional
        oversampling factor to perform the convolution
    fir_delays : 1D-array-like, optional
        delays (in seconds) used in case of a finite impulse response model
    min_onset : float, optional
        minimal onset relative to frame_times[0] (in seconds);
        events that start before frame_times[0] + min_onset are not
        considered

    Returns
    -------
    computed_regressors : array of shape (n_scans, n_reg)
        computed regressors sampled at frame times
    reg_names : list of strings
        corresponding regressor names

    Notes
    -----
    The 'spm' and 'glover' families optionally add time and dispersion
    derivatives; those derived regressors are orthogonalized with respect
    to the main one. The 'fir' basis is a set of delayed dirac kernels and
    assumes regularly spaced frame times (fixed time of repetition).
    """
    # Average spacing of the frame times; this stands in for the true
    # repetition time and equals it only for a regular grid starting at 0.
    avg_tr = float(frame_times.max()) / (np.size(frame_times) - 1)
    # Step 1: build the condition regressor at high temporal resolution.
    hires_reg, hires_times = _sample_condition(
        exp_condition, frame_times, oversampling, min_onset)
    # Step 2: fetch the kernel(s) implementing the chosen hrf model.
    kernels = _hrf_kernel(hrf_model, avg_tr, oversampling, fir_delays)
    # Step 3: convolve, keeping only the causal part of each result.
    convolved = np.array([np.convolve(hires_reg, kern)[:hires_reg.size]
                          for kern in kernels])
    # Step 4: bring the regressors back onto the acquisition grid.
    regressors = _resample_regressor(convolved, hires_times, frame_times)
    # Step 5: orthogonalize derivative regressors wrt the main one
    # (not meaningful for the FIR basis).
    if hrf_model != 'fir':
        regressors = _orthogonalize(regressors)
    # Step 6: build the matching regressor names.
    names = _regressor_names(con_id, hrf_model, fir_delays=fir_delays)
    return regressors, names
|
[
"def",
"compute_regressor",
"(",
"exp_condition",
",",
"hrf_model",
",",
"frame_times",
",",
"con_id",
"=",
"'cond'",
",",
"oversampling",
"=",
"50",
",",
"fir_delays",
"=",
"None",
",",
"min_onset",
"=",
"-",
"24",
")",
":",
"# this is the average tr in this session, not necessarily the true tr",
"tr",
"=",
"float",
"(",
"frame_times",
".",
"max",
"(",
")",
")",
"/",
"(",
"np",
".",
"size",
"(",
"frame_times",
")",
"-",
"1",
")",
"# 1. create the high temporal resolution regressor",
"hr_regressor",
",",
"hr_frame_times",
"=",
"_sample_condition",
"(",
"exp_condition",
",",
"frame_times",
",",
"oversampling",
",",
"min_onset",
")",
"# 2. create the hrf model(s)",
"hkernel",
"=",
"_hrf_kernel",
"(",
"hrf_model",
",",
"tr",
",",
"oversampling",
",",
"fir_delays",
")",
"# 3. convolve the regressor and hrf, and downsample the regressor",
"conv_reg",
"=",
"np",
".",
"array",
"(",
"[",
"np",
".",
"convolve",
"(",
"hr_regressor",
",",
"h",
")",
"[",
":",
"hr_regressor",
".",
"size",
"]",
"for",
"h",
"in",
"hkernel",
"]",
")",
"# 4. temporally resample the regressors",
"computed_regressors",
"=",
"_resample_regressor",
"(",
"conv_reg",
",",
"hr_frame_times",
",",
"frame_times",
")",
"# 5. ortogonalize the regressors",
"if",
"hrf_model",
"!=",
"'fir'",
":",
"computed_regressors",
"=",
"_orthogonalize",
"(",
"computed_regressors",
")",
"# 6 generate regressor names",
"reg_names",
"=",
"_regressor_names",
"(",
"con_id",
",",
"hrf_model",
",",
"fir_delays",
"=",
"fir_delays",
")",
"return",
"computed_regressors",
",",
"reg_names"
] |
This is the main function to convolve regressors with hrf model
Parameters
----------
exp_condition : array-like of shape (3, n_events)
yields description of events for this condition as a
(onsets, durations, amplitudes) triplet
hrf_model : {'spm', 'spm + derivative', 'spm + derivative + dispersion',
'glover', 'glover + derivative', 'fir', None}
Name of the hrf model to be used
frame_times : array of shape (n_scans)
the desired sampling times
con_id : string
optional identifier of the condition
oversampling : int, optional
oversampling factor to perform the convolution
fir_delays : 1D-array-like, optional
delays (in seconds) used in case of a finite impulse response model
min_onset : float, optional
minimal onset relative to frame_times[0] (in seconds)
events that start before frame_times[0] + min_onset are not considered
Returns
-------
computed_regressors: array of shape(n_scans, n_reg)
computed regressors sampled at frame times
reg_names: list of strings
corresponding regressor names
Notes
-----
The different hemodynamic models can be understood as follows:
'spm': this is the hrf model used in SPM
'spm + derivative': SPM model plus its time derivative (2 regressors)
'spm + derivative + dispersion': idem, plus dispersion derivative (3 regressors)
'glover': this one corresponds to the Glover hrf
'glover + derivative': the Glover hrf + time derivative (2 regressors)
'glover + derivative + dispersion': idem + dispersion derivative
(3 regressors)
'fir': finite impulse response basis, a set of delayed dirac models
with arbitrary length. This one currently assumes regularly spaced
frame times (i.e. fixed time of repetition).
It is expected that spm standard and Glover model would not yield
large differences in most cases.
In case of glover and spm models, the derived regressors are
orthogonalized wrt the main one.
|
[
"This",
"is",
"the",
"main",
"function",
"to",
"convolve",
"regressors",
"with",
"hrf",
"model"
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/analysis/hrf.py#L435-L516
|
17,702
|
bids-standard/pybids
|
bids/utils.py
|
matches_entities
|
def matches_entities(obj, entities, strict=False):
    """Check whether an object's entities are compatible with the input.

    ``entities`` values may be scalars (matched by equality) or
    lists/tuples (matched by membership). With ``strict=True`` the object
    must carry exactly the same set of entity names as ``entities``.
    """
    obj_keys = set(obj.entities.keys())
    wanted_keys = set(entities.keys())
    # Strict mode requires the two sets of entity names to coincide.
    if strict and obj_keys != wanted_keys:
        return False
    # Only entities present on both sides are compared.
    for key in obj_keys & wanted_keys:
        value = obj.entities[key]
        wanted = entities[key]
        if isinstance(wanted, (list, tuple)):
            # A sequence target means "any of these values is acceptable".
            if value not in wanted:
                return False
        elif value != wanted:
            return False
    return True
|
python
|
def matches_entities(obj, entities, strict=False):
    """Check whether an object's entities are compatible with the input.

    ``entities`` values may be scalars (matched by equality) or
    lists/tuples (matched by membership). With ``strict=True`` the object
    must carry exactly the same set of entity names as ``entities``.
    """
    obj_keys = set(obj.entities.keys())
    wanted_keys = set(entities.keys())
    # Strict mode requires the two sets of entity names to coincide.
    if strict and obj_keys != wanted_keys:
        return False
    # Only entities present on both sides are compared.
    for key in obj_keys & wanted_keys:
        value = obj.entities[key]
        wanted = entities[key]
        if isinstance(wanted, (list, tuple)):
            # A sequence target means "any of these values is acceptable".
            if value not in wanted:
                return False
        elif value != wanted:
            return False
    return True
|
[
"def",
"matches_entities",
"(",
"obj",
",",
"entities",
",",
"strict",
"=",
"False",
")",
":",
"if",
"strict",
"and",
"set",
"(",
"obj",
".",
"entities",
".",
"keys",
"(",
")",
")",
"!=",
"set",
"(",
"entities",
".",
"keys",
"(",
")",
")",
":",
"return",
"False",
"comm_ents",
"=",
"list",
"(",
"set",
"(",
"obj",
".",
"entities",
".",
"keys",
"(",
")",
")",
"&",
"set",
"(",
"entities",
".",
"keys",
"(",
")",
")",
")",
"for",
"k",
"in",
"comm_ents",
":",
"current",
"=",
"obj",
".",
"entities",
"[",
"k",
"]",
"target",
"=",
"entities",
"[",
"k",
"]",
"if",
"isinstance",
"(",
"target",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"if",
"current",
"not",
"in",
"target",
":",
"return",
"False",
"elif",
"current",
"!=",
"target",
":",
"return",
"False",
"return",
"True"
] |
Checks whether an object's entities match the input.
|
[
"Checks",
"whether",
"an",
"object",
"s",
"entities",
"match",
"the",
"input",
"."
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/utils.py#L12-L26
|
17,703
|
bids-standard/pybids
|
bids/utils.py
|
check_path_matches_patterns
|
def check_path_matches_patterns(path, patterns):
    """Check if the path matches at least one of the provided patterns.

    Parameters
    ----------
    path : str
        Path to check; it is made absolute before comparison.
    patterns : list
        Mix of plain strings (matched by exact equality against the
        absolute path) and compiled regular expressions (matched via
        their ``search`` method).

    Returns
    -------
    bool
        True if any pattern matches, False otherwise.
    """
    path = os.path.abspath(path)
    for patt in patterns:
        # `isinstance(patt, str)` replaces the former `six.string_types`
        # check, which was only needed for Python 2 compatibility.
        if isinstance(patt, str):
            if path == patt:
                return True
        elif patt.search(path):
            return True
    return False
|
python
|
def check_path_matches_patterns(path, patterns):
    """Check if the path matches at least one of the provided patterns.

    Parameters
    ----------
    path : str
        Path to check; it is made absolute before comparison.
    patterns : list
        Mix of plain strings (matched by exact equality against the
        absolute path) and compiled regular expressions (matched via
        their ``search`` method).

    Returns
    -------
    bool
        True if any pattern matches, False otherwise.
    """
    path = os.path.abspath(path)
    for patt in patterns:
        # `isinstance(patt, str)` replaces the former `six.string_types`
        # check, which was only needed for Python 2 compatibility.
        if isinstance(patt, str):
            if path == patt:
                return True
        elif patt.search(path):
            return True
    return False
|
[
"def",
"check_path_matches_patterns",
"(",
"path",
",",
"patterns",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
"for",
"patt",
"in",
"patterns",
":",
"if",
"isinstance",
"(",
"patt",
",",
"six",
".",
"string_types",
")",
":",
"if",
"path",
"==",
"patt",
":",
"return",
"True",
"elif",
"patt",
".",
"search",
"(",
"path",
")",
":",
"return",
"True",
"return",
"False"
] |
Check if the path matches at least one of the provided patterns.
|
[
"Check",
"if",
"the",
"path",
"matches",
"at",
"least",
"one",
"of",
"the",
"provided",
"patterns",
"."
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/utils.py#L91-L100
|
17,704
|
bids-standard/pybids
|
bids/layout/core.py
|
Entity.count
|
def count(self, files=False):
    """Return a count of unique values or files.

    Args:
        files (bool): When True, counts all files mapped to the Entity.
            When False, counts all unique values.

    Returns: an int.
    """
    if files:
        return len(self.files)
    return len(self.unique())
|
python
|
def count(self, files=False):
    """Return a count of unique values or files.

    Args:
        files (bool): When True, counts all files mapped to the Entity.
            When False, counts all unique values.

    Returns: an int.
    """
    if files:
        return len(self.files)
    return len(self.unique())
|
[
"def",
"count",
"(",
"self",
",",
"files",
"=",
"False",
")",
":",
"return",
"len",
"(",
"self",
".",
"files",
")",
"if",
"files",
"else",
"len",
"(",
"self",
".",
"unique",
"(",
")",
")"
] |
Returns a count of unique values or files.
Args:
files (bool): When True, counts all files mapped to the Entity.
When False, counts all unique values.
Returns: an int.
|
[
"Returns",
"a",
"count",
"of",
"unique",
"values",
"or",
"files",
"."
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/layout/core.py#L147-L155
|
17,705
|
bids-standard/pybids
|
bids/reports/parsing.py
|
general_acquisition_info
|
def general_acquisition_info(metadata):
    """
    General sentence on data acquisition. Should be the first sentence in
    the MRI data acquisition section of the report.

    Parameters
    ----------
    metadata : :obj:`dict`
        The metadata for the dataset.

    Returns
    -------
    out_str : :obj:`str`
        Output string with scanner information.
    """
    # Loud placeholder defaults make missing metadata obvious in the report.
    tesla = metadata.get('MagneticFieldStrength', 'UNKNOWN')
    manufacturer = metadata.get('Manufacturer', 'MANUFACTURER')
    model = metadata.get('ManufacturersModelName', 'MODEL')
    template = ('MR data were acquired using a {tesla}-Tesla {manu} {model} '
                'MRI scanner.')
    return template.format(tesla=tesla, manu=manufacturer, model=model)
|
python
|
def general_acquisition_info(metadata):
    """
    General sentence on data acquisition. Should be the first sentence in
    the MRI data acquisition section of the report.

    Parameters
    ----------
    metadata : :obj:`dict`
        The metadata for the dataset.

    Returns
    -------
    out_str : :obj:`str`
        Output string with scanner information.
    """
    # Loud placeholder defaults make missing metadata obvious in the report.
    tesla = metadata.get('MagneticFieldStrength', 'UNKNOWN')
    manufacturer = metadata.get('Manufacturer', 'MANUFACTURER')
    model = metadata.get('ManufacturersModelName', 'MODEL')
    template = ('MR data were acquired using a {tesla}-Tesla {manu} {model} '
                'MRI scanner.')
    return template.format(tesla=tesla, manu=manufacturer, model=model)
|
[
"def",
"general_acquisition_info",
"(",
"metadata",
")",
":",
"out_str",
"=",
"(",
"'MR data were acquired using a {tesla}-Tesla {manu} {model} '",
"'MRI scanner.'",
")",
"out_str",
"=",
"out_str",
".",
"format",
"(",
"tesla",
"=",
"metadata",
".",
"get",
"(",
"'MagneticFieldStrength'",
",",
"'UNKNOWN'",
")",
",",
"manu",
"=",
"metadata",
".",
"get",
"(",
"'Manufacturer'",
",",
"'MANUFACTURER'",
")",
",",
"model",
"=",
"metadata",
".",
"get",
"(",
"'ManufacturersModelName'",
",",
"'MODEL'",
")",
")",
"return",
"out_str"
] |
General sentence on data acquisition. Should be first sentence in MRI data
acquisition section.
Parameters
----------
metadata : :obj:`dict`
The metadata for the dataset.
Returns
-------
out_str : :obj:`str`
Output string with scanner information.
|
[
"General",
"sentence",
"on",
"data",
"acquisition",
".",
"Should",
"be",
"first",
"sentence",
"in",
"MRI",
"data",
"acquisition",
"section",
"."
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/reports/parsing.py#L22-L44
|
17,706
|
bids-standard/pybids
|
bids/reports/parsing.py
|
parse_niftis
|
def parse_niftis(layout, niftis, subj, config, **kwargs):
    """
    Loop through niftis in a BIDSLayout and generate the appropriate description
    type for each scan. Compile all of the descriptions into a list.

    Parameters
    ----------
    layout : :obj:`bids.layout.BIDSLayout`
        Layout object for a BIDS dataset.
    niftis : :obj:`list` or :obj:`grabbit.core.File`
        List of nifti files in layout corresponding to subject/session combo.
    subj : :obj:`str`
        Subject ID.
    config : :obj:`dict`
        Configuration info for methods generation.

    Returns
    -------
    description_list : :obj:`list`
        Description strings: one general acquisition sentence followed by
        one entry per functional task, anatomical suffix, dwi and fmap scan.
    """
    # Drop unset filters so they are not forwarded to the layout queries.
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    description_list = []
    skip_task = {}  # Only report each task once
    for nifti_struct in niftis:
        nii_file = nifti_struct.path
        metadata = layout.get_metadata(nii_file)
        if not metadata:
            LOGGER.warning('No json file found for %s', nii_file)
        else:
            # Imported lazily: nibabel is only needed when metadata exists.
            import nibabel as nib
            img = nib.load(nii_file)
            # Assume all data were acquired the same way.
            if not description_list:
                description_list.append(general_acquisition_info(metadata))
            if nifti_struct.entities['datatype'] == 'func':
                if not skip_task.get(nifti_struct.entities['task'], False):
                    echos = layout.get_echoes(subject=subj, extensions='nii.gz',
                                              task=nifti_struct.entities['task'],
                                              **kwargs)
                    n_echos = len(echos)
                    if n_echos > 0:
                        # Multi-echo acquisition: gather each echo's EchoTime
                        # (sorted by echo index) into the shared metadata dict.
                        metadata['EchoTime'] = []
                        for echo in sorted(echos):
                            echo_struct = layout.get(subject=subj, echo=echo,
                                                     extensions='nii.gz',
                                                     task=nifti_struct.entities['task'],
                                                     **kwargs)[0]
                            echo_file = echo_struct.path
                            echo_meta = layout.get_metadata(echo_file)
                            metadata['EchoTime'].append(echo_meta['EchoTime'])
                    n_runs = len(layout.get_runs(subject=subj,
                                                 task=nifti_struct.entities['task'],
                                                 **kwargs))
                    description_list.append(func_info(nifti_struct.entities['task'],
                                                      n_runs, metadata, img,
                                                      config))
                    # Mark the task as reported so later runs are skipped.
                    skip_task[nifti_struct.entities['task']] = True
            elif nifti_struct.entities['datatype'] == 'anat':
                suffix = nifti_struct.entities['suffix']
                # Expand e.g. 'T1w' to 'T1-weighted' for the report text.
                if suffix.endswith('w'):
                    suffix = suffix[:-1] + '-weighted'
                description_list.append(anat_info(suffix, metadata, img,
                                                  config))
            elif nifti_struct.entities['datatype'] == 'dwi':
                # The b-value file is assumed to sit alongside the nifti.
                bval_file = nii_file.replace('.nii.gz', '.bval')
                description_list.append(dwi_info(bval_file, metadata, img,
                                                 config))
            elif nifti_struct.entities['datatype'] == 'fmap':
                description_list.append(fmap_info(metadata, img, config,
                                                  layout))
    return description_list
|
python
|
def parse_niftis(layout, niftis, subj, config, **kwargs):
    """
    Loop through niftis in a BIDSLayout and generate the appropriate description
    type for each scan. Compile all of the descriptions into a list.

    Parameters
    ----------
    layout : :obj:`bids.layout.BIDSLayout`
        Layout object for a BIDS dataset.
    niftis : :obj:`list` or :obj:`grabbit.core.File`
        List of nifti files in layout corresponding to subject/session combo.
    subj : :obj:`str`
        Subject ID.
    config : :obj:`dict`
        Configuration info for methods generation.

    Returns
    -------
    description_list : :obj:`list`
        Description strings: one general acquisition sentence followed by
        one entry per functional task, anatomical suffix, dwi and fmap scan.
    """
    # Drop unset filters so they are not forwarded to the layout queries.
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    description_list = []
    skip_task = {}  # Only report each task once
    for nifti_struct in niftis:
        nii_file = nifti_struct.path
        metadata = layout.get_metadata(nii_file)
        if not metadata:
            LOGGER.warning('No json file found for %s', nii_file)
        else:
            # Imported lazily: nibabel is only needed when metadata exists.
            import nibabel as nib
            img = nib.load(nii_file)
            # Assume all data were acquired the same way.
            if not description_list:
                description_list.append(general_acquisition_info(metadata))
            if nifti_struct.entities['datatype'] == 'func':
                if not skip_task.get(nifti_struct.entities['task'], False):
                    echos = layout.get_echoes(subject=subj, extensions='nii.gz',
                                              task=nifti_struct.entities['task'],
                                              **kwargs)
                    n_echos = len(echos)
                    if n_echos > 0:
                        # Multi-echo acquisition: gather each echo's EchoTime
                        # (sorted by echo index) into the shared metadata dict.
                        metadata['EchoTime'] = []
                        for echo in sorted(echos):
                            echo_struct = layout.get(subject=subj, echo=echo,
                                                     extensions='nii.gz',
                                                     task=nifti_struct.entities['task'],
                                                     **kwargs)[0]
                            echo_file = echo_struct.path
                            echo_meta = layout.get_metadata(echo_file)
                            metadata['EchoTime'].append(echo_meta['EchoTime'])
                    n_runs = len(layout.get_runs(subject=subj,
                                                 task=nifti_struct.entities['task'],
                                                 **kwargs))
                    description_list.append(func_info(nifti_struct.entities['task'],
                                                      n_runs, metadata, img,
                                                      config))
                    # Mark the task as reported so later runs are skipped.
                    skip_task[nifti_struct.entities['task']] = True
            elif nifti_struct.entities['datatype'] == 'anat':
                suffix = nifti_struct.entities['suffix']
                # Expand e.g. 'T1w' to 'T1-weighted' for the report text.
                if suffix.endswith('w'):
                    suffix = suffix[:-1] + '-weighted'
                description_list.append(anat_info(suffix, metadata, img,
                                                  config))
            elif nifti_struct.entities['datatype'] == 'dwi':
                # The b-value file is assumed to sit alongside the nifti.
                bval_file = nii_file.replace('.nii.gz', '.bval')
                description_list.append(dwi_info(bval_file, metadata, img,
                                                 config))
            elif nifti_struct.entities['datatype'] == 'fmap':
                description_list.append(fmap_info(metadata, img, config,
                                                  layout))
    return description_list
|
[
"def",
"parse_niftis",
"(",
"layout",
",",
"niftis",
",",
"subj",
",",
"config",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"kwargs",
".",
"items",
"(",
")",
"if",
"v",
"is",
"not",
"None",
"}",
"description_list",
"=",
"[",
"]",
"skip_task",
"=",
"{",
"}",
"# Only report each task once",
"for",
"nifti_struct",
"in",
"niftis",
":",
"nii_file",
"=",
"nifti_struct",
".",
"path",
"metadata",
"=",
"layout",
".",
"get_metadata",
"(",
"nii_file",
")",
"if",
"not",
"metadata",
":",
"LOGGER",
".",
"warning",
"(",
"'No json file found for %s'",
",",
"nii_file",
")",
"else",
":",
"import",
"nibabel",
"as",
"nib",
"img",
"=",
"nib",
".",
"load",
"(",
"nii_file",
")",
"# Assume all data were acquired the same way.",
"if",
"not",
"description_list",
":",
"description_list",
".",
"append",
"(",
"general_acquisition_info",
"(",
"metadata",
")",
")",
"if",
"nifti_struct",
".",
"entities",
"[",
"'datatype'",
"]",
"==",
"'func'",
":",
"if",
"not",
"skip_task",
".",
"get",
"(",
"nifti_struct",
".",
"entities",
"[",
"'task'",
"]",
",",
"False",
")",
":",
"echos",
"=",
"layout",
".",
"get_echoes",
"(",
"subject",
"=",
"subj",
",",
"extensions",
"=",
"'nii.gz'",
",",
"task",
"=",
"nifti_struct",
".",
"entities",
"[",
"'task'",
"]",
",",
"*",
"*",
"kwargs",
")",
"n_echos",
"=",
"len",
"(",
"echos",
")",
"if",
"n_echos",
">",
"0",
":",
"metadata",
"[",
"'EchoTime'",
"]",
"=",
"[",
"]",
"for",
"echo",
"in",
"sorted",
"(",
"echos",
")",
":",
"echo_struct",
"=",
"layout",
".",
"get",
"(",
"subject",
"=",
"subj",
",",
"echo",
"=",
"echo",
",",
"extensions",
"=",
"'nii.gz'",
",",
"task",
"=",
"nifti_struct",
".",
"entities",
"[",
"'task'",
"]",
",",
"*",
"*",
"kwargs",
")",
"[",
"0",
"]",
"echo_file",
"=",
"echo_struct",
".",
"path",
"echo_meta",
"=",
"layout",
".",
"get_metadata",
"(",
"echo_file",
")",
"metadata",
"[",
"'EchoTime'",
"]",
".",
"append",
"(",
"echo_meta",
"[",
"'EchoTime'",
"]",
")",
"n_runs",
"=",
"len",
"(",
"layout",
".",
"get_runs",
"(",
"subject",
"=",
"subj",
",",
"task",
"=",
"nifti_struct",
".",
"entities",
"[",
"'task'",
"]",
",",
"*",
"*",
"kwargs",
")",
")",
"description_list",
".",
"append",
"(",
"func_info",
"(",
"nifti_struct",
".",
"entities",
"[",
"'task'",
"]",
",",
"n_runs",
",",
"metadata",
",",
"img",
",",
"config",
")",
")",
"skip_task",
"[",
"nifti_struct",
".",
"entities",
"[",
"'task'",
"]",
"]",
"=",
"True",
"elif",
"nifti_struct",
".",
"entities",
"[",
"'datatype'",
"]",
"==",
"'anat'",
":",
"suffix",
"=",
"nifti_struct",
".",
"entities",
"[",
"'suffix'",
"]",
"if",
"suffix",
".",
"endswith",
"(",
"'w'",
")",
":",
"suffix",
"=",
"suffix",
"[",
":",
"-",
"1",
"]",
"+",
"'-weighted'",
"description_list",
".",
"append",
"(",
"anat_info",
"(",
"suffix",
",",
"metadata",
",",
"img",
",",
"config",
")",
")",
"elif",
"nifti_struct",
".",
"entities",
"[",
"'datatype'",
"]",
"==",
"'dwi'",
":",
"bval_file",
"=",
"nii_file",
".",
"replace",
"(",
"'.nii.gz'",
",",
"'.bval'",
")",
"description_list",
".",
"append",
"(",
"dwi_info",
"(",
"bval_file",
",",
"metadata",
",",
"img",
",",
"config",
")",
")",
"elif",
"nifti_struct",
".",
"entities",
"[",
"'datatype'",
"]",
"==",
"'fmap'",
":",
"description_list",
".",
"append",
"(",
"fmap_info",
"(",
"metadata",
",",
"img",
",",
"config",
",",
"layout",
")",
")",
"return",
"description_list"
] |
Loop through niftis in a BIDSLayout and generate the appropriate description
type for each scan. Compile all of the descriptions into a list.
Parameters
----------
layout : :obj:`bids.layout.BIDSLayout`
Layout object for a BIDS dataset.
niftis : :obj:`list` or :obj:`grabbit.core.File`
List of nifti files in layout corresponding to subject/session combo.
subj : :obj:`str`
Subject ID.
config : :obj:`dict`
Configuration info for methods generation.
|
[
"Loop",
"through",
"niftis",
"in",
"a",
"BIDSLayout",
"and",
"generate",
"the",
"appropriate",
"description",
"type",
"for",
"each",
"scan",
".",
"Compile",
"all",
"of",
"the",
"descriptions",
"into",
"a",
"list",
"."
] |
30d924ce770622bda0e390d613a8da42a2a20c32
|
https://github.com/bids-standard/pybids/blob/30d924ce770622bda0e390d613a8da42a2a20c32/bids/reports/parsing.py#L407-L479
|
17,707
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/TelemetryClient.py
|
TelemetryClient.track_exception
|
def track_exception(self, type=None, value=None, tb=None, properties=None, measurements=None):
""" Send information about a single exception that occurred in the application.
Args:
type (Type). the type of the exception that was thrown.\n
value (:class:`Exception`). the exception that the client wants to send.\n
tb (:class:`Traceback`). the traceback information as returned by :func:`sys.exc_info`.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
"""
if not type or not value or not tb:
type, value, tb = sys.exc_info()
if not type or not value or not tb:
try:
raise Exception(NULL_CONSTANT_STRING)
except:
type, value, tb = sys.exc_info()
details = channel.contracts.ExceptionDetails()
details.id = 1
details.outer_id = 0
details.type_name = type.__name__
details.message = str(value)
details.has_full_stack = True
counter = 0
for tb_frame_file, tb_frame_line, tb_frame_function, tb_frame_text in traceback.extract_tb(tb):
frame = channel.contracts.StackFrame()
frame.assembly = 'Unknown'
frame.file_name = tb_frame_file
frame.level = counter
frame.line = tb_frame_line
frame.method = tb_frame_function
details.parsed_stack.append(frame)
counter += 1
details.parsed_stack.reverse()
data = channel.contracts.ExceptionData()
data.handled_at = 'UserCode'
data.exceptions.append(details)
if properties:
data.properties = properties
if measurements:
data.measurements = measurements
self.track(data, self._context)
|
python
|
def track_exception(self, type=None, value=None, tb=None, properties=None, measurements=None):
""" Send information about a single exception that occurred in the application.
Args:
type (Type). the type of the exception that was thrown.\n
value (:class:`Exception`). the exception that the client wants to send.\n
tb (:class:`Traceback`). the traceback information as returned by :func:`sys.exc_info`.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
"""
if not type or not value or not tb:
type, value, tb = sys.exc_info()
if not type or not value or not tb:
try:
raise Exception(NULL_CONSTANT_STRING)
except:
type, value, tb = sys.exc_info()
details = channel.contracts.ExceptionDetails()
details.id = 1
details.outer_id = 0
details.type_name = type.__name__
details.message = str(value)
details.has_full_stack = True
counter = 0
for tb_frame_file, tb_frame_line, tb_frame_function, tb_frame_text in traceback.extract_tb(tb):
frame = channel.contracts.StackFrame()
frame.assembly = 'Unknown'
frame.file_name = tb_frame_file
frame.level = counter
frame.line = tb_frame_line
frame.method = tb_frame_function
details.parsed_stack.append(frame)
counter += 1
details.parsed_stack.reverse()
data = channel.contracts.ExceptionData()
data.handled_at = 'UserCode'
data.exceptions.append(details)
if properties:
data.properties = properties
if measurements:
data.measurements = measurements
self.track(data, self._context)
|
[
"def",
"track_exception",
"(",
"self",
",",
"type",
"=",
"None",
",",
"value",
"=",
"None",
",",
"tb",
"=",
"None",
",",
"properties",
"=",
"None",
",",
"measurements",
"=",
"None",
")",
":",
"if",
"not",
"type",
"or",
"not",
"value",
"or",
"not",
"tb",
":",
"type",
",",
"value",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"not",
"type",
"or",
"not",
"value",
"or",
"not",
"tb",
":",
"try",
":",
"raise",
"Exception",
"(",
"NULL_CONSTANT_STRING",
")",
"except",
":",
"type",
",",
"value",
",",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"details",
"=",
"channel",
".",
"contracts",
".",
"ExceptionDetails",
"(",
")",
"details",
".",
"id",
"=",
"1",
"details",
".",
"outer_id",
"=",
"0",
"details",
".",
"type_name",
"=",
"type",
".",
"__name__",
"details",
".",
"message",
"=",
"str",
"(",
"value",
")",
"details",
".",
"has_full_stack",
"=",
"True",
"counter",
"=",
"0",
"for",
"tb_frame_file",
",",
"tb_frame_line",
",",
"tb_frame_function",
",",
"tb_frame_text",
"in",
"traceback",
".",
"extract_tb",
"(",
"tb",
")",
":",
"frame",
"=",
"channel",
".",
"contracts",
".",
"StackFrame",
"(",
")",
"frame",
".",
"assembly",
"=",
"'Unknown'",
"frame",
".",
"file_name",
"=",
"tb_frame_file",
"frame",
".",
"level",
"=",
"counter",
"frame",
".",
"line",
"=",
"tb_frame_line",
"frame",
".",
"method",
"=",
"tb_frame_function",
"details",
".",
"parsed_stack",
".",
"append",
"(",
"frame",
")",
"counter",
"+=",
"1",
"details",
".",
"parsed_stack",
".",
"reverse",
"(",
")",
"data",
"=",
"channel",
".",
"contracts",
".",
"ExceptionData",
"(",
")",
"data",
".",
"handled_at",
"=",
"'UserCode'",
"data",
".",
"exceptions",
".",
"append",
"(",
"details",
")",
"if",
"properties",
":",
"data",
".",
"properties",
"=",
"properties",
"if",
"measurements",
":",
"data",
".",
"measurements",
"=",
"measurements",
"self",
".",
"track",
"(",
"data",
",",
"self",
".",
"_context",
")"
] |
Send information about a single exception that occurred in the application.
Args:
type (Type). the type of the exception that was thrown.\n
value (:class:`Exception`). the exception that the client wants to send.\n
tb (:class:`Traceback`). the traceback information as returned by :func:`sys.exc_info`.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
|
[
"Send",
"information",
"about",
"a",
"single",
"exception",
"that",
"occurred",
"in",
"the",
"application",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/TelemetryClient.py#L82-L126
|
17,708
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/TelemetryClient.py
|
TelemetryClient.track_event
|
def track_event(self, name, properties=None, measurements=None):
""" Send information about a single event that has occurred in the context of the application.
Args:
name (str). the data to associate to this event.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
"""
data = channel.contracts.EventData()
data.name = name or NULL_CONSTANT_STRING
if properties:
data.properties = properties
if measurements:
data.measurements = measurements
self.track(data, self._context)
|
python
|
def track_event(self, name, properties=None, measurements=None):
""" Send information about a single event that has occurred in the context of the application.
Args:
name (str). the data to associate to this event.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
"""
data = channel.contracts.EventData()
data.name = name or NULL_CONSTANT_STRING
if properties:
data.properties = properties
if measurements:
data.measurements = measurements
self.track(data, self._context)
|
[
"def",
"track_event",
"(",
"self",
",",
"name",
",",
"properties",
"=",
"None",
",",
"measurements",
"=",
"None",
")",
":",
"data",
"=",
"channel",
".",
"contracts",
".",
"EventData",
"(",
")",
"data",
".",
"name",
"=",
"name",
"or",
"NULL_CONSTANT_STRING",
"if",
"properties",
":",
"data",
".",
"properties",
"=",
"properties",
"if",
"measurements",
":",
"data",
".",
"measurements",
"=",
"measurements",
"self",
".",
"track",
"(",
"data",
",",
"self",
".",
"_context",
")"
] |
Send information about a single event that has occurred in the context of the application.
Args:
name (str). the data to associate to this event.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
|
[
"Send",
"information",
"about",
"a",
"single",
"event",
"that",
"has",
"occurred",
"in",
"the",
"context",
"of",
"the",
"application",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/TelemetryClient.py#L128-L143
|
17,709
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/TelemetryClient.py
|
TelemetryClient.track_metric
|
def track_metric(self, name, value, type=None, count=None, min=None, max=None, std_dev=None, properties=None):
"""Send information about a single metric data point that was captured for the application.
Args:
name (str). the name of the metric that was captured.\n
value (float). the value of the metric that was captured.\n
type (:class:`channel.contracts.DataPointType`). the type of the metric. (defaults to: :func:`channel.contracts.DataPointType.aggregation`)\n
count (int). the number of metrics that were aggregated into this data point. (defaults to: None)\n
min (float). the minimum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
max (float). the maximum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
std_dev (float). the standard deviation of all metrics collected that were aggregated into this data point. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
"""
dataPoint = channel.contracts.DataPoint()
dataPoint.name = name or NULL_CONSTANT_STRING
dataPoint.value = value or 0
dataPoint.kind = type or channel.contracts.DataPointType.aggregation
dataPoint.count = count
dataPoint.min = min
dataPoint.max = max
dataPoint.std_dev = std_dev
data = channel.contracts.MetricData()
data.metrics.append(dataPoint)
if properties:
data.properties = properties
self.track(data, self._context)
|
python
|
def track_metric(self, name, value, type=None, count=None, min=None, max=None, std_dev=None, properties=None):
"""Send information about a single metric data point that was captured for the application.
Args:
name (str). the name of the metric that was captured.\n
value (float). the value of the metric that was captured.\n
type (:class:`channel.contracts.DataPointType`). the type of the metric. (defaults to: :func:`channel.contracts.DataPointType.aggregation`)\n
count (int). the number of metrics that were aggregated into this data point. (defaults to: None)\n
min (float). the minimum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
max (float). the maximum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
std_dev (float). the standard deviation of all metrics collected that were aggregated into this data point. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
"""
dataPoint = channel.contracts.DataPoint()
dataPoint.name = name or NULL_CONSTANT_STRING
dataPoint.value = value or 0
dataPoint.kind = type or channel.contracts.DataPointType.aggregation
dataPoint.count = count
dataPoint.min = min
dataPoint.max = max
dataPoint.std_dev = std_dev
data = channel.contracts.MetricData()
data.metrics.append(dataPoint)
if properties:
data.properties = properties
self.track(data, self._context)
|
[
"def",
"track_metric",
"(",
"self",
",",
"name",
",",
"value",
",",
"type",
"=",
"None",
",",
"count",
"=",
"None",
",",
"min",
"=",
"None",
",",
"max",
"=",
"None",
",",
"std_dev",
"=",
"None",
",",
"properties",
"=",
"None",
")",
":",
"dataPoint",
"=",
"channel",
".",
"contracts",
".",
"DataPoint",
"(",
")",
"dataPoint",
".",
"name",
"=",
"name",
"or",
"NULL_CONSTANT_STRING",
"dataPoint",
".",
"value",
"=",
"value",
"or",
"0",
"dataPoint",
".",
"kind",
"=",
"type",
"or",
"channel",
".",
"contracts",
".",
"DataPointType",
".",
"aggregation",
"dataPoint",
".",
"count",
"=",
"count",
"dataPoint",
".",
"min",
"=",
"min",
"dataPoint",
".",
"max",
"=",
"max",
"dataPoint",
".",
"std_dev",
"=",
"std_dev",
"data",
"=",
"channel",
".",
"contracts",
".",
"MetricData",
"(",
")",
"data",
".",
"metrics",
".",
"append",
"(",
"dataPoint",
")",
"if",
"properties",
":",
"data",
".",
"properties",
"=",
"properties",
"self",
".",
"track",
"(",
"data",
",",
"self",
".",
"_context",
")"
] |
Send information about a single metric data point that was captured for the application.
Args:
name (str). the name of the metric that was captured.\n
value (float). the value of the metric that was captured.\n
type (:class:`channel.contracts.DataPointType`). the type of the metric. (defaults to: :func:`channel.contracts.DataPointType.aggregation`)\n
count (int). the number of metrics that were aggregated into this data point. (defaults to: None)\n
min (float). the minimum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
max (float). the maximum of all metrics collected that were aggregated into this data point. (defaults to: None)\n
std_dev (float). the standard deviation of all metrics collected that were aggregated into this data point. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
|
[
"Send",
"information",
"about",
"a",
"single",
"metric",
"data",
"point",
"that",
"was",
"captured",
"for",
"the",
"application",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/TelemetryClient.py#L145-L172
|
17,710
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/TelemetryClient.py
|
TelemetryClient.track_trace
|
def track_trace(self, name, properties=None, severity=None):
"""Sends a single trace statement.
Args:
name (str). the trace statement.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
severity (str). the severity level of this trace, one of DEBUG, INFO, WARNING, ERROR, CRITICAL
"""
data = channel.contracts.MessageData()
data.message = name or NULL_CONSTANT_STRING
if properties:
data.properties = properties
if severity is not None:
data.severity_level = channel.contracts.MessageData.PYTHON_LOGGING_LEVELS.get(severity)
self.track(data, self._context)
|
python
|
def track_trace(self, name, properties=None, severity=None):
"""Sends a single trace statement.
Args:
name (str). the trace statement.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
severity (str). the severity level of this trace, one of DEBUG, INFO, WARNING, ERROR, CRITICAL
"""
data = channel.contracts.MessageData()
data.message = name or NULL_CONSTANT_STRING
if properties:
data.properties = properties
if severity is not None:
data.severity_level = channel.contracts.MessageData.PYTHON_LOGGING_LEVELS.get(severity)
self.track(data, self._context)
|
[
"def",
"track_trace",
"(",
"self",
",",
"name",
",",
"properties",
"=",
"None",
",",
"severity",
"=",
"None",
")",
":",
"data",
"=",
"channel",
".",
"contracts",
".",
"MessageData",
"(",
")",
"data",
".",
"message",
"=",
"name",
"or",
"NULL_CONSTANT_STRING",
"if",
"properties",
":",
"data",
".",
"properties",
"=",
"properties",
"if",
"severity",
"is",
"not",
"None",
":",
"data",
".",
"severity_level",
"=",
"channel",
".",
"contracts",
".",
"MessageData",
".",
"PYTHON_LOGGING_LEVELS",
".",
"get",
"(",
"severity",
")",
"self",
".",
"track",
"(",
"data",
",",
"self",
".",
"_context",
")"
] |
Sends a single trace statement.
Args:
name (str). the trace statement.\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
severity (str). the severity level of this trace, one of DEBUG, INFO, WARNING, ERROR, CRITICAL
|
[
"Sends",
"a",
"single",
"trace",
"statement",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/TelemetryClient.py#L175-L190
|
17,711
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/TelemetryClient.py
|
TelemetryClient.track_request
|
def track_request(self, name, url, success, start_time=None, duration=None, response_code=None, http_method=None, properties=None, measurements=None, request_id=None):
"""Sends a single request that was captured for the application.
Args:
name (str). the name for this request. All requests with the same name will be grouped together.\n
url (str). the actual URL for this request (to show in individual request instances).\n
success (bool). true if the request ended in success, false otherwise.\n
start_time (str). the start time of the request. The value should look the same as the one returned by :func:`datetime.isoformat()` (defaults to: None)\n
duration (int). the number of milliseconds that this request lasted. (defaults to: None)\n
response_code (str). the response code that this request returned. (defaults to: None)\n
http_method (str). the HTTP method that triggered this request. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)\n
request_id (str). the id for this request. If None, a new uuid will be generated. (defaults to: None)
"""
data = channel.contracts.RequestData()
data.id = request_id or str(uuid.uuid4())
data.name = name
data.url = url
data.success = success
data.start_time = start_time or datetime.datetime.utcnow().isoformat() + 'Z'
data.duration = self.__ms_to_duration(duration)
data.response_code = str(response_code) or '200'
data.http_method = http_method or 'GET'
if properties:
data.properties = properties
if measurements:
data.measurements = measurements
self.track(data, self._context)
|
python
|
def track_request(self, name, url, success, start_time=None, duration=None, response_code=None, http_method=None, properties=None, measurements=None, request_id=None):
"""Sends a single request that was captured for the application.
Args:
name (str). the name for this request. All requests with the same name will be grouped together.\n
url (str). the actual URL for this request (to show in individual request instances).\n
success (bool). true if the request ended in success, false otherwise.\n
start_time (str). the start time of the request. The value should look the same as the one returned by :func:`datetime.isoformat()` (defaults to: None)\n
duration (int). the number of milliseconds that this request lasted. (defaults to: None)\n
response_code (str). the response code that this request returned. (defaults to: None)\n
http_method (str). the HTTP method that triggered this request. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)\n
request_id (str). the id for this request. If None, a new uuid will be generated. (defaults to: None)
"""
data = channel.contracts.RequestData()
data.id = request_id or str(uuid.uuid4())
data.name = name
data.url = url
data.success = success
data.start_time = start_time or datetime.datetime.utcnow().isoformat() + 'Z'
data.duration = self.__ms_to_duration(duration)
data.response_code = str(response_code) or '200'
data.http_method = http_method or 'GET'
if properties:
data.properties = properties
if measurements:
data.measurements = measurements
self.track(data, self._context)
|
[
"def",
"track_request",
"(",
"self",
",",
"name",
",",
"url",
",",
"success",
",",
"start_time",
"=",
"None",
",",
"duration",
"=",
"None",
",",
"response_code",
"=",
"None",
",",
"http_method",
"=",
"None",
",",
"properties",
"=",
"None",
",",
"measurements",
"=",
"None",
",",
"request_id",
"=",
"None",
")",
":",
"data",
"=",
"channel",
".",
"contracts",
".",
"RequestData",
"(",
")",
"data",
".",
"id",
"=",
"request_id",
"or",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"data",
".",
"name",
"=",
"name",
"data",
".",
"url",
"=",
"url",
"data",
".",
"success",
"=",
"success",
"data",
".",
"start_time",
"=",
"start_time",
"or",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")",
".",
"isoformat",
"(",
")",
"+",
"'Z'",
"data",
".",
"duration",
"=",
"self",
".",
"__ms_to_duration",
"(",
"duration",
")",
"data",
".",
"response_code",
"=",
"str",
"(",
"response_code",
")",
"or",
"'200'",
"data",
".",
"http_method",
"=",
"http_method",
"or",
"'GET'",
"if",
"properties",
":",
"data",
".",
"properties",
"=",
"properties",
"if",
"measurements",
":",
"data",
".",
"measurements",
"=",
"measurements",
"self",
".",
"track",
"(",
"data",
",",
"self",
".",
"_context",
")"
] |
Sends a single request that was captured for the application.
Args:
name (str). the name for this request. All requests with the same name will be grouped together.\n
url (str). the actual URL for this request (to show in individual request instances).\n
success (bool). true if the request ended in success, false otherwise.\n
start_time (str). the start time of the request. The value should look the same as the one returned by :func:`datetime.isoformat()` (defaults to: None)\n
duration (int). the number of milliseconds that this request lasted. (defaults to: None)\n
response_code (str). the response code that this request returned. (defaults to: None)\n
http_method (str). the HTTP method that triggered this request. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)\n
request_id (str). the id for this request. If None, a new uuid will be generated. (defaults to: None)
|
[
"Sends",
"a",
"single",
"request",
"that",
"was",
"captured",
"for",
"the",
"application",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/TelemetryClient.py#L193-L222
|
17,712
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/TelemetryClient.py
|
TelemetryClient.track_dependency
|
def track_dependency(self, name, data, type=None, target=None, duration=None, success=None, result_code=None, properties=None, measurements=None, dependency_id=None):
"""Sends a single dependency telemetry that was captured for the application.
Args:
name (str). the name of the command initiated with this dependency call. Low cardinality value. Examples are stored procedure name and URL path template.\n
data (str). the command initiated by this dependency call. Examples are SQL statement and HTTP URL with all query parameters.\n
type (str). the dependency type name. Low cardinality value for logical grouping of dependencies and interpretation of other fields like commandName and resultCode. Examples are SQL, Azure table, and HTTP. (default to: None)\n
target (str). the target site of a dependency call. Examples are server name, host address. (default to: None)\n
duration (int). the number of milliseconds that this dependency call lasted. (defaults to: None)\n
success (bool). true if the dependency call ended in success, false otherwise. (defaults to: None)\n
result_code (str). the result code of a dependency call. Examples are SQL error code and HTTP status code. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)\n
id (str). the id for this dependency call. If None, a new uuid will be generated. (defaults to: None)
"""
dependency_data = channel.contracts.RemoteDependencyData()
dependency_data.id = dependency_id or str(uuid.uuid4())
dependency_data.name = name
dependency_data.data = data
dependency_data.type = type
dependency_data.target = target
dependency_data.duration = self.__ms_to_duration(duration)
dependency_data.success = success
dependency_data.result_code = str(result_code) or '200'
if properties:
dependency_data.properties = properties
if measurements:
dependency_data.measurements = measurements
self.track(dependency_data, self._context)
|
python
|
def track_dependency(self, name, data, type=None, target=None, duration=None, success=None, result_code=None, properties=None, measurements=None, dependency_id=None):
"""Sends a single dependency telemetry that was captured for the application.
Args:
name (str). the name of the command initiated with this dependency call. Low cardinality value. Examples are stored procedure name and URL path template.\n
data (str). the command initiated by this dependency call. Examples are SQL statement and HTTP URL with all query parameters.\n
type (str). the dependency type name. Low cardinality value for logical grouping of dependencies and interpretation of other fields like commandName and resultCode. Examples are SQL, Azure table, and HTTP. (default to: None)\n
target (str). the target site of a dependency call. Examples are server name, host address. (default to: None)\n
duration (int). the number of milliseconds that this dependency call lasted. (defaults to: None)\n
success (bool). true if the dependency call ended in success, false otherwise. (defaults to: None)\n
result_code (str). the result code of a dependency call. Examples are SQL error code and HTTP status code. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)\n
id (str). the id for this dependency call. If None, a new uuid will be generated. (defaults to: None)
"""
dependency_data = channel.contracts.RemoteDependencyData()
dependency_data.id = dependency_id or str(uuid.uuid4())
dependency_data.name = name
dependency_data.data = data
dependency_data.type = type
dependency_data.target = target
dependency_data.duration = self.__ms_to_duration(duration)
dependency_data.success = success
dependency_data.result_code = str(result_code) or '200'
if properties:
dependency_data.properties = properties
if measurements:
dependency_data.measurements = measurements
self.track(dependency_data, self._context)
|
[
"def",
"track_dependency",
"(",
"self",
",",
"name",
",",
"data",
",",
"type",
"=",
"None",
",",
"target",
"=",
"None",
",",
"duration",
"=",
"None",
",",
"success",
"=",
"None",
",",
"result_code",
"=",
"None",
",",
"properties",
"=",
"None",
",",
"measurements",
"=",
"None",
",",
"dependency_id",
"=",
"None",
")",
":",
"dependency_data",
"=",
"channel",
".",
"contracts",
".",
"RemoteDependencyData",
"(",
")",
"dependency_data",
".",
"id",
"=",
"dependency_id",
"or",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
"dependency_data",
".",
"name",
"=",
"name",
"dependency_data",
".",
"data",
"=",
"data",
"dependency_data",
".",
"type",
"=",
"type",
"dependency_data",
".",
"target",
"=",
"target",
"dependency_data",
".",
"duration",
"=",
"self",
".",
"__ms_to_duration",
"(",
"duration",
")",
"dependency_data",
".",
"success",
"=",
"success",
"dependency_data",
".",
"result_code",
"=",
"str",
"(",
"result_code",
")",
"or",
"'200'",
"if",
"properties",
":",
"dependency_data",
".",
"properties",
"=",
"properties",
"if",
"measurements",
":",
"dependency_data",
".",
"measurements",
"=",
"measurements",
"self",
".",
"track",
"(",
"dependency_data",
",",
"self",
".",
"_context",
")"
] |
Sends a single dependency telemetry that was captured for the application.
Args:
name (str). the name of the command initiated with this dependency call. Low cardinality value. Examples are stored procedure name and URL path template.\n
data (str). the command initiated by this dependency call. Examples are SQL statement and HTTP URL with all query parameters.\n
type (str). the dependency type name. Low cardinality value for logical grouping of dependencies and interpretation of other fields like commandName and resultCode. Examples are SQL, Azure table, and HTTP. (default to: None)\n
target (str). the target site of a dependency call. Examples are server name, host address. (default to: None)\n
duration (int). the number of milliseconds that this dependency call lasted. (defaults to: None)\n
success (bool). true if the dependency call ended in success, false otherwise. (defaults to: None)\n
result_code (str). the result code of a dependency call. Examples are SQL error code and HTTP status code. (defaults to: None)\n
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)\n
id (str). the id for this dependency call. If None, a new uuid will be generated. (defaults to: None)
|
[
"Sends",
"a",
"single",
"dependency",
"telemetry",
"that",
"was",
"captured",
"for",
"the",
"application",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/TelemetryClient.py#L224-L253
|
17,713
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/django/common.py
|
dummy_client
|
def dummy_client(reason):
"""Creates a dummy channel so even if we're not logging telemetry, we can still send
along the real object to things that depend on it to exist"""
sender = applicationinsights.channel.NullSender()
queue = applicationinsights.channel.SynchronousQueue(sender)
channel = applicationinsights.channel.TelemetryChannel(None, queue)
return applicationinsights.TelemetryClient("00000000-0000-0000-0000-000000000000", channel)
|
python
|
def dummy_client(reason):
"""Creates a dummy channel so even if we're not logging telemetry, we can still send
along the real object to things that depend on it to exist"""
sender = applicationinsights.channel.NullSender()
queue = applicationinsights.channel.SynchronousQueue(sender)
channel = applicationinsights.channel.TelemetryChannel(None, queue)
return applicationinsights.TelemetryClient("00000000-0000-0000-0000-000000000000", channel)
|
[
"def",
"dummy_client",
"(",
"reason",
")",
":",
"sender",
"=",
"applicationinsights",
".",
"channel",
".",
"NullSender",
"(",
")",
"queue",
"=",
"applicationinsights",
".",
"channel",
".",
"SynchronousQueue",
"(",
"sender",
")",
"channel",
"=",
"applicationinsights",
".",
"channel",
".",
"TelemetryChannel",
"(",
"None",
",",
"queue",
")",
"return",
"applicationinsights",
".",
"TelemetryClient",
"(",
"\"00000000-0000-0000-0000-000000000000\"",
",",
"channel",
")"
] |
Creates a dummy channel so even if we're not logging telemetry, we can still send
along the real object to things that depend on it to exist
|
[
"Creates",
"a",
"dummy",
"channel",
"so",
"even",
"if",
"we",
"re",
"not",
"logging",
"telemetry",
"we",
"can",
"still",
"send",
"along",
"the",
"real",
"object",
"to",
"things",
"that",
"depend",
"on",
"it",
"to",
"exist"
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/django/common.py#L75-L82
|
17,714
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/exceptions/enable.py
|
enable
|
def enable(instrumentation_key, *args, **kwargs):
"""Enables the automatic collection of unhandled exceptions. Captured exceptions will be sent to the Application
Insights service before being re-thrown. Multiple calls to this function with different instrumentation keys result
in multiple instances being submitted, one for each key.
.. code:: python
from applicationinsights.exceptions import enable
# set up exception capture
enable('<YOUR INSTRUMENTATION KEY GOES HERE>')
# raise an exception (this will be sent to the Application Insights service as an exception telemetry object)
raise Exception('Boom!')
Args:
instrumentation_key (str). the instrumentation key to use while sending telemetry to the service.
"""
if not instrumentation_key:
raise Exception('Instrumentation key was required but not provided')
global original_excepthook
global telemetry_channel
telemetry_channel = kwargs.get('telemetry_channel')
if not original_excepthook:
original_excepthook = sys.excepthook
sys.excepthook = intercept_excepthook
if instrumentation_key not in enabled_instrumentation_keys:
enabled_instrumentation_keys.append(instrumentation_key)
|
python
|
def enable(instrumentation_key, *args, **kwargs):
"""Enables the automatic collection of unhandled exceptions. Captured exceptions will be sent to the Application
Insights service before being re-thrown. Multiple calls to this function with different instrumentation keys result
in multiple instances being submitted, one for each key.
.. code:: python
from applicationinsights.exceptions import enable
# set up exception capture
enable('<YOUR INSTRUMENTATION KEY GOES HERE>')
# raise an exception (this will be sent to the Application Insights service as an exception telemetry object)
raise Exception('Boom!')
Args:
instrumentation_key (str). the instrumentation key to use while sending telemetry to the service.
"""
if not instrumentation_key:
raise Exception('Instrumentation key was required but not provided')
global original_excepthook
global telemetry_channel
telemetry_channel = kwargs.get('telemetry_channel')
if not original_excepthook:
original_excepthook = sys.excepthook
sys.excepthook = intercept_excepthook
if instrumentation_key not in enabled_instrumentation_keys:
enabled_instrumentation_keys.append(instrumentation_key)
|
[
"def",
"enable",
"(",
"instrumentation_key",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"instrumentation_key",
":",
"raise",
"Exception",
"(",
"'Instrumentation key was required but not provided'",
")",
"global",
"original_excepthook",
"global",
"telemetry_channel",
"telemetry_channel",
"=",
"kwargs",
".",
"get",
"(",
"'telemetry_channel'",
")",
"if",
"not",
"original_excepthook",
":",
"original_excepthook",
"=",
"sys",
".",
"excepthook",
"sys",
".",
"excepthook",
"=",
"intercept_excepthook",
"if",
"instrumentation_key",
"not",
"in",
"enabled_instrumentation_keys",
":",
"enabled_instrumentation_keys",
".",
"append",
"(",
"instrumentation_key",
")"
] |
Enables the automatic collection of unhandled exceptions. Captured exceptions will be sent to the Application
Insights service before being re-thrown. Multiple calls to this function with different instrumentation keys result
in multiple instances being submitted, one for each key.
.. code:: python
from applicationinsights.exceptions import enable
# set up exception capture
enable('<YOUR INSTRUMENTATION KEY GOES HERE>')
# raise an exception (this will be sent to the Application Insights service as an exception telemetry object)
raise Exception('Boom!')
Args:
instrumentation_key (str). the instrumentation key to use while sending telemetry to the service.
|
[
"Enables",
"the",
"automatic",
"collection",
"of",
"unhandled",
"exceptions",
".",
"Captured",
"exceptions",
"will",
"be",
"sent",
"to",
"the",
"Application",
"Insights",
"service",
"before",
"being",
"re",
"-",
"thrown",
".",
"Multiple",
"calls",
"to",
"this",
"function",
"with",
"different",
"instrumentation",
"keys",
"result",
"in",
"multiple",
"instances",
"being",
"submitted",
"one",
"for",
"each",
"key",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/exceptions/enable.py#L8-L35
|
17,715
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/flask/ext.py
|
AppInsights.init_app
|
def init_app(self, app):
"""
Initializes the extension for the provided Flask application.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
self._key = app.config.get(CONF_KEY) or getenv(CONF_KEY)
if not self._key:
return
self._endpoint_uri = app.config.get(CONF_ENDPOINT_URI)
sender = AsynchronousSender(self._endpoint_uri)
queue = AsynchronousQueue(sender)
self._channel = TelemetryChannel(None, queue)
self._init_request_logging(app)
self._init_trace_logging(app)
self._init_exception_logging(app)
|
python
|
def init_app(self, app):
"""
Initializes the extension for the provided Flask application.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
self._key = app.config.get(CONF_KEY) or getenv(CONF_KEY)
if not self._key:
return
self._endpoint_uri = app.config.get(CONF_ENDPOINT_URI)
sender = AsynchronousSender(self._endpoint_uri)
queue = AsynchronousQueue(sender)
self._channel = TelemetryChannel(None, queue)
self._init_request_logging(app)
self._init_trace_logging(app)
self._init_exception_logging(app)
|
[
"def",
"init_app",
"(",
"self",
",",
"app",
")",
":",
"self",
".",
"_key",
"=",
"app",
".",
"config",
".",
"get",
"(",
"CONF_KEY",
")",
"or",
"getenv",
"(",
"CONF_KEY",
")",
"if",
"not",
"self",
".",
"_key",
":",
"return",
"self",
".",
"_endpoint_uri",
"=",
"app",
".",
"config",
".",
"get",
"(",
"CONF_ENDPOINT_URI",
")",
"sender",
"=",
"AsynchronousSender",
"(",
"self",
".",
"_endpoint_uri",
")",
"queue",
"=",
"AsynchronousQueue",
"(",
"sender",
")",
"self",
".",
"_channel",
"=",
"TelemetryChannel",
"(",
"None",
",",
"queue",
")",
"self",
".",
"_init_request_logging",
"(",
"app",
")",
"self",
".",
"_init_trace_logging",
"(",
"app",
")",
"self",
".",
"_init_exception_logging",
"(",
"app",
")"
] |
Initializes the extension for the provided Flask application.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
|
[
"Initializes",
"the",
"extension",
"for",
"the",
"provided",
"Flask",
"application",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/flask/ext.py#L87-L107
|
17,716
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/flask/ext.py
|
AppInsights._init_request_logging
|
def _init_request_logging(self, app):
"""
Sets up request logging unless ``APPINSIGHTS_DISABLE_REQUEST_LOGGING``
is set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
enabled = not app.config.get(CONF_DISABLE_REQUEST_LOGGING, False)
if not enabled:
return
self._requests_middleware = WSGIApplication(
self._key, app.wsgi_app, telemetry_channel=self._channel)
app.wsgi_app = self._requests_middleware
|
python
|
def _init_request_logging(self, app):
"""
Sets up request logging unless ``APPINSIGHTS_DISABLE_REQUEST_LOGGING``
is set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
enabled = not app.config.get(CONF_DISABLE_REQUEST_LOGGING, False)
if not enabled:
return
self._requests_middleware = WSGIApplication(
self._key, app.wsgi_app, telemetry_channel=self._channel)
app.wsgi_app = self._requests_middleware
|
[
"def",
"_init_request_logging",
"(",
"self",
",",
"app",
")",
":",
"enabled",
"=",
"not",
"app",
".",
"config",
".",
"get",
"(",
"CONF_DISABLE_REQUEST_LOGGING",
",",
"False",
")",
"if",
"not",
"enabled",
":",
"return",
"self",
".",
"_requests_middleware",
"=",
"WSGIApplication",
"(",
"self",
".",
"_key",
",",
"app",
".",
"wsgi_app",
",",
"telemetry_channel",
"=",
"self",
".",
"_channel",
")",
"app",
".",
"wsgi_app",
"=",
"self",
".",
"_requests_middleware"
] |
Sets up request logging unless ``APPINSIGHTS_DISABLE_REQUEST_LOGGING``
is set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
|
[
"Sets",
"up",
"request",
"logging",
"unless",
"APPINSIGHTS_DISABLE_REQUEST_LOGGING",
"is",
"set",
"in",
"the",
"Flask",
"config",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/flask/ext.py#L119-L135
|
17,717
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/flask/ext.py
|
AppInsights._init_trace_logging
|
def _init_trace_logging(self, app):
"""
Sets up trace logging unless ``APPINSIGHTS_DISABLE_TRACE_LOGGING`` is
set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
enabled = not app.config.get(CONF_DISABLE_TRACE_LOGGING, False)
if not enabled:
return
self._trace_log_handler = LoggingHandler(
self._key, telemetry_channel=self._channel)
app.logger.addHandler(self._trace_log_handler)
|
python
|
def _init_trace_logging(self, app):
"""
Sets up trace logging unless ``APPINSIGHTS_DISABLE_TRACE_LOGGING`` is
set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
enabled = not app.config.get(CONF_DISABLE_TRACE_LOGGING, False)
if not enabled:
return
self._trace_log_handler = LoggingHandler(
self._key, telemetry_channel=self._channel)
app.logger.addHandler(self._trace_log_handler)
|
[
"def",
"_init_trace_logging",
"(",
"self",
",",
"app",
")",
":",
"enabled",
"=",
"not",
"app",
".",
"config",
".",
"get",
"(",
"CONF_DISABLE_TRACE_LOGGING",
",",
"False",
")",
"if",
"not",
"enabled",
":",
"return",
"self",
".",
"_trace_log_handler",
"=",
"LoggingHandler",
"(",
"self",
".",
"_key",
",",
"telemetry_channel",
"=",
"self",
".",
"_channel",
")",
"app",
".",
"logger",
".",
"addHandler",
"(",
"self",
".",
"_trace_log_handler",
")"
] |
Sets up trace logging unless ``APPINSIGHTS_DISABLE_TRACE_LOGGING`` is
set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
|
[
"Sets",
"up",
"trace",
"logging",
"unless",
"APPINSIGHTS_DISABLE_TRACE_LOGGING",
"is",
"set",
"in",
"the",
"Flask",
"config",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/flask/ext.py#L137-L153
|
17,718
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/flask/ext.py
|
AppInsights._init_exception_logging
|
def _init_exception_logging(self, app):
"""
Sets up exception logging unless ``APPINSIGHTS_DISABLE_EXCEPTION_LOGGING``
is set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
enabled = not app.config.get(CONF_DISABLE_EXCEPTION_LOGGING, False)
if not enabled:
return
exception_telemetry_client = TelemetryClient(
self._key, telemetry_channel=self._channel)
@app.errorhandler(Exception)
def exception_handler(exception):
if HTTPException and isinstance(exception, HTTPException):
return exception
try:
raise exception
except Exception:
exception_telemetry_client.track_exception()
finally:
raise exception
self._exception_telemetry_client = exception_telemetry_client
|
python
|
def _init_exception_logging(self, app):
"""
Sets up exception logging unless ``APPINSIGHTS_DISABLE_EXCEPTION_LOGGING``
is set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
"""
enabled = not app.config.get(CONF_DISABLE_EXCEPTION_LOGGING, False)
if not enabled:
return
exception_telemetry_client = TelemetryClient(
self._key, telemetry_channel=self._channel)
@app.errorhandler(Exception)
def exception_handler(exception):
if HTTPException and isinstance(exception, HTTPException):
return exception
try:
raise exception
except Exception:
exception_telemetry_client.track_exception()
finally:
raise exception
self._exception_telemetry_client = exception_telemetry_client
|
[
"def",
"_init_exception_logging",
"(",
"self",
",",
"app",
")",
":",
"enabled",
"=",
"not",
"app",
".",
"config",
".",
"get",
"(",
"CONF_DISABLE_EXCEPTION_LOGGING",
",",
"False",
")",
"if",
"not",
"enabled",
":",
"return",
"exception_telemetry_client",
"=",
"TelemetryClient",
"(",
"self",
".",
"_key",
",",
"telemetry_channel",
"=",
"self",
".",
"_channel",
")",
"@",
"app",
".",
"errorhandler",
"(",
"Exception",
")",
"def",
"exception_handler",
"(",
"exception",
")",
":",
"if",
"HTTPException",
"and",
"isinstance",
"(",
"exception",
",",
"HTTPException",
")",
":",
"return",
"exception",
"try",
":",
"raise",
"exception",
"except",
"Exception",
":",
"exception_telemetry_client",
".",
"track_exception",
"(",
")",
"finally",
":",
"raise",
"exception",
"self",
".",
"_exception_telemetry_client",
"=",
"exception_telemetry_client"
] |
Sets up exception logging unless ``APPINSIGHTS_DISABLE_EXCEPTION_LOGGING``
is set in the Flask config.
Args:
app (flask.Flask). the Flask application for which to initialize the extension.
|
[
"Sets",
"up",
"exception",
"logging",
"unless",
"APPINSIGHTS_DISABLE_EXCEPTION_LOGGING",
"is",
"set",
"in",
"the",
"Flask",
"config",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/flask/ext.py#L155-L183
|
17,719
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/flask/ext.py
|
AppInsights.flush
|
def flush(self):
"""Flushes the queued up telemetry to the service.
"""
if self._requests_middleware:
self._requests_middleware.flush()
if self._trace_log_handler:
self._trace_log_handler.flush()
if self._exception_telemetry_client:
self._exception_telemetry_client.flush()
|
python
|
def flush(self):
"""Flushes the queued up telemetry to the service.
"""
if self._requests_middleware:
self._requests_middleware.flush()
if self._trace_log_handler:
self._trace_log_handler.flush()
if self._exception_telemetry_client:
self._exception_telemetry_client.flush()
|
[
"def",
"flush",
"(",
"self",
")",
":",
"if",
"self",
".",
"_requests_middleware",
":",
"self",
".",
"_requests_middleware",
".",
"flush",
"(",
")",
"if",
"self",
".",
"_trace_log_handler",
":",
"self",
".",
"_trace_log_handler",
".",
"flush",
"(",
")",
"if",
"self",
".",
"_exception_telemetry_client",
":",
"self",
".",
"_exception_telemetry_client",
".",
"flush",
"(",
")"
] |
Flushes the queued up telemetry to the service.
|
[
"Flushes",
"the",
"queued",
"up",
"telemetry",
"to",
"the",
"service",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/flask/ext.py#L185-L195
|
17,720
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/channel/QueueBase.py
|
QueueBase.get
|
def get(self):
"""Gets a single item from the queue and returns it. If the queue is empty, this method will return None.
Returns:
:class:`contracts.Envelope`. a telemetry envelope object or None if the queue is empty.
"""
try:
item = self._queue.get_nowait()
except (Empty, PersistEmpty):
return None
if self._persistence_path:
self._queue.task_done()
return item
|
python
|
def get(self):
"""Gets a single item from the queue and returns it. If the queue is empty, this method will return None.
Returns:
:class:`contracts.Envelope`. a telemetry envelope object or None if the queue is empty.
"""
try:
item = self._queue.get_nowait()
except (Empty, PersistEmpty):
return None
if self._persistence_path:
self._queue.task_done()
return item
|
[
"def",
"get",
"(",
"self",
")",
":",
"try",
":",
"item",
"=",
"self",
".",
"_queue",
".",
"get_nowait",
"(",
")",
"except",
"(",
"Empty",
",",
"PersistEmpty",
")",
":",
"return",
"None",
"if",
"self",
".",
"_persistence_path",
":",
"self",
".",
"_queue",
".",
"task_done",
"(",
")",
"return",
"item"
] |
Gets a single item from the queue and returns it. If the queue is empty, this method will return None.
Returns:
:class:`contracts.Envelope`. a telemetry envelope object or None if the queue is empty.
|
[
"Gets",
"a",
"single",
"item",
"from",
"the",
"queue",
"and",
"returns",
"it",
".",
"If",
"the",
"queue",
"is",
"empty",
"this",
"method",
"will",
"return",
"None",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/channel/QueueBase.py#L92-L106
|
17,721
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/logging/LoggingHandler.py
|
enable
|
def enable(instrumentation_key, *args, **kwargs):
"""Enables the Application Insights logging handler for the root logger for the supplied instrumentation key.
Multiple calls to this function with different instrumentation keys result in multiple handler instances.
.. code:: python
import logging
from applicationinsights.logging import enable
# set up logging
enable('<YOUR INSTRUMENTATION KEY GOES HERE>')
# log something (this will be sent to the Application Insights service as a trace)
logging.info('This is a message')
# logging shutdown will cause a flush of all un-sent telemetry items
# alternatively set up an async channel via enable('<YOUR INSTRUMENTATION KEY GOES HERE>', async_=True)
Args:
instrumentation_key (str). the instrumentation key to use while sending telemetry to the service.
Keyword Args:
async_ (bool): Whether to use an async channel for the telemetry. Defaults to False.
endpoint (str): The custom endpoint to which to send the telemetry. Defaults to None.
level (Union[int, str]): The level to set for the logger. Defaults to INFO.
Returns:
:class:`ApplicationInsightsHandler`. the newly created or existing handler.
"""
if not instrumentation_key:
raise Exception('Instrumentation key was required but not provided')
if instrumentation_key in enabled_instrumentation_keys:
logging.getLogger().removeHandler(enabled_instrumentation_keys[instrumentation_key])
async_ = kwargs.pop('async_', False)
endpoint = kwargs.pop('endpoint', None)
telemetry_channel = kwargs.get('telemetry_channel')
if telemetry_channel and async_:
raise Exception('Incompatible arguments async_ and telemetry_channel')
if telemetry_channel and endpoint:
raise Exception('Incompatible arguments endpoint and telemetry_channel')
if not telemetry_channel:
if async_:
sender, queue = AsynchronousSender, AsynchronousQueue
else:
sender, queue = SynchronousSender, SynchronousQueue
kwargs['telemetry_channel'] = TelemetryChannel(queue=queue(sender(endpoint)))
log_level = kwargs.pop('level', logging.INFO)
handler = LoggingHandler(instrumentation_key, *args, **kwargs)
handler.setLevel(log_level)
enabled_instrumentation_keys[instrumentation_key] = handler
logging.getLogger().addHandler(handler)
return handler
|
python
|
def enable(instrumentation_key, *args, **kwargs):
"""Enables the Application Insights logging handler for the root logger for the supplied instrumentation key.
Multiple calls to this function with different instrumentation keys result in multiple handler instances.
.. code:: python
import logging
from applicationinsights.logging import enable
# set up logging
enable('<YOUR INSTRUMENTATION KEY GOES HERE>')
# log something (this will be sent to the Application Insights service as a trace)
logging.info('This is a message')
# logging shutdown will cause a flush of all un-sent telemetry items
# alternatively set up an async channel via enable('<YOUR INSTRUMENTATION KEY GOES HERE>', async_=True)
Args:
instrumentation_key (str). the instrumentation key to use while sending telemetry to the service.
Keyword Args:
async_ (bool): Whether to use an async channel for the telemetry. Defaults to False.
endpoint (str): The custom endpoint to which to send the telemetry. Defaults to None.
level (Union[int, str]): The level to set for the logger. Defaults to INFO.
Returns:
:class:`ApplicationInsightsHandler`. the newly created or existing handler.
"""
if not instrumentation_key:
raise Exception('Instrumentation key was required but not provided')
if instrumentation_key in enabled_instrumentation_keys:
logging.getLogger().removeHandler(enabled_instrumentation_keys[instrumentation_key])
async_ = kwargs.pop('async_', False)
endpoint = kwargs.pop('endpoint', None)
telemetry_channel = kwargs.get('telemetry_channel')
if telemetry_channel and async_:
raise Exception('Incompatible arguments async_ and telemetry_channel')
if telemetry_channel and endpoint:
raise Exception('Incompatible arguments endpoint and telemetry_channel')
if not telemetry_channel:
if async_:
sender, queue = AsynchronousSender, AsynchronousQueue
else:
sender, queue = SynchronousSender, SynchronousQueue
kwargs['telemetry_channel'] = TelemetryChannel(queue=queue(sender(endpoint)))
log_level = kwargs.pop('level', logging.INFO)
handler = LoggingHandler(instrumentation_key, *args, **kwargs)
handler.setLevel(log_level)
enabled_instrumentation_keys[instrumentation_key] = handler
logging.getLogger().addHandler(handler)
return handler
|
[
"def",
"enable",
"(",
"instrumentation_key",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"instrumentation_key",
":",
"raise",
"Exception",
"(",
"'Instrumentation key was required but not provided'",
")",
"if",
"instrumentation_key",
"in",
"enabled_instrumentation_keys",
":",
"logging",
".",
"getLogger",
"(",
")",
".",
"removeHandler",
"(",
"enabled_instrumentation_keys",
"[",
"instrumentation_key",
"]",
")",
"async_",
"=",
"kwargs",
".",
"pop",
"(",
"'async_'",
",",
"False",
")",
"endpoint",
"=",
"kwargs",
".",
"pop",
"(",
"'endpoint'",
",",
"None",
")",
"telemetry_channel",
"=",
"kwargs",
".",
"get",
"(",
"'telemetry_channel'",
")",
"if",
"telemetry_channel",
"and",
"async_",
":",
"raise",
"Exception",
"(",
"'Incompatible arguments async_ and telemetry_channel'",
")",
"if",
"telemetry_channel",
"and",
"endpoint",
":",
"raise",
"Exception",
"(",
"'Incompatible arguments endpoint and telemetry_channel'",
")",
"if",
"not",
"telemetry_channel",
":",
"if",
"async_",
":",
"sender",
",",
"queue",
"=",
"AsynchronousSender",
",",
"AsynchronousQueue",
"else",
":",
"sender",
",",
"queue",
"=",
"SynchronousSender",
",",
"SynchronousQueue",
"kwargs",
"[",
"'telemetry_channel'",
"]",
"=",
"TelemetryChannel",
"(",
"queue",
"=",
"queue",
"(",
"sender",
"(",
"endpoint",
")",
")",
")",
"log_level",
"=",
"kwargs",
".",
"pop",
"(",
"'level'",
",",
"logging",
".",
"INFO",
")",
"handler",
"=",
"LoggingHandler",
"(",
"instrumentation_key",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"handler",
".",
"setLevel",
"(",
"log_level",
")",
"enabled_instrumentation_keys",
"[",
"instrumentation_key",
"]",
"=",
"handler",
"logging",
".",
"getLogger",
"(",
")",
".",
"addHandler",
"(",
"handler",
")",
"return",
"handler"
] |
Enables the Application Insights logging handler for the root logger for the supplied instrumentation key.
Multiple calls to this function with different instrumentation keys result in multiple handler instances.
.. code:: python
import logging
from applicationinsights.logging import enable
# set up logging
enable('<YOUR INSTRUMENTATION KEY GOES HERE>')
# log something (this will be sent to the Application Insights service as a trace)
logging.info('This is a message')
# logging shutdown will cause a flush of all un-sent telemetry items
# alternatively set up an async channel via enable('<YOUR INSTRUMENTATION KEY GOES HERE>', async_=True)
Args:
instrumentation_key (str). the instrumentation key to use while sending telemetry to the service.
Keyword Args:
async_ (bool): Whether to use an async channel for the telemetry. Defaults to False.
endpoint (str): The custom endpoint to which to send the telemetry. Defaults to None.
level (Union[int, str]): The level to set for the logger. Defaults to INFO.
Returns:
:class:`ApplicationInsightsHandler`. the newly created or existing handler.
|
[
"Enables",
"the",
"Application",
"Insights",
"logging",
"handler",
"for",
"the",
"root",
"logger",
"for",
"the",
"supplied",
"instrumentation",
"key",
".",
"Multiple",
"calls",
"to",
"this",
"function",
"with",
"different",
"instrumentation",
"keys",
"result",
"in",
"multiple",
"handler",
"instances",
"."
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/logging/LoggingHandler.py#L10-L61
|
17,722
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/channel/AsynchronousSender.py
|
AsynchronousSender.start
|
def start(self):
"""Starts a new sender thread if none is not already there
"""
with self._lock_send_remaining_time:
if self._send_remaining_time <= 0.0:
local_send_interval = self._send_interval
if self._send_interval < 0.1:
local_send_interval = 0.1
self._send_remaining_time = self._send_time
if self._send_remaining_time < local_send_interval:
self._send_remaining_time = local_send_interval
thread = Thread(target=self._run)
thread.daemon = True
thread.start()
|
python
|
def start(self):
"""Starts a new sender thread if none is not already there
"""
with self._lock_send_remaining_time:
if self._send_remaining_time <= 0.0:
local_send_interval = self._send_interval
if self._send_interval < 0.1:
local_send_interval = 0.1
self._send_remaining_time = self._send_time
if self._send_remaining_time < local_send_interval:
self._send_remaining_time = local_send_interval
thread = Thread(target=self._run)
thread.daemon = True
thread.start()
|
[
"def",
"start",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock_send_remaining_time",
":",
"if",
"self",
".",
"_send_remaining_time",
"<=",
"0.0",
":",
"local_send_interval",
"=",
"self",
".",
"_send_interval",
"if",
"self",
".",
"_send_interval",
"<",
"0.1",
":",
"local_send_interval",
"=",
"0.1",
"self",
".",
"_send_remaining_time",
"=",
"self",
".",
"_send_time",
"if",
"self",
".",
"_send_remaining_time",
"<",
"local_send_interval",
":",
"self",
".",
"_send_remaining_time",
"=",
"local_send_interval",
"thread",
"=",
"Thread",
"(",
"target",
"=",
"self",
".",
"_run",
")",
"thread",
".",
"daemon",
"=",
"True",
"thread",
".",
"start",
"(",
")"
] |
Starts a new sender thread if none is not already there
|
[
"Starts",
"a",
"new",
"sender",
"thread",
"if",
"none",
"is",
"not",
"already",
"there"
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/channel/AsynchronousSender.py#L76-L89
|
17,723
|
Microsoft/ApplicationInsights-Python
|
applicationinsights/channel/TelemetryContext.py
|
device_initialize
|
def device_initialize(self):
""" The device initializer used to assign special properties to all device context objects"""
existing_device_initialize(self)
self.type = 'Other'
self.id = platform.node()
self.os_version = platform.version()
self.locale = locale.getdefaultlocale()[0]
|
python
|
def device_initialize(self):
""" The device initializer used to assign special properties to all device context objects"""
existing_device_initialize(self)
self.type = 'Other'
self.id = platform.node()
self.os_version = platform.version()
self.locale = locale.getdefaultlocale()[0]
|
[
"def",
"device_initialize",
"(",
"self",
")",
":",
"existing_device_initialize",
"(",
"self",
")",
"self",
".",
"type",
"=",
"'Other'",
"self",
".",
"id",
"=",
"platform",
".",
"node",
"(",
")",
"self",
".",
"os_version",
"=",
"platform",
".",
"version",
"(",
")",
"self",
".",
"locale",
"=",
"locale",
".",
"getdefaultlocale",
"(",
")",
"[",
"0",
"]"
] |
The device initializer used to assign special properties to all device context objects
|
[
"The",
"device",
"initializer",
"used",
"to",
"assign",
"special",
"properties",
"to",
"all",
"device",
"context",
"objects"
] |
8452ab7126f9bb6964637d4aa1258c2af17563d6
|
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/channel/TelemetryContext.py#L8-L14
|
17,724
|
hyperledger/indy-crypto
|
wrappers/python/indy_crypto/bls.py
|
Bls.sign
|
def sign(message: bytes, sign_key: SignKey) -> Signature:
"""
Signs the message and returns signature.
:param: message - Message to sign
:param: sign_key - Sign key
:return: Signature
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::sign: >>> message: %r, sign_key: %r", message, sign_key)
c_instance = c_void_p()
do_call('indy_crypto_bls_sign',
message, len(message),
sign_key.c_instance,
byref(c_instance))
res = Signature(c_instance)
logger.debug("Bls::sign: <<< res: %r", res)
return res
|
python
|
def sign(message: bytes, sign_key: SignKey) -> Signature:
"""
Signs the message and returns signature.
:param: message - Message to sign
:param: sign_key - Sign key
:return: Signature
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::sign: >>> message: %r, sign_key: %r", message, sign_key)
c_instance = c_void_p()
do_call('indy_crypto_bls_sign',
message, len(message),
sign_key.c_instance,
byref(c_instance))
res = Signature(c_instance)
logger.debug("Bls::sign: <<< res: %r", res)
return res
|
[
"def",
"sign",
"(",
"message",
":",
"bytes",
",",
"sign_key",
":",
"SignKey",
")",
"->",
"Signature",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"debug",
"(",
"\"Bls::sign: >>> message: %r, sign_key: %r\"",
",",
"message",
",",
"sign_key",
")",
"c_instance",
"=",
"c_void_p",
"(",
")",
"do_call",
"(",
"'indy_crypto_bls_sign'",
",",
"message",
",",
"len",
"(",
"message",
")",
",",
"sign_key",
".",
"c_instance",
",",
"byref",
"(",
"c_instance",
")",
")",
"res",
"=",
"Signature",
"(",
"c_instance",
")",
"logger",
".",
"debug",
"(",
"\"Bls::sign: <<< res: %r\"",
",",
"res",
")",
"return",
"res"
] |
Signs the message and returns signature.
:param: message - Message to sign
:param: sign_key - Sign key
:return: Signature
|
[
"Signs",
"the",
"message",
"and",
"returns",
"signature",
"."
] |
1675e29a2a5949b44899553d3d128335cf7a61b3
|
https://github.com/hyperledger/indy-crypto/blob/1675e29a2a5949b44899553d3d128335cf7a61b3/wrappers/python/indy_crypto/bls.py#L229-L250
|
17,725
|
hyperledger/indy-crypto
|
wrappers/python/indy_crypto/bls.py
|
Bls.verify
|
def verify(signature: Signature, message: bytes, ver_key: VerKey, gen: Generator) -> bool:
"""
Verifies the message signature and returns true - if signature valid or false otherwise.
:param: signature - Signature to verify
:param: message - Message to verify
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r", signature, message, ver_key,
gen)
valid = c_bool()
do_call('indy_crypto_bsl_verify',
signature.c_instance,
message, len(message),
ver_key.c_instance,
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify: <<< res: %r", res)
return res
|
python
|
def verify(signature: Signature, message: bytes, ver_key: VerKey, gen: Generator) -> bool:
"""
Verifies the message signature and returns true - if signature valid or false otherwise.
:param: signature - Signature to verify
:param: message - Message to verify
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r", signature, message, ver_key,
gen)
valid = c_bool()
do_call('indy_crypto_bsl_verify',
signature.c_instance,
message, len(message),
ver_key.c_instance,
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify: <<< res: %r", res)
return res
|
[
"def",
"verify",
"(",
"signature",
":",
"Signature",
",",
"message",
":",
"bytes",
",",
"ver_key",
":",
"VerKey",
",",
"gen",
":",
"Generator",
")",
"->",
"bool",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"debug",
"(",
"\"Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r\"",
",",
"signature",
",",
"message",
",",
"ver_key",
",",
"gen",
")",
"valid",
"=",
"c_bool",
"(",
")",
"do_call",
"(",
"'indy_crypto_bsl_verify'",
",",
"signature",
".",
"c_instance",
",",
"message",
",",
"len",
"(",
"message",
")",
",",
"ver_key",
".",
"c_instance",
",",
"gen",
".",
"c_instance",
",",
"byref",
"(",
"valid",
")",
")",
"res",
"=",
"valid",
"logger",
".",
"debug",
"(",
"\"Bls::verify: <<< res: %r\"",
",",
"res",
")",
"return",
"res"
] |
Verifies the message signature and returns true - if signature valid or false otherwise.
:param: signature - Signature to verify
:param: message - Message to verify
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
|
[
"Verifies",
"the",
"message",
"signature",
"and",
"returns",
"true",
"-",
"if",
"signature",
"valid",
"or",
"false",
"otherwise",
"."
] |
1675e29a2a5949b44899553d3d128335cf7a61b3
|
https://github.com/hyperledger/indy-crypto/blob/1675e29a2a5949b44899553d3d128335cf7a61b3/wrappers/python/indy_crypto/bls.py#L253-L278
|
17,726
|
hyperledger/indy-crypto
|
wrappers/python/indy_crypto/bls.py
|
Bls.verify_pop
|
def verify_pop(pop: ProofOfPossession, ver_key: VerKey, gen: Generator) -> bool:
"""
Verifies the proof of possession and returns true - if signature valid or false otherwise.
:param: pop - Proof of possession
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify_pop: >>> pop: %r, ver_key: %r, gen: %r",
pop,
ver_key,
gen)
valid = c_bool()
do_call('indy_crypto_bsl_verify_pop',
pop.c_instance,
ver_key.c_instance,
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify_pop: <<< res: %r", res)
return res
|
python
|
def verify_pop(pop: ProofOfPossession, ver_key: VerKey, gen: Generator) -> bool:
"""
Verifies the proof of possession and returns true - if signature valid or false otherwise.
:param: pop - Proof of possession
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify_pop: >>> pop: %r, ver_key: %r, gen: %r",
pop,
ver_key,
gen)
valid = c_bool()
do_call('indy_crypto_bsl_verify_pop',
pop.c_instance,
ver_key.c_instance,
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify_pop: <<< res: %r", res)
return res
|
[
"def",
"verify_pop",
"(",
"pop",
":",
"ProofOfPossession",
",",
"ver_key",
":",
"VerKey",
",",
"gen",
":",
"Generator",
")",
"->",
"bool",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"debug",
"(",
"\"Bls::verify_pop: >>> pop: %r, ver_key: %r, gen: %r\"",
",",
"pop",
",",
"ver_key",
",",
"gen",
")",
"valid",
"=",
"c_bool",
"(",
")",
"do_call",
"(",
"'indy_crypto_bsl_verify_pop'",
",",
"pop",
".",
"c_instance",
",",
"ver_key",
".",
"c_instance",
",",
"gen",
".",
"c_instance",
",",
"byref",
"(",
"valid",
")",
")",
"res",
"=",
"valid",
"logger",
".",
"debug",
"(",
"\"Bls::verify_pop: <<< res: %r\"",
",",
"res",
")",
"return",
"res"
] |
Verifies the proof of possession and returns true - if signature valid or false otherwise.
:param: pop - Proof of possession
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
|
[
"Verifies",
"the",
"proof",
"of",
"possession",
"and",
"returns",
"true",
"-",
"if",
"signature",
"valid",
"or",
"false",
"otherwise",
"."
] |
1675e29a2a5949b44899553d3d128335cf7a61b3
|
https://github.com/hyperledger/indy-crypto/blob/1675e29a2a5949b44899553d3d128335cf7a61b3/wrappers/python/indy_crypto/bls.py#L281-L306
|
17,727
|
hyperledger/indy-crypto
|
wrappers/python/indy_crypto/bls.py
|
Bls.verify_multi_sig
|
def verify_multi_sig(multi_sig: MultiSignature, message: bytes, ver_keys: [VerKey], gen: Generator) -> bool:
"""
Verifies the message multi signature and returns true - if signature valid or false otherwise.
:param: multi_sig - Multi signature to verify
:param: message - Message to verify
:param: ver_keys - List of verification keys
:param: gen - Generator point
:return: true if multi signature valid.
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify_multi_sig: >>> multi_sig: %r, message: %r, ver_keys: %r, gen: %r",
multi_sig, message, ver_keys, gen)
# noinspection PyCallingNonCallable,PyTypeChecker
ver_key_c_instances = (c_void_p * len(ver_keys))()
for i in range(len(ver_keys)):
ver_key_c_instances[i] = ver_keys[i].c_instance
valid = c_bool()
do_call('indy_crypto_bls_verify_multi_sig',
multi_sig.c_instance,
message, len(message),
ver_key_c_instances, len(ver_keys),
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify_multi_sig: <<< res: %r", res)
return res
|
python
|
def verify_multi_sig(multi_sig: MultiSignature, message: bytes, ver_keys: [VerKey], gen: Generator) -> bool:
"""
Verifies the message multi signature and returns true - if signature valid or false otherwise.
:param: multi_sig - Multi signature to verify
:param: message - Message to verify
:param: ver_keys - List of verification keys
:param: gen - Generator point
:return: true if multi signature valid.
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify_multi_sig: >>> multi_sig: %r, message: %r, ver_keys: %r, gen: %r",
multi_sig, message, ver_keys, gen)
# noinspection PyCallingNonCallable,PyTypeChecker
ver_key_c_instances = (c_void_p * len(ver_keys))()
for i in range(len(ver_keys)):
ver_key_c_instances[i] = ver_keys[i].c_instance
valid = c_bool()
do_call('indy_crypto_bls_verify_multi_sig',
multi_sig.c_instance,
message, len(message),
ver_key_c_instances, len(ver_keys),
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify_multi_sig: <<< res: %r", res)
return res
|
[
"def",
"verify_multi_sig",
"(",
"multi_sig",
":",
"MultiSignature",
",",
"message",
":",
"bytes",
",",
"ver_keys",
":",
"[",
"VerKey",
"]",
",",
"gen",
":",
"Generator",
")",
"->",
"bool",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"debug",
"(",
"\"Bls::verify_multi_sig: >>> multi_sig: %r, message: %r, ver_keys: %r, gen: %r\"",
",",
"multi_sig",
",",
"message",
",",
"ver_keys",
",",
"gen",
")",
"# noinspection PyCallingNonCallable,PyTypeChecker",
"ver_key_c_instances",
"=",
"(",
"c_void_p",
"*",
"len",
"(",
"ver_keys",
")",
")",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"ver_keys",
")",
")",
":",
"ver_key_c_instances",
"[",
"i",
"]",
"=",
"ver_keys",
"[",
"i",
"]",
".",
"c_instance",
"valid",
"=",
"c_bool",
"(",
")",
"do_call",
"(",
"'indy_crypto_bls_verify_multi_sig'",
",",
"multi_sig",
".",
"c_instance",
",",
"message",
",",
"len",
"(",
"message",
")",
",",
"ver_key_c_instances",
",",
"len",
"(",
"ver_keys",
")",
",",
"gen",
".",
"c_instance",
",",
"byref",
"(",
"valid",
")",
")",
"res",
"=",
"valid",
"logger",
".",
"debug",
"(",
"\"Bls::verify_multi_sig: <<< res: %r\"",
",",
"res",
")",
"return",
"res"
] |
Verifies the message multi signature and returns true - if signature valid or false otherwise.
:param: multi_sig - Multi signature to verify
:param: message - Message to verify
:param: ver_keys - List of verification keys
:param: gen - Generator point
:return: true if multi signature valid.
|
[
"Verifies",
"the",
"message",
"multi",
"signature",
"and",
"returns",
"true",
"-",
"if",
"signature",
"valid",
"or",
"false",
"otherwise",
"."
] |
1675e29a2a5949b44899553d3d128335cf7a61b3
|
https://github.com/hyperledger/indy-crypto/blob/1675e29a2a5949b44899553d3d128335cf7a61b3/wrappers/python/indy_crypto/bls.py#L309-L340
|
17,728
|
nephila/djangocms-blog
|
djangocms_blog/admin.py
|
PostAdmin.get_urls
|
def get_urls(self):
"""
Customize the modeladmin urls
"""
urls = [
url(r'^publish/([0-9]+)/$', self.admin_site.admin_view(self.publish_post),
name='djangocms_blog_publish_article'),
]
urls.extend(super(PostAdmin, self).get_urls())
return urls
|
python
|
def get_urls(self):
"""
Customize the modeladmin urls
"""
urls = [
url(r'^publish/([0-9]+)/$', self.admin_site.admin_view(self.publish_post),
name='djangocms_blog_publish_article'),
]
urls.extend(super(PostAdmin, self).get_urls())
return urls
|
[
"def",
"get_urls",
"(",
"self",
")",
":",
"urls",
"=",
"[",
"url",
"(",
"r'^publish/([0-9]+)/$'",
",",
"self",
".",
"admin_site",
".",
"admin_view",
"(",
"self",
".",
"publish_post",
")",
",",
"name",
"=",
"'djangocms_blog_publish_article'",
")",
",",
"]",
"urls",
".",
"extend",
"(",
"super",
"(",
"PostAdmin",
",",
"self",
")",
".",
"get_urls",
"(",
")",
")",
"return",
"urls"
] |
Customize the modeladmin urls
|
[
"Customize",
"the",
"modeladmin",
"urls"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/admin.py#L224-L233
|
17,729
|
nephila/djangocms-blog
|
djangocms_blog/admin.py
|
PostAdmin.publish_post
|
def publish_post(self, request, pk):
"""
Admin view to publish a single post
:param request: request
:param pk: primary key of the post to publish
:return: Redirect to the post itself (if found) or fallback urls
"""
language = get_language_from_request(request, check_path=True)
try:
post = Post.objects.get(pk=int(pk))
post.publish = True
post.save()
return HttpResponseRedirect(post.get_absolute_url(language))
except Exception:
try:
return HttpResponseRedirect(request.META['HTTP_REFERER'])
except KeyError:
return HttpResponseRedirect(reverse('djangocms_blog:posts-latest'))
|
python
|
def publish_post(self, request, pk):
"""
Admin view to publish a single post
:param request: request
:param pk: primary key of the post to publish
:return: Redirect to the post itself (if found) or fallback urls
"""
language = get_language_from_request(request, check_path=True)
try:
post = Post.objects.get(pk=int(pk))
post.publish = True
post.save()
return HttpResponseRedirect(post.get_absolute_url(language))
except Exception:
try:
return HttpResponseRedirect(request.META['HTTP_REFERER'])
except KeyError:
return HttpResponseRedirect(reverse('djangocms_blog:posts-latest'))
|
[
"def",
"publish_post",
"(",
"self",
",",
"request",
",",
"pk",
")",
":",
"language",
"=",
"get_language_from_request",
"(",
"request",
",",
"check_path",
"=",
"True",
")",
"try",
":",
"post",
"=",
"Post",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"int",
"(",
"pk",
")",
")",
"post",
".",
"publish",
"=",
"True",
"post",
".",
"save",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"post",
".",
"get_absolute_url",
"(",
"language",
")",
")",
"except",
"Exception",
":",
"try",
":",
"return",
"HttpResponseRedirect",
"(",
"request",
".",
"META",
"[",
"'HTTP_REFERER'",
"]",
")",
"except",
"KeyError",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'djangocms_blog:posts-latest'",
")",
")"
] |
Admin view to publish a single post
:param request: request
:param pk: primary key of the post to publish
:return: Redirect to the post itself (if found) or fallback urls
|
[
"Admin",
"view",
"to",
"publish",
"a",
"single",
"post"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/admin.py#L247-L265
|
17,730
|
nephila/djangocms-blog
|
djangocms_blog/admin.py
|
PostAdmin.has_restricted_sites
|
def has_restricted_sites(self, request):
"""
Whether the current user has permission on one site only
:param request: current request
:return: boolean: user has permission on only one site
"""
sites = self.get_restricted_sites(request)
return sites and sites.count() == 1
|
python
|
def has_restricted_sites(self, request):
"""
Whether the current user has permission on one site only
:param request: current request
:return: boolean: user has permission on only one site
"""
sites = self.get_restricted_sites(request)
return sites and sites.count() == 1
|
[
"def",
"has_restricted_sites",
"(",
"self",
",",
"request",
")",
":",
"sites",
"=",
"self",
".",
"get_restricted_sites",
"(",
"request",
")",
"return",
"sites",
"and",
"sites",
".",
"count",
"(",
")",
"==",
"1"
] |
Whether the current user has permission on one site only
:param request: current request
:return: boolean: user has permission on only one site
|
[
"Whether",
"the",
"current",
"user",
"has",
"permission",
"on",
"one",
"site",
"only"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/admin.py#L267-L275
|
17,731
|
nephila/djangocms-blog
|
djangocms_blog/admin.py
|
PostAdmin.get_restricted_sites
|
def get_restricted_sites(self, request):
"""
The sites on which the user has permission on.
To return the permissions, the method check for the ``get_sites``
method on the user instance (e.g.: ``return request.user.get_sites()``)
which must return the queryset of enabled sites.
If the attribute does not exists, the user is considered enabled
for all the websites.
:param request: current request
:return: boolean or a queryset of available sites
"""
try:
return request.user.get_sites()
except AttributeError: # pragma: no cover
return Site.objects.none()
|
python
|
def get_restricted_sites(self, request):
"""
The sites on which the user has permission on.
To return the permissions, the method check for the ``get_sites``
method on the user instance (e.g.: ``return request.user.get_sites()``)
which must return the queryset of enabled sites.
If the attribute does not exists, the user is considered enabled
for all the websites.
:param request: current request
:return: boolean or a queryset of available sites
"""
try:
return request.user.get_sites()
except AttributeError: # pragma: no cover
return Site.objects.none()
|
[
"def",
"get_restricted_sites",
"(",
"self",
",",
"request",
")",
":",
"try",
":",
"return",
"request",
".",
"user",
".",
"get_sites",
"(",
")",
"except",
"AttributeError",
":",
"# pragma: no cover",
"return",
"Site",
".",
"objects",
".",
"none",
"(",
")"
] |
The sites on which the user has permission on.
To return the permissions, the method check for the ``get_sites``
method on the user instance (e.g.: ``return request.user.get_sites()``)
which must return the queryset of enabled sites.
If the attribute does not exists, the user is considered enabled
for all the websites.
:param request: current request
:return: boolean or a queryset of available sites
|
[
"The",
"sites",
"on",
"which",
"the",
"user",
"has",
"permission",
"on",
"."
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/admin.py#L277-L293
|
17,732
|
nephila/djangocms-blog
|
djangocms_blog/admin.py
|
PostAdmin.get_fieldsets
|
def get_fieldsets(self, request, obj=None):
"""
Customize the fieldsets according to the app settings
:param request: request
:param obj: post
:return: fieldsets configuration
"""
app_config_default = self._app_config_select(request, obj)
if app_config_default is None and request.method == 'GET':
return super(PostAdmin, self).get_fieldsets(request, obj)
if not obj:
config = app_config_default
else:
config = obj.app_config
fsets = deepcopy(self._fieldsets)
if config:
abstract = bool(config.use_abstract)
placeholder = bool(config.use_placeholder)
related = bool(config.use_related)
else:
abstract = get_setting('USE_ABSTRACT')
placeholder = get_setting('USE_PLACEHOLDER')
related = get_setting('USE_RELATED')
if abstract:
fsets[0][1]['fields'].append('abstract')
if not placeholder:
fsets[0][1]['fields'].append('post_text')
if get_setting('MULTISITE') and not self.has_restricted_sites(request):
fsets[1][1]['fields'][0].append('sites')
if request.user.is_superuser:
fsets[1][1]['fields'][0].append('author')
if apps.is_installed('djangocms_blog.liveblog'):
fsets[2][1]['fields'][2].append('enable_liveblog')
filter_function = get_setting('ADMIN_POST_FIELDSET_FILTER')
if related and Post.objects.namespace(config.namespace).active_translations().exists():
fsets[1][1]['fields'][0].append('related')
if callable(filter_function):
fsets = filter_function(fsets, request, obj=obj)
return fsets
|
python
|
def get_fieldsets(self, request, obj=None):
"""
Customize the fieldsets according to the app settings
:param request: request
:param obj: post
:return: fieldsets configuration
"""
app_config_default = self._app_config_select(request, obj)
if app_config_default is None and request.method == 'GET':
return super(PostAdmin, self).get_fieldsets(request, obj)
if not obj:
config = app_config_default
else:
config = obj.app_config
fsets = deepcopy(self._fieldsets)
if config:
abstract = bool(config.use_abstract)
placeholder = bool(config.use_placeholder)
related = bool(config.use_related)
else:
abstract = get_setting('USE_ABSTRACT')
placeholder = get_setting('USE_PLACEHOLDER')
related = get_setting('USE_RELATED')
if abstract:
fsets[0][1]['fields'].append('abstract')
if not placeholder:
fsets[0][1]['fields'].append('post_text')
if get_setting('MULTISITE') and not self.has_restricted_sites(request):
fsets[1][1]['fields'][0].append('sites')
if request.user.is_superuser:
fsets[1][1]['fields'][0].append('author')
if apps.is_installed('djangocms_blog.liveblog'):
fsets[2][1]['fields'][2].append('enable_liveblog')
filter_function = get_setting('ADMIN_POST_FIELDSET_FILTER')
if related and Post.objects.namespace(config.namespace).active_translations().exists():
fsets[1][1]['fields'][0].append('related')
if callable(filter_function):
fsets = filter_function(fsets, request, obj=obj)
return fsets
|
[
"def",
"get_fieldsets",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
")",
":",
"app_config_default",
"=",
"self",
".",
"_app_config_select",
"(",
"request",
",",
"obj",
")",
"if",
"app_config_default",
"is",
"None",
"and",
"request",
".",
"method",
"==",
"'GET'",
":",
"return",
"super",
"(",
"PostAdmin",
",",
"self",
")",
".",
"get_fieldsets",
"(",
"request",
",",
"obj",
")",
"if",
"not",
"obj",
":",
"config",
"=",
"app_config_default",
"else",
":",
"config",
"=",
"obj",
".",
"app_config",
"fsets",
"=",
"deepcopy",
"(",
"self",
".",
"_fieldsets",
")",
"if",
"config",
":",
"abstract",
"=",
"bool",
"(",
"config",
".",
"use_abstract",
")",
"placeholder",
"=",
"bool",
"(",
"config",
".",
"use_placeholder",
")",
"related",
"=",
"bool",
"(",
"config",
".",
"use_related",
")",
"else",
":",
"abstract",
"=",
"get_setting",
"(",
"'USE_ABSTRACT'",
")",
"placeholder",
"=",
"get_setting",
"(",
"'USE_PLACEHOLDER'",
")",
"related",
"=",
"get_setting",
"(",
"'USE_RELATED'",
")",
"if",
"abstract",
":",
"fsets",
"[",
"0",
"]",
"[",
"1",
"]",
"[",
"'fields'",
"]",
".",
"append",
"(",
"'abstract'",
")",
"if",
"not",
"placeholder",
":",
"fsets",
"[",
"0",
"]",
"[",
"1",
"]",
"[",
"'fields'",
"]",
".",
"append",
"(",
"'post_text'",
")",
"if",
"get_setting",
"(",
"'MULTISITE'",
")",
"and",
"not",
"self",
".",
"has_restricted_sites",
"(",
"request",
")",
":",
"fsets",
"[",
"1",
"]",
"[",
"1",
"]",
"[",
"'fields'",
"]",
"[",
"0",
"]",
".",
"append",
"(",
"'sites'",
")",
"if",
"request",
".",
"user",
".",
"is_superuser",
":",
"fsets",
"[",
"1",
"]",
"[",
"1",
"]",
"[",
"'fields'",
"]",
"[",
"0",
"]",
".",
"append",
"(",
"'author'",
")",
"if",
"apps",
".",
"is_installed",
"(",
"'djangocms_blog.liveblog'",
")",
":",
"fsets",
"[",
"2",
"]",
"[",
"1",
"]",
"[",
"'fields'",
"]",
"[",
"2",
"]",
".",
"append",
"(",
"'enable_liveblog'",
")",
"filter_function",
"=",
"get_setting",
"(",
"'ADMIN_POST_FIELDSET_FILTER'",
")",
"if",
"related",
"and",
"Post",
".",
"objects",
".",
"namespace",
"(",
"config",
".",
"namespace",
")",
".",
"active_translations",
"(",
")",
".",
"exists",
"(",
")",
":",
"fsets",
"[",
"1",
"]",
"[",
"1",
"]",
"[",
"'fields'",
"]",
"[",
"0",
"]",
".",
"append",
"(",
"'related'",
")",
"if",
"callable",
"(",
"filter_function",
")",
":",
"fsets",
"=",
"filter_function",
"(",
"fsets",
",",
"request",
",",
"obj",
"=",
"obj",
")",
"return",
"fsets"
] |
Customize the fieldsets according to the app settings
:param request: request
:param obj: post
:return: fieldsets configuration
|
[
"Customize",
"the",
"fieldsets",
"according",
"to",
"the",
"app",
"settings"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/admin.py#L302-L342
|
17,733
|
nephila/djangocms-blog
|
djangocms_blog/admin.py
|
BlogConfigAdmin.save_model
|
def save_model(self, request, obj, form, change):
"""
Clear menu cache when changing menu structure
"""
if 'config.menu_structure' in form.changed_data:
from menus.menu_pool import menu_pool
menu_pool.clear(all=True)
return super(BlogConfigAdmin, self).save_model(request, obj, form, change)
|
python
|
def save_model(self, request, obj, form, change):
"""
Clear menu cache when changing menu structure
"""
if 'config.menu_structure' in form.changed_data:
from menus.menu_pool import menu_pool
menu_pool.clear(all=True)
return super(BlogConfigAdmin, self).save_model(request, obj, form, change)
|
[
"def",
"save_model",
"(",
"self",
",",
"request",
",",
"obj",
",",
"form",
",",
"change",
")",
":",
"if",
"'config.menu_structure'",
"in",
"form",
".",
"changed_data",
":",
"from",
"menus",
".",
"menu_pool",
"import",
"menu_pool",
"menu_pool",
".",
"clear",
"(",
"all",
"=",
"True",
")",
"return",
"super",
"(",
"BlogConfigAdmin",
",",
"self",
")",
".",
"save_model",
"(",
"request",
",",
"obj",
",",
"form",
",",
"change",
")"
] |
Clear menu cache when changing menu structure
|
[
"Clear",
"menu",
"cache",
"when",
"changing",
"menu",
"structure"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/admin.py#L453-L460
|
17,734
|
nephila/djangocms-blog
|
djangocms_blog/cms_wizards.py
|
PostWizardForm.clean_slug
|
def clean_slug(self):
"""
Generate a valid slug, in case the given one is taken
"""
source = self.cleaned_data.get('slug', '')
lang_choice = self.language_code
if not source:
source = slugify(self.cleaned_data.get('title', ''))
qs = Post._default_manager.active_translations(lang_choice).language(lang_choice)
used = list(qs.values_list('translations__slug', flat=True))
slug = source
i = 1
while slug in used:
slug = '%s-%s' % (source, i)
i += 1
return slug
|
python
|
def clean_slug(self):
"""
Generate a valid slug, in case the given one is taken
"""
source = self.cleaned_data.get('slug', '')
lang_choice = self.language_code
if not source:
source = slugify(self.cleaned_data.get('title', ''))
qs = Post._default_manager.active_translations(lang_choice).language(lang_choice)
used = list(qs.values_list('translations__slug', flat=True))
slug = source
i = 1
while slug in used:
slug = '%s-%s' % (source, i)
i += 1
return slug
|
[
"def",
"clean_slug",
"(",
"self",
")",
":",
"source",
"=",
"self",
".",
"cleaned_data",
".",
"get",
"(",
"'slug'",
",",
"''",
")",
"lang_choice",
"=",
"self",
".",
"language_code",
"if",
"not",
"source",
":",
"source",
"=",
"slugify",
"(",
"self",
".",
"cleaned_data",
".",
"get",
"(",
"'title'",
",",
"''",
")",
")",
"qs",
"=",
"Post",
".",
"_default_manager",
".",
"active_translations",
"(",
"lang_choice",
")",
".",
"language",
"(",
"lang_choice",
")",
"used",
"=",
"list",
"(",
"qs",
".",
"values_list",
"(",
"'translations__slug'",
",",
"flat",
"=",
"True",
")",
")",
"slug",
"=",
"source",
"i",
"=",
"1",
"while",
"slug",
"in",
"used",
":",
"slug",
"=",
"'%s-%s'",
"%",
"(",
"source",
",",
"i",
")",
"i",
"+=",
"1",
"return",
"slug"
] |
Generate a valid slug, in case the given one is taken
|
[
"Generate",
"a",
"valid",
"slug",
"in",
"case",
"the",
"given",
"one",
"is",
"taken"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/cms_wizards.py#L55-L70
|
17,735
|
nephila/djangocms-blog
|
djangocms_blog/managers.py
|
TaggedFilterItem.tagged
|
def tagged(self, other_model=None, queryset=None):
"""
Restituisce una queryset di elementi del model taggati,
o con gli stessi tag di un model o un queryset
"""
tags = self._taglist(other_model, queryset)
return self.get_queryset().filter(tags__in=tags).distinct()
|
python
|
def tagged(self, other_model=None, queryset=None):
"""
Restituisce una queryset di elementi del model taggati,
o con gli stessi tag di un model o un queryset
"""
tags = self._taglist(other_model, queryset)
return self.get_queryset().filter(tags__in=tags).distinct()
|
[
"def",
"tagged",
"(",
"self",
",",
"other_model",
"=",
"None",
",",
"queryset",
"=",
"None",
")",
":",
"tags",
"=",
"self",
".",
"_taglist",
"(",
"other_model",
",",
"queryset",
")",
"return",
"self",
".",
"get_queryset",
"(",
")",
".",
"filter",
"(",
"tags__in",
"=",
"tags",
")",
".",
"distinct",
"(",
")"
] |
Restituisce una queryset di elementi del model taggati,
o con gli stessi tag di un model o un queryset
|
[
"Restituisce",
"una",
"queryset",
"di",
"elementi",
"del",
"model",
"taggati",
"o",
"con",
"gli",
"stessi",
"tag",
"di",
"un",
"model",
"o",
"un",
"queryset"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/managers.py#L16-L22
|
17,736
|
nephila/djangocms-blog
|
djangocms_blog/managers.py
|
TaggedFilterItem._taglist
|
def _taglist(self, other_model=None, queryset=None):
"""
Restituisce una lista di id di tag comuni al model corrente e al model
o queryset passati come argomento
"""
from taggit.models import TaggedItem
filter = None
if queryset is not None:
filter = set()
for item in queryset.all():
filter.update(item.tags.all())
filter = set([tag.id for tag in filter])
elif other_model is not None:
filter = set(TaggedItem.objects.filter(
content_type__model=other_model.__name__.lower()
).values_list('tag_id', flat=True))
tags = set(TaggedItem.objects.filter(
content_type__model=self.model.__name__.lower()
).values_list('tag_id', flat=True))
if filter is not None:
tags = tags.intersection(filter)
return list(tags)
|
python
|
def _taglist(self, other_model=None, queryset=None):
"""
Restituisce una lista di id di tag comuni al model corrente e al model
o queryset passati come argomento
"""
from taggit.models import TaggedItem
filter = None
if queryset is not None:
filter = set()
for item in queryset.all():
filter.update(item.tags.all())
filter = set([tag.id for tag in filter])
elif other_model is not None:
filter = set(TaggedItem.objects.filter(
content_type__model=other_model.__name__.lower()
).values_list('tag_id', flat=True))
tags = set(TaggedItem.objects.filter(
content_type__model=self.model.__name__.lower()
).values_list('tag_id', flat=True))
if filter is not None:
tags = tags.intersection(filter)
return list(tags)
|
[
"def",
"_taglist",
"(",
"self",
",",
"other_model",
"=",
"None",
",",
"queryset",
"=",
"None",
")",
":",
"from",
"taggit",
".",
"models",
"import",
"TaggedItem",
"filter",
"=",
"None",
"if",
"queryset",
"is",
"not",
"None",
":",
"filter",
"=",
"set",
"(",
")",
"for",
"item",
"in",
"queryset",
".",
"all",
"(",
")",
":",
"filter",
".",
"update",
"(",
"item",
".",
"tags",
".",
"all",
"(",
")",
")",
"filter",
"=",
"set",
"(",
"[",
"tag",
".",
"id",
"for",
"tag",
"in",
"filter",
"]",
")",
"elif",
"other_model",
"is",
"not",
"None",
":",
"filter",
"=",
"set",
"(",
"TaggedItem",
".",
"objects",
".",
"filter",
"(",
"content_type__model",
"=",
"other_model",
".",
"__name__",
".",
"lower",
"(",
")",
")",
".",
"values_list",
"(",
"'tag_id'",
",",
"flat",
"=",
"True",
")",
")",
"tags",
"=",
"set",
"(",
"TaggedItem",
".",
"objects",
".",
"filter",
"(",
"content_type__model",
"=",
"self",
".",
"model",
".",
"__name__",
".",
"lower",
"(",
")",
")",
".",
"values_list",
"(",
"'tag_id'",
",",
"flat",
"=",
"True",
")",
")",
"if",
"filter",
"is",
"not",
"None",
":",
"tags",
"=",
"tags",
".",
"intersection",
"(",
"filter",
")",
"return",
"list",
"(",
"tags",
")"
] |
Restituisce una lista di id di tag comuni al model corrente e al model
o queryset passati come argomento
|
[
"Restituisce",
"una",
"lista",
"di",
"id",
"di",
"tag",
"comuni",
"al",
"model",
"corrente",
"e",
"al",
"model",
"o",
"queryset",
"passati",
"come",
"argomento"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/managers.py#L24-L45
|
17,737
|
nephila/djangocms-blog
|
djangocms_blog/managers.py
|
TaggedFilterItem.tag_list
|
def tag_list(self, other_model=None, queryset=None):
"""
Restituisce un queryset di tag comuni al model corrente e
al model o queryset passati come argomento
"""
from taggit.models import Tag
return Tag.objects.filter(id__in=self._taglist(other_model, queryset))
|
python
|
def tag_list(self, other_model=None, queryset=None):
"""
Restituisce un queryset di tag comuni al model corrente e
al model o queryset passati come argomento
"""
from taggit.models import Tag
return Tag.objects.filter(id__in=self._taglist(other_model, queryset))
|
[
"def",
"tag_list",
"(",
"self",
",",
"other_model",
"=",
"None",
",",
"queryset",
"=",
"None",
")",
":",
"from",
"taggit",
".",
"models",
"import",
"Tag",
"return",
"Tag",
".",
"objects",
".",
"filter",
"(",
"id__in",
"=",
"self",
".",
"_taglist",
"(",
"other_model",
",",
"queryset",
")",
")"
] |
Restituisce un queryset di tag comuni al model corrente e
al model o queryset passati come argomento
|
[
"Restituisce",
"un",
"queryset",
"di",
"tag",
"comuni",
"al",
"model",
"corrente",
"e",
"al",
"model",
"o",
"queryset",
"passati",
"come",
"argomento"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/managers.py#L47-L53
|
17,738
|
nephila/djangocms-blog
|
djangocms_blog/liveblog/consumers.py
|
liveblog_connect
|
def liveblog_connect(message, apphook, lang, post):
"""
Connect users to the group of the given post according to the given language
Return with an error message if a post cannot be found
:param message: channel connect message
:param apphook: apphook config namespace
:param lang: language
:param post: post slug
"""
try:
post = Post.objects.namespace(apphook).language(lang).active_translations(slug=post).get()
except Post.DoesNotExist:
message.reply_channel.send({
'text': json.dumps({'error': 'no_post'}),
})
return
Group(post.liveblog_group).add(message.reply_channel)
message.reply_channel.send({"accept": True})
|
python
|
def liveblog_connect(message, apphook, lang, post):
"""
Connect users to the group of the given post according to the given language
Return with an error message if a post cannot be found
:param message: channel connect message
:param apphook: apphook config namespace
:param lang: language
:param post: post slug
"""
try:
post = Post.objects.namespace(apphook).language(lang).active_translations(slug=post).get()
except Post.DoesNotExist:
message.reply_channel.send({
'text': json.dumps({'error': 'no_post'}),
})
return
Group(post.liveblog_group).add(message.reply_channel)
message.reply_channel.send({"accept": True})
|
[
"def",
"liveblog_connect",
"(",
"message",
",",
"apphook",
",",
"lang",
",",
"post",
")",
":",
"try",
":",
"post",
"=",
"Post",
".",
"objects",
".",
"namespace",
"(",
"apphook",
")",
".",
"language",
"(",
"lang",
")",
".",
"active_translations",
"(",
"slug",
"=",
"post",
")",
".",
"get",
"(",
")",
"except",
"Post",
".",
"DoesNotExist",
":",
"message",
".",
"reply_channel",
".",
"send",
"(",
"{",
"'text'",
":",
"json",
".",
"dumps",
"(",
"{",
"'error'",
":",
"'no_post'",
"}",
")",
",",
"}",
")",
"return",
"Group",
"(",
"post",
".",
"liveblog_group",
")",
".",
"add",
"(",
"message",
".",
"reply_channel",
")",
"message",
".",
"reply_channel",
".",
"send",
"(",
"{",
"\"accept\"",
":",
"True",
"}",
")"
] |
Connect users to the group of the given post according to the given language
Return with an error message if a post cannot be found
:param message: channel connect message
:param apphook: apphook config namespace
:param lang: language
:param post: post slug
|
[
"Connect",
"users",
"to",
"the",
"group",
"of",
"the",
"given",
"post",
"according",
"to",
"the",
"given",
"language"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/liveblog/consumers.py#L11-L30
|
17,739
|
nephila/djangocms-blog
|
djangocms_blog/liveblog/consumers.py
|
liveblog_disconnect
|
def liveblog_disconnect(message, apphook, lang, post):
"""
Disconnect users to the group of the given post according to the given language
Return with an error message if a post cannot be found
:param message: channel connect message
:param apphook: apphook config namespace
:param lang: language
:param post: post slug
"""
try:
post = Post.objects.namespace(apphook).language(lang).active_translations(slug=post).get()
except Post.DoesNotExist:
message.reply_channel.send({
'text': json.dumps({'error': 'no_post'}),
})
return
Group(post.liveblog_group).discard(message.reply_channel)
|
python
|
def liveblog_disconnect(message, apphook, lang, post):
"""
Disconnect users to the group of the given post according to the given language
Return with an error message if a post cannot be found
:param message: channel connect message
:param apphook: apphook config namespace
:param lang: language
:param post: post slug
"""
try:
post = Post.objects.namespace(apphook).language(lang).active_translations(slug=post).get()
except Post.DoesNotExist:
message.reply_channel.send({
'text': json.dumps({'error': 'no_post'}),
})
return
Group(post.liveblog_group).discard(message.reply_channel)
|
[
"def",
"liveblog_disconnect",
"(",
"message",
",",
"apphook",
",",
"lang",
",",
"post",
")",
":",
"try",
":",
"post",
"=",
"Post",
".",
"objects",
".",
"namespace",
"(",
"apphook",
")",
".",
"language",
"(",
"lang",
")",
".",
"active_translations",
"(",
"slug",
"=",
"post",
")",
".",
"get",
"(",
")",
"except",
"Post",
".",
"DoesNotExist",
":",
"message",
".",
"reply_channel",
".",
"send",
"(",
"{",
"'text'",
":",
"json",
".",
"dumps",
"(",
"{",
"'error'",
":",
"'no_post'",
"}",
")",
",",
"}",
")",
"return",
"Group",
"(",
"post",
".",
"liveblog_group",
")",
".",
"discard",
"(",
"message",
".",
"reply_channel",
")"
] |
Disconnect users to the group of the given post according to the given language
Return with an error message if a post cannot be found
:param message: channel connect message
:param apphook: apphook config namespace
:param lang: language
:param post: post slug
|
[
"Disconnect",
"users",
"to",
"the",
"group",
"of",
"the",
"given",
"post",
"according",
"to",
"the",
"given",
"language"
] |
3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d
|
https://github.com/nephila/djangocms-blog/blob/3fdfbd4ba48947df0ee4c6d42e3a1c812b6dd95d/djangocms_blog/liveblog/consumers.py#L33-L51
|
17,740
|
tchellomello/python-amcrest
|
src/amcrest/video.py
|
Video.video_in_option
|
def video_in_option(self, param, profile='Day'):
"""
Return video input option.
Params:
param - parameter, such as 'DayNightColor'
profile - 'Day', 'Night' or 'Normal'
"""
if profile == 'Day':
field = param
else:
field = '{}Options.{}'.format(profile, param)
return utils.pretty(
[opt for opt in self.video_in_options.split()
if '].{}='.format(field) in opt][0])
|
python
|
def video_in_option(self, param, profile='Day'):
"""
Return video input option.
Params:
param - parameter, such as 'DayNightColor'
profile - 'Day', 'Night' or 'Normal'
"""
if profile == 'Day':
field = param
else:
field = '{}Options.{}'.format(profile, param)
return utils.pretty(
[opt for opt in self.video_in_options.split()
if '].{}='.format(field) in opt][0])
|
[
"def",
"video_in_option",
"(",
"self",
",",
"param",
",",
"profile",
"=",
"'Day'",
")",
":",
"if",
"profile",
"==",
"'Day'",
":",
"field",
"=",
"param",
"else",
":",
"field",
"=",
"'{}Options.{}'",
".",
"format",
"(",
"profile",
",",
"param",
")",
"return",
"utils",
".",
"pretty",
"(",
"[",
"opt",
"for",
"opt",
"in",
"self",
".",
"video_in_options",
".",
"split",
"(",
")",
"if",
"'].{}='",
".",
"format",
"(",
"field",
")",
"in",
"opt",
"]",
"[",
"0",
"]",
")"
] |
Return video input option.
Params:
param - parameter, such as 'DayNightColor'
profile - 'Day', 'Night' or 'Normal'
|
[
"Return",
"video",
"input",
"option",
"."
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/video.py#L132-L146
|
17,741
|
tchellomello/python-amcrest
|
src/amcrest/http.py
|
Http._generate_token
|
def _generate_token(self):
"""Create authentation to use with requests."""
session = self.get_session()
url = self.__base_url('magicBox.cgi?action=getMachineName')
try:
# try old basic method
auth = requests.auth.HTTPBasicAuth(self._user, self._password)
req = session.get(url, auth=auth, timeout=self._timeout_default)
if not req.ok:
# try new digest method
auth = requests.auth.HTTPDigestAuth(
self._user, self._password)
req = session.get(
url, auth=auth, timeout=self._timeout_default)
req.raise_for_status()
except requests.RequestException as error:
_LOGGER.error(error)
raise CommError('Could not communicate with camera')
# check if user passed
result = req.text.lower()
if 'invalid' in result or 'error' in result:
_LOGGER.error('Result from camera: %s',
req.text.strip().replace('\r\n', ': '))
raise LoginError('Invalid credentials')
return auth
|
python
|
def _generate_token(self):
"""Create authentation to use with requests."""
session = self.get_session()
url = self.__base_url('magicBox.cgi?action=getMachineName')
try:
# try old basic method
auth = requests.auth.HTTPBasicAuth(self._user, self._password)
req = session.get(url, auth=auth, timeout=self._timeout_default)
if not req.ok:
# try new digest method
auth = requests.auth.HTTPDigestAuth(
self._user, self._password)
req = session.get(
url, auth=auth, timeout=self._timeout_default)
req.raise_for_status()
except requests.RequestException as error:
_LOGGER.error(error)
raise CommError('Could not communicate with camera')
# check if user passed
result = req.text.lower()
if 'invalid' in result or 'error' in result:
_LOGGER.error('Result from camera: %s',
req.text.strip().replace('\r\n', ': '))
raise LoginError('Invalid credentials')
return auth
|
[
"def",
"_generate_token",
"(",
"self",
")",
":",
"session",
"=",
"self",
".",
"get_session",
"(",
")",
"url",
"=",
"self",
".",
"__base_url",
"(",
"'magicBox.cgi?action=getMachineName'",
")",
"try",
":",
"# try old basic method",
"auth",
"=",
"requests",
".",
"auth",
".",
"HTTPBasicAuth",
"(",
"self",
".",
"_user",
",",
"self",
".",
"_password",
")",
"req",
"=",
"session",
".",
"get",
"(",
"url",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"self",
".",
"_timeout_default",
")",
"if",
"not",
"req",
".",
"ok",
":",
"# try new digest method",
"auth",
"=",
"requests",
".",
"auth",
".",
"HTTPDigestAuth",
"(",
"self",
".",
"_user",
",",
"self",
".",
"_password",
")",
"req",
"=",
"session",
".",
"get",
"(",
"url",
",",
"auth",
"=",
"auth",
",",
"timeout",
"=",
"self",
".",
"_timeout_default",
")",
"req",
".",
"raise_for_status",
"(",
")",
"except",
"requests",
".",
"RequestException",
"as",
"error",
":",
"_LOGGER",
".",
"error",
"(",
"error",
")",
"raise",
"CommError",
"(",
"'Could not communicate with camera'",
")",
"# check if user passed",
"result",
"=",
"req",
".",
"text",
".",
"lower",
"(",
")",
"if",
"'invalid'",
"in",
"result",
"or",
"'error'",
"in",
"result",
":",
"_LOGGER",
".",
"error",
"(",
"'Result from camera: %s'",
",",
"req",
".",
"text",
".",
"strip",
"(",
")",
".",
"replace",
"(",
"'\\r\\n'",
",",
"': '",
")",
")",
"raise",
"LoginError",
"(",
"'Invalid credentials'",
")",
"return",
"auth"
] |
Create authentation to use with requests.
|
[
"Create",
"authentation",
"to",
"use",
"with",
"requests",
"."
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/http.py#L73-L99
|
17,742
|
tchellomello/python-amcrest
|
src/amcrest/http.py
|
Http._set_name
|
def _set_name(self):
"""Set device name."""
try:
self._name = pretty(self.machine_name)
self._serial = self.serial_number
except AttributeError:
self._name = None
self._serial = None
|
python
|
def _set_name(self):
"""Set device name."""
try:
self._name = pretty(self.machine_name)
self._serial = self.serial_number
except AttributeError:
self._name = None
self._serial = None
|
[
"def",
"_set_name",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"_name",
"=",
"pretty",
"(",
"self",
".",
"machine_name",
")",
"self",
".",
"_serial",
"=",
"self",
".",
"serial_number",
"except",
"AttributeError",
":",
"self",
".",
"_name",
"=",
"None",
"self",
".",
"_serial",
"=",
"None"
] |
Set device name.
|
[
"Set",
"device",
"name",
"."
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/http.py#L101-L108
|
17,743
|
tchellomello/python-amcrest
|
src/amcrest/utils.py
|
to_unit
|
def to_unit(value, unit='B'):
"""Convert bytes to give unit."""
byte_array = ['B', 'KB', 'MB', 'GB', 'TB']
if not isinstance(value, (int, float)):
value = float(value)
if unit in byte_array:
result = value / 1024**byte_array.index(unit)
return round(result, PRECISION), unit
return value
|
python
|
def to_unit(value, unit='B'):
"""Convert bytes to give unit."""
byte_array = ['B', 'KB', 'MB', 'GB', 'TB']
if not isinstance(value, (int, float)):
value = float(value)
if unit in byte_array:
result = value / 1024**byte_array.index(unit)
return round(result, PRECISION), unit
return value
|
[
"def",
"to_unit",
"(",
"value",
",",
"unit",
"=",
"'B'",
")",
":",
"byte_array",
"=",
"[",
"'B'",
",",
"'KB'",
",",
"'MB'",
",",
"'GB'",
",",
"'TB'",
"]",
"if",
"not",
"isinstance",
"(",
"value",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"value",
"=",
"float",
"(",
"value",
")",
"if",
"unit",
"in",
"byte_array",
":",
"result",
"=",
"value",
"/",
"1024",
"**",
"byte_array",
".",
"index",
"(",
"unit",
")",
"return",
"round",
"(",
"result",
",",
"PRECISION",
")",
",",
"unit",
"return",
"value"
] |
Convert bytes to give unit.
|
[
"Convert",
"bytes",
"to",
"give",
"unit",
"."
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/utils.py#L57-L68
|
17,744
|
tchellomello/python-amcrest
|
src/amcrest/special.py
|
Special.realtime_stream
|
def realtime_stream(self, channel=1, typeno=0, path_file=None):
"""
If the stream is redirect to a file, use mplayer tool to
visualize the video record
camera.realtime_stream(path_file="/home/user/Desktop/myvideo)
$ mplayer /home/user/Desktop/myvideo
"""
ret = self.command(
'realmonitor.cgi?action=getStream&channel={0}&subtype={1}'.format(
channel, typeno)
)
if path_file:
with open(path_file, 'wb') as out_file:
shutil.copyfileobj(ret.raw, out_file)
return ret.raw
|
python
|
def realtime_stream(self, channel=1, typeno=0, path_file=None):
"""
If the stream is redirect to a file, use mplayer tool to
visualize the video record
camera.realtime_stream(path_file="/home/user/Desktop/myvideo)
$ mplayer /home/user/Desktop/myvideo
"""
ret = self.command(
'realmonitor.cgi?action=getStream&channel={0}&subtype={1}'.format(
channel, typeno)
)
if path_file:
with open(path_file, 'wb') as out_file:
shutil.copyfileobj(ret.raw, out_file)
return ret.raw
|
[
"def",
"realtime_stream",
"(",
"self",
",",
"channel",
"=",
"1",
",",
"typeno",
"=",
"0",
",",
"path_file",
"=",
"None",
")",
":",
"ret",
"=",
"self",
".",
"command",
"(",
"'realmonitor.cgi?action=getStream&channel={0}&subtype={1}'",
".",
"format",
"(",
"channel",
",",
"typeno",
")",
")",
"if",
"path_file",
":",
"with",
"open",
"(",
"path_file",
",",
"'wb'",
")",
"as",
"out_file",
":",
"shutil",
".",
"copyfileobj",
"(",
"ret",
".",
"raw",
",",
"out_file",
")",
"return",
"ret",
".",
"raw"
] |
If the stream is redirect to a file, use mplayer tool to
visualize the video record
camera.realtime_stream(path_file="/home/user/Desktop/myvideo)
$ mplayer /home/user/Desktop/myvideo
|
[
"If",
"the",
"stream",
"is",
"redirect",
"to",
"a",
"file",
"use",
"mplayer",
"tool",
"to",
"visualize",
"the",
"video",
"record"
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/special.py#L20-L37
|
17,745
|
tchellomello/python-amcrest
|
src/amcrest/special.py
|
Special.rtsp_url
|
def rtsp_url(self, channelno=None, typeno=None):
"""
Return RTSP streaming url
Params:
channelno: integer, the video channel index which starts from 1,
default 1 if not specified.
typeno: the stream type, default 0 if not specified. It can be
the following value:
0-Main Stream
1-Extra Stream 1 (Sub Stream)
2-Extra Stream 2 (Sub Stream)
"""
if channelno is None:
channelno = 1
if typeno is None:
typeno = 0
cmd = 'cam/realmonitor?channel={0}&subtype={1}'.format(
channelno, typeno)
try:
port = ':' + [x.split('=')[1] for x in self.rtsp_config.split()
if x.startswith('table.RTSP.Port=')][0]
except IndexError:
port = ''
return 'rtsp://{}:{}@{}{}/{}'.format(
self._user, self._password, self._host, port, cmd)
|
python
|
def rtsp_url(self, channelno=None, typeno=None):
"""
Return RTSP streaming url
Params:
channelno: integer, the video channel index which starts from 1,
default 1 if not specified.
typeno: the stream type, default 0 if not specified. It can be
the following value:
0-Main Stream
1-Extra Stream 1 (Sub Stream)
2-Extra Stream 2 (Sub Stream)
"""
if channelno is None:
channelno = 1
if typeno is None:
typeno = 0
cmd = 'cam/realmonitor?channel={0}&subtype={1}'.format(
channelno, typeno)
try:
port = ':' + [x.split('=')[1] for x in self.rtsp_config.split()
if x.startswith('table.RTSP.Port=')][0]
except IndexError:
port = ''
return 'rtsp://{}:{}@{}{}/{}'.format(
self._user, self._password, self._host, port, cmd)
|
[
"def",
"rtsp_url",
"(",
"self",
",",
"channelno",
"=",
"None",
",",
"typeno",
"=",
"None",
")",
":",
"if",
"channelno",
"is",
"None",
":",
"channelno",
"=",
"1",
"if",
"typeno",
"is",
"None",
":",
"typeno",
"=",
"0",
"cmd",
"=",
"'cam/realmonitor?channel={0}&subtype={1}'",
".",
"format",
"(",
"channelno",
",",
"typeno",
")",
"try",
":",
"port",
"=",
"':'",
"+",
"[",
"x",
".",
"split",
"(",
"'='",
")",
"[",
"1",
"]",
"for",
"x",
"in",
"self",
".",
"rtsp_config",
".",
"split",
"(",
")",
"if",
"x",
".",
"startswith",
"(",
"'table.RTSP.Port='",
")",
"]",
"[",
"0",
"]",
"except",
"IndexError",
":",
"port",
"=",
"''",
"return",
"'rtsp://{}:{}@{}{}/{}'",
".",
"format",
"(",
"self",
".",
"_user",
",",
"self",
".",
"_password",
",",
"self",
".",
"_host",
",",
"port",
",",
"cmd",
")"
] |
Return RTSP streaming url
Params:
channelno: integer, the video channel index which starts from 1,
default 1 if not specified.
typeno: the stream type, default 0 if not specified. It can be
the following value:
0-Main Stream
1-Extra Stream 1 (Sub Stream)
2-Extra Stream 2 (Sub Stream)
|
[
"Return",
"RTSP",
"streaming",
"url"
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/special.py#L39-L69
|
17,746
|
tchellomello/python-amcrest
|
src/amcrest/special.py
|
Special.mjpeg_url
|
def mjpeg_url(self, channelno=None, typeno=None):
"""
Return MJPEG streaming url
Params:
channelno: integer, the video channel index which starts from 1,
default 1 if not specified.
typeno: the stream type, default 0 if not specified. It can be
the following value:
0-Main Stream
1-Extra Stream 1 (Sub Stream)
2-Extra Stream 2 (Sub Stream)
"""
if channelno is None:
channelno = 0
if typeno is None:
typeno = 1
cmd = "mjpg/video.cgi?channel={0}&subtype={1}".format(
channelno, typeno)
return '{0}{1}'.format(self._base_url, cmd)
|
python
|
def mjpeg_url(self, channelno=None, typeno=None):
"""
Return MJPEG streaming url
Params:
channelno: integer, the video channel index which starts from 1,
default 1 if not specified.
typeno: the stream type, default 0 if not specified. It can be
the following value:
0-Main Stream
1-Extra Stream 1 (Sub Stream)
2-Extra Stream 2 (Sub Stream)
"""
if channelno is None:
channelno = 0
if typeno is None:
typeno = 1
cmd = "mjpg/video.cgi?channel={0}&subtype={1}".format(
channelno, typeno)
return '{0}{1}'.format(self._base_url, cmd)
|
[
"def",
"mjpeg_url",
"(",
"self",
",",
"channelno",
"=",
"None",
",",
"typeno",
"=",
"None",
")",
":",
"if",
"channelno",
"is",
"None",
":",
"channelno",
"=",
"0",
"if",
"typeno",
"is",
"None",
":",
"typeno",
"=",
"1",
"cmd",
"=",
"\"mjpg/video.cgi?channel={0}&subtype={1}\"",
".",
"format",
"(",
"channelno",
",",
"typeno",
")",
"return",
"'{0}{1}'",
".",
"format",
"(",
"self",
".",
"_base_url",
",",
"cmd",
")"
] |
Return MJPEG streaming url
Params:
channelno: integer, the video channel index which starts from 1,
default 1 if not specified.
typeno: the stream type, default 0 if not specified. It can be
the following value:
0-Main Stream
1-Extra Stream 1 (Sub Stream)
2-Extra Stream 2 (Sub Stream)
|
[
"Return",
"MJPEG",
"streaming",
"url"
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/special.py#L95-L118
|
17,747
|
tchellomello/python-amcrest
|
src/amcrest/network.py
|
Network.scan_devices
|
def scan_devices(self, subnet, timeout=None):
"""
Scan cameras in a range of ips
Params:
subnet - subnet, i.e: 192.168.1.0/24
if mask not used, assuming mask 24
timeout_sec - timeout in sec
Returns:
"""
# Maximum range from mask
# Format is mask: max_range
max_range = {
16: 256,
24: 256,
25: 128,
27: 32,
28: 16,
29: 8,
30: 4,
31: 2
}
# If user didn't provide mask, use /24
if "/" not in subnet:
mask = int(24)
network = subnet
else:
network, mask = subnet.split("/")
mask = int(mask)
if mask not in max_range:
raise RuntimeError("Cannot determine the subnet mask!")
# Default logic is remove everything from last "." to the end
# This logic change in case mask is 16
network = network.rpartition(".")[0]
if mask == 16:
# For mask 16, we must cut the last two
# entries with .
# pylint: disable=unused-variable
for i in range(0, 1):
network = network.rpartition(".")[0]
# Trigger the scan
# For clear coding, let's keep the logic in if/else (mask16)
# instead of only one if
if mask == 16:
for seq1 in range(0, max_range[mask]):
for seq2 in range(0, max_range[mask]):
ipaddr = "{0}.{1}.{2}".format(network, seq1, seq2)
thd = threading.Thread(
target=self.__raw_scan, args=(ipaddr, timeout)
)
thd.start()
else:
for seq1 in range(0, max_range[mask]):
ipaddr = "{0}.{1}".format(network, seq1)
thd = threading.Thread(
target=self.__raw_scan, args=(ipaddr, timeout)
)
thd.start()
return self.amcrest_ips
|
python
|
def scan_devices(self, subnet, timeout=None):
"""
Scan cameras in a range of ips
Params:
subnet - subnet, i.e: 192.168.1.0/24
if mask not used, assuming mask 24
timeout_sec - timeout in sec
Returns:
"""
# Maximum range from mask
# Format is mask: max_range
max_range = {
16: 256,
24: 256,
25: 128,
27: 32,
28: 16,
29: 8,
30: 4,
31: 2
}
# If user didn't provide mask, use /24
if "/" not in subnet:
mask = int(24)
network = subnet
else:
network, mask = subnet.split("/")
mask = int(mask)
if mask not in max_range:
raise RuntimeError("Cannot determine the subnet mask!")
# Default logic is remove everything from last "." to the end
# This logic change in case mask is 16
network = network.rpartition(".")[0]
if mask == 16:
# For mask 16, we must cut the last two
# entries with .
# pylint: disable=unused-variable
for i in range(0, 1):
network = network.rpartition(".")[0]
# Trigger the scan
# For clear coding, let's keep the logic in if/else (mask16)
# instead of only one if
if mask == 16:
for seq1 in range(0, max_range[mask]):
for seq2 in range(0, max_range[mask]):
ipaddr = "{0}.{1}.{2}".format(network, seq1, seq2)
thd = threading.Thread(
target=self.__raw_scan, args=(ipaddr, timeout)
)
thd.start()
else:
for seq1 in range(0, max_range[mask]):
ipaddr = "{0}.{1}".format(network, seq1)
thd = threading.Thread(
target=self.__raw_scan, args=(ipaddr, timeout)
)
thd.start()
return self.amcrest_ips
|
[
"def",
"scan_devices",
"(",
"self",
",",
"subnet",
",",
"timeout",
"=",
"None",
")",
":",
"# Maximum range from mask",
"# Format is mask: max_range",
"max_range",
"=",
"{",
"16",
":",
"256",
",",
"24",
":",
"256",
",",
"25",
":",
"128",
",",
"27",
":",
"32",
",",
"28",
":",
"16",
",",
"29",
":",
"8",
",",
"30",
":",
"4",
",",
"31",
":",
"2",
"}",
"# If user didn't provide mask, use /24",
"if",
"\"/\"",
"not",
"in",
"subnet",
":",
"mask",
"=",
"int",
"(",
"24",
")",
"network",
"=",
"subnet",
"else",
":",
"network",
",",
"mask",
"=",
"subnet",
".",
"split",
"(",
"\"/\"",
")",
"mask",
"=",
"int",
"(",
"mask",
")",
"if",
"mask",
"not",
"in",
"max_range",
":",
"raise",
"RuntimeError",
"(",
"\"Cannot determine the subnet mask!\"",
")",
"# Default logic is remove everything from last \".\" to the end",
"# This logic change in case mask is 16",
"network",
"=",
"network",
".",
"rpartition",
"(",
"\".\"",
")",
"[",
"0",
"]",
"if",
"mask",
"==",
"16",
":",
"# For mask 16, we must cut the last two",
"# entries with .",
"# pylint: disable=unused-variable",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"1",
")",
":",
"network",
"=",
"network",
".",
"rpartition",
"(",
"\".\"",
")",
"[",
"0",
"]",
"# Trigger the scan",
"# For clear coding, let's keep the logic in if/else (mask16)",
"# instead of only one if",
"if",
"mask",
"==",
"16",
":",
"for",
"seq1",
"in",
"range",
"(",
"0",
",",
"max_range",
"[",
"mask",
"]",
")",
":",
"for",
"seq2",
"in",
"range",
"(",
"0",
",",
"max_range",
"[",
"mask",
"]",
")",
":",
"ipaddr",
"=",
"\"{0}.{1}.{2}\"",
".",
"format",
"(",
"network",
",",
"seq1",
",",
"seq2",
")",
"thd",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"__raw_scan",
",",
"args",
"=",
"(",
"ipaddr",
",",
"timeout",
")",
")",
"thd",
".",
"start",
"(",
")",
"else",
":",
"for",
"seq1",
"in",
"range",
"(",
"0",
",",
"max_range",
"[",
"mask",
"]",
")",
":",
"ipaddr",
"=",
"\"{0}.{1}\"",
".",
"format",
"(",
"network",
",",
"seq1",
")",
"thd",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"__raw_scan",
",",
"args",
"=",
"(",
"ipaddr",
",",
"timeout",
")",
")",
"thd",
".",
"start",
"(",
")",
"return",
"self",
".",
"amcrest_ips"
] |
Scan cameras in a range of ips
Params:
subnet - subnet, i.e: 192.168.1.0/24
if mask not used, assuming mask 24
timeout_sec - timeout in sec
Returns:
|
[
"Scan",
"cameras",
"in",
"a",
"range",
"of",
"ips"
] |
ed842139e234de2eaf6ee8fb480214711cde1249
|
https://github.com/tchellomello/python-amcrest/blob/ed842139e234de2eaf6ee8fb480214711cde1249/src/amcrest/network.py#L41-L109
|
17,748
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.disallow
|
def disallow(
self, foreign, permission="active", account=None, threshold=None, **kwargs
):
""" Remove additional access to an account by some other public
key or account.
:param str foreign: The foreign account that will obtain access
:param str permission: (optional) The actual permission to
modify (defaults to ``active``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
:param int threshold: The threshold that needs to be reached
by signatures to be able to interact
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
if permission not in ["owner", "active"]:
raise ValueError("Permission needs to be either 'owner', or 'active")
account = Account(account, blockchain_instance=self)
authority = account[permission]
try:
pubkey = PublicKey(foreign, prefix=self.prefix)
affected_items = list(
filter(lambda x: x[0] == str(pubkey), authority["key_auths"])
)
authority["key_auths"] = list(
filter(lambda x: x[0] != str(pubkey), authority["key_auths"])
)
except:
try:
foreign_account = Account(foreign, blockchain_instance=self)
affected_items = list(
filter(
lambda x: x[0] == foreign_account["id"],
authority["account_auths"],
)
)
authority["account_auths"] = list(
filter(
lambda x: x[0] != foreign_account["id"],
authority["account_auths"],
)
)
except:
raise ValueError("Unknown foreign account or unvalid public key")
if not affected_items:
raise ValueError("Changes nothing!")
removed_weight = affected_items[0][1]
# Define threshold
if threshold:
authority["weight_threshold"] = threshold
# Correct threshold (at most by the amount removed from the
# authority)
try:
self._test_weights_treshold(authority)
except:
log.critical(
"The account's threshold will be reduced by %d" % (removed_weight)
)
authority["weight_threshold"] -= removed_weight
self._test_weights_treshold(authority)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
permission: authority,
"extensions": {},
}
)
if permission == "owner":
return self.finalizeOp(op, account["name"], "owner", **kwargs)
else:
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def disallow(
self, foreign, permission="active", account=None, threshold=None, **kwargs
):
""" Remove additional access to an account by some other public
key or account.
:param str foreign: The foreign account that will obtain access
:param str permission: (optional) The actual permission to
modify (defaults to ``active``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
:param int threshold: The threshold that needs to be reached
by signatures to be able to interact
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
if permission not in ["owner", "active"]:
raise ValueError("Permission needs to be either 'owner', or 'active")
account = Account(account, blockchain_instance=self)
authority = account[permission]
try:
pubkey = PublicKey(foreign, prefix=self.prefix)
affected_items = list(
filter(lambda x: x[0] == str(pubkey), authority["key_auths"])
)
authority["key_auths"] = list(
filter(lambda x: x[0] != str(pubkey), authority["key_auths"])
)
except:
try:
foreign_account = Account(foreign, blockchain_instance=self)
affected_items = list(
filter(
lambda x: x[0] == foreign_account["id"],
authority["account_auths"],
)
)
authority["account_auths"] = list(
filter(
lambda x: x[0] != foreign_account["id"],
authority["account_auths"],
)
)
except:
raise ValueError("Unknown foreign account or unvalid public key")
if not affected_items:
raise ValueError("Changes nothing!")
removed_weight = affected_items[0][1]
# Define threshold
if threshold:
authority["weight_threshold"] = threshold
# Correct threshold (at most by the amount removed from the
# authority)
try:
self._test_weights_treshold(authority)
except:
log.critical(
"The account's threshold will be reduced by %d" % (removed_weight)
)
authority["weight_threshold"] -= removed_weight
self._test_weights_treshold(authority)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
permission: authority,
"extensions": {},
}
)
if permission == "owner":
return self.finalizeOp(op, account["name"], "owner", **kwargs)
else:
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"disallow",
"(",
"self",
",",
"foreign",
",",
"permission",
"=",
"\"active\"",
",",
"account",
"=",
"None",
",",
"threshold",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"if",
"permission",
"not",
"in",
"[",
"\"owner\"",
",",
"\"active\"",
"]",
":",
"raise",
"ValueError",
"(",
"\"Permission needs to be either 'owner', or 'active\"",
")",
"account",
"=",
"Account",
"(",
"account",
",",
"blockchain_instance",
"=",
"self",
")",
"authority",
"=",
"account",
"[",
"permission",
"]",
"try",
":",
"pubkey",
"=",
"PublicKey",
"(",
"foreign",
",",
"prefix",
"=",
"self",
".",
"prefix",
")",
"affected_items",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
"==",
"str",
"(",
"pubkey",
")",
",",
"authority",
"[",
"\"key_auths\"",
"]",
")",
")",
"authority",
"[",
"\"key_auths\"",
"]",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
"!=",
"str",
"(",
"pubkey",
")",
",",
"authority",
"[",
"\"key_auths\"",
"]",
")",
")",
"except",
":",
"try",
":",
"foreign_account",
"=",
"Account",
"(",
"foreign",
",",
"blockchain_instance",
"=",
"self",
")",
"affected_items",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
"==",
"foreign_account",
"[",
"\"id\"",
"]",
",",
"authority",
"[",
"\"account_auths\"",
"]",
",",
")",
")",
"authority",
"[",
"\"account_auths\"",
"]",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
"!=",
"foreign_account",
"[",
"\"id\"",
"]",
",",
"authority",
"[",
"\"account_auths\"",
"]",
",",
")",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"Unknown foreign account or unvalid public key\"",
")",
"if",
"not",
"affected_items",
":",
"raise",
"ValueError",
"(",
"\"Changes nothing!\"",
")",
"removed_weight",
"=",
"affected_items",
"[",
"0",
"]",
"[",
"1",
"]",
"# Define threshold",
"if",
"threshold",
":",
"authority",
"[",
"\"weight_threshold\"",
"]",
"=",
"threshold",
"# Correct threshold (at most by the amount removed from the",
"# authority)",
"try",
":",
"self",
".",
"_test_weights_treshold",
"(",
"authority",
")",
"except",
":",
"log",
".",
"critical",
"(",
"\"The account's threshold will be reduced by %d\"",
"%",
"(",
"removed_weight",
")",
")",
"authority",
"[",
"\"weight_threshold\"",
"]",
"-=",
"removed_weight",
"self",
".",
"_test_weights_treshold",
"(",
"authority",
")",
"op",
"=",
"operations",
".",
"Account_update",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"account\"",
":",
"account",
"[",
"\"id\"",
"]",
",",
"permission",
":",
"authority",
",",
"\"extensions\"",
":",
"{",
"}",
",",
"}",
")",
"if",
"permission",
"==",
"\"owner\"",
":",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"owner\"",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Remove additional access to an account by some other public
key or account.
:param str foreign: The foreign account that will obtain access
:param str permission: (optional) The actual permission to
modify (defaults to ``active``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
:param int threshold: The threshold that needs to be reached
by signatures to be able to interact
|
[
"Remove",
"additional",
"access",
"to",
"an",
"account",
"by",
"some",
"other",
"public",
"key",
"or",
"account",
"."
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L439-L520
|
17,749
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.approvewitness
|
def approvewitness(self, witnesses, account=None, **kwargs):
""" Approve a witness
:param list witnesses: list of Witness name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(witnesses, (list, set, tuple)):
witnesses = {witnesses}
for witness in witnesses:
witness = Witness(witness, blockchain_instance=self)
options["votes"].append(witness["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_witness"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 1, options["votes"]))
)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def approvewitness(self, witnesses, account=None, **kwargs):
""" Approve a witness
:param list witnesses: list of Witness name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(witnesses, (list, set, tuple)):
witnesses = {witnesses}
for witness in witnesses:
witness = Witness(witness, blockchain_instance=self)
options["votes"].append(witness["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_witness"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 1, options["votes"]))
)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"approvewitness",
"(",
"self",
",",
"witnesses",
",",
"account",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"account",
"=",
"Account",
"(",
"account",
",",
"blockchain_instance",
"=",
"self",
")",
"options",
"=",
"account",
"[",
"\"options\"",
"]",
"if",
"not",
"isinstance",
"(",
"witnesses",
",",
"(",
"list",
",",
"set",
",",
"tuple",
")",
")",
":",
"witnesses",
"=",
"{",
"witnesses",
"}",
"for",
"witness",
"in",
"witnesses",
":",
"witness",
"=",
"Witness",
"(",
"witness",
",",
"blockchain_instance",
"=",
"self",
")",
"options",
"[",
"\"votes\"",
"]",
".",
"append",
"(",
"witness",
"[",
"\"vote_id\"",
"]",
")",
"options",
"[",
"\"votes\"",
"]",
"=",
"list",
"(",
"set",
"(",
"options",
"[",
"\"votes\"",
"]",
")",
")",
"options",
"[",
"\"num_witness\"",
"]",
"=",
"len",
"(",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"float",
"(",
"x",
".",
"split",
"(",
"\":\"",
")",
"[",
"0",
"]",
")",
"==",
"1",
",",
"options",
"[",
"\"votes\"",
"]",
")",
")",
")",
"op",
"=",
"operations",
".",
"Account_update",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"account\"",
":",
"account",
"[",
"\"id\"",
"]",
",",
"\"new_options\"",
":",
"options",
",",
"\"extensions\"",
":",
"{",
"}",
",",
"\"prefix\"",
":",
"self",
".",
"prefix",
",",
"}",
")",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Approve a witness
:param list witnesses: list of Witness name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
|
[
"Approve",
"a",
"witness"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L556-L592
|
17,750
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.approvecommittee
|
def approvecommittee(self, committees, account=None, **kwargs):
""" Approve a committee
:param list committees: list of committee member name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(committees, (list, set, tuple)):
committees = {committees}
for committee in committees:
committee = Committee(committee, blockchain_instance=self)
options["votes"].append(committee["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_committee"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 0, options["votes"]))
)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def approvecommittee(self, committees, account=None, **kwargs):
""" Approve a committee
:param list committees: list of committee member name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account, blockchain_instance=self)
options = account["options"]
if not isinstance(committees, (list, set, tuple)):
committees = {committees}
for committee in committees:
committee = Committee(committee, blockchain_instance=self)
options["votes"].append(committee["vote_id"])
options["votes"] = list(set(options["votes"]))
options["num_committee"] = len(
list(filter(lambda x: float(x.split(":")[0]) == 0, options["votes"]))
)
op = operations.Account_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"account": account["id"],
"new_options": options,
"extensions": {},
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"approvecommittee",
"(",
"self",
",",
"committees",
",",
"account",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"account",
"=",
"Account",
"(",
"account",
",",
"blockchain_instance",
"=",
"self",
")",
"options",
"=",
"account",
"[",
"\"options\"",
"]",
"if",
"not",
"isinstance",
"(",
"committees",
",",
"(",
"list",
",",
"set",
",",
"tuple",
")",
")",
":",
"committees",
"=",
"{",
"committees",
"}",
"for",
"committee",
"in",
"committees",
":",
"committee",
"=",
"Committee",
"(",
"committee",
",",
"blockchain_instance",
"=",
"self",
")",
"options",
"[",
"\"votes\"",
"]",
".",
"append",
"(",
"committee",
"[",
"\"vote_id\"",
"]",
")",
"options",
"[",
"\"votes\"",
"]",
"=",
"list",
"(",
"set",
"(",
"options",
"[",
"\"votes\"",
"]",
")",
")",
"options",
"[",
"\"num_committee\"",
"]",
"=",
"len",
"(",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"float",
"(",
"x",
".",
"split",
"(",
"\":\"",
")",
"[",
"0",
"]",
")",
"==",
"0",
",",
"options",
"[",
"\"votes\"",
"]",
")",
")",
")",
"op",
"=",
"operations",
".",
"Account_update",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"account\"",
":",
"account",
"[",
"\"id\"",
"]",
",",
"\"new_options\"",
":",
"options",
",",
"\"extensions\"",
":",
"{",
"}",
",",
"\"prefix\"",
":",
"self",
".",
"prefix",
",",
"}",
")",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Approve a committee
:param list committees: list of committee member name or id
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
|
[
"Approve",
"a",
"committee"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L633-L669
|
17,751
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.betting_market_rules_create
|
def betting_market_rules_create(self, names, descriptions, account=None, **kwargs):
""" Create betting market rules
:param list names: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list descriptions: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
assert isinstance(names, list)
assert isinstance(descriptions, list)
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
op = operations.Betting_market_rules_create(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"name": names,
"description": descriptions,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def betting_market_rules_create(self, names, descriptions, account=None, **kwargs):
""" Create betting market rules
:param list names: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list descriptions: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
assert isinstance(names, list)
assert isinstance(descriptions, list)
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
op = operations.Betting_market_rules_create(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"name": names,
"description": descriptions,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"betting_market_rules_create",
"(",
"self",
",",
"names",
",",
"descriptions",
",",
"account",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"isinstance",
"(",
"names",
",",
"list",
")",
"assert",
"isinstance",
"(",
"descriptions",
",",
"list",
")",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"account",
"=",
"Account",
"(",
"account",
")",
"op",
"=",
"operations",
".",
"Betting_market_rules_create",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"name\"",
":",
"names",
",",
"\"description\"",
":",
"descriptions",
",",
"\"prefix\"",
":",
"self",
".",
"prefix",
",",
"}",
")",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Create betting market rules
:param list names: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list descriptions: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
|
[
"Create",
"betting",
"market",
"rules"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L1149-L1176
|
17,752
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.betting_market_rules_update
|
def betting_market_rules_update(
self, rules_id, names, descriptions, account=None, **kwargs
):
""" Update betting market rules
:param str rules_id: Id of the betting market rules to update
:param list names: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list descriptions: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
assert isinstance(names, list)
assert isinstance(descriptions, list)
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
rule = Rule(rules_id)
op = operations.Betting_market_rules_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"betting_market_rules_id": rule["id"],
"new_name": names,
"new_description": descriptions,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def betting_market_rules_update(
self, rules_id, names, descriptions, account=None, **kwargs
):
""" Update betting market rules
:param str rules_id: Id of the betting market rules to update
:param list names: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list descriptions: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
"""
assert isinstance(names, list)
assert isinstance(descriptions, list)
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
rule = Rule(rules_id)
op = operations.Betting_market_rules_update(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"betting_market_rules_id": rule["id"],
"new_name": names,
"new_description": descriptions,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"betting_market_rules_update",
"(",
"self",
",",
"rules_id",
",",
"names",
",",
"descriptions",
",",
"account",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"isinstance",
"(",
"names",
",",
"list",
")",
"assert",
"isinstance",
"(",
"descriptions",
",",
"list",
")",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"account",
"=",
"Account",
"(",
"account",
")",
"rule",
"=",
"Rule",
"(",
"rules_id",
")",
"op",
"=",
"operations",
".",
"Betting_market_rules_update",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"betting_market_rules_id\"",
":",
"rule",
"[",
"\"id\"",
"]",
",",
"\"new_name\"",
":",
"names",
",",
"\"new_description\"",
":",
"descriptions",
",",
"\"prefix\"",
":",
"self",
".",
"prefix",
",",
"}",
")",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Update betting market rules
:param str rules_id: Id of the betting market rules to update
:param list names: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list descriptions: Internationalized descriptions, e.g.
``[['de', 'Foo'], ['en', 'bar']]``
:param str account: (optional) the account to allow access
to (defaults to ``default_account``)
|
[
"Update",
"betting",
"market",
"rules"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L1178-L1210
|
17,753
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.bet_place
|
def bet_place(
self,
betting_market_id,
amount_to_bet,
backer_multiplier,
back_or_lay,
account=None,
**kwargs
):
""" Place a bet
:param str betting_market_id: The identifier for the market to bet
in
:param peerplays.amount.Amount amount_to_bet: Amount to bet with
:param int backer_multiplier: Multipler for backer
:param str back_or_lay: "back" or "lay" the bet
:param str account: (optional) the account to bet (defaults
to ``default_account``)
"""
from . import GRAPHENE_BETTING_ODDS_PRECISION
assert isinstance(amount_to_bet, Amount)
assert back_or_lay in ["back", "lay"]
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
bm = BettingMarket(betting_market_id)
op = operations.Bet_place(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"bettor_id": account["id"],
"betting_market_id": bm["id"],
"amount_to_bet": amount_to_bet.json(),
"backer_multiplier": (
int(backer_multiplier * GRAPHENE_BETTING_ODDS_PRECISION)
),
"back_or_lay": back_or_lay,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def bet_place(
self,
betting_market_id,
amount_to_bet,
backer_multiplier,
back_or_lay,
account=None,
**kwargs
):
""" Place a bet
:param str betting_market_id: The identifier for the market to bet
in
:param peerplays.amount.Amount amount_to_bet: Amount to bet with
:param int backer_multiplier: Multipler for backer
:param str back_or_lay: "back" or "lay" the bet
:param str account: (optional) the account to bet (defaults
to ``default_account``)
"""
from . import GRAPHENE_BETTING_ODDS_PRECISION
assert isinstance(amount_to_bet, Amount)
assert back_or_lay in ["back", "lay"]
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
bm = BettingMarket(betting_market_id)
op = operations.Bet_place(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"bettor_id": account["id"],
"betting_market_id": bm["id"],
"amount_to_bet": amount_to_bet.json(),
"backer_multiplier": (
int(backer_multiplier * GRAPHENE_BETTING_ODDS_PRECISION)
),
"back_or_lay": back_or_lay,
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"bet_place",
"(",
"self",
",",
"betting_market_id",
",",
"amount_to_bet",
",",
"backer_multiplier",
",",
"back_or_lay",
",",
"account",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"import",
"GRAPHENE_BETTING_ODDS_PRECISION",
"assert",
"isinstance",
"(",
"amount_to_bet",
",",
"Amount",
")",
"assert",
"back_or_lay",
"in",
"[",
"\"back\"",
",",
"\"lay\"",
"]",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"account",
"=",
"Account",
"(",
"account",
")",
"bm",
"=",
"BettingMarket",
"(",
"betting_market_id",
")",
"op",
"=",
"operations",
".",
"Bet_place",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"bettor_id\"",
":",
"account",
"[",
"\"id\"",
"]",
",",
"\"betting_market_id\"",
":",
"bm",
"[",
"\"id\"",
"]",
",",
"\"amount_to_bet\"",
":",
"amount_to_bet",
".",
"json",
"(",
")",
",",
"\"backer_multiplier\"",
":",
"(",
"int",
"(",
"backer_multiplier",
"*",
"GRAPHENE_BETTING_ODDS_PRECISION",
")",
")",
",",
"\"back_or_lay\"",
":",
"back_or_lay",
",",
"\"prefix\"",
":",
"self",
".",
"prefix",
",",
"}",
")",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Place a bet
:param str betting_market_id: The identifier for the market to bet
in
:param peerplays.amount.Amount amount_to_bet: Amount to bet with
:param int backer_multiplier: Multipler for backer
:param str back_or_lay: "back" or "lay" the bet
:param str account: (optional) the account to bet (defaults
to ``default_account``)
|
[
"Place",
"a",
"bet"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L1488-L1531
|
17,754
|
peerplays-network/python-peerplays
|
peerplays/peerplays.py
|
PeerPlays.bet_cancel
|
def bet_cancel(self, bet_to_cancel, account=None, **kwargs):
""" Cancel a bet
:param str bet_to_cancel: The identifier that identifies the bet to
cancel
:param str account: (optional) the account that owns the bet
(defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
bet = Bet(bet_to_cancel)
op = operations.Bet_cancel(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"bettor_id": account["id"],
"bet_to_cancel": bet["id"],
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
python
|
def bet_cancel(self, bet_to_cancel, account=None, **kwargs):
""" Cancel a bet
:param str bet_to_cancel: The identifier that identifies the bet to
cancel
:param str account: (optional) the account that owns the bet
(defaults to ``default_account``)
"""
if not account:
if "default_account" in self.config:
account = self.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
account = Account(account)
bet = Bet(bet_to_cancel)
op = operations.Bet_cancel(
**{
"fee": {"amount": 0, "asset_id": "1.3.0"},
"bettor_id": account["id"],
"bet_to_cancel": bet["id"],
"prefix": self.prefix,
}
)
return self.finalizeOp(op, account["name"], "active", **kwargs)
|
[
"def",
"bet_cancel",
"(",
"self",
",",
"bet_to_cancel",
",",
"account",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"account",
":",
"if",
"\"default_account\"",
"in",
"self",
".",
"config",
":",
"account",
"=",
"self",
".",
"config",
"[",
"\"default_account\"",
"]",
"if",
"not",
"account",
":",
"raise",
"ValueError",
"(",
"\"You need to provide an account\"",
")",
"account",
"=",
"Account",
"(",
"account",
")",
"bet",
"=",
"Bet",
"(",
"bet_to_cancel",
")",
"op",
"=",
"operations",
".",
"Bet_cancel",
"(",
"*",
"*",
"{",
"\"fee\"",
":",
"{",
"\"amount\"",
":",
"0",
",",
"\"asset_id\"",
":",
"\"1.3.0\"",
"}",
",",
"\"bettor_id\"",
":",
"account",
"[",
"\"id\"",
"]",
",",
"\"bet_to_cancel\"",
":",
"bet",
"[",
"\"id\"",
"]",
",",
"\"prefix\"",
":",
"self",
".",
"prefix",
",",
"}",
")",
"return",
"self",
".",
"finalizeOp",
"(",
"op",
",",
"account",
"[",
"\"name\"",
"]",
",",
"\"active\"",
",",
"*",
"*",
"kwargs",
")"
] |
Cancel a bet
:param str bet_to_cancel: The identifier that identifies the bet to
cancel
:param str account: (optional) the account that owns the bet
(defaults to ``default_account``)
|
[
"Cancel",
"a",
"bet"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/peerplays.py#L1533-L1556
|
17,755
|
peerplays-network/python-peerplays
|
peerplays/cli/decorators.py
|
verbose
|
def verbose(f):
""" Add verbose flags and add logging handlers
"""
@click.pass_context
def new_func(ctx, *args, **kwargs):
global log
verbosity = ["critical", "error", "warn", "info", "debug"][
int(min(ctx.obj.get("verbose", 0), 4))
]
log.setLevel(getattr(logging, verbosity.upper()))
formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
ch = logging.StreamHandler()
ch.setLevel(getattr(logging, verbosity.upper()))
ch.setFormatter(formatter)
log.addHandler(ch)
# GrapheneAPI logging
if ctx.obj.get("verbose", 0) > 4:
verbosity = ["critical", "error", "warn", "info", "debug"][
int(min(ctx.obj.get("verbose", 4) - 4, 4))
]
log = logging.getLogger("grapheneapi")
log.setLevel(getattr(logging, verbosity.upper()))
log.addHandler(ch)
if ctx.obj.get("verbose", 0) > 8:
verbosity = ["critical", "error", "warn", "info", "debug"][
int(min(ctx.obj.get("verbose", 8) - 8, 4))
]
log = logging.getLogger("graphenebase")
log.setLevel(getattr(logging, verbosity.upper()))
log.addHandler(ch)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
python
|
def verbose(f):
""" Add verbose flags and add logging handlers
"""
@click.pass_context
def new_func(ctx, *args, **kwargs):
global log
verbosity = ["critical", "error", "warn", "info", "debug"][
int(min(ctx.obj.get("verbose", 0), 4))
]
log.setLevel(getattr(logging, verbosity.upper()))
formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
ch = logging.StreamHandler()
ch.setLevel(getattr(logging, verbosity.upper()))
ch.setFormatter(formatter)
log.addHandler(ch)
# GrapheneAPI logging
if ctx.obj.get("verbose", 0) > 4:
verbosity = ["critical", "error", "warn", "info", "debug"][
int(min(ctx.obj.get("verbose", 4) - 4, 4))
]
log = logging.getLogger("grapheneapi")
log.setLevel(getattr(logging, verbosity.upper()))
log.addHandler(ch)
if ctx.obj.get("verbose", 0) > 8:
verbosity = ["critical", "error", "warn", "info", "debug"][
int(min(ctx.obj.get("verbose", 8) - 8, 4))
]
log = logging.getLogger("graphenebase")
log.setLevel(getattr(logging, verbosity.upper()))
log.addHandler(ch)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
[
"def",
"verbose",
"(",
"f",
")",
":",
"@",
"click",
".",
"pass_context",
"def",
"new_func",
"(",
"ctx",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"global",
"log",
"verbosity",
"=",
"[",
"\"critical\"",
",",
"\"error\"",
",",
"\"warn\"",
",",
"\"info\"",
",",
"\"debug\"",
"]",
"[",
"int",
"(",
"min",
"(",
"ctx",
".",
"obj",
".",
"get",
"(",
"\"verbose\"",
",",
"0",
")",
",",
"4",
")",
")",
"]",
"log",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"verbosity",
".",
"upper",
"(",
")",
")",
")",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"\"%(asctime)s - %(name)s - %(levelname)s - %(message)s\"",
")",
"ch",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"ch",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"verbosity",
".",
"upper",
"(",
")",
")",
")",
"ch",
".",
"setFormatter",
"(",
"formatter",
")",
"log",
".",
"addHandler",
"(",
"ch",
")",
"# GrapheneAPI logging",
"if",
"ctx",
".",
"obj",
".",
"get",
"(",
"\"verbose\"",
",",
"0",
")",
">",
"4",
":",
"verbosity",
"=",
"[",
"\"critical\"",
",",
"\"error\"",
",",
"\"warn\"",
",",
"\"info\"",
",",
"\"debug\"",
"]",
"[",
"int",
"(",
"min",
"(",
"ctx",
".",
"obj",
".",
"get",
"(",
"\"verbose\"",
",",
"4",
")",
"-",
"4",
",",
"4",
")",
")",
"]",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"\"grapheneapi\"",
")",
"log",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"verbosity",
".",
"upper",
"(",
")",
")",
")",
"log",
".",
"addHandler",
"(",
"ch",
")",
"if",
"ctx",
".",
"obj",
".",
"get",
"(",
"\"verbose\"",
",",
"0",
")",
">",
"8",
":",
"verbosity",
"=",
"[",
"\"critical\"",
",",
"\"error\"",
",",
"\"warn\"",
",",
"\"info\"",
",",
"\"debug\"",
"]",
"[",
"int",
"(",
"min",
"(",
"ctx",
".",
"obj",
".",
"get",
"(",
"\"verbose\"",
",",
"8",
")",
"-",
"8",
",",
"4",
")",
")",
"]",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"\"graphenebase\"",
")",
"log",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"verbosity",
".",
"upper",
"(",
")",
")",
")",
"log",
".",
"addHandler",
"(",
"ch",
")",
"return",
"ctx",
".",
"invoke",
"(",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"update_wrapper",
"(",
"new_func",
",",
"f",
")"
] |
Add verbose flags and add logging handlers
|
[
"Add",
"verbose",
"flags",
"and",
"add",
"logging",
"handlers"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/cli/decorators.py#L13-L51
|
17,756
|
peerplays-network/python-peerplays
|
peerplays/cli/decorators.py
|
offline
|
def offline(f):
""" This decorator allows you to access ``ctx.peerplays`` which is
an instance of PeerPlays with ``offline=True``.
"""
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
ctx.obj["offline"] = True
ctx.peerplays = PeerPlays(**ctx.obj)
ctx.blockchain = ctx.peerplays
set_shared_peerplays_instance(ctx.peerplays)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
python
|
def offline(f):
""" This decorator allows you to access ``ctx.peerplays`` which is
an instance of PeerPlays with ``offline=True``.
"""
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
ctx.obj["offline"] = True
ctx.peerplays = PeerPlays(**ctx.obj)
ctx.blockchain = ctx.peerplays
set_shared_peerplays_instance(ctx.peerplays)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
[
"def",
"offline",
"(",
"f",
")",
":",
"@",
"click",
".",
"pass_context",
"@",
"verbose",
"def",
"new_func",
"(",
"ctx",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"ctx",
".",
"obj",
"[",
"\"offline\"",
"]",
"=",
"True",
"ctx",
".",
"peerplays",
"=",
"PeerPlays",
"(",
"*",
"*",
"ctx",
".",
"obj",
")",
"ctx",
".",
"blockchain",
"=",
"ctx",
".",
"peerplays",
"set_shared_peerplays_instance",
"(",
"ctx",
".",
"peerplays",
")",
"return",
"ctx",
".",
"invoke",
"(",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"update_wrapper",
"(",
"new_func",
",",
"f",
")"
] |
This decorator allows you to access ``ctx.peerplays`` which is
an instance of PeerPlays with ``offline=True``.
|
[
"This",
"decorator",
"allows",
"you",
"to",
"access",
"ctx",
".",
"peerplays",
"which",
"is",
"an",
"instance",
"of",
"PeerPlays",
"with",
"offline",
"=",
"True",
"."
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/cli/decorators.py#L54-L68
|
17,757
|
peerplays-network/python-peerplays
|
peerplays/cli/decorators.py
|
configfile
|
def configfile(f):
""" This decorator will parse a configuration file in YAML format
and store the dictionary in ``ctx.blockchain.config``
"""
@click.pass_context
def new_func(ctx, *args, **kwargs):
ctx.config = yaml.load(open(ctx.obj["configfile"]))
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
python
|
def configfile(f):
""" This decorator will parse a configuration file in YAML format
and store the dictionary in ``ctx.blockchain.config``
"""
@click.pass_context
def new_func(ctx, *args, **kwargs):
ctx.config = yaml.load(open(ctx.obj["configfile"]))
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
|
[
"def",
"configfile",
"(",
"f",
")",
":",
"@",
"click",
".",
"pass_context",
"def",
"new_func",
"(",
"ctx",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"ctx",
".",
"config",
"=",
"yaml",
".",
"load",
"(",
"open",
"(",
"ctx",
".",
"obj",
"[",
"\"configfile\"",
"]",
")",
")",
"return",
"ctx",
".",
"invoke",
"(",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"update_wrapper",
"(",
"new_func",
",",
"f",
")"
] |
This decorator will parse a configuration file in YAML format
and store the dictionary in ``ctx.blockchain.config``
|
[
"This",
"decorator",
"will",
"parse",
"a",
"configuration",
"file",
"in",
"YAML",
"format",
"and",
"store",
"the",
"dictionary",
"in",
"ctx",
".",
"blockchain",
".",
"config"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplays/cli/decorators.py#L154-L164
|
17,758
|
peerplays-network/python-peerplays
|
peerplaysapi/websocket.py
|
PeerPlaysWebsocket.on_message
|
def on_message(self, ws, reply, *args):
""" This method is called by the websocket connection on every
message that is received. If we receive a ``notice``, we
hand over post-processing and signalling of events to
``process_notice``.
"""
log.debug("Received message: %s" % str(reply))
data = {}
try:
data = json.loads(reply, strict=False)
except ValueError:
raise ValueError("API node returned invalid format. Expected JSON!")
if data.get("method") == "notice":
id = data["params"][0]
if id >= len(self.__events__):
log.critical("Received an id that is out of range\n\n" + str(data))
return
# This is a "general" object change notification
if id == self.__events__.index("on_object"):
# Let's see if a specific object has changed
for notice in data["params"][1]:
try:
if "id" in notice:
self.process_notice(notice)
else:
for obj in notice:
if "id" in obj:
self.process_notice(obj)
except Exception as e:
log.critical(
"Error in process_notice: {}\n\n{}".format(
str(e), traceback.format_exc
)
)
else:
try:
callbackname = self.__events__[id]
log.info("Patching through to call %s" % callbackname)
[getattr(self.events, callbackname)(x) for x in data["params"][1]]
except Exception as e:
log.critical(
"Error in {}: {}\n\n{}".format(
callbackname, str(e), traceback.format_exc()
)
)
|
python
|
def on_message(self, ws, reply, *args):
""" This method is called by the websocket connection on every
message that is received. If we receive a ``notice``, we
hand over post-processing and signalling of events to
``process_notice``.
"""
log.debug("Received message: %s" % str(reply))
data = {}
try:
data = json.loads(reply, strict=False)
except ValueError:
raise ValueError("API node returned invalid format. Expected JSON!")
if data.get("method") == "notice":
id = data["params"][0]
if id >= len(self.__events__):
log.critical("Received an id that is out of range\n\n" + str(data))
return
# This is a "general" object change notification
if id == self.__events__.index("on_object"):
# Let's see if a specific object has changed
for notice in data["params"][1]:
try:
if "id" in notice:
self.process_notice(notice)
else:
for obj in notice:
if "id" in obj:
self.process_notice(obj)
except Exception as e:
log.critical(
"Error in process_notice: {}\n\n{}".format(
str(e), traceback.format_exc
)
)
else:
try:
callbackname = self.__events__[id]
log.info("Patching through to call %s" % callbackname)
[getattr(self.events, callbackname)(x) for x in data["params"][1]]
except Exception as e:
log.critical(
"Error in {}: {}\n\n{}".format(
callbackname, str(e), traceback.format_exc()
)
)
|
[
"def",
"on_message",
"(",
"self",
",",
"ws",
",",
"reply",
",",
"*",
"args",
")",
":",
"log",
".",
"debug",
"(",
"\"Received message: %s\"",
"%",
"str",
"(",
"reply",
")",
")",
"data",
"=",
"{",
"}",
"try",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"reply",
",",
"strict",
"=",
"False",
")",
"except",
"ValueError",
":",
"raise",
"ValueError",
"(",
"\"API node returned invalid format. Expected JSON!\"",
")",
"if",
"data",
".",
"get",
"(",
"\"method\"",
")",
"==",
"\"notice\"",
":",
"id",
"=",
"data",
"[",
"\"params\"",
"]",
"[",
"0",
"]",
"if",
"id",
">=",
"len",
"(",
"self",
".",
"__events__",
")",
":",
"log",
".",
"critical",
"(",
"\"Received an id that is out of range\\n\\n\"",
"+",
"str",
"(",
"data",
")",
")",
"return",
"# This is a \"general\" object change notification",
"if",
"id",
"==",
"self",
".",
"__events__",
".",
"index",
"(",
"\"on_object\"",
")",
":",
"# Let's see if a specific object has changed",
"for",
"notice",
"in",
"data",
"[",
"\"params\"",
"]",
"[",
"1",
"]",
":",
"try",
":",
"if",
"\"id\"",
"in",
"notice",
":",
"self",
".",
"process_notice",
"(",
"notice",
")",
"else",
":",
"for",
"obj",
"in",
"notice",
":",
"if",
"\"id\"",
"in",
"obj",
":",
"self",
".",
"process_notice",
"(",
"obj",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"critical",
"(",
"\"Error in process_notice: {}\\n\\n{}\"",
".",
"format",
"(",
"str",
"(",
"e",
")",
",",
"traceback",
".",
"format_exc",
")",
")",
"else",
":",
"try",
":",
"callbackname",
"=",
"self",
".",
"__events__",
"[",
"id",
"]",
"log",
".",
"info",
"(",
"\"Patching through to call %s\"",
"%",
"callbackname",
")",
"[",
"getattr",
"(",
"self",
".",
"events",
",",
"callbackname",
")",
"(",
"x",
")",
"for",
"x",
"in",
"data",
"[",
"\"params\"",
"]",
"[",
"1",
"]",
"]",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"critical",
"(",
"\"Error in {}: {}\\n\\n{}\"",
".",
"format",
"(",
"callbackname",
",",
"str",
"(",
"e",
")",
",",
"traceback",
".",
"format_exc",
"(",
")",
")",
")"
] |
This method is called by the websocket connection on every
message that is received. If we receive a ``notice``, we
hand over post-processing and signalling of events to
``process_notice``.
|
[
"This",
"method",
"is",
"called",
"by",
"the",
"websocket",
"connection",
"on",
"every",
"message",
"that",
"is",
"received",
".",
"If",
"we",
"receive",
"a",
"notice",
"we",
"hand",
"over",
"post",
"-",
"processing",
"and",
"signalling",
"of",
"events",
"to",
"process_notice",
"."
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplaysapi/websocket.py#L216-L263
|
17,759
|
peerplays-network/python-peerplays
|
peerplaysapi/websocket.py
|
PeerPlaysWebsocket.on_close
|
def on_close(self, ws):
""" Called when websocket connection is closed
"""
log.debug("Closing WebSocket connection with {}".format(self.url))
if self.keepalive and self.keepalive.is_alive():
self.keepalive.do_run = False
self.keepalive.join()
|
python
|
def on_close(self, ws):
""" Called when websocket connection is closed
"""
log.debug("Closing WebSocket connection with {}".format(self.url))
if self.keepalive and self.keepalive.is_alive():
self.keepalive.do_run = False
self.keepalive.join()
|
[
"def",
"on_close",
"(",
"self",
",",
"ws",
")",
":",
"log",
".",
"debug",
"(",
"\"Closing WebSocket connection with {}\"",
".",
"format",
"(",
"self",
".",
"url",
")",
")",
"if",
"self",
".",
"keepalive",
"and",
"self",
".",
"keepalive",
".",
"is_alive",
"(",
")",
":",
"self",
".",
"keepalive",
".",
"do_run",
"=",
"False",
"self",
".",
"keepalive",
".",
"join",
"(",
")"
] |
Called when websocket connection is closed
|
[
"Called",
"when",
"websocket",
"connection",
"is",
"closed"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplaysapi/websocket.py#L270-L276
|
17,760
|
peerplays-network/python-peerplays
|
peerplaysapi/websocket.py
|
PeerPlaysWebsocket.run_forever
|
def run_forever(self):
""" This method is used to run the websocket app continuously.
It will execute callbacks as defined and try to stay
connected with the provided APIs
"""
cnt = 0
while True:
cnt += 1
self.url = next(self.urls)
log.debug("Trying to connect to node %s" % self.url)
try:
# websocket.enableTrace(True)
self.ws = websocket.WebSocketApp(
self.url,
on_message=self.on_message,
# on_data=self.on_message,
on_error=self.on_error,
on_close=self.on_close,
on_open=self.on_open,
)
self.ws.run_forever()
except websocket.WebSocketException as exc:
if self.num_retries >= 0 and cnt > self.num_retries:
raise NumRetriesReached()
sleeptime = (cnt - 1) * 2 if cnt < 10 else 10
if sleeptime:
log.warning(
"Lost connection to node during wsconnect(): %s (%d/%d) "
% (self.url, cnt, self.num_retries)
+ "Retrying in %d seconds" % sleeptime
)
time.sleep(sleeptime)
except KeyboardInterrupt:
self.ws.keep_running = False
raise
except Exception as e:
log.critical("{}\n\n{}".format(str(e), traceback.format_exc()))
|
python
|
def run_forever(self):
""" This method is used to run the websocket app continuously.
It will execute callbacks as defined and try to stay
connected with the provided APIs
"""
cnt = 0
while True:
cnt += 1
self.url = next(self.urls)
log.debug("Trying to connect to node %s" % self.url)
try:
# websocket.enableTrace(True)
self.ws = websocket.WebSocketApp(
self.url,
on_message=self.on_message,
# on_data=self.on_message,
on_error=self.on_error,
on_close=self.on_close,
on_open=self.on_open,
)
self.ws.run_forever()
except websocket.WebSocketException as exc:
if self.num_retries >= 0 and cnt > self.num_retries:
raise NumRetriesReached()
sleeptime = (cnt - 1) * 2 if cnt < 10 else 10
if sleeptime:
log.warning(
"Lost connection to node during wsconnect(): %s (%d/%d) "
% (self.url, cnt, self.num_retries)
+ "Retrying in %d seconds" % sleeptime
)
time.sleep(sleeptime)
except KeyboardInterrupt:
self.ws.keep_running = False
raise
except Exception as e:
log.critical("{}\n\n{}".format(str(e), traceback.format_exc()))
|
[
"def",
"run_forever",
"(",
"self",
")",
":",
"cnt",
"=",
"0",
"while",
"True",
":",
"cnt",
"+=",
"1",
"self",
".",
"url",
"=",
"next",
"(",
"self",
".",
"urls",
")",
"log",
".",
"debug",
"(",
"\"Trying to connect to node %s\"",
"%",
"self",
".",
"url",
")",
"try",
":",
"# websocket.enableTrace(True)",
"self",
".",
"ws",
"=",
"websocket",
".",
"WebSocketApp",
"(",
"self",
".",
"url",
",",
"on_message",
"=",
"self",
".",
"on_message",
",",
"# on_data=self.on_message,",
"on_error",
"=",
"self",
".",
"on_error",
",",
"on_close",
"=",
"self",
".",
"on_close",
",",
"on_open",
"=",
"self",
".",
"on_open",
",",
")",
"self",
".",
"ws",
".",
"run_forever",
"(",
")",
"except",
"websocket",
".",
"WebSocketException",
"as",
"exc",
":",
"if",
"self",
".",
"num_retries",
">=",
"0",
"and",
"cnt",
">",
"self",
".",
"num_retries",
":",
"raise",
"NumRetriesReached",
"(",
")",
"sleeptime",
"=",
"(",
"cnt",
"-",
"1",
")",
"*",
"2",
"if",
"cnt",
"<",
"10",
"else",
"10",
"if",
"sleeptime",
":",
"log",
".",
"warning",
"(",
"\"Lost connection to node during wsconnect(): %s (%d/%d) \"",
"%",
"(",
"self",
".",
"url",
",",
"cnt",
",",
"self",
".",
"num_retries",
")",
"+",
"\"Retrying in %d seconds\"",
"%",
"sleeptime",
")",
"time",
".",
"sleep",
"(",
"sleeptime",
")",
"except",
"KeyboardInterrupt",
":",
"self",
".",
"ws",
".",
"keep_running",
"=",
"False",
"raise",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"critical",
"(",
"\"{}\\n\\n{}\"",
".",
"format",
"(",
"str",
"(",
"e",
")",
",",
"traceback",
".",
"format_exc",
"(",
")",
")",
")"
] |
This method is used to run the websocket app continuously.
It will execute callbacks as defined and try to stay
connected with the provided APIs
|
[
"This",
"method",
"is",
"used",
"to",
"run",
"the",
"websocket",
"app",
"continuously",
".",
"It",
"will",
"execute",
"callbacks",
"as",
"defined",
"and",
"try",
"to",
"stay",
"connected",
"with",
"the",
"provided",
"APIs"
] |
188f04238e7e21d5f73e9b01099eea44289ef6b7
|
https://github.com/peerplays-network/python-peerplays/blob/188f04238e7e21d5f73e9b01099eea44289ef6b7/peerplaysapi/websocket.py#L278-L317
|
17,761
|
Zsailer/pandas_flavor
|
pandas_flavor/register.py
|
register_dataframe_method
|
def register_dataframe_method(method):
"""Register a function as a method attached to the Pandas DataFrame.
Example
-------
.. code-block:: python
@register_dataframe_method
def print_column(df, col):
'''Print the dataframe column given'''
print(df[col])
"""
def inner(*args, **kwargs):
class AccessorMethod(object):
def __init__(self, pandas_obj):
self._obj = pandas_obj
@wraps(method)
def __call__(self, *args, **kwargs):
return method(self._obj, *args, **kwargs)
register_dataframe_accessor(method.__name__)(AccessorMethod)
return method
return inner()
|
python
|
def register_dataframe_method(method):
"""Register a function as a method attached to the Pandas DataFrame.
Example
-------
.. code-block:: python
@register_dataframe_method
def print_column(df, col):
'''Print the dataframe column given'''
print(df[col])
"""
def inner(*args, **kwargs):
class AccessorMethod(object):
def __init__(self, pandas_obj):
self._obj = pandas_obj
@wraps(method)
def __call__(self, *args, **kwargs):
return method(self._obj, *args, **kwargs)
register_dataframe_accessor(method.__name__)(AccessorMethod)
return method
return inner()
|
[
"def",
"register_dataframe_method",
"(",
"method",
")",
":",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"class",
"AccessorMethod",
"(",
"object",
")",
":",
"def",
"__init__",
"(",
"self",
",",
"pandas_obj",
")",
":",
"self",
".",
"_obj",
"=",
"pandas_obj",
"@",
"wraps",
"(",
"method",
")",
"def",
"__call__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"method",
"(",
"self",
".",
"_obj",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"register_dataframe_accessor",
"(",
"method",
".",
"__name__",
")",
"(",
"AccessorMethod",
")",
"return",
"method",
"return",
"inner",
"(",
")"
] |
Register a function as a method attached to the Pandas DataFrame.
Example
-------
.. code-block:: python
@register_dataframe_method
def print_column(df, col):
'''Print the dataframe column given'''
print(df[col])
|
[
"Register",
"a",
"function",
"as",
"a",
"method",
"attached",
"to",
"the",
"Pandas",
"DataFrame",
"."
] |
1953aeee09424300d69a11dd2ffd3460a806fb65
|
https://github.com/Zsailer/pandas_flavor/blob/1953aeee09424300d69a11dd2ffd3460a806fb65/pandas_flavor/register.py#L6-L35
|
17,762
|
Zsailer/pandas_flavor
|
pandas_flavor/register.py
|
register_series_method
|
def register_series_method(method):
"""Register a function as a method attached to the Pandas Series.
"""
def inner(*args, **kwargs):
class AccessorMethod(object):
__doc__ = method.__doc__
def __init__(self, pandas_obj):
self._obj = pandas_obj
@wraps(method)
def __call__(self, *args, **kwargs):
return method(self._obj, *args, **kwargs)
register_series_accessor(method.__name__)(AccessorMethod)
return method
return inner()
|
python
|
def register_series_method(method):
"""Register a function as a method attached to the Pandas Series.
"""
def inner(*args, **kwargs):
class AccessorMethod(object):
__doc__ = method.__doc__
def __init__(self, pandas_obj):
self._obj = pandas_obj
@wraps(method)
def __call__(self, *args, **kwargs):
return method(self._obj, *args, **kwargs)
register_series_accessor(method.__name__)(AccessorMethod)
return method
return inner()
|
[
"def",
"register_series_method",
"(",
"method",
")",
":",
"def",
"inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"class",
"AccessorMethod",
"(",
"object",
")",
":",
"__doc__",
"=",
"method",
".",
"__doc__",
"def",
"__init__",
"(",
"self",
",",
"pandas_obj",
")",
":",
"self",
".",
"_obj",
"=",
"pandas_obj",
"@",
"wraps",
"(",
"method",
")",
"def",
"__call__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"method",
"(",
"self",
".",
"_obj",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"register_series_accessor",
"(",
"method",
".",
"__name__",
")",
"(",
"AccessorMethod",
")",
"return",
"method",
"return",
"inner",
"(",
")"
] |
Register a function as a method attached to the Pandas Series.
|
[
"Register",
"a",
"function",
"as",
"a",
"method",
"attached",
"to",
"the",
"Pandas",
"Series",
"."
] |
1953aeee09424300d69a11dd2ffd3460a806fb65
|
https://github.com/Zsailer/pandas_flavor/blob/1953aeee09424300d69a11dd2ffd3460a806fb65/pandas_flavor/register.py#L38-L57
|
17,763
|
pinax/pinax-invitations
|
pinax/invitations/models.py
|
InvitationStat.add_invites_to_user
|
def add_invites_to_user(cls, user, amount):
"""
Add the specified number of invites to current allocated total.
"""
stat, _ = InvitationStat.objects.get_or_create(user=user)
if stat.invites_allocated != -1:
stat.invites_allocated += amount
stat.save()
|
python
|
def add_invites_to_user(cls, user, amount):
"""
Add the specified number of invites to current allocated total.
"""
stat, _ = InvitationStat.objects.get_or_create(user=user)
if stat.invites_allocated != -1:
stat.invites_allocated += amount
stat.save()
|
[
"def",
"add_invites_to_user",
"(",
"cls",
",",
"user",
",",
"amount",
")",
":",
"stat",
",",
"_",
"=",
"InvitationStat",
".",
"objects",
".",
"get_or_create",
"(",
"user",
"=",
"user",
")",
"if",
"stat",
".",
"invites_allocated",
"!=",
"-",
"1",
":",
"stat",
".",
"invites_allocated",
"+=",
"amount",
"stat",
".",
"save",
"(",
")"
] |
Add the specified number of invites to current allocated total.
|
[
"Add",
"the",
"specified",
"number",
"of",
"invites",
"to",
"current",
"allocated",
"total",
"."
] |
6c6e863da179a1c620074efe5b5728cd1e6eff1b
|
https://github.com/pinax/pinax-invitations/blob/6c6e863da179a1c620074efe5b5728cd1e6eff1b/pinax/invitations/models.py#L111-L118
|
17,764
|
pinax/pinax-invitations
|
pinax/invitations/models.py
|
InvitationStat.add_invites
|
def add_invites(cls, amount):
"""
Add invites for all users.
"""
for user in get_user_model().objects.all():
cls.add_invites_to_user(user, amount)
|
python
|
def add_invites(cls, amount):
"""
Add invites for all users.
"""
for user in get_user_model().objects.all():
cls.add_invites_to_user(user, amount)
|
[
"def",
"add_invites",
"(",
"cls",
",",
"amount",
")",
":",
"for",
"user",
"in",
"get_user_model",
"(",
")",
".",
"objects",
".",
"all",
"(",
")",
":",
"cls",
".",
"add_invites_to_user",
"(",
"user",
",",
"amount",
")"
] |
Add invites for all users.
|
[
"Add",
"invites",
"for",
"all",
"users",
"."
] |
6c6e863da179a1c620074efe5b5728cd1e6eff1b
|
https://github.com/pinax/pinax-invitations/blob/6c6e863da179a1c620074efe5b5728cd1e6eff1b/pinax/invitations/models.py#L121-L126
|
17,765
|
pinax/pinax-invitations
|
pinax/invitations/models.py
|
InvitationStat.topoff_user
|
def topoff_user(cls, user, amount):
"""
Ensure user has a minimum number of invites.
"""
stat, _ = cls.objects.get_or_create(user=user)
remaining = stat.invites_remaining()
if remaining != -1 and remaining < amount:
stat.invites_allocated += (amount - remaining)
stat.save()
|
python
|
def topoff_user(cls, user, amount):
"""
Ensure user has a minimum number of invites.
"""
stat, _ = cls.objects.get_or_create(user=user)
remaining = stat.invites_remaining()
if remaining != -1 and remaining < amount:
stat.invites_allocated += (amount - remaining)
stat.save()
|
[
"def",
"topoff_user",
"(",
"cls",
",",
"user",
",",
"amount",
")",
":",
"stat",
",",
"_",
"=",
"cls",
".",
"objects",
".",
"get_or_create",
"(",
"user",
"=",
"user",
")",
"remaining",
"=",
"stat",
".",
"invites_remaining",
"(",
")",
"if",
"remaining",
"!=",
"-",
"1",
"and",
"remaining",
"<",
"amount",
":",
"stat",
".",
"invites_allocated",
"+=",
"(",
"amount",
"-",
"remaining",
")",
"stat",
".",
"save",
"(",
")"
] |
Ensure user has a minimum number of invites.
|
[
"Ensure",
"user",
"has",
"a",
"minimum",
"number",
"of",
"invites",
"."
] |
6c6e863da179a1c620074efe5b5728cd1e6eff1b
|
https://github.com/pinax/pinax-invitations/blob/6c6e863da179a1c620074efe5b5728cd1e6eff1b/pinax/invitations/models.py#L129-L137
|
17,766
|
pinax/pinax-invitations
|
pinax/invitations/models.py
|
InvitationStat.topoff
|
def topoff(cls, amount):
"""
Ensure all users have a minimum number of invites.
"""
for user in get_user_model().objects.all():
cls.topoff_user(user, amount)
|
python
|
def topoff(cls, amount):
"""
Ensure all users have a minimum number of invites.
"""
for user in get_user_model().objects.all():
cls.topoff_user(user, amount)
|
[
"def",
"topoff",
"(",
"cls",
",",
"amount",
")",
":",
"for",
"user",
"in",
"get_user_model",
"(",
")",
".",
"objects",
".",
"all",
"(",
")",
":",
"cls",
".",
"topoff_user",
"(",
"user",
",",
"amount",
")"
] |
Ensure all users have a minimum number of invites.
|
[
"Ensure",
"all",
"users",
"have",
"a",
"minimum",
"number",
"of",
"invites",
"."
] |
6c6e863da179a1c620074efe5b5728cd1e6eff1b
|
https://github.com/pinax/pinax-invitations/blob/6c6e863da179a1c620074efe5b5728cd1e6eff1b/pinax/invitations/models.py#L140-L145
|
17,767
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.align
|
def align(self, alignment = None):
"""
Repositions the current reader to match architecture alignment
"""
if alignment is None:
if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64:
alignment = 8
else:
alignment = 4
offset = self.current_position % alignment
if offset == 0:
return
offset_to_aligned = (alignment - offset) % alignment
self.seek(offset_to_aligned, 1)
return
|
python
|
def align(self, alignment = None):
"""
Repositions the current reader to match architecture alignment
"""
if alignment is None:
if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64:
alignment = 8
else:
alignment = 4
offset = self.current_position % alignment
if offset == 0:
return
offset_to_aligned = (alignment - offset) % alignment
self.seek(offset_to_aligned, 1)
return
|
[
"def",
"align",
"(",
"self",
",",
"alignment",
"=",
"None",
")",
":",
"if",
"alignment",
"is",
"None",
":",
"if",
"self",
".",
"reader",
".",
"sysinfo",
".",
"ProcessorArchitecture",
"==",
"PROCESSOR_ARCHITECTURE",
".",
"AMD64",
":",
"alignment",
"=",
"8",
"else",
":",
"alignment",
"=",
"4",
"offset",
"=",
"self",
".",
"current_position",
"%",
"alignment",
"if",
"offset",
"==",
"0",
":",
"return",
"offset_to_aligned",
"=",
"(",
"alignment",
"-",
"offset",
")",
"%",
"alignment",
"self",
".",
"seek",
"(",
"offset_to_aligned",
",",
"1",
")",
"return"
] |
Repositions the current reader to match architecture alignment
|
[
"Repositions",
"the",
"current",
"reader",
"to",
"match",
"architecture",
"alignment"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L87-L101
|
17,768
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.peek
|
def peek(self, length):
"""
Returns up to length bytes from the current memory segment
"""
t = self.current_position + length
if not self.current_segment.inrange(t):
raise Exception('Would read over segment boundaries!')
return self.current_segment.data[self.current_position - self.current_segment.start_address :t - self.current_segment.start_address]
|
python
|
def peek(self, length):
"""
Returns up to length bytes from the current memory segment
"""
t = self.current_position + length
if not self.current_segment.inrange(t):
raise Exception('Would read over segment boundaries!')
return self.current_segment.data[self.current_position - self.current_segment.start_address :t - self.current_segment.start_address]
|
[
"def",
"peek",
"(",
"self",
",",
"length",
")",
":",
"t",
"=",
"self",
".",
"current_position",
"+",
"length",
"if",
"not",
"self",
".",
"current_segment",
".",
"inrange",
"(",
"t",
")",
":",
"raise",
"Exception",
"(",
"'Would read over segment boundaries!'",
")",
"return",
"self",
".",
"current_segment",
".",
"data",
"[",
"self",
".",
"current_position",
"-",
"self",
".",
"current_segment",
".",
"start_address",
":",
"t",
"-",
"self",
".",
"current_segment",
".",
"start_address",
"]"
] |
Returns up to length bytes from the current memory segment
|
[
"Returns",
"up",
"to",
"length",
"bytes",
"from",
"the",
"current",
"memory",
"segment"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L109-L116
|
17,769
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.read
|
def read(self, size = -1):
"""
Returns data bytes of size size from the current segment. If size is -1 it returns all the remaining data bytes from memory segment
"""
if size < -1:
raise Exception('You shouldnt be doing this')
if size == -1:
t = self.current_segment.remaining_len(self.current_position)
if not t:
return None
old_new_pos = self.current_position
self.current_position = self.current_segment.end_address
return self.current_segment.data[old_new_pos - self.current_segment.start_address:]
t = self.current_position + size
if not self.current_segment.inrange(t):
raise Exception('Would read over segment boundaries!')
old_new_pos = self.current_position
self.current_position = t
return self.current_segment.data[old_new_pos - self.current_segment.start_address :t - self.current_segment.start_address]
|
python
|
def read(self, size = -1):
"""
Returns data bytes of size size from the current segment. If size is -1 it returns all the remaining data bytes from memory segment
"""
if size < -1:
raise Exception('You shouldnt be doing this')
if size == -1:
t = self.current_segment.remaining_len(self.current_position)
if not t:
return None
old_new_pos = self.current_position
self.current_position = self.current_segment.end_address
return self.current_segment.data[old_new_pos - self.current_segment.start_address:]
t = self.current_position + size
if not self.current_segment.inrange(t):
raise Exception('Would read over segment boundaries!')
old_new_pos = self.current_position
self.current_position = t
return self.current_segment.data[old_new_pos - self.current_segment.start_address :t - self.current_segment.start_address]
|
[
"def",
"read",
"(",
"self",
",",
"size",
"=",
"-",
"1",
")",
":",
"if",
"size",
"<",
"-",
"1",
":",
"raise",
"Exception",
"(",
"'You shouldnt be doing this'",
")",
"if",
"size",
"==",
"-",
"1",
":",
"t",
"=",
"self",
".",
"current_segment",
".",
"remaining_len",
"(",
"self",
".",
"current_position",
")",
"if",
"not",
"t",
":",
"return",
"None",
"old_new_pos",
"=",
"self",
".",
"current_position",
"self",
".",
"current_position",
"=",
"self",
".",
"current_segment",
".",
"end_address",
"return",
"self",
".",
"current_segment",
".",
"data",
"[",
"old_new_pos",
"-",
"self",
".",
"current_segment",
".",
"start_address",
":",
"]",
"t",
"=",
"self",
".",
"current_position",
"+",
"size",
"if",
"not",
"self",
".",
"current_segment",
".",
"inrange",
"(",
"t",
")",
":",
"raise",
"Exception",
"(",
"'Would read over segment boundaries!'",
")",
"old_new_pos",
"=",
"self",
".",
"current_position",
"self",
".",
"current_position",
"=",
"t",
"return",
"self",
".",
"current_segment",
".",
"data",
"[",
"old_new_pos",
"-",
"self",
".",
"current_segment",
".",
"start_address",
":",
"t",
"-",
"self",
".",
"current_segment",
".",
"start_address",
"]"
] |
Returns data bytes of size size from the current segment. If size is -1 it returns all the remaining data bytes from memory segment
|
[
"Returns",
"data",
"bytes",
"of",
"size",
"size",
"from",
"the",
"current",
"segment",
".",
"If",
"size",
"is",
"-",
"1",
"it",
"returns",
"all",
"the",
"remaining",
"data",
"bytes",
"from",
"memory",
"segment"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L118-L139
|
17,770
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.read_int
|
def read_int(self):
"""
Reads an integer. The size depends on the architecture.
Reads a 4 byte small-endian singed int on 32 bit arch
Reads an 8 byte small-endian singed int on 64 bit arch
"""
if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64:
return int.from_bytes(self.read(8), byteorder = 'little', signed = True)
else:
return int.from_bytes(self.read(4), byteorder = 'little', signed = True)
|
python
|
def read_int(self):
"""
Reads an integer. The size depends on the architecture.
Reads a 4 byte small-endian singed int on 32 bit arch
Reads an 8 byte small-endian singed int on 64 bit arch
"""
if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64:
return int.from_bytes(self.read(8), byteorder = 'little', signed = True)
else:
return int.from_bytes(self.read(4), byteorder = 'little', signed = True)
|
[
"def",
"read_int",
"(",
"self",
")",
":",
"if",
"self",
".",
"reader",
".",
"sysinfo",
".",
"ProcessorArchitecture",
"==",
"PROCESSOR_ARCHITECTURE",
".",
"AMD64",
":",
"return",
"int",
".",
"from_bytes",
"(",
"self",
".",
"read",
"(",
"8",
")",
",",
"byteorder",
"=",
"'little'",
",",
"signed",
"=",
"True",
")",
"else",
":",
"return",
"int",
".",
"from_bytes",
"(",
"self",
".",
"read",
"(",
"4",
")",
",",
"byteorder",
"=",
"'little'",
",",
"signed",
"=",
"True",
")"
] |
Reads an integer. The size depends on the architecture.
Reads a 4 byte small-endian singed int on 32 bit arch
Reads an 8 byte small-endian singed int on 64 bit arch
|
[
"Reads",
"an",
"integer",
".",
"The",
"size",
"depends",
"on",
"the",
"architecture",
".",
"Reads",
"a",
"4",
"byte",
"small",
"-",
"endian",
"singed",
"int",
"on",
"32",
"bit",
"arch",
"Reads",
"an",
"8",
"byte",
"small",
"-",
"endian",
"singed",
"int",
"on",
"64",
"bit",
"arch"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L141-L150
|
17,771
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.read_uint
|
def read_uint(self):
"""
Reads an integer. The size depends on the architecture.
Reads a 4 byte small-endian unsinged int on 32 bit arch
Reads an 8 byte small-endian unsinged int on 64 bit arch
"""
if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64:
return int.from_bytes(self.read(8), byteorder = 'little', signed = False)
else:
return int.from_bytes(self.read(4), byteorder = 'little', signed = False)
|
python
|
def read_uint(self):
"""
Reads an integer. The size depends on the architecture.
Reads a 4 byte small-endian unsinged int on 32 bit arch
Reads an 8 byte small-endian unsinged int on 64 bit arch
"""
if self.reader.sysinfo.ProcessorArchitecture == PROCESSOR_ARCHITECTURE.AMD64:
return int.from_bytes(self.read(8), byteorder = 'little', signed = False)
else:
return int.from_bytes(self.read(4), byteorder = 'little', signed = False)
|
[
"def",
"read_uint",
"(",
"self",
")",
":",
"if",
"self",
".",
"reader",
".",
"sysinfo",
".",
"ProcessorArchitecture",
"==",
"PROCESSOR_ARCHITECTURE",
".",
"AMD64",
":",
"return",
"int",
".",
"from_bytes",
"(",
"self",
".",
"read",
"(",
"8",
")",
",",
"byteorder",
"=",
"'little'",
",",
"signed",
"=",
"False",
")",
"else",
":",
"return",
"int",
".",
"from_bytes",
"(",
"self",
".",
"read",
"(",
"4",
")",
",",
"byteorder",
"=",
"'little'",
",",
"signed",
"=",
"False",
")"
] |
Reads an integer. The size depends on the architecture.
Reads a 4 byte small-endian unsinged int on 32 bit arch
Reads an 8 byte small-endian unsinged int on 64 bit arch
|
[
"Reads",
"an",
"integer",
".",
"The",
"size",
"depends",
"on",
"the",
"architecture",
".",
"Reads",
"a",
"4",
"byte",
"small",
"-",
"endian",
"unsinged",
"int",
"on",
"32",
"bit",
"arch",
"Reads",
"an",
"8",
"byte",
"small",
"-",
"endian",
"unsinged",
"int",
"on",
"64",
"bit",
"arch"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L152-L161
|
17,772
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.find
|
def find(self, pattern):
"""
Searches for a pattern in the current memory segment
"""
pos = self.current_segment.data.find(pattern)
if pos == -1:
return -1
return pos + self.current_position
|
python
|
def find(self, pattern):
"""
Searches for a pattern in the current memory segment
"""
pos = self.current_segment.data.find(pattern)
if pos == -1:
return -1
return pos + self.current_position
|
[
"def",
"find",
"(",
"self",
",",
"pattern",
")",
":",
"pos",
"=",
"self",
".",
"current_segment",
".",
"data",
".",
"find",
"(",
"pattern",
")",
"if",
"pos",
"==",
"-",
"1",
":",
"return",
"-",
"1",
"return",
"pos",
"+",
"self",
".",
"current_position"
] |
Searches for a pattern in the current memory segment
|
[
"Searches",
"for",
"a",
"pattern",
"in",
"the",
"current",
"memory",
"segment"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L163-L170
|
17,773
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.find_all
|
def find_all(self, pattern):
"""
Searches for all occurrences of a pattern in the current memory segment, returns all occurrences as a list
"""
pos = []
last_found = -1
while True:
last_found = self.current_segment.data.find(pattern, last_found + 1)
if last_found == -1:
break
pos.append(last_found + self.current_segment.start_address)
return pos
|
python
|
def find_all(self, pattern):
"""
Searches for all occurrences of a pattern in the current memory segment, returns all occurrences as a list
"""
pos = []
last_found = -1
while True:
last_found = self.current_segment.data.find(pattern, last_found + 1)
if last_found == -1:
break
pos.append(last_found + self.current_segment.start_address)
return pos
|
[
"def",
"find_all",
"(",
"self",
",",
"pattern",
")",
":",
"pos",
"=",
"[",
"]",
"last_found",
"=",
"-",
"1",
"while",
"True",
":",
"last_found",
"=",
"self",
".",
"current_segment",
".",
"data",
".",
"find",
"(",
"pattern",
",",
"last_found",
"+",
"1",
")",
"if",
"last_found",
"==",
"-",
"1",
":",
"break",
"pos",
".",
"append",
"(",
"last_found",
"+",
"self",
".",
"current_segment",
".",
"start_address",
")",
"return",
"pos"
] |
Searches for all occurrences of a pattern in the current memory segment, returns all occurrences as a list
|
[
"Searches",
"for",
"all",
"occurrences",
"of",
"a",
"pattern",
"in",
"the",
"current",
"memory",
"segment",
"returns",
"all",
"occurrences",
"as",
"a",
"list"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L172-L184
|
17,774
|
skelsec/minidump
|
minidump/minidumpreader.py
|
MinidumpBufferedReader.find_global
|
def find_global(self, pattern):
"""
Searches for the pattern in the whole process memory space and returns the first occurrence.
This is exhaustive!
"""
pos_s = self.reader.search(pattern)
if len(pos_s) == 0:
return -1
return pos_s[0]
|
python
|
def find_global(self, pattern):
"""
Searches for the pattern in the whole process memory space and returns the first occurrence.
This is exhaustive!
"""
pos_s = self.reader.search(pattern)
if len(pos_s) == 0:
return -1
return pos_s[0]
|
[
"def",
"find_global",
"(",
"self",
",",
"pattern",
")",
":",
"pos_s",
"=",
"self",
".",
"reader",
".",
"search",
"(",
"pattern",
")",
"if",
"len",
"(",
"pos_s",
")",
"==",
"0",
":",
"return",
"-",
"1",
"return",
"pos_s",
"[",
"0",
"]"
] |
Searches for the pattern in the whole process memory space and returns the first occurrence.
This is exhaustive!
|
[
"Searches",
"for",
"the",
"pattern",
"in",
"the",
"whole",
"process",
"memory",
"space",
"and",
"returns",
"the",
"first",
"occurrence",
".",
"This",
"is",
"exhaustive!"
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/minidumpreader.py#L186-L195
|
17,775
|
skelsec/minidump
|
minidump/utils/privileges.py
|
report_privilege_information
|
def report_privilege_information():
"Report all privilege information assigned to the current process."
privileges = get_privilege_information()
print("found {0} privileges".format(privileges.count))
tuple(map(print, privileges))
|
python
|
def report_privilege_information():
"Report all privilege information assigned to the current process."
privileges = get_privilege_information()
print("found {0} privileges".format(privileges.count))
tuple(map(print, privileges))
|
[
"def",
"report_privilege_information",
"(",
")",
":",
"privileges",
"=",
"get_privilege_information",
"(",
")",
"print",
"(",
"\"found {0} privileges\"",
".",
"format",
"(",
"privileges",
".",
"count",
")",
")",
"tuple",
"(",
"map",
"(",
"print",
",",
"privileges",
")",
")"
] |
Report all privilege information assigned to the current process.
|
[
"Report",
"all",
"privilege",
"information",
"assigned",
"to",
"the",
"current",
"process",
"."
] |
0c4dcabe6f11d7a403440919ffa9e3c9889c5212
|
https://github.com/skelsec/minidump/blob/0c4dcabe6f11d7a403440919ffa9e3c9889c5212/minidump/utils/privileges.py#L171-L175
|
17,776
|
rajasimon/beatserver
|
beatserver/server.py
|
BeatServer.handle
|
async def handle(self):
"""
Listens on all the provided channels and handles the messages.
"""
# For each channel, launch its own listening coroutine
listeners = []
for key, value in self.beat_config.items():
listeners.append(asyncio.ensure_future(
self.listener(key)
))
# For each beat configuration, launch it's own sending pattern
emitters = []
for key, value in self.beat_config.items():
emitters.append(asyncio.ensure_future(
self.emitters(key, value)
))
# Wait for them all to exit
await asyncio.wait(emitters)
await asyncio.wait(listeners)
|
python
|
async def handle(self):
"""
Listens on all the provided channels and handles the messages.
"""
# For each channel, launch its own listening coroutine
listeners = []
for key, value in self.beat_config.items():
listeners.append(asyncio.ensure_future(
self.listener(key)
))
# For each beat configuration, launch it's own sending pattern
emitters = []
for key, value in self.beat_config.items():
emitters.append(asyncio.ensure_future(
self.emitters(key, value)
))
# Wait for them all to exit
await asyncio.wait(emitters)
await asyncio.wait(listeners)
|
[
"async",
"def",
"handle",
"(",
"self",
")",
":",
"# For each channel, launch its own listening coroutine",
"listeners",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"self",
".",
"beat_config",
".",
"items",
"(",
")",
":",
"listeners",
".",
"append",
"(",
"asyncio",
".",
"ensure_future",
"(",
"self",
".",
"listener",
"(",
"key",
")",
")",
")",
"# For each beat configuration, launch it's own sending pattern",
"emitters",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"self",
".",
"beat_config",
".",
"items",
"(",
")",
":",
"emitters",
".",
"append",
"(",
"asyncio",
".",
"ensure_future",
"(",
"self",
".",
"emitters",
"(",
"key",
",",
"value",
")",
")",
")",
"# Wait for them all to exit",
"await",
"asyncio",
".",
"wait",
"(",
"emitters",
")",
"await",
"asyncio",
".",
"wait",
"(",
"listeners",
")"
] |
Listens on all the provided channels and handles the messages.
|
[
"Listens",
"on",
"all",
"the",
"provided",
"channels",
"and",
"handles",
"the",
"messages",
"."
] |
8c653c46cdcf98398ca9d0bc6d3e47e5d621bb6a
|
https://github.com/rajasimon/beatserver/blob/8c653c46cdcf98398ca9d0bc6d3e47e5d621bb6a/beatserver/server.py#L16-L37
|
17,777
|
rajasimon/beatserver
|
beatserver/server.py
|
BeatServer.emitters
|
async def emitters(self, key, value):
"""
Single-channel emitter
"""
while True:
await asyncio.sleep(value['schedule'].total_seconds())
await self.channel_layer.send(key, {
"type": value['type'],
"message": value['message']
})
|
python
|
async def emitters(self, key, value):
"""
Single-channel emitter
"""
while True:
await asyncio.sleep(value['schedule'].total_seconds())
await self.channel_layer.send(key, {
"type": value['type'],
"message": value['message']
})
|
[
"async",
"def",
"emitters",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"while",
"True",
":",
"await",
"asyncio",
".",
"sleep",
"(",
"value",
"[",
"'schedule'",
"]",
".",
"total_seconds",
"(",
")",
")",
"await",
"self",
".",
"channel_layer",
".",
"send",
"(",
"key",
",",
"{",
"\"type\"",
":",
"value",
"[",
"'type'",
"]",
",",
"\"message\"",
":",
"value",
"[",
"'message'",
"]",
"}",
")"
] |
Single-channel emitter
|
[
"Single",
"-",
"channel",
"emitter"
] |
8c653c46cdcf98398ca9d0bc6d3e47e5d621bb6a
|
https://github.com/rajasimon/beatserver/blob/8c653c46cdcf98398ca9d0bc6d3e47e5d621bb6a/beatserver/server.py#L40-L50
|
17,778
|
rajasimon/beatserver
|
beatserver/server.py
|
BeatServer.listener
|
async def listener(self, channel):
"""
Single-channel listener
"""
while True:
message = await self.channel_layer.receive(channel)
if not message.get("type", None):
raise ValueError("Worker received message with no type.")
# Make a scope and get an application instance for it
scope = {"type": "channel", "channel": channel}
instance_queue = self.get_or_create_application_instance(channel, scope)
# Run the message into the app
await instance_queue.put(message)
|
python
|
async def listener(self, channel):
"""
Single-channel listener
"""
while True:
message = await self.channel_layer.receive(channel)
if not message.get("type", None):
raise ValueError("Worker received message with no type.")
# Make a scope and get an application instance for it
scope = {"type": "channel", "channel": channel}
instance_queue = self.get_or_create_application_instance(channel, scope)
# Run the message into the app
await instance_queue.put(message)
|
[
"async",
"def",
"listener",
"(",
"self",
",",
"channel",
")",
":",
"while",
"True",
":",
"message",
"=",
"await",
"self",
".",
"channel_layer",
".",
"receive",
"(",
"channel",
")",
"if",
"not",
"message",
".",
"get",
"(",
"\"type\"",
",",
"None",
")",
":",
"raise",
"ValueError",
"(",
"\"Worker received message with no type.\"",
")",
"# Make a scope and get an application instance for it",
"scope",
"=",
"{",
"\"type\"",
":",
"\"channel\"",
",",
"\"channel\"",
":",
"channel",
"}",
"instance_queue",
"=",
"self",
".",
"get_or_create_application_instance",
"(",
"channel",
",",
"scope",
")",
"# Run the message into the app",
"await",
"instance_queue",
".",
"put",
"(",
"message",
")"
] |
Single-channel listener
|
[
"Single",
"-",
"channel",
"listener"
] |
8c653c46cdcf98398ca9d0bc6d3e47e5d621bb6a
|
https://github.com/rajasimon/beatserver/blob/8c653c46cdcf98398ca9d0bc6d3e47e5d621bb6a/beatserver/server.py#L54-L66
|
17,779
|
pinax/pinax-ratings
|
pinax/ratings/templatetags/pinax_ratings_tags.py
|
rating_count
|
def rating_count(obj):
"""
Total amount of users who have submitted a positive rating for this object.
Usage:
{% rating_count obj %}
"""
count = Rating.objects.filter(
object_id=obj.pk,
content_type=ContentType.objects.get_for_model(obj),
).exclude(rating=0).count()
return count
|
python
|
def rating_count(obj):
"""
Total amount of users who have submitted a positive rating for this object.
Usage:
{% rating_count obj %}
"""
count = Rating.objects.filter(
object_id=obj.pk,
content_type=ContentType.objects.get_for_model(obj),
).exclude(rating=0).count()
return count
|
[
"def",
"rating_count",
"(",
"obj",
")",
":",
"count",
"=",
"Rating",
".",
"objects",
".",
"filter",
"(",
"object_id",
"=",
"obj",
".",
"pk",
",",
"content_type",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"obj",
")",
",",
")",
".",
"exclude",
"(",
"rating",
"=",
"0",
")",
".",
"count",
"(",
")",
"return",
"count"
] |
Total amount of users who have submitted a positive rating for this object.
Usage:
{% rating_count obj %}
|
[
"Total",
"amount",
"of",
"users",
"who",
"have",
"submitted",
"a",
"positive",
"rating",
"for",
"this",
"object",
"."
] |
eca388fea1ccd09ba844ac29a7489e41b64267f5
|
https://github.com/pinax/pinax-ratings/blob/eca388fea1ccd09ba844ac29a7489e41b64267f5/pinax/ratings/templatetags/pinax_ratings_tags.py#L115-L126
|
17,780
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/BicolorMatrix8x8.py
|
BicolorMatrix8x8.set_pixel
|
def set_pixel(self, x, y, value):
"""Set pixel at position x, y to the given value. X and Y should be values
of 0 to 8. Value should be OFF, GREEN, RED, or YELLOW.
"""
if x < 0 or x > 7 or y < 0 or y > 7:
# Ignore out of bounds pixels.
return
# Set green LED based on 1st bit in value.
self.set_led(y * 16 + x, 1 if value & GREEN > 0 else 0)
# Set red LED based on 2nd bit in value.
self.set_led(y * 16 + x + 8, 1 if value & RED > 0 else 0)
|
python
|
def set_pixel(self, x, y, value):
"""Set pixel at position x, y to the given value. X and Y should be values
of 0 to 8. Value should be OFF, GREEN, RED, or YELLOW.
"""
if x < 0 or x > 7 or y < 0 or y > 7:
# Ignore out of bounds pixels.
return
# Set green LED based on 1st bit in value.
self.set_led(y * 16 + x, 1 if value & GREEN > 0 else 0)
# Set red LED based on 2nd bit in value.
self.set_led(y * 16 + x + 8, 1 if value & RED > 0 else 0)
|
[
"def",
"set_pixel",
"(",
"self",
",",
"x",
",",
"y",
",",
"value",
")",
":",
"if",
"x",
"<",
"0",
"or",
"x",
">",
"7",
"or",
"y",
"<",
"0",
"or",
"y",
">",
"7",
":",
"# Ignore out of bounds pixels.",
"return",
"# Set green LED based on 1st bit in value.",
"self",
".",
"set_led",
"(",
"y",
"*",
"16",
"+",
"x",
",",
"1",
"if",
"value",
"&",
"GREEN",
">",
"0",
"else",
"0",
")",
"# Set red LED based on 2nd bit in value.",
"self",
".",
"set_led",
"(",
"y",
"*",
"16",
"+",
"x",
"+",
"8",
",",
"1",
"if",
"value",
"&",
"RED",
">",
"0",
"else",
"0",
")"
] |
Set pixel at position x, y to the given value. X and Y should be values
of 0 to 8. Value should be OFF, GREEN, RED, or YELLOW.
|
[
"Set",
"pixel",
"at",
"position",
"x",
"y",
"to",
"the",
"given",
"value",
".",
"X",
"and",
"Y",
"should",
"be",
"values",
"of",
"0",
"to",
"8",
".",
"Value",
"should",
"be",
"OFF",
"GREEN",
"RED",
"or",
"YELLOW",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/BicolorMatrix8x8.py#L41-L51
|
17,781
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/BicolorBargraph24.py
|
BicolorBargraph24.set_bar
|
def set_bar(self, bar, value):
"""Set bar to desired color. Bar should be a value of 0 to 23, and value
should be OFF, GREEN, RED, or YELLOW.
"""
if bar < 0 or bar > 23:
# Ignore out of bounds bars.
return
# Compute cathode and anode value.
c = (bar if bar < 12 else bar - 12) // 4
a = bar % 4
if bar >= 12:
a += 4
# Set green LED based on 1st bit in value.
self.set_led(c*16+a+8, 1 if value & GREEN > 0 else 0)
# Set red LED based on 2nd bit in value.
self.set_led(c*16+a, 1 if value & RED > 0 else 0)
|
python
|
def set_bar(self, bar, value):
"""Set bar to desired color. Bar should be a value of 0 to 23, and value
should be OFF, GREEN, RED, or YELLOW.
"""
if bar < 0 or bar > 23:
# Ignore out of bounds bars.
return
# Compute cathode and anode value.
c = (bar if bar < 12 else bar - 12) // 4
a = bar % 4
if bar >= 12:
a += 4
# Set green LED based on 1st bit in value.
self.set_led(c*16+a+8, 1 if value & GREEN > 0 else 0)
# Set red LED based on 2nd bit in value.
self.set_led(c*16+a, 1 if value & RED > 0 else 0)
|
[
"def",
"set_bar",
"(",
"self",
",",
"bar",
",",
"value",
")",
":",
"if",
"bar",
"<",
"0",
"or",
"bar",
">",
"23",
":",
"# Ignore out of bounds bars.",
"return",
"# Compute cathode and anode value.",
"c",
"=",
"(",
"bar",
"if",
"bar",
"<",
"12",
"else",
"bar",
"-",
"12",
")",
"//",
"4",
"a",
"=",
"bar",
"%",
"4",
"if",
"bar",
">=",
"12",
":",
"a",
"+=",
"4",
"# Set green LED based on 1st bit in value.",
"self",
".",
"set_led",
"(",
"c",
"*",
"16",
"+",
"a",
"+",
"8",
",",
"1",
"if",
"value",
"&",
"GREEN",
">",
"0",
"else",
"0",
")",
"# Set red LED based on 2nd bit in value.",
"self",
".",
"set_led",
"(",
"c",
"*",
"16",
"+",
"a",
",",
"1",
"if",
"value",
"&",
"RED",
">",
"0",
"else",
"0",
")"
] |
Set bar to desired color. Bar should be a value of 0 to 23, and value
should be OFF, GREEN, RED, or YELLOW.
|
[
"Set",
"bar",
"to",
"desired",
"color",
".",
"Bar",
"should",
"be",
"a",
"value",
"of",
"0",
"to",
"23",
"and",
"value",
"should",
"be",
"OFF",
"GREEN",
"RED",
"or",
"YELLOW",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/BicolorBargraph24.py#L44-L59
|
17,782
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/Matrix8x8.py
|
Matrix8x8.animate
|
def animate(self, images, delay=.25):
"""Displays each of the input images in order, pausing for "delay"
seconds after each image.
Keyword arguments:
image -- An iterable collection of Image objects.
delay -- How many seconds to wait after displaying an image before
displaying the next one. (Default = .25)
"""
for image in images:
# Draw the image on the display buffer.
self.set_image(image)
# Draw the buffer to the display hardware.
self.write_display()
time.sleep(delay)
|
python
|
def animate(self, images, delay=.25):
"""Displays each of the input images in order, pausing for "delay"
seconds after each image.
Keyword arguments:
image -- An iterable collection of Image objects.
delay -- How many seconds to wait after displaying an image before
displaying the next one. (Default = .25)
"""
for image in images:
# Draw the image on the display buffer.
self.set_image(image)
# Draw the buffer to the display hardware.
self.write_display()
time.sleep(delay)
|
[
"def",
"animate",
"(",
"self",
",",
"images",
",",
"delay",
"=",
".25",
")",
":",
"for",
"image",
"in",
"images",
":",
"# Draw the image on the display buffer.",
"self",
".",
"set_image",
"(",
"image",
")",
"# Draw the buffer to the display hardware.",
"self",
".",
"write_display",
"(",
")",
"time",
".",
"sleep",
"(",
"delay",
")"
] |
Displays each of the input images in order, pausing for "delay"
seconds after each image.
Keyword arguments:
image -- An iterable collection of Image objects.
delay -- How many seconds to wait after displaying an image before
displaying the next one. (Default = .25)
|
[
"Displays",
"each",
"of",
"the",
"input",
"images",
"in",
"order",
"pausing",
"for",
"delay",
"seconds",
"after",
"each",
"image",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/Matrix8x8.py#L160-L175
|
17,783
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/Matrix8x16.py
|
Matrix8x16.set_pixel
|
def set_pixel(self, x, y, value):
"""Set pixel at position x, y to the given value. X and Y should be values
of 0 to 7 and 0 to 15, resp. Value should be 0 for off and non-zero for on.
"""
if x < 0 or x > 7 or y < 0 or y > 15:
# Ignore out of bounds pixels.
return
self.set_led((7 - x) * 16 + y, value)
|
python
|
def set_pixel(self, x, y, value):
"""Set pixel at position x, y to the given value. X and Y should be values
of 0 to 7 and 0 to 15, resp. Value should be 0 for off and non-zero for on.
"""
if x < 0 or x > 7 or y < 0 or y > 15:
# Ignore out of bounds pixels.
return
self.set_led((7 - x) * 16 + y, value)
|
[
"def",
"set_pixel",
"(",
"self",
",",
"x",
",",
"y",
",",
"value",
")",
":",
"if",
"x",
"<",
"0",
"or",
"x",
">",
"7",
"or",
"y",
"<",
"0",
"or",
"y",
">",
"15",
":",
"# Ignore out of bounds pixels.",
"return",
"self",
".",
"set_led",
"(",
"(",
"7",
"-",
"x",
")",
"*",
"16",
"+",
"y",
",",
"value",
")"
] |
Set pixel at position x, y to the given value. X and Y should be values
of 0 to 7 and 0 to 15, resp. Value should be 0 for off and non-zero for on.
|
[
"Set",
"pixel",
"at",
"position",
"x",
"y",
"to",
"the",
"given",
"value",
".",
"X",
"and",
"Y",
"should",
"be",
"values",
"of",
"0",
"to",
"7",
"and",
"0",
"to",
"15",
"resp",
".",
"Value",
"should",
"be",
"0",
"for",
"off",
"and",
"non",
"-",
"zero",
"for",
"on",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/Matrix8x16.py#L35-L42
|
17,784
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/Matrix8x16.py
|
Matrix8x16.set_image
|
def set_image(self, image):
"""Set display buffer to Python Image Library image. Image will be converted
to 1 bit color and non-zero color values will light the LEDs.
"""
imwidth, imheight = image.size
if imwidth != 8 or imheight != 16:
raise ValueError('Image must be an 8x16 pixels in size.')
# Convert image to 1 bit color and grab all the pixels.
pix = image.convert('1').load()
# Loop through each pixel and write the display buffer pixel.
for x in xrange(8):
for y in xrange(16):
color = pix[(x, y)]
# Handle the color of the pixel, off or on.
if color == 0:
self.set_pixel(x, y, 0)
else:
self.set_pixel(x, y, 1)
|
python
|
def set_image(self, image):
"""Set display buffer to Python Image Library image. Image will be converted
to 1 bit color and non-zero color values will light the LEDs.
"""
imwidth, imheight = image.size
if imwidth != 8 or imheight != 16:
raise ValueError('Image must be an 8x16 pixels in size.')
# Convert image to 1 bit color and grab all the pixels.
pix = image.convert('1').load()
# Loop through each pixel and write the display buffer pixel.
for x in xrange(8):
for y in xrange(16):
color = pix[(x, y)]
# Handle the color of the pixel, off or on.
if color == 0:
self.set_pixel(x, y, 0)
else:
self.set_pixel(x, y, 1)
|
[
"def",
"set_image",
"(",
"self",
",",
"image",
")",
":",
"imwidth",
",",
"imheight",
"=",
"image",
".",
"size",
"if",
"imwidth",
"!=",
"8",
"or",
"imheight",
"!=",
"16",
":",
"raise",
"ValueError",
"(",
"'Image must be an 8x16 pixels in size.'",
")",
"# Convert image to 1 bit color and grab all the pixels.",
"pix",
"=",
"image",
".",
"convert",
"(",
"'1'",
")",
".",
"load",
"(",
")",
"# Loop through each pixel and write the display buffer pixel.",
"for",
"x",
"in",
"xrange",
"(",
"8",
")",
":",
"for",
"y",
"in",
"xrange",
"(",
"16",
")",
":",
"color",
"=",
"pix",
"[",
"(",
"x",
",",
"y",
")",
"]",
"# Handle the color of the pixel, off or on.",
"if",
"color",
"==",
"0",
":",
"self",
".",
"set_pixel",
"(",
"x",
",",
"y",
",",
"0",
")",
"else",
":",
"self",
".",
"set_pixel",
"(",
"x",
",",
"y",
",",
"1",
")"
] |
Set display buffer to Python Image Library image. Image will be converted
to 1 bit color and non-zero color values will light the LEDs.
|
[
"Set",
"display",
"buffer",
"to",
"Python",
"Image",
"Library",
"image",
".",
"Image",
"will",
"be",
"converted",
"to",
"1",
"bit",
"color",
"and",
"non",
"-",
"zero",
"color",
"values",
"will",
"light",
"the",
"LEDs",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/Matrix8x16.py#L44-L61
|
17,785
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/Matrix8x16.py
|
Matrix8x16.horizontal_scroll
|
def horizontal_scroll(self, image, padding=True):
"""Returns a list of images which appear to scroll from left to right
across the input image when displayed on the LED matrix in order.
The input image is not limited to being 8x16. If the input image is
larger than this, then all columns will be scrolled through but only
the top 16 rows of pixels will be displayed.
Keyword arguments:
image -- The image to scroll across.
padding -- If True, the animation will begin with a blank screen and the
input image will scroll into the blank screen one pixel column at a
time. Similarly, after scrolling across the whole input image, the
end of the image will scroll out of a blank screen one column at a
time. If this is not True, then only the input image will be scroll
across without beginning or ending with "whitespace."
(Default = True)
"""
image_list = list()
width = image.size[0]
# Scroll into the blank image.
if padding:
for x in range(8):
section = image.crop((0, 0, x, 16))
display_section = self.create_blank_image()
display_section.paste(section, (8 - x, 0, 8, 16))
image_list.append(display_section)
#Scroll across the input image.
for x in range(8, width + 1):
section = image.crop((x - 8, 0, x, 16))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 8, 16))
image_list.append(display_section)
#Scroll out, leaving the blank image.
if padding:
for x in range(width - 7, width + 1):
section = image.crop((x, 0, width, 16))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 7 - (x - (width - 7)), 16))
image_list.append(display_section)
#Return the list of images created
return image_list
|
python
|
def horizontal_scroll(self, image, padding=True):
"""Returns a list of images which appear to scroll from left to right
across the input image when displayed on the LED matrix in order.
The input image is not limited to being 8x16. If the input image is
larger than this, then all columns will be scrolled through but only
the top 16 rows of pixels will be displayed.
Keyword arguments:
image -- The image to scroll across.
padding -- If True, the animation will begin with a blank screen and the
input image will scroll into the blank screen one pixel column at a
time. Similarly, after scrolling across the whole input image, the
end of the image will scroll out of a blank screen one column at a
time. If this is not True, then only the input image will be scroll
across without beginning or ending with "whitespace."
(Default = True)
"""
image_list = list()
width = image.size[0]
# Scroll into the blank image.
if padding:
for x in range(8):
section = image.crop((0, 0, x, 16))
display_section = self.create_blank_image()
display_section.paste(section, (8 - x, 0, 8, 16))
image_list.append(display_section)
#Scroll across the input image.
for x in range(8, width + 1):
section = image.crop((x - 8, 0, x, 16))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 8, 16))
image_list.append(display_section)
#Scroll out, leaving the blank image.
if padding:
for x in range(width - 7, width + 1):
section = image.crop((x, 0, width, 16))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 7 - (x - (width - 7)), 16))
image_list.append(display_section)
#Return the list of images created
return image_list
|
[
"def",
"horizontal_scroll",
"(",
"self",
",",
"image",
",",
"padding",
"=",
"True",
")",
":",
"image_list",
"=",
"list",
"(",
")",
"width",
"=",
"image",
".",
"size",
"[",
"0",
"]",
"# Scroll into the blank image.",
"if",
"padding",
":",
"for",
"x",
"in",
"range",
"(",
"8",
")",
":",
"section",
"=",
"image",
".",
"crop",
"(",
"(",
"0",
",",
"0",
",",
"x",
",",
"16",
")",
")",
"display_section",
"=",
"self",
".",
"create_blank_image",
"(",
")",
"display_section",
".",
"paste",
"(",
"section",
",",
"(",
"8",
"-",
"x",
",",
"0",
",",
"8",
",",
"16",
")",
")",
"image_list",
".",
"append",
"(",
"display_section",
")",
"#Scroll across the input image.",
"for",
"x",
"in",
"range",
"(",
"8",
",",
"width",
"+",
"1",
")",
":",
"section",
"=",
"image",
".",
"crop",
"(",
"(",
"x",
"-",
"8",
",",
"0",
",",
"x",
",",
"16",
")",
")",
"display_section",
"=",
"self",
".",
"create_blank_image",
"(",
")",
"display_section",
".",
"paste",
"(",
"section",
",",
"(",
"0",
",",
"0",
",",
"8",
",",
"16",
")",
")",
"image_list",
".",
"append",
"(",
"display_section",
")",
"#Scroll out, leaving the blank image.",
"if",
"padding",
":",
"for",
"x",
"in",
"range",
"(",
"width",
"-",
"7",
",",
"width",
"+",
"1",
")",
":",
"section",
"=",
"image",
".",
"crop",
"(",
"(",
"x",
",",
"0",
",",
"width",
",",
"16",
")",
")",
"display_section",
"=",
"self",
".",
"create_blank_image",
"(",
")",
"display_section",
".",
"paste",
"(",
"section",
",",
"(",
"0",
",",
"0",
",",
"7",
"-",
"(",
"x",
"-",
"(",
"width",
"-",
"7",
")",
")",
",",
"16",
")",
")",
"image_list",
".",
"append",
"(",
"display_section",
")",
"#Return the list of images created",
"return",
"image_list"
] |
Returns a list of images which appear to scroll from left to right
across the input image when displayed on the LED matrix in order.
The input image is not limited to being 8x16. If the input image is
larger than this, then all columns will be scrolled through but only
the top 16 rows of pixels will be displayed.
Keyword arguments:
image -- The image to scroll across.
padding -- If True, the animation will begin with a blank screen and the
input image will scroll into the blank screen one pixel column at a
time. Similarly, after scrolling across the whole input image, the
end of the image will scroll out of a blank screen one column at a
time. If this is not True, then only the input image will be scroll
across without beginning or ending with "whitespace."
(Default = True)
|
[
"Returns",
"a",
"list",
"of",
"images",
"which",
"appear",
"to",
"scroll",
"from",
"left",
"to",
"right",
"across",
"the",
"input",
"image",
"when",
"displayed",
"on",
"the",
"LED",
"matrix",
"in",
"order",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/Matrix8x16.py#L67-L112
|
17,786
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/Matrix8x16.py
|
Matrix8x16.vertical_scroll
|
def vertical_scroll(self, image, padding=True):
"""Returns a list of images which appear to scroll from top to bottom
down the input image when displayed on the LED matrix in order.
The input image is not limited to being 8x16. If the input image is
largerthan this, then all rows will be scrolled through but only the
left-most 8 columns of pixels will be displayed.
Keyword arguments:
image -- The image to scroll down.
padding -- If True, the animation will begin with a blank screen and the
input image will scroll into the blank screen one pixel row at a
time. Similarly, after scrolling down the whole input image, the end
of the image will scroll out of a blank screen one row at a time.
If this is not True, then only the input image will be scroll down
without beginning or ending with "whitespace." (Default = True)
"""
image_list = list()
height = image.size[1]
# Scroll into the blank image.
if padding:
for y in range(16):
section = image.crop((0, 0, 8, y))
display_section = self.create_blank_image()
display_section.paste(section, (0, 8 - y, 8, 16))
image_list.append(display_section)
#Scroll across the input image.
for y in range(16, height + 1):
section = image.crop((0, y - 16, 8, y))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 8, 16))
image_list.append(display_section)
#Scroll out, leaving the blank image.
if padding:
for y in range(height - 15, height + 1):
section = image.crop((0, y, 8, height))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 8, 7 - (y - (height - 15))))
image_list.append(display_section)
#Return the list of images created
return image_list
|
python
|
def vertical_scroll(self, image, padding=True):
"""Returns a list of images which appear to scroll from top to bottom
down the input image when displayed on the LED matrix in order.
The input image is not limited to being 8x16. If the input image is
largerthan this, then all rows will be scrolled through but only the
left-most 8 columns of pixels will be displayed.
Keyword arguments:
image -- The image to scroll down.
padding -- If True, the animation will begin with a blank screen and the
input image will scroll into the blank screen one pixel row at a
time. Similarly, after scrolling down the whole input image, the end
of the image will scroll out of a blank screen one row at a time.
If this is not True, then only the input image will be scroll down
without beginning or ending with "whitespace." (Default = True)
"""
image_list = list()
height = image.size[1]
# Scroll into the blank image.
if padding:
for y in range(16):
section = image.crop((0, 0, 8, y))
display_section = self.create_blank_image()
display_section.paste(section, (0, 8 - y, 8, 16))
image_list.append(display_section)
#Scroll across the input image.
for y in range(16, height + 1):
section = image.crop((0, y - 16, 8, y))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 8, 16))
image_list.append(display_section)
#Scroll out, leaving the blank image.
if padding:
for y in range(height - 15, height + 1):
section = image.crop((0, y, 8, height))
display_section = self.create_blank_image()
display_section.paste(section, (0, 0, 8, 7 - (y - (height - 15))))
image_list.append(display_section)
#Return the list of images created
return image_list
|
[
"def",
"vertical_scroll",
"(",
"self",
",",
"image",
",",
"padding",
"=",
"True",
")",
":",
"image_list",
"=",
"list",
"(",
")",
"height",
"=",
"image",
".",
"size",
"[",
"1",
"]",
"# Scroll into the blank image.",
"if",
"padding",
":",
"for",
"y",
"in",
"range",
"(",
"16",
")",
":",
"section",
"=",
"image",
".",
"crop",
"(",
"(",
"0",
",",
"0",
",",
"8",
",",
"y",
")",
")",
"display_section",
"=",
"self",
".",
"create_blank_image",
"(",
")",
"display_section",
".",
"paste",
"(",
"section",
",",
"(",
"0",
",",
"8",
"-",
"y",
",",
"8",
",",
"16",
")",
")",
"image_list",
".",
"append",
"(",
"display_section",
")",
"#Scroll across the input image.",
"for",
"y",
"in",
"range",
"(",
"16",
",",
"height",
"+",
"1",
")",
":",
"section",
"=",
"image",
".",
"crop",
"(",
"(",
"0",
",",
"y",
"-",
"16",
",",
"8",
",",
"y",
")",
")",
"display_section",
"=",
"self",
".",
"create_blank_image",
"(",
")",
"display_section",
".",
"paste",
"(",
"section",
",",
"(",
"0",
",",
"0",
",",
"8",
",",
"16",
")",
")",
"image_list",
".",
"append",
"(",
"display_section",
")",
"#Scroll out, leaving the blank image.",
"if",
"padding",
":",
"for",
"y",
"in",
"range",
"(",
"height",
"-",
"15",
",",
"height",
"+",
"1",
")",
":",
"section",
"=",
"image",
".",
"crop",
"(",
"(",
"0",
",",
"y",
",",
"8",
",",
"height",
")",
")",
"display_section",
"=",
"self",
".",
"create_blank_image",
"(",
")",
"display_section",
".",
"paste",
"(",
"section",
",",
"(",
"0",
",",
"0",
",",
"8",
",",
"7",
"-",
"(",
"y",
"-",
"(",
"height",
"-",
"15",
")",
")",
")",
")",
"image_list",
".",
"append",
"(",
"display_section",
")",
"#Return the list of images created",
"return",
"image_list"
] |
Returns a list of images which appear to scroll from top to bottom
down the input image when displayed on the LED matrix in order.
The input image is not limited to being 8x16. If the input image is
largerthan this, then all rows will be scrolled through but only the
left-most 8 columns of pixels will be displayed.
Keyword arguments:
image -- The image to scroll down.
padding -- If True, the animation will begin with a blank screen and the
input image will scroll into the blank screen one pixel row at a
time. Similarly, after scrolling down the whole input image, the end
of the image will scroll out of a blank screen one row at a time.
If this is not True, then only the input image will be scroll down
without beginning or ending with "whitespace." (Default = True)
|
[
"Returns",
"a",
"list",
"of",
"images",
"which",
"appear",
"to",
"scroll",
"from",
"top",
"to",
"bottom",
"down",
"the",
"input",
"image",
"when",
"displayed",
"on",
"the",
"LED",
"matrix",
"in",
"order",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/Matrix8x16.py#L114-L158
|
17,787
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/AlphaNum4.py
|
AlphaNum4.print_number_str
|
def print_number_str(self, value, justify_right=True):
"""Print a 4 character long string of numeric values to the display. This
function is similar to print_str but will interpret periods not as
characters but as decimal points associated with the previous character.
"""
# Calculate length of value without decimals.
length = len(value.translate(None, '.'))
# Error if value without decimals is longer than 4 characters.
if length > 4:
self.print_str('----')
return
# Calculcate starting position of digits based on justification.
pos = (4-length) if justify_right else 0
# Go through each character and print it on the display.
for i, ch in enumerate(value):
if ch == '.':
# Print decimal points on the previous digit.
self.set_decimal(pos-1, True)
else:
self.set_digit(pos, ch)
pos += 1
|
python
|
def print_number_str(self, value, justify_right=True):
"""Print a 4 character long string of numeric values to the display. This
function is similar to print_str but will interpret periods not as
characters but as decimal points associated with the previous character.
"""
# Calculate length of value without decimals.
length = len(value.translate(None, '.'))
# Error if value without decimals is longer than 4 characters.
if length > 4:
self.print_str('----')
return
# Calculcate starting position of digits based on justification.
pos = (4-length) if justify_right else 0
# Go through each character and print it on the display.
for i, ch in enumerate(value):
if ch == '.':
# Print decimal points on the previous digit.
self.set_decimal(pos-1, True)
else:
self.set_digit(pos, ch)
pos += 1
|
[
"def",
"print_number_str",
"(",
"self",
",",
"value",
",",
"justify_right",
"=",
"True",
")",
":",
"# Calculate length of value without decimals.",
"length",
"=",
"len",
"(",
"value",
".",
"translate",
"(",
"None",
",",
"'.'",
")",
")",
"# Error if value without decimals is longer than 4 characters.",
"if",
"length",
">",
"4",
":",
"self",
".",
"print_str",
"(",
"'----'",
")",
"return",
"# Calculcate starting position of digits based on justification.",
"pos",
"=",
"(",
"4",
"-",
"length",
")",
"if",
"justify_right",
"else",
"0",
"# Go through each character and print it on the display.",
"for",
"i",
",",
"ch",
"in",
"enumerate",
"(",
"value",
")",
":",
"if",
"ch",
"==",
"'.'",
":",
"# Print decimal points on the previous digit.",
"self",
".",
"set_decimal",
"(",
"pos",
"-",
"1",
",",
"True",
")",
"else",
":",
"self",
".",
"set_digit",
"(",
"pos",
",",
"ch",
")",
"pos",
"+=",
"1"
] |
Print a 4 character long string of numeric values to the display. This
function is similar to print_str but will interpret periods not as
characters but as decimal points associated with the previous character.
|
[
"Print",
"a",
"4",
"character",
"long",
"string",
"of",
"numeric",
"values",
"to",
"the",
"display",
".",
"This",
"function",
"is",
"similar",
"to",
"print_str",
"but",
"will",
"interpret",
"periods",
"not",
"as",
"characters",
"but",
"as",
"decimal",
"points",
"associated",
"with",
"the",
"previous",
"character",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/AlphaNum4.py#L177-L197
|
17,788
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/AlphaNum4.py
|
AlphaNum4.print_float
|
def print_float(self, value, decimal_digits=2, justify_right=True):
"""Print a numeric value to the display. If value is negative
it will be printed with a leading minus sign. Decimal digits is the
desired number of digits after the decimal point.
"""
format_string = '{{0:0.{0}F}}'.format(decimal_digits)
self.print_number_str(format_string.format(value), justify_right)
|
python
|
def print_float(self, value, decimal_digits=2, justify_right=True):
"""Print a numeric value to the display. If value is negative
it will be printed with a leading minus sign. Decimal digits is the
desired number of digits after the decimal point.
"""
format_string = '{{0:0.{0}F}}'.format(decimal_digits)
self.print_number_str(format_string.format(value), justify_right)
|
[
"def",
"print_float",
"(",
"self",
",",
"value",
",",
"decimal_digits",
"=",
"2",
",",
"justify_right",
"=",
"True",
")",
":",
"format_string",
"=",
"'{{0:0.{0}F}}'",
".",
"format",
"(",
"decimal_digits",
")",
"self",
".",
"print_number_str",
"(",
"format_string",
".",
"format",
"(",
"value",
")",
",",
"justify_right",
")"
] |
Print a numeric value to the display. If value is negative
it will be printed with a leading minus sign. Decimal digits is the
desired number of digits after the decimal point.
|
[
"Print",
"a",
"numeric",
"value",
"to",
"the",
"display",
".",
"If",
"value",
"is",
"negative",
"it",
"will",
"be",
"printed",
"with",
"a",
"leading",
"minus",
"sign",
".",
"Decimal",
"digits",
"is",
"the",
"desired",
"number",
"of",
"digits",
"after",
"the",
"decimal",
"point",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/AlphaNum4.py#L199-L205
|
17,789
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/SevenSegment.py
|
SevenSegment.set_left_colon
|
def set_left_colon(self, show_colon):
"""Turn the left colon on with show color True, or off with show colon
False. Only the large 1.2" 7-segment display has a left colon.
"""
if show_colon:
self.buffer[4] |= 0x04
self.buffer[4] |= 0x08
else:
self.buffer[4] &= (~0x04) & 0xFF
self.buffer[4] &= (~0x08) & 0xFF
|
python
|
def set_left_colon(self, show_colon):
"""Turn the left colon on with show color True, or off with show colon
False. Only the large 1.2" 7-segment display has a left colon.
"""
if show_colon:
self.buffer[4] |= 0x04
self.buffer[4] |= 0x08
else:
self.buffer[4] &= (~0x04) & 0xFF
self.buffer[4] &= (~0x08) & 0xFF
|
[
"def",
"set_left_colon",
"(",
"self",
",",
"show_colon",
")",
":",
"if",
"show_colon",
":",
"self",
".",
"buffer",
"[",
"4",
"]",
"|=",
"0x04",
"self",
".",
"buffer",
"[",
"4",
"]",
"|=",
"0x08",
"else",
":",
"self",
".",
"buffer",
"[",
"4",
"]",
"&=",
"(",
"~",
"0x04",
")",
"&",
"0xFF",
"self",
".",
"buffer",
"[",
"4",
"]",
"&=",
"(",
"~",
"0x08",
")",
"&",
"0xFF"
] |
Turn the left colon on with show color True, or off with show colon
False. Only the large 1.2" 7-segment display has a left colon.
|
[
"Turn",
"the",
"left",
"colon",
"on",
"with",
"show",
"color",
"True",
"or",
"off",
"with",
"show",
"colon",
"False",
".",
"Only",
"the",
"large",
"1",
".",
"2",
"7",
"-",
"segment",
"display",
"has",
"a",
"left",
"colon",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/SevenSegment.py#L145-L154
|
17,790
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/SevenSegment.py
|
SevenSegment.print_number_str
|
def print_number_str(self, value, justify_right=True):
"""Print a 4 character long string of numeric values to the display.
Characters in the string should be any supported character by set_digit,
or a decimal point. Decimal point characters will be associated with
the previous character.
"""
# Calculate length of value without decimals.
length = sum(map(lambda x: 1 if x != '.' else 0, value))
# Error if value without decimals is longer than 4 characters.
if length > 4:
self.print_number_str('----')
return
# Calculcate starting position of digits based on justification.
pos = (4-length) if justify_right else 0
# Go through each character and print it on the display.
for i, ch in enumerate(value):
if ch == '.':
# Print decimal points on the previous digit.
self.set_decimal(pos-1, True)
else:
self.set_digit(pos, ch)
pos += 1
|
python
|
def print_number_str(self, value, justify_right=True):
"""Print a 4 character long string of numeric values to the display.
Characters in the string should be any supported character by set_digit,
or a decimal point. Decimal point characters will be associated with
the previous character.
"""
# Calculate length of value without decimals.
length = sum(map(lambda x: 1 if x != '.' else 0, value))
# Error if value without decimals is longer than 4 characters.
if length > 4:
self.print_number_str('----')
return
# Calculcate starting position of digits based on justification.
pos = (4-length) if justify_right else 0
# Go through each character and print it on the display.
for i, ch in enumerate(value):
if ch == '.':
# Print decimal points on the previous digit.
self.set_decimal(pos-1, True)
else:
self.set_digit(pos, ch)
pos += 1
|
[
"def",
"print_number_str",
"(",
"self",
",",
"value",
",",
"justify_right",
"=",
"True",
")",
":",
"# Calculate length of value without decimals.",
"length",
"=",
"sum",
"(",
"map",
"(",
"lambda",
"x",
":",
"1",
"if",
"x",
"!=",
"'.'",
"else",
"0",
",",
"value",
")",
")",
"# Error if value without decimals is longer than 4 characters.",
"if",
"length",
">",
"4",
":",
"self",
".",
"print_number_str",
"(",
"'----'",
")",
"return",
"# Calculcate starting position of digits based on justification.",
"pos",
"=",
"(",
"4",
"-",
"length",
")",
"if",
"justify_right",
"else",
"0",
"# Go through each character and print it on the display.",
"for",
"i",
",",
"ch",
"in",
"enumerate",
"(",
"value",
")",
":",
"if",
"ch",
"==",
"'.'",
":",
"# Print decimal points on the previous digit.",
"self",
".",
"set_decimal",
"(",
"pos",
"-",
"1",
",",
"True",
")",
"else",
":",
"self",
".",
"set_digit",
"(",
"pos",
",",
"ch",
")",
"pos",
"+=",
"1"
] |
Print a 4 character long string of numeric values to the display.
Characters in the string should be any supported character by set_digit,
or a decimal point. Decimal point characters will be associated with
the previous character.
|
[
"Print",
"a",
"4",
"character",
"long",
"string",
"of",
"numeric",
"values",
"to",
"the",
"display",
".",
"Characters",
"in",
"the",
"string",
"should",
"be",
"any",
"supported",
"character",
"by",
"set_digit",
"or",
"a",
"decimal",
"point",
".",
"Decimal",
"point",
"characters",
"will",
"be",
"associated",
"with",
"the",
"previous",
"character",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/SevenSegment.py#L167-L188
|
17,791
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/HT16K33.py
|
HT16K33.begin
|
def begin(self):
"""Initialize driver with LEDs enabled and all turned off."""
# Turn on the oscillator.
self._device.writeList(HT16K33_SYSTEM_SETUP | HT16K33_OSCILLATOR, [])
# Turn display on with no blinking.
self.set_blink(HT16K33_BLINK_OFF)
# Set display to full brightness.
self.set_brightness(15)
|
python
|
def begin(self):
"""Initialize driver with LEDs enabled and all turned off."""
# Turn on the oscillator.
self._device.writeList(HT16K33_SYSTEM_SETUP | HT16K33_OSCILLATOR, [])
# Turn display on with no blinking.
self.set_blink(HT16K33_BLINK_OFF)
# Set display to full brightness.
self.set_brightness(15)
|
[
"def",
"begin",
"(",
"self",
")",
":",
"# Turn on the oscillator.",
"self",
".",
"_device",
".",
"writeList",
"(",
"HT16K33_SYSTEM_SETUP",
"|",
"HT16K33_OSCILLATOR",
",",
"[",
"]",
")",
"# Turn display on with no blinking.",
"self",
".",
"set_blink",
"(",
"HT16K33_BLINK_OFF",
")",
"# Set display to full brightness.",
"self",
".",
"set_brightness",
"(",
"15",
")"
] |
Initialize driver with LEDs enabled and all turned off.
|
[
"Initialize",
"driver",
"with",
"LEDs",
"enabled",
"and",
"all",
"turned",
"off",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/HT16K33.py#L50-L57
|
17,792
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/HT16K33.py
|
HT16K33.write_display
|
def write_display(self):
"""Write display buffer to display hardware."""
for i, value in enumerate(self.buffer):
self._device.write8(i, value)
|
python
|
def write_display(self):
"""Write display buffer to display hardware."""
for i, value in enumerate(self.buffer):
self._device.write8(i, value)
|
[
"def",
"write_display",
"(",
"self",
")",
":",
"for",
"i",
",",
"value",
"in",
"enumerate",
"(",
"self",
".",
"buffer",
")",
":",
"self",
".",
"_device",
".",
"write8",
"(",
"i",
",",
"value",
")"
] |
Write display buffer to display hardware.
|
[
"Write",
"display",
"buffer",
"to",
"display",
"hardware",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/HT16K33.py#L93-L96
|
17,793
|
adafruit/Adafruit_Python_LED_Backpack
|
Adafruit_LED_Backpack/HT16K33.py
|
HT16K33.clear
|
def clear(self):
"""Clear contents of display buffer."""
for i, value in enumerate(self.buffer):
self.buffer[i] = 0
|
python
|
def clear(self):
"""Clear contents of display buffer."""
for i, value in enumerate(self.buffer):
self.buffer[i] = 0
|
[
"def",
"clear",
"(",
"self",
")",
":",
"for",
"i",
",",
"value",
"in",
"enumerate",
"(",
"self",
".",
"buffer",
")",
":",
"self",
".",
"buffer",
"[",
"i",
"]",
"=",
"0"
] |
Clear contents of display buffer.
|
[
"Clear",
"contents",
"of",
"display",
"buffer",
"."
] |
7356b4dd8b4bb162d60987878c2cb752fdd017d5
|
https://github.com/adafruit/Adafruit_Python_LED_Backpack/blob/7356b4dd8b4bb162d60987878c2cb752fdd017d5/Adafruit_LED_Backpack/HT16K33.py#L98-L101
|
17,794
|
swisscom/cleanerversion
|
versions/admin.py
|
VersionedAdmin.get_readonly_fields
|
def get_readonly_fields(self, request, obj=None):
"""
This is required a subclass of VersionedAdmin has readonly_fields
ours won't be undone
"""
if obj:
return list(self.readonly_fields) + ['id', 'identity',
'is_current']
return self.readonly_fields
|
python
|
def get_readonly_fields(self, request, obj=None):
"""
This is required a subclass of VersionedAdmin has readonly_fields
ours won't be undone
"""
if obj:
return list(self.readonly_fields) + ['id', 'identity',
'is_current']
return self.readonly_fields
|
[
"def",
"get_readonly_fields",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
")",
":",
"if",
"obj",
":",
"return",
"list",
"(",
"self",
".",
"readonly_fields",
")",
"+",
"[",
"'id'",
",",
"'identity'",
",",
"'is_current'",
"]",
"return",
"self",
".",
"readonly_fields"
] |
This is required a subclass of VersionedAdmin has readonly_fields
ours won't be undone
|
[
"This",
"is",
"required",
"a",
"subclass",
"of",
"VersionedAdmin",
"has",
"readonly_fields",
"ours",
"won",
"t",
"be",
"undone"
] |
becadbab5d7b474a0e9a596b99e97682402d2f2c
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L144-L152
|
17,795
|
swisscom/cleanerversion
|
versions/admin.py
|
VersionedAdmin.get_list_display
|
def get_list_display(self, request):
"""
This method determines which fields go in the changelist
"""
# Force cast to list as super get_list_display could return a tuple
list_display = list(
super(VersionedAdmin, self).get_list_display(request))
# Preprend the following fields to list display
if self.list_display_show_identity:
list_display = ['identity_shortener', ] + list_display
# Append the following fields to list display
if self.list_display_show_start_date:
list_display += ['version_start_date', ]
if self.list_display_show_end_date:
list_display += ['version_end_date', ]
return list_display + ['is_current', ]
|
python
|
def get_list_display(self, request):
"""
This method determines which fields go in the changelist
"""
# Force cast to list as super get_list_display could return a tuple
list_display = list(
super(VersionedAdmin, self).get_list_display(request))
# Preprend the following fields to list display
if self.list_display_show_identity:
list_display = ['identity_shortener', ] + list_display
# Append the following fields to list display
if self.list_display_show_start_date:
list_display += ['version_start_date', ]
if self.list_display_show_end_date:
list_display += ['version_end_date', ]
return list_display + ['is_current', ]
|
[
"def",
"get_list_display",
"(",
"self",
",",
"request",
")",
":",
"# Force cast to list as super get_list_display could return a tuple",
"list_display",
"=",
"list",
"(",
"super",
"(",
"VersionedAdmin",
",",
"self",
")",
".",
"get_list_display",
"(",
"request",
")",
")",
"# Preprend the following fields to list display",
"if",
"self",
".",
"list_display_show_identity",
":",
"list_display",
"=",
"[",
"'identity_shortener'",
",",
"]",
"+",
"list_display",
"# Append the following fields to list display",
"if",
"self",
".",
"list_display_show_start_date",
":",
"list_display",
"+=",
"[",
"'version_start_date'",
",",
"]",
"if",
"self",
".",
"list_display_show_end_date",
":",
"list_display",
"+=",
"[",
"'version_end_date'",
",",
"]",
"return",
"list_display",
"+",
"[",
"'is_current'",
",",
"]"
] |
This method determines which fields go in the changelist
|
[
"This",
"method",
"determines",
"which",
"fields",
"go",
"in",
"the",
"changelist"
] |
becadbab5d7b474a0e9a596b99e97682402d2f2c
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L157-L176
|
17,796
|
swisscom/cleanerversion
|
versions/admin.py
|
VersionedAdmin.get_list_filter
|
def get_list_filter(self, request):
"""
Adds versionable custom filtering ability to changelist
"""
list_filter = super(VersionedAdmin, self).get_list_filter(request)
return list(list_filter) + [('version_start_date', DateTimeFilter),
IsCurrentFilter]
|
python
|
def get_list_filter(self, request):
"""
Adds versionable custom filtering ability to changelist
"""
list_filter = super(VersionedAdmin, self).get_list_filter(request)
return list(list_filter) + [('version_start_date', DateTimeFilter),
IsCurrentFilter]
|
[
"def",
"get_list_filter",
"(",
"self",
",",
"request",
")",
":",
"list_filter",
"=",
"super",
"(",
"VersionedAdmin",
",",
"self",
")",
".",
"get_list_filter",
"(",
"request",
")",
"return",
"list",
"(",
"list_filter",
")",
"+",
"[",
"(",
"'version_start_date'",
",",
"DateTimeFilter",
")",
",",
"IsCurrentFilter",
"]"
] |
Adds versionable custom filtering ability to changelist
|
[
"Adds",
"versionable",
"custom",
"filtering",
"ability",
"to",
"changelist"
] |
becadbab5d7b474a0e9a596b99e97682402d2f2c
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L178-L184
|
17,797
|
swisscom/cleanerversion
|
versions/admin.py
|
VersionedAdmin.restore
|
def restore(self, request, *args, **kwargs):
"""
View for restoring object from change view
"""
paths = request.path_info.split('/')
object_id_index = paths.index("restore") - 2
object_id = paths[object_id_index]
obj = super(VersionedAdmin, self).get_object(request, object_id)
obj.restore()
admin_wordIndex = object_id_index - 3
path = "/%s" % ("/".join(paths[admin_wordIndex:object_id_index]))
opts = self.model._meta
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': format_html('<a href="{}">{}</a>',
urlquote(request.path), obj),
}
msg = format_html(_('The {name} "{obj}" was restored successfully.'),
**msg_dict)
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(path)
|
python
|
def restore(self, request, *args, **kwargs):
"""
View for restoring object from change view
"""
paths = request.path_info.split('/')
object_id_index = paths.index("restore") - 2
object_id = paths[object_id_index]
obj = super(VersionedAdmin, self).get_object(request, object_id)
obj.restore()
admin_wordIndex = object_id_index - 3
path = "/%s" % ("/".join(paths[admin_wordIndex:object_id_index]))
opts = self.model._meta
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': format_html('<a href="{}">{}</a>',
urlquote(request.path), obj),
}
msg = format_html(_('The {name} "{obj}" was restored successfully.'),
**msg_dict)
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(path)
|
[
"def",
"restore",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"paths",
"=",
"request",
".",
"path_info",
".",
"split",
"(",
"'/'",
")",
"object_id_index",
"=",
"paths",
".",
"index",
"(",
"\"restore\"",
")",
"-",
"2",
"object_id",
"=",
"paths",
"[",
"object_id_index",
"]",
"obj",
"=",
"super",
"(",
"VersionedAdmin",
",",
"self",
")",
".",
"get_object",
"(",
"request",
",",
"object_id",
")",
"obj",
".",
"restore",
"(",
")",
"admin_wordIndex",
"=",
"object_id_index",
"-",
"3",
"path",
"=",
"\"/%s\"",
"%",
"(",
"\"/\"",
".",
"join",
"(",
"paths",
"[",
"admin_wordIndex",
":",
"object_id_index",
"]",
")",
")",
"opts",
"=",
"self",
".",
"model",
".",
"_meta",
"msg_dict",
"=",
"{",
"'name'",
":",
"force_text",
"(",
"opts",
".",
"verbose_name",
")",
",",
"'obj'",
":",
"format_html",
"(",
"'<a href=\"{}\">{}</a>'",
",",
"urlquote",
"(",
"request",
".",
"path",
")",
",",
"obj",
")",
",",
"}",
"msg",
"=",
"format_html",
"(",
"_",
"(",
"'The {name} \"{obj}\" was restored successfully.'",
")",
",",
"*",
"*",
"msg_dict",
")",
"self",
".",
"message_user",
"(",
"request",
",",
"msg",
",",
"messages",
".",
"SUCCESS",
")",
"return",
"HttpResponseRedirect",
"(",
"path",
")"
] |
View for restoring object from change view
|
[
"View",
"for",
"restoring",
"object",
"from",
"change",
"view"
] |
becadbab5d7b474a0e9a596b99e97682402d2f2c
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L186-L209
|
17,798
|
swisscom/cleanerversion
|
versions/admin.py
|
VersionedAdmin.will_not_clone
|
def will_not_clone(self, request, *args, **kwargs):
"""
Add save but not clone capability in the changeview
"""
paths = request.path_info.split('/')
index_of_object_id = paths.index("will_not_clone") - 1
object_id = paths[index_of_object_id]
self.change_view(request, object_id)
admin_wordInUrl = index_of_object_id - 3
# This gets the adminsite for the app, and the model name and joins
# together with /
path = '/' + '/'.join(paths[admin_wordInUrl:index_of_object_id])
return HttpResponseRedirect(path)
|
python
|
def will_not_clone(self, request, *args, **kwargs):
"""
Add save but not clone capability in the changeview
"""
paths = request.path_info.split('/')
index_of_object_id = paths.index("will_not_clone") - 1
object_id = paths[index_of_object_id]
self.change_view(request, object_id)
admin_wordInUrl = index_of_object_id - 3
# This gets the adminsite for the app, and the model name and joins
# together with /
path = '/' + '/'.join(paths[admin_wordInUrl:index_of_object_id])
return HttpResponseRedirect(path)
|
[
"def",
"will_not_clone",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"paths",
"=",
"request",
".",
"path_info",
".",
"split",
"(",
"'/'",
")",
"index_of_object_id",
"=",
"paths",
".",
"index",
"(",
"\"will_not_clone\"",
")",
"-",
"1",
"object_id",
"=",
"paths",
"[",
"index_of_object_id",
"]",
"self",
".",
"change_view",
"(",
"request",
",",
"object_id",
")",
"admin_wordInUrl",
"=",
"index_of_object_id",
"-",
"3",
"# This gets the adminsite for the app, and the model name and joins",
"# together with /",
"path",
"=",
"'/'",
"+",
"'/'",
".",
"join",
"(",
"paths",
"[",
"admin_wordInUrl",
":",
"index_of_object_id",
"]",
")",
"return",
"HttpResponseRedirect",
"(",
"path",
")"
] |
Add save but not clone capability in the changeview
|
[
"Add",
"save",
"but",
"not",
"clone",
"capability",
"in",
"the",
"changeview"
] |
becadbab5d7b474a0e9a596b99e97682402d2f2c
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L211-L224
|
17,799
|
swisscom/cleanerversion
|
versions/admin.py
|
VersionedAdmin.exclude
|
def exclude(self):
"""
Custom descriptor for exclude since there is no get_exclude method to
be overridden
"""
exclude = self.VERSIONED_EXCLUDE
if super(VersionedAdmin, self).exclude is not None:
# Force cast to list as super exclude could return a tuple
exclude = list(super(VersionedAdmin, self).exclude) + exclude
return exclude
|
python
|
def exclude(self):
"""
Custom descriptor for exclude since there is no get_exclude method to
be overridden
"""
exclude = self.VERSIONED_EXCLUDE
if super(VersionedAdmin, self).exclude is not None:
# Force cast to list as super exclude could return a tuple
exclude = list(super(VersionedAdmin, self).exclude) + exclude
return exclude
|
[
"def",
"exclude",
"(",
"self",
")",
":",
"exclude",
"=",
"self",
".",
"VERSIONED_EXCLUDE",
"if",
"super",
"(",
"VersionedAdmin",
",",
"self",
")",
".",
"exclude",
"is",
"not",
"None",
":",
"# Force cast to list as super exclude could return a tuple",
"exclude",
"=",
"list",
"(",
"super",
"(",
"VersionedAdmin",
",",
"self",
")",
".",
"exclude",
")",
"+",
"exclude",
"return",
"exclude"
] |
Custom descriptor for exclude since there is no get_exclude method to
be overridden
|
[
"Custom",
"descriptor",
"for",
"exclude",
"since",
"there",
"is",
"no",
"get_exclude",
"method",
"to",
"be",
"overridden"
] |
becadbab5d7b474a0e9a596b99e97682402d2f2c
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L227-L238
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.