| id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class: "python") | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
14,000
|
rootpy/rootpy
|
rootpy/extern/byteplay2/__init__.py
|
Code.from_code
|
def from_code(cls, co):
"""Disassemble a Python code object into a Code object."""
co_code = co.co_code
labels = dict((addr, Label()) for addr in findlabels(co_code))
linestarts = dict(cls._findlinestarts(co))
cellfree = co.co_cellvars + co.co_freevars
code = CodeList()
n = len(co_code)
i = 0
extended_arg = 0
while i < n:
op = Opcode(ord(co_code[i]))
if i in labels:
code.append((labels[i], None))
if i in linestarts:
code.append((SetLineno, linestarts[i]))
i += 1
if op in hascode:
lastop, lastarg = code[-1]
if lastop != LOAD_CONST:
raise ValueError(
"%s should be preceded by LOAD_CONST code" % op)
code[-1] = (LOAD_CONST, Code.from_code(lastarg))
if op not in hasarg:
code.append((op, None))
else:
arg = ord(co_code[i]) + ord(co_code[i+1])*256 + extended_arg
extended_arg = 0
i += 2
if op == opcode.EXTENDED_ARG:
extended_arg = arg << 16
elif op in hasconst:
code.append((op, co.co_consts[arg]))
elif op in hasname:
code.append((op, co.co_names[arg]))
elif op in hasjabs:
code.append((op, labels[arg]))
elif op in hasjrel:
code.append((op, labels[i + arg]))
elif op in haslocal:
code.append((op, co.co_varnames[arg]))
elif op in hascompare:
code.append((op, cmp_op[arg]))
elif op in hasfree:
code.append((op, cellfree[arg]))
else:
code.append((op, arg))
varargs = bool(co.co_flags & CO_VARARGS)
varkwargs = bool(co.co_flags & CO_VARKEYWORDS)
newlocals = bool(co.co_flags & CO_NEWLOCALS)
args = co.co_varnames[:co.co_argcount + varargs + varkwargs]
if co.co_consts and isinstance(co.co_consts[0], basestring):
docstring = co.co_consts[0]
else:
docstring = None
return cls(code = code,
freevars = co.co_freevars,
args = args,
varargs = varargs,
varkwargs = varkwargs,
newlocals = newlocals,
name = co.co_name,
filename = co.co_filename,
firstlineno = co.co_firstlineno,
docstring = docstring,
)
|
python
|
def from_code(cls, co):
"""Disassemble a Python code object into a Code object."""
co_code = co.co_code
labels = dict((addr, Label()) for addr in findlabels(co_code))
linestarts = dict(cls._findlinestarts(co))
cellfree = co.co_cellvars + co.co_freevars
code = CodeList()
n = len(co_code)
i = 0
extended_arg = 0
while i < n:
op = Opcode(ord(co_code[i]))
if i in labels:
code.append((labels[i], None))
if i in linestarts:
code.append((SetLineno, linestarts[i]))
i += 1
if op in hascode:
lastop, lastarg = code[-1]
if lastop != LOAD_CONST:
raise ValueError(
"%s should be preceded by LOAD_CONST code" % op)
code[-1] = (LOAD_CONST, Code.from_code(lastarg))
if op not in hasarg:
code.append((op, None))
else:
arg = ord(co_code[i]) + ord(co_code[i+1])*256 + extended_arg
extended_arg = 0
i += 2
if op == opcode.EXTENDED_ARG:
extended_arg = arg << 16
elif op in hasconst:
code.append((op, co.co_consts[arg]))
elif op in hasname:
code.append((op, co.co_names[arg]))
elif op in hasjabs:
code.append((op, labels[arg]))
elif op in hasjrel:
code.append((op, labels[i + arg]))
elif op in haslocal:
code.append((op, co.co_varnames[arg]))
elif op in hascompare:
code.append((op, cmp_op[arg]))
elif op in hasfree:
code.append((op, cellfree[arg]))
else:
code.append((op, arg))
varargs = bool(co.co_flags & CO_VARARGS)
varkwargs = bool(co.co_flags & CO_VARKEYWORDS)
newlocals = bool(co.co_flags & CO_NEWLOCALS)
args = co.co_varnames[:co.co_argcount + varargs + varkwargs]
if co.co_consts and isinstance(co.co_consts[0], basestring):
docstring = co.co_consts[0]
else:
docstring = None
return cls(code = code,
freevars = co.co_freevars,
args = args,
varargs = varargs,
varkwargs = varkwargs,
newlocals = newlocals,
name = co.co_name,
filename = co.co_filename,
firstlineno = co.co_firstlineno,
docstring = docstring,
)
|
[
"def",
"from_code",
"(",
"cls",
",",
"co",
")",
":",
"co_code",
"=",
"co",
".",
"co_code",
"labels",
"=",
"dict",
"(",
"(",
"addr",
",",
"Label",
"(",
")",
")",
"for",
"addr",
"in",
"findlabels",
"(",
"co_code",
")",
")",
"linestarts",
"=",
"dict",
"(",
"cls",
".",
"_findlinestarts",
"(",
"co",
")",
")",
"cellfree",
"=",
"co",
".",
"co_cellvars",
"+",
"co",
".",
"co_freevars",
"code",
"=",
"CodeList",
"(",
")",
"n",
"=",
"len",
"(",
"co_code",
")",
"i",
"=",
"0",
"extended_arg",
"=",
"0",
"while",
"i",
"<",
"n",
":",
"op",
"=",
"Opcode",
"(",
"ord",
"(",
"co_code",
"[",
"i",
"]",
")",
")",
"if",
"i",
"in",
"labels",
":",
"code",
".",
"append",
"(",
"(",
"labels",
"[",
"i",
"]",
",",
"None",
")",
")",
"if",
"i",
"in",
"linestarts",
":",
"code",
".",
"append",
"(",
"(",
"SetLineno",
",",
"linestarts",
"[",
"i",
"]",
")",
")",
"i",
"+=",
"1",
"if",
"op",
"in",
"hascode",
":",
"lastop",
",",
"lastarg",
"=",
"code",
"[",
"-",
"1",
"]",
"if",
"lastop",
"!=",
"LOAD_CONST",
":",
"raise",
"ValueError",
"(",
"\"%s should be preceded by LOAD_CONST code\"",
"%",
"op",
")",
"code",
"[",
"-",
"1",
"]",
"=",
"(",
"LOAD_CONST",
",",
"Code",
".",
"from_code",
"(",
"lastarg",
")",
")",
"if",
"op",
"not",
"in",
"hasarg",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"None",
")",
")",
"else",
":",
"arg",
"=",
"ord",
"(",
"co_code",
"[",
"i",
"]",
")",
"+",
"ord",
"(",
"co_code",
"[",
"i",
"+",
"1",
"]",
")",
"*",
"256",
"+",
"extended_arg",
"extended_arg",
"=",
"0",
"i",
"+=",
"2",
"if",
"op",
"==",
"opcode",
".",
"EXTENDED_ARG",
":",
"extended_arg",
"=",
"arg",
"<<",
"16",
"elif",
"op",
"in",
"hasconst",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"co",
".",
"co_consts",
"[",
"arg",
"]",
")",
")",
"elif",
"op",
"in",
"hasname",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"co",
".",
"co_names",
"[",
"arg",
"]",
")",
")",
"elif",
"op",
"in",
"hasjabs",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"labels",
"[",
"arg",
"]",
")",
")",
"elif",
"op",
"in",
"hasjrel",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"labels",
"[",
"i",
"+",
"arg",
"]",
")",
")",
"elif",
"op",
"in",
"haslocal",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"co",
".",
"co_varnames",
"[",
"arg",
"]",
")",
")",
"elif",
"op",
"in",
"hascompare",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"cmp_op",
"[",
"arg",
"]",
")",
")",
"elif",
"op",
"in",
"hasfree",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"cellfree",
"[",
"arg",
"]",
")",
")",
"else",
":",
"code",
".",
"append",
"(",
"(",
"op",
",",
"arg",
")",
")",
"varargs",
"=",
"bool",
"(",
"co",
".",
"co_flags",
"&",
"CO_VARARGS",
")",
"varkwargs",
"=",
"bool",
"(",
"co",
".",
"co_flags",
"&",
"CO_VARKEYWORDS",
")",
"newlocals",
"=",
"bool",
"(",
"co",
".",
"co_flags",
"&",
"CO_NEWLOCALS",
")",
"args",
"=",
"co",
".",
"co_varnames",
"[",
":",
"co",
".",
"co_argcount",
"+",
"varargs",
"+",
"varkwargs",
"]",
"if",
"co",
".",
"co_consts",
"and",
"isinstance",
"(",
"co",
".",
"co_consts",
"[",
"0",
"]",
",",
"basestring",
")",
":",
"docstring",
"=",
"co",
".",
"co_consts",
"[",
"0",
"]",
"else",
":",
"docstring",
"=",
"None",
"return",
"cls",
"(",
"code",
"=",
"code",
",",
"freevars",
"=",
"co",
".",
"co_freevars",
",",
"args",
"=",
"args",
",",
"varargs",
"=",
"varargs",
",",
"varkwargs",
"=",
"varkwargs",
",",
"newlocals",
"=",
"newlocals",
",",
"name",
"=",
"co",
".",
"co_name",
",",
"filename",
"=",
"co",
".",
"co_filename",
",",
"firstlineno",
"=",
"co",
".",
"co_firstlineno",
",",
"docstring",
"=",
"docstring",
",",
")"
] |
Disassemble a Python code object into a Code object.
|
[
"Disassemble",
"a",
"Python",
"code",
"object",
"into",
"a",
"Code",
"object",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/extern/byteplay2/__init__.py#L320-L387
|
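A minimal usage sketch for the record above, not part of the dataset: it assumes a Python 2 interpreter (byteplay2 depends on Python 2 bytecode and `basestring`) with rootpy importable.

```python
from rootpy.extern.byteplay2 import Code

def add(a, b):
    return a + b

# Code.from_code returns a Code object carrying the attributes passed to
# cls(...) in the record above (code, args, name, filename, docstring, ...)
c = Code.from_code(add.__code__)
print(c.code)   # CodeList of (Opcode, argument) pairs
```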
14,001
|
rootpy/rootpy
|
rootpy/plotting/contrib/quantiles.py
|
effective_sample_size
|
def effective_sample_size(h):
"""
Calculate the effective sample size for a histogram
the same way as ROOT does.
"""
sum = 0
ew = 0
w = 0
for bin in h.bins(overflow=False):
sum += bin.value
ew = bin.error
w += ew * ew
esum = sum * sum / w
return esum
|
python
|
def effective_sample_size(h):
"""
Calculate the effective sample size for a histogram
the same way as ROOT does.
"""
sum = 0
ew = 0
w = 0
for bin in h.bins(overflow=False):
sum += bin.value
ew = bin.error
w += ew * ew
esum = sum * sum / w
return esum
|
[
"def",
"effective_sample_size",
"(",
"h",
")",
":",
"sum",
"=",
"0",
"ew",
"=",
"0",
"w",
"=",
"0",
"for",
"bin",
"in",
"h",
".",
"bins",
"(",
"overflow",
"=",
"False",
")",
":",
"sum",
"+=",
"bin",
".",
"value",
"ew",
"=",
"bin",
".",
"error",
"w",
"+=",
"ew",
"*",
"ew",
"esum",
"=",
"sum",
"*",
"sum",
"/",
"w",
"return",
"esum"
] |
Calculate the effective sample size for a histogram
the same way as ROOT does.
|
[
"Calculate",
"the",
"effective",
"sample",
"size",
"for",
"a",
"histogram",
"the",
"same",
"way",
"as",
"ROOT",
"does",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/contrib/quantiles.py#L107-L120
|
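A short usage sketch (my assumption, not taken from the record): fill a rootpy histogram and compute its effective sample size; for unit-weight fills the result should be close to the number of entries.

```python
from rootpy.plotting import Hist
from rootpy.plotting.contrib.quantiles import effective_sample_size

h = Hist(100, -5, 5)
h.FillRandom('gaus', 10000)        # unit-weight fills
print(effective_sample_size(h))    # ~10000 when all weights are 1
```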
14,002
|
rootpy/rootpy
|
rootpy/plotting/contrib/quantiles.py
|
critical_value
|
def critical_value(n, p):
"""
This function calculates the critical value given
n and p, and confidence level = 1 - p.
"""
dn = 1
delta = 0.5
res = ROOT.TMath.KolmogorovProb(dn * sqrt(n))
while res > 1.0001 * p or res < 0.9999 * p:
if (res > 1.0001 * p):
dn = dn + delta
if (res < 0.9999 * p):
dn = dn - delta
delta = delta / 2.
res = ROOT.TMath.KolmogorovProb(dn * sqrt(n))
return dn
|
python
|
def critical_value(n, p):
"""
This function calculates the critical value given
n and p, and confidence level = 1 - p.
"""
dn = 1
delta = 0.5
res = ROOT.TMath.KolmogorovProb(dn * sqrt(n))
while res > 1.0001 * p or res < 0.9999 * p:
if (res > 1.0001 * p):
dn = dn + delta
if (res < 0.9999 * p):
dn = dn - delta
delta = delta / 2.
res = ROOT.TMath.KolmogorovProb(dn * sqrt(n))
return dn
|
[
"def",
"critical_value",
"(",
"n",
",",
"p",
")",
":",
"dn",
"=",
"1",
"delta",
"=",
"0.5",
"res",
"=",
"ROOT",
".",
"TMath",
".",
"KolmogorovProb",
"(",
"dn",
"*",
"sqrt",
"(",
"n",
")",
")",
"while",
"res",
">",
"1.0001",
"*",
"p",
"or",
"res",
"<",
"0.9999",
"*",
"p",
":",
"if",
"(",
"res",
">",
"1.0001",
"*",
"p",
")",
":",
"dn",
"=",
"dn",
"+",
"delta",
"if",
"(",
"res",
"<",
"0.9999",
"*",
"p",
")",
":",
"dn",
"=",
"dn",
"-",
"delta",
"delta",
"=",
"delta",
"/",
"2.",
"res",
"=",
"ROOT",
".",
"TMath",
".",
"KolmogorovProb",
"(",
"dn",
"*",
"sqrt",
"(",
"n",
")",
")",
"return",
"dn"
] |
This function calculates the critical value given
n and p, and confidence level = 1 - p.
|
[
"This",
"function",
"calculates",
"the",
"critical",
"value",
"given",
"n",
"and",
"p",
"and",
"confidence",
"level",
"=",
"1",
"-",
"p",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/contrib/quantiles.py#L123-L138
|
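Usage sketch (assumed): the Kolmogorov-Smirnov critical value for a sample of 1000 points at 95% confidence (p = 0.05).

```python
from rootpy.plotting.contrib.quantiles import critical_value

# bisects until KolmogorovProb(dn * sqrt(n)) is within 0.01% of p;
# expected to be close to 1.36 / sqrt(1000) ~ 0.043
print(critical_value(1000, 0.05))
```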
14,003
|
rootpy/rootpy
|
rootpy/io/pickler.py
|
dump
|
def dump(obj, root_file, proto=0, key=None):
"""Dump an object into a ROOT TFile.
`root_file` may be an open ROOT file or directory, or a string path to an
existing ROOT file.
"""
if isinstance(root_file, string_types):
root_file = root_open(root_file, 'recreate')
own_file = True
else:
own_file = False
ret = Pickler(root_file, proto).dump(obj, key)
if own_file:
root_file.Close()
return ret
|
python
|
def dump(obj, root_file, proto=0, key=None):
"""Dump an object into a ROOT TFile.
`root_file` may be an open ROOT file or directory, or a string path to an
existing ROOT file.
"""
if isinstance(root_file, string_types):
root_file = root_open(root_file, 'recreate')
own_file = True
else:
own_file = False
ret = Pickler(root_file, proto).dump(obj, key)
if own_file:
root_file.Close()
return ret
|
[
"def",
"dump",
"(",
"obj",
",",
"root_file",
",",
"proto",
"=",
"0",
",",
"key",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"root_file",
",",
"string_types",
")",
":",
"root_file",
"=",
"root_open",
"(",
"root_file",
",",
"'recreate'",
")",
"own_file",
"=",
"True",
"else",
":",
"own_file",
"=",
"False",
"ret",
"=",
"Pickler",
"(",
"root_file",
",",
"proto",
")",
".",
"dump",
"(",
"obj",
",",
"key",
")",
"if",
"own_file",
":",
"root_file",
".",
"Close",
"(",
")",
"return",
"ret"
] |
Dump an object into a ROOT TFile.
`root_file` may be an open ROOT file or directory, or a string path to an
existing ROOT file.
|
[
"Dump",
"an",
"object",
"into",
"a",
"ROOT",
"TFile",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/pickler.py#L344-L358
|
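Usage sketch (assumed): pickle a plain Python object into a ROOT file by passing a string path, so that dump() opens and closes the file itself.

```python
from rootpy.io.pickler import dump

# 'analysis_objects.root' is a hypothetical file name
cuts = {'pt': 30.0, 'eta': 2.5, 'regions': ['SR', 'CR']}
dump(cuts, 'analysis_objects.root', key='cuts')
```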
14,004
|
rootpy/rootpy
|
rootpy/io/pickler.py
|
load
|
def load(root_file, use_proxy=True, key=None):
"""Load an object from a ROOT TFile.
`root_file` may be an open ROOT file or directory, or a string path to an
existing ROOT file.
"""
if isinstance(root_file, string_types):
root_file = root_open(root_file)
own_file = True
else:
own_file = False
obj = Unpickler(root_file, use_proxy).load(key)
if own_file:
root_file.Close()
return obj
|
python
|
def load(root_file, use_proxy=True, key=None):
"""Load an object from a ROOT TFile.
`root_file` may be an open ROOT file or directory, or a string path to an
existing ROOT file.
"""
if isinstance(root_file, string_types):
root_file = root_open(root_file)
own_file = True
else:
own_file = False
obj = Unpickler(root_file, use_proxy).load(key)
if own_file:
root_file.Close()
return obj
|
[
"def",
"load",
"(",
"root_file",
",",
"use_proxy",
"=",
"True",
",",
"key",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"root_file",
",",
"string_types",
")",
":",
"root_file",
"=",
"root_open",
"(",
"root_file",
")",
"own_file",
"=",
"True",
"else",
":",
"own_file",
"=",
"False",
"obj",
"=",
"Unpickler",
"(",
"root_file",
",",
"use_proxy",
")",
".",
"load",
"(",
"key",
")",
"if",
"own_file",
":",
"root_file",
".",
"Close",
"(",
")",
"return",
"obj"
] |
Load an object from a ROOT TFile.
`root_file` may be an open ROOT file or directory, or a string path to an
existing ROOT file.
|
[
"Load",
"an",
"object",
"from",
"a",
"ROOT",
"TFile",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/pickler.py#L361-L375
|
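Matching usage sketch for load() (assumed; the file name and key are the hypothetical ones from the dump() sketch above).

```python
from rootpy.io.pickler import load

cuts = load('analysis_objects.root', key='cuts')
print(cuts['pt'])
```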
14,005
|
rootpy/rootpy
|
rootpy/io/pickler.py
|
Pickler.dump
|
def dump(self, obj, key=None):
"""Write a pickled representation of obj to the open TFile."""
if key is None:
key = '_pickle'
with preserve_current_directory():
self.__file.cd()
if sys.version_info[0] < 3:
pickle.Pickler.dump(self, obj)
else:
super(Pickler, self).dump(obj)
s = ROOT.TObjString(self.__io.getvalue())
self.__io.reopen()
s.Write(key)
self.__file.GetFile().Flush()
self.__pmap.clear()
|
python
|
def dump(self, obj, key=None):
"""Write a pickled representation of obj to the open TFile."""
if key is None:
key = '_pickle'
with preserve_current_directory():
self.__file.cd()
if sys.version_info[0] < 3:
pickle.Pickler.dump(self, obj)
else:
super(Pickler, self).dump(obj)
s = ROOT.TObjString(self.__io.getvalue())
self.__io.reopen()
s.Write(key)
self.__file.GetFile().Flush()
self.__pmap.clear()
|
[
"def",
"dump",
"(",
"self",
",",
"obj",
",",
"key",
"=",
"None",
")",
":",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"'_pickle'",
"with",
"preserve_current_directory",
"(",
")",
":",
"self",
".",
"__file",
".",
"cd",
"(",
")",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"pickle",
".",
"Pickler",
".",
"dump",
"(",
"self",
",",
"obj",
")",
"else",
":",
"super",
"(",
"Pickler",
",",
"self",
")",
".",
"dump",
"(",
"obj",
")",
"s",
"=",
"ROOT",
".",
"TObjString",
"(",
"self",
".",
"__io",
".",
"getvalue",
"(",
")",
")",
"self",
".",
"__io",
".",
"reopen",
"(",
")",
"s",
".",
"Write",
"(",
"key",
")",
"self",
".",
"__file",
".",
"GetFile",
"(",
")",
".",
"Flush",
"(",
")",
"self",
".",
"__pmap",
".",
"clear",
"(",
")"
] |
Write a pickled representation of obj to the open TFile.
|
[
"Write",
"a",
"pickled",
"representation",
"of",
"obj",
"to",
"the",
"open",
"TFile",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/pickler.py#L162-L176
|
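Sketch of driving Pickler directly on an already-open file, mirroring how the module-level dump() wrapper in record 14,003 constructs it (protocol 0, explicit key); this usage is my assumption.

```python
from rootpy.io import root_open
from rootpy.io.pickler import Pickler

# 'objects.root' is a hypothetical file name
with root_open('objects.root', 'recreate') as f:
    Pickler(f, 0).dump({'selection': 'pt > 30'}, key='config')
```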
14,006
|
rootpy/rootpy
|
rootpy/io/pickler.py
|
Unpickler.load
|
def load(self, key=None):
"""Read a pickled object representation from the open file."""
if key is None:
key = '_pickle'
obj = None
if _compat_hooks:
save = _compat_hooks[0]()
try:
self.__n += 1
s = self.__file.Get(key + ';{0:d}'.format(self.__n))
self.__io.setvalue(s.GetName())
if sys.version_info[0] < 3:
obj = pickle.Unpickler.load(self)
else:
obj = super(Unpickler, self).load()
self.__io.reopen()
finally:
if _compat_hooks:
save = _compat_hooks[1](save)
return obj
|
python
|
def load(self, key=None):
"""Read a pickled object representation from the open file."""
if key is None:
key = '_pickle'
obj = None
if _compat_hooks:
save = _compat_hooks[0]()
try:
self.__n += 1
s = self.__file.Get(key + ';{0:d}'.format(self.__n))
self.__io.setvalue(s.GetName())
if sys.version_info[0] < 3:
obj = pickle.Unpickler.load(self)
else:
obj = super(Unpickler, self).load()
self.__io.reopen()
finally:
if _compat_hooks:
save = _compat_hooks[1](save)
return obj
|
[
"def",
"load",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"'_pickle'",
"obj",
"=",
"None",
"if",
"_compat_hooks",
":",
"save",
"=",
"_compat_hooks",
"[",
"0",
"]",
"(",
")",
"try",
":",
"self",
".",
"__n",
"+=",
"1",
"s",
"=",
"self",
".",
"__file",
".",
"Get",
"(",
"key",
"+",
"';{0:d}'",
".",
"format",
"(",
"self",
".",
"__n",
")",
")",
"self",
".",
"__io",
".",
"setvalue",
"(",
"s",
".",
"GetName",
"(",
")",
")",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"obj",
"=",
"pickle",
".",
"Unpickler",
".",
"load",
"(",
"self",
")",
"else",
":",
"obj",
"=",
"super",
"(",
"Unpickler",
",",
"self",
")",
".",
"load",
"(",
")",
"self",
".",
"__io",
".",
"reopen",
"(",
")",
"finally",
":",
"if",
"_compat_hooks",
":",
"save",
"=",
"_compat_hooks",
"[",
"1",
"]",
"(",
"save",
")",
"return",
"obj"
] |
Read a pickled object representation from the open file.
|
[
"Read",
"a",
"pickled",
"object",
"representation",
"from",
"the",
"open",
"file",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/pickler.py#L272-L291
|
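Matching sketch for Unpickler on an open file, again mirroring the module-level load() wrapper (assumed usage; file name and key are hypothetical).

```python
from rootpy.io import root_open
from rootpy.io.pickler import Unpickler

with root_open('objects.root') as f:
    config = Unpickler(f, use_proxy=True).load(key='config')
print(config)
```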
14,007
|
rootpy/rootpy
|
rootpy/utils/extras.py
|
iter_ROOT_classes
|
def iter_ROOT_classes():
"""
Iterator over all available ROOT classes
"""
class_index = "http://root.cern.ch/root/html/ClassIndex.html"
for s in minidom.parse(urlopen(class_index)).getElementsByTagName("span"):
if ("class", "typename") in s.attributes.items():
class_name = s.childNodes[0].nodeValue
try:
yield getattr(QROOT, class_name)
except AttributeError:
pass
|
python
|
def iter_ROOT_classes():
"""
Iterator over all available ROOT classes
"""
class_index = "http://root.cern.ch/root/html/ClassIndex.html"
for s in minidom.parse(urlopen(class_index)).getElementsByTagName("span"):
if ("class", "typename") in s.attributes.items():
class_name = s.childNodes[0].nodeValue
try:
yield getattr(QROOT, class_name)
except AttributeError:
pass
|
[
"def",
"iter_ROOT_classes",
"(",
")",
":",
"class_index",
"=",
"\"http://root.cern.ch/root/html/ClassIndex.html\"",
"for",
"s",
"in",
"minidom",
".",
"parse",
"(",
"urlopen",
"(",
"class_index",
")",
")",
".",
"getElementsByTagName",
"(",
"\"span\"",
")",
":",
"if",
"(",
"\"class\"",
",",
"\"typename\"",
")",
"in",
"s",
".",
"attributes",
".",
"items",
"(",
")",
":",
"class_name",
"=",
"s",
".",
"childNodes",
"[",
"0",
"]",
".",
"nodeValue",
"try",
":",
"yield",
"getattr",
"(",
"QROOT",
",",
"class_name",
")",
"except",
"AttributeError",
":",
"pass"
] |
Iterator over all available ROOT classes
|
[
"Iterator",
"over",
"all",
"available",
"ROOT",
"classes"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/extras.py#L27-L38
|
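Usage sketch (assumed; note the generator fetches http://root.cern.ch over the network, so it needs connectivity and the class index page to still be reachable).

```python
from itertools import islice
from rootpy.utils.extras import iter_ROOT_classes

# print the first five ROOT classes that QROOT can resolve
for cls in islice(iter_ROOT_classes(), 5):
    print(cls.__name__)
```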
14,008
|
rootpy/rootpy
|
rootpy/plotting/style/cmstdr/labels.py
|
CMS_label
|
def CMS_label(text="Preliminary 2012", sqrts=8, pad=None):
""" Add a 'CMS Preliminary' style label to the current Pad.
The blurbs are drawn in the top margin. The label "CMS " + text is drawn
in the upper left. If sqrts is None, it will be omitted. Otherwise, it
will be drawn in the upper right.
"""
if pad is None:
pad = ROOT.gPad
with preserve_current_canvas():
pad.cd()
left_margin = pad.GetLeftMargin()
top_margin = pad.GetTopMargin()
ypos = 1 - top_margin / 2.
l = ROOT.TLatex(left_margin, ypos, "CMS " + text)
l.SetTextAlign(12) # left-middle
l.SetNDC()
# The text is 90% as tall as the margin it lives in.
l.SetTextSize(0.90 * top_margin)
l.Draw()
keepalive(pad, l)
# Draw sqrt(s) label, if desired
if sqrts:
right_margin = pad.GetRightMargin()
p = ROOT.TLatex(1 - right_margin, ypos,
"#sqrt{{s}}={0:d}TeV".format(sqrts))
p.SetTextAlign(32) # right-middle
p.SetNDC()
p.SetTextSize(0.90 * top_margin)
p.Draw()
keepalive(pad, p)
else:
p = None
pad.Modified()
pad.Update()
return l, p
|
python
|
def CMS_label(text="Preliminary 2012", sqrts=8, pad=None):
""" Add a 'CMS Preliminary' style label to the current Pad.
The blurbs are drawn in the top margin. The label "CMS " + text is drawn
in the upper left. If sqrts is None, it will be omitted. Otherwise, it
will be drawn in the upper right.
"""
if pad is None:
pad = ROOT.gPad
with preserve_current_canvas():
pad.cd()
left_margin = pad.GetLeftMargin()
top_margin = pad.GetTopMargin()
ypos = 1 - top_margin / 2.
l = ROOT.TLatex(left_margin, ypos, "CMS " + text)
l.SetTextAlign(12) # left-middle
l.SetNDC()
# The text is 90% as tall as the margin it lives in.
l.SetTextSize(0.90 * top_margin)
l.Draw()
keepalive(pad, l)
# Draw sqrt(s) label, if desired
if sqrts:
right_margin = pad.GetRightMargin()
p = ROOT.TLatex(1 - right_margin, ypos,
"#sqrt{{s}}={0:d}TeV".format(sqrts))
p.SetTextAlign(32) # right-middle
p.SetNDC()
p.SetTextSize(0.90 * top_margin)
p.Draw()
keepalive(pad, p)
else:
p = None
pad.Modified()
pad.Update()
return l, p
|
[
"def",
"CMS_label",
"(",
"text",
"=",
"\"Preliminary 2012\"",
",",
"sqrts",
"=",
"8",
",",
"pad",
"=",
"None",
")",
":",
"if",
"pad",
"is",
"None",
":",
"pad",
"=",
"ROOT",
".",
"gPad",
"with",
"preserve_current_canvas",
"(",
")",
":",
"pad",
".",
"cd",
"(",
")",
"left_margin",
"=",
"pad",
".",
"GetLeftMargin",
"(",
")",
"top_margin",
"=",
"pad",
".",
"GetTopMargin",
"(",
")",
"ypos",
"=",
"1",
"-",
"top_margin",
"/",
"2.",
"l",
"=",
"ROOT",
".",
"TLatex",
"(",
"left_margin",
",",
"ypos",
",",
"\"CMS \"",
"+",
"text",
")",
"l",
".",
"SetTextAlign",
"(",
"12",
")",
"# left-middle",
"l",
".",
"SetNDC",
"(",
")",
"# The text is 90% as tall as the margin it lives in.",
"l",
".",
"SetTextSize",
"(",
"0.90",
"*",
"top_margin",
")",
"l",
".",
"Draw",
"(",
")",
"keepalive",
"(",
"pad",
",",
"l",
")",
"# Draw sqrt(s) label, if desired",
"if",
"sqrts",
":",
"right_margin",
"=",
"pad",
".",
"GetRightMargin",
"(",
")",
"p",
"=",
"ROOT",
".",
"TLatex",
"(",
"1",
"-",
"right_margin",
",",
"ypos",
",",
"\"#sqrt{{s}}={0:d}TeV\"",
".",
"format",
"(",
"sqrts",
")",
")",
"p",
".",
"SetTextAlign",
"(",
"32",
")",
"# right-middle",
"p",
".",
"SetNDC",
"(",
")",
"p",
".",
"SetTextSize",
"(",
"0.90",
"*",
"top_margin",
")",
"p",
".",
"Draw",
"(",
")",
"keepalive",
"(",
"pad",
",",
"p",
")",
"else",
":",
"p",
"=",
"None",
"pad",
".",
"Modified",
"(",
")",
"pad",
".",
"Update",
"(",
")",
"return",
"l",
",",
"p"
] |
Add a 'CMS Preliminary' style label to the current Pad.
The blurbs are drawn in the top margin. The label "CMS " + text is drawn
in the upper left. If sqrts is None, it will be omitted. Otherwise, it
will be drawn in the upper right.
|
[
"Add",
"a",
"CMS",
"Preliminary",
"style",
"label",
"to",
"the",
"current",
"Pad",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/style/cmstdr/labels.py#L15-L50
|
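Usage sketch (assumed): draw something on a canvas first so that ROOT.gPad exists, then add the label; the function returns the two TLatex objects.

```python
from rootpy.plotting import Canvas, Hist
from rootpy.plotting.style.cmstdr.labels import CMS_label

canvas = Canvas()
h = Hist(100, -3, 3)
h.FillRandom('gaus', 5000)
h.Draw()
label, sqrts_label = CMS_label('Preliminary 2012', sqrts=8)
canvas.SaveAs('cms_label_demo.png')   # output name is hypothetical
```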
14,009
|
rootpy/rootpy
|
rootpy/stats/histfactory/utils.py
|
make_channel
|
def make_channel(name, samples, data=None, verbose=False):
"""
Create a Channel from a list of Samples
"""
if verbose:
llog = log['make_channel']
llog.info("creating channel {0}".format(name))
# avoid segfault if name begins with a digit by using "channel_" prefix
chan = Channel('channel_{0}'.format(name))
chan.SetStatErrorConfig(0.05, "Poisson")
if data is not None:
if verbose:
llog.info("setting data")
chan.SetData(data)
for sample in samples:
if verbose:
llog.info("adding sample {0}".format(sample.GetName()))
chan.AddSample(sample)
return chan
|
python
|
def make_channel(name, samples, data=None, verbose=False):
"""
Create a Channel from a list of Samples
"""
if verbose:
llog = log['make_channel']
llog.info("creating channel {0}".format(name))
# avoid segfault if name begins with a digit by using "channel_" prefix
chan = Channel('channel_{0}'.format(name))
chan.SetStatErrorConfig(0.05, "Poisson")
if data is not None:
if verbose:
llog.info("setting data")
chan.SetData(data)
for sample in samples:
if verbose:
llog.info("adding sample {0}".format(sample.GetName()))
chan.AddSample(sample)
return chan
|
[
"def",
"make_channel",
"(",
"name",
",",
"samples",
",",
"data",
"=",
"None",
",",
"verbose",
"=",
"False",
")",
":",
"if",
"verbose",
":",
"llog",
"=",
"log",
"[",
"'make_channel'",
"]",
"llog",
".",
"info",
"(",
"\"creating channel {0}\"",
".",
"format",
"(",
"name",
")",
")",
"# avoid segfault if name begins with a digit by using \"channel_\" prefix",
"chan",
"=",
"Channel",
"(",
"'channel_{0}'",
".",
"format",
"(",
"name",
")",
")",
"chan",
".",
"SetStatErrorConfig",
"(",
"0.05",
",",
"\"Poisson\"",
")",
"if",
"data",
"is",
"not",
"None",
":",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"setting data\"",
")",
"chan",
".",
"SetData",
"(",
"data",
")",
"for",
"sample",
"in",
"samples",
":",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"adding sample {0}\"",
".",
"format",
"(",
"sample",
".",
"GetName",
"(",
")",
")",
")",
"chan",
".",
"AddSample",
"(",
"sample",
")",
"return",
"chan"
] |
Create a Channel from a list of Samples
|
[
"Create",
"a",
"Channel",
"from",
"a",
"list",
"of",
"Samples"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/histfactory/utils.py#L32-L53
|
14,010
|
rootpy/rootpy
|
rootpy/stats/histfactory/utils.py
|
make_measurement
|
def make_measurement(name,
channels,
lumi=1.0, lumi_rel_error=0.1,
output_prefix='./histfactory',
POI=None,
const_params=None,
verbose=False):
"""
Create a Measurement from a list of Channels
"""
if verbose:
llog = log['make_measurement']
llog.info("creating measurement {0}".format(name))
if not isinstance(channels, (list, tuple)):
channels = [channels]
# Create the measurement
meas = Measurement('measurement_{0}'.format(name), '')
meas.SetOutputFilePrefix(output_prefix)
if POI is not None:
if isinstance(POI, string_types):
if verbose:
llog.info("setting POI {0}".format(POI))
meas.SetPOI(POI)
else:
if verbose:
llog.info("adding POIs {0}".format(', '.join(POI)))
for p in POI:
meas.AddPOI(p)
if verbose:
llog.info("setting lumi={0:f} +/- {1:f}".format(lumi, lumi_rel_error))
meas.lumi = lumi
meas.lumi_rel_error = lumi_rel_error
for channel in channels:
if verbose:
llog.info("adding channel {0}".format(channel.GetName()))
meas.AddChannel(channel)
if const_params is not None:
if verbose:
llog.info("adding constant parameters {0}".format(
', '.join(const_params)))
for param in const_params:
meas.AddConstantParam(param)
return meas
|
python
|
def make_measurement(name,
channels,
lumi=1.0, lumi_rel_error=0.1,
output_prefix='./histfactory',
POI=None,
const_params=None,
verbose=False):
"""
Create a Measurement from a list of Channels
"""
if verbose:
llog = log['make_measurement']
llog.info("creating measurement {0}".format(name))
if not isinstance(channels, (list, tuple)):
channels = [channels]
# Create the measurement
meas = Measurement('measurement_{0}'.format(name), '')
meas.SetOutputFilePrefix(output_prefix)
if POI is not None:
if isinstance(POI, string_types):
if verbose:
llog.info("setting POI {0}".format(POI))
meas.SetPOI(POI)
else:
if verbose:
llog.info("adding POIs {0}".format(', '.join(POI)))
for p in POI:
meas.AddPOI(p)
if verbose:
llog.info("setting lumi={0:f} +/- {1:f}".format(lumi, lumi_rel_error))
meas.lumi = lumi
meas.lumi_rel_error = lumi_rel_error
for channel in channels:
if verbose:
llog.info("adding channel {0}".format(channel.GetName()))
meas.AddChannel(channel)
if const_params is not None:
if verbose:
llog.info("adding constant parameters {0}".format(
', '.join(const_params)))
for param in const_params:
meas.AddConstantParam(param)
return meas
|
[
"def",
"make_measurement",
"(",
"name",
",",
"channels",
",",
"lumi",
"=",
"1.0",
",",
"lumi_rel_error",
"=",
"0.1",
",",
"output_prefix",
"=",
"'./histfactory'",
",",
"POI",
"=",
"None",
",",
"const_params",
"=",
"None",
",",
"verbose",
"=",
"False",
")",
":",
"if",
"verbose",
":",
"llog",
"=",
"log",
"[",
"'make_measurement'",
"]",
"llog",
".",
"info",
"(",
"\"creating measurement {0}\"",
".",
"format",
"(",
"name",
")",
")",
"if",
"not",
"isinstance",
"(",
"channels",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"channels",
"=",
"[",
"channels",
"]",
"# Create the measurement",
"meas",
"=",
"Measurement",
"(",
"'measurement_{0}'",
".",
"format",
"(",
"name",
")",
",",
"''",
")",
"meas",
".",
"SetOutputFilePrefix",
"(",
"output_prefix",
")",
"if",
"POI",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"POI",
",",
"string_types",
")",
":",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"setting POI {0}\"",
".",
"format",
"(",
"POI",
")",
")",
"meas",
".",
"SetPOI",
"(",
"POI",
")",
"else",
":",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"adding POIs {0}\"",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"POI",
")",
")",
")",
"for",
"p",
"in",
"POI",
":",
"meas",
".",
"AddPOI",
"(",
"p",
")",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"setting lumi={0:f} +/- {1:f}\"",
".",
"format",
"(",
"lumi",
",",
"lumi_rel_error",
")",
")",
"meas",
".",
"lumi",
"=",
"lumi",
"meas",
".",
"lumi_rel_error",
"=",
"lumi_rel_error",
"for",
"channel",
"in",
"channels",
":",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"adding channel {0}\"",
".",
"format",
"(",
"channel",
".",
"GetName",
"(",
")",
")",
")",
"meas",
".",
"AddChannel",
"(",
"channel",
")",
"if",
"const_params",
"is",
"not",
"None",
":",
"if",
"verbose",
":",
"llog",
".",
"info",
"(",
"\"adding constant parameters {0}\"",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"const_params",
")",
")",
")",
"for",
"param",
"in",
"const_params",
":",
"meas",
".",
"AddConstantParam",
"(",
"param",
")",
"return",
"meas"
] |
Create a Measurement from a list of Channels
|
[
"Create",
"a",
"Measurement",
"from",
"a",
"list",
"of",
"Channels"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/histfactory/utils.py#L56-L104
|
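A combined sketch chaining make_channel (record 14,009) into make_measurement. The raw ROOT.RooStats.HistFactory.Sample construction is my assumption about the HistFactory API, not something taken from these records.

```python
import ROOT
from rootpy.plotting import Hist
from rootpy.stats.histfactory.utils import make_channel, make_measurement

# toy signal template with arbitrary contents
h_sig = Hist(5, 0, 5)
for x in (0.5, 1.5, 2.5, 3.5, 4.5):
    h_sig.Fill(x, 10.0)

signal = ROOT.RooStats.HistFactory.Sample('signal')   # assumed HistFactory API
signal.SetHisto(h_sig)
signal.AddNormFactor('mu', 1.0, 0.0, 5.0)             # signal strength as POI

channel = make_channel('SR', [signal], verbose=True)
meas = make_measurement('toy_fit', channel, POI='mu', verbose=True)
```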
14,011
|
rootpy/rootpy
|
rootpy/stats/histfactory/utils.py
|
make_workspace
|
def make_workspace(measurement, channel=None, name=None, silence=False):
"""
Create a workspace containing the model for a measurement
If `channel` is None then include all channels in the model
If `silence` is True, then silence HistFactory's output on
stdout and stderr.
"""
context = silence_sout_serr if silence else do_nothing
with context():
hist2workspace = ROOT.RooStats.HistFactory.HistoToWorkspaceFactoryFast(
measurement)
if channel is not None:
workspace = hist2workspace.MakeSingleChannelModel(
measurement, channel)
else:
workspace = hist2workspace.MakeCombinedModel(measurement)
workspace = asrootpy(workspace)
keepalive(workspace, measurement)
if name is not None:
workspace.SetName('workspace_{0}'.format(name))
return workspace
|
python
|
def make_workspace(measurement, channel=None, name=None, silence=False):
"""
Create a workspace containing the model for a measurement
If `channel` is None then include all channels in the model
If `silence` is True, then silence HistFactory's output on
stdout and stderr.
"""
context = silence_sout_serr if silence else do_nothing
with context():
hist2workspace = ROOT.RooStats.HistFactory.HistoToWorkspaceFactoryFast(
measurement)
if channel is not None:
workspace = hist2workspace.MakeSingleChannelModel(
measurement, channel)
else:
workspace = hist2workspace.MakeCombinedModel(measurement)
workspace = asrootpy(workspace)
keepalive(workspace, measurement)
if name is not None:
workspace.SetName('workspace_{0}'.format(name))
return workspace
|
[
"def",
"make_workspace",
"(",
"measurement",
",",
"channel",
"=",
"None",
",",
"name",
"=",
"None",
",",
"silence",
"=",
"False",
")",
":",
"context",
"=",
"silence_sout_serr",
"if",
"silence",
"else",
"do_nothing",
"with",
"context",
"(",
")",
":",
"hist2workspace",
"=",
"ROOT",
".",
"RooStats",
".",
"HistFactory",
".",
"HistoToWorkspaceFactoryFast",
"(",
"measurement",
")",
"if",
"channel",
"is",
"not",
"None",
":",
"workspace",
"=",
"hist2workspace",
".",
"MakeSingleChannelModel",
"(",
"measurement",
",",
"channel",
")",
"else",
":",
"workspace",
"=",
"hist2workspace",
".",
"MakeCombinedModel",
"(",
"measurement",
")",
"workspace",
"=",
"asrootpy",
"(",
"workspace",
")",
"keepalive",
"(",
"workspace",
",",
"measurement",
")",
"if",
"name",
"is",
"not",
"None",
":",
"workspace",
".",
"SetName",
"(",
"'workspace_{0}'",
".",
"format",
"(",
"name",
")",
")",
"return",
"workspace"
] |
Create a workspace containing the model for a measurement
If `channel` is None then include all channels in the model
If `silence` is True, then silence HistFactory's output on
stdout and stderr.
|
[
"Create",
"a",
"workspace",
"containing",
"the",
"model",
"for",
"a",
"measurement"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/histfactory/utils.py#L107-L129
|
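Sketch (assumed): build a combined workspace from a measurement read with measurements_from_xml (record 14,012 below); 'config/combination.xml' is a hypothetical HistFactory driver XML.

```python
from rootpy.stats.histfactory.utils import measurements_from_xml, make_workspace

meas = measurements_from_xml('config/combination.xml', silence=True)[0]
workspace = make_workspace(meas, name=meas.name, silence=True)
workspace.Print()
```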
14,012
|
rootpy/rootpy
|
rootpy/stats/histfactory/utils.py
|
measurements_from_xml
|
def measurements_from_xml(filename,
collect_histograms=True,
cd_parent=False,
silence=False):
"""
Read in a list of Measurements from XML
"""
if not os.path.isfile(filename):
raise OSError("the file {0} does not exist".format(filename))
silence_context = silence_sout_serr if silence else do_nothing
filename = os.path.abspath(os.path.normpath(filename))
if cd_parent:
xml_directory = os.path.dirname(filename)
parent = os.path.abspath(os.path.join(xml_directory, os.pardir))
cd_context = working_directory
else:
parent = None
cd_context = do_nothing
log.info("parsing XML in {0} ...".format(filename))
with cd_context(parent):
parser = ROOT.RooStats.HistFactory.ConfigParser()
with silence_context():
measurements_vect = parser.GetMeasurementsFromXML(filename)
# prevent measurements_vect from being garbage collected
ROOT.SetOwnership(measurements_vect, False)
measurements = []
for m in measurements_vect:
if collect_histograms:
with silence_context():
m.CollectHistograms()
measurements.append(asrootpy(m))
return measurements
|
python
|
def measurements_from_xml(filename,
collect_histograms=True,
cd_parent=False,
silence=False):
"""
Read in a list of Measurements from XML
"""
if not os.path.isfile(filename):
raise OSError("the file {0} does not exist".format(filename))
silence_context = silence_sout_serr if silence else do_nothing
filename = os.path.abspath(os.path.normpath(filename))
if cd_parent:
xml_directory = os.path.dirname(filename)
parent = os.path.abspath(os.path.join(xml_directory, os.pardir))
cd_context = working_directory
else:
parent = None
cd_context = do_nothing
log.info("parsing XML in {0} ...".format(filename))
with cd_context(parent):
parser = ROOT.RooStats.HistFactory.ConfigParser()
with silence_context():
measurements_vect = parser.GetMeasurementsFromXML(filename)
# prevent measurements_vect from being garbage collected
ROOT.SetOwnership(measurements_vect, False)
measurements = []
for m in measurements_vect:
if collect_histograms:
with silence_context():
m.CollectHistograms()
measurements.append(asrootpy(m))
return measurements
|
[
"def",
"measurements_from_xml",
"(",
"filename",
",",
"collect_histograms",
"=",
"True",
",",
"cd_parent",
"=",
"False",
",",
"silence",
"=",
"False",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
":",
"raise",
"OSError",
"(",
"\"the file {0} does not exist\"",
".",
"format",
"(",
"filename",
")",
")",
"silence_context",
"=",
"silence_sout_serr",
"if",
"silence",
"else",
"do_nothing",
"filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
")",
"if",
"cd_parent",
":",
"xml_directory",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"filename",
")",
"parent",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"xml_directory",
",",
"os",
".",
"pardir",
")",
")",
"cd_context",
"=",
"working_directory",
"else",
":",
"parent",
"=",
"None",
"cd_context",
"=",
"do_nothing",
"log",
".",
"info",
"(",
"\"parsing XML in {0} ...\"",
".",
"format",
"(",
"filename",
")",
")",
"with",
"cd_context",
"(",
"parent",
")",
":",
"parser",
"=",
"ROOT",
".",
"RooStats",
".",
"HistFactory",
".",
"ConfigParser",
"(",
")",
"with",
"silence_context",
"(",
")",
":",
"measurements_vect",
"=",
"parser",
".",
"GetMeasurementsFromXML",
"(",
"filename",
")",
"# prevent measurements_vect from being garbage collected",
"ROOT",
".",
"SetOwnership",
"(",
"measurements_vect",
",",
"False",
")",
"measurements",
"=",
"[",
"]",
"for",
"m",
"in",
"measurements_vect",
":",
"if",
"collect_histograms",
":",
"with",
"silence_context",
"(",
")",
":",
"m",
".",
"CollectHistograms",
"(",
")",
"measurements",
".",
"append",
"(",
"asrootpy",
"(",
"m",
")",
")",
"return",
"measurements"
] |
Read in a list of Measurements from XML
|
[
"Read",
"in",
"a",
"list",
"of",
"Measurements",
"from",
"XML"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/histfactory/utils.py#L132-L166
|
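Usage sketch (assumed; the XML path is hypothetical and must point at an existing HistFactory driver file). cd_parent=True matches the usual layout where the histogram ROOT file sits one directory above the XML.

```python
from rootpy.stats.histfactory.utils import measurements_from_xml

measurements = measurements_from_xml('config/combination.xml',
                                     collect_histograms=True,
                                     cd_parent=True,
                                     silence=True)
for m in measurements:
    print(m.name)
```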
14,013
|
rootpy/rootpy
|
rootpy/stats/histfactory/utils.py
|
write_measurement
|
def write_measurement(measurement,
root_file=None,
xml_path=None,
output_path=None,
output_suffix=None,
write_workspaces=False,
apply_xml_patches=True,
silence=False):
"""
Write a measurement and RooWorkspaces for all contained channels
into a ROOT file and write the XML files into a directory.
Parameters
----------
measurement : HistFactory::Measurement
An asrootpy'd ``HistFactory::Measurement`` object
root_file : ROOT TFile or string, optional (default=None)
A ROOT file or string file name. The measurement and workspaces
will be written to this file. If ``root_file is None`` then a
new file will be created with the same name as the measurement and
with the prefix ``ws_``.
xml_path : string, optional (default=None)
A directory path to write the XML into. If None, a new directory with
the same name as the measurement and with the prefix ``xml_`` will be
created.
output_path : string, optional (default=None)
If ``root_file is None``, create the ROOT file under this path.
If ``xml_path is None``, create the XML directory under this path.
output_suffix : string, optional (default=None)
If ``root_file is None`` then a new file is created with the same name
as the measurement and with the prefix ``ws_``. ``output_suffix`` will
append a suffix to this file name (before the .root extension).
If ``xml_path is None``, then a new directory is created with the
same name as the measurement and with the prefix ``xml_``.
``output_suffix`` will append a suffix to this directory name.
write_workspaces : bool, optional (default=False)
If True then also write a RooWorkspace for each channel and for all
channels combined.
apply_xml_patches : bool, optional (default=True)
Apply fixes on the output of ``Measurement::PrintXML()`` to avoid known
HistFactory bugs. Some of the patches assume that the ROOT file
containing the histograms will exist one directory level up from the
XML and that hist2workspace, or any tool that later reads the XML will
run from that same directory containing the ROOT file.
silence : bool, optional (default=False)
If True then capture and silence all stdout/stderr output from
HistFactory.
"""
context = silence_sout_serr if silence else do_nothing
output_name = measurement.name
if output_suffix is not None:
output_name += '_{0}'.format(output_suffix)
output_name = output_name.replace(' ', '_')
if xml_path is None:
xml_path = 'xml_{0}'.format(output_name)
if output_path is not None:
xml_path = os.path.join(output_path, xml_path)
if not os.path.exists(xml_path):
mkdir_p(xml_path)
if root_file is None:
root_file = 'ws_{0}.root'.format(output_name)
if output_path is not None:
root_file = os.path.join(output_path, root_file)
own_file = False
if isinstance(root_file, string_types):
root_file = root_open(root_file, 'recreate')
own_file = True
with preserve_current_directory():
root_file.cd()
log.info("writing histograms and measurement in {0} ...".format(
root_file.GetName()))
with context():
measurement.writeToFile(root_file)
# get modified measurement
out_m = root_file.Get(measurement.name)
log.info("writing XML in {0} ...".format(xml_path))
with context():
out_m.PrintXML(xml_path)
if write_workspaces:
log.info("writing combined model in {0} ...".format(
root_file.GetName()))
workspace = make_workspace(measurement, silence=silence)
workspace.Write()
for channel in measurement.channels:
log.info("writing model for channel `{0}` in {1} ...".format(
channel.name, root_file.GetName()))
workspace = make_workspace(
measurement, channel=channel, silence=silence)
workspace.Write()
if apply_xml_patches:
# patch the output XML to avoid HistFactory bugs
patch_xml(glob(os.path.join(xml_path, '*.xml')),
root_file=os.path.basename(root_file.GetName()))
if own_file:
root_file.Close()
|
python
|
def write_measurement(measurement,
root_file=None,
xml_path=None,
output_path=None,
output_suffix=None,
write_workspaces=False,
apply_xml_patches=True,
silence=False):
"""
Write a measurement and RooWorkspaces for all contained channels
into a ROOT file and write the XML files into a directory.
Parameters
----------
measurement : HistFactory::Measurement
An asrootpy'd ``HistFactory::Measurement`` object
root_file : ROOT TFile or string, optional (default=None)
A ROOT file or string file name. The measurement and workspaces
will be written to this file. If ``root_file is None`` then a
new file will be created with the same name as the measurement and
with the prefix ``ws_``.
xml_path : string, optional (default=None)
A directory path to write the XML into. If None, a new directory with
the same name as the measurement and with the prefix ``xml_`` will be
created.
output_path : string, optional (default=None)
If ``root_file is None``, create the ROOT file under this path.
If ``xml_path is None``, create the XML directory under this path.
output_suffix : string, optional (default=None)
If ``root_file is None`` then a new file is created with the same name
as the measurement and with the prefix ``ws_``. ``output_suffix`` will
append a suffix to this file name (before the .root extension).
If ``xml_path is None``, then a new directory is created with the
same name as the measurement and with the prefix ``xml_``.
``output_suffix`` will append a suffix to this directory name.
write_workspaces : bool, optional (default=False)
If True then also write a RooWorkspace for each channel and for all
channels combined.
apply_xml_patches : bool, optional (default=True)
Apply fixes on the output of ``Measurement::PrintXML()`` to avoid known
HistFactory bugs. Some of the patches assume that the ROOT file
containing the histograms will exist one directory level up from the
XML and that hist2workspace, or any tool that later reads the XML will
run from that same directory containing the ROOT file.
silence : bool, optional (default=False)
If True then capture and silence all stdout/stderr output from
HistFactory.
"""
context = silence_sout_serr if silence else do_nothing
output_name = measurement.name
if output_suffix is not None:
output_name += '_{0}'.format(output_suffix)
output_name = output_name.replace(' ', '_')
if xml_path is None:
xml_path = 'xml_{0}'.format(output_name)
if output_path is not None:
xml_path = os.path.join(output_path, xml_path)
if not os.path.exists(xml_path):
mkdir_p(xml_path)
if root_file is None:
root_file = 'ws_{0}.root'.format(output_name)
if output_path is not None:
root_file = os.path.join(output_path, root_file)
own_file = False
if isinstance(root_file, string_types):
root_file = root_open(root_file, 'recreate')
own_file = True
with preserve_current_directory():
root_file.cd()
log.info("writing histograms and measurement in {0} ...".format(
root_file.GetName()))
with context():
measurement.writeToFile(root_file)
# get modified measurement
out_m = root_file.Get(measurement.name)
log.info("writing XML in {0} ...".format(xml_path))
with context():
out_m.PrintXML(xml_path)
if write_workspaces:
log.info("writing combined model in {0} ...".format(
root_file.GetName()))
workspace = make_workspace(measurement, silence=silence)
workspace.Write()
for channel in measurement.channels:
log.info("writing model for channel `{0}` in {1} ...".format(
channel.name, root_file.GetName()))
workspace = make_workspace(
measurement, channel=channel, silence=silence)
workspace.Write()
if apply_xml_patches:
# patch the output XML to avoid HistFactory bugs
patch_xml(glob(os.path.join(xml_path, '*.xml')),
root_file=os.path.basename(root_file.GetName()))
if own_file:
root_file.Close()
|
[
"def",
"write_measurement",
"(",
"measurement",
",",
"root_file",
"=",
"None",
",",
"xml_path",
"=",
"None",
",",
"output_path",
"=",
"None",
",",
"output_suffix",
"=",
"None",
",",
"write_workspaces",
"=",
"False",
",",
"apply_xml_patches",
"=",
"True",
",",
"silence",
"=",
"False",
")",
":",
"context",
"=",
"silence_sout_serr",
"if",
"silence",
"else",
"do_nothing",
"output_name",
"=",
"measurement",
".",
"name",
"if",
"output_suffix",
"is",
"not",
"None",
":",
"output_name",
"+=",
"'_{0}'",
".",
"format",
"(",
"output_suffix",
")",
"output_name",
"=",
"output_name",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
"if",
"xml_path",
"is",
"None",
":",
"xml_path",
"=",
"'xml_{0}'",
".",
"format",
"(",
"output_name",
")",
"if",
"output_path",
"is",
"not",
"None",
":",
"xml_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output_path",
",",
"xml_path",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"xml_path",
")",
":",
"mkdir_p",
"(",
"xml_path",
")",
"if",
"root_file",
"is",
"None",
":",
"root_file",
"=",
"'ws_{0}.root'",
".",
"format",
"(",
"output_name",
")",
"if",
"output_path",
"is",
"not",
"None",
":",
"root_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output_path",
",",
"root_file",
")",
"own_file",
"=",
"False",
"if",
"isinstance",
"(",
"root_file",
",",
"string_types",
")",
":",
"root_file",
"=",
"root_open",
"(",
"root_file",
",",
"'recreate'",
")",
"own_file",
"=",
"True",
"with",
"preserve_current_directory",
"(",
")",
":",
"root_file",
".",
"cd",
"(",
")",
"log",
".",
"info",
"(",
"\"writing histograms and measurement in {0} ...\"",
".",
"format",
"(",
"root_file",
".",
"GetName",
"(",
")",
")",
")",
"with",
"context",
"(",
")",
":",
"measurement",
".",
"writeToFile",
"(",
"root_file",
")",
"# get modified measurement",
"out_m",
"=",
"root_file",
".",
"Get",
"(",
"measurement",
".",
"name",
")",
"log",
".",
"info",
"(",
"\"writing XML in {0} ...\"",
".",
"format",
"(",
"xml_path",
")",
")",
"with",
"context",
"(",
")",
":",
"out_m",
".",
"PrintXML",
"(",
"xml_path",
")",
"if",
"write_workspaces",
":",
"log",
".",
"info",
"(",
"\"writing combined model in {0} ...\"",
".",
"format",
"(",
"root_file",
".",
"GetName",
"(",
")",
")",
")",
"workspace",
"=",
"make_workspace",
"(",
"measurement",
",",
"silence",
"=",
"silence",
")",
"workspace",
".",
"Write",
"(",
")",
"for",
"channel",
"in",
"measurement",
".",
"channels",
":",
"log",
".",
"info",
"(",
"\"writing model for channel `{0}` in {1} ...\"",
".",
"format",
"(",
"channel",
".",
"name",
",",
"root_file",
".",
"GetName",
"(",
")",
")",
")",
"workspace",
"=",
"make_workspace",
"(",
"measurement",
",",
"channel",
"=",
"channel",
",",
"silence",
"=",
"silence",
")",
"workspace",
".",
"Write",
"(",
")",
"if",
"apply_xml_patches",
":",
"# patch the output XML to avoid HistFactory bugs",
"patch_xml",
"(",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"xml_path",
",",
"'*.xml'",
")",
")",
",",
"root_file",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"root_file",
".",
"GetName",
"(",
")",
")",
")",
"if",
"own_file",
":",
"root_file",
".",
"Close",
"(",
")"
] |
Write a measurement and RooWorkspaces for all contained channels
into a ROOT file and write the XML files into a directory.
Parameters
----------
measurement : HistFactory::Measurement
An asrootpy'd ``HistFactory::Measurement`` object
root_file : ROOT TFile or string, optional (default=None)
A ROOT file or string file name. The measurement and workspaces
will be written to this file. If ``root_file is None`` then a
new file will be created with the same name as the measurement and
with the prefix ``ws_``.
xml_path : string, optional (default=None)
A directory path to write the XML into. If None, a new directory with
the same name as the measurement and with the prefix ``xml_`` will be
created.
output_path : string, optional (default=None)
If ``root_file is None``, create the ROOT file under this path.
If ``xml_path is None``, create the XML directory under this path.
output_suffix : string, optional (default=None)
If ``root_file is None`` then a new file is created with the same name
as the measurement and with the prefix ``ws_``. ``output_suffix`` will
append a suffix to this file name (before the .root extension).
If ``xml_path is None``, then a new directory is created with the
same name as the measurement and with the prefix ``xml_``.
``output_suffix`` will append a suffix to this directory name.
write_workspaces : bool, optional (default=False)
If True then also write a RooWorkspace for each channel and for all
channels combined.
apply_xml_patches : bool, optional (default=True)
Apply fixes on the output of ``Measurement::PrintXML()`` to avoid known
HistFactory bugs. Some of the patches assume that the ROOT file
containing the histograms will exist one directory level up from the
XML and that hist2workspace, or any tool that later reads the XML will
run from that same directory containing the ROOT file.
silence : bool, optional (default=False)
If True then capture and silence all stdout/stderr output from
HistFactory.
|
[
"Write",
"a",
"measurement",
"and",
"RooWorkspaces",
"for",
"all",
"contained",
"channels",
"into",
"a",
"ROOT",
"file",
"and",
"write",
"the",
"XML",
"files",
"into",
"a",
"directory",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/histfactory/utils.py#L169-L282
|
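End-to-end sketch (assumed): read a measurement and write it back out together with its per-channel and combined workspaces; all paths are hypothetical.

```python
from rootpy.stats.histfactory.utils import measurements_from_xml, write_measurement

meas = measurements_from_xml('config/combination.xml', silence=True)[0]
write_measurement(meas,
                  output_path='results',
                  output_suffix='v1',
                  write_workspaces=True,
                  silence=True)
# expected outputs: results/ws_<name>_v1.root and results/xml_<name>_v1/
```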
14,014
|
rootpy/rootpy
|
rootpy/stats/histfactory/utils.py
|
patch_xml
|
def patch_xml(files, root_file=None, float_precision=3):
"""
Apply patches to HistFactory XML output from PrintXML
"""
if float_precision < 0:
raise ValueError("precision must be greater than 0")
def fix_path(match):
path = match.group(1)
if path:
head, tail = os.path.split(path)
new_path = os.path.join(os.path.basename(head), tail)
else:
new_path = ''
return '<Input>{0}</Input>'.format(new_path)
for xmlfilename in files:
xmlfilename = os.path.abspath(os.path.normpath(xmlfilename))
patched_xmlfilename = '{0}.tmp'.format(xmlfilename)
log.info("patching {0} ...".format(xmlfilename))
fin = open(xmlfilename, 'r')
fout = open(patched_xmlfilename, 'w')
for line in fin:
if root_file is not None:
line = re.sub(
'InputFile="[^"]*"',
'InputFile="{0}"'.format(root_file), line)
line = line.replace(
'<StatError Activate="True" InputFile="" '
'HistoName="" HistoPath="" />',
'<StatError Activate="True" />')
line = re.sub(
'<Combination OutputFilePrefix="(\S*)" >',
'<Combination OutputFilePrefix="hist2workspace" >', line)
line = re.sub('\w+=""', '', line)
line = re.sub('\s+/>', ' />', line)
line = re.sub('(\S)\s+</', r'\1</', line)
# HistFactory bug:
line = re.sub('InputFileHigh="\S+"', '', line)
line = re.sub('InputFileLow="\S+"', '', line)
# HistFactory bug:
line = line.replace(
'<ParamSetting Const="True"></ParamSetting>', '')
# chop off floats to desired precision
line = re.sub(
r'"(\d*\.\d{{{0:d},}})"'.format(float_precision + 1),
lambda x: '"{0}"'.format(
str(round(float(x.group(1)), float_precision))),
line)
line = re.sub('"\s\s+(\S)', r'" \1', line)
line = re.sub('<Input>(.*)</Input>', fix_path, line)
fout.write(line)
fin.close()
fout.close()
shutil.move(patched_xmlfilename, xmlfilename)
if not os.path.isfile(os.path.join(
os.path.dirname(xmlfilename),
'HistFactorySchema.dtd')):
rootsys = os.getenv('ROOTSYS', None)
if rootsys is not None:
dtdfile = os.path.join(rootsys, 'etc/HistFactorySchema.dtd')
target = os.path.dirname(xmlfilename)
if os.path.isfile(dtdfile):
log.info("copying {0} to {1} ...".format(dtdfile, target))
shutil.copy(dtdfile, target)
else:
log.warning("{0} does not exist".format(dtdfile))
else:
log.warning(
"$ROOTSYS is not set so cannot find HistFactorySchema.dtd")
|
python
|
def patch_xml(files, root_file=None, float_precision=3):
"""
Apply patches to HistFactory XML output from PrintXML
"""
if float_precision < 0:
raise ValueError("precision must be greater than 0")
def fix_path(match):
path = match.group(1)
if path:
head, tail = os.path.split(path)
new_path = os.path.join(os.path.basename(head), tail)
else:
new_path = ''
return '<Input>{0}</Input>'.format(new_path)
for xmlfilename in files:
xmlfilename = os.path.abspath(os.path.normpath(xmlfilename))
patched_xmlfilename = '{0}.tmp'.format(xmlfilename)
log.info("patching {0} ...".format(xmlfilename))
fin = open(xmlfilename, 'r')
fout = open(patched_xmlfilename, 'w')
for line in fin:
if root_file is not None:
line = re.sub(
'InputFile="[^"]*"',
'InputFile="{0}"'.format(root_file), line)
line = line.replace(
'<StatError Activate="True" InputFile="" '
'HistoName="" HistoPath="" />',
'<StatError Activate="True" />')
line = re.sub(
'<Combination OutputFilePrefix="(\S*)" >',
'<Combination OutputFilePrefix="hist2workspace" >', line)
line = re.sub('\w+=""', '', line)
line = re.sub('\s+/>', ' />', line)
line = re.sub('(\S)\s+</', r'\1</', line)
# HistFactory bug:
line = re.sub('InputFileHigh="\S+"', '', line)
line = re.sub('InputFileLow="\S+"', '', line)
# HistFactory bug:
line = line.replace(
'<ParamSetting Const="True"></ParamSetting>', '')
# chop off floats to desired precision
line = re.sub(
r'"(\d*\.\d{{{0:d},}})"'.format(float_precision + 1),
lambda x: '"{0}"'.format(
str(round(float(x.group(1)), float_precision))),
line)
line = re.sub('"\s\s+(\S)', r'" \1', line)
line = re.sub('<Input>(.*)</Input>', fix_path, line)
fout.write(line)
fin.close()
fout.close()
shutil.move(patched_xmlfilename, xmlfilename)
if not os.path.isfile(os.path.join(
os.path.dirname(xmlfilename),
'HistFactorySchema.dtd')):
rootsys = os.getenv('ROOTSYS', None)
if rootsys is not None:
dtdfile = os.path.join(rootsys, 'etc/HistFactorySchema.dtd')
target = os.path.dirname(xmlfilename)
if os.path.isfile(dtdfile):
log.info("copying {0} to {1} ...".format(dtdfile, target))
shutil.copy(dtdfile, target)
else:
log.warning("{0} does not exist".format(dtdfile))
else:
log.warning(
"$ROOTSYS is not set so cannot find HistFactorySchema.dtd")
|
[
"def",
"patch_xml",
"(",
"files",
",",
"root_file",
"=",
"None",
",",
"float_precision",
"=",
"3",
")",
":",
"if",
"float_precision",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"precision must be greater than 0\"",
")",
"def",
"fix_path",
"(",
"match",
")",
":",
"path",
"=",
"match",
".",
"group",
"(",
"1",
")",
"if",
"path",
":",
"head",
",",
"tail",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"new_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"head",
")",
",",
"tail",
")",
"else",
":",
"new_path",
"=",
"''",
"return",
"'<Input>{0}</Input>'",
".",
"format",
"(",
"new_path",
")",
"for",
"xmlfilename",
"in",
"files",
":",
"xmlfilename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"xmlfilename",
")",
")",
"patched_xmlfilename",
"=",
"'{0}.tmp'",
".",
"format",
"(",
"xmlfilename",
")",
"log",
".",
"info",
"(",
"\"patching {0} ...\"",
".",
"format",
"(",
"xmlfilename",
")",
")",
"fin",
"=",
"open",
"(",
"xmlfilename",
",",
"'r'",
")",
"fout",
"=",
"open",
"(",
"patched_xmlfilename",
",",
"'w'",
")",
"for",
"line",
"in",
"fin",
":",
"if",
"root_file",
"is",
"not",
"None",
":",
"line",
"=",
"re",
".",
"sub",
"(",
"'InputFile=\"[^\"]*\"'",
",",
"'InputFile=\"{0}\"'",
".",
"format",
"(",
"root_file",
")",
",",
"line",
")",
"line",
"=",
"line",
".",
"replace",
"(",
"'<StatError Activate=\"True\" InputFile=\"\" '",
"'HistoName=\"\" HistoPath=\"\" />'",
",",
"'<StatError Activate=\"True\" />'",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'<Combination OutputFilePrefix=\"(\\S*)\" >'",
",",
"'<Combination OutputFilePrefix=\"hist2workspace\" >'",
",",
"line",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'\\w+=\"\"'",
",",
"''",
",",
"line",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'\\s+/>'",
",",
"' />'",
",",
"line",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'(\\S)\\s+</'",
",",
"r'\\1</'",
",",
"line",
")",
"# HistFactory bug:",
"line",
"=",
"re",
".",
"sub",
"(",
"'InputFileHigh=\"\\S+\"'",
",",
"''",
",",
"line",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'InputFileLow=\"\\S+\"'",
",",
"''",
",",
"line",
")",
"# HistFactory bug:",
"line",
"=",
"line",
".",
"replace",
"(",
"'<ParamSetting Const=\"True\"></ParamSetting>'",
",",
"''",
")",
"# chop off floats to desired precision",
"line",
"=",
"re",
".",
"sub",
"(",
"r'\"(\\d*\\.\\d{{{0:d},}})\"'",
".",
"format",
"(",
"float_precision",
"+",
"1",
")",
",",
"lambda",
"x",
":",
"'\"{0}\"'",
".",
"format",
"(",
"str",
"(",
"round",
"(",
"float",
"(",
"x",
".",
"group",
"(",
"1",
")",
")",
",",
"float_precision",
")",
")",
")",
",",
"line",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'\"\\s\\s+(\\S)'",
",",
"r'\" \\1'",
",",
"line",
")",
"line",
"=",
"re",
".",
"sub",
"(",
"'<Input>(.*)</Input>'",
",",
"fix_path",
",",
"line",
")",
"fout",
".",
"write",
"(",
"line",
")",
"fin",
".",
"close",
"(",
")",
"fout",
".",
"close",
"(",
")",
"shutil",
".",
"move",
"(",
"patched_xmlfilename",
",",
"xmlfilename",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"xmlfilename",
")",
",",
"'HistFactorySchema.dtd'",
")",
")",
":",
"rootsys",
"=",
"os",
".",
"getenv",
"(",
"'ROOTSYS'",
",",
"None",
")",
"if",
"rootsys",
"is",
"not",
"None",
":",
"dtdfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"rootsys",
",",
"'etc/HistFactorySchema.dtd'",
")",
"target",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"xmlfilename",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"dtdfile",
")",
":",
"log",
".",
"info",
"(",
"\"copying {0} to {1} ...\"",
".",
"format",
"(",
"dtdfile",
",",
"target",
")",
")",
"shutil",
".",
"copy",
"(",
"dtdfile",
",",
"target",
")",
"else",
":",
"log",
".",
"warning",
"(",
"\"{0} does not exist\"",
".",
"format",
"(",
"dtdfile",
")",
")",
"else",
":",
"log",
".",
"warning",
"(",
"\"$ROOTSYS is not set so cannot find HistFactorySchema.dtd\"",
")"
] |
Apply patches to HistFactory XML output from PrintXML
|
[
"Apply",
"patches",
"to",
"HistFactory",
"XML",
"output",
"from",
"PrintXML"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/histfactory/utils.py#L285-L354
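A minimal usage sketch for patch_xml may be helpful here. The XML file pattern and the merged ROOT file name below are illustrative assumptions, and the import path simply follows the module path recorded for this entry (rootpy/stats/histfactory/utils.py).

# Hypothetical usage: patch the XML written by PrintXML so every channel reads
# from one merged ROOT file and floats are rounded to 3 decimal places.
from glob import glob
from rootpy.stats.histfactory.utils import patch_xml

xml_files = glob('config/*.xml')  # assumed location of the PrintXML output
patch_xml(xml_files, root_file='measurement_combined.root', float_precision=3)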
|
14,015
|
rootpy/rootpy
|
rootpy/plotting/views.py
|
_FolderView.path
|
def path(self):
''' Get the path of the wrapped folder '''
if isinstance(self.dir, Directory):
return self.dir._path
elif isinstance(self.dir, ROOT.TDirectory):
return self.dir.GetPath()
elif isinstance(self.dir, _FolderView):
return self.dir.path()
else:
return str(self.dir)
|
python
|
def path(self):
''' Get the path of the wrapped folder '''
if isinstance(self.dir, Directory):
return self.dir._path
elif isinstance(self.dir, ROOT.TDirectory):
return self.dir.GetPath()
elif isinstance(self.dir, _FolderView):
return self.dir.path()
else:
return str(self.dir)
|
[
"def",
"path",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"dir",
",",
"Directory",
")",
":",
"return",
"self",
".",
"dir",
".",
"_path",
"elif",
"isinstance",
"(",
"self",
".",
"dir",
",",
"ROOT",
".",
"TDirectory",
")",
":",
"return",
"self",
".",
"dir",
".",
"GetPath",
"(",
")",
"elif",
"isinstance",
"(",
"self",
".",
"dir",
",",
"_FolderView",
")",
":",
"return",
"self",
".",
"dir",
".",
"path",
"(",
")",
"else",
":",
"return",
"str",
"(",
"self",
".",
"dir",
")"
] |
Get the path of the wrapped folder
|
[
"Get",
"the",
"path",
"of",
"the",
"wrapped",
"folder"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/views.py#L293-L302
|
14,016
|
rootpy/rootpy
|
rootpy/plotting/views.py
|
_MultiFolderView.Get
|
def Get(self, path):
''' Merge the objects at path in all subdirectories '''
return self.merge_views(x.Get(path) for x in self.dirs)
|
python
|
def Get(self, path):
''' Merge the objects at path in all subdirectories '''
return self.merge_views(x.Get(path) for x in self.dirs)
|
[
"def",
"Get",
"(",
"self",
",",
"path",
")",
":",
"return",
"self",
".",
"merge_views",
"(",
"x",
".",
"Get",
"(",
"path",
")",
"for",
"x",
"in",
"self",
".",
"dirs",
")"
] |
Merge the objects at path in all subdirectories
|
[
"Merge",
"the",
"objects",
"at",
"path",
"in",
"all",
"subdirectories"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/views.py#L341-L343
|
14,017
|
rootpy/rootpy
|
rootpy/logger/roothandler.py
|
python_logging_error_handler
|
def python_logging_error_handler(level, root_says_abort, location, msg):
"""
A python error handler for ROOT which maps ROOT's errors and warnings on
to python's.
"""
from ..utils import quickroot as QROOT
if not Initialized.value:
try:
QROOT.kTRUE
except AttributeError:
# Python is exiting. Do nothing.
return
QROOT.kInfo, QROOT.kWarning, QROOT.kError, QROOT.kFatal, QROOT.kSysError
QROOT.gErrorIgnoreLevel
Initialized.value = True
try:
QROOT.kTRUE
except RuntimeError:
# Note: If the above causes us problems, it's because this logging
# handler has been called multiple times already with an
# exception. In that case we need to force upstream to raise it.
_, exc, traceback = sys.exc_info()
caller = sys._getframe(2)
re_execute_with_exception(caller, exc, traceback)
if level < QROOT.gErrorIgnoreLevel:
# Needed to silence some "normal" startup warnings
# (copied from PyROOT Utility.cxx)
return
if sys.version_info[0] >= 3:
location = location.decode('utf-8')
msg = msg.decode('utf-8')
log = ROOT_log.getChild(location.replace("::", "."))
if level >= QROOT.kSysError or level >= QROOT.kFatal:
lvl = logging.CRITICAL
elif level >= QROOT.kError:
lvl = logging.ERROR
elif level >= QROOT.kWarning:
lvl = logging.WARNING
elif level >= QROOT.kInfo:
lvl = logging.INFO
else:
lvl = logging.DEBUG
if not SANE_REGEX.match(msg):
# Not ASCII characters. Escape them.
msg = repr(msg)[1:-1]
# Apply fixups to improve consistency of errors/warnings
lvl, msg = fixup_msg(lvl, msg)
log.log(lvl, msg)
# String checks are used because we need a way of (un)forcing abort without
# modifying a global variable (gErrorAbortLevel) for the multithread tests
abort = lvl >= ABORT_LEVEL or "rootpy.ALWAYSABORT" in msg or root_says_abort
if abort and not "rootpy.NEVERABORT" in msg:
caller = sys._getframe(1)
try:
# We can't raise an exception from here because ctypes/PyROOT swallows it.
# Hence the need for dark magic, we re-raise it within a trace.
from .. import ROOTError
raise ROOTError(level, location, msg)
except RuntimeError:
_, exc, traceback = sys.exc_info()
if SHOWTRACE.enabled:
from traceback import print_stack
print_stack(caller)
if DANGER.enabled:
# Avert your eyes, dark magic be within...
re_execute_with_exception(caller, exc, traceback)
if root_says_abort:
log.critical("abort().. expect a stack trace")
ctypes.CDLL(None).abort()
|
python
|
def python_logging_error_handler(level, root_says_abort, location, msg):
"""
A python error handler for ROOT which maps ROOT's errors and warnings on
to python's.
"""
from ..utils import quickroot as QROOT
if not Initialized.value:
try:
QROOT.kTRUE
except AttributeError:
# Python is exiting. Do nothing.
return
QROOT.kInfo, QROOT.kWarning, QROOT.kError, QROOT.kFatal, QROOT.kSysError
QROOT.gErrorIgnoreLevel
Initialized.value = True
try:
QROOT.kTRUE
except RuntimeError:
# Note: If the above causes us problems, it's because this logging
# handler has been called multiple times already with an
# exception. In that case we need to force upstream to raise it.
_, exc, traceback = sys.exc_info()
caller = sys._getframe(2)
re_execute_with_exception(caller, exc, traceback)
if level < QROOT.gErrorIgnoreLevel:
# Needed to silence some "normal" startup warnings
# (copied from PyROOT Utility.cxx)
return
if sys.version_info[0] >= 3:
location = location.decode('utf-8')
msg = msg.decode('utf-8')
log = ROOT_log.getChild(location.replace("::", "."))
if level >= QROOT.kSysError or level >= QROOT.kFatal:
lvl = logging.CRITICAL
elif level >= QROOT.kError:
lvl = logging.ERROR
elif level >= QROOT.kWarning:
lvl = logging.WARNING
elif level >= QROOT.kInfo:
lvl = logging.INFO
else:
lvl = logging.DEBUG
if not SANE_REGEX.match(msg):
# Not ASCII characters. Escape them.
msg = repr(msg)[1:-1]
# Apply fixups to improve consistency of errors/warnings
lvl, msg = fixup_msg(lvl, msg)
log.log(lvl, msg)
# String checks are used because we need a way of (un)forcing abort without
# modifying a global variable (gErrorAbortLevel) for the multithread tests
abort = lvl >= ABORT_LEVEL or "rootpy.ALWAYSABORT" in msg or root_says_abort
if abort and not "rootpy.NEVERABORT" in msg:
caller = sys._getframe(1)
try:
# We can't raise an exception from here because ctypes/PyROOT swallows it.
# Hence the need for dark magic, we re-raise it within a trace.
from .. import ROOTError
raise ROOTError(level, location, msg)
except RuntimeError:
_, exc, traceback = sys.exc_info()
if SHOWTRACE.enabled:
from traceback import print_stack
print_stack(caller)
if DANGER.enabled:
# Avert your eyes, dark magic be within...
re_execute_with_exception(caller, exc, traceback)
if root_says_abort:
log.critical("abort().. expect a stack trace")
ctypes.CDLL(None).abort()
|
[
"def",
"python_logging_error_handler",
"(",
"level",
",",
"root_says_abort",
",",
"location",
",",
"msg",
")",
":",
"from",
".",
".",
"utils",
"import",
"quickroot",
"as",
"QROOT",
"if",
"not",
"Initialized",
".",
"value",
":",
"try",
":",
"QROOT",
".",
"kTRUE",
"except",
"AttributeError",
":",
"# Python is exiting. Do nothing.",
"return",
"QROOT",
".",
"kInfo",
",",
"QROOT",
".",
"kWarning",
",",
"QROOT",
".",
"kError",
",",
"QROOT",
".",
"kFatal",
",",
"QROOT",
".",
"kSysError",
"QROOT",
".",
"gErrorIgnoreLevel",
"Initialized",
".",
"value",
"=",
"True",
"try",
":",
"QROOT",
".",
"kTRUE",
"except",
"RuntimeError",
":",
"# Note: If the above causes us problems, it's because this logging",
"# handler has been called multiple times already with an",
"# exception. In that case we need to force upstream to raise it.",
"_",
",",
"exc",
",",
"traceback",
"=",
"sys",
".",
"exc_info",
"(",
")",
"caller",
"=",
"sys",
".",
"_getframe",
"(",
"2",
")",
"re_execute_with_exception",
"(",
"caller",
",",
"exc",
",",
"traceback",
")",
"if",
"level",
"<",
"QROOT",
".",
"gErrorIgnoreLevel",
":",
"# Needed to silence some \"normal\" startup warnings",
"# (copied from PyROOT Utility.cxx)",
"return",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
">=",
"3",
":",
"location",
"=",
"location",
".",
"decode",
"(",
"'utf-8'",
")",
"msg",
"=",
"msg",
".",
"decode",
"(",
"'utf-8'",
")",
"log",
"=",
"ROOT_log",
".",
"getChild",
"(",
"location",
".",
"replace",
"(",
"\"::\"",
",",
"\".\"",
")",
")",
"if",
"level",
">=",
"QROOT",
".",
"kSysError",
"or",
"level",
">=",
"QROOT",
".",
"kFatal",
":",
"lvl",
"=",
"logging",
".",
"CRITICAL",
"elif",
"level",
">=",
"QROOT",
".",
"kError",
":",
"lvl",
"=",
"logging",
".",
"ERROR",
"elif",
"level",
">=",
"QROOT",
".",
"kWarning",
":",
"lvl",
"=",
"logging",
".",
"WARNING",
"elif",
"level",
">=",
"QROOT",
".",
"kInfo",
":",
"lvl",
"=",
"logging",
".",
"INFO",
"else",
":",
"lvl",
"=",
"logging",
".",
"DEBUG",
"if",
"not",
"SANE_REGEX",
".",
"match",
"(",
"msg",
")",
":",
"# Not ASCII characters. Escape them.",
"msg",
"=",
"repr",
"(",
"msg",
")",
"[",
"1",
":",
"-",
"1",
"]",
"# Apply fixups to improve consistency of errors/warnings",
"lvl",
",",
"msg",
"=",
"fixup_msg",
"(",
"lvl",
",",
"msg",
")",
"log",
".",
"log",
"(",
"lvl",
",",
"msg",
")",
"# String checks are used because we need a way of (un)forcing abort without",
"# modifying a global variable (gErrorAbortLevel) for the multithread tests",
"abort",
"=",
"lvl",
">=",
"ABORT_LEVEL",
"or",
"\"rootpy.ALWAYSABORT\"",
"in",
"msg",
"or",
"root_says_abort",
"if",
"abort",
"and",
"not",
"\"rootpy.NEVERABORT\"",
"in",
"msg",
":",
"caller",
"=",
"sys",
".",
"_getframe",
"(",
"1",
")",
"try",
":",
"# We can't raise an exception from here because ctypes/PyROOT swallows it.",
"# Hence the need for dark magic, we re-raise it within a trace.",
"from",
".",
".",
"import",
"ROOTError",
"raise",
"ROOTError",
"(",
"level",
",",
"location",
",",
"msg",
")",
"except",
"RuntimeError",
":",
"_",
",",
"exc",
",",
"traceback",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"SHOWTRACE",
".",
"enabled",
":",
"from",
"traceback",
"import",
"print_stack",
"print_stack",
"(",
"caller",
")",
"if",
"DANGER",
".",
"enabled",
":",
"# Avert your eyes, dark magic be within...",
"re_execute_with_exception",
"(",
"caller",
",",
"exc",
",",
"traceback",
")",
"if",
"root_says_abort",
":",
"log",
".",
"critical",
"(",
"\"abort().. expect a stack trace\"",
")",
"ctypes",
".",
"CDLL",
"(",
"None",
")",
".",
"abort",
"(",
")"
] |
A python error handler for ROOT which maps ROOT's errors and warnings on
to python's.
|
[
"A",
"python",
"error",
"handler",
"for",
"ROOT",
"which",
"maps",
"ROOT",
"s",
"errors",
"and",
"warnings",
"on",
"to",
"python",
"s",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/roothandler.py#L42-L124
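The severity cascade in the handler above can be shown in isolation. The sketch below hard-codes ROOT's conventional severity values (kInfo=1000, kWarning=2000, kError=3000, kSysError=5000, kFatal=6000) so that it runs without ROOT installed; those numeric constants are an assumption of the example, not part of this record.

import logging

K_INFO, K_WARNING, K_ERROR, K_SYSERROR, K_FATAL = 1000, 2000, 3000, 5000, 6000

def root_to_logging_level(level):
    # same ordering of checks as python_logging_error_handler above
    if level >= K_SYSERROR or level >= K_FATAL:
        return logging.CRITICAL
    elif level >= K_ERROR:
        return logging.ERROR
    elif level >= K_WARNING:
        return logging.WARNING
    elif level >= K_INFO:
        return logging.INFO
    return logging.DEBUG

assert root_to_logging_level(2000) == logging.WARNING
assert root_to_logging_level(6000) == logging.CRITICAL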
|
14,018
|
rootpy/rootpy
|
rootpy/context.py
|
preserve_current_canvas
|
def preserve_current_canvas():
"""
Context manager which ensures that the current canvas remains the current
canvas when the context is left.
"""
old = ROOT.gPad
try:
yield
finally:
if old:
old.cd()
elif ROOT.gPad:
# Put things back how they were before.
with invisible_canvas():
# This is a round-about way of resetting gPad to None.
# No other technique I tried could do it.
pass
|
python
|
def preserve_current_canvas():
"""
Context manager which ensures that the current canvas remains the current
canvas when the context is left.
"""
old = ROOT.gPad
try:
yield
finally:
if old:
old.cd()
elif ROOT.gPad:
# Put things back how they were before.
with invisible_canvas():
# This is a round-about way of resetting gPad to None.
# No other technique I tried could do it.
pass
|
[
"def",
"preserve_current_canvas",
"(",
")",
":",
"old",
"=",
"ROOT",
".",
"gPad",
"try",
":",
"yield",
"finally",
":",
"if",
"old",
":",
"old",
".",
"cd",
"(",
")",
"elif",
"ROOT",
".",
"gPad",
":",
"# Put things back how they were before.",
"with",
"invisible_canvas",
"(",
")",
":",
"# This is a round-about way of resetting gPad to None.",
"# No other technique I tried could do it.",
"pass"
] |
Context manager which ensures that the current canvas remains the current
canvas when the context is left.
|
[
"Context",
"manager",
"which",
"ensures",
"that",
"the",
"current",
"canvas",
"remains",
"the",
"current",
"canvas",
"when",
"the",
"context",
"is",
"left",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/context.py#L46-L62
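A usage sketch, assuming ROOT is importable and that the generator above is exposed as a context manager (the extracted snippet omits the decorator, presumably contextlib.contextmanager, so the with-statement form below is an assumption).

import ROOT
from rootpy.context import preserve_current_canvas

h = ROOT.TH1F('h_demo', 'demo', 10, 0, 1)
with preserve_current_canvas():
    scratch = ROOT.TCanvas('scratch_canvas')  # becomes current inside the block
    h.Draw()
# whatever pad was current before the block is current again here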
|
14,019
|
rootpy/rootpy
|
rootpy/context.py
|
preserve_batch_state
|
def preserve_batch_state():
"""
Context manager which ensures the batch state is the same on exit as it was
on entry.
"""
with LOCK:
old = ROOT.gROOT.IsBatch()
try:
yield
finally:
ROOT.gROOT.SetBatch(old)
|
python
|
def preserve_batch_state():
"""
Context manager which ensures the batch state is the same on exit as it was
on entry.
"""
with LOCK:
old = ROOT.gROOT.IsBatch()
try:
yield
finally:
ROOT.gROOT.SetBatch(old)
|
[
"def",
"preserve_batch_state",
"(",
")",
":",
"with",
"LOCK",
":",
"old",
"=",
"ROOT",
".",
"gROOT",
".",
"IsBatch",
"(",
")",
"try",
":",
"yield",
"finally",
":",
"ROOT",
".",
"gROOT",
".",
"SetBatch",
"(",
"old",
")"
] |
Context manager which ensures the batch state is the same on exit as it was
on entry.
|
[
"Context",
"manager",
"which",
"ensures",
"the",
"batch",
"state",
"is",
"the",
"same",
"on",
"exit",
"as",
"it",
"was",
"on",
"entry",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/context.py#L81-L91
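A sketch of the intended use, under the same assumption that the generator is exposed as a context manager in rootpy.context.

import ROOT
from rootpy.context import preserve_batch_state

with preserve_batch_state():
    ROOT.gROOT.SetBatch(True)   # suppress canvas windows inside the block
    # ... produce and save plots here ...
# the original batch flag is restored on exit, whatever it was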
|
14,020
|
rootpy/rootpy
|
rootpy/context.py
|
invisible_canvas
|
def invisible_canvas():
"""
Context manager yielding a temporary canvas drawn in batch mode, invisible
to the user. Original state is restored on exit.
Example use; obtain X axis object without interfering with anything::
with invisible_canvas() as c:
efficiency.Draw()
g = efficiency.GetPaintedGraph()
return g.GetXaxis()
"""
with preserve_current_canvas():
with preserve_batch_state():
ROOT.gROOT.SetBatch()
c = ROOT.TCanvas()
try:
c.cd()
yield c
finally:
c.Close()
c.IsA().Destructor(c)
|
python
|
def invisible_canvas():
"""
Context manager yielding a temporary canvas drawn in batch mode, invisible
to the user. Original state is restored on exit.
Example use; obtain X axis object without interfering with anything::
with invisible_canvas() as c:
efficiency.Draw()
g = efficiency.GetPaintedGraph()
return g.GetXaxis()
"""
with preserve_current_canvas():
with preserve_batch_state():
ROOT.gROOT.SetBatch()
c = ROOT.TCanvas()
try:
c.cd()
yield c
finally:
c.Close()
c.IsA().Destructor(c)
|
[
"def",
"invisible_canvas",
"(",
")",
":",
"with",
"preserve_current_canvas",
"(",
")",
":",
"with",
"preserve_batch_state",
"(",
")",
":",
"ROOT",
".",
"gROOT",
".",
"SetBatch",
"(",
")",
"c",
"=",
"ROOT",
".",
"TCanvas",
"(",
")",
"try",
":",
"c",
".",
"cd",
"(",
")",
"yield",
"c",
"finally",
":",
"c",
".",
"Close",
"(",
")",
"c",
".",
"IsA",
"(",
")",
".",
"Destructor",
"(",
"c",
")"
] |
Context manager yielding a temporary canvas drawn in batch mode, invisible
to the user. Original state is restored on exit.
Example use; obtain X axis object without interfering with anything::
with invisible_canvas() as c:
efficiency.Draw()
g = efficiency.GetPaintedGraph()
return g.GetXaxis()
|
[
"Context",
"manager",
"yielding",
"a",
"temporary",
"canvas",
"drawn",
"in",
"batch",
"mode",
"invisible",
"to",
"the",
"user",
".",
"Original",
"state",
"is",
"restored",
"on",
"exit",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/context.py#L95-L116
|
14,021
|
rootpy/rootpy
|
rootpy/context.py
|
thread_specific_tmprootdir
|
def thread_specific_tmprootdir():
"""
Context manager which makes a thread specific gDirectory to avoid
interfering with the current file.
Use cases:
A TTree Draw function which doesn't want to interfere with whatever
gDirectory happens to be.
Multi-threading where there are two threads creating objects with the
same name which must reside in a directory. (again, this happens with
TTree draw)
"""
with preserve_current_directory():
dname = "rootpy-tmp/thread/{0}".format(
threading.current_thread().ident)
d = ROOT.gROOT.mkdir(dname)
if not d:
d = ROOT.gROOT.GetDirectory(dname)
assert d, "Unexpected failure, can't cd to tmpdir."
d.cd()
yield d
|
python
|
def thread_specific_tmprootdir():
"""
Context manager which makes a thread specific gDirectory to avoid
interfering with the current file.
Use cases:
A TTree Draw function which doesn't want to interfere with whatever
gDirectory happens to be.
Multi-threading where there are two threads creating objects with the
same name which must reside in a directory. (again, this happens with
TTree draw)
"""
with preserve_current_directory():
dname = "rootpy-tmp/thread/{0}".format(
threading.current_thread().ident)
d = ROOT.gROOT.mkdir(dname)
if not d:
d = ROOT.gROOT.GetDirectory(dname)
assert d, "Unexpected failure, can't cd to tmpdir."
d.cd()
yield d
|
[
"def",
"thread_specific_tmprootdir",
"(",
")",
":",
"with",
"preserve_current_directory",
"(",
")",
":",
"dname",
"=",
"\"rootpy-tmp/thread/{0}\"",
".",
"format",
"(",
"threading",
".",
"current_thread",
"(",
")",
".",
"ident",
")",
"d",
"=",
"ROOT",
".",
"gROOT",
".",
"mkdir",
"(",
"dname",
")",
"if",
"not",
"d",
":",
"d",
"=",
"ROOT",
".",
"gROOT",
".",
"GetDirectory",
"(",
"dname",
")",
"assert",
"d",
",",
"\"Unexpected failure, can't cd to tmpdir.\"",
"d",
".",
"cd",
"(",
")",
"yield",
"d"
] |
Context manager which makes a thread specific gDirectory to avoid
interfering with the current file.
Use cases:
A TTree Draw function which doesn't want to interfere with whatever
gDirectory happens to be.
Multi-threading where there are two threads creating objects with the
same name which must reside in a directory. (again, this happens with
TTree draw)
|
[
"Context",
"manager",
"which",
"makes",
"a",
"thread",
"specific",
"gDirectory",
"to",
"avoid",
"interfering",
"with",
"the",
"current",
"file",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/context.py#L120-L142
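A sketch of the use case described in the docstring, again assuming the generator is exposed as a context manager; the histogram name 'htemp' is just the conventional name TTree.Draw gives its output.

import ROOT
from rootpy.context import thread_specific_tmprootdir

with thread_specific_tmprootdir():
    # objects created here land in a thread-specific directory, so concurrent
    # workers booking a histogram with the same name do not collide
    h = ROOT.TH1F('htemp', 'htemp', 10, 0, 1)
# the previous gDirectory is restored on exit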
|
14,022
|
rootpy/rootpy
|
rootpy/context.py
|
working_directory
|
def working_directory(path):
"""
A context manager that changes the working directory to the given
path, and then changes it back to its previous value on exit.
"""
prev_cwd = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(prev_cwd)
|
python
|
def working_directory(path):
"""
A context manager that changes the working directory to the given
path, and then changes it back to its previous value on exit.
"""
prev_cwd = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(prev_cwd)
|
[
"def",
"working_directory",
"(",
"path",
")",
":",
"prev_cwd",
"=",
"os",
".",
"getcwd",
"(",
")",
"os",
".",
"chdir",
"(",
"path",
")",
"try",
":",
"yield",
"finally",
":",
"os",
".",
"chdir",
"(",
"prev_cwd",
")"
] |
A context manager that changes the working directory to the given
path, and then changes it back to its previous value on exit.
|
[
"A",
"context",
"manager",
"that",
"changes",
"the",
"working",
"directory",
"to",
"the",
"given",
"path",
"and",
"then",
"changes",
"it",
"back",
"to",
"its",
"previous",
"value",
"on",
"exit",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/context.py#L181-L191
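A self-contained check of the behaviour, using only the standard library apart from the import itself and assuming the generator is exposed as a context manager.

import os
import tempfile
from rootpy.context import working_directory

tmp = tempfile.mkdtemp()
before = os.getcwd()
with working_directory(tmp):
    assert os.path.samefile(os.getcwd(), tmp)
assert os.path.samefile(os.getcwd(), before)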
|
14,023
|
rootpy/rootpy
|
rootpy/plotting/autobinning.py
|
autobinning
|
def autobinning(data, method="freedman_diaconis"):
"""
This method determines the optimal binning for histogramming.
Parameters
----------
data: 1D array-like
Input data.
method: string, one of the following:
- sturges
- sturges-doane
- scott
- sqrt
- doane
- freedman-diaconis
- risk
- knuth
Returns
-------
(nbins, min, max): int, type(data), type(data)
        nbins is the optimal number of bins estimated by the method
min is the minimum of data
max is the maximum of data
Notes
-----
    If the length of data is less than 4 the method returns nbins = 1
"""
name = method.replace("-", "_")
try:
method = getattr(BinningMethods, name)
if not isinstance(method, types.FunctionType):
raise AttributeError
except AttributeError:
raise ValueError("`{0}` is not a valid binning method".format(name))
if len(data) < 4:
return 1, np.min(data), np.max(data)
return int(np.ceil(method(data))), np.min(data), np.max(data)
|
python
|
def autobinning(data, method="freedman_diaconis"):
"""
This method determines the optimal binning for histogramming.
Parameters
----------
data: 1D array-like
Input data.
method: string, one of the following:
- sturges
- sturges-doane
- scott
- sqrt
- doane
- freedman-diaconis
- risk
- knuth
Returns
-------
(nbins, min, max): int, type(data), type(data)
        nbins is the optimal number of bins estimated by the method
min is the minimum of data
max is the maximum of data
Notes
-----
    If the length of data is less than 4 the method returns nbins = 1
"""
name = method.replace("-", "_")
try:
method = getattr(BinningMethods, name)
if not isinstance(method, types.FunctionType):
raise AttributeError
except AttributeError:
raise ValueError("`{0}` is not a valid binning method".format(name))
if len(data) < 4:
return 1, np.min(data), np.max(data)
return int(np.ceil(method(data))), np.min(data), np.max(data)
|
[
"def",
"autobinning",
"(",
"data",
",",
"method",
"=",
"\"freedman_diaconis\"",
")",
":",
"name",
"=",
"method",
".",
"replace",
"(",
"\"-\"",
",",
"\"_\"",
")",
"try",
":",
"method",
"=",
"getattr",
"(",
"BinningMethods",
",",
"name",
")",
"if",
"not",
"isinstance",
"(",
"method",
",",
"types",
".",
"FunctionType",
")",
":",
"raise",
"AttributeError",
"except",
"AttributeError",
":",
"raise",
"ValueError",
"(",
"\"`{0}` is not a valid binning method\"",
".",
"format",
"(",
"name",
")",
")",
"if",
"len",
"(",
"data",
")",
"<",
"4",
":",
"return",
"1",
",",
"np",
".",
"min",
"(",
"data",
")",
",",
"np",
".",
"max",
"(",
"data",
")",
"return",
"int",
"(",
"np",
".",
"ceil",
"(",
"method",
"(",
"data",
")",
")",
")",
",",
"np",
".",
"min",
"(",
"data",
")",
",",
"np",
".",
"max",
"(",
"data",
")"
] |
This method determines the optimal binning for histogramming.
Parameters
----------
data: 1D array-like
Input data.
method: string, one of the following:
- sturges
- sturges-doane
- scott
- sqrt
- doane
- freedman-diaconis
- risk
- knuth
Returns
-------
(nbins, min, max): int, type(data), type(data)
        nbins is the optimal number of bins estimated by the method
min is the minimum of data
max is the maximum of data
Notes
-----
    If the length of data is less than 4 the method returns nbins = 1
|
[
"This",
"method",
"determines",
"the",
"optimal",
"binning",
"for",
"histogramming",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/autobinning.py#L12-L50
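A usage sketch with synthetic data; numpy is assumed to be available and the import path follows the module path recorded for this entry.

import numpy as np
from rootpy.plotting.autobinning import autobinning

data = np.random.normal(loc=0.0, scale=1.0, size=1000)
nbins, low, high = autobinning(data, method='freedman-diaconis')
print(nbins, low, high)   # typically a few dozen bins spanning the data range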
|
14,024
|
rootpy/rootpy
|
rootpy/plotting/autobinning.py
|
BinningMethods.all_methods
|
def all_methods(cls):
"""
Return the names of all available binning methods
"""
def name(fn):
return fn.__get__(cls).__name__.replace("_", "-")
return sorted(name(f) for f in cls.__dict__.values()
if isinstance(f, staticmethod))
|
python
|
def all_methods(cls):
"""
Return the names of all available binning methods
"""
def name(fn):
return fn.__get__(cls).__name__.replace("_", "-")
return sorted(name(f) for f in cls.__dict__.values()
if isinstance(f, staticmethod))
|
[
"def",
"all_methods",
"(",
"cls",
")",
":",
"def",
"name",
"(",
"fn",
")",
":",
"return",
"fn",
".",
"__get__",
"(",
"cls",
")",
".",
"__name__",
".",
"replace",
"(",
"\"_\"",
",",
"\"-\"",
")",
"return",
"sorted",
"(",
"name",
"(",
"f",
")",
"for",
"f",
"in",
"cls",
".",
"__dict__",
".",
"values",
"(",
")",
"if",
"isinstance",
"(",
"f",
",",
"staticmethod",
")",
")"
] |
Return the names of all available binning methods
|
[
"Return",
"the",
"names",
"of",
"all",
"available",
"binning",
"methods"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/autobinning.py#L58-L65
|
14,025
|
rootpy/rootpy
|
rootpy/plotting/autobinning.py
|
BinningMethods.doane
|
def doane(data):
"""
Modified Doane modified
"""
from scipy.stats import skew
n = len(data)
sigma = np.sqrt(6. * (n - 2.) / (n + 1.) / (n + 3.))
return 1 + np.log2(n) + \
np.log2(1 + np.abs(skew(data)) / sigma)
|
python
|
def doane(data):
"""
Modified Doane modified
"""
from scipy.stats import skew
n = len(data)
sigma = np.sqrt(6. * (n - 2.) / (n + 1.) / (n + 3.))
return 1 + np.log2(n) + \
np.log2(1 + np.abs(skew(data)) / sigma)
|
[
"def",
"doane",
"(",
"data",
")",
":",
"from",
"scipy",
".",
"stats",
"import",
"skew",
"n",
"=",
"len",
"(",
"data",
")",
"sigma",
"=",
"np",
".",
"sqrt",
"(",
"6.",
"*",
"(",
"n",
"-",
"2.",
")",
"/",
"(",
"n",
"+",
"1.",
")",
"/",
"(",
"n",
"+",
"3.",
")",
")",
"return",
"1",
"+",
"np",
".",
"log2",
"(",
"n",
")",
"+",
"np",
".",
"log2",
"(",
"1",
"+",
"np",
".",
"abs",
"(",
"skew",
"(",
"data",
")",
")",
"/",
"sigma",
")"
] |
Modified Doane modified
|
[
"Modified",
"Doane",
"modified"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/autobinning.py#L84-L92
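The body implements Doane's formula, k = 1 + log2(n) + log2(1 + |g1| / sigma_g1) with sigma_g1 = sqrt(6(n - 2) / ((n + 1)(n + 3))), where g1 is the sample skewness. For perfectly symmetric data (g1 = 0) it reduces to 1 + log2(n); a quick standalone check of that special case:

import numpy as np

n = 1024
sigma_g1 = np.sqrt(6. * (n - 2.) / (n + 1.) / (n + 3.))   # as in the code above
k = 1 + np.log2(n) + np.log2(1 + 0.0 / sigma_g1)          # zero skewness
assert int(np.ceil(k)) == 11                               # 1 + log2(1024) = 11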
|
14,026
|
rootpy/rootpy
|
rootpy/utils/lock.py
|
lock
|
def lock(path, poll_interval=5, max_age=60):
"""
    Acquire a file lock in a thread-safe manner that also reaps stale locks
possibly left behind by processes that crashed hard.
"""
if max_age < 30:
raise ValueError("`max_age` must be at least 30 seconds")
if poll_interval < 1:
raise ValueError("`poll_interval` must be at least 1 second")
if poll_interval >= max_age:
raise ValueError("`poll_interval` must be less than `max_age`")
proc = '{0:d}@{1}'.format(os.getpid(), platform.node())
lock = LockFile(path)
log.debug("{0} attempting to lock {1}".format(proc, path))
while not lock.i_am_locking():
if lock.is_locked():
# Protect against race condition
try:
# Check age of the lock file
age = time.time() - os.stat(lock.lock_file)[stat.ST_MTIME]
# Break the lock if too old (considered stale)
if age > max_age:
lock.break_lock()
# What if lock was released and reacquired in the meantime?
# We don't want to break a fresh lock!
# If a lock is stale then we may have many threads
# attempting to break it here at the "same time".
# Avoid the possibility of some thread trying to break the
# lock after it has already been broken and after the first
# other thread attempting to acquire the lock by sleeping
# for 0.5 seconds below.
log.warning(
"{0} broke lock on {1} "
"that is {2:d} seconds old".format(
proc, path, int(age)))
except OSError:
# Lock was released just now
# os.path.exists(lock.lock_file) is False
# OSError may be raised by os.stat() or lock.break_lock() above
pass
time.sleep(0.5)
try:
log.debug(
"{0} waiting for {1:d} seconds "
"for lock on {2} to be released".format(
proc, poll_interval, path))
# Use float() here since acquire sleeps for timeout/10
lock.acquire(timeout=float(poll_interval))
except LockTimeout:
pass
log.debug("{0} locked {1}".format(proc, path))
yield lock
lock.release()
log.debug("{0} released lock on {1}".format(proc, path))
|
python
|
def lock(path, poll_interval=5, max_age=60):
"""
    Acquire a file lock in a thread-safe manner that also reaps stale locks
possibly left behind by processes that crashed hard.
"""
if max_age < 30:
raise ValueError("`max_age` must be at least 30 seconds")
if poll_interval < 1:
raise ValueError("`poll_interval` must be at least 1 second")
if poll_interval >= max_age:
raise ValueError("`poll_interval` must be less than `max_age`")
proc = '{0:d}@{1}'.format(os.getpid(), platform.node())
lock = LockFile(path)
log.debug("{0} attempting to lock {1}".format(proc, path))
while not lock.i_am_locking():
if lock.is_locked():
# Protect against race condition
try:
# Check age of the lock file
age = time.time() - os.stat(lock.lock_file)[stat.ST_MTIME]
# Break the lock if too old (considered stale)
if age > max_age:
lock.break_lock()
# What if lock was released and reacquired in the meantime?
# We don't want to break a fresh lock!
# If a lock is stale then we may have many threads
# attempting to break it here at the "same time".
# Avoid the possibility of some thread trying to break the
# lock after it has already been broken and after the first
# other thread attempting to acquire the lock by sleeping
# for 0.5 seconds below.
log.warning(
"{0} broke lock on {1} "
"that is {2:d} seconds old".format(
proc, path, int(age)))
except OSError:
# Lock was released just now
# os.path.exists(lock.lock_file) is False
# OSError may be raised by os.stat() or lock.break_lock() above
pass
time.sleep(0.5)
try:
log.debug(
"{0} waiting for {1:d} seconds "
"for lock on {2} to be released".format(
proc, poll_interval, path))
# Use float() here since acquire sleeps for timeout/10
lock.acquire(timeout=float(poll_interval))
except LockTimeout:
pass
log.debug("{0} locked {1}".format(proc, path))
yield lock
lock.release()
log.debug("{0} released lock on {1}".format(proc, path))
|
[
"def",
"lock",
"(",
"path",
",",
"poll_interval",
"=",
"5",
",",
"max_age",
"=",
"60",
")",
":",
"if",
"max_age",
"<",
"30",
":",
"raise",
"ValueError",
"(",
"\"`max_age` must be at least 30 seconds\"",
")",
"if",
"poll_interval",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"\"`poll_interval` must be at least 1 second\"",
")",
"if",
"poll_interval",
">=",
"max_age",
":",
"raise",
"ValueError",
"(",
"\"`poll_interval` must be less than `max_age`\"",
")",
"proc",
"=",
"'{0:d}@{1}'",
".",
"format",
"(",
"os",
".",
"getpid",
"(",
")",
",",
"platform",
".",
"node",
"(",
")",
")",
"lock",
"=",
"LockFile",
"(",
"path",
")",
"log",
".",
"debug",
"(",
"\"{0} attempting to lock {1}\"",
".",
"format",
"(",
"proc",
",",
"path",
")",
")",
"while",
"not",
"lock",
".",
"i_am_locking",
"(",
")",
":",
"if",
"lock",
".",
"is_locked",
"(",
")",
":",
"# Protect against race condition",
"try",
":",
"# Check age of the lock file",
"age",
"=",
"time",
".",
"time",
"(",
")",
"-",
"os",
".",
"stat",
"(",
"lock",
".",
"lock_file",
")",
"[",
"stat",
".",
"ST_MTIME",
"]",
"# Break the lock if too old (considered stale)",
"if",
"age",
">",
"max_age",
":",
"lock",
".",
"break_lock",
"(",
")",
"# What if lock was released and reacquired in the meantime?",
"# We don't want to break a fresh lock!",
"# If a lock is stale then we may have many threads",
"# attempting to break it here at the \"same time\".",
"# Avoid the possibility of some thread trying to break the",
"# lock after it has already been broken and after the first",
"# other thread attempting to acquire the lock by sleeping",
"# for 0.5 seconds below.",
"log",
".",
"warning",
"(",
"\"{0} broke lock on {1} \"",
"\"that is {2:d} seconds old\"",
".",
"format",
"(",
"proc",
",",
"path",
",",
"int",
"(",
"age",
")",
")",
")",
"except",
"OSError",
":",
"# Lock was released just now",
"# os.path.exists(lock.lock_file) is False",
"# OSError may be raised by os.stat() or lock.break_lock() above",
"pass",
"time",
".",
"sleep",
"(",
"0.5",
")",
"try",
":",
"log",
".",
"debug",
"(",
"\"{0} waiting for {1:d} seconds \"",
"\"for lock on {2} to be released\"",
".",
"format",
"(",
"proc",
",",
"poll_interval",
",",
"path",
")",
")",
"# Use float() here since acquire sleeps for timeout/10",
"lock",
".",
"acquire",
"(",
"timeout",
"=",
"float",
"(",
"poll_interval",
")",
")",
"except",
"LockTimeout",
":",
"pass",
"log",
".",
"debug",
"(",
"\"{0} locked {1}\"",
".",
"format",
"(",
"proc",
",",
"path",
")",
")",
"yield",
"lock",
"lock",
".",
"release",
"(",
")",
"log",
".",
"debug",
"(",
"\"{0} released lock on {1}\"",
".",
"format",
"(",
"proc",
",",
"path",
")",
")"
] |
Acquire a file lock in a thread-safe manner that also reaps stale locks
possibly left behind by processes that crashed hard.
|
[
"Aquire",
"a",
"file",
"lock",
"in",
"a",
"thread",
"-",
"safe",
"manner",
"that",
"also",
"reaps",
"stale",
"locks",
"possibly",
"left",
"behind",
"by",
"processes",
"that",
"crashed",
"hard",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/lock.py#L18-L71
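A usage sketch; the lock file path is a placeholder and the generator is assumed to be exposed as a context manager in rootpy.utils.lock. The arguments below satisfy the constraints checked at the top of the function (poll_interval >= 1, max_age >= 30, poll_interval < max_age).

import os
import tempfile
from rootpy.utils.lock import lock

lockpath = os.path.join(tempfile.gettempdir(), 'demo.lock')   # placeholder path
with lock(lockpath, poll_interval=5, max_age=60):
    # critical section: at most one process or thread holds the lock here;
    # a lock file older than max_age seconds would have been broken as stale
    pass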
|
14,027
|
rootpy/rootpy
|
rootpy/ROOT.py
|
proxy_global
|
def proxy_global(name, no_expand_macro=False, fname='func', args=()):
"""
Used to automatically asrootpy ROOT's thread local variables
"""
if no_expand_macro: # pragma: no cover
# handle older ROOT versions without _ExpandMacroFunction wrapping
@property
def gSomething_no_func(self):
glob = self(getattr(ROOT, name))
# create a fake func() that just returns self
def func():
return glob
glob.func = func
return glob
return gSomething_no_func
@property
def gSomething(self):
obj_func = getattr(getattr(ROOT, name), fname)
try:
obj = obj_func(*args)
except ReferenceError: # null pointer
return None
# asrootpy
return self(obj)
return gSomething
|
python
|
def proxy_global(name, no_expand_macro=False, fname='func', args=()):
"""
Used to automatically asrootpy ROOT's thread local variables
"""
if no_expand_macro: # pragma: no cover
# handle older ROOT versions without _ExpandMacroFunction wrapping
@property
def gSomething_no_func(self):
glob = self(getattr(ROOT, name))
# create a fake func() that just returns self
def func():
return glob
glob.func = func
return glob
return gSomething_no_func
@property
def gSomething(self):
obj_func = getattr(getattr(ROOT, name), fname)
try:
obj = obj_func(*args)
except ReferenceError: # null pointer
return None
# asrootpy
return self(obj)
return gSomething
|
[
"def",
"proxy_global",
"(",
"name",
",",
"no_expand_macro",
"=",
"False",
",",
"fname",
"=",
"'func'",
",",
"args",
"=",
"(",
")",
")",
":",
"if",
"no_expand_macro",
":",
"# pragma: no cover",
"# handle older ROOT versions without _ExpandMacroFunction wrapping",
"@",
"property",
"def",
"gSomething_no_func",
"(",
"self",
")",
":",
"glob",
"=",
"self",
"(",
"getattr",
"(",
"ROOT",
",",
"name",
")",
")",
"# create a fake func() that just returns self",
"def",
"func",
"(",
")",
":",
"return",
"glob",
"glob",
".",
"func",
"=",
"func",
"return",
"glob",
"return",
"gSomething_no_func",
"@",
"property",
"def",
"gSomething",
"(",
"self",
")",
":",
"obj_func",
"=",
"getattr",
"(",
"getattr",
"(",
"ROOT",
",",
"name",
")",
",",
"fname",
")",
"try",
":",
"obj",
"=",
"obj_func",
"(",
"*",
"args",
")",
"except",
"ReferenceError",
":",
"# null pointer",
"return",
"None",
"# asrootpy",
"return",
"self",
"(",
"obj",
")",
"return",
"gSomething"
] |
Used to automatically asrootpy ROOT's thread local variables
|
[
"Used",
"to",
"automatically",
"asrootpy",
"ROOT",
"s",
"thread",
"local",
"variables"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/ROOT.py#L61-L87
|
14,028
|
rootpy/rootpy
|
rootpy/plotting/legend.py
|
Legend.AddEntry
|
def AddEntry(self, thing, label=None, style=None):
"""
Add an entry to the legend.
If `label` is None, `thing.GetTitle()` will be used as the label.
If `style` is None, `thing.legendstyle` is used if present,
otherwise `P`.
"""
if isinstance(thing, HistStack):
things = thing
else:
things = [thing]
for thing in things:
if getattr(thing, 'inlegend', True):
thing_label = thing.GetTitle() if label is None else label
thing_style = getattr(thing, 'legendstyle', 'P') if style is None else style
super(Legend, self).AddEntry(thing, thing_label, thing_style)
keepalive(self, thing)
|
python
|
def AddEntry(self, thing, label=None, style=None):
"""
Add an entry to the legend.
If `label` is None, `thing.GetTitle()` will be used as the label.
If `style` is None, `thing.legendstyle` is used if present,
otherwise `P`.
"""
if isinstance(thing, HistStack):
things = thing
else:
things = [thing]
for thing in things:
if getattr(thing, 'inlegend', True):
thing_label = thing.GetTitle() if label is None else label
thing_style = getattr(thing, 'legendstyle', 'P') if style is None else style
super(Legend, self).AddEntry(thing, thing_label, thing_style)
keepalive(self, thing)
|
[
"def",
"AddEntry",
"(",
"self",
",",
"thing",
",",
"label",
"=",
"None",
",",
"style",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"thing",
",",
"HistStack",
")",
":",
"things",
"=",
"thing",
"else",
":",
"things",
"=",
"[",
"thing",
"]",
"for",
"thing",
"in",
"things",
":",
"if",
"getattr",
"(",
"thing",
",",
"'inlegend'",
",",
"True",
")",
":",
"thing_label",
"=",
"thing",
".",
"GetTitle",
"(",
")",
"if",
"label",
"is",
"None",
"else",
"label",
"thing_style",
"=",
"getattr",
"(",
"thing",
",",
"'legendstyle'",
",",
"'P'",
")",
"if",
"style",
"is",
"None",
"else",
"style",
"super",
"(",
"Legend",
",",
"self",
")",
".",
"AddEntry",
"(",
"thing",
",",
"thing_label",
",",
"thing_style",
")",
"keepalive",
"(",
"self",
",",
"thing",
")"
] |
Add an entry to the legend.
If `label` is None, `thing.GetTitle()` will be used as the label.
If `style` is None, `thing.legendstyle` is used if present,
otherwise `P`.
|
[
"Add",
"an",
"entry",
"to",
"the",
"legend",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/legend.py#L87-L105
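A usage sketch assuming rootpy's Hist and Legend wrappers behave as their names suggest; the constructor arguments, titles and styles below are illustrative, not taken from this record.

from rootpy.plotting import Hist, Legend

h = Hist(10, 0, 1, title='signal')
h.legendstyle = 'F'          # picked up when no style is passed explicitly
leg = Legend(2)              # room for two entries (assumed constructor form)
leg.AddEntry(h)              # label falls back to h.GetTitle(), style to h.legendstyle
leg.AddEntry(h, label='signal (line)', style='L')   # both overridden explicitly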
|
14,029
|
rootpy/rootpy
|
rootpy/logger/magic.py
|
get_seh
|
def get_seh():
"""
Makes a function which can be used to set the ROOT error handler with a
python function and returns the existing error handler.
"""
if ON_RTD:
return lambda x: x
ErrorHandlerFunc_t = ctypes.CFUNCTYPE(
None, ctypes.c_int, ctypes.c_bool,
ctypes.c_char_p, ctypes.c_char_p)
# Required to avoid strange dynamic linker problem on OSX.
# See https://github.com/rootpy/rootpy/issues/256
import ROOT
dll = get_dll("libCore")
SetErrorHandler = None
try:
if dll:
SetErrorHandler = dll._Z15SetErrorHandlerPFvibPKcS0_E
except AttributeError:
pass
if not SetErrorHandler:
log.warning(
"Couldn't find SetErrorHandler. "
"Please submit a rootpy bug report.")
return lambda x: None
SetErrorHandler.restype = ErrorHandlerFunc_t
SetErrorHandler.argtypes = ErrorHandlerFunc_t,
def _SetErrorHandler(fn):
"""
Set ROOT's warning/error handler. Returns the existing one.
"""
log.debug("called SetErrorHandler()")
eh = ErrorHandlerFunc_t(fn)
# ``eh`` can get garbage collected unless kept alive, leading to a segfault.
_keep_alive.append(eh)
return SetErrorHandler(eh)
return _SetErrorHandler
|
python
|
def get_seh():
"""
Makes a function which can be used to set the ROOT error handler with a
python function and returns the existing error handler.
"""
if ON_RTD:
return lambda x: x
ErrorHandlerFunc_t = ctypes.CFUNCTYPE(
None, ctypes.c_int, ctypes.c_bool,
ctypes.c_char_p, ctypes.c_char_p)
# Required to avoid strange dynamic linker problem on OSX.
# See https://github.com/rootpy/rootpy/issues/256
import ROOT
dll = get_dll("libCore")
SetErrorHandler = None
try:
if dll:
SetErrorHandler = dll._Z15SetErrorHandlerPFvibPKcS0_E
except AttributeError:
pass
if not SetErrorHandler:
log.warning(
"Couldn't find SetErrorHandler. "
"Please submit a rootpy bug report.")
return lambda x: None
SetErrorHandler.restype = ErrorHandlerFunc_t
SetErrorHandler.argtypes = ErrorHandlerFunc_t,
def _SetErrorHandler(fn):
"""
Set ROOT's warning/error handler. Returns the existing one.
"""
log.debug("called SetErrorHandler()")
eh = ErrorHandlerFunc_t(fn)
# ``eh`` can get garbage collected unless kept alive, leading to a segfault.
_keep_alive.append(eh)
return SetErrorHandler(eh)
return _SetErrorHandler
|
[
"def",
"get_seh",
"(",
")",
":",
"if",
"ON_RTD",
":",
"return",
"lambda",
"x",
":",
"x",
"ErrorHandlerFunc_t",
"=",
"ctypes",
".",
"CFUNCTYPE",
"(",
"None",
",",
"ctypes",
".",
"c_int",
",",
"ctypes",
".",
"c_bool",
",",
"ctypes",
".",
"c_char_p",
",",
"ctypes",
".",
"c_char_p",
")",
"# Required to avoid strange dynamic linker problem on OSX.",
"# See https://github.com/rootpy/rootpy/issues/256",
"import",
"ROOT",
"dll",
"=",
"get_dll",
"(",
"\"libCore\"",
")",
"SetErrorHandler",
"=",
"None",
"try",
":",
"if",
"dll",
":",
"SetErrorHandler",
"=",
"dll",
".",
"_Z15SetErrorHandlerPFvibPKcS0_E",
"except",
"AttributeError",
":",
"pass",
"if",
"not",
"SetErrorHandler",
":",
"log",
".",
"warning",
"(",
"\"Couldn't find SetErrorHandler. \"",
"\"Please submit a rootpy bug report.\"",
")",
"return",
"lambda",
"x",
":",
"None",
"SetErrorHandler",
".",
"restype",
"=",
"ErrorHandlerFunc_t",
"SetErrorHandler",
".",
"argtypes",
"=",
"ErrorHandlerFunc_t",
",",
"def",
"_SetErrorHandler",
"(",
"fn",
")",
":",
"\"\"\"\n Set ROOT's warning/error handler. Returns the existing one.\n \"\"\"",
"log",
".",
"debug",
"(",
"\"called SetErrorHandler()\"",
")",
"eh",
"=",
"ErrorHandlerFunc_t",
"(",
"fn",
")",
"# ``eh`` can get garbage collected unless kept alive, leading to a segfault.",
"_keep_alive",
".",
"append",
"(",
"eh",
")",
"return",
"SetErrorHandler",
"(",
"eh",
")",
"return",
"_SetErrorHandler"
] |
Makes a function which can be used to set the ROOT error handler with a
python function and returns the existing error handler.
|
[
"Makes",
"a",
"function",
"which",
"can",
"be",
"used",
"to",
"set",
"the",
"ROOT",
"error",
"handler",
"with",
"a",
"python",
"function",
"and",
"returns",
"the",
"existing",
"error",
"handler",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/magic.py#L85-L129
|
14,030
|
rootpy/rootpy
|
rootpy/logger/magic.py
|
get_f_code_idx
|
def get_f_code_idx():
"""
How many pointers into PyFrame is the ``f_code`` variable?
"""
frame = sys._getframe()
frame_ptr = id(frame)
LARGE_ENOUGH = 20
# Look through the frame object until we find the f_tstate variable, whose
# value we know from above.
ptrs = [ctypes.c_voidp.from_address(frame_ptr+i*svp)
for i in range(LARGE_ENOUGH)]
# Find its index into the structure
ptrs = [p.value for p in ptrs]
fcode_ptr = id(frame.f_code)
try:
threadstate_idx = ptrs.index(fcode_ptr)
except ValueError:
log.critical("rootpy bug! Please report this.")
raise
return threadstate_idx
|
python
|
def get_f_code_idx():
"""
How many pointers into PyFrame is the ``f_code`` variable?
"""
frame = sys._getframe()
frame_ptr = id(frame)
LARGE_ENOUGH = 20
# Look through the frame object until we find the f_tstate variable, whose
# value we know from above.
ptrs = [ctypes.c_voidp.from_address(frame_ptr+i*svp)
for i in range(LARGE_ENOUGH)]
# Find its index into the structure
ptrs = [p.value for p in ptrs]
fcode_ptr = id(frame.f_code)
try:
threadstate_idx = ptrs.index(fcode_ptr)
except ValueError:
log.critical("rootpy bug! Please report this.")
raise
return threadstate_idx
|
[
"def",
"get_f_code_idx",
"(",
")",
":",
"frame",
"=",
"sys",
".",
"_getframe",
"(",
")",
"frame_ptr",
"=",
"id",
"(",
"frame",
")",
"LARGE_ENOUGH",
"=",
"20",
"# Look through the frame object until we find the f_tstate variable, whose",
"# value we know from above.",
"ptrs",
"=",
"[",
"ctypes",
".",
"c_voidp",
".",
"from_address",
"(",
"frame_ptr",
"+",
"i",
"*",
"svp",
")",
"for",
"i",
"in",
"range",
"(",
"LARGE_ENOUGH",
")",
"]",
"# Find its index into the structure",
"ptrs",
"=",
"[",
"p",
".",
"value",
"for",
"p",
"in",
"ptrs",
"]",
"fcode_ptr",
"=",
"id",
"(",
"frame",
".",
"f_code",
")",
"try",
":",
"threadstate_idx",
"=",
"ptrs",
".",
"index",
"(",
"fcode_ptr",
")",
"except",
"ValueError",
":",
"log",
".",
"critical",
"(",
"\"rootpy bug! Please report this.\"",
")",
"raise",
"return",
"threadstate_idx"
] |
How many pointers into PyFrame is the ``f_code`` variable?
|
[
"How",
"many",
"pointers",
"into",
"PyFrame",
"is",
"the",
"f_code",
"variable?"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/magic.py#L138-L161
|
14,031
|
rootpy/rootpy
|
rootpy/logger/magic.py
|
get_frame_pointers
|
def get_frame_pointers(frame=None):
"""
Obtain writable pointers to ``frame.f_trace`` and ``frame.f_lineno``.
Very dangerous. Unlikely to be portable between python implementations.
This is hard in general because the ``PyFrameObject`` can have a variable size
depending on the build configuration. We can get it reliably because we can
determine the offset to ``f_tstate`` by searching for the value of that pointer.
"""
if frame is None:
frame = sys._getframe(2)
frame = id(frame)
# http://hg.python.org/cpython/file/3aa530c2db06/Include/frameobject.h#l28
F_TRACE_OFFSET = 6
Ppy_object = ctypes.POINTER(ctypes.py_object)
trace = Ppy_object.from_address(frame+(F_CODE_IDX+F_TRACE_OFFSET)*svp)
LASTI_OFFSET = F_TRACE_OFFSET + 4
lasti_addr = LASTI_OFFSET
lineno_addr = LASTI_OFFSET + ctypes.sizeof(ctypes.c_int)
f_lineno = ctypes.c_int.from_address(lineno_addr)
f_lasti = ctypes.c_int.from_address(lasti_addr)
return trace, f_lineno, f_lasti
|
python
|
def get_frame_pointers(frame=None):
"""
Obtain writable pointers to ``frame.f_trace`` and ``frame.f_lineno``.
Very dangerous. Unlikely to be portable between python implementations.
This is hard in general because the ``PyFrameObject`` can have a variable size
depending on the build configuration. We can get it reliably because we can
determine the offset to ``f_tstate`` by searching for the value of that pointer.
"""
if frame is None:
frame = sys._getframe(2)
frame = id(frame)
# http://hg.python.org/cpython/file/3aa530c2db06/Include/frameobject.h#l28
F_TRACE_OFFSET = 6
Ppy_object = ctypes.POINTER(ctypes.py_object)
trace = Ppy_object.from_address(frame+(F_CODE_IDX+F_TRACE_OFFSET)*svp)
LASTI_OFFSET = F_TRACE_OFFSET + 4
lasti_addr = LASTI_OFFSET
lineno_addr = LASTI_OFFSET + ctypes.sizeof(ctypes.c_int)
f_lineno = ctypes.c_int.from_address(lineno_addr)
f_lasti = ctypes.c_int.from_address(lasti_addr)
return trace, f_lineno, f_lasti
|
[
"def",
"get_frame_pointers",
"(",
"frame",
"=",
"None",
")",
":",
"if",
"frame",
"is",
"None",
":",
"frame",
"=",
"sys",
".",
"_getframe",
"(",
"2",
")",
"frame",
"=",
"id",
"(",
"frame",
")",
"# http://hg.python.org/cpython/file/3aa530c2db06/Include/frameobject.h#l28",
"F_TRACE_OFFSET",
"=",
"6",
"Ppy_object",
"=",
"ctypes",
".",
"POINTER",
"(",
"ctypes",
".",
"py_object",
")",
"trace",
"=",
"Ppy_object",
".",
"from_address",
"(",
"frame",
"+",
"(",
"F_CODE_IDX",
"+",
"F_TRACE_OFFSET",
")",
"*",
"svp",
")",
"LASTI_OFFSET",
"=",
"F_TRACE_OFFSET",
"+",
"4",
"lasti_addr",
"=",
"LASTI_OFFSET",
"lineno_addr",
"=",
"LASTI_OFFSET",
"+",
"ctypes",
".",
"sizeof",
"(",
"ctypes",
".",
"c_int",
")",
"f_lineno",
"=",
"ctypes",
".",
"c_int",
".",
"from_address",
"(",
"lineno_addr",
")",
"f_lasti",
"=",
"ctypes",
".",
"c_int",
".",
"from_address",
"(",
"lasti_addr",
")",
"return",
"trace",
",",
"f_lineno",
",",
"f_lasti"
] |
Obtain writable pointers to ``frame.f_trace`` and ``frame.f_lineno``.
Very dangerous. Unlikely to be portable between python implementations.
This is hard in general because the ``PyFrameObject`` can have a variable size
depending on the build configuration. We can get it reliably because we can
determine the offset to ``f_tstate`` by searching for the value of that pointer.
|
[
"Obtain",
"writable",
"pointers",
"to",
"frame",
".",
"f_trace",
"and",
"frame",
".",
"f_lineno",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/magic.py#L166-L193
|
14,032
|
rootpy/rootpy
|
rootpy/logger/magic.py
|
set_linetrace_on_frame
|
def set_linetrace_on_frame(f, localtrace=None):
"""
Non-portable function to modify linetracing.
Remember to enable global tracing with :py:func:`sys.settrace`, otherwise no
effect!
"""
traceptr, _, _ = get_frame_pointers(f)
if localtrace is not None:
# Need to incref to avoid the frame causing a double-delete
ctypes.pythonapi.Py_IncRef(localtrace)
# Not sure if this is the best way to do this, but it works.
addr = id(localtrace)
else:
addr = 0
traceptr.contents = ctypes.py_object.from_address(addr)
|
python
|
def set_linetrace_on_frame(f, localtrace=None):
"""
Non-portable function to modify linetracing.
Remember to enable global tracing with :py:func:`sys.settrace`, otherwise no
effect!
"""
traceptr, _, _ = get_frame_pointers(f)
if localtrace is not None:
# Need to incref to avoid the frame causing a double-delete
ctypes.pythonapi.Py_IncRef(localtrace)
# Not sure if this is the best way to do this, but it works.
addr = id(localtrace)
else:
addr = 0
traceptr.contents = ctypes.py_object.from_address(addr)
|
[
"def",
"set_linetrace_on_frame",
"(",
"f",
",",
"localtrace",
"=",
"None",
")",
":",
"traceptr",
",",
"_",
",",
"_",
"=",
"get_frame_pointers",
"(",
"f",
")",
"if",
"localtrace",
"is",
"not",
"None",
":",
"# Need to incref to avoid the frame causing a double-delete",
"ctypes",
".",
"pythonapi",
".",
"Py_IncRef",
"(",
"localtrace",
")",
"# Not sure if this is the best way to do this, but it works.",
"addr",
"=",
"id",
"(",
"localtrace",
")",
"else",
":",
"addr",
"=",
"0",
"traceptr",
".",
"contents",
"=",
"ctypes",
".",
"py_object",
".",
"from_address",
"(",
"addr",
")"
] |
Non-portable function to modify linetracing.
Remember to enable global tracing with :py:func:`sys.settrace`, otherwise no
effect!
|
[
"Non",
"-",
"portable",
"function",
"to",
"modify",
"linetracing",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/magic.py#L196-L212
|
14,033
|
rootpy/rootpy
|
rootpy/logger/magic.py
|
re_execute_with_exception
|
def re_execute_with_exception(frame, exception, traceback):
"""
Dark magic. Causes ``frame`` to raise an exception at the current location
with ``traceback`` appended to it.
Note that since the line tracer is raising an exception, the interpreter
disables the global trace, so it's not possible to restore the previous
tracing conditions.
"""
if sys.gettrace() == globaltrace:
# If our trace handler is already installed, that means that this
# function has been called twice before the line tracer had a chance to
# run. That can happen if more than one exception was logged.
return
call_lineno = frame.f_lineno
def intercept_next_line(f, why, *args):
if f is not frame:
return
set_linetrace_on_frame(f)
# Undo modifications to the callers code (ick ick ick)
back_like_nothing_happened()
# Raise exception in (almost) the perfect place (except for duplication)
if sys.version_info[0] < 3:
#raise exception.__class__, exception, traceback
raise exception
raise exception.with_traceback(traceback)
set_linetrace_on_frame(frame, intercept_next_line)
linestarts = list(dis.findlinestarts(frame.f_code))
linestarts = [a for a, l in linestarts if l >= call_lineno]
# Jump target
dest = linestarts[0]
oc = frame.f_code.co_code[frame.f_lasti]
if sys.version_info[0] < 3:
oc = ord(oc)
opcode_size = 2 if oc >= opcode.HAVE_ARGUMENT else 0
# Opcode to overwrite
where = frame.f_lasti + 1 + opcode_size
#dis.disco(frame.f_code)
pc = PyCodeObject.from_address(id(frame.f_code))
back_like_nothing_happened = pc.co_code.contents.inject_jump(where, dest)
#print("#"*100)
#dis.disco(frame.f_code)
sys.settrace(globaltrace)
|
python
|
def re_execute_with_exception(frame, exception, traceback):
"""
Dark magic. Causes ``frame`` to raise an exception at the current location
with ``traceback`` appended to it.
Note that since the line tracer is raising an exception, the interpreter
disables the global trace, so it's not possible to restore the previous
tracing conditions.
"""
if sys.gettrace() == globaltrace:
# If our trace handler is already installed, that means that this
# function has been called twice before the line tracer had a chance to
# run. That can happen if more than one exception was logged.
return
call_lineno = frame.f_lineno
def intercept_next_line(f, why, *args):
if f is not frame:
return
set_linetrace_on_frame(f)
# Undo modifications to the callers code (ick ick ick)
back_like_nothing_happened()
# Raise exception in (almost) the perfect place (except for duplication)
if sys.version_info[0] < 3:
#raise exception.__class__, exception, traceback
raise exception
raise exception.with_traceback(traceback)
set_linetrace_on_frame(frame, intercept_next_line)
linestarts = list(dis.findlinestarts(frame.f_code))
linestarts = [a for a, l in linestarts if l >= call_lineno]
# Jump target
dest = linestarts[0]
oc = frame.f_code.co_code[frame.f_lasti]
if sys.version_info[0] < 3:
oc = ord(oc)
opcode_size = 2 if oc >= opcode.HAVE_ARGUMENT else 0
# Opcode to overwrite
where = frame.f_lasti + 1 + opcode_size
#dis.disco(frame.f_code)
pc = PyCodeObject.from_address(id(frame.f_code))
back_like_nothing_happened = pc.co_code.contents.inject_jump(where, dest)
#print("#"*100)
#dis.disco(frame.f_code)
sys.settrace(globaltrace)
|
[
"def",
"re_execute_with_exception",
"(",
"frame",
",",
"exception",
",",
"traceback",
")",
":",
"if",
"sys",
".",
"gettrace",
"(",
")",
"==",
"globaltrace",
":",
"# If our trace handler is already installed, that means that this",
"# function has been called twice before the line tracer had a chance to",
"# run. That can happen if more than one exception was logged.",
"return",
"call_lineno",
"=",
"frame",
".",
"f_lineno",
"def",
"intercept_next_line",
"(",
"f",
",",
"why",
",",
"*",
"args",
")",
":",
"if",
"f",
"is",
"not",
"frame",
":",
"return",
"set_linetrace_on_frame",
"(",
"f",
")",
"# Undo modifications to the callers code (ick ick ick)",
"back_like_nothing_happened",
"(",
")",
"# Raise exception in (almost) the perfect place (except for duplication)",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"#raise exception.__class__, exception, traceback",
"raise",
"exception",
"raise",
"exception",
".",
"with_traceback",
"(",
"traceback",
")",
"set_linetrace_on_frame",
"(",
"frame",
",",
"intercept_next_line",
")",
"linestarts",
"=",
"list",
"(",
"dis",
".",
"findlinestarts",
"(",
"frame",
".",
"f_code",
")",
")",
"linestarts",
"=",
"[",
"a",
"for",
"a",
",",
"l",
"in",
"linestarts",
"if",
"l",
">=",
"call_lineno",
"]",
"# Jump target",
"dest",
"=",
"linestarts",
"[",
"0",
"]",
"oc",
"=",
"frame",
".",
"f_code",
".",
"co_code",
"[",
"frame",
".",
"f_lasti",
"]",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"oc",
"=",
"ord",
"(",
"oc",
")",
"opcode_size",
"=",
"2",
"if",
"oc",
">=",
"opcode",
".",
"HAVE_ARGUMENT",
"else",
"0",
"# Opcode to overwrite",
"where",
"=",
"frame",
".",
"f_lasti",
"+",
"1",
"+",
"opcode_size",
"#dis.disco(frame.f_code)",
"pc",
"=",
"PyCodeObject",
".",
"from_address",
"(",
"id",
"(",
"frame",
".",
"f_code",
")",
")",
"back_like_nothing_happened",
"=",
"pc",
".",
"co_code",
".",
"contents",
".",
"inject_jump",
"(",
"where",
",",
"dest",
")",
"#print(\"#\"*100)",
"#dis.disco(frame.f_code)",
"sys",
".",
"settrace",
"(",
"globaltrace",
")"
] |
Dark magic. Causes ``frame`` to raise an exception at the current location
with ``traceback`` appended to it.
Note that since the line tracer is raising an exception, the interpreter
disables the global trace, so it's not possible to restore the previous
tracing conditions.
|
[
"Dark",
"magic",
".",
"Causes",
"frame",
"to",
"raise",
"an",
"exception",
"at",
"the",
"current",
"location",
"with",
"traceback",
"appended",
"to",
"it",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/magic.py#L219-L269
|
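A minimal sketch of the `sys.settrace`/per-frame tracing mechanism that `re_execute_with_exception` builds on, assuming only the standard library. The names below (`global_tracer`, `local_tracer`, `demo`) are illustrative and not part of rootpy; the real implementation additionally patches bytecode via `inject_jump` to re-enter the calling frame at the right line.

import sys

def local_tracer(frame, event, arg):
    # Fires before each line executes in the traced frame; this is the hook
    # rootpy's intercept_next_line uses to re-raise the logged exception.
    if event == 'line':
        print('about to run line {0} of {1}'.format(
            frame.f_lineno, frame.f_code.co_name))
        return None  # stop tracing this frame after the first line
    return local_tracer

def global_tracer(frame, event, arg):
    # Returning a local tracer from the 'call' event enables per-line events.
    if event == 'call':
        return local_tracer
    return None

def demo():
    a = 1
    b = a + 1
    return b

sys.settrace(global_tracer)
demo()       # prints one 'about to run line ...' message, then runs normally
sys.settrace(None)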
14,034
|
rootpy/rootpy
|
rootpy/logger/magic.py
|
_inject_jump
|
def _inject_jump(self, where, dest):
"""
Monkeypatch bytecode at ``where`` to force it to jump to ``dest``.
Returns function which puts things back to how they were.
"""
# We're about to do dangerous things to a function's code content.
# We can't make a lock to prevent the interpreter from using those
# bytes, so the best we can do is to set the check interval to be high
# and just pray that this keeps other threads at bay.
if sys.version_info[0] < 3:
old_check_interval = sys.getcheckinterval()
sys.setcheckinterval(2**20)
else:
old_check_interval = sys.getswitchinterval()
sys.setswitchinterval(1000)
pb = ctypes.pointer(self.ob_sval)
orig_bytes = [pb[where + i][0] for i in range(3)]
v = struct.pack("<BH", opcode.opmap["JUMP_ABSOLUTE"], dest)
# Overwrite code to cause it to jump to the target
if sys.version_info[0] < 3:
for i in range(3):
pb[where + i][0] = ord(v[i])
else:
for i in range(3):
pb[where + i][0] = v[i]
def tidy_up():
"""
Put the bytecode back to how it was. Good as new.
"""
if sys.version_info[0] < 3:
sys.setcheckinterval(old_check_interval)
else:
sys.setswitchinterval(old_check_interval)
for i in range(3):
pb[where + i][0] = orig_bytes[i]
return tidy_up
|
python
|
def _inject_jump(self, where, dest):
"""
Monkeypatch bytecode at ``where`` to force it to jump to ``dest``.
Returns function which puts things back to how they were.
"""
# We're about to do dangerous things to a function's code content.
# We can't make a lock to prevent the interpreter from using those
# bytes, so the best we can do is to set the check interval to be high
# and just pray that this keeps other threads at bay.
if sys.version_info[0] < 3:
old_check_interval = sys.getcheckinterval()
sys.setcheckinterval(2**20)
else:
old_check_interval = sys.getswitchinterval()
sys.setswitchinterval(1000)
pb = ctypes.pointer(self.ob_sval)
orig_bytes = [pb[where + i][0] for i in range(3)]
v = struct.pack("<BH", opcode.opmap["JUMP_ABSOLUTE"], dest)
# Overwrite code to cause it to jump to the target
if sys.version_info[0] < 3:
for i in range(3):
pb[where + i][0] = ord(v[i])
else:
for i in range(3):
pb[where + i][0] = v[i]
def tidy_up():
"""
Put the bytecode back to how it was. Good as new.
"""
if sys.version_info[0] < 3:
sys.setcheckinterval(old_check_interval)
else:
sys.setswitchinterval(old_check_interval)
for i in range(3):
pb[where + i][0] = orig_bytes[i]
return tidy_up
|
[
"def",
"_inject_jump",
"(",
"self",
",",
"where",
",",
"dest",
")",
":",
"# We're about to do dangerous things to a function's code content.",
"# We can't make a lock to prevent the interpreter from using those",
"# bytes, so the best we can do is to set the check interval to be high",
"# and just pray that this keeps other threads at bay.",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"old_check_interval",
"=",
"sys",
".",
"getcheckinterval",
"(",
")",
"sys",
".",
"setcheckinterval",
"(",
"2",
"**",
"20",
")",
"else",
":",
"old_check_interval",
"=",
"sys",
".",
"getswitchinterval",
"(",
")",
"sys",
".",
"setswitchinterval",
"(",
"1000",
")",
"pb",
"=",
"ctypes",
".",
"pointer",
"(",
"self",
".",
"ob_sval",
")",
"orig_bytes",
"=",
"[",
"pb",
"[",
"where",
"+",
"i",
"]",
"[",
"0",
"]",
"for",
"i",
"in",
"range",
"(",
"3",
")",
"]",
"v",
"=",
"struct",
".",
"pack",
"(",
"\"<BH\"",
",",
"opcode",
".",
"opmap",
"[",
"\"JUMP_ABSOLUTE\"",
"]",
",",
"dest",
")",
"# Overwrite code to cause it to jump to the target",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"for",
"i",
"in",
"range",
"(",
"3",
")",
":",
"pb",
"[",
"where",
"+",
"i",
"]",
"[",
"0",
"]",
"=",
"ord",
"(",
"v",
"[",
"i",
"]",
")",
"else",
":",
"for",
"i",
"in",
"range",
"(",
"3",
")",
":",
"pb",
"[",
"where",
"+",
"i",
"]",
"[",
"0",
"]",
"=",
"v",
"[",
"i",
"]",
"def",
"tidy_up",
"(",
")",
":",
"\"\"\"\n Put the bytecode back to how it was. Good as new.\n \"\"\"",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"sys",
".",
"setcheckinterval",
"(",
"old_check_interval",
")",
"else",
":",
"sys",
".",
"setswitchinterval",
"(",
"old_check_interval",
")",
"for",
"i",
"in",
"range",
"(",
"3",
")",
":",
"pb",
"[",
"where",
"+",
"i",
"]",
"[",
"0",
"]",
"=",
"orig_bytes",
"[",
"i",
"]",
"return",
"tidy_up"
] |
Monkeypatch bytecode at ``where`` to force it to jump to ``dest``.
Returns function which puts things back to how they were.
|
[
"Monkeypatch",
"bytecode",
"at",
"where",
"to",
"force",
"it",
"to",
"jump",
"to",
"dest",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/magic.py#L272-L313
|
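`_inject_jump` overwrites three bytes: one opcode followed by a 16-bit little-endian argument, packed with `struct.pack("<BH", ...)`. A hedged sketch of that byte layout, assuming a CPython release that still defines `JUMP_ABSOLUTE` (it was removed in 3.11); nothing here touches a live code object, and `dest` is a hypothetical offset.

import dis
import opcode
import struct

dest = 42  # hypothetical bytecode offset to jump to
patch = bytearray(struct.pack("<BH", opcode.opmap["JUMP_ABSOLUTE"], dest))

print(len(patch))                   # 3: one opcode byte plus a 2-byte argument
print(dis.opname[patch[0]])         # JUMP_ABSOLUTE
print(patch[1] | (patch[2] << 8))   # 42, the little-endian jump target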
14,035
|
rootpy/rootpy
|
rootpy/tree/chain.py
|
BaseTreeChain.Draw
|
def Draw(self, *args, **kwargs):
"""
Loop over subfiles, draw each, and sum the output into a single
histogram.
"""
self.reset()
output = None
while self._rollover():
if output is None:
# Make our own copy of the drawn histogram
output = self._tree.Draw(*args, **kwargs)
if output is not None:
output = output.Clone()
# Make it memory resident (histograms)
if hasattr(output, 'SetDirectory'):
output.SetDirectory(0)
else:
newoutput = self._tree.Draw(*args, **kwargs)
if newoutput is not None:
if isinstance(output, _GraphBase):
output.Append(newoutput)
else: # histogram
output += newoutput
return output
|
python
|
def Draw(self, *args, **kwargs):
"""
Loop over subfiles, draw each, and sum the output into a single
histogram.
"""
self.reset()
output = None
while self._rollover():
if output is None:
# Make our own copy of the drawn histogram
output = self._tree.Draw(*args, **kwargs)
if output is not None:
output = output.Clone()
# Make it memory resident (histograms)
if hasattr(output, 'SetDirectory'):
output.SetDirectory(0)
else:
newoutput = self._tree.Draw(*args, **kwargs)
if newoutput is not None:
if isinstance(output, _GraphBase):
output.Append(newoutput)
else: # histogram
output += newoutput
return output
|
[
"def",
"Draw",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"reset",
"(",
")",
"output",
"=",
"None",
"while",
"self",
".",
"_rollover",
"(",
")",
":",
"if",
"output",
"is",
"None",
":",
"# Make our own copy of the drawn histogram",
"output",
"=",
"self",
".",
"_tree",
".",
"Draw",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"output",
"is",
"not",
"None",
":",
"output",
"=",
"output",
".",
"Clone",
"(",
")",
"# Make it memory resident (histograms)",
"if",
"hasattr",
"(",
"output",
",",
"'SetDirectory'",
")",
":",
"output",
".",
"SetDirectory",
"(",
"0",
")",
"else",
":",
"newoutput",
"=",
"self",
".",
"_tree",
".",
"Draw",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"newoutput",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"output",
",",
"_GraphBase",
")",
":",
"output",
".",
"Append",
"(",
"newoutput",
")",
"else",
":",
"# histogram",
"output",
"+=",
"newoutput",
"return",
"output"
] |
Loop over subfiles, draw each, and sum the output into a single
histogram.
|
[
"Loop",
"over",
"subfiles",
"draw",
"each",
"and",
"sum",
"the",
"output",
"into",
"a",
"single",
"histogram",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/chain.py#L105-L128
|
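The clone-then-accumulate pattern in `Draw` is the same one used when merging per-file histograms by hand. A hedged sketch with plain rootpy `Hist` objects; the per-file histograms are toy stand-ins for the result of drawing each subfile.

from rootpy.plotting import Hist

# Toy stand-ins for "one drawn histogram per subfile"
per_file = [Hist(10, 0, 1) for _ in range(3)]
for i, h in enumerate(per_file):
    h.Fill(0.1 * (i + 1))

total = None
for h in per_file:
    if total is None:
        total = h.Clone()      # own copy, as Draw() does for the first subfile
        total.SetDirectory(0)  # keep the copy memory-resident
    else:
        total += h             # accumulate the remaining subfiles
print(total.Integral())        # 3.0 for the toy fills above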
14,036
|
rootpy/rootpy
|
rootpy/interactive/console.py
|
interact_plain
|
def interact_plain(header=UP_LINE, local_ns=None,
module=None, dummy=None,
stack_depth=1, global_ns=None):
"""
Create an interactive python console
"""
frame = sys._getframe(stack_depth)
variables = {}
if local_ns is not None:
variables.update(local_ns)
else:
variables.update(frame.f_locals)
if global_ns is not None:
        variables.update(global_ns)
else:
variables.update(frame.f_globals)
shell = code.InteractiveConsole(variables)
return shell.interact(banner=header)
|
python
|
def interact_plain(header=UP_LINE, local_ns=None,
module=None, dummy=None,
stack_depth=1, global_ns=None):
"""
Create an interactive python console
"""
frame = sys._getframe(stack_depth)
variables = {}
if local_ns is not None:
variables.update(local_ns)
else:
variables.update(frame.f_locals)
if global_ns is not None:
        variables.update(global_ns)
else:
variables.update(frame.f_globals)
shell = code.InteractiveConsole(variables)
return shell.interact(banner=header)
|
[
"def",
"interact_plain",
"(",
"header",
"=",
"UP_LINE",
",",
"local_ns",
"=",
"None",
",",
"module",
"=",
"None",
",",
"dummy",
"=",
"None",
",",
"stack_depth",
"=",
"1",
",",
"global_ns",
"=",
"None",
")",
":",
"frame",
"=",
"sys",
".",
"_getframe",
"(",
"stack_depth",
")",
"variables",
"=",
"{",
"}",
"if",
"local_ns",
"is",
"not",
"None",
":",
"variables",
".",
"update",
"(",
"local_ns",
")",
"else",
":",
"variables",
".",
"update",
"(",
"frame",
".",
"f_locals",
")",
"if",
"global_ns",
"is",
"not",
"None",
":",
"variables",
".",
"update",
"(",
"local_ns",
")",
"else",
":",
"variables",
".",
"update",
"(",
"frame",
".",
"f_globals",
")",
"shell",
"=",
"code",
".",
"InteractiveConsole",
"(",
"variables",
")",
"return",
"shell",
".",
"interact",
"(",
"banner",
"=",
"header",
")"
] |
Create an interactive python console
|
[
"Create",
"an",
"interactive",
"python",
"console"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/console.py#L33-L54
|
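`interact_plain` is a thin wrapper around the standard-library `code.InteractiveConsole`. A standalone sketch of the same idea, assuming only the standard library; the function names here are illustrative.

import code
import sys

def plain_console(banner='console', stack_depth=1):
    # Expose the caller's globals and locals at the prompt, as interact_plain does.
    frame = sys._getframe(stack_depth)
    namespace = {}
    namespace.update(frame.f_globals)
    namespace.update(frame.f_locals)
    code.InteractiveConsole(namespace).interact(banner=banner)

def example():
    x = 42  # visible as ``x`` inside the console
    plain_console(banner='type x to inspect the local; Ctrl-D to exit')

# example()  # uncomment to drop into an interactive console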
14,037
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
hist
|
def hist(hists,
stacked=True,
reverse=False,
xpadding=0, ypadding=.1,
yerror_in_padding=True,
logy=None,
snap=True,
axes=None,
**kwargs):
"""
Make a matplotlib hist plot from a ROOT histogram, stack or
list of histograms.
Parameters
----------
hists : Hist, list of Hist, HistStack
The histogram(s) to be plotted
stacked : bool, optional (default=True)
If True then stack the histograms with the first histogram on the
bottom, otherwise overlay them with the first histogram in the
background.
reverse : bool, optional (default=False)
If True then reverse the order of the stack or overlay.
xpadding : float or 2-tuple of floats, optional (default=0)
Padding to add on the left and right sides of the plot as a fraction of
the axes width after the padding has been added. Specify unique left
and right padding with a 2-tuple.
ypadding : float or 2-tuple of floats, optional (default=.1)
Padding to add on the top and bottom of the plot as a fraction of
the axes height after the padding has been added. Specify unique top
and bottom padding with a 2-tuple.
yerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the y errors otherwise
only pad around the y values.
logy : bool, optional (default=None)
Apply special treatment of a log-scale y-axis to display the histogram
correctly. If None (the default) then automatically determine if the
y-axis is log-scale.
snap : bool, optional (default=True)
If True (the default) then the origin is an implicit lower bound of the
histogram unless the histogram has both positive and negative bins.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
All additional keyword arguments are passed to matplotlib's
fill_between for the filled regions and matplotlib's step function
for the edges.
Returns
-------
The return value from matplotlib's hist function, or list of such return
values if a stack or list of histograms was plotted.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
curr_xlim = axes.get_xlim()
curr_ylim = axes.get_ylim()
was_empty = not axes.has_data()
returns = []
if isinstance(hists, _Hist):
# This is a single plottable object.
returns = _hist(hists, axes=axes, logy=logy, **kwargs)
_set_bounds(hists, axes=axes,
was_empty=was_empty,
prev_xlim=curr_xlim,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
yerror_in_padding=yerror_in_padding,
snap=snap,
logy=logy)
elif stacked:
# draw the top histogram first so its edges don't cover the histograms
# beneath it in the stack
if not reverse:
hists = list(hists)[::-1]
for i, h in enumerate(hists):
kwargs_local = kwargs.copy()
if i == len(hists) - 1:
low = h.Clone()
low.Reset()
else:
low = sum(hists[i + 1:])
high = h + low
high.alpha = getattr(h, 'alpha', None)
proxy = _hist(high, bottom=low, axes=axes, logy=logy, **kwargs)
returns.append(proxy)
if not reverse:
returns = returns[::-1]
_set_bounds(sum(hists), axes=axes,
was_empty=was_empty,
prev_xlim=curr_xlim,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
yerror_in_padding=yerror_in_padding,
snap=snap,
logy=logy)
else:
for h in _maybe_reversed(hists, reverse):
returns.append(_hist(h, axes=axes, logy=logy, **kwargs))
if reverse:
returns = returns[::-1]
_set_bounds(hists[max(range(len(hists)), key=lambda idx: hists[idx].max())],
axes=axes,
was_empty=was_empty,
prev_xlim=curr_xlim,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
yerror_in_padding=yerror_in_padding,
snap=snap,
logy=logy)
return returns
|
python
|
def hist(hists,
stacked=True,
reverse=False,
xpadding=0, ypadding=.1,
yerror_in_padding=True,
logy=None,
snap=True,
axes=None,
**kwargs):
"""
Make a matplotlib hist plot from a ROOT histogram, stack or
list of histograms.
Parameters
----------
hists : Hist, list of Hist, HistStack
The histogram(s) to be plotted
stacked : bool, optional (default=True)
If True then stack the histograms with the first histogram on the
bottom, otherwise overlay them with the first histogram in the
background.
reverse : bool, optional (default=False)
If True then reverse the order of the stack or overlay.
xpadding : float or 2-tuple of floats, optional (default=0)
Padding to add on the left and right sides of the plot as a fraction of
the axes width after the padding has been added. Specify unique left
and right padding with a 2-tuple.
ypadding : float or 2-tuple of floats, optional (default=.1)
Padding to add on the top and bottom of the plot as a fraction of
the axes height after the padding has been added. Specify unique top
and bottom padding with a 2-tuple.
yerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the y errors otherwise
only pad around the y values.
logy : bool, optional (default=None)
Apply special treatment of a log-scale y-axis to display the histogram
correctly. If None (the default) then automatically determine if the
y-axis is log-scale.
snap : bool, optional (default=True)
If True (the default) then the origin is an implicit lower bound of the
histogram unless the histogram has both positive and negative bins.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
All additional keyword arguments are passed to matplotlib's
fill_between for the filled regions and matplotlib's step function
for the edges.
Returns
-------
The return value from matplotlib's hist function, or list of such return
values if a stack or list of histograms was plotted.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
curr_xlim = axes.get_xlim()
curr_ylim = axes.get_ylim()
was_empty = not axes.has_data()
returns = []
if isinstance(hists, _Hist):
# This is a single plottable object.
returns = _hist(hists, axes=axes, logy=logy, **kwargs)
_set_bounds(hists, axes=axes,
was_empty=was_empty,
prev_xlim=curr_xlim,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
yerror_in_padding=yerror_in_padding,
snap=snap,
logy=logy)
elif stacked:
# draw the top histogram first so its edges don't cover the histograms
# beneath it in the stack
if not reverse:
hists = list(hists)[::-1]
for i, h in enumerate(hists):
kwargs_local = kwargs.copy()
if i == len(hists) - 1:
low = h.Clone()
low.Reset()
else:
low = sum(hists[i + 1:])
high = h + low
high.alpha = getattr(h, 'alpha', None)
proxy = _hist(high, bottom=low, axes=axes, logy=logy, **kwargs)
returns.append(proxy)
if not reverse:
returns = returns[::-1]
_set_bounds(sum(hists), axes=axes,
was_empty=was_empty,
prev_xlim=curr_xlim,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
yerror_in_padding=yerror_in_padding,
snap=snap,
logy=logy)
else:
for h in _maybe_reversed(hists, reverse):
returns.append(_hist(h, axes=axes, logy=logy, **kwargs))
if reverse:
returns = returns[::-1]
_set_bounds(hists[max(range(len(hists)), key=lambda idx: hists[idx].max())],
axes=axes,
was_empty=was_empty,
prev_xlim=curr_xlim,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
yerror_in_padding=yerror_in_padding,
snap=snap,
logy=logy)
return returns
|
[
"def",
"hist",
"(",
"hists",
",",
"stacked",
"=",
"True",
",",
"reverse",
"=",
"False",
",",
"xpadding",
"=",
"0",
",",
"ypadding",
"=",
".1",
",",
"yerror_in_padding",
"=",
"True",
",",
"logy",
"=",
"None",
",",
"snap",
"=",
"True",
",",
"axes",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"if",
"logy",
"is",
"None",
":",
"logy",
"=",
"axes",
".",
"get_yscale",
"(",
")",
"==",
"'log'",
"curr_xlim",
"=",
"axes",
".",
"get_xlim",
"(",
")",
"curr_ylim",
"=",
"axes",
".",
"get_ylim",
"(",
")",
"was_empty",
"=",
"not",
"axes",
".",
"has_data",
"(",
")",
"returns",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"hists",
",",
"_Hist",
")",
":",
"# This is a single plottable object.",
"returns",
"=",
"_hist",
"(",
"hists",
",",
"axes",
"=",
"axes",
",",
"logy",
"=",
"logy",
",",
"*",
"*",
"kwargs",
")",
"_set_bounds",
"(",
"hists",
",",
"axes",
"=",
"axes",
",",
"was_empty",
"=",
"was_empty",
",",
"prev_xlim",
"=",
"curr_xlim",
",",
"prev_ylim",
"=",
"curr_ylim",
",",
"xpadding",
"=",
"xpadding",
",",
"ypadding",
"=",
"ypadding",
",",
"yerror_in_padding",
"=",
"yerror_in_padding",
",",
"snap",
"=",
"snap",
",",
"logy",
"=",
"logy",
")",
"elif",
"stacked",
":",
"# draw the top histogram first so its edges don't cover the histograms",
"# beneath it in the stack",
"if",
"not",
"reverse",
":",
"hists",
"=",
"list",
"(",
"hists",
")",
"[",
":",
":",
"-",
"1",
"]",
"for",
"i",
",",
"h",
"in",
"enumerate",
"(",
"hists",
")",
":",
"kwargs_local",
"=",
"kwargs",
".",
"copy",
"(",
")",
"if",
"i",
"==",
"len",
"(",
"hists",
")",
"-",
"1",
":",
"low",
"=",
"h",
".",
"Clone",
"(",
")",
"low",
".",
"Reset",
"(",
")",
"else",
":",
"low",
"=",
"sum",
"(",
"hists",
"[",
"i",
"+",
"1",
":",
"]",
")",
"high",
"=",
"h",
"+",
"low",
"high",
".",
"alpha",
"=",
"getattr",
"(",
"h",
",",
"'alpha'",
",",
"None",
")",
"proxy",
"=",
"_hist",
"(",
"high",
",",
"bottom",
"=",
"low",
",",
"axes",
"=",
"axes",
",",
"logy",
"=",
"logy",
",",
"*",
"*",
"kwargs",
")",
"returns",
".",
"append",
"(",
"proxy",
")",
"if",
"not",
"reverse",
":",
"returns",
"=",
"returns",
"[",
":",
":",
"-",
"1",
"]",
"_set_bounds",
"(",
"sum",
"(",
"hists",
")",
",",
"axes",
"=",
"axes",
",",
"was_empty",
"=",
"was_empty",
",",
"prev_xlim",
"=",
"curr_xlim",
",",
"prev_ylim",
"=",
"curr_ylim",
",",
"xpadding",
"=",
"xpadding",
",",
"ypadding",
"=",
"ypadding",
",",
"yerror_in_padding",
"=",
"yerror_in_padding",
",",
"snap",
"=",
"snap",
",",
"logy",
"=",
"logy",
")",
"else",
":",
"for",
"h",
"in",
"_maybe_reversed",
"(",
"hists",
",",
"reverse",
")",
":",
"returns",
".",
"append",
"(",
"_hist",
"(",
"h",
",",
"axes",
"=",
"axes",
",",
"logy",
"=",
"logy",
",",
"*",
"*",
"kwargs",
")",
")",
"if",
"reverse",
":",
"returns",
"=",
"returns",
"[",
":",
":",
"-",
"1",
"]",
"_set_bounds",
"(",
"hists",
"[",
"max",
"(",
"range",
"(",
"len",
"(",
"hists",
")",
")",
",",
"key",
"=",
"lambda",
"idx",
":",
"hists",
"[",
"idx",
"]",
".",
"max",
"(",
")",
")",
"]",
",",
"axes",
"=",
"axes",
",",
"was_empty",
"=",
"was_empty",
",",
"prev_xlim",
"=",
"curr_xlim",
",",
"prev_ylim",
"=",
"curr_ylim",
",",
"xpadding",
"=",
"xpadding",
",",
"ypadding",
"=",
"ypadding",
",",
"yerror_in_padding",
"=",
"yerror_in_padding",
",",
"snap",
"=",
"snap",
",",
"logy",
"=",
"logy",
")",
"return",
"returns"
] |
Make a matplotlib hist plot from a ROOT histogram, stack or
list of histograms.
Parameters
----------
hists : Hist, list of Hist, HistStack
The histogram(s) to be plotted
stacked : bool, optional (default=True)
If True then stack the histograms with the first histogram on the
bottom, otherwise overlay them with the first histogram in the
background.
reverse : bool, optional (default=False)
If True then reverse the order of the stack or overlay.
xpadding : float or 2-tuple of floats, optional (default=0)
Padding to add on the left and right sides of the plot as a fraction of
the axes width after the padding has been added. Specify unique left
and right padding with a 2-tuple.
ypadding : float or 2-tuple of floats, optional (default=.1)
Padding to add on the top and bottom of the plot as a fraction of
the axes height after the padding has been added. Specify unique top
and bottom padding with a 2-tuple.
yerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the y errors otherwise
only pad around the y values.
logy : bool, optional (default=None)
Apply special treatment of a log-scale y-axis to display the histogram
correctly. If None (the default) then automatically determine if the
y-axis is log-scale.
snap : bool, optional (default=True)
If True (the default) then the origin is an implicit lower bound of the
histogram unless the histogram has both positive and negative bins.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
All additional keyword arguments are passed to matplotlib's
fill_between for the filled regions and matplotlib's step function
for the edges.
Returns
-------
The return value from matplotlib's hist function, or list of such return
values if a stack or list of histograms was plotted.
|
[
"Make",
"a",
"matplotlib",
"hist",
"plot",
"from",
"a",
"ROOT",
"histogram",
"stack",
"or",
"list",
"of",
"histograms",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L141-L265
|
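A hedged usage sketch of the `hist()` wrapper above. It assumes the usual `root2matplotlib` import convention and ROOT's built-in `gaus` formula for `FillRandom`; the toy histograms are illustrative.

import matplotlib.pyplot as plt
from rootpy.plotting import Hist
from rootpy.plotting import root2matplotlib as rplt

h1 = Hist(50, -3, 3)
h2 = Hist(50, -3, 3)
h1.FillRandom('gaus', 2000)
h2.FillRandom('gaus', 1000)

fig, ax = plt.subplots()
rplt.hist([h1, h2], stacked=True, axes=ax)  # h1 on the bottom, h2 stacked on top
ax.set_xlabel('x')
plt.show()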
14,038
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
errorbar
|
def errorbar(hists,
xerr=True, yerr=True,
xpadding=0, ypadding=.1,
xerror_in_padding=True,
yerror_in_padding=True,
emptybins=True,
snap=True,
axes=None,
**kwargs):
"""
Make a matplotlib errorbar plot from a ROOT histogram or graph
or list of histograms and graphs.
Parameters
----------
hists : Hist, Graph or list of Hist and Graph
The histogram(s) and/or Graph(s) to be plotted
xerr : bool, optional (default=True)
If True, x error bars will be displayed.
yerr : bool or string, optional (default=True)
If False, no y errors are displayed. If True, an individual y
error will be displayed for each hist in the stack. If 'linear' or
'quadratic', a single error bar will be displayed with either the
linear or quadratic sum of the individual errors.
xpadding : float or 2-tuple of floats, optional (default=0)
Padding to add on the left and right sides of the plot as a fraction of
the axes width after the padding has been added. Specify unique left
and right padding with a 2-tuple.
ypadding : float or 2-tuple of floats, optional (default=.1)
Padding to add on the top and bottom of the plot as a fraction of
the axes height after the padding has been added. Specify unique top
and bottom padding with a 2-tuple.
xerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the x errors otherwise
only pad around the x values.
yerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the y errors otherwise
only pad around the y values.
emptybins : bool, optional (default=True)
If True (the default) then plot bins with zero content otherwise only
show bins with nonzero content.
snap : bool, optional (default=True)
If True (the default) then the origin is an implicit lower bound of the
histogram unless the histogram has both positive and negative bins.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
All additional keyword arguments are passed to matplotlib's errorbar
function.
Returns
-------
The return value from matplotlib's errorbar function, or list of such
return values if a list of histograms and/or graphs was plotted.
"""
if axes is None:
axes = plt.gca()
curr_xlim = axes.get_xlim()
curr_ylim = axes.get_ylim()
was_empty = not axes.has_data()
if isinstance(hists, (_Hist, _Graph1DBase)):
# This is a single plottable object.
returns = _errorbar(
hists, xerr, yerr,
axes=axes, emptybins=emptybins, **kwargs)
_set_bounds(hists, axes=axes,
was_empty=was_empty,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
xerror_in_padding=xerror_in_padding,
yerror_in_padding=yerror_in_padding,
snap=snap)
else:
returns = []
for h in hists:
returns.append(errorbar(
h, xerr=xerr, yerr=yerr, axes=axes,
xpadding=xpadding, ypadding=ypadding,
xerror_in_padding=xerror_in_padding,
yerror_in_padding=yerror_in_padding,
snap=snap,
emptybins=emptybins,
**kwargs))
return returns
|
python
|
def errorbar(hists,
xerr=True, yerr=True,
xpadding=0, ypadding=.1,
xerror_in_padding=True,
yerror_in_padding=True,
emptybins=True,
snap=True,
axes=None,
**kwargs):
"""
Make a matplotlib errorbar plot from a ROOT histogram or graph
or list of histograms and graphs.
Parameters
----------
hists : Hist, Graph or list of Hist and Graph
The histogram(s) and/or Graph(s) to be plotted
xerr : bool, optional (default=True)
If True, x error bars will be displayed.
yerr : bool or string, optional (default=True)
If False, no y errors are displayed. If True, an individual y
error will be displayed for each hist in the stack. If 'linear' or
'quadratic', a single error bar will be displayed with either the
linear or quadratic sum of the individual errors.
xpadding : float or 2-tuple of floats, optional (default=0)
Padding to add on the left and right sides of the plot as a fraction of
the axes width after the padding has been added. Specify unique left
and right padding with a 2-tuple.
ypadding : float or 2-tuple of floats, optional (default=.1)
Padding to add on the top and bottom of the plot as a fraction of
the axes height after the padding has been added. Specify unique top
and bottom padding with a 2-tuple.
xerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the x errors otherwise
only pad around the x values.
yerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the y errors otherwise
only pad around the y values.
emptybins : bool, optional (default=True)
If True (the default) then plot bins with zero content otherwise only
show bins with nonzero content.
snap : bool, optional (default=True)
If True (the default) then the origin is an implicit lower bound of the
histogram unless the histogram has both positive and negative bins.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
All additional keyword arguments are passed to matplotlib's errorbar
function.
Returns
-------
The return value from matplotlib's errorbar function, or list of such
return values if a list of histograms and/or graphs was plotted.
"""
if axes is None:
axes = plt.gca()
curr_xlim = axes.get_xlim()
curr_ylim = axes.get_ylim()
was_empty = not axes.has_data()
if isinstance(hists, (_Hist, _Graph1DBase)):
# This is a single plottable object.
returns = _errorbar(
hists, xerr, yerr,
axes=axes, emptybins=emptybins, **kwargs)
_set_bounds(hists, axes=axes,
was_empty=was_empty,
prev_ylim=curr_ylim,
xpadding=xpadding, ypadding=ypadding,
xerror_in_padding=xerror_in_padding,
yerror_in_padding=yerror_in_padding,
snap=snap)
else:
returns = []
for h in hists:
returns.append(errorbar(
h, xerr=xerr, yerr=yerr, axes=axes,
xpadding=xpadding, ypadding=ypadding,
xerror_in_padding=xerror_in_padding,
yerror_in_padding=yerror_in_padding,
snap=snap,
emptybins=emptybins,
**kwargs))
return returns
|
[
"def",
"errorbar",
"(",
"hists",
",",
"xerr",
"=",
"True",
",",
"yerr",
"=",
"True",
",",
"xpadding",
"=",
"0",
",",
"ypadding",
"=",
".1",
",",
"xerror_in_padding",
"=",
"True",
",",
"yerror_in_padding",
"=",
"True",
",",
"emptybins",
"=",
"True",
",",
"snap",
"=",
"True",
",",
"axes",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"curr_xlim",
"=",
"axes",
".",
"get_xlim",
"(",
")",
"curr_ylim",
"=",
"axes",
".",
"get_ylim",
"(",
")",
"was_empty",
"=",
"not",
"axes",
".",
"has_data",
"(",
")",
"if",
"isinstance",
"(",
"hists",
",",
"(",
"_Hist",
",",
"_Graph1DBase",
")",
")",
":",
"# This is a single plottable object.",
"returns",
"=",
"_errorbar",
"(",
"hists",
",",
"xerr",
",",
"yerr",
",",
"axes",
"=",
"axes",
",",
"emptybins",
"=",
"emptybins",
",",
"*",
"*",
"kwargs",
")",
"_set_bounds",
"(",
"hists",
",",
"axes",
"=",
"axes",
",",
"was_empty",
"=",
"was_empty",
",",
"prev_ylim",
"=",
"curr_ylim",
",",
"xpadding",
"=",
"xpadding",
",",
"ypadding",
"=",
"ypadding",
",",
"xerror_in_padding",
"=",
"xerror_in_padding",
",",
"yerror_in_padding",
"=",
"yerror_in_padding",
",",
"snap",
"=",
"snap",
")",
"else",
":",
"returns",
"=",
"[",
"]",
"for",
"h",
"in",
"hists",
":",
"returns",
".",
"append",
"(",
"errorbar",
"(",
"h",
",",
"xerr",
"=",
"xerr",
",",
"yerr",
"=",
"yerr",
",",
"axes",
"=",
"axes",
",",
"xpadding",
"=",
"xpadding",
",",
"ypadding",
"=",
"ypadding",
",",
"xerror_in_padding",
"=",
"xerror_in_padding",
",",
"yerror_in_padding",
"=",
"yerror_in_padding",
",",
"snap",
"=",
"snap",
",",
"emptybins",
"=",
"emptybins",
",",
"*",
"*",
"kwargs",
")",
")",
"return",
"returns"
] |
Make a matplotlib errorbar plot from a ROOT histogram or graph
or list of histograms and graphs.
Parameters
----------
hists : Hist, Graph or list of Hist and Graph
The histogram(s) and/or Graph(s) to be plotted
xerr : bool, optional (default=True)
If True, x error bars will be displayed.
yerr : bool or string, optional (default=True)
If False, no y errors are displayed. If True, an individual y
error will be displayed for each hist in the stack. If 'linear' or
'quadratic', a single error bar will be displayed with either the
linear or quadratic sum of the individual errors.
xpadding : float or 2-tuple of floats, optional (default=0)
Padding to add on the left and right sides of the plot as a fraction of
the axes width after the padding has been added. Specify unique left
and right padding with a 2-tuple.
ypadding : float or 2-tuple of floats, optional (default=.1)
Padding to add on the top and bottom of the plot as a fraction of
the axes height after the padding has been added. Specify unique top
and bottom padding with a 2-tuple.
xerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the x errors otherwise
only pad around the x values.
yerror_in_padding : bool, optional (default=True)
If True then make the padding inclusive of the y errors otherwise
only pad around the y values.
emptybins : bool, optional (default=True)
If True (the default) then plot bins with zero content otherwise only
show bins with nonzero content.
snap : bool, optional (default=True)
If True (the default) then the origin is an implicit lower bound of the
histogram unless the histogram has both positive and negative bins.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
All additional keyword arguments are passed to matplotlib's errorbar
function.
Returns
-------
The return value from matplotlib's errorbar function, or list of such
return values if a list of histograms and/or graphs was plotted.
|
[
"Make",
"a",
"matplotlib",
"errorbar",
"plot",
"from",
"a",
"ROOT",
"histogram",
"or",
"graph",
"or",
"list",
"of",
"histograms",
"and",
"graphs",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L481-L577
|
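A hedged usage sketch of the `errorbar()` wrapper above, under the same assumptions as the `hist()` example.

import matplotlib.pyplot as plt
from rootpy.plotting import Hist
from rootpy.plotting import root2matplotlib as rplt

h = Hist(30, -3, 3)
h.FillRandom('gaus', 5000)

fig, ax = plt.subplots()
# y error bars only; empty bins are still drawn because emptybins defaults to True
rplt.errorbar(h, xerr=False, yerr=True, axes=ax)
plt.show()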
14,039
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
step
|
def step(h, logy=None, axes=None, **kwargs):
"""
Make a matplotlib step plot from a ROOT histogram.
Parameters
----------
h : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the y range between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
        matplotlib's step function.
Returns
-------
    Returns the value from matplotlib's step function.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
_set_defaults(h, kwargs, ['common', 'line'])
if kwargs.get('color') is None:
kwargs['color'] = h.GetLineColor('mpl')
y = np.array(list(h.y()) + [0.])
if logy:
np.clip(y, 1E-300, 1E300, out=y)
return axes.step(list(h.xedges()), y, where='post', **kwargs)
|
python
|
def step(h, logy=None, axes=None, **kwargs):
"""
Make a matplotlib step plot from a ROOT histogram.
Parameters
----------
h : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the y range between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
        matplotlib's step function.
Returns
-------
    Returns the value from matplotlib's step function.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
_set_defaults(h, kwargs, ['common', 'line'])
if kwargs.get('color') is None:
kwargs['color'] = h.GetLineColor('mpl')
y = np.array(list(h.y()) + [0.])
if logy:
np.clip(y, 1E-300, 1E300, out=y)
return axes.step(list(h.xedges()), y, where='post', **kwargs)
|
[
"def",
"step",
"(",
"h",
",",
"logy",
"=",
"None",
",",
"axes",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"if",
"logy",
"is",
"None",
":",
"logy",
"=",
"axes",
".",
"get_yscale",
"(",
")",
"==",
"'log'",
"_set_defaults",
"(",
"h",
",",
"kwargs",
",",
"[",
"'common'",
",",
"'line'",
"]",
")",
"if",
"kwargs",
".",
"get",
"(",
"'color'",
")",
"is",
"None",
":",
"kwargs",
"[",
"'color'",
"]",
"=",
"h",
".",
"GetLineColor",
"(",
"'mpl'",
")",
"y",
"=",
"np",
".",
"array",
"(",
"list",
"(",
"h",
".",
"y",
"(",
")",
")",
"+",
"[",
"0.",
"]",
")",
"if",
"logy",
":",
"np",
".",
"clip",
"(",
"y",
",",
"1E-300",
",",
"1E300",
",",
"out",
"=",
"y",
")",
"return",
"axes",
".",
"step",
"(",
"list",
"(",
"h",
".",
"xedges",
"(",
")",
")",
",",
"y",
",",
"where",
"=",
"'post'",
",",
"*",
"*",
"kwargs",
")"
] |
Make a matplotlib step plot from a ROOT histogram.
Parameters
----------
h : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the y range between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
    matplotlib's step function.
Returns
-------
Returns the value from matplotlib's step function.
|
[
"Make",
"a",
"matplotlib",
"step",
"plot",
"from",
"a",
"ROOT",
"histogram",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L603-L641
|
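A hedged usage sketch of `step()`; on a log-scale axis the wrapper clips the y values as described in the docstring, so only a plain linear-scale call is shown here.

import matplotlib.pyplot as plt
from rootpy.plotting import Hist
from rootpy.plotting import root2matplotlib as rplt

h = Hist(40, -4, 4)
h.FillRandom('gaus', 10000)

fig, ax = plt.subplots()
rplt.step(h, axes=ax)  # with ax.set_yscale('log') beforehand, y would be clipped
ax.set_xlabel('x')
plt.show()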
14,040
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
fill_between
|
def fill_between(a, b, logy=None, axes=None, **kwargs):
"""
Fill the region between two histograms or graphs.
Parameters
----------
a : Hist
A rootpy Hist
b : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the region between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's fill_between function.
Returns
-------
Returns the value from matplotlib's fill_between function.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
if not isinstance(a, _Hist) or not isinstance(b, _Hist):
raise TypeError(
"fill_between only operates on 1D histograms")
a.check_compatibility(b, check_edges=True)
x = []
top = []
bottom = []
for abin, bbin in zip(a.bins(overflow=False), b.bins(overflow=False)):
up = max(abin.value, bbin.value)
dn = min(abin.value, bbin.value)
x.extend([abin.x.low, abin.x.high])
top.extend([up, up])
bottom.extend([dn, dn])
x = np.array(x)
top = np.array(top)
bottom = np.array(bottom)
if logy:
np.clip(top, 1E-300, 1E300, out=top)
np.clip(bottom, 1E-300, 1E300, out=bottom)
return axes.fill_between(x, top, bottom, **kwargs)
|
python
|
def fill_between(a, b, logy=None, axes=None, **kwargs):
"""
Fill the region between two histograms or graphs.
Parameters
----------
a : Hist
A rootpy Hist
b : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the region between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's fill_between function.
Returns
-------
Returns the value from matplotlib's fill_between function.
"""
if axes is None:
axes = plt.gca()
if logy is None:
logy = axes.get_yscale() == 'log'
if not isinstance(a, _Hist) or not isinstance(b, _Hist):
raise TypeError(
"fill_between only operates on 1D histograms")
a.check_compatibility(b, check_edges=True)
x = []
top = []
bottom = []
for abin, bbin in zip(a.bins(overflow=False), b.bins(overflow=False)):
up = max(abin.value, bbin.value)
dn = min(abin.value, bbin.value)
x.extend([abin.x.low, abin.x.high])
top.extend([up, up])
bottom.extend([dn, dn])
x = np.array(x)
top = np.array(top)
bottom = np.array(bottom)
if logy:
np.clip(top, 1E-300, 1E300, out=top)
np.clip(bottom, 1E-300, 1E300, out=bottom)
return axes.fill_between(x, top, bottom, **kwargs)
|
[
"def",
"fill_between",
"(",
"a",
",",
"b",
",",
"logy",
"=",
"None",
",",
"axes",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"if",
"logy",
"is",
"None",
":",
"logy",
"=",
"axes",
".",
"get_yscale",
"(",
")",
"==",
"'log'",
"if",
"not",
"isinstance",
"(",
"a",
",",
"_Hist",
")",
"or",
"not",
"isinstance",
"(",
"b",
",",
"_Hist",
")",
":",
"raise",
"TypeError",
"(",
"\"fill_between only operates on 1D histograms\"",
")",
"a",
".",
"check_compatibility",
"(",
"b",
",",
"check_edges",
"=",
"True",
")",
"x",
"=",
"[",
"]",
"top",
"=",
"[",
"]",
"bottom",
"=",
"[",
"]",
"for",
"abin",
",",
"bbin",
"in",
"zip",
"(",
"a",
".",
"bins",
"(",
"overflow",
"=",
"False",
")",
",",
"b",
".",
"bins",
"(",
"overflow",
"=",
"False",
")",
")",
":",
"up",
"=",
"max",
"(",
"abin",
".",
"value",
",",
"bbin",
".",
"value",
")",
"dn",
"=",
"min",
"(",
"abin",
".",
"value",
",",
"bbin",
".",
"value",
")",
"x",
".",
"extend",
"(",
"[",
"abin",
".",
"x",
".",
"low",
",",
"abin",
".",
"x",
".",
"high",
"]",
")",
"top",
".",
"extend",
"(",
"[",
"up",
",",
"up",
"]",
")",
"bottom",
".",
"extend",
"(",
"[",
"dn",
",",
"dn",
"]",
")",
"x",
"=",
"np",
".",
"array",
"(",
"x",
")",
"top",
"=",
"np",
".",
"array",
"(",
"top",
")",
"bottom",
"=",
"np",
".",
"array",
"(",
"bottom",
")",
"if",
"logy",
":",
"np",
".",
"clip",
"(",
"top",
",",
"1E-300",
",",
"1E300",
",",
"out",
"=",
"top",
")",
"np",
".",
"clip",
"(",
"bottom",
",",
"1E-300",
",",
"1E300",
",",
"out",
"=",
"bottom",
")",
"return",
"axes",
".",
"fill_between",
"(",
"x",
",",
"top",
",",
"bottom",
",",
"*",
"*",
"kwargs",
")"
] |
Fill the region between two histograms or graphs.
Parameters
----------
a : Hist
A rootpy Hist
b : Hist
A rootpy Hist
logy : bool, optional (default=None)
If True then clip the region between 1E-300 and 1E300.
If None (the default) then automatically determine if the axes are
log-scale and if this clipping should be performed.
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's fill_between function.
Returns
-------
Returns the value from matplotlib's fill_between function.
|
[
"Fill",
"the",
"region",
"between",
"two",
"histograms",
"or",
"graphs",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L644-L698
|
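A hedged usage sketch of `fill_between()`. The two histograms must share binning (enforced by `check_compatibility`), and extra keyword arguments such as `alpha` are forwarded to matplotlib.

import matplotlib.pyplot as plt
from rootpy.plotting import Hist
from rootpy.plotting import root2matplotlib as rplt

a = Hist(20, -3, 3)
b = Hist(20, -3, 3)
a.FillRandom('gaus', 4000)
b.FillRandom('gaus', 2000)

fig, ax = plt.subplots()
rplt.fill_between(a, b, axes=ax, alpha=0.3)  # shade the band between a and b
plt.show()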
14,041
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
hist2d
|
def hist2d(h, axes=None, colorbar=False, **kwargs):
"""
Draw a 2D matplotlib histogram plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
colorbar : Boolean, optional (default=False)
If True, include a colorbar in the produced plot
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's hist2d function.
Returns
-------
Returns the value from matplotlib's hist2d function.
"""
if axes is None:
axes = plt.gca()
X, Y = np.meshgrid(list(h.x()), list(h.y()))
x = X.ravel()
y = Y.ravel()
z = np.array(h.z()).T
# returns of hist2d: (counts, xedges, yedges, Image)
return_values = axes.hist2d(x, y, weights=z.ravel(),
bins=(list(h.xedges()), list(h.yedges())),
**kwargs)
if colorbar:
mappable = return_values[-1]
plt.colorbar(mappable, ax=axes)
return return_values
|
python
|
def hist2d(h, axes=None, colorbar=False, **kwargs):
"""
Draw a 2D matplotlib histogram plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
colorbar : Boolean, optional (default=False)
If True, include a colorbar in the produced plot
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's hist2d function.
Returns
-------
Returns the value from matplotlib's hist2d function.
"""
if axes is None:
axes = plt.gca()
X, Y = np.meshgrid(list(h.x()), list(h.y()))
x = X.ravel()
y = Y.ravel()
z = np.array(h.z()).T
# returns of hist2d: (counts, xedges, yedges, Image)
return_values = axes.hist2d(x, y, weights=z.ravel(),
bins=(list(h.xedges()), list(h.yedges())),
**kwargs)
if colorbar:
mappable = return_values[-1]
plt.colorbar(mappable, ax=axes)
return return_values
|
[
"def",
"hist2d",
"(",
"h",
",",
"axes",
"=",
"None",
",",
"colorbar",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"X",
",",
"Y",
"=",
"np",
".",
"meshgrid",
"(",
"list",
"(",
"h",
".",
"x",
"(",
")",
")",
",",
"list",
"(",
"h",
".",
"y",
"(",
")",
")",
")",
"x",
"=",
"X",
".",
"ravel",
"(",
")",
"y",
"=",
"Y",
".",
"ravel",
"(",
")",
"z",
"=",
"np",
".",
"array",
"(",
"h",
".",
"z",
"(",
")",
")",
".",
"T",
"# returns of hist2d: (counts, xedges, yedges, Image)",
"return_values",
"=",
"axes",
".",
"hist2d",
"(",
"x",
",",
"y",
",",
"weights",
"=",
"z",
".",
"ravel",
"(",
")",
",",
"bins",
"=",
"(",
"list",
"(",
"h",
".",
"xedges",
"(",
")",
")",
",",
"list",
"(",
"h",
".",
"yedges",
"(",
")",
")",
")",
",",
"*",
"*",
"kwargs",
")",
"if",
"colorbar",
":",
"mappable",
"=",
"return_values",
"[",
"-",
"1",
"]",
"plt",
".",
"colorbar",
"(",
"mappable",
",",
"ax",
"=",
"axes",
")",
"return",
"return_values"
] |
Draw a 2D matplotlib histogram plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
colorbar : Boolean, optional (default=False)
If True, include a colorbar in the produced plot
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's hist2d function.
Returns
-------
Returns the value from matplotlib's hist2d function.
|
[
"Draw",
"a",
"2D",
"matplotlib",
"histogram",
"plot",
"from",
"a",
"2D",
"ROOT",
"histogram",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L701-L740
|
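A hedged usage sketch of `hist2d()`. The 2D histogram is filled with a toy Gaussian via plain `Fill` calls so no particular ROOT formula is assumed; the return value is the (counts, xedges, yedges, image) tuple noted in the code.

import random
import matplotlib.pyplot as plt
from rootpy.plotting import Hist2D
from rootpy.plotting import root2matplotlib as rplt

h = Hist2D(20, -3, 3, 20, -3, 3)
for _ in range(5000):
    h.Fill(random.gauss(0, 1), random.gauss(0, 1))

fig, ax = plt.subplots()
counts, xedges, yedges, image = rplt.hist2d(h, axes=ax, colorbar=True)
plt.show()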
14,042
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
imshow
|
def imshow(h, axes=None, colorbar=False, **kwargs):
"""
Draw a matplotlib imshow plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
colorbar : Boolean, optional (default=False)
If True, include a colorbar in the produced plot
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's imshow function.
Returns
-------
Returns the value from matplotlib's imshow function.
"""
kwargs.setdefault('aspect', 'auto')
if axes is None:
axes = plt.gca()
z = np.array(h.z()).T
    axis_image = axes.imshow(
z,
extent=[
h.xedges(1), h.xedges(h.nbins(0) + 1),
h.yedges(1), h.yedges(h.nbins(1) + 1)],
interpolation='nearest',
origin='lower',
**kwargs)
if colorbar:
plt.colorbar(axis_image, ax=axes)
return axis_image
|
python
|
def imshow(h, axes=None, colorbar=False, **kwargs):
"""
Draw a matplotlib imshow plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
colorbar : Boolean, optional (default=False)
If True, include a colorbar in the produced plot
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's imshow function.
Returns
-------
Returns the value from matplotlib's imshow function.
"""
kwargs.setdefault('aspect', 'auto')
if axes is None:
axes = plt.gca()
z = np.array(h.z()).T
    axis_image = axes.imshow(
z,
extent=[
h.xedges(1), h.xedges(h.nbins(0) + 1),
h.yedges(1), h.yedges(h.nbins(1) + 1)],
interpolation='nearest',
origin='lower',
**kwargs)
if colorbar:
plt.colorbar(axis_image, ax=axes)
return axis_image
|
[
"def",
"imshow",
"(",
"h",
",",
"axes",
"=",
"None",
",",
"colorbar",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"'aspect'",
",",
"'auto'",
")",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"z",
"=",
"np",
".",
"array",
"(",
"h",
".",
"z",
"(",
")",
")",
".",
"T",
"axis_image",
"=",
"axes",
".",
"imshow",
"(",
"z",
",",
"extent",
"=",
"[",
"h",
".",
"xedges",
"(",
"1",
")",
",",
"h",
".",
"xedges",
"(",
"h",
".",
"nbins",
"(",
"0",
")",
"+",
"1",
")",
",",
"h",
".",
"yedges",
"(",
"1",
")",
",",
"h",
".",
"yedges",
"(",
"h",
".",
"nbins",
"(",
"1",
")",
"+",
"1",
")",
"]",
",",
"interpolation",
"=",
"'nearest'",
",",
"origin",
"=",
"'lower'",
",",
"*",
"*",
"kwargs",
")",
"if",
"colorbar",
":",
"plt",
".",
"colorbar",
"(",
"axis_image",
",",
"ax",
"=",
"axes",
")",
"return",
"axis_image"
] |
Draw a matplotlib imshow plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
colorbar : Boolean, optional (default=False)
If True, include a colorbar in the produced plot
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's imshow function.
Returns
-------
Returns the value from matplotlib's imshow function.
|
[
"Draw",
"a",
"matplotlib",
"imshow",
"plot",
"from",
"a",
"2D",
"ROOT",
"histogram",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L743-L785
|
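A hedged usage sketch of `imshow()`, reusing the toy 2D histogram pattern from the `hist2d` example; `aspect='auto'` is applied by default.

import random
import matplotlib.pyplot as plt
from rootpy.plotting import Hist2D
from rootpy.plotting import root2matplotlib as rplt

h = Hist2D(30, -3, 3, 30, -3, 3)
for _ in range(10000):
    h.Fill(random.gauss(0, 1), random.gauss(0, 1))

fig, ax = plt.subplots()
image = rplt.imshow(h, axes=ax, colorbar=True)  # returns the AxesImage
plt.show()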
14,043
|
rootpy/rootpy
|
rootpy/plotting/root2matplotlib.py
|
contour
|
def contour(h, axes=None, zoom=None, label_contour=False, **kwargs):
"""
Draw a matplotlib contour plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
zoom : float or sequence, optional (default=None)
The zoom factor along the axes. If a float, zoom is the same for each
axis. If a sequence, zoom should contain one value for each axis.
The histogram is zoomed using a cubic spline interpolation to create
smooth contours.
label_contour : Boolean, optional (default=False)
If True, labels are printed on the contour lines.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's contour function.
Returns
-------
Returns the value from matplotlib's contour function.
"""
if axes is None:
axes = plt.gca()
x = np.array(list(h.x()))
y = np.array(list(h.y()))
z = np.array(h.z()).T
if zoom is not None:
from scipy import ndimage
if hasattr(zoom, '__iter__'):
zoom = list(zoom)
x = ndimage.zoom(x, zoom[0])
y = ndimage.zoom(y, zoom[1])
else:
x = ndimage.zoom(x, zoom)
y = ndimage.zoom(y, zoom)
z = ndimage.zoom(z, zoom)
return_values = axes.contour(x, y, z, **kwargs)
if label_contour:
plt.clabel(return_values)
return return_values
|
python
|
def contour(h, axes=None, zoom=None, label_contour=False, **kwargs):
"""
Draw a matplotlib contour plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
zoom : float or sequence, optional (default=None)
The zoom factor along the axes. If a float, zoom is the same for each
axis. If a sequence, zoom should contain one value for each axis.
The histogram is zoomed using a cubic spline interpolation to create
smooth contours.
label_contour : Boolean, optional (default=False)
If True, labels are printed on the contour lines.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's contour function.
Returns
-------
Returns the value from matplotlib's contour function.
"""
if axes is None:
axes = plt.gca()
x = np.array(list(h.x()))
y = np.array(list(h.y()))
z = np.array(h.z()).T
if zoom is not None:
from scipy import ndimage
if hasattr(zoom, '__iter__'):
zoom = list(zoom)
x = ndimage.zoom(x, zoom[0])
y = ndimage.zoom(y, zoom[1])
else:
x = ndimage.zoom(x, zoom)
y = ndimage.zoom(y, zoom)
z = ndimage.zoom(z, zoom)
return_values = axes.contour(x, y, z, **kwargs)
if label_contour:
plt.clabel(return_values)
return return_values
|
[
"def",
"contour",
"(",
"h",
",",
"axes",
"=",
"None",
",",
"zoom",
"=",
"None",
",",
"label_contour",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"axes",
"is",
"None",
":",
"axes",
"=",
"plt",
".",
"gca",
"(",
")",
"x",
"=",
"np",
".",
"array",
"(",
"list",
"(",
"h",
".",
"x",
"(",
")",
")",
")",
"y",
"=",
"np",
".",
"array",
"(",
"list",
"(",
"h",
".",
"y",
"(",
")",
")",
")",
"z",
"=",
"np",
".",
"array",
"(",
"h",
".",
"z",
"(",
")",
")",
".",
"T",
"if",
"zoom",
"is",
"not",
"None",
":",
"from",
"scipy",
"import",
"ndimage",
"if",
"hasattr",
"(",
"zoom",
",",
"'__iter__'",
")",
":",
"zoom",
"=",
"list",
"(",
"zoom",
")",
"x",
"=",
"ndimage",
".",
"zoom",
"(",
"x",
",",
"zoom",
"[",
"0",
"]",
")",
"y",
"=",
"ndimage",
".",
"zoom",
"(",
"y",
",",
"zoom",
"[",
"1",
"]",
")",
"else",
":",
"x",
"=",
"ndimage",
".",
"zoom",
"(",
"x",
",",
"zoom",
")",
"y",
"=",
"ndimage",
".",
"zoom",
"(",
"y",
",",
"zoom",
")",
"z",
"=",
"ndimage",
".",
"zoom",
"(",
"z",
",",
"zoom",
")",
"return_values",
"=",
"axes",
".",
"contour",
"(",
"x",
",",
"y",
",",
"z",
",",
"*",
"*",
"kwargs",
")",
"if",
"label_contour",
":",
"plt",
".",
"clabel",
"(",
"return_values",
")",
"return",
"return_values"
] |
Draw a matplotlib contour plot from a 2D ROOT histogram.
Parameters
----------
h : Hist2D
A rootpy Hist2D
axes : matplotlib Axes instance, optional (default=None)
The axes to plot on. If None then use the global current axes.
zoom : float or sequence, optional (default=None)
The zoom factor along the axes. If a float, zoom is the same for each
axis. If a sequence, zoom should contain one value for each axis.
The histogram is zoomed using a cubic spline interpolation to create
smooth contours.
label_contour : Boolean, optional (default=False)
If True, labels are printed on the contour lines.
kwargs : additional keyword arguments, optional
Additional keyword arguments are passed directly to
matplotlib's contour function.
Returns
-------
Returns the value from matplotlib's contour function.
|
[
"Draw",
"a",
"matplotlib",
"contour",
"plot",
"from",
"a",
"2D",
"ROOT",
"histogram",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/root2matplotlib.py#L788-L838
|
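A hedged usage sketch of `contour()`. A scalar `zoom` requires scipy and spline-interpolates the bin grid before contouring, as described above; `label_contour=True` prints the contour values on the lines.

import random
import matplotlib.pyplot as plt
from rootpy.plotting import Hist2D
from rootpy.plotting import root2matplotlib as rplt

h = Hist2D(25, -3, 3, 25, -3, 3)
for _ in range(20000):
    h.Fill(random.gauss(0, 1), random.gauss(0, 1))

fig, ax = plt.subplots()
rplt.contour(h, axes=ax, zoom=4, label_contour=True)  # zoom needs scipy.ndimage
plt.show()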
14,044
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree._post_init
|
def _post_init(self):
"""
The standard rootpy _post_init method that is used to initialize both
new Trees and Trees retrieved from a File.
"""
if not hasattr(self, '_buffer'):
# only set _buffer if model was not specified in the __init__
self._buffer = TreeBuffer()
self.read_branches_on_demand = False
self._branch_cache = {}
self._current_entry = 0
self._always_read = []
self.userdata = UserData()
self._inited = True
|
python
|
def _post_init(self):
"""
The standard rootpy _post_init method that is used to initialize both
new Trees and Trees retrieved from a File.
"""
if not hasattr(self, '_buffer'):
# only set _buffer if model was not specified in the __init__
self._buffer = TreeBuffer()
self.read_branches_on_demand = False
self._branch_cache = {}
self._current_entry = 0
self._always_read = []
self.userdata = UserData()
self._inited = True
|
[
"def",
"_post_init",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_buffer'",
")",
":",
"# only set _buffer if model was not specified in the __init__",
"self",
".",
"_buffer",
"=",
"TreeBuffer",
"(",
")",
"self",
".",
"read_branches_on_demand",
"=",
"False",
"self",
".",
"_branch_cache",
"=",
"{",
"}",
"self",
".",
"_current_entry",
"=",
"0",
"self",
".",
"_always_read",
"=",
"[",
"]",
"self",
".",
"userdata",
"=",
"UserData",
"(",
")",
"self",
".",
"_inited",
"=",
"True"
] |
The standard rootpy _post_init method that is used to initialize both
new Trees and Trees retrieved from a File.
|
[
"The",
"standard",
"rootpy",
"_post_init",
"method",
"that",
"is",
"used",
"to",
"initialize",
"both",
"new",
"Trees",
"and",
"Trees",
"retrieved",
"from",
"a",
"File",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L49-L62
|
14,045
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.always_read
|
def always_read(self, branches):
"""
Always read these branches, even when in caching mode. Maybe you have
caching enabled and there are branches you want to be updated for each
entry even though you never access them directly. This is useful if you
are iterating over an input tree and writing to an output tree sharing
the same TreeBuffer and you want a direct copy of certain branches. If
you have caching enabled but these branches are not specified here and
never accessed then they will never be read from disk, so the values of
branches in memory will remain unchanged.
Parameters
----------
branches : list, tuple
these branches will always be read from disk for every GetEntry
"""
if type(branches) not in (list, tuple):
raise TypeError("branches must be a list or tuple")
self._always_read = branches
|
python
|
def always_read(self, branches):
"""
Always read these branches, even when in caching mode. Maybe you have
caching enabled and there are branches you want to be updated for each
entry even though you never access them directly. This is useful if you
are iterating over an input tree and writing to an output tree sharing
the same TreeBuffer and you want a direct copy of certain branches. If
you have caching enabled but these branches are not specified here and
never accessed then they will never be read from disk, so the values of
branches in memory will remain unchanged.
Parameters
----------
branches : list, tuple
these branches will always be read from disk for every GetEntry
"""
if type(branches) not in (list, tuple):
raise TypeError("branches must be a list or tuple")
self._always_read = branches
|
[
"def",
"always_read",
"(",
"self",
",",
"branches",
")",
":",
"if",
"type",
"(",
"branches",
")",
"not",
"in",
"(",
"list",
",",
"tuple",
")",
":",
"raise",
"TypeError",
"(",
"\"branches must be a list or tuple\"",
")",
"self",
".",
"_always_read",
"=",
"branches"
] |
Always read these branches, even when in caching mode. Maybe you have
caching enabled and there are branches you want to be updated for each
entry even though you never access them directly. This is useful if you
are iterating over an input tree and writing to an output tree sharing
the same TreeBuffer and you want a direct copy of certain branches. If
you have caching enabled but these branches are not specified here and
never accessed then they will never be read from disk, so the values of
branches in memory will remain unchanged.
Parameters
----------
branches : list, tuple
these branches will always be read from disk for every GetEntry
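A minimal sketch of the copy pattern described above, assuming an existing Tree instance named intree; the branch names are hypothetical:
>>> intree.always_read(['run_number', 'event_number'])
>>> intree.always_read('run_number')   # a bare string is not a list/tuple: raises TypeError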
|
[
"Always",
"read",
"these",
"branches",
"even",
"when",
"in",
"caching",
"mode",
".",
"Maybe",
"you",
"have",
"caching",
"enabled",
"and",
"there",
"are",
"branches",
"you",
"want",
"to",
"be",
"updated",
"for",
"each",
"entry",
"even",
"though",
"you",
"never",
"access",
"them",
"directly",
".",
"This",
"is",
"useful",
"if",
"you",
"are",
"iterating",
"over",
"an",
"input",
"tree",
"and",
"writing",
"to",
"an",
"output",
"tree",
"sharing",
"the",
"same",
"TreeBuffer",
"and",
"you",
"want",
"a",
"direct",
"copy",
"of",
"certain",
"branches",
".",
"If",
"you",
"have",
"caching",
"enabled",
"but",
"these",
"branches",
"are",
"not",
"specified",
"here",
"and",
"never",
"accessed",
"then",
"they",
"will",
"never",
"be",
"read",
"from",
"disk",
"so",
"the",
"values",
"of",
"branches",
"in",
"memory",
"will",
"remain",
"unchanged",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L64-L82
|
14,046
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.branch_type
|
def branch_type(cls, branch):
"""
Return the string representation for the type of a branch
"""
typename = branch.GetClassName()
if not typename:
leaf = branch.GetListOfLeaves()[0]
typename = leaf.GetTypeName()
# check if leaf has multiple elements
leaf_count = leaf.GetLeafCount()
if leaf_count:
length = leaf_count.GetMaximum()
else:
length = leaf.GetLen()
if length > 1:
typename = '{0}[{1:d}]'.format(typename, length)
return typename
|
python
|
def branch_type(cls, branch):
"""
Return the string representation for the type of a branch
"""
typename = branch.GetClassName()
if not typename:
leaf = branch.GetListOfLeaves()[0]
typename = leaf.GetTypeName()
# check if leaf has multiple elements
leaf_count = leaf.GetLeafCount()
if leaf_count:
length = leaf_count.GetMaximum()
else:
length = leaf.GetLen()
if length > 1:
typename = '{0}[{1:d}]'.format(typename, length)
return typename
|
[
"def",
"branch_type",
"(",
"cls",
",",
"branch",
")",
":",
"typename",
"=",
"branch",
".",
"GetClassName",
"(",
")",
"if",
"not",
"typename",
":",
"leaf",
"=",
"branch",
".",
"GetListOfLeaves",
"(",
")",
"[",
"0",
"]",
"typename",
"=",
"leaf",
".",
"GetTypeName",
"(",
")",
"# check if leaf has multiple elements",
"leaf_count",
"=",
"leaf",
".",
"GetLeafCount",
"(",
")",
"if",
"leaf_count",
":",
"length",
"=",
"leaf_count",
".",
"GetMaximum",
"(",
")",
"else",
":",
"length",
"=",
"leaf",
".",
"GetLen",
"(",
")",
"if",
"length",
">",
"1",
":",
"typename",
"=",
"'{0}[{1:d}]'",
".",
"format",
"(",
"typename",
",",
"length",
")",
"return",
"typename"
] |
Return the string representation for the type of a branch
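For illustration, branch_type is a classmethod, so it can be called on the Tree class with any branch of an existing tree; the branch names and return values shown are hypothetical:
>>> Tree.branch_type(mytree.GetBranch('pt'))      # e.g. 'Float_t' for a scalar float leaf
>>> Tree.branch_type(mytree.GetBranch('tracks'))  # e.g. 'vector<float>' for an object branch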
|
[
"Return",
"the",
"string",
"representation",
"for",
"the",
"type",
"of",
"a",
"branch"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L85-L101
|
14,047
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.create_buffer
|
def create_buffer(self, ignore_unsupported=False):
"""
Create this tree's TreeBuffer
"""
bufferdict = OrderedDict()
for branch in self.iterbranches():
# only include activated branches
if not self.GetBranchStatus(branch.GetName()):
continue
if not BaseTree.branch_is_supported(branch):
log.warning(
"ignore unsupported branch `{0}`".format(branch.GetName()))
continue
bufferdict[branch.GetName()] = Tree.branch_type(branch)
self.set_buffer(TreeBuffer(
bufferdict,
ignore_unsupported=ignore_unsupported))
|
python
|
def create_buffer(self, ignore_unsupported=False):
"""
Create this tree's TreeBuffer
"""
bufferdict = OrderedDict()
for branch in self.iterbranches():
# only include activated branches
if not self.GetBranchStatus(branch.GetName()):
continue
if not BaseTree.branch_is_supported(branch):
log.warning(
"ignore unsupported branch `{0}`".format(branch.GetName()))
continue
bufferdict[branch.GetName()] = Tree.branch_type(branch)
self.set_buffer(TreeBuffer(
bufferdict,
ignore_unsupported=ignore_unsupported))
|
[
"def",
"create_buffer",
"(",
"self",
",",
"ignore_unsupported",
"=",
"False",
")",
":",
"bufferdict",
"=",
"OrderedDict",
"(",
")",
"for",
"branch",
"in",
"self",
".",
"iterbranches",
"(",
")",
":",
"# only include activated branches",
"if",
"not",
"self",
".",
"GetBranchStatus",
"(",
"branch",
".",
"GetName",
"(",
")",
")",
":",
"continue",
"if",
"not",
"BaseTree",
".",
"branch_is_supported",
"(",
"branch",
")",
":",
"log",
".",
"warning",
"(",
"\"ignore unsupported branch `{0}`\"",
".",
"format",
"(",
"branch",
".",
"GetName",
"(",
")",
")",
")",
"continue",
"bufferdict",
"[",
"branch",
".",
"GetName",
"(",
")",
"]",
"=",
"Tree",
".",
"branch_type",
"(",
"branch",
")",
"self",
".",
"set_buffer",
"(",
"TreeBuffer",
"(",
"bufferdict",
",",
"ignore_unsupported",
"=",
"ignore_unsupported",
")",
")"
] |
Create this tree's TreeBuffer
|
[
"Create",
"this",
"tree",
"s",
"TreeBuffer"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L111-L127
|
14,048
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.create_branches
|
def create_branches(self, branches):
"""
Create branches from a TreeBuffer or dict mapping names to type names
Parameters
----------
branches : TreeBuffer or dict
"""
if not isinstance(branches, TreeBuffer):
branches = TreeBuffer(branches)
self.set_buffer(branches, create_branches=True)
|
python
|
def create_branches(self, branches):
"""
Create branches from a TreeBuffer or dict mapping names to type names
Parameters
----------
branches : TreeBuffer or dict
"""
if not isinstance(branches, TreeBuffer):
branches = TreeBuffer(branches)
self.set_buffer(branches, create_branches=True)
|
[
"def",
"create_branches",
"(",
"self",
",",
"branches",
")",
":",
"if",
"not",
"isinstance",
"(",
"branches",
",",
"TreeBuffer",
")",
":",
"branches",
"=",
"TreeBuffer",
"(",
"branches",
")",
"self",
".",
"set_buffer",
"(",
"branches",
",",
"create_branches",
"=",
"True",
")"
] |
Create branches from a TreeBuffer or dict mapping names to type names
Parameters
----------
branches : TreeBuffer or dict
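A minimal sketch of building a new tree this way, assuming the ROOT-style type codes 'F' (float) and 'I' (int) are understood by TreeBuffer and that a writable output file is open; the file and branch names are hypothetical:
>>> from rootpy.io import root_open
>>> from rootpy.tree import Tree
>>> outfile = root_open('events.root', 'recreate')
>>> tree = Tree('events')
>>> tree.create_branches({'x': 'F', 'n': 'I'})
>>> tree.x = 1.5
>>> tree.n = 2
>>> tree.fill()   # assuming Tree exposes a fill() wrapping TTree::Fill on the buffer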
|
[
"Create",
"branches",
"from",
"a",
"TreeBuffer",
"or",
"dict",
"mapping",
"names",
"to",
"type",
"names"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L129-L139
|
14,049
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.update_buffer
|
def update_buffer(self, treebuffer, transfer_objects=False):
"""
Merge items from a TreeBuffer into this Tree's TreeBuffer
Parameters
----------
buffer : rootpy.tree.buffer.TreeBuffer
The TreeBuffer to merge into this Tree's buffer
transfer_objects : bool, optional (default=False)
If True then all objects and collections on the input buffer will
be transferred to this Tree's buffer.
"""
self._buffer.update(treebuffer)
if transfer_objects:
self._buffer.set_objects(treebuffer)
|
python
|
def update_buffer(self, treebuffer, transfer_objects=False):
"""
Merge items from a TreeBuffer into this Tree's TreeBuffer
Parameters
----------
buffer : rootpy.tree.buffer.TreeBuffer
The TreeBuffer to merge into this Tree's buffer
transfer_objects : bool, optional (default=False)
If True then all objects and collections on the input buffer will
be transferred to this Tree's buffer.
"""
self._buffer.update(treebuffer)
if transfer_objects:
self._buffer.set_objects(treebuffer)
|
[
"def",
"update_buffer",
"(",
"self",
",",
"treebuffer",
",",
"transfer_objects",
"=",
"False",
")",
":",
"self",
".",
"_buffer",
".",
"update",
"(",
"treebuffer",
")",
"if",
"transfer_objects",
":",
"self",
".",
"_buffer",
".",
"set_objects",
"(",
"treebuffer",
")"
] |
Merge items from a TreeBuffer into this Tree's TreeBuffer
Parameters
----------
buffer : rootpy.tree.buffer.TreeBuffer
The TreeBuffer to merge into this Tree's buffer
transfer_objects : bool, optional (default=False)
If True then all objects and collections on the input buffer will
be transferred to this Tree's buffer.
|
[
"Merge",
"items",
"from",
"a",
"TreeBuffer",
"into",
"this",
"Tree",
"s",
"TreeBuffer"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L141-L156
|
14,050
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.set_buffer
|
def set_buffer(self, treebuffer,
branches=None,
ignore_branches=None,
create_branches=False,
visible=True,
ignore_missing=False,
ignore_duplicates=False,
transfer_objects=False):
"""
Set the Tree buffer
Parameters
----------
treebuffer : rootpy.tree.buffer.TreeBuffer
a TreeBuffer
branches : list, optional (default=None)
only include these branches from the TreeBuffer
ignore_branches : list, optional (default=None)
ignore these branches from the TreeBuffer
create_branches : bool, optional (default=False)
If True then the branches in the TreeBuffer should be created.
Use this option if initializing the Tree. A ValueError is raised
if an attempt is made to create a branch with the same name as one
that already exists in the Tree. If False the addresses of existing
branches will be set to point at the addresses in this buffer.
visible : bool, optional (default=True)
If True then the branches will be added to the buffer and will be
accessible as attributes of the Tree.
ignore_missing : bool, optional (default=False)
If True then any branches in this buffer that do not exist in the
Tree will be ignored, otherwise a ValueError will be raised. This
option is only valid when ``create_branches`` is False.
ignore_duplicates : bool, optional (default=False)
If False then raise a ValueError if the tree already has a branch
with the same name as an entry in the buffer. If True then skip
branches that already exist. This option is only valid when
``create_branches`` is True.
transfer_objects : bool, optional (default=False)
If True, all tree objects and collections will be transferred from
the buffer into this Tree's buffer.
"""
# determine branches to keep while preserving branch order
if branches is None:
branches = treebuffer.keys()
if ignore_branches is not None:
branches = [b for b in branches if b not in ignore_branches]
if create_branches:
for name in branches:
value = treebuffer[name]
if self.has_branch(name):
if ignore_duplicates:
log.warning(
"Skipping entry in buffer with the same name "
"as an existing branch: `{0}`".format(name))
continue
raise ValueError(
"Attempting to create two branches "
"with the same name: `{0}`".format(name))
if isinstance(value, Scalar):
self.Branch(name, value,
'{0}/{1}'.format(
name, value.type))
elif isinstance(value, Array):
length = value.length_name or len(value)
self.Branch(name, value,
'{0}[{2}]/{1}'.format(
name, value.type, length))
else:
self.Branch(name, value)
else:
for name in branches:
value = treebuffer[name]
if self.has_branch(name):
self.SetBranchAddress(name, value)
elif not ignore_missing:
raise ValueError(
"Attempting to set address for "
"branch `{0}` which does not exist".format(name))
else:
log.warning(
"Skipping entry in buffer for which no "
"corresponding branch in the "
"tree exists: `{0}`".format(name))
if visible:
newbuffer = TreeBuffer()
for branch in branches:
if branch in treebuffer:
newbuffer[branch] = treebuffer[branch]
newbuffer.set_objects(treebuffer)
self.update_buffer(newbuffer, transfer_objects=transfer_objects)
|
python
|
def set_buffer(self, treebuffer,
branches=None,
ignore_branches=None,
create_branches=False,
visible=True,
ignore_missing=False,
ignore_duplicates=False,
transfer_objects=False):
"""
Set the Tree buffer
Parameters
----------
treebuffer : rootpy.tree.buffer.TreeBuffer
a TreeBuffer
branches : list, optional (default=None)
only include these branches from the TreeBuffer
ignore_branches : list, optional (default=None)
ignore these branches from the TreeBuffer
create_branches : bool, optional (default=False)
If True then the branches in the TreeBuffer should be created.
Use this option if initializing the Tree. A ValueError is raised
if an attempt is made to create a branch with the same name as one
that already exists in the Tree. If False the addresses of existing
branches will be set to point at the addresses in this buffer.
visible : bool, optional (default=True)
If True then the branches will be added to the buffer and will be
accessible as attributes of the Tree.
ignore_missing : bool, optional (default=False)
If True then any branches in this buffer that do not exist in the
Tree will be ignored, otherwise a ValueError will be raised. This
option is only valid when ``create_branches`` is False.
ignore_duplicates : bool, optional (default=False)
If False then raise a ValueError if the tree already has a branch
with the same name as an entry in the buffer. If True then skip
branches that already exist. This option is only valid when
``create_branches`` is True.
transfer_objects : bool, optional (default=False)
If True, all tree objects and collections will be transferred from
the buffer into this Tree's buffer.
"""
# determine branches to keep while preserving branch order
if branches is None:
branches = treebuffer.keys()
if ignore_branches is not None:
branches = [b for b in branches if b not in ignore_branches]
if create_branches:
for name in branches:
value = treebuffer[name]
if self.has_branch(name):
if ignore_duplicates:
log.warning(
"Skipping entry in buffer with the same name "
"as an existing branch: `{0}`".format(name))
continue
raise ValueError(
"Attempting to create two branches "
"with the same name: `{0}`".format(name))
if isinstance(value, Scalar):
self.Branch(name, value,
'{0}/{1}'.format(
name, value.type))
elif isinstance(value, Array):
length = value.length_name or len(value)
self.Branch(name, value,
'{0}[{2}]/{1}'.format(
name, value.type, length))
else:
self.Branch(name, value)
else:
for name in branches:
value = treebuffer[name]
if self.has_branch(name):
self.SetBranchAddress(name, value)
elif not ignore_missing:
raise ValueError(
"Attempting to set address for "
"branch `{0}` which does not exist".format(name))
else:
log.warning(
"Skipping entry in buffer for which no "
"corresponding branch in the "
"tree exists: `{0}`".format(name))
if visible:
newbuffer = TreeBuffer()
for branch in branches:
if branch in treebuffer:
newbuffer[branch] = treebuffer[branch]
newbuffer.set_objects(treebuffer)
self.update_buffer(newbuffer, transfer_objects=transfer_objects)
|
[
"def",
"set_buffer",
"(",
"self",
",",
"treebuffer",
",",
"branches",
"=",
"None",
",",
"ignore_branches",
"=",
"None",
",",
"create_branches",
"=",
"False",
",",
"visible",
"=",
"True",
",",
"ignore_missing",
"=",
"False",
",",
"ignore_duplicates",
"=",
"False",
",",
"transfer_objects",
"=",
"False",
")",
":",
"# determine branches to keep while preserving branch order",
"if",
"branches",
"is",
"None",
":",
"branches",
"=",
"treebuffer",
".",
"keys",
"(",
")",
"if",
"ignore_branches",
"is",
"not",
"None",
":",
"branches",
"=",
"[",
"b",
"for",
"b",
"in",
"branches",
"if",
"b",
"not",
"in",
"ignore_branches",
"]",
"if",
"create_branches",
":",
"for",
"name",
"in",
"branches",
":",
"value",
"=",
"treebuffer",
"[",
"name",
"]",
"if",
"self",
".",
"has_branch",
"(",
"name",
")",
":",
"if",
"ignore_duplicates",
":",
"log",
".",
"warning",
"(",
"\"Skipping entry in buffer with the same name \"",
"\"as an existing branch: `{0}`\"",
".",
"format",
"(",
"name",
")",
")",
"continue",
"raise",
"ValueError",
"(",
"\"Attempting to create two branches \"",
"\"with the same name: `{0}`\"",
".",
"format",
"(",
"name",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"Scalar",
")",
":",
"self",
".",
"Branch",
"(",
"name",
",",
"value",
",",
"'{0}/{1}'",
".",
"format",
"(",
"name",
",",
"value",
".",
"type",
")",
")",
"elif",
"isinstance",
"(",
"value",
",",
"Array",
")",
":",
"length",
"=",
"value",
".",
"length_name",
"or",
"len",
"(",
"value",
")",
"self",
".",
"Branch",
"(",
"name",
",",
"value",
",",
"'{0}[{2}]/{1}'",
".",
"format",
"(",
"name",
",",
"value",
".",
"type",
",",
"length",
")",
")",
"else",
":",
"self",
".",
"Branch",
"(",
"name",
",",
"value",
")",
"else",
":",
"for",
"name",
"in",
"branches",
":",
"value",
"=",
"treebuffer",
"[",
"name",
"]",
"if",
"self",
".",
"has_branch",
"(",
"name",
")",
":",
"self",
".",
"SetBranchAddress",
"(",
"name",
",",
"value",
")",
"elif",
"not",
"ignore_missing",
":",
"raise",
"ValueError",
"(",
"\"Attempting to set address for \"",
"\"branch `{0}` which does not exist\"",
".",
"format",
"(",
"name",
")",
")",
"else",
":",
"log",
".",
"warning",
"(",
"\"Skipping entry in buffer for which no \"",
"\"corresponding branch in the \"",
"\"tree exists: `{0}`\"",
".",
"format",
"(",
"name",
")",
")",
"if",
"visible",
":",
"newbuffer",
"=",
"TreeBuffer",
"(",
")",
"for",
"branch",
"in",
"branches",
":",
"if",
"branch",
"in",
"treebuffer",
":",
"newbuffer",
"[",
"branch",
"]",
"=",
"treebuffer",
"[",
"branch",
"]",
"newbuffer",
".",
"set_objects",
"(",
"treebuffer",
")",
"self",
".",
"update_buffer",
"(",
"newbuffer",
",",
"transfer_objects",
"=",
"transfer_objects",
")"
] |
Set the Tree buffer
Parameters
----------
treebuffer : rootpy.tree.buffer.TreeBuffer
a TreeBuffer
branches : list, optional (default=None)
only include these branches from the TreeBuffer
ignore_branches : list, optional (default=None)
ignore these branches from the TreeBuffer
create_branches : bool, optional (default=False)
If True then the branches in the TreeBuffer should be created.
Use this option if initializing the Tree. A ValueError is raised
if an attempt is made to create a branch with the same name as one
that already exists in the Tree. If False the addresses of existing
branches will be set to point at the addresses in this buffer.
visible : bool, optional (default=True)
If True then the branches will be added to the buffer and will be
accessible as attributes of the Tree.
ignore_missing : bool, optional (default=False)
If True then any branches in this buffer that do not exist in the
Tree will be ignored, otherwise a ValueError will be raised. This
option is only valid when ``create_branches`` is False.
ignore_duplicates : bool, optional (default=False)
If False then raise a ValueError if the tree already has a branch
with the same name as an entry in the buffer. If True then skip
branches that already exist. This option is only valid when
``create_branches`` is True.
transfer_objects : bool, optional (default=False)
If True, all tree objects and collections will be transferred from
the buffer into this Tree's buffer.
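A minimal sketch with an explicitly constructed buffer and hypothetical branch names; create_branches=True is the new-output-tree case, while the default binds addresses on an existing tree:
>>> from rootpy.tree.buffer import TreeBuffer
>>> buf = TreeBuffer({'x': 'F', 'n': 'I'})
>>> outtree.set_buffer(buf, create_branches=True)   # create branches on a new output tree
>>> intree.set_buffer(buf, ignore_missing=True)     # bind matching branches of an existing tree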
|
[
"Set",
"the",
"Tree",
"buffer"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L158-L255
|
14,051
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.glob
|
def glob(self, patterns, exclude=None):
"""
Return a list of branch names that match ``pattern``.
Exclude all matched branch names which also match a pattern in
``exclude``. ``exclude`` may be a string or list of strings.
Parameters
----------
patterns: str or list
branches are matched against this pattern or list of patterns where
globbing is performed with '*'.
exclude : str or list, optional (default=None)
branches matching this pattern or list of patterns are excluded
even if they match a pattern in ``patterns``.
Returns
-------
matches : list
List of matching branch names
"""
if isinstance(patterns, string_types):
patterns = [patterns]
if isinstance(exclude, string_types):
exclude = [exclude]
matches = []
for pattern in patterns:
matches += fnmatch.filter(self.iterbranchnames(), pattern)
if exclude is not None:
for exclude_pattern in exclude:
matches = [match for match in matches
if not fnmatch.fnmatch(match, exclude_pattern)]
return matches
|
python
|
def glob(self, patterns, exclude=None):
"""
Return a list of branch names that match ``pattern``.
Exclude all matched branch names which also match a pattern in
``exclude``. ``exclude`` may be a string or list of strings.
Parameters
----------
patterns: str or list
branches are matched against this pattern or list of patterns where
globbing is performed with '*'.
exclude : str or list, optional (default=None)
branches matching this pattern or list of patterns are excluded
even if they match a pattern in ``patterns``.
Returns
-------
matches : list
List of matching branch names
"""
if isinstance(patterns, string_types):
patterns = [patterns]
if isinstance(exclude, string_types):
exclude = [exclude]
matches = []
for pattern in patterns:
matches += fnmatch.filter(self.iterbranchnames(), pattern)
if exclude is not None:
for exclude_pattern in exclude:
matches = [match for match in matches
if not fnmatch.fnmatch(match, exclude_pattern)]
return matches
|
[
"def",
"glob",
"(",
"self",
",",
"patterns",
",",
"exclude",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"patterns",
",",
"string_types",
")",
":",
"patterns",
"=",
"[",
"patterns",
"]",
"if",
"isinstance",
"(",
"exclude",
",",
"string_types",
")",
":",
"exclude",
"=",
"[",
"exclude",
"]",
"matches",
"=",
"[",
"]",
"for",
"pattern",
"in",
"patterns",
":",
"matches",
"+=",
"fnmatch",
".",
"filter",
"(",
"self",
".",
"iterbranchnames",
"(",
")",
",",
"pattern",
")",
"if",
"exclude",
"is",
"not",
"None",
":",
"for",
"exclude_pattern",
"in",
"exclude",
":",
"matches",
"=",
"[",
"match",
"for",
"match",
"in",
"matches",
"if",
"not",
"fnmatch",
".",
"fnmatch",
"(",
"match",
",",
"exclude_pattern",
")",
"]",
"return",
"matches"
] |
Return a list of branch names that match ``pattern``.
Exclude all matched branch names which also match a pattern in
``exclude``. ``exclude`` may be a string or list of strings.
Parameters
----------
patterns: str or list
branches are matched against this pattern or list of patterns where
globbing is performed with '*'.
exclude : str or list, optional (default=None)
branches matching this pattern or list of patterns are excluded
even if they match a pattern in ``patterns``.
Returns
-------
matches : list
List of matching branch names
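For illustration, with hypothetical branch names:
>>> tree.glob('jet_*')                                # all branches starting with 'jet_'
>>> tree.glob(['jet_*', 'el_*'], exclude='*_raw')     # several patterns, minus excluded matches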
|
[
"Return",
"a",
"list",
"of",
"branch",
"names",
"that",
"match",
"pattern",
".",
"Exclude",
"all",
"matched",
"branch",
"names",
"which",
"also",
"match",
"a",
"pattern",
"in",
"exclude",
".",
"exclude",
"may",
"be",
"a",
"string",
"or",
"list",
"of",
"strings",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L337-L369
|
14,052
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.CopyTree
|
def CopyTree(self, selection, *args, **kwargs):
"""
Copy the tree while supporting a rootpy.tree.cut.Cut selection in
addition to a simple string.
"""
return super(BaseTree, self).CopyTree(str(selection), *args, **kwargs)
|
python
|
def CopyTree(self, selection, *args, **kwargs):
"""
Copy the tree while supporting a rootpy.tree.cut.Cut selection in
addition to a simple string.
"""
return super(BaseTree, self).CopyTree(str(selection), *args, **kwargs)
|
[
"def",
"CopyTree",
"(",
"self",
",",
"selection",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"super",
"(",
"BaseTree",
",",
"self",
")",
".",
"CopyTree",
"(",
"str",
"(",
"selection",
")",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Copy the tree while supporting a rootpy.tree.cut.Cut selection in
addition to a simple string.
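A short sketch combining CopyTree with a rootpy Cut; the tree and branch names are hypothetical:
>>> from rootpy.tree import Cut
>>> selection = Cut('pt > 20') & Cut('abs(eta) < 2.5')
>>> skimmed = tree.CopyTree(selection)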
|
[
"Copy",
"the",
"tree",
"while",
"supporting",
"a",
"rootpy",
".",
"tree",
".",
"cut",
".",
"Cut",
"selection",
"in",
"addition",
"to",
"a",
"simple",
"string",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L656-L661
|
14,053
|
rootpy/rootpy
|
rootpy/tree/tree.py
|
BaseTree.to_array
|
def to_array(self, *args, **kwargs):
"""
Convert this tree into a NumPy structured array
"""
from root_numpy import tree2array
return tree2array(self, *args, **kwargs)
|
python
|
def to_array(self, *args, **kwargs):
"""
Convert this tree into a NumPy structured array
"""
from root_numpy import tree2array
return tree2array(self, *args, **kwargs)
|
[
"def",
"to_array",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"root_numpy",
"import",
"tree2array",
"return",
"tree2array",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Convert this tree into a NumPy structured array
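For illustration, assuming root_numpy is installed and the branch names exist on the tree:
>>> arr = tree.to_array(branches=['pt', 'eta'], selection='pt > 20')
>>> arr.dtype.names
('pt', 'eta')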
|
[
"Convert",
"this",
"tree",
"into",
"a",
"NumPy",
"structured",
"array"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/tree.py#L858-L863
|
14,054
|
rootpy/rootpy
|
rootpy/roosh.py
|
color_key
|
def color_key(tkey):
"""
Function which returns a colorized TKey name given its type
"""
name = tkey.GetName()
classname = tkey.GetClassName()
for class_regex, color in _COLOR_MATCHER:
if class_regex.match(classname):
return colored(name, color=color)
return name
|
python
|
def color_key(tkey):
"""
Function which returns a colorized TKey name given its type
"""
name = tkey.GetName()
classname = tkey.GetClassName()
for class_regex, color in _COLOR_MATCHER:
if class_regex.match(classname):
return colored(name, color=color)
return name
|
[
"def",
"color_key",
"(",
"tkey",
")",
":",
"name",
"=",
"tkey",
".",
"GetName",
"(",
")",
"classname",
"=",
"tkey",
".",
"GetClassName",
"(",
")",
"for",
"class_regex",
",",
"color",
"in",
"_COLOR_MATCHER",
":",
"if",
"class_regex",
".",
"match",
"(",
"classname",
")",
":",
"return",
"colored",
"(",
"name",
",",
"color",
"=",
"color",
")",
"return",
"name"
] |
Function which returns a colorized TKey name given its type
|
[
"Function",
"which",
"returns",
"a",
"colorized",
"TKey",
"name",
"given",
"its",
"type"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/roosh.py#L49-L58
|
14,055
|
rootpy/rootpy
|
rootpy/plotting/contrib/plot_corrcoef_matrix.py
|
cov
|
def cov(m, y=None, rowvar=1, bias=0, ddof=None, weights=None, repeat_weights=0):
"""
Estimate a covariance matrix, given data.
Covariance indicates the level to which two variables vary together.
If we examine N-dimensional samples, :math:`X = [x_1, x_2, ... x_N]^T`,
then the covariance matrix element :math:`C_{ij}` is the covariance of
:math:`x_i` and :math:`x_j`. The element :math:`C_{ii}` is the variance
of :math:`x_i`.
Parameters
----------
m : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
form as that of `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations given (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : int, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The covariance matrix of the variables.
See Also
--------
corrcoef : Normalized covariance matrix
Examples
--------
Consider two variables, :math:`x_0` and :math:`x_1`, which
correlate perfectly, but in opposite directions:
>>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
>>> x
array([[0, 1, 2],
[2, 1, 0]])
Note how :math:`x_0` increases while :math:`x_1` decreases. The covariance
matrix shows this clearly:
>>> np.cov(x)
array([[ 1., -1.],
[-1., 1.]])
Note that element :math:`C_{0,1}`, which shows the correlation between
:math:`x_0` and :math:`x_1`, is negative.
Further, note how `x` and `y` are combined:
>>> x = [-2.1, -1, 4.3]
>>> y = [3, 1.1, 0.12]
>>> X = np.vstack((x,y))
>>> print np.cov(X)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x, y)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x)
11.71
"""
import numpy as np
# Check inputs
if ddof is not None and ddof != int(ddof):
raise ValueError(
"ddof must be integer")
X = np.array(m, ndmin=2, dtype=float)
if X.size == 0:
# handle empty arrays
return np.array(m)
if X.shape[0] == 1:
rowvar = 1
if rowvar:
axis = 0
tup = (slice(None), np.newaxis)
else:
axis = 1
tup = (np.newaxis, slice(None))
if y is not None:
y = np.array(y, copy=False, ndmin=2, dtype=float)
X = np.concatenate((X, y), axis)
if ddof is None:
if bias == 0:
ddof = 1
else:
ddof = 0
if weights is not None:
weights = np.array(weights, dtype=float)
weights_sum = weights.sum()
if weights_sum <= 0:
raise ValueError(
"sum of weights is non-positive")
X -= np.average(X, axis=1-axis, weights=weights)[tup]
if repeat_weights:
# each weight represents a number of repetitions of an observation
# the total sample size can be determined in this case and we have
# both an unbiased and biased weighted covariance
fact = weights_sum - ddof
else:
# normalize weights so they sum to unity
weights /= weights_sum
# unbiased weighted covariance is not defined if the weights are
# not integral frequencies (repeat-type)
fact = (1. - np.power(weights, 2).sum())
else:
weights = 1
X -= X.mean(axis=1-axis)[tup]
if rowvar:
N = X.shape[1]
else:
N = X.shape[0]
fact = float(N - ddof)
if not rowvar:
return (np.dot(weights * X.T, X.conj()) / fact).squeeze()
else:
return (np.dot(weights * X, X.T.conj()) / fact).squeeze()
|
python
|
def cov(m, y=None, rowvar=1, bias=0, ddof=None, weights=None, repeat_weights=0):
"""
Estimate a covariance matrix, given data.
Covariance indicates the level to which two variables vary together.
If we examine N-dimensional samples, :math:`X = [x_1, x_2, ... x_N]^T`,
then the covariance matrix element :math:`C_{ij}` is the covariance of
:math:`x_i` and :math:`x_j`. The element :math:`C_{ii}` is the variance
of :math:`x_i`.
Parameters
----------
m : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
form as that of `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations given (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : int, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The covariance matrix of the variables.
See Also
--------
corrcoef : Normalized covariance matrix
Examples
--------
Consider two variables, :math:`x_0` and :math:`x_1`, which
correlate perfectly, but in opposite directions:
>>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
>>> x
array([[0, 1, 2],
[2, 1, 0]])
Note how :math:`x_0` increases while :math:`x_1` decreases. The covariance
matrix shows this clearly:
>>> np.cov(x)
array([[ 1., -1.],
[-1., 1.]])
Note that element :math:`C_{0,1}`, which shows the correlation between
:math:`x_0` and :math:`x_1`, is negative.
Further, note how `x` and `y` are combined:
>>> x = [-2.1, -1, 4.3]
>>> y = [3, 1.1, 0.12]
>>> X = np.vstack((x,y))
>>> print np.cov(X)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x, y)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x)
11.71
"""
import numpy as np
# Check inputs
if ddof is not None and ddof != int(ddof):
raise ValueError(
"ddof must be integer")
X = np.array(m, ndmin=2, dtype=float)
if X.size == 0:
# handle empty arrays
return np.array(m)
if X.shape[0] == 1:
rowvar = 1
if rowvar:
axis = 0
tup = (slice(None), np.newaxis)
else:
axis = 1
tup = (np.newaxis, slice(None))
if y is not None:
y = np.array(y, copy=False, ndmin=2, dtype=float)
X = np.concatenate((X, y), axis)
if ddof is None:
if bias == 0:
ddof = 1
else:
ddof = 0
if weights is not None:
weights = np.array(weights, dtype=float)
weights_sum = weights.sum()
if weights_sum <= 0:
raise ValueError(
"sum of weights is non-positive")
X -= np.average(X, axis=1-axis, weights=weights)[tup]
if repeat_weights:
# each weight represents a number of repetitions of an observation
# the total sample size can be determined in this case and we have
# both an unbiased and biased weighted covariance
fact = weights_sum - ddof
else:
# normalize weights so they sum to unity
weights /= weights_sum
# unbiased weighted covariance is not defined if the weights are
# not integral frequencies (repeat-type)
fact = (1. - np.power(weights, 2).sum())
else:
weights = 1
X -= X.mean(axis=1-axis)[tup]
if rowvar:
N = X.shape[1]
else:
N = X.shape[0]
fact = float(N - ddof)
if not rowvar:
return (np.dot(weights * X.T, X.conj()) / fact).squeeze()
else:
return (np.dot(weights * X, X.T.conj()) / fact).squeeze()
|
[
"def",
"cov",
"(",
"m",
",",
"y",
"=",
"None",
",",
"rowvar",
"=",
"1",
",",
"bias",
"=",
"0",
",",
"ddof",
"=",
"None",
",",
"weights",
"=",
"None",
",",
"repeat_weights",
"=",
"0",
")",
":",
"import",
"numpy",
"as",
"np",
"# Check inputs",
"if",
"ddof",
"is",
"not",
"None",
"and",
"ddof",
"!=",
"int",
"(",
"ddof",
")",
":",
"raise",
"ValueError",
"(",
"\"ddof must be integer\"",
")",
"X",
"=",
"np",
".",
"array",
"(",
"m",
",",
"ndmin",
"=",
"2",
",",
"dtype",
"=",
"float",
")",
"if",
"X",
".",
"size",
"==",
"0",
":",
"# handle empty arrays",
"return",
"np",
".",
"array",
"(",
"m",
")",
"if",
"X",
".",
"shape",
"[",
"0",
"]",
"==",
"1",
":",
"rowvar",
"=",
"1",
"if",
"rowvar",
":",
"axis",
"=",
"0",
"tup",
"=",
"(",
"slice",
"(",
"None",
")",
",",
"np",
".",
"newaxis",
")",
"else",
":",
"axis",
"=",
"1",
"tup",
"=",
"(",
"np",
".",
"newaxis",
",",
"slice",
"(",
"None",
")",
")",
"if",
"y",
"is",
"not",
"None",
":",
"y",
"=",
"np",
".",
"array",
"(",
"y",
",",
"copy",
"=",
"False",
",",
"ndmin",
"=",
"2",
",",
"dtype",
"=",
"float",
")",
"X",
"=",
"np",
".",
"concatenate",
"(",
"(",
"X",
",",
"y",
")",
",",
"axis",
")",
"if",
"ddof",
"is",
"None",
":",
"if",
"bias",
"==",
"0",
":",
"ddof",
"=",
"1",
"else",
":",
"ddof",
"=",
"0",
"if",
"weights",
"is",
"not",
"None",
":",
"weights",
"=",
"np",
".",
"array",
"(",
"weights",
",",
"dtype",
"=",
"float",
")",
"weights_sum",
"=",
"weights",
".",
"sum",
"(",
")",
"if",
"weights_sum",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"\"sum of weights is non-positive\"",
")",
"X",
"-=",
"np",
".",
"average",
"(",
"X",
",",
"axis",
"=",
"1",
"-",
"axis",
",",
"weights",
"=",
"weights",
")",
"[",
"tup",
"]",
"if",
"repeat_weights",
":",
"# each weight represents a number of repetitions of an observation",
"# the total sample size can be determined in this case and we have",
"# both an unbiased and biased weighted covariance",
"fact",
"=",
"weights_sum",
"-",
"ddof",
"else",
":",
"# normalize weights so they sum to unity",
"weights",
"/=",
"weights_sum",
"# unbiased weighted covariance is not defined if the weights are",
"# not integral frequencies (repeat-type)",
"fact",
"=",
"(",
"1.",
"-",
"np",
".",
"power",
"(",
"weights",
",",
"2",
")",
".",
"sum",
"(",
")",
")",
"else",
":",
"weights",
"=",
"1",
"X",
"-=",
"X",
".",
"mean",
"(",
"axis",
"=",
"1",
"-",
"axis",
")",
"[",
"tup",
"]",
"if",
"rowvar",
":",
"N",
"=",
"X",
".",
"shape",
"[",
"1",
"]",
"else",
":",
"N",
"=",
"X",
".",
"shape",
"[",
"0",
"]",
"fact",
"=",
"float",
"(",
"N",
"-",
"ddof",
")",
"if",
"not",
"rowvar",
":",
"return",
"(",
"np",
".",
"dot",
"(",
"weights",
"*",
"X",
".",
"T",
",",
"X",
".",
"conj",
"(",
")",
")",
"/",
"fact",
")",
".",
"squeeze",
"(",
")",
"else",
":",
"return",
"(",
"np",
".",
"dot",
"(",
"weights",
"*",
"X",
",",
"X",
".",
"T",
".",
"conj",
"(",
")",
")",
"/",
"fact",
")",
".",
"squeeze",
"(",
")"
] |
Estimate a covariance matrix, given data.
Covariance indicates the level to which two variables vary together.
If we examine N-dimensional samples, :math:`X = [x_1, x_2, ... x_N]^T`,
then the covariance matrix element :math:`C_{ij}` is the covariance of
:math:`x_i` and :math:`x_j`. The element :math:`C_{ii}` is the variance
of :math:`x_i`.
Parameters
----------
m : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
form as that of `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations given (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : int, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The covariance matrix of the variables.
See Also
--------
corrcoef : Normalized covariance matrix
Examples
--------
Consider two variables, :math:`x_0` and :math:`x_1`, which
correlate perfectly, but in opposite directions:
>>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
>>> x
array([[0, 1, 2],
[2, 1, 0]])
Note how :math:`x_0` increases while :math:`x_1` decreases. The covariance
matrix shows this clearly:
>>> np.cov(x)
array([[ 1., -1.],
[-1., 1.]])
Note that element :math:`C_{0,1}`, which shows the correlation between
:math:`x_0` and :math:`x_1`, is negative.
Further, note how `x` and `y` are combined:
>>> x = [-2.1, -1, 4.3]
>>> y = [3, 1.1, 0.12]
>>> X = np.vstack((x,y))
>>> print np.cov(X)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x, y)
[[ 11.71 -4.286 ]
[ -4.286 2.14413333]]
>>> print np.cov(x)
11.71
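A sketch of the weighted form layered on top of the NumPy behaviour, with arbitrary illustrative weights:
>>> import numpy as np
>>> x = [-2.1, -1, 4.3]
>>> y = [3, 1.1, 0.12]
>>> w = [1., 2., 1.]
>>> cov(np.vstack((x, y)), weights=w)                      # biased: weights normalized to unit sum
>>> cov(np.vstack((x, y)), weights=w, repeat_weights=1)    # weights treated as integer repeat counts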
|
[
"Estimate",
"a",
"covariance",
"matrix",
"given",
"data",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/contrib/plot_corrcoef_matrix.py#L136-L285
|
14,056
|
rootpy/rootpy
|
rootpy/plotting/contrib/plot_corrcoef_matrix.py
|
corrcoef
|
def corrcoef(x, y=None, rowvar=1, bias=0, ddof=None, weights=None,
repeat_weights=0):
"""
Return correlation coefficients.
Please refer to the documentation for `cov` for more detail. The
relationship between the correlation coefficient matrix, `P`, and the
covariance matrix, `C`, is
.. math:: P_{ij} = \\frac{ C_{ij} } { \\sqrt{ C_{ii} * C_{jj} } }
The values of `P` are between -1 and 1, inclusive.
Parameters
----------
x : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
shape as `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : {None, int}, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The correlation coefficient matrix of the variables.
See Also
--------
cov : Covariance matrix
"""
import numpy as np
c = cov(x, y, rowvar, bias, ddof, weights, repeat_weights)
if c.size == 0:
# handle empty arrays
return c
try:
d = np.diag(c)
except ValueError: # scalar covariance
return 1
return c / np.sqrt(np.multiply.outer(d, d))
|
python
|
def corrcoef(x, y=None, rowvar=1, bias=0, ddof=None, weights=None,
repeat_weights=0):
"""
Return correlation coefficients.
Please refer to the documentation for `cov` for more detail. The
relationship between the correlation coefficient matrix, `P`, and the
covariance matrix, `C`, is
.. math:: P_{ij} = \\frac{ C_{ij} } { \\sqrt{ C_{ii} * C_{jj} } }
The values of `P` are between -1 and 1, inclusive.
Parameters
----------
x : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
shape as `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : {None, int}, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The correlation coefficient matrix of the variables.
See Also
--------
cov : Covariance matrix
"""
import numpy as np
c = cov(x, y, rowvar, bias, ddof, weights, repeat_weights)
if c.size == 0:
# handle empty arrays
return c
try:
d = np.diag(c)
except ValueError: # scalar covariance
return 1
return c / np.sqrt(np.multiply.outer(d, d))
|
[
"def",
"corrcoef",
"(",
"x",
",",
"y",
"=",
"None",
",",
"rowvar",
"=",
"1",
",",
"bias",
"=",
"0",
",",
"ddof",
"=",
"None",
",",
"weights",
"=",
"None",
",",
"repeat_weights",
"=",
"0",
")",
":",
"import",
"numpy",
"as",
"np",
"c",
"=",
"cov",
"(",
"x",
",",
"y",
",",
"rowvar",
",",
"bias",
",",
"ddof",
",",
"weights",
",",
"repeat_weights",
")",
"if",
"c",
".",
"size",
"==",
"0",
":",
"# handle empty arrays",
"return",
"c",
"try",
":",
"d",
"=",
"np",
".",
"diag",
"(",
"c",
")",
"except",
"ValueError",
":",
"# scalar covariance",
"return",
"1",
"return",
"c",
"/",
"np",
".",
"sqrt",
"(",
"np",
".",
"multiply",
".",
"outer",
"(",
"d",
",",
"d",
")",
")"
] |
Return correlation coefficients.
Please refer to the documentation for `cov` for more detail. The
relationship between the correlation coefficient matrix, `P`, and the
covariance matrix, `C`, is
.. math:: P_{ij} = \\frac{ C_{ij} } { \\sqrt{ C_{ii} * C_{jj} } }
The values of `P` are between -1 and 1, inclusive.
Parameters
----------
x : array_like
A 1-D or 2-D array containing multiple variables and observations.
Each row of `m` represents a variable, and each column a single
observation of all those variables. Also see `rowvar` below.
y : array_like, optional
An additional set of variables and observations. `y` has the same
shape as `m`.
rowvar : int, optional
If `rowvar` is non-zero (default), then each row represents a
variable, with observations in the columns. Otherwise, the relationship
is transposed: each column represents a variable, while the rows
contain observations.
bias : int, optional
Default normalization is by ``(N - 1)``, where ``N`` is the number of
observations (unbiased estimate). If `bias` is 1, then
normalization is by ``N``. These values can be overridden by using
the keyword ``ddof`` in numpy versions >= 1.5.
ddof : {None, int}, optional
.. versionadded:: 1.5
If not ``None`` normalization is by ``(N - ddof)``, where ``N`` is
the number of observations; this overrides the value implied by
``bias``. The default value is ``None``.
weights : array-like, optional
A 1-D array of weights with a length equal to the number of
observations.
repeat_weights : int, optional
The default treatment of weights in the weighted covariance is to first
normalize them to unit sum and use the biased weighted covariance
equation. If `repeat_weights` is 1 then the weights must represent an
integer number of occurrences of each observation and both a biased and
unbiased weighted covariance is defined because the total sample size
can be determined.
Returns
-------
out : ndarray
The correlation coefficient matrix of the variables.
See Also
--------
cov : Covariance matrix
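A quick check on the perfectly anti-correlated example from the cov docstring; the uniform weights are illustrative:
>>> import numpy as np
>>> x = np.array([[0, 2], [1, 1], [2, 0]]).T
>>> corrcoef(x)
array([[ 1., -1.],
       [-1.,  1.]])
>>> corrcoef(x, weights=[1., 1., 1.])   # uniform weights leave the correlation unchanged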
|
[
"Return",
"correlation",
"coefficients",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/contrib/plot_corrcoef_matrix.py#L288-L355
|
14,057
|
rootpy/rootpy
|
rootpy/tree/cut.py
|
Cut.safe
|
def safe(self, parentheses=True):
"""
Returns a string representation with special characters
replaced by safer characters for use in file names.
"""
if not self:
return ""
string = str(self)
string = string.replace("**", "_pow_")
string = string.replace("*", "_mul_")
string = string.replace("/", "_div_")
string = string.replace("==", "_eq_")
string = string.replace("<=", "_leq_")
string = string.replace(">=", "_geq_")
string = string.replace("<", "_lt_")
string = string.replace(">", "_gt_")
string = string.replace("&&", "_and_")
string = string.replace("||", "_or_")
string = string.replace("!", "not_")
if parentheses:
string = string.replace("(", "L")
string = string.replace(")", "R")
else:
string = string.replace("(", "")
string = string.replace(")", "")
string = string.replace(" ", "")
return string
|
python
|
def safe(self, parentheses=True):
"""
Returns a string representation with special characters
replaced by safer characters for use in file names.
"""
if not self:
return ""
string = str(self)
string = string.replace("**", "_pow_")
string = string.replace("*", "_mul_")
string = string.replace("/", "_div_")
string = string.replace("==", "_eq_")
string = string.replace("<=", "_leq_")
string = string.replace(">=", "_geq_")
string = string.replace("<", "_lt_")
string = string.replace(">", "_gt_")
string = string.replace("&&", "_and_")
string = string.replace("||", "_or_")
string = string.replace("!", "not_")
if parentheses:
string = string.replace("(", "L")
string = string.replace(")", "R")
else:
string = string.replace("(", "")
string = string.replace(")", "")
string = string.replace(" ", "")
return string
|
[
"def",
"safe",
"(",
"self",
",",
"parentheses",
"=",
"True",
")",
":",
"if",
"not",
"self",
":",
"return",
"\"\"",
"string",
"=",
"str",
"(",
"self",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"**\"",
",",
"\"_pow_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"*\"",
",",
"\"_mul_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"/\"",
",",
"\"_div_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"==\"",
",",
"\"_eq_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"<=\"",
",",
"\"_leq_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\">=\"",
",",
"\"_geq_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"<\"",
",",
"\"_lt_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\">\"",
",",
"\"_gt_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"&&\"",
",",
"\"_and_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"||\"",
",",
"\"_or_\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\"!\"",
",",
"\"not_\"",
")",
"if",
"parentheses",
":",
"string",
"=",
"string",
".",
"replace",
"(",
"\"(\"",
",",
"\"L\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\")\"",
",",
"\"R\"",
")",
"else",
":",
"string",
"=",
"string",
".",
"replace",
"(",
"\"(\"",
",",
"\"\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\")\"",
",",
"\"\"",
")",
"string",
"=",
"string",
".",
"replace",
"(",
"\" \"",
",",
"\"\"",
")",
"return",
"string"
] |
Returns a string representation with special characters
replaced by safer characters for use in file names.
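For illustration, with a hypothetical selection string (the exact output depends on how Cut stores the expression):
>>> from rootpy.tree import Cut
>>> cut = Cut('(pt>20)&&(eta<2.5)')
>>> cut.safe()                      # e.g. 'Lpt_gt_20R_and_Leta_lt_2.5R'
>>> cut.safe(parentheses=False)     # e.g. 'pt_gt_20_and_eta_lt_2.5'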
|
[
"Returns",
"a",
"string",
"representation",
"with",
"special",
"characters",
"replaced",
"by",
"safer",
"characters",
"for",
"use",
"in",
"file",
"names",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/cut.py#L222-L248
|
14,058
|
rootpy/rootpy
|
rootpy/tree/cut.py
|
Cut.latex
|
def latex(self):
"""
Returns a string representation for use in LaTeX
"""
if not self:
return ""
s = str(self)
s = s.replace("==", " = ")
s = s.replace("<=", " \leq ")
s = s.replace(">=", " \geq ")
s = s.replace("&&", r" \text{ and } ")
s = s.replace("||", r" \text{ or } ")
return s
|
python
|
def latex(self):
"""
Returns a string representation for use in LaTeX
"""
if not self:
return ""
s = str(self)
s = s.replace("==", " = ")
s = s.replace("<=", " \leq ")
s = s.replace(">=", " \geq ")
s = s.replace("&&", r" \text{ and } ")
s = s.replace("||", r" \text{ or } ")
return s
|
[
"def",
"latex",
"(",
"self",
")",
":",
"if",
"not",
"self",
":",
"return",
"\"\"",
"s",
"=",
"str",
"(",
"self",
")",
"s",
"=",
"s",
".",
"replace",
"(",
"\"==\"",
",",
"\" = \"",
")",
"s",
"=",
"s",
".",
"replace",
"(",
"\"<=\"",
",",
"\" \\leq \"",
")",
"s",
"=",
"s",
".",
"replace",
"(",
"\">=\"",
",",
"\" \\geq \"",
")",
"s",
"=",
"s",
".",
"replace",
"(",
"\"&&\"",
",",
"r\" \\text{ and } \"",
")",
"s",
"=",
"s",
".",
"replace",
"(",
"\"||\"",
",",
"r\" \\text{ or } \"",
")",
"return",
"s"
] |
Returns a string representation for use in LaTeX
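For illustration, with a hypothetical selection string:
>>> from rootpy.tree import Cut
>>> Cut('pt>=20&&njets==2').latex()   # e.g. 'pt \geq 20 \text{ and } njets = 2'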
|
[
"Returns",
"a",
"string",
"representation",
"for",
"use",
"in",
"LaTeX"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/cut.py#L250-L262
|
14,059
|
rootpy/rootpy
|
rootpy/tree/cut.py
|
Cut.replace
|
def replace(self, name, newname):
"""
Replace all occurrences of name with newname
"""
if not re.match("[a-zA-Z]\w*", name):
return None
if not re.match("[a-zA-Z]\w*", newname):
return None
def _replace(match):
return match.group(0).replace(match.group('name'), newname)
pattern = re.compile("(\W|^)(?P<name>" + name + ")(\W|$)")
cut = re.sub(pattern, _replace, str(self))
return Cut(cut)
|
python
|
def replace(self, name, newname):
"""
Replace all occurrences of name with newname
"""
if not re.match("[a-zA-Z]\w*", name):
return None
if not re.match("[a-zA-Z]\w*", newname):
return None
def _replace(match):
return match.group(0).replace(match.group('name'), newname)
pattern = re.compile("(\W|^)(?P<name>" + name + ")(\W|$)")
cut = re.sub(pattern, _replace, str(self))
return Cut(cut)
|
[
"def",
"replace",
"(",
"self",
",",
"name",
",",
"newname",
")",
":",
"if",
"not",
"re",
".",
"match",
"(",
"\"[a-zA-Z]\\w*\"",
",",
"name",
")",
":",
"return",
"None",
"if",
"not",
"re",
".",
"match",
"(",
"\"[a-zA-Z]\\w*\"",
",",
"newname",
")",
":",
"return",
"None",
"def",
"_replace",
"(",
"match",
")",
":",
"return",
"match",
".",
"group",
"(",
"0",
")",
".",
"replace",
"(",
"match",
".",
"group",
"(",
"'name'",
")",
",",
"newname",
")",
"pattern",
"=",
"re",
".",
"compile",
"(",
"\"(\\W|^)(?P<name>\"",
"+",
"name",
"+",
"\")(\\W|$)\"",
")",
"cut",
"=",
"re",
".",
"sub",
"(",
"pattern",
",",
"_replace",
",",
"str",
"(",
"self",
")",
")",
"return",
"Cut",
"(",
"cut",
")"
] |
Replace all occurrences of name with newname
|
[
"Replace",
"all",
"occurrences",
"of",
"name",
"with",
"newname"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/cut.py#L273-L287
|
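Cut.replace above uses plain re, so the whole-word rename can be exercised without ROOT. A slightly stricter standalone variant (it anchors the identifier checks and escapes name; rename_variable is an invented name):

import re

def rename_variable(expr, name, newname):
    # Whole-word rename using the same (\W|^)(?P<name>...)(\W|$) boundary idea.
    if not re.match(r"[a-zA-Z]\w*$", name) or not re.match(r"[a-zA-Z]\w*$", newname):
        raise ValueError("names must be simple identifiers")
    pattern = re.compile(r"(\W|^)(?P<name>" + re.escape(name) + r")(\W|$)")
    return pattern.sub(
        lambda match: match.group(0).replace(match.group("name"), newname), expr)

print(rename_variable("pt>30&&pt_err<1", "pt", "jet_pt"))
# -> jet_pt>30&&pt_err<1  (the 'pt' inside 'pt_err' is untouched)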
14,060
|
rootpy/rootpy
|
docs/sphinxext/ipython_directive.py
|
EmbeddedSphinxShell.save_image
|
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = 'plt.gcf().savefig("%s")'%image_file
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
|
python
|
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = 'plt.gcf().savefig("%s")'%image_file
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
|
[
"def",
"save_image",
"(",
"self",
",",
"image_file",
")",
":",
"self",
".",
"ensure_pyplot",
"(",
")",
"command",
"=",
"'plt.gcf().savefig(\"%s\")'",
"%",
"image_file",
"#print 'SAVEFIG', command # dbg",
"self",
".",
"process_input_line",
"(",
"'bookmark ipy_thisdir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"'cd -b ipy_savedir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"command",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"'cd -b ipy_thisdir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"process_input_line",
"(",
"'bookmark -d ipy_thisdir'",
",",
"store_history",
"=",
"False",
")",
"self",
".",
"clear_cout",
"(",
")"
] |
Saves the image file to disk.
|
[
"Saves",
"the",
"image",
"file",
"to",
"disk",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/docs/sphinxext/ipython_directive.py#L393-L405
|
14,061
|
rootpy/rootpy
|
rootpy/plotting/base.py
|
Plottable.decorate
|
def decorate(self, other=None, **kwargs):
"""
Apply style options to a Plottable object.
Returns a reference to self.
"""
if 'color' in kwargs:
incompatible = []
for othercolor in ('linecolor', 'fillcolor', 'markercolor'):
if othercolor in kwargs:
incompatible.append(othercolor)
if incompatible:
raise ValueError(
"Setting both the `color` and the `{0}` attribute{1} "
"is ambiguous. Please set only one.".format(
', '.join(incompatible),
's' if len(incompatible) != 1 else ''))
if other is not None:
decor = other.decorators
if 'color' in kwargs:
decor.pop('linecolor', None)
decor.pop('fillcolor', None)
decor.pop('markercolor', None)
decor.update(kwargs)
kwargs = decor
for key, value in kwargs.items():
if key in Plottable.EXTRA_ATTRS_DEPRECATED:
newkey = Plottable.EXTRA_ATTRS_DEPRECATED[key]
warnings.warn(
"`{0}` is deprecated and will be removed in "
"future versions. Use `{1}` instead".format(
key, newkey),
DeprecationWarning)
key = newkey
if key in Plottable.EXTRA_ATTRS:
setattr(self, key, value)
elif key == 'markerstyle':
self.SetMarkerStyle(value)
elif key == 'markercolor':
self.SetMarkerColor(value)
elif key == 'markersize':
self.SetMarkerSize(value)
elif key == 'fillcolor':
self.SetFillColor(value)
elif key == 'fillstyle':
self.SetFillStyle(value)
elif key == 'linecolor':
self.SetLineColor(value)
elif key == 'linestyle':
self.SetLineStyle(value)
elif key == 'linewidth':
self.SetLineWidth(value)
elif key == 'color':
self.SetColor(value)
else:
raise AttributeError(
"unknown decoration attribute: `{0}`".format(key))
return self
|
python
|
def decorate(self, other=None, **kwargs):
"""
Apply style options to a Plottable object.
Returns a reference to self.
"""
if 'color' in kwargs:
incompatible = []
for othercolor in ('linecolor', 'fillcolor', 'markercolor'):
if othercolor in kwargs:
incompatible.append(othercolor)
if incompatible:
raise ValueError(
"Setting both the `color` and the `{0}` attribute{1} "
"is ambiguous. Please set only one.".format(
', '.join(incompatible),
's' if len(incompatible) != 1 else ''))
if other is not None:
decor = other.decorators
if 'color' in kwargs:
decor.pop('linecolor', None)
decor.pop('fillcolor', None)
decor.pop('markercolor', None)
decor.update(kwargs)
kwargs = decor
for key, value in kwargs.items():
if key in Plottable.EXTRA_ATTRS_DEPRECATED:
newkey = Plottable.EXTRA_ATTRS_DEPRECATED[key]
warnings.warn(
"`{0}` is deprecated and will be removed in "
"future versions. Use `{1}` instead".format(
key, newkey),
DeprecationWarning)
key = newkey
if key in Plottable.EXTRA_ATTRS:
setattr(self, key, value)
elif key == 'markerstyle':
self.SetMarkerStyle(value)
elif key == 'markercolor':
self.SetMarkerColor(value)
elif key == 'markersize':
self.SetMarkerSize(value)
elif key == 'fillcolor':
self.SetFillColor(value)
elif key == 'fillstyle':
self.SetFillStyle(value)
elif key == 'linecolor':
self.SetLineColor(value)
elif key == 'linestyle':
self.SetLineStyle(value)
elif key == 'linewidth':
self.SetLineWidth(value)
elif key == 'color':
self.SetColor(value)
else:
raise AttributeError(
"unknown decoration attribute: `{0}`".format(key))
return self
|
[
"def",
"decorate",
"(",
"self",
",",
"other",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'color'",
"in",
"kwargs",
":",
"incompatible",
"=",
"[",
"]",
"for",
"othercolor",
"in",
"(",
"'linecolor'",
",",
"'fillcolor'",
",",
"'markercolor'",
")",
":",
"if",
"othercolor",
"in",
"kwargs",
":",
"incompatible",
".",
"append",
"(",
"othercolor",
")",
"if",
"incompatible",
":",
"raise",
"ValueError",
"(",
"\"Setting both the `color` and the `{0}` attribute{1} \"",
"\"is ambiguous. Please set only one.\"",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"incompatible",
")",
",",
"'s'",
"if",
"len",
"(",
"incompatible",
")",
"!=",
"1",
"else",
"''",
")",
")",
"if",
"other",
"is",
"not",
"None",
":",
"decor",
"=",
"other",
".",
"decorators",
"if",
"'color'",
"in",
"kwargs",
":",
"decor",
".",
"pop",
"(",
"'linecolor'",
",",
"None",
")",
"decor",
".",
"pop",
"(",
"'fillcolor'",
",",
"None",
")",
"decor",
".",
"pop",
"(",
"'markercolor'",
",",
"None",
")",
"decor",
".",
"update",
"(",
"kwargs",
")",
"kwargs",
"=",
"decor",
"for",
"key",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"Plottable",
".",
"EXTRA_ATTRS_DEPRECATED",
":",
"newkey",
"=",
"Plottable",
".",
"EXTRA_ATTRS_DEPRECATED",
"[",
"key",
"]",
"warnings",
".",
"warn",
"(",
"\"`{0}` is deprecated and will be removed in \"",
"\"future versions. Use `{1}` instead\"",
".",
"format",
"(",
"key",
",",
"newkey",
")",
",",
"DeprecationWarning",
")",
"key",
"=",
"newkey",
"if",
"key",
"in",
"Plottable",
".",
"EXTRA_ATTRS",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"value",
")",
"elif",
"key",
"==",
"'markerstyle'",
":",
"self",
".",
"SetMarkerStyle",
"(",
"value",
")",
"elif",
"key",
"==",
"'markercolor'",
":",
"self",
".",
"SetMarkerColor",
"(",
"value",
")",
"elif",
"key",
"==",
"'markersize'",
":",
"self",
".",
"SetMarkerSize",
"(",
"value",
")",
"elif",
"key",
"==",
"'fillcolor'",
":",
"self",
".",
"SetFillColor",
"(",
"value",
")",
"elif",
"key",
"==",
"'fillstyle'",
":",
"self",
".",
"SetFillStyle",
"(",
"value",
")",
"elif",
"key",
"==",
"'linecolor'",
":",
"self",
".",
"SetLineColor",
"(",
"value",
")",
"elif",
"key",
"==",
"'linestyle'",
":",
"self",
".",
"SetLineStyle",
"(",
"value",
")",
"elif",
"key",
"==",
"'linewidth'",
":",
"self",
".",
"SetLineWidth",
"(",
"value",
")",
"elif",
"key",
"==",
"'color'",
":",
"self",
".",
"SetColor",
"(",
"value",
")",
"else",
":",
"raise",
"AttributeError",
"(",
"\"unknown decoration attribute: `{0}`\"",
".",
"format",
"(",
"key",
")",
")",
"return",
"self"
] |
Apply style options to a Plottable object.
Returns a reference to self.
|
[
"Apply",
"style",
"options",
"to",
"a",
"Plottable",
"object",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/base.py#L174-L231
|
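Most of Plottable.decorate above is ROOT setter dispatch, but its up-front guard against mixing `color` with the per-component colors is a reusable keyword-validation pattern. A ROOT-free sketch (check_exclusive is an invented helper name):

def check_exclusive(kwargs, primary, others):
    # Refuse ambiguous combinations such as color= together with linecolor=.
    if primary in kwargs:
        clash = [key for key in others if key in kwargs]
        if clash:
            raise ValueError(
                "Setting both `{0}` and `{1}` is ambiguous. "
                "Please set only one.".format(primary, ", ".join(clash)))

check_exclusive({"color": "red", "linewidth": 2},
                "color", ("linecolor", "fillcolor", "markercolor"))  # passes
try:
    check_exclusive({"color": "red", "linecolor": "blue"},
                    "color", ("linecolor", "fillcolor", "markercolor"))
except ValueError as err:
    print(err)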
14,062
|
rootpy/rootpy
|
rootpy/tree/treeobject.py
|
TreeCollection.getitem
|
def getitem(self, index):
"""
direct access without going through self.selection
"""
if index >= getattr(self.tree, self.size):
raise IndexError(index)
if self.__cache_objects and index in self.__cache:
return self.__cache[index]
obj = self.tree_object_cls(self.tree, self.name, self.prefix, index)
if self.__cache_objects:
self.__cache[index] = obj
return obj
|
python
|
def getitem(self, index):
"""
direct access without going through self.selection
"""
if index >= getattr(self.tree, self.size):
raise IndexError(index)
if self.__cache_objects and index in self.__cache:
return self.__cache[index]
obj = self.tree_object_cls(self.tree, self.name, self.prefix, index)
if self.__cache_objects:
self.__cache[index] = obj
return obj
|
[
"def",
"getitem",
"(",
"self",
",",
"index",
")",
":",
"if",
"index",
">=",
"getattr",
"(",
"self",
".",
"tree",
",",
"self",
".",
"size",
")",
":",
"raise",
"IndexError",
"(",
"index",
")",
"if",
"self",
".",
"__cache_objects",
"and",
"index",
"in",
"self",
".",
"__cache",
":",
"return",
"self",
".",
"__cache",
"[",
"index",
"]",
"obj",
"=",
"self",
".",
"tree_object_cls",
"(",
"self",
".",
"tree",
",",
"self",
".",
"name",
",",
"self",
".",
"prefix",
",",
"index",
")",
"if",
"self",
".",
"__cache_objects",
":",
"self",
".",
"__cache",
"[",
"index",
"]",
"=",
"obj",
"return",
"obj"
] |
direct access without going through self.selection
|
[
"direct",
"access",
"without",
"going",
"through",
"self",
".",
"selection"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treeobject.py#L219-L230
|
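TreeCollection.getitem above is a read-through cache keyed by index. The same shape, detached from TTree access, looks like this (CachedGetter and the lambda builder are invented for the sketch):

class CachedGetter(object):
    # Build items on first access, then serve repeats from a per-index cache.
    def __init__(self, build, size, cache=True):
        self._build = build
        self._size = size
        self._cache_enabled = cache
        self._cache = {}

    def getitem(self, index):
        if index >= self._size:
            raise IndexError(index)
        if self._cache_enabled and index in self._cache:
            return self._cache[index]
        obj = self._build(index)
        if self._cache_enabled:
            self._cache[index] = obj
        return obj

getter = CachedGetter(lambda i: [i, i * i], size=5)
assert getter.getitem(2) is getter.getitem(2)  # second call hits the cache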
14,063
|
rootpy/rootpy
|
rootpy/defaults.py
|
configure_defaults
|
def configure_defaults():
"""
This function is executed immediately after ROOT's finalSetup
"""
log.debug("configure_defaults()")
global initialized
initialized = True
if use_rootpy_handler:
# Need to do it again here, since it is overridden by ROOT.
set_error_handler(python_logging_error_handler)
if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS:
ROOT.gROOT.SetBatch(True)
log.debug('ROOT is running in batch mode')
ROOT.gErrorIgnoreLevel = 0
this_dll = C.CDLL(None)
try:
EnableAutoDictionary = C.c_int.in_dll(
this_dll, "G__EnableAutoDictionary")
except ValueError:
pass
else:
# Disable automatic dictionary generation
EnableAutoDictionary.value = 0
# TODO(pwaller): idea, `execfile("userdata/initrc.py")` here?
# note: that wouldn't allow the user to override the default
# canvas size, for example.
for init in _initializations:
init()
|
python
|
def configure_defaults():
"""
This function is executed immediately after ROOT's finalSetup
"""
log.debug("configure_defaults()")
global initialized
initialized = True
if use_rootpy_handler:
# Need to do it again here, since it is overridden by ROOT.
set_error_handler(python_logging_error_handler)
if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS:
ROOT.gROOT.SetBatch(True)
log.debug('ROOT is running in batch mode')
ROOT.gErrorIgnoreLevel = 0
this_dll = C.CDLL(None)
try:
EnableAutoDictionary = C.c_int.in_dll(
this_dll, "G__EnableAutoDictionary")
except ValueError:
pass
else:
# Disable automatic dictionary generation
EnableAutoDictionary.value = 0
# TODO(pwaller): idea, `execfile("userdata/initrc.py")` here?
# note: that wouldn't allow the user to override the default
# canvas size, for example.
for init in _initializations:
init()
|
[
"def",
"configure_defaults",
"(",
")",
":",
"log",
".",
"debug",
"(",
"\"configure_defaults()\"",
")",
"global",
"initialized",
"initialized",
"=",
"True",
"if",
"use_rootpy_handler",
":",
"# Need to do it again here, since it is overridden by ROOT.",
"set_error_handler",
"(",
"python_logging_error_handler",
")",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"'ROOTPY_BATCH'",
",",
"False",
")",
"or",
"IN_NOSETESTS",
":",
"ROOT",
".",
"gROOT",
".",
"SetBatch",
"(",
"True",
")",
"log",
".",
"debug",
"(",
"'ROOT is running in batch mode'",
")",
"ROOT",
".",
"gErrorIgnoreLevel",
"=",
"0",
"this_dll",
"=",
"C",
".",
"CDLL",
"(",
"None",
")",
"try",
":",
"EnableAutoDictionary",
"=",
"C",
".",
"c_int",
".",
"in_dll",
"(",
"this_dll",
",",
"\"G__EnableAutoDictionary\"",
")",
"except",
"ValueError",
":",
"pass",
"else",
":",
"# Disable automatic dictionary generation",
"EnableAutoDictionary",
".",
"value",
"=",
"0",
"# TODO(pwaller): idea, `execfile(\"userdata/initrc.py\")` here?",
"# note: that wouldn't allow the user to override the default",
"# canvas size, for example.",
"for",
"init",
"in",
"_initializations",
":",
"init",
"(",
")"
] |
This function is executed immediately after ROOT's finalSetup
|
[
"This",
"function",
"is",
"executed",
"immediately",
"after",
"ROOT",
"s",
"finalSetup"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/defaults.py#L70-L104
|
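The distinctive piece of configure_defaults above is the guarded ctypes lookup of an integer exported by the running process. A standalone sketch of just that pattern, for platforms where CDLL(None) returns a handle to the current process as in the record; some_exported_int is a placeholder symbol, so the except branch is the expected path outside ROOT:

import ctypes

this_proc = ctypes.CDLL(None)  # handle to the current process
try:
    flag = ctypes.c_int.in_dll(this_proc, "some_exported_int")
except ValueError:
    # symbol not exported by this process; nothing to configure
    print("symbol not found")
else:
    flag.value = 0  # flip the global, as done for G__EnableAutoDictionary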
14,064
|
rootpy/rootpy
|
rootpy/defaults.py
|
rp_module_level_in_stack
|
def rp_module_level_in_stack():
"""
Returns true if we're during a rootpy import
"""
from traceback import extract_stack
from rootpy import _ROOTPY_SOURCE_PATH
modlevel_files = [filename for filename, _, func, _ in extract_stack()
if func == "<module>"]
return any(path.startswith(_ROOTPY_SOURCE_PATH) for path in modlevel_files)
|
python
|
def rp_module_level_in_stack():
"""
Returns true if we're during a rootpy import
"""
from traceback import extract_stack
from rootpy import _ROOTPY_SOURCE_PATH
modlevel_files = [filename for filename, _, func, _ in extract_stack()
if func == "<module>"]
return any(path.startswith(_ROOTPY_SOURCE_PATH) for path in modlevel_files)
|
[
"def",
"rp_module_level_in_stack",
"(",
")",
":",
"from",
"traceback",
"import",
"extract_stack",
"from",
"rootpy",
"import",
"_ROOTPY_SOURCE_PATH",
"modlevel_files",
"=",
"[",
"filename",
"for",
"filename",
",",
"_",
",",
"func",
",",
"_",
"in",
"extract_stack",
"(",
")",
"if",
"func",
"==",
"\"<module>\"",
"]",
"return",
"any",
"(",
"path",
".",
"startswith",
"(",
"_ROOTPY_SOURCE_PATH",
")",
"for",
"path",
"in",
"modlevel_files",
")"
] |
Returns true if we're during a rootpy import
|
[
"Returns",
"true",
"if",
"we",
"re",
"during",
"a",
"rootpy",
"import"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/defaults.py#L107-L116
|
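rp_module_level_in_stack above is a filter over traceback.extract_stack(). A rootpy-free version using the Python 3 FrameSummary attributes (module_level_files is an invented name):

from traceback import extract_stack

def module_level_files():
    # filenames of every frame currently executing at module level
    return [frame.filename
            for frame in extract_stack()
            if frame.name == "<module>"]

print(module_level_files())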
14,065
|
rootpy/rootpy
|
rootpy/memory/deletion.py
|
monitor_deletion
|
def monitor_deletion():
"""
Function for checking for correct deletion of weakref-able objects.
Example usage::
monitor, is_alive = monitor_deletion()
obj = set()
monitor(obj, "obj")
assert is_alive("obj") # True because there is a ref to `obj` is_alive
del obj
assert not is_alive("obj") # True because there `obj` is deleted
"""
monitors = {}
def set_deleted(x):
def _(weakref):
del monitors[x]
return _
def monitor(item, name):
monitors[name] = ref(item, set_deleted(name))
def is_alive(name):
return monitors.get(name, None) is not None
return monitor, is_alive
|
python
|
def monitor_deletion():
"""
Function for checking for correct deletion of weakref-able objects.
Example usage::
monitor, is_alive = monitor_deletion()
obj = set()
monitor(obj, "obj")
assert is_alive("obj") # True because there is a ref to `obj` is_alive
del obj
assert not is_alive("obj") # True because there `obj` is deleted
"""
monitors = {}
def set_deleted(x):
def _(weakref):
del monitors[x]
return _
def monitor(item, name):
monitors[name] = ref(item, set_deleted(name))
def is_alive(name):
return monitors.get(name, None) is not None
return monitor, is_alive
|
[
"def",
"monitor_deletion",
"(",
")",
":",
"monitors",
"=",
"{",
"}",
"def",
"set_deleted",
"(",
"x",
")",
":",
"def",
"_",
"(",
"weakref",
")",
":",
"del",
"monitors",
"[",
"x",
"]",
"return",
"_",
"def",
"monitor",
"(",
"item",
",",
"name",
")",
":",
"monitors",
"[",
"name",
"]",
"=",
"ref",
"(",
"item",
",",
"set_deleted",
"(",
"name",
")",
")",
"def",
"is_alive",
"(",
"name",
")",
":",
"return",
"monitors",
".",
"get",
"(",
"name",
",",
"None",
")",
"is",
"not",
"None",
"return",
"monitor",
",",
"is_alive"
] |
Function for checking for correct deletion of weakref-able objects.
Example usage::
monitor, is_alive = monitor_deletion()
obj = set()
monitor(obj, "obj")
assert is_alive("obj") # True because there is a ref to `obj` is_alive
del obj
assert not is_alive("obj") # True because there `obj` is deleted
|
[
"Function",
"for",
"checking",
"for",
"correct",
"deletion",
"of",
"weakref",
"-",
"able",
"objects",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/memory/deletion.py#L25-L52
|
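monitor_deletion above relies only on weakref callbacks, so its docstring example can be reproduced without rootpy. A compact sketch; a small user-defined class stands in for the monitored object so it is certainly weak-referenceable, and under CPython's reference counting the callback fires as soon as the last strong reference is dropped:

import weakref

monitors = {}

def monitor(obj, name):
    # register a weak reference whose callback forgets the entry when obj dies
    monitors[name] = weakref.ref(obj, lambda _ref, name=name: monitors.pop(name, None))

def is_alive(name):
    return monitors.get(name) is not None

class Thing(object):
    pass

obj = Thing()
monitor(obj, "obj")
assert is_alive("obj")      # a strong reference still exists
del obj
assert not is_alive("obj")  # the weakref callback removed the entry
print("deletion observed")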
14,066
|
rootpy/rootpy
|
rootpy/plotting/utils.py
|
canvases_with
|
def canvases_with(drawable):
"""
Return a list of all canvases where `drawable` has been painted.
Note: This function is inefficient because it inspects all objects on all
canvases, recursively. Avoid calling it if you have a large number of
canvases and primitives.
"""
return [c for c in ROOT.gROOT.GetListOfCanvases()
if drawable in find_all_primitives(c)]
|
python
|
def canvases_with(drawable):
"""
Return a list of all canvases where `drawable` has been painted.
Note: This function is inefficient because it inspects all objects on all
canvases, recursively. Avoid calling it if you have a large number of
canvases and primitives.
"""
return [c for c in ROOT.gROOT.GetListOfCanvases()
if drawable in find_all_primitives(c)]
|
[
"def",
"canvases_with",
"(",
"drawable",
")",
":",
"return",
"[",
"c",
"for",
"c",
"in",
"ROOT",
".",
"gROOT",
".",
"GetListOfCanvases",
"(",
")",
"if",
"drawable",
"in",
"find_all_primitives",
"(",
"c",
")",
"]"
] |
Return a list of all canvases where `drawable` has been painted.
Note: This function is inefficient because it inspects all objects on all
canvases, recursively. Avoid calling it if you have a large number of
canvases and primitives.
|
[
"Return",
"a",
"list",
"of",
"all",
"canvases",
"where",
"drawable",
"has",
"been",
"painted",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/utils.py#L405-L414
|
14,067
|
rootpy/rootpy
|
rootpy/plotting/utils.py
|
tick_length_pixels
|
def tick_length_pixels(pad, xaxis, yaxis, xlength, ylength=None):
"""
Set the axes tick lengths in pixels
"""
if ylength is None:
ylength = xlength
xaxis.SetTickLength(xlength / float(pad.height_pixels))
yaxis.SetTickLength(ylength / float(pad.width_pixels))
|
python
|
def tick_length_pixels(pad, xaxis, yaxis, xlength, ylength=None):
"""
Set the axes tick lengths in pixels
"""
if ylength is None:
ylength = xlength
xaxis.SetTickLength(xlength / float(pad.height_pixels))
yaxis.SetTickLength(ylength / float(pad.width_pixels))
|
[
"def",
"tick_length_pixels",
"(",
"pad",
",",
"xaxis",
",",
"yaxis",
",",
"xlength",
",",
"ylength",
"=",
"None",
")",
":",
"if",
"ylength",
"is",
"None",
":",
"ylength",
"=",
"xlength",
"xaxis",
".",
"SetTickLength",
"(",
"xlength",
"/",
"float",
"(",
"pad",
".",
"height_pixels",
")",
")",
"yaxis",
".",
"SetTickLength",
"(",
"ylength",
"/",
"float",
"(",
"pad",
".",
"width_pixels",
")",
")"
] |
Set the axes tick lengths in pixels
|
[
"Set",
"the",
"axes",
"tick",
"lengths",
"in",
"pixels"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/utils.py#L436-L443
|
14,068
|
rootpy/rootpy
|
rootpy/tree/treetypes.py
|
BaseArray.reset
|
def reset(self):
"""Reset the value to the default"""
if self.resetable:
for i in range(len(self)):
self[i] = self.default
|
python
|
def reset(self):
"""Reset the value to the default"""
if self.resetable:
for i in range(len(self)):
self[i] = self.default
|
[
"def",
"reset",
"(",
"self",
")",
":",
"if",
"self",
".",
"resetable",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
")",
")",
":",
"self",
"[",
"i",
"]",
"=",
"self",
".",
"default"
] |
Reset the value to the default
|
[
"Reset",
"the",
"value",
"to",
"the",
"default"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/tree/treetypes.py#L210-L214
|
14,069
|
rootpy/rootpy
|
rootpy/stats/fit.py
|
minimize
|
def minimize(func,
minimizer_type=None,
minimizer_algo=None,
strategy=None,
retry=0,
scan=False,
print_level=None):
"""
Minimize a RooAbsReal function
Parameters
----------
func : RooAbsReal
The function to minimize
minimizer_type : string, optional (default=None)
The minimizer type: "Minuit" or "Minuit2".
If None (the default) then use the current global default value.
minimizer_algo : string, optional (default=None)
The minimizer algorithm: "Migrad", etc.
If None (the default) then use the current global default value.
strategy : int, optional (default=None)
Set the MINUIT strategy. Accepted values
are 0, 1, and 2 and represent MINUIT strategies for dealing
most efficiently with fast FCNs (0), expensive FCNs (2)
and 'intermediate' FCNs (1). If None (the default) then use
the current global default value.
retry : int, optional (default=0)
Number of times to retry failed minimizations. The strategy is
incremented to a maximum of 2 from its initial value and remains at 2
for additional retries.
scan : bool, optional (default=False)
If True then run Minuit2's scan algorithm before running the main
``minimizer_algo`` ("Migrad").
print_level : int, optional (default=None)
The verbosity level for the minimizer algorithm.
If None (the default) then use the global default print level.
If negative then all non-fatal messages will be suppressed.
Returns
-------
minimizer : RooMinimizer
The minimizer. Get the RooFitResult with ``minimizer.save()``.
"""
llog = log['minimize']
min_opts = ROOT.Math.MinimizerOptions
if minimizer_type is None:
minimizer_type = min_opts.DefaultMinimizerType()
if minimizer_algo is None:
minimizer_algo = min_opts.DefaultMinimizerAlgo()
if strategy is None:
strategy = min_opts.DefaultStrategy()
if print_level is None:
print_level = min_opts.DefaultPrintLevel()
if print_level < 0:
msg_service = ROOT.RooMsgService.instance()
msg_level = msg_service.globalKillBelow()
msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL)
minim = Minimizer(func)
minim.setPrintLevel(print_level)
minim.setStrategy(strategy)
if scan:
llog.info("running scan algorithm ...")
minim.minimize('Minuit2', 'Scan')
llog.info("minimizing with {0} {1} using strategy {2}".format(
minimizer_type, minimizer_algo, strategy))
status = minim.minimize(minimizer_type, minimizer_algo)
iretry = 0
while iretry < retry and status not in (0, 1):
if strategy < 2:
strategy += 1
minim.setStrategy(strategy)
llog.warning("minimization failed with status {0:d}".format(status))
llog.info("retrying minimization with strategy {0:d}".format(strategy))
status = minim.minimize(minimizer_type, minimizer_algo)
if status in (0, 1):
llog.info("found minimum")
else:
llog.warning("minimization failed with status {0:d}".format(status))
if print_level < 0:
msg_service.setGlobalKillBelow(msg_level)
return minim
|
python
|
def minimize(func,
minimizer_type=None,
minimizer_algo=None,
strategy=None,
retry=0,
scan=False,
print_level=None):
"""
Minimize a RooAbsReal function
Parameters
----------
func : RooAbsReal
The function to minimize
minimizer_type : string, optional (default=None)
The minimizer type: "Minuit" or "Minuit2".
If None (the default) then use the current global default value.
minimizer_algo : string, optional (default=None)
The minimizer algorithm: "Migrad", etc.
If None (the default) then use the current global default value.
strategy : int, optional (default=None)
Set the MINUIT strategy. Accepted values
are 0, 1, and 2 and represent MINUIT strategies for dealing
most efficiently with fast FCNs (0), expensive FCNs (2)
and 'intermediate' FCNs (1). If None (the default) then use
the current global default value.
retry : int, optional (default=0)
Number of times to retry failed minimizations. The strategy is
incremented to a maximum of 2 from its initial value and remains at 2
for additional retries.
scan : bool, optional (default=False)
If True then run Minuit2's scan algorithm before running the main
``minimizer_algo`` ("Migrad").
print_level : int, optional (default=None)
The verbosity level for the minimizer algorithm.
If None (the default) then use the global default print level.
If negative then all non-fatal messages will be suppressed.
Returns
-------
minimizer : RooMinimizer
The minimizer. Get the RooFitResult with ``minimizer.save()``.
"""
llog = log['minimize']
min_opts = ROOT.Math.MinimizerOptions
if minimizer_type is None:
minimizer_type = min_opts.DefaultMinimizerType()
if minimizer_algo is None:
minimizer_algo = min_opts.DefaultMinimizerAlgo()
if strategy is None:
strategy = min_opts.DefaultStrategy()
if print_level is None:
print_level = min_opts.DefaultPrintLevel()
if print_level < 0:
msg_service = ROOT.RooMsgService.instance()
msg_level = msg_service.globalKillBelow()
msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL)
minim = Minimizer(func)
minim.setPrintLevel(print_level)
minim.setStrategy(strategy)
if scan:
llog.info("running scan algorithm ...")
minim.minimize('Minuit2', 'Scan')
llog.info("minimizing with {0} {1} using strategy {2}".format(
minimizer_type, minimizer_algo, strategy))
status = minim.minimize(minimizer_type, minimizer_algo)
iretry = 0
while iretry < retry and status not in (0, 1):
if strategy < 2:
strategy += 1
minim.setStrategy(strategy)
llog.warning("minimization failed with status {0:d}".format(status))
llog.info("retrying minimization with strategy {0:d}".format(strategy))
status = minim.minimize(minimizer_type, minimizer_algo)
if status in (0, 1):
llog.info("found minimum")
else:
llog.warning("minimization failed with status {0:d}".format(status))
if print_level < 0:
msg_service.setGlobalKillBelow(msg_level)
return minim
|
[
"def",
"minimize",
"(",
"func",
",",
"minimizer_type",
"=",
"None",
",",
"minimizer_algo",
"=",
"None",
",",
"strategy",
"=",
"None",
",",
"retry",
"=",
"0",
",",
"scan",
"=",
"False",
",",
"print_level",
"=",
"None",
")",
":",
"llog",
"=",
"log",
"[",
"'minimize'",
"]",
"min_opts",
"=",
"ROOT",
".",
"Math",
".",
"MinimizerOptions",
"if",
"minimizer_type",
"is",
"None",
":",
"minimizer_type",
"=",
"min_opts",
".",
"DefaultMinimizerType",
"(",
")",
"if",
"minimizer_algo",
"is",
"None",
":",
"minimizer_algo",
"=",
"min_opts",
".",
"DefaultMinimizerAlgo",
"(",
")",
"if",
"strategy",
"is",
"None",
":",
"strategy",
"=",
"min_opts",
".",
"DefaultStrategy",
"(",
")",
"if",
"print_level",
"is",
"None",
":",
"print_level",
"=",
"min_opts",
".",
"DefaultPrintLevel",
"(",
")",
"if",
"print_level",
"<",
"0",
":",
"msg_service",
"=",
"ROOT",
".",
"RooMsgService",
".",
"instance",
"(",
")",
"msg_level",
"=",
"msg_service",
".",
"globalKillBelow",
"(",
")",
"msg_service",
".",
"setGlobalKillBelow",
"(",
"ROOT",
".",
"RooFit",
".",
"FATAL",
")",
"minim",
"=",
"Minimizer",
"(",
"func",
")",
"minim",
".",
"setPrintLevel",
"(",
"print_level",
")",
"minim",
".",
"setStrategy",
"(",
"strategy",
")",
"if",
"scan",
":",
"llog",
".",
"info",
"(",
"\"running scan algorithm ...\"",
")",
"minim",
".",
"minimize",
"(",
"'Minuit2'",
",",
"'Scan'",
")",
"llog",
".",
"info",
"(",
"\"minimizing with {0} {1} using strategy {2}\"",
".",
"format",
"(",
"minimizer_type",
",",
"minimizer_algo",
",",
"strategy",
")",
")",
"status",
"=",
"minim",
".",
"minimize",
"(",
"minimizer_type",
",",
"minimizer_algo",
")",
"iretry",
"=",
"0",
"while",
"iretry",
"<",
"retry",
"and",
"status",
"not",
"in",
"(",
"0",
",",
"1",
")",
":",
"if",
"strategy",
"<",
"2",
":",
"strategy",
"+=",
"1",
"minim",
".",
"setStrategy",
"(",
"strategy",
")",
"llog",
".",
"warning",
"(",
"\"minimization failed with status {0:d}\"",
".",
"format",
"(",
"status",
")",
")",
"llog",
".",
"info",
"(",
"\"retrying minimization with strategy {0:d}\"",
".",
"format",
"(",
"strategy",
")",
")",
"status",
"=",
"minim",
".",
"minimize",
"(",
"minimizer_type",
",",
"minimizer_algo",
")",
"if",
"status",
"in",
"(",
"0",
",",
"1",
")",
":",
"llog",
".",
"info",
"(",
"\"found minimum\"",
")",
"else",
":",
"llog",
".",
"warning",
"(",
"\"minimization failed with status {0:d}\"",
".",
"format",
"(",
"status",
")",
")",
"if",
"print_level",
"<",
"0",
":",
"msg_service",
".",
"setGlobalKillBelow",
"(",
"msg_level",
")",
"return",
"minim"
] |
Minimize a RooAbsReal function
Parameters
----------
func : RooAbsReal
The function to minimize
minimizer_type : string, optional (default=None)
The minimizer type: "Minuit" or "Minuit2".
If None (the default) then use the current global default value.
minimizer_algo : string, optional (default=None)
The minimizer algorithm: "Migrad", etc.
If None (the default) then use the current global default value.
strategy : int, optional (default=None)
Set the MINUIT strategy. Accepted values
are 0, 1, and 2 and represent MINUIT strategies for dealing
most efficiently with fast FCNs (0), expensive FCNs (2)
and 'intermediate' FCNs (1). If None (the default) then use
the current global default value.
retry : int, optional (default=0)
Number of times to retry failed minimizations. The strategy is
incremented to a maximum of 2 from its initial value and remains at 2
for additional retries.
scan : bool, optional (default=False)
If True then run Minuit2's scan algorithm before running the main
``minimizer_algo`` ("Migrad").
print_level : int, optional (default=None)
The verbosity level for the minimizer algorithm.
If None (the default) then use the global default print level.
If negative then all non-fatal messages will be suppressed.
Returns
-------
minimizer : RooMinimizer
The minimizer. Get the RooFitResult with ``minimizer.save()``.
|
[
"Minimize",
"a",
"RooAbsReal",
"function"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stats/fit.py#L15-L113
|
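Stripped of RooFit, the retry logic in minimize above is an escalation loop: raise the strategy (capped at 2) and try again while the status is not acceptable. A generic sketch with invented names (run_with_retries and the toy attempt callable); this sketch advances its own attempt counter explicitly:

def run_with_retries(attempt, retry=2, strategy=0, ok=(0, 1)):
    # attempt: callable mapping a strategy (0, 1 or 2) to an integer status
    status = attempt(strategy)
    tries = 0
    while tries < retry and status not in ok:
        if strategy < 2:
            strategy += 1
        tries += 1
        status = attempt(strategy)
    return status, strategy

# toy "fit" that only converges once the strategy reaches 2
print(run_with_retries(lambda s: 0 if s == 2 else 4, retry=3))
# -> (0, 2)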
14,070
|
rootpy/rootpy
|
rootpy/stl.py
|
make_string
|
def make_string(obj):
"""
If ``obj`` is a string, return that, otherwise attempt to figure out the
name of a type.
"""
if inspect.isclass(obj):
if issubclass(obj, Object):
return obj._ROOT.__name__
if issubclass(obj, string_types):
return 'string'
return obj.__name__
if not isinstance(obj, string_types):
raise TypeError("expected string or class")
return obj
|
python
|
def make_string(obj):
"""
If ``obj`` is a string, return that, otherwise attempt to figure out the
name of a type.
"""
if inspect.isclass(obj):
if issubclass(obj, Object):
return obj._ROOT.__name__
if issubclass(obj, string_types):
return 'string'
return obj.__name__
if not isinstance(obj, string_types):
raise TypeError("expected string or class")
return obj
|
[
"def",
"make_string",
"(",
"obj",
")",
":",
"if",
"inspect",
".",
"isclass",
"(",
"obj",
")",
":",
"if",
"issubclass",
"(",
"obj",
",",
"Object",
")",
":",
"return",
"obj",
".",
"_ROOT",
".",
"__name__",
"if",
"issubclass",
"(",
"obj",
",",
"string_types",
")",
":",
"return",
"'string'",
"return",
"obj",
".",
"__name__",
"if",
"not",
"isinstance",
"(",
"obj",
",",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"\"expected string or class\"",
")",
"return",
"obj"
] |
If ``obj`` is a string, return that, otherwise attempt to figure out the
name of a type.
|
[
"If",
"obj",
"is",
"a",
"string",
"return",
"that",
"otherwise",
"attempt",
"to",
"figure",
"out",
"the",
"name",
"of",
"a",
"type",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L302-L315
|
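make_string above is a small class-or-string normaliser. A rootpy-free equivalent that only handles plain classes and str (type_name is an invented name):

import inspect

def type_name(obj):
    # accept either a type name or a class, and return the name
    if inspect.isclass(obj):
        if issubclass(obj, str):
            return "string"
        return obj.__name__
    if not isinstance(obj, str):
        raise TypeError("expected string or class")
    return obj

print(type_name(int), type_name(str), type_name("vector<float>"))
# -> int string vector<float>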
14,071
|
rootpy/rootpy
|
rootpy/stl.py
|
generate
|
def generate(declaration, headers=None, has_iterators=False):
"""Compile and load the reflection dictionary for a type.
If the requested dictionary has already been cached, then load that instead.
Parameters
----------
declaration : str
A type declaration (for example "vector<int>")
headers : str or list of str
A header file or list of header files required to compile the dictionary
for this type.
has_iterators : bool
If True, then include iterators in the dictionary generation.
"""
global NEW_DICTS
# FIXME: _rootpy_dictionary_already_exists returns false positives
# if a third-party module provides "incomplete" dictionaries.
#if compiled._rootpy_dictionary_already_exists(declaration):
# log.debug("generate({0}) => already available".format(declaration))
# return
log.debug("requesting dictionary for {0}".format(declaration))
if headers:
if isinstance(headers, string_types):
headers = sorted(headers.split(';'))
log.debug("using the headers {0}".format(', '.join(headers)))
unique_name = ';'.join([declaration] + headers)
else:
unique_name = declaration
unique_name = unique_name.replace(' ', '')
# If the library is already loaded, do nothing
if unique_name in LOADED_DICTS:
log.debug("dictionary for {0} is already loaded".format(declaration))
return
if sys.version_info[0] < 3:
libname = hashlib.sha512(unique_name).hexdigest()[:16]
else:
libname = hashlib.sha512(unique_name.encode('utf-8')).hexdigest()[:16]
libnameso = libname + ".so"
if ROOT.gROOT.GetVersionInt() < 53403:
# check for this class in the global TClass list and remove it
# fixes infinite recursion in ROOT < 5.34.03
# (exact ROOT versions where this is required is unknown)
cls = ROOT.gROOT.GetClass(declaration)
if cls and not cls.IsLoaded():
log.debug("removing {0} from gROOT.GetListOfClasses()".format(
declaration))
ROOT.gROOT.GetListOfClasses().Remove(cls)
# If a .so already exists for this class, use it.
if exists(pjoin(DICTS_PATH, libnameso)):
log.debug("loading previously generated dictionary for {0}"
.format(declaration))
if (ROOT.gInterpreter.Load(pjoin(DICTS_PATH, libnameso))
not in (0, 1)):
raise RuntimeError(
"failed to load the library for '{0}' @ {1}".format(
declaration, libname))
LOADED_DICTS[unique_name] = None
return
with lock(pjoin(DICTS_PATH, "lock"), poll_interval=5, max_age=60):
# This dict was not previously generated so we must create it now
log.info("generating dictionary for {0} ...".format(declaration))
includes = ''
if headers is not None:
for header in headers:
if re.match('^<.+>$', header):
includes += '#include {0}\n'.format(header)
else:
includes += '#include "{0}"\n'.format(header)
source = LINKDEF % locals()
sourcepath = os.path.join(DICTS_PATH, '{0}.C'.format(libname))
log.debug("source path: {0}".format(sourcepath))
with open(sourcepath, 'w') as sourcefile:
sourcefile.write(source)
log.debug("include path: {0}".format(
ROOT.gSystem.GetIncludePath()))
if (ROOT.gSystem.CompileMacro(
sourcepath, 'k-', libname, DICTS_PATH) != 1):
raise RuntimeError(
"failed to compile the library for '{0}'".format(
sourcepath))
LOADED_DICTS[unique_name] = None
NEW_DICTS = True
|
python
|
def generate(declaration, headers=None, has_iterators=False):
"""Compile and load the reflection dictionary for a type.
If the requested dictionary has already been cached, then load that instead.
Parameters
----------
declaration : str
A type declaration (for example "vector<int>")
headers : str or list of str
A header file or list of header files required to compile the dictionary
for this type.
has_iterators : bool
If True, then include iterators in the dictionary generation.
"""
global NEW_DICTS
# FIXME: _rootpy_dictionary_already_exists returns false positives
# if a third-party module provides "incomplete" dictionaries.
#if compiled._rootpy_dictionary_already_exists(declaration):
# log.debug("generate({0}) => already available".format(declaration))
# return
log.debug("requesting dictionary for {0}".format(declaration))
if headers:
if isinstance(headers, string_types):
headers = sorted(headers.split(';'))
log.debug("using the headers {0}".format(', '.join(headers)))
unique_name = ';'.join([declaration] + headers)
else:
unique_name = declaration
unique_name = unique_name.replace(' ', '')
# If the library is already loaded, do nothing
if unique_name in LOADED_DICTS:
log.debug("dictionary for {0} is already loaded".format(declaration))
return
if sys.version_info[0] < 3:
libname = hashlib.sha512(unique_name).hexdigest()[:16]
else:
libname = hashlib.sha512(unique_name.encode('utf-8')).hexdigest()[:16]
libnameso = libname + ".so"
if ROOT.gROOT.GetVersionInt() < 53403:
# check for this class in the global TClass list and remove it
# fixes infinite recursion in ROOT < 5.34.03
# (exact ROOT versions where this is required is unknown)
cls = ROOT.gROOT.GetClass(declaration)
if cls and not cls.IsLoaded():
log.debug("removing {0} from gROOT.GetListOfClasses()".format(
declaration))
ROOT.gROOT.GetListOfClasses().Remove(cls)
# If a .so already exists for this class, use it.
if exists(pjoin(DICTS_PATH, libnameso)):
log.debug("loading previously generated dictionary for {0}"
.format(declaration))
if (ROOT.gInterpreter.Load(pjoin(DICTS_PATH, libnameso))
not in (0, 1)):
raise RuntimeError(
"failed to load the library for '{0}' @ {1}".format(
declaration, libname))
LOADED_DICTS[unique_name] = None
return
with lock(pjoin(DICTS_PATH, "lock"), poll_interval=5, max_age=60):
# This dict was not previously generated so we must create it now
log.info("generating dictionary for {0} ...".format(declaration))
includes = ''
if headers is not None:
for header in headers:
if re.match('^<.+>$', header):
includes += '#include {0}\n'.format(header)
else:
includes += '#include "{0}"\n'.format(header)
source = LINKDEF % locals()
sourcepath = os.path.join(DICTS_PATH, '{0}.C'.format(libname))
log.debug("source path: {0}".format(sourcepath))
with open(sourcepath, 'w') as sourcefile:
sourcefile.write(source)
log.debug("include path: {0}".format(
ROOT.gSystem.GetIncludePath()))
if (ROOT.gSystem.CompileMacro(
sourcepath, 'k-', libname, DICTS_PATH) != 1):
raise RuntimeError(
"failed to compile the library for '{0}'".format(
sourcepath))
LOADED_DICTS[unique_name] = None
NEW_DICTS = True
|
[
"def",
"generate",
"(",
"declaration",
",",
"headers",
"=",
"None",
",",
"has_iterators",
"=",
"False",
")",
":",
"global",
"NEW_DICTS",
"# FIXME: _rootpy_dictionary_already_exists returns false positives",
"# if a third-party module provides \"incomplete\" dictionaries.",
"#if compiled._rootpy_dictionary_already_exists(declaration):",
"# log.debug(\"generate({0}) => already available\".format(declaration))",
"# return",
"log",
".",
"debug",
"(",
"\"requesting dictionary for {0}\"",
".",
"format",
"(",
"declaration",
")",
")",
"if",
"headers",
":",
"if",
"isinstance",
"(",
"headers",
",",
"string_types",
")",
":",
"headers",
"=",
"sorted",
"(",
"headers",
".",
"split",
"(",
"';'",
")",
")",
"log",
".",
"debug",
"(",
"\"using the headers {0}\"",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"headers",
")",
")",
")",
"unique_name",
"=",
"';'",
".",
"join",
"(",
"[",
"declaration",
"]",
"+",
"headers",
")",
"else",
":",
"unique_name",
"=",
"declaration",
"unique_name",
"=",
"unique_name",
".",
"replace",
"(",
"' '",
",",
"''",
")",
"# If the library is already loaded, do nothing",
"if",
"unique_name",
"in",
"LOADED_DICTS",
":",
"log",
".",
"debug",
"(",
"\"dictionary for {0} is already loaded\"",
".",
"format",
"(",
"declaration",
")",
")",
"return",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"libname",
"=",
"hashlib",
".",
"sha512",
"(",
"unique_name",
")",
".",
"hexdigest",
"(",
")",
"[",
":",
"16",
"]",
"else",
":",
"libname",
"=",
"hashlib",
".",
"sha512",
"(",
"unique_name",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
"[",
":",
"16",
"]",
"libnameso",
"=",
"libname",
"+",
"\".so\"",
"if",
"ROOT",
".",
"gROOT",
".",
"GetVersionInt",
"(",
")",
"<",
"53403",
":",
"# check for this class in the global TClass list and remove it",
"# fixes infinite recursion in ROOT < 5.34.03",
"# (exact ROOT versions where this is required is unknown)",
"cls",
"=",
"ROOT",
".",
"gROOT",
".",
"GetClass",
"(",
"declaration",
")",
"if",
"cls",
"and",
"not",
"cls",
".",
"IsLoaded",
"(",
")",
":",
"log",
".",
"debug",
"(",
"\"removing {0} from gROOT.GetListOfClasses()\"",
".",
"format",
"(",
"declaration",
")",
")",
"ROOT",
".",
"gROOT",
".",
"GetListOfClasses",
"(",
")",
".",
"Remove",
"(",
"cls",
")",
"# If a .so already exists for this class, use it.",
"if",
"exists",
"(",
"pjoin",
"(",
"DICTS_PATH",
",",
"libnameso",
")",
")",
":",
"log",
".",
"debug",
"(",
"\"loading previously generated dictionary for {0}\"",
".",
"format",
"(",
"declaration",
")",
")",
"if",
"(",
"ROOT",
".",
"gInterpreter",
".",
"Load",
"(",
"pjoin",
"(",
"DICTS_PATH",
",",
"libnameso",
")",
")",
"not",
"in",
"(",
"0",
",",
"1",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"\"failed to load the library for '{0}' @ {1}\"",
".",
"format",
"(",
"declaration",
",",
"libname",
")",
")",
"LOADED_DICTS",
"[",
"unique_name",
"]",
"=",
"None",
"return",
"with",
"lock",
"(",
"pjoin",
"(",
"DICTS_PATH",
",",
"\"lock\"",
")",
",",
"poll_interval",
"=",
"5",
",",
"max_age",
"=",
"60",
")",
":",
"# This dict was not previously generated so we must create it now",
"log",
".",
"info",
"(",
"\"generating dictionary for {0} ...\"",
".",
"format",
"(",
"declaration",
")",
")",
"includes",
"=",
"''",
"if",
"headers",
"is",
"not",
"None",
":",
"for",
"header",
"in",
"headers",
":",
"if",
"re",
".",
"match",
"(",
"'^<.+>$'",
",",
"header",
")",
":",
"includes",
"+=",
"'#include {0}\\n'",
".",
"format",
"(",
"header",
")",
"else",
":",
"includes",
"+=",
"'#include \"{0}\"\\n'",
".",
"format",
"(",
"header",
")",
"source",
"=",
"LINKDEF",
"%",
"locals",
"(",
")",
"sourcepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"DICTS_PATH",
",",
"'{0}.C'",
".",
"format",
"(",
"libname",
")",
")",
"log",
".",
"debug",
"(",
"\"source path: {0}\"",
".",
"format",
"(",
"sourcepath",
")",
")",
"with",
"open",
"(",
"sourcepath",
",",
"'w'",
")",
"as",
"sourcefile",
":",
"sourcefile",
".",
"write",
"(",
"source",
")",
"log",
".",
"debug",
"(",
"\"include path: {0}\"",
".",
"format",
"(",
"ROOT",
".",
"gSystem",
".",
"GetIncludePath",
"(",
")",
")",
")",
"if",
"(",
"ROOT",
".",
"gSystem",
".",
"CompileMacro",
"(",
"sourcepath",
",",
"'k-'",
",",
"libname",
",",
"DICTS_PATH",
")",
"!=",
"1",
")",
":",
"raise",
"RuntimeError",
"(",
"\"failed to compile the library for '{0}'\"",
".",
"format",
"(",
"sourcepath",
")",
")",
"LOADED_DICTS",
"[",
"unique_name",
"]",
"=",
"None",
"NEW_DICTS",
"=",
"True"
] |
Compile and load the reflection dictionary for a type.
If the requested dictionary has already been cached, then load that instead.
Parameters
----------
declaration : str
A type declaration (for example "vector<int>")
headers : str or list of str
A header file or list of header files required to compile the dictionary
for this type.
has_iterators : bool
If True, then include iterators in the dictionary generation.
|
[
"Compile",
"and",
"load",
"the",
"reflection",
"dictionary",
"for",
"a",
"type",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L318-L406
|
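Much of generate above is ROOT bookkeeping, but the cache key it builds is plain hashlib and can be previewed standalone. dict_libname is an invented helper that reproduces the declaration-plus-headers hashing from the record (Python 3 branch only):

import hashlib

def dict_libname(declaration, headers=None):
    # normalise the request into a unique name, then hash it into a short
    # filesystem-safe library file name
    if headers:
        if isinstance(headers, str):
            headers = sorted(headers.split(";"))
        unique_name = ";".join([declaration] + headers)
    else:
        unique_name = declaration
    unique_name = unique_name.replace(" ", "")
    return hashlib.sha512(unique_name.encode("utf-8")).hexdigest()[:16] + ".so"

print(dict_libname("map<string,vector<int> >", "<map>;<string>;<vector>"))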
14,072
|
rootpy/rootpy
|
rootpy/stl.py
|
CPPType.ensure_built
|
def ensure_built(self, headers=None):
"""
Make sure that a dictionary exists for this type.
"""
if not self.params:
return
else:
for child in self.params:
child.ensure_built(headers=headers)
if headers is None:
headers = self.guess_headers
generate(str(self), headers,
has_iterators=self.name in HAS_ITERATORS)
|
python
|
def ensure_built(self, headers=None):
"""
Make sure that a dictionary exists for this type.
"""
if not self.params:
return
else:
for child in self.params:
child.ensure_built(headers=headers)
if headers is None:
headers = self.guess_headers
generate(str(self), headers,
has_iterators=self.name in HAS_ITERATORS)
|
[
"def",
"ensure_built",
"(",
"self",
",",
"headers",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"params",
":",
"return",
"else",
":",
"for",
"child",
"in",
"self",
".",
"params",
":",
"child",
".",
"ensure_built",
"(",
"headers",
"=",
"headers",
")",
"if",
"headers",
"is",
"None",
":",
"headers",
"=",
"self",
".",
"guess_headers",
"generate",
"(",
"str",
"(",
"self",
")",
",",
"headers",
",",
"has_iterators",
"=",
"self",
".",
"name",
"in",
"HAS_ITERATORS",
")"
] |
Make sure that a dictionary exists for this type.
|
[
"Make",
"sure",
"that",
"a",
"dictionary",
"exists",
"for",
"this",
"type",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L202-L214
|
14,073
|
rootpy/rootpy
|
rootpy/stl.py
|
CPPType.guess_headers
|
def guess_headers(self):
"""
Attempt to guess what headers may be required in order to use this
type. Returns `guess_headers` of all children recursively.
* If the typename is in the :const:`KNOWN_TYPES` dictionary, use the
header specified there
* If it's an STL type, include <{type}>
* If it exists in the ROOT namespace and begins with T,
include <{type}.h>
"""
name = self.name.replace("*", "")
headers = []
if name in KNOWN_TYPES:
headers.append(KNOWN_TYPES[name])
elif name in STL:
headers.append('<{0}>'.format(name))
elif hasattr(ROOT, name) and name.startswith("T"):
headers.append('<{0}.h>'.format(name))
elif '::' in name:
headers.append('<{0}.h>'.format(name.replace('::', '/')))
elif name == 'allocator':
headers.append('<memory>')
else:
try:
# is this just a basic type?
CPPGrammar.BASIC_TYPE.parseString(name, parseAll=True)
except ParseException as e:
# nope... I don't know what it is
log.warning(
"unable to guess headers required for {0}".format(name))
if self.params:
for child in self.params:
headers.extend(child.guess_headers)
# remove duplicates
return list(set(headers))
|
python
|
def guess_headers(self):
"""
Attempt to guess what headers may be required in order to use this
type. Returns `guess_headers` of all children recursively.
* If the typename is in the :const:`KNOWN_TYPES` dictionary, use the
header specified there
* If it's an STL type, include <{type}>
* If it exists in the ROOT namespace and begins with T,
include <{type}.h>
"""
name = self.name.replace("*", "")
headers = []
if name in KNOWN_TYPES:
headers.append(KNOWN_TYPES[name])
elif name in STL:
headers.append('<{0}>'.format(name))
elif hasattr(ROOT, name) and name.startswith("T"):
headers.append('<{0}.h>'.format(name))
elif '::' in name:
headers.append('<{0}.h>'.format(name.replace('::', '/')))
elif name == 'allocator':
headers.append('<memory>')
else:
try:
# is this just a basic type?
CPPGrammar.BASIC_TYPE.parseString(name, parseAll=True)
except ParseException as e:
# nope... I don't know what it is
log.warning(
"unable to guess headers required for {0}".format(name))
if self.params:
for child in self.params:
headers.extend(child.guess_headers)
# remove duplicates
return list(set(headers))
|
[
"def",
"guess_headers",
"(",
"self",
")",
":",
"name",
"=",
"self",
".",
"name",
".",
"replace",
"(",
"\"*\"",
",",
"\"\"",
")",
"headers",
"=",
"[",
"]",
"if",
"name",
"in",
"KNOWN_TYPES",
":",
"headers",
".",
"append",
"(",
"KNOWN_TYPES",
"[",
"name",
"]",
")",
"elif",
"name",
"in",
"STL",
":",
"headers",
".",
"append",
"(",
"'<{0}>'",
".",
"format",
"(",
"name",
")",
")",
"elif",
"hasattr",
"(",
"ROOT",
",",
"name",
")",
"and",
"name",
".",
"startswith",
"(",
"\"T\"",
")",
":",
"headers",
".",
"append",
"(",
"'<{0}.h>'",
".",
"format",
"(",
"name",
")",
")",
"elif",
"'::'",
"in",
"name",
":",
"headers",
".",
"append",
"(",
"'<{0}.h>'",
".",
"format",
"(",
"name",
".",
"replace",
"(",
"'::'",
",",
"'/'",
")",
")",
")",
"elif",
"name",
"==",
"'allocator'",
":",
"headers",
".",
"append",
"(",
"'<memory>'",
")",
"else",
":",
"try",
":",
"# is this just a basic type?",
"CPPGrammar",
".",
"BASIC_TYPE",
".",
"parseString",
"(",
"name",
",",
"parseAll",
"=",
"True",
")",
"except",
"ParseException",
"as",
"e",
":",
"# nope... I don't know what it is",
"log",
".",
"warning",
"(",
"\"unable to guess headers required for {0}\"",
".",
"format",
"(",
"name",
")",
")",
"if",
"self",
".",
"params",
":",
"for",
"child",
"in",
"self",
".",
"params",
":",
"headers",
".",
"extend",
"(",
"child",
".",
"guess_headers",
")",
"# remove duplicates",
"return",
"list",
"(",
"set",
"(",
"headers",
")",
")"
] |
Attempt to guess what headers may be required in order to use this
type. Returns `guess_headers` of all children recursively.
* If the typename is in the :const:`KNOWN_TYPES` dictionary, use the
header specified there
* If it's an STL type, include <{type}>
* If it exists in the ROOT namespace and begins with T,
include <{type}.h>
|
[
"Attempt",
"to",
"guess",
"what",
"headers",
"may",
"be",
"required",
"in",
"order",
"to",
"use",
"this",
"type",
".",
"Returns",
"guess_headers",
"of",
"all",
"children",
"recursively",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L217-L252
|
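The lookup order in CPPType.guess_headers above can be mimicked without ROOT or pyparsing. A reduced sketch that drops the KNOWN_TYPES table, the ROOT T* check and the basic-type fallback; everything below apart from that precedence is illustrative:

STL = {"vector", "map", "set", "list", "deque", "string", "pair"}

def guess_headers(name):
    # simplified precedence from the record above
    name = name.replace("*", "")
    headers = []
    if name in STL:
        headers.append("<{0}>".format(name))
    elif "::" in name:
        headers.append("<{0}.h>".format(name.replace("::", "/")))
    elif name == "allocator":
        headers.append("<memory>")
    return headers

print(guess_headers("vector"), guess_headers("Math::LorentzVector"))
# -> ['<vector>'] ['<Math/LorentzVector.h>']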
14,074
|
rootpy/rootpy
|
rootpy/stl.py
|
CPPType.cls
|
def cls(self):
"""
Return the class definition for this type
"""
# TODO: register the resulting type?
return SmartTemplate(self.name)(", ".join(map(str, self.params)))
|
python
|
def cls(self):
"""
Return the class definition for this type
"""
# TODO: register the resulting type?
return SmartTemplate(self.name)(", ".join(map(str, self.params)))
|
[
"def",
"cls",
"(",
"self",
")",
":",
"# TODO: register the resulting type?",
"return",
"SmartTemplate",
"(",
"self",
".",
"name",
")",
"(",
"\", \"",
".",
"join",
"(",
"map",
"(",
"str",
",",
"self",
".",
"params",
")",
")",
")"
] |
Return the class definition for this type
|
[
"Return",
"the",
"class",
"definition",
"for",
"this",
"type"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L255-L260
|
14,075
|
rootpy/rootpy
|
rootpy/stl.py
|
CPPType.from_string
|
def from_string(cls, string):
"""
Parse ``string`` into a CPPType instance
"""
cls.TYPE.setParseAction(cls.make)
try:
return cls.TYPE.parseString(string, parseAll=True)[0]
except ParseException:
log.error("Failed to parse '{0}'".format(string))
raise
|
python
|
def from_string(cls, string):
"""
Parse ``string`` into a CPPType instance
"""
cls.TYPE.setParseAction(cls.make)
try:
return cls.TYPE.parseString(string, parseAll=True)[0]
except ParseException:
log.error("Failed to parse '{0}'".format(string))
raise
|
[
"def",
"from_string",
"(",
"cls",
",",
"string",
")",
":",
"cls",
".",
"TYPE",
".",
"setParseAction",
"(",
"cls",
".",
"make",
")",
"try",
":",
"return",
"cls",
".",
"TYPE",
".",
"parseString",
"(",
"string",
",",
"parseAll",
"=",
"True",
")",
"[",
"0",
"]",
"except",
"ParseException",
":",
"log",
".",
"error",
"(",
"\"Failed to parse '{0}'\"",
".",
"format",
"(",
"string",
")",
")",
"raise"
] |
Parse ``string`` into a CPPType instance
|
[
"Parse",
"string",
"into",
"a",
"CPPType",
"instance"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/stl.py#L275-L284
|
14,076
|
rootpy/rootpy
|
rootpy/utils/cinterface.py
|
callback
|
def callback(cfunc):
"""
Turn a ctypes CFUNCTYPE instance into a value which can be passed into PyROOT
"""
# Note:
# ROOT wants a c_voidp whose addressof() == the call site of the target
# function. This hackery is necessary to achieve that.
return C.c_voidp.from_address(C.cast(cfunc, C.c_voidp).value)
|
python
|
def callback(cfunc):
"""
Turn a ctypes CFUNCTYPE instance into a value which can be passed into PyROOT
"""
# Note:
# ROOT wants a c_voidp whose addressof() == the call site of the target
# function. This hackery is necessary to achieve that.
return C.c_voidp.from_address(C.cast(cfunc, C.c_voidp).value)
|
[
"def",
"callback",
"(",
"cfunc",
")",
":",
"# Note:",
"# ROOT wants a c_voidp whose addressof() == the call site of the target",
"# function. This hackery is necessary to achieve that.",
"return",
"C",
".",
"c_voidp",
".",
"from_address",
"(",
"C",
".",
"cast",
"(",
"cfunc",
",",
"C",
".",
"c_voidp",
")",
".",
"value",
")"
] |
Turn a ctypes CFUNCTYPE instance into a value which can be passed into PyROOT
|
[
"Turn",
"a",
"ctypes",
"CFUNCTYPE",
"instance",
"into",
"a",
"value",
"which",
"can",
"be",
"passed",
"into",
"PyROOT"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/cinterface.py#L21-L28
|
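callback above boils down to a ctypes cast from a CFUNCTYPE object to its raw call address. A self-contained demonstration of just that cast, with no ROOT involved (the twice callback is invented for the example):

import ctypes

CFUNC = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int)

@CFUNC
def twice(x):
    return 2 * x

# the same cast-through-void* used in the record: recover the call address
address = ctypes.cast(twice, ctypes.c_void_p).value
print(hex(address), twice(21))  # prints the address and 42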
14,077
|
rootpy/rootpy
|
rootpy/utils/cinterface.py
|
objectproxy_realaddress
|
def objectproxy_realaddress(obj):
"""
Obtain a real address as an integer from an objectproxy.
"""
voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
return C.addressof(C.c_char.from_buffer(voidp))
|
python
|
def objectproxy_realaddress(obj):
"""
Obtain a real address as an integer from an objectproxy.
"""
voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
return C.addressof(C.c_char.from_buffer(voidp))
|
[
"def",
"objectproxy_realaddress",
"(",
"obj",
")",
":",
"voidp",
"=",
"QROOT",
".",
"TPython",
".",
"ObjectProxy_AsVoidPtr",
"(",
"obj",
")",
"return",
"C",
".",
"addressof",
"(",
"C",
".",
"c_char",
".",
"from_buffer",
"(",
"voidp",
")",
")"
] |
Obtain a real address as an integer from an objectproxy.
|
[
"Obtain",
"a",
"real",
"address",
"as",
"an",
"integer",
"from",
"an",
"objectproxy",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/utils/cinterface.py#L31-L36
|
14,078
|
rootpy/rootpy
|
rootpy/plotting/style/__init__.py
|
set_style
|
def set_style(style, mpl=False, **kwargs):
"""
If mpl is False accept either style name or a TStyle instance.
If mpl is True accept either style name or a matplotlib.rcParams-like
dictionary
"""
if mpl:
import matplotlib as mpl
style_dictionary = {}
if isinstance(style, string_types):
style_dictionary = get_style(style, mpl=True, **kwargs)
log.info("using matplotlib style '{0}'".format(style))
elif isinstance(style, dict):
style_dictionary = style
log.info("using user-defined matplotlib style")
else:
raise TypeError("style must be a matplotlib style name or dict")
for k, v in style_dictionary.items():
mpl.rcParams[k] = v
else:
if isinstance(style, string_types):
style = get_style(style, **kwargs)
log.info("using ROOT style '{0}'".format(style.GetName()))
style.cd()
|
python
|
def set_style(style, mpl=False, **kwargs):
"""
If mpl is False accept either style name or a TStyle instance.
If mpl is True accept either style name or a matplotlib.rcParams-like
dictionary
"""
if mpl:
import matplotlib as mpl
style_dictionary = {}
if isinstance(style, string_types):
style_dictionary = get_style(style, mpl=True, **kwargs)
log.info("using matplotlib style '{0}'".format(style))
elif isinstance(style, dict):
style_dictionary = style
log.info("using user-defined matplotlib style")
else:
raise TypeError("style must be a matplotlib style name or dict")
for k, v in style_dictionary.items():
mpl.rcParams[k] = v
else:
if isinstance(style, string_types):
style = get_style(style, **kwargs)
log.info("using ROOT style '{0}'".format(style.GetName()))
style.cd()
|
[
"def",
"set_style",
"(",
"style",
",",
"mpl",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"mpl",
":",
"import",
"matplotlib",
"as",
"mpl",
"style_dictionary",
"=",
"{",
"}",
"if",
"isinstance",
"(",
"style",
",",
"string_types",
")",
":",
"style_dictionary",
"=",
"get_style",
"(",
"style",
",",
"mpl",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
"log",
".",
"info",
"(",
"\"using matplotlib style '{0}'\"",
".",
"format",
"(",
"style",
")",
")",
"elif",
"isinstance",
"(",
"style",
",",
"dict",
")",
":",
"style_dictionary",
"=",
"style",
"log",
".",
"info",
"(",
"\"using user-defined matplotlib style\"",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"style must be a matplotlib style name or dict\"",
")",
"for",
"k",
",",
"v",
"in",
"style_dictionary",
".",
"items",
"(",
")",
":",
"mpl",
".",
"rcParams",
"[",
"k",
"]",
"=",
"v",
"else",
":",
"if",
"isinstance",
"(",
"style",
",",
"string_types",
")",
":",
"style",
"=",
"get_style",
"(",
"style",
",",
"*",
"*",
"kwargs",
")",
"log",
".",
"info",
"(",
"\"using ROOT style '{0}'\"",
".",
"format",
"(",
"style",
".",
"GetName",
"(",
")",
")",
")",
"style",
".",
"cd",
"(",
")"
] |
If mpl is False accept either style name or a TStyle instance.
If mpl is True accept either style name or a matplotlib.rcParams-like
dictionary
|
[
"If",
"mpl",
"is",
"False",
"accept",
"either",
"style",
"name",
"or",
"a",
"TStyle",
"instance",
".",
"If",
"mpl",
"is",
"True",
"accept",
"either",
"style",
"name",
"or",
"a",
"matplotlib",
".",
"rcParams",
"-",
"like",
"dictionary"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/plotting/style/__init__.py#L56-L80
|
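A short sketch of both modes of set_style, assuming it is importable from rootpy.plotting.style (the record's path) and that a style named 'ATLAS' has been registered; the dict values are illustrative only:

from rootpy.plotting.style import set_style

# ROOT mode: look up a registered TStyle by name and make it current
set_style('ATLAS')

# matplotlib mode: pass a registered name or an rcParams-like dict directly
set_style({'font.size': 14, 'figure.figsize': (6, 4)}, mpl=True)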
14,079
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.cd_previous
|
def cd_previous(self):
"""
cd to the gDirectory before this file was open.
"""
if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT):
return False
if isinstance(self._prev_dir, ROOT.TFile):
if self._prev_dir.IsOpen() and self._prev_dir.IsWritable():
self._prev_dir.cd()
return True
return False
if not self._prev_dir.IsWritable():
# avoid warning from ROOT stating file is not writable
return False
prev_file = self._prev_dir.GetFile()
if prev_file and prev_file.IsOpen():
self._prev_dir.cd()
return True
return False
|
python
|
def cd_previous(self):
"""
cd to the gDirectory before this file was open.
"""
if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT):
return False
if isinstance(self._prev_dir, ROOT.TFile):
if self._prev_dir.IsOpen() and self._prev_dir.IsWritable():
self._prev_dir.cd()
return True
return False
if not self._prev_dir.IsWritable():
# avoid warning from ROOT stating file is not writable
return False
prev_file = self._prev_dir.GetFile()
if prev_file and prev_file.IsOpen():
self._prev_dir.cd()
return True
return False
|
[
"def",
"cd_previous",
"(",
"self",
")",
":",
"if",
"self",
".",
"_prev_dir",
"is",
"None",
"or",
"isinstance",
"(",
"self",
".",
"_prev_dir",
",",
"ROOT",
".",
"TROOT",
")",
":",
"return",
"False",
"if",
"isinstance",
"(",
"self",
".",
"_prev_dir",
",",
"ROOT",
".",
"TFile",
")",
":",
"if",
"self",
".",
"_prev_dir",
".",
"IsOpen",
"(",
")",
"and",
"self",
".",
"_prev_dir",
".",
"IsWritable",
"(",
")",
":",
"self",
".",
"_prev_dir",
".",
"cd",
"(",
")",
"return",
"True",
"return",
"False",
"if",
"not",
"self",
".",
"_prev_dir",
".",
"IsWritable",
"(",
")",
":",
"# avoid warning from ROOT stating file is not writable",
"return",
"False",
"prev_file",
"=",
"self",
".",
"_prev_dir",
".",
"GetFile",
"(",
")",
"if",
"prev_file",
"and",
"prev_file",
".",
"IsOpen",
"(",
")",
":",
"self",
".",
"_prev_dir",
".",
"cd",
"(",
")",
"return",
"True",
"return",
"False"
] |
cd to the gDirectory before this file was open.
|
[
"cd",
"to",
"the",
"gDirectory",
"before",
"this",
"file",
"was",
"open",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L229-L247
|
14,080
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.Close
|
def Close(self, *args):
"""
Like ROOT's Close but reverts to the gDirectory before this file was
opened.
"""
super(_DirectoryBase, self).Close(*args)
return self.cd_previous()
|
python
|
def Close(self, *args):
"""
Like ROOT's Close but reverts to the gDirectory before this file was
opened.
"""
super(_DirectoryBase, self).Close(*args)
return self.cd_previous()
|
[
"def",
"Close",
"(",
"self",
",",
"*",
"args",
")",
":",
"super",
"(",
"_DirectoryBase",
",",
"self",
")",
".",
"Close",
"(",
"*",
"args",
")",
"return",
"self",
".",
"cd_previous",
"(",
")"
] |
Like ROOT's Close but reverts to the gDirectory before this file was
opened.
|
[
"Like",
"ROOT",
"s",
"Close",
"but",
"reverts",
"to",
"the",
"gDirectory",
"before",
"this",
"file",
"was",
"opened",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L249-L255
|
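A sketch of the gDirectory restoration that Close provides via cd_previous above, assuming root_open is available from rootpy.io; 'data.root' is a hypothetical file name:

import ROOT
from rootpy.io import root_open

print(ROOT.gDirectory.GetName())   # current directory before the open
f = root_open('data.root')         # hypothetical file; opening cd's into it
print(ROOT.gDirectory.GetName())   # now the file itself
f.Close()                          # reverts to the previous gDirectory
print(ROOT.gDirectory.GetName())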
14,081
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.keys
|
def keys(self, latest=False):
"""
Return a list of the keys in this directory.
Parameters
----------
latest : bool, optional (default=False)
If True then return a list of keys with unique names where only the
key with the highest cycle number is included where multiple keys
exist with the same name.
Returns
-------
keys : list
List of keys
"""
if latest:
keys = {}
for key in self.keys():
name = key.GetName()
if name in keys:
if key.GetCycle() > keys[name].GetCycle():
keys[name] = key
else:
keys[name] = key
return keys.values()
return [asrootpy(key) for key in self.GetListOfKeys()]
|
python
|
def keys(self, latest=False):
"""
Return a list of the keys in this directory.
Parameters
----------
latest : bool, optional (default=False)
If True then return a list of keys with unique names where only the
key with the highest cycle number is included where multiple keys
exist with the same name.
Returns
-------
keys : list
List of keys
"""
if latest:
keys = {}
for key in self.keys():
name = key.GetName()
if name in keys:
if key.GetCycle() > keys[name].GetCycle():
keys[name] = key
else:
keys[name] = key
return keys.values()
return [asrootpy(key) for key in self.GetListOfKeys()]
|
[
"def",
"keys",
"(",
"self",
",",
"latest",
"=",
"False",
")",
":",
"if",
"latest",
":",
"keys",
"=",
"{",
"}",
"for",
"key",
"in",
"self",
".",
"keys",
"(",
")",
":",
"name",
"=",
"key",
".",
"GetName",
"(",
")",
"if",
"name",
"in",
"keys",
":",
"if",
"key",
".",
"GetCycle",
"(",
")",
">",
"keys",
"[",
"name",
"]",
".",
"GetCycle",
"(",
")",
":",
"keys",
"[",
"name",
"]",
"=",
"key",
"else",
":",
"keys",
"[",
"name",
"]",
"=",
"key",
"return",
"keys",
".",
"values",
"(",
")",
"return",
"[",
"asrootpy",
"(",
"key",
")",
"for",
"key",
"in",
"self",
".",
"GetListOfKeys",
"(",
")",
"]"
] |
Return a list of the keys in this directory.
Parameters
----------
latest : bool, optional (default=False)
If True then return a list of keys with unique names where only the
key with the highest cycle number is included where multiple keys
exist with the same name.
Returns
-------
keys : list
List of keys
|
[
"Return",
"a",
"list",
"of",
"the",
"keys",
"in",
"this",
"directory",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L289-L318
|
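A sketch of keys(latest=True), which collapses multiple cycles of the same name down to the newest one; 'histos.root' is a hypothetical file and root_open from rootpy.io is assumed:

from rootpy.io import root_open

f = root_open('histos.root')        # hypothetical file
for key in f.keys(latest=True):
    # one key per name, keeping only the highest cycle number
    print(key.GetName(), key.GetCycle())
f.Close()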
14,082
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.Get
|
def Get(self, path, rootpy=True, **kwargs):
"""
Return the requested object cast as its corresponding subclass in
rootpy if one exists and ``rootpy=True``, otherwise return the
unadulterated TObject.
"""
thing = super(_DirectoryBase, self).Get(path)
if not thing:
raise DoesNotExist
# Ensure that the file we took the object from is alive at least as
# long as the object being taken from it.
# Note, Python does *not* own `thing`, it is ROOT's responsibility to
# delete it in the C++ sense. (SetOwnership is False). However, ROOT
# will delete the object when the TFile's destructor is run.
# Therefore, when `thing` goes out of scope and the file referred to
# by `this` has no references left, the file is destructed and calls
# `thing`'s delete.
# (this is thanks to the fact that weak referents (used by keepalive)
# are notified when they are dead).
keepalive(thing, self)
if rootpy:
return asrootpy(thing, **kwargs)
return thing
|
python
|
def Get(self, path, rootpy=True, **kwargs):
"""
Return the requested object cast as its corresponding subclass in
rootpy if one exists and ``rootpy=True``, otherwise return the
unadulterated TObject.
"""
thing = super(_DirectoryBase, self).Get(path)
if not thing:
raise DoesNotExist
# Ensure that the file we took the object from is alive at least as
# long as the object being taken from it.
# Note, Python does *not* own `thing`, it is ROOT's responsibility to
# delete it in the C++ sense. (SetOwnership is False). However, ROOT
# will delete the object when the TFile's destructor is run.
# Therefore, when `thing` goes out of scope and the file referred to
# by `this` has no references left, the file is destructed and calls
# `thing`'s delete.
# (this is thanks to the fact that weak referents (used by keepalive)
# are notified when they are dead).
keepalive(thing, self)
if rootpy:
return asrootpy(thing, **kwargs)
return thing
|
[
"def",
"Get",
"(",
"self",
",",
"path",
",",
"rootpy",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"thing",
"=",
"super",
"(",
"_DirectoryBase",
",",
"self",
")",
".",
"Get",
"(",
"path",
")",
"if",
"not",
"thing",
":",
"raise",
"DoesNotExist",
"# Ensure that the file we took the object from is alive at least as",
"# long as the object being taken from it.",
"# Note, Python does *not* own `thing`, it is ROOT's responsibility to",
"# delete it in the C++ sense. (SetOwnership is False). However, ROOT",
"# will delete the object when the TFile's destructor is run.",
"# Therefore, when `thing` goes out of scope and the file referred to",
"# by `this` has no references left, the file is destructed and calls",
"# `thing`'s delete.",
"# (this is thanks to the fact that weak referents (used by keepalive)",
"# are notified when they are dead).",
"keepalive",
"(",
"thing",
",",
"self",
")",
"if",
"rootpy",
":",
"return",
"asrootpy",
"(",
"thing",
",",
"*",
"*",
"kwargs",
")",
"return",
"thing"
] |
Return the requested object cast as its corresponding subclass in
rootpy if one exists and ``rootpy=True``, otherwise return the
unadulterated TObject.
|
[
"Return",
"the",
"requested",
"object",
"cast",
"as",
"its",
"corresponding",
"subclass",
"in",
"rootpy",
"if",
"one",
"exists",
"and",
"rootpy",
"=",
"True",
"otherwise",
"return",
"the",
"unadulterated",
"TObject",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L321-L348
|
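A sketch of Get together with the DoesNotExist exception it raises for missing paths; the file and object names are hypothetical and DoesNotExist is assumed importable from rootpy.io:

from rootpy.io import root_open, DoesNotExist

f = root_open('histos.root')             # hypothetical file
try:
    # returned cast to the rootpy subclass (e.g. Hist) rather than a bare TH1
    h = f.Get('analysis/electron/pt')
except DoesNotExist:
    h = None
f.Close()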
14,083
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.GetKey
|
def GetKey(self, path, cycle=9999, rootpy=True, **kwargs):
"""
Override TDirectory's GetKey and also handle accessing keys nested
arbitrarily deep in subdirectories.
"""
key = super(_DirectoryBase, self).GetKey(path, cycle)
if not key:
raise DoesNotExist
if rootpy:
return asrootpy(key, **kwargs)
return key
|
python
|
def GetKey(self, path, cycle=9999, rootpy=True, **kwargs):
"""
Override TDirectory's GetKey and also handle accessing keys nested
arbitrarily deep in subdirectories.
"""
key = super(_DirectoryBase, self).GetKey(path, cycle)
if not key:
raise DoesNotExist
if rootpy:
return asrootpy(key, **kwargs)
return key
|
[
"def",
"GetKey",
"(",
"self",
",",
"path",
",",
"cycle",
"=",
"9999",
",",
"rootpy",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"key",
"=",
"super",
"(",
"_DirectoryBase",
",",
"self",
")",
".",
"GetKey",
"(",
"path",
",",
"cycle",
")",
"if",
"not",
"key",
":",
"raise",
"DoesNotExist",
"if",
"rootpy",
":",
"return",
"asrootpy",
"(",
"key",
",",
"*",
"*",
"kwargs",
")",
"return",
"key"
] |
Override TDirectory's GetKey and also handle accessing keys nested
arbitrarily deep in subdirectories.
|
[
"Override",
"TDirectory",
"s",
"GetKey",
"and",
"also",
"handle",
"accessing",
"keys",
"nested",
"arbitrarily",
"deep",
"in",
"subdirectories",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L360-L370
|
14,084
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.mkdir
|
def mkdir(self, path, title="", recurse=False):
"""
Make a new directory. If recurse is True, create parent directories
as required. Return the newly created TDirectory.
"""
head, tail = os.path.split(os.path.normpath(path))
if tail == "":
raise ValueError("invalid directory name: {0}".format(path))
with preserve_current_directory():
dest = self
if recurse:
parent_dirs = head.split(os.path.sep)
for parent_dir in parent_dirs:
try:
newdest = dest.GetDirectory(parent_dir)
dest = newdest
except DoesNotExist:
dest = dest.mkdir(parent_dir)
elif head != "":
dest = dest.GetDirectory(head)
if tail in dest:
raise ValueError("{0} already exists".format(path))
newdir = asrootpy(super(_DirectoryBase, dest).mkdir(tail, title))
return newdir
|
python
|
def mkdir(self, path, title="", recurse=False):
"""
Make a new directory. If recurse is True, create parent directories
as required. Return the newly created TDirectory.
"""
head, tail = os.path.split(os.path.normpath(path))
if tail == "":
raise ValueError("invalid directory name: {0}".format(path))
with preserve_current_directory():
dest = self
if recurse:
parent_dirs = head.split(os.path.sep)
for parent_dir in parent_dirs:
try:
newdest = dest.GetDirectory(parent_dir)
dest = newdest
except DoesNotExist:
dest = dest.mkdir(parent_dir)
elif head != "":
dest = dest.GetDirectory(head)
if tail in dest:
raise ValueError("{0} already exists".format(path))
newdir = asrootpy(super(_DirectoryBase, dest).mkdir(tail, title))
return newdir
|
[
"def",
"mkdir",
"(",
"self",
",",
"path",
",",
"title",
"=",
"\"\"",
",",
"recurse",
"=",
"False",
")",
":",
"head",
",",
"tail",
"=",
"os",
".",
"path",
".",
"split",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"path",
")",
")",
"if",
"tail",
"==",
"\"\"",
":",
"raise",
"ValueError",
"(",
"\"invalid directory name: {0}\"",
".",
"format",
"(",
"path",
")",
")",
"with",
"preserve_current_directory",
"(",
")",
":",
"dest",
"=",
"self",
"if",
"recurse",
":",
"parent_dirs",
"=",
"head",
".",
"split",
"(",
"os",
".",
"path",
".",
"sep",
")",
"for",
"parent_dir",
"in",
"parent_dirs",
":",
"try",
":",
"newdest",
"=",
"dest",
".",
"GetDirectory",
"(",
"parent_dir",
")",
"dest",
"=",
"newdest",
"except",
"DoesNotExist",
":",
"dest",
"=",
"dest",
".",
"mkdir",
"(",
"parent_dir",
")",
"elif",
"head",
"!=",
"\"\"",
":",
"dest",
"=",
"dest",
".",
"GetDirectory",
"(",
"head",
")",
"if",
"tail",
"in",
"dest",
":",
"raise",
"ValueError",
"(",
"\"{0} already exists\"",
".",
"format",
"(",
"path",
")",
")",
"newdir",
"=",
"asrootpy",
"(",
"super",
"(",
"_DirectoryBase",
",",
"dest",
")",
".",
"mkdir",
"(",
"tail",
",",
"title",
")",
")",
"return",
"newdir"
] |
Make a new directory. If recurse is True, create parent directories
as required. Return the newly created TDirectory.
|
[
"Make",
"a",
"new",
"directory",
".",
"If",
"recurse",
"is",
"True",
"create",
"parent",
"directories",
"as",
"required",
".",
"Return",
"the",
"newly",
"created",
"TDirectory",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L385-L408
|
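A sketch of recursive directory creation with mkdir(recurse=True), which behaves like mkdir -p inside the file; 'output.root' is a hypothetical file name:

from rootpy.io import root_open

f = root_open('output.root', 'recreate')   # hypothetical output file
# intermediate directories are created as required
plots = f.mkdir('analysis/selection/plots', recurse=True)
print(plots.GetName())                     # 'plots'
f.Close()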
14,085
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.rm
|
def rm(self, path, cycle=';*'):
"""
Delete an object at `path` relative to this directory
"""
rdir = self
with preserve_current_directory():
dirname, objname = os.path.split(os.path.normpath(path))
if dirname:
rdir = rdir.Get(dirname)
rdir.Delete(objname + cycle)
|
python
|
def rm(self, path, cycle=';*'):
"""
Delete an object at `path` relative to this directory
"""
rdir = self
with preserve_current_directory():
dirname, objname = os.path.split(os.path.normpath(path))
if dirname:
rdir = rdir.Get(dirname)
rdir.Delete(objname + cycle)
|
[
"def",
"rm",
"(",
"self",
",",
"path",
",",
"cycle",
"=",
"';*'",
")",
":",
"rdir",
"=",
"self",
"with",
"preserve_current_directory",
"(",
")",
":",
"dirname",
",",
"objname",
"=",
"os",
".",
"path",
".",
"split",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"path",
")",
")",
"if",
"dirname",
":",
"rdir",
"=",
"rdir",
".",
"Get",
"(",
"dirname",
")",
"rdir",
".",
"Delete",
"(",
"objname",
"+",
"cycle",
")"
] |
Delete an object at `path` relative to this directory
|
[
"Delete",
"an",
"object",
"at",
"path",
"relative",
"to",
"this",
"directory"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L410-L419
|
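A sketch of rm; the default cycle=';*' removes every cycle of the named object, and the file and paths here are hypothetical:

from rootpy.io import root_open

f = root_open('output.root', 'update')   # hypothetical file opened writable
f.rm('analysis/plots/h_old')             # deletes all cycles of h_old
f.Close()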
14,086
|
rootpy/rootpy
|
rootpy/io/file.py
|
_DirectoryBase.copytree
|
def copytree(self, dest_dir, src=None, newname=None,
exclude=None, overwrite=False):
"""
Copy this directory or just one contained object into another
directory.
Parameters
----------
dest_dir : string or Directory
The destination directory.
src : string, optional (default=None)
If ``src`` is None then this entire directory is copied recursively
otherwise if ``src`` is a string path to an object relative to this
directory, only that object will be copied. The copied object can
optionally be given a ``newname``.
newname : string, optional (default=None)
An optional new name for the copied object.
exclude : callable, optional (default=None)
``exclude`` can optionally be a function which takes
``(path, object_name)`` and if returns True excludes
objects from being copied if the entire directory is being copied
recursively.
overwrite : bool, optional (default=False)
If True, then overwrite existing objects with the same name.
"""
def copy_object(obj, dest, name=None):
if name is None:
name = obj.GetName()
if not overwrite and name in dest:
raise ValueError(
"{0} already exists in {1} and `overwrite=False`".format(
name, dest._path))
dest.cd()
if isinstance(obj, ROOT.R.TTree):
new_obj = obj.CloneTree(-1, "fast")
new_obj.Write(name, ROOT.R.TObject.kOverwrite)
else:
obj.Write(name, ROOT.R.TObject.kOverwrite)
with preserve_current_directory():
if isinstance(src, string_types):
src = asrootpy(self.Get(src))
else:
src = self
if isinstance(dest_dir, string_types):
try:
dest_dir = asrootpy(self.GetDirectory(dest_dir))
except DoesNotExist:
dest_dir = self.mkdir(dest_dir)
if isinstance(src, ROOT.R.TDirectory):
# Copy a directory
cp_name = newname if newname is not None else src.GetName()
# See if the directory already exists
if cp_name not in dest_dir:
# Destination directory doesn't exist, so make a new one
new_dir = dest_dir.mkdir(cp_name)
else:
new_dir = dest_dir.get(cp_name)
# Copy everything in the src directory to the destination
for (path, dirnames, objects) in src.walk(maxdepth=0):
# Copy all the objects
for object_name in objects:
if exclude and exclude(path, object_name):
continue
thing = src.Get(object_name)
copy_object(thing, new_dir)
for dirname in dirnames:
if exclude and exclude(path, dirname):
continue
rdir = src.GetDirectory(dirname)
# Recursively copy objects in subdirectories
rdir.copytree(
new_dir,
exclude=exclude, overwrite=overwrite)
else:
# Copy an object
copy_object(src, dest_dir, name=newname)
|
python
|
def copytree(self, dest_dir, src=None, newname=None,
exclude=None, overwrite=False):
"""
Copy this directory or just one contained object into another
directory.
Parameters
----------
dest_dir : string or Directory
The destination directory.
src : string, optional (default=None)
If ``src`` is None then this entire directory is copied recursively
otherwise if ``src`` is a string path to an object relative to this
directory, only that object will be copied. The copied object can
optionally be given a ``newname``.
newname : string, optional (default=None)
An optional new name for the copied object.
exclude : callable, optional (default=None)
``exclude`` can optionally be a function which takes
``(path, object_name)`` and if returns True excludes
objects from being copied if the entire directory is being copied
recursively.
overwrite : bool, optional (default=False)
If True, then overwrite existing objects with the same name.
"""
def copy_object(obj, dest, name=None):
if name is None:
name = obj.GetName()
if not overwrite and name in dest:
raise ValueError(
"{0} already exists in {1} and `overwrite=False`".format(
name, dest._path))
dest.cd()
if isinstance(obj, ROOT.R.TTree):
new_obj = obj.CloneTree(-1, "fast")
new_obj.Write(name, ROOT.R.TObject.kOverwrite)
else:
obj.Write(name, ROOT.R.TObject.kOverwrite)
with preserve_current_directory():
if isinstance(src, string_types):
src = asrootpy(self.Get(src))
else:
src = self
if isinstance(dest_dir, string_types):
try:
dest_dir = asrootpy(self.GetDirectory(dest_dir))
except DoesNotExist:
dest_dir = self.mkdir(dest_dir)
if isinstance(src, ROOT.R.TDirectory):
# Copy a directory
cp_name = newname if newname is not None else src.GetName()
# See if the directory already exists
if cp_name not in dest_dir:
# Destination directory doesn't exist, so make a new one
new_dir = dest_dir.mkdir(cp_name)
else:
new_dir = dest_dir.get(cp_name)
# Copy everything in the src directory to the destination
for (path, dirnames, objects) in src.walk(maxdepth=0):
# Copy all the objects
for object_name in objects:
if exclude and exclude(path, object_name):
continue
thing = src.Get(object_name)
copy_object(thing, new_dir)
for dirname in dirnames:
if exclude and exclude(path, dirname):
continue
rdir = src.GetDirectory(dirname)
# Recursively copy objects in subdirectories
rdir.copytree(
new_dir,
exclude=exclude, overwrite=overwrite)
else:
# Copy an object
copy_object(src, dest_dir, name=newname)
|
[
"def",
"copytree",
"(",
"self",
",",
"dest_dir",
",",
"src",
"=",
"None",
",",
"newname",
"=",
"None",
",",
"exclude",
"=",
"None",
",",
"overwrite",
"=",
"False",
")",
":",
"def",
"copy_object",
"(",
"obj",
",",
"dest",
",",
"name",
"=",
"None",
")",
":",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"obj",
".",
"GetName",
"(",
")",
"if",
"not",
"overwrite",
"and",
"name",
"in",
"dest",
":",
"raise",
"ValueError",
"(",
"\"{0} already exists in {1} and `overwrite=False`\"",
".",
"format",
"(",
"name",
",",
"dest",
".",
"_path",
")",
")",
"dest",
".",
"cd",
"(",
")",
"if",
"isinstance",
"(",
"obj",
",",
"ROOT",
".",
"R",
".",
"TTree",
")",
":",
"new_obj",
"=",
"obj",
".",
"CloneTree",
"(",
"-",
"1",
",",
"\"fast\"",
")",
"new_obj",
".",
"Write",
"(",
"name",
",",
"ROOT",
".",
"R",
".",
"TObject",
".",
"kOverwrite",
")",
"else",
":",
"obj",
".",
"Write",
"(",
"name",
",",
"ROOT",
".",
"R",
".",
"TObject",
".",
"kOverwrite",
")",
"with",
"preserve_current_directory",
"(",
")",
":",
"if",
"isinstance",
"(",
"src",
",",
"string_types",
")",
":",
"src",
"=",
"asrootpy",
"(",
"self",
".",
"Get",
"(",
"src",
")",
")",
"else",
":",
"src",
"=",
"self",
"if",
"isinstance",
"(",
"dest_dir",
",",
"string_types",
")",
":",
"try",
":",
"dest_dir",
"=",
"asrootpy",
"(",
"self",
".",
"GetDirectory",
"(",
"dest_dir",
")",
")",
"except",
"DoesNotExist",
":",
"dest_dir",
"=",
"self",
".",
"mkdir",
"(",
"dest_dir",
")",
"if",
"isinstance",
"(",
"src",
",",
"ROOT",
".",
"R",
".",
"TDirectory",
")",
":",
"# Copy a directory",
"cp_name",
"=",
"newname",
"if",
"newname",
"is",
"not",
"None",
"else",
"src",
".",
"GetName",
"(",
")",
"# See if the directory already exists",
"if",
"cp_name",
"not",
"in",
"dest_dir",
":",
"# Destination directory doesn't exist, so make a new one",
"new_dir",
"=",
"dest_dir",
".",
"mkdir",
"(",
"cp_name",
")",
"else",
":",
"new_dir",
"=",
"dest_dir",
".",
"get",
"(",
"cp_name",
")",
"# Copy everything in the src directory to the destination",
"for",
"(",
"path",
",",
"dirnames",
",",
"objects",
")",
"in",
"src",
".",
"walk",
"(",
"maxdepth",
"=",
"0",
")",
":",
"# Copy all the objects",
"for",
"object_name",
"in",
"objects",
":",
"if",
"exclude",
"and",
"exclude",
"(",
"path",
",",
"object_name",
")",
":",
"continue",
"thing",
"=",
"src",
".",
"Get",
"(",
"object_name",
")",
"copy_object",
"(",
"thing",
",",
"new_dir",
")",
"for",
"dirname",
"in",
"dirnames",
":",
"if",
"exclude",
"and",
"exclude",
"(",
"path",
",",
"dirname",
")",
":",
"continue",
"rdir",
"=",
"src",
".",
"GetDirectory",
"(",
"dirname",
")",
"# Recursively copy objects in subdirectories",
"rdir",
".",
"copytree",
"(",
"new_dir",
",",
"exclude",
"=",
"exclude",
",",
"overwrite",
"=",
"overwrite",
")",
"else",
":",
"# Copy an object",
"copy_object",
"(",
"src",
",",
"dest_dir",
",",
"name",
"=",
"newname",
")"
] |
Copy this directory or just one contained object into another
directory.
Parameters
----------
dest_dir : string or Directory
The destination directory.
src : string, optional (default=None)
If ``src`` is None then this entire directory is copied recursively
otherwise if ``src`` is a string path to an object relative to this
directory, only that object will be copied. The copied object can
optionally be given a ``newname``.
newname : string, optional (default=None)
An optional new name for the copied object.
exclude : callable, optional (default=None)
``exclude`` can optionally be a function which takes
``(path, object_name)`` and if returns True excludes
objects from being copied if the entire directory is being copied
recursively.
overwrite : bool, optional (default=False)
If True, then overwrite existing objects with the same name.
|
[
"Copy",
"this",
"directory",
"or",
"just",
"one",
"contained",
"object",
"into",
"another",
"directory",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L424-L506
|
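A sketch of copytree copying one directory between files while excluding objects by name; the file and directory names are hypothetical, and a rootpy File object is assumed to be acceptable as dest_dir since only string destinations are special-cased above:

from rootpy.io import root_open

src = root_open('input.root')                # hypothetical input
dst = root_open('skimmed.root', 'recreate')  # hypothetical output

def skip_tmp(path, name):
    # exclude callback: drop anything whose name starts with 'tmp_'
    return name.startswith('tmp_')

src.copytree(dst, src='analysis', exclude=skip_tmp, overwrite=True)
dst.Close()
src.Close()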
14,087
|
rootpy/rootpy
|
rootpy/io/file.py
|
_FileBase.find
|
def find(self,
regexp, negate_regexp=False,
class_pattern=None,
find_fnc=re.search,
refresh_cache=False):
"""
yield the full path of the matching regular expression and the
match itself
"""
if refresh_cache or not hasattr(self, 'cache'):
self._populate_cache()
b = self.cache
split_regexp = regexp.split('/')
# traverse as deep as possible in the cache
# special case if the first character is not the root, i.e. not ""
if split_regexp[0] == '':
for d in split_regexp:
if d in b:
b = b[d]
else:
break
else:
b = b['']
# perform the search
for path, (obj, classname) in b['obj']:
if class_pattern:
if not fnmatch(classname, class_pattern):
continue
joined_path = os.path.join(*['/', path, obj])
result = find_fnc(regexp, joined_path)
if (result is not None) ^ negate_regexp:
yield joined_path, result
|
python
|
def find(self,
regexp, negate_regexp=False,
class_pattern=None,
find_fnc=re.search,
refresh_cache=False):
"""
yield the full path of the matching regular expression and the
match itself
"""
if refresh_cache or not hasattr(self, 'cache'):
self._populate_cache()
b = self.cache
split_regexp = regexp.split('/')
# traverse as deep as possible in the cache
# special case if the first character is not the root, i.e. not ""
if split_regexp[0] == '':
for d in split_regexp:
if d in b:
b = b[d]
else:
break
else:
b = b['']
# perform the search
for path, (obj, classname) in b['obj']:
if class_pattern:
if not fnmatch(classname, class_pattern):
continue
joined_path = os.path.join(*['/', path, obj])
result = find_fnc(regexp, joined_path)
if (result is not None) ^ negate_regexp:
yield joined_path, result
|
[
"def",
"find",
"(",
"self",
",",
"regexp",
",",
"negate_regexp",
"=",
"False",
",",
"class_pattern",
"=",
"None",
",",
"find_fnc",
"=",
"re",
".",
"search",
",",
"refresh_cache",
"=",
"False",
")",
":",
"if",
"refresh_cache",
"or",
"not",
"hasattr",
"(",
"self",
",",
"'cache'",
")",
":",
"self",
".",
"_populate_cache",
"(",
")",
"b",
"=",
"self",
".",
"cache",
"split_regexp",
"=",
"regexp",
".",
"split",
"(",
"'/'",
")",
"# traverse as deep as possible in the cache",
"# special case if the first character is not the root, i.e. not \"\"",
"if",
"split_regexp",
"[",
"0",
"]",
"==",
"''",
":",
"for",
"d",
"in",
"split_regexp",
":",
"if",
"d",
"in",
"b",
":",
"b",
"=",
"b",
"[",
"d",
"]",
"else",
":",
"break",
"else",
":",
"b",
"=",
"b",
"[",
"''",
"]",
"# perform the search",
"for",
"path",
",",
"(",
"obj",
",",
"classname",
")",
"in",
"b",
"[",
"'obj'",
"]",
":",
"if",
"class_pattern",
":",
"if",
"not",
"fnmatch",
"(",
"classname",
",",
"class_pattern",
")",
":",
"continue",
"joined_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"*",
"[",
"'/'",
",",
"path",
",",
"obj",
"]",
")",
"result",
"=",
"find_fnc",
"(",
"regexp",
",",
"joined_path",
")",
"if",
"(",
"result",
"is",
"not",
"None",
")",
"^",
"negate_regexp",
":",
"yield",
"joined_path",
",",
"result"
] |
yield the full path of the matching regular expression and the
match itself
|
[
"yield",
"the",
"full",
"path",
"of",
"the",
"matching",
"regular",
"expression",
"and",
"the",
"match",
"itself"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/io/file.py#L696-L730
|
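A sketch of find, which yields (full_path, match) pairs for object paths matching a regular expression, optionally filtered by ROOT class name; the file name is hypothetical:

from rootpy.io import root_open

f = root_open('histos.root')   # hypothetical file
# all TH1-derived objects whose path mentions 'electron'
for path, match in f.find('electron', class_pattern='TH1*'):
    print(path)
f.Close()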
14,088
|
rootpy/rootpy
|
rootpy/interactive/rootwait.py
|
start_new_gui_thread
|
def start_new_gui_thread():
"""
Attempt to start a new GUI thread, if possible.
It is only possible to start one if there was one running on module import.
"""
PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
if PyGUIThread is not None:
assert not PyGUIThread.isAlive(), "GUI thread already running!"
assert _processRootEvents, (
"GUI thread wasn't started when rootwait was imported, "
"so it can't be restarted")
ROOT.keeppolling = 1
ROOT.PyGUIThread = threading.Thread(
None, _processRootEvents, None, (ROOT,))
ROOT.PyGUIThread.finishSchedule = _finishSchedule
ROOT.PyGUIThread.setDaemon(1)
ROOT.PyGUIThread.start()
log.debug("successfully started a new GUI thread")
|
python
|
def start_new_gui_thread():
"""
Attempt to start a new GUI thread, if possible.
It is only possible to start one if there was one running on module import.
"""
PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
if PyGUIThread is not None:
assert not PyGUIThread.isAlive(), "GUI thread already running!"
assert _processRootEvents, (
"GUI thread wasn't started when rootwait was imported, "
"so it can't be restarted")
ROOT.keeppolling = 1
ROOT.PyGUIThread = threading.Thread(
None, _processRootEvents, None, (ROOT,))
ROOT.PyGUIThread.finishSchedule = _finishSchedule
ROOT.PyGUIThread.setDaemon(1)
ROOT.PyGUIThread.start()
log.debug("successfully started a new GUI thread")
|
[
"def",
"start_new_gui_thread",
"(",
")",
":",
"PyGUIThread",
"=",
"getattr",
"(",
"ROOT",
",",
"'PyGUIThread'",
",",
"None",
")",
"if",
"PyGUIThread",
"is",
"not",
"None",
":",
"assert",
"not",
"PyGUIThread",
".",
"isAlive",
"(",
")",
",",
"\"GUI thread already running!\"",
"assert",
"_processRootEvents",
",",
"(",
"\"GUI thread wasn't started when rootwait was imported, \"",
"\"so it can't be restarted\"",
")",
"ROOT",
".",
"keeppolling",
"=",
"1",
"ROOT",
".",
"PyGUIThread",
"=",
"threading",
".",
"Thread",
"(",
"None",
",",
"_processRootEvents",
",",
"None",
",",
"(",
"ROOT",
",",
")",
")",
"ROOT",
".",
"PyGUIThread",
".",
"finishSchedule",
"=",
"_finishSchedule",
"ROOT",
".",
"PyGUIThread",
".",
"setDaemon",
"(",
"1",
")",
"ROOT",
".",
"PyGUIThread",
".",
"start",
"(",
")",
"log",
".",
"debug",
"(",
"\"successfully started a new GUI thread\"",
")"
] |
Attempt to start a new GUI thread, if possible.
It is only possible to start one if there was one running on module import.
|
[
"Attempt",
"to",
"start",
"a",
"new",
"GUI",
"thread",
"if",
"possible",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L85-L107
|
14,089
|
rootpy/rootpy
|
rootpy/interactive/rootwait.py
|
stop_gui_thread
|
def stop_gui_thread():
"""
Try to stop the GUI thread. If it was running returns True,
otherwise False.
"""
PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
if PyGUIThread is None or not PyGUIThread.isAlive():
log.debug("no existing GUI thread is runnng")
return False
ROOT.keeppolling = 0
try:
PyGUIThread.finishSchedule()
except AttributeError:
log.debug("unable to call finishSchedule() on PyGUIThread")
pass
PyGUIThread.join()
log.debug("successfully stopped the existing GUI thread")
return True
|
python
|
def stop_gui_thread():
"""
Try to stop the GUI thread. If it was running returns True,
otherwise False.
"""
PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
if PyGUIThread is None or not PyGUIThread.isAlive():
log.debug("no existing GUI thread is runnng")
return False
ROOT.keeppolling = 0
try:
PyGUIThread.finishSchedule()
except AttributeError:
log.debug("unable to call finishSchedule() on PyGUIThread")
pass
PyGUIThread.join()
log.debug("successfully stopped the existing GUI thread")
return True
|
[
"def",
"stop_gui_thread",
"(",
")",
":",
"PyGUIThread",
"=",
"getattr",
"(",
"ROOT",
",",
"'PyGUIThread'",
",",
"None",
")",
"if",
"PyGUIThread",
"is",
"None",
"or",
"not",
"PyGUIThread",
".",
"isAlive",
"(",
")",
":",
"log",
".",
"debug",
"(",
"\"no existing GUI thread is runnng\"",
")",
"return",
"False",
"ROOT",
".",
"keeppolling",
"=",
"0",
"try",
":",
"PyGUIThread",
".",
"finishSchedule",
"(",
")",
"except",
"AttributeError",
":",
"log",
".",
"debug",
"(",
"\"unable to call finishSchedule() on PyGUIThread\"",
")",
"pass",
"PyGUIThread",
".",
"join",
"(",
")",
"log",
".",
"debug",
"(",
"\"successfully stopped the existing GUI thread\"",
")",
"return",
"True"
] |
Try to stop the GUI thread. If it was running returns True,
otherwise False.
|
[
"Try",
"to",
"stop",
"the",
"GUI",
"thread",
".",
"If",
"it",
"was",
"running",
"returns",
"True",
"otherwise",
"False",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L110-L129
|
14,090
|
rootpy/rootpy
|
rootpy/interactive/rootwait.py
|
wait_for_zero_canvases
|
def wait_for_zero_canvases(middle_mouse_close=False):
"""
Wait for all canvases to be closed, or CTRL-c.
If `middle_mouse_close`, middle click will shut the canvas.
incpy.ignore
"""
if not __ACTIVE:
wait_failover(wait_for_zero_canvases)
return
@dispatcher
def count_canvases():
"""
Count the number of active canvases and finish gApplication.Run()
if there are none remaining.
incpy.ignore
"""
if not get_visible_canvases():
try:
ROOT.gSystem.ExitLoop()
except AttributeError:
# We might be exiting and ROOT.gROOT will raise an AttributeError
pass
@dispatcher
def exit_application_loop():
"""
Signal handler for CTRL-c to cause gApplication.Run() to finish.
incpy.ignore
"""
ROOT.gSystem.ExitLoop()
# Handle CTRL-c
sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
sh.Add()
sh.Connect("Notified()", "TPyDispatcher",
exit_application_loop, "Dispatch()")
visible_canvases = get_visible_canvases()
for canvas in visible_canvases:
log.debug("waiting for canvas {0} to close".format(canvas.GetName()))
canvas.Update()
if middle_mouse_close:
attach_event_handler(canvas)
if not getattr(canvas, "_py_close_dispatcher_attached", False):
# Attach a handler only once to each canvas
canvas._py_close_dispatcher_attached = True
canvas.Connect("Closed()", "TPyDispatcher",
count_canvases, "Dispatch()")
keepalive(canvas, count_canvases)
if visible_canvases and not ROOT.gROOT.IsBatch():
run_application_until_done()
# Disconnect from canvases
for canvas in visible_canvases:
if getattr(canvas, "_py_close_dispatcher_attached", False):
canvas._py_close_dispatcher_attached = False
canvas.Disconnect("Closed()", count_canvases, "Dispatch()")
|
python
|
def wait_for_zero_canvases(middle_mouse_close=False):
"""
Wait for all canvases to be closed, or CTRL-c.
If `middle_mouse_close`, middle click will shut the canvas.
incpy.ignore
"""
if not __ACTIVE:
wait_failover(wait_for_zero_canvases)
return
@dispatcher
def count_canvases():
"""
Count the number of active canvases and finish gApplication.Run()
if there are none remaining.
incpy.ignore
"""
if not get_visible_canvases():
try:
ROOT.gSystem.ExitLoop()
except AttributeError:
# We might be exiting and ROOT.gROOT will raise an AttributeError
pass
@dispatcher
def exit_application_loop():
"""
Signal handler for CTRL-c to cause gApplication.Run() to finish.
incpy.ignore
"""
ROOT.gSystem.ExitLoop()
# Handle CTRL-c
sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
sh.Add()
sh.Connect("Notified()", "TPyDispatcher",
exit_application_loop, "Dispatch()")
visible_canvases = get_visible_canvases()
for canvas in visible_canvases:
log.debug("waiting for canvas {0} to close".format(canvas.GetName()))
canvas.Update()
if middle_mouse_close:
attach_event_handler(canvas)
if not getattr(canvas, "_py_close_dispatcher_attached", False):
# Attach a handler only once to each canvas
canvas._py_close_dispatcher_attached = True
canvas.Connect("Closed()", "TPyDispatcher",
count_canvases, "Dispatch()")
keepalive(canvas, count_canvases)
if visible_canvases and not ROOT.gROOT.IsBatch():
run_application_until_done()
# Disconnect from canvases
for canvas in visible_canvases:
if getattr(canvas, "_py_close_dispatcher_attached", False):
canvas._py_close_dispatcher_attached = False
canvas.Disconnect("Closed()", count_canvases, "Dispatch()")
|
[
"def",
"wait_for_zero_canvases",
"(",
"middle_mouse_close",
"=",
"False",
")",
":",
"if",
"not",
"__ACTIVE",
":",
"wait_failover",
"(",
"wait_for_zero_canvases",
")",
"return",
"@",
"dispatcher",
"def",
"count_canvases",
"(",
")",
":",
"\"\"\"\n Count the number of active canvases and finish gApplication.Run()\n if there are none remaining.\n\n incpy.ignore\n \"\"\"",
"if",
"not",
"get_visible_canvases",
"(",
")",
":",
"try",
":",
"ROOT",
".",
"gSystem",
".",
"ExitLoop",
"(",
")",
"except",
"AttributeError",
":",
"# We might be exiting and ROOT.gROOT will raise an AttributeError",
"pass",
"@",
"dispatcher",
"def",
"exit_application_loop",
"(",
")",
":",
"\"\"\"\n Signal handler for CTRL-c to cause gApplication.Run() to finish.\n\n incpy.ignore\n \"\"\"",
"ROOT",
".",
"gSystem",
".",
"ExitLoop",
"(",
")",
"# Handle CTRL-c",
"sh",
"=",
"ROOT",
".",
"TSignalHandler",
"(",
"ROOT",
".",
"kSigInterrupt",
",",
"True",
")",
"sh",
".",
"Add",
"(",
")",
"sh",
".",
"Connect",
"(",
"\"Notified()\"",
",",
"\"TPyDispatcher\"",
",",
"exit_application_loop",
",",
"\"Dispatch()\"",
")",
"visible_canvases",
"=",
"get_visible_canvases",
"(",
")",
"for",
"canvas",
"in",
"visible_canvases",
":",
"log",
".",
"debug",
"(",
"\"waiting for canvas {0} to close\"",
".",
"format",
"(",
"canvas",
".",
"GetName",
"(",
")",
")",
")",
"canvas",
".",
"Update",
"(",
")",
"if",
"middle_mouse_close",
":",
"attach_event_handler",
"(",
"canvas",
")",
"if",
"not",
"getattr",
"(",
"canvas",
",",
"\"_py_close_dispatcher_attached\"",
",",
"False",
")",
":",
"# Attach a handler only once to each canvas",
"canvas",
".",
"_py_close_dispatcher_attached",
"=",
"True",
"canvas",
".",
"Connect",
"(",
"\"Closed()\"",
",",
"\"TPyDispatcher\"",
",",
"count_canvases",
",",
"\"Dispatch()\"",
")",
"keepalive",
"(",
"canvas",
",",
"count_canvases",
")",
"if",
"visible_canvases",
"and",
"not",
"ROOT",
".",
"gROOT",
".",
"IsBatch",
"(",
")",
":",
"run_application_until_done",
"(",
")",
"# Disconnect from canvases",
"for",
"canvas",
"in",
"visible_canvases",
":",
"if",
"getattr",
"(",
"canvas",
",",
"\"_py_close_dispatcher_attached\"",
",",
"False",
")",
":",
"canvas",
".",
"_py_close_dispatcher_attached",
"=",
"False",
"canvas",
".",
"Disconnect",
"(",
"\"Closed()\"",
",",
"count_canvases",
",",
"\"Dispatch()\"",
")"
] |
Wait for all canvases to be closed, or CTRL-c.
If `middle_mouse_close`, middle click will shut the canvas.
incpy.ignore
|
[
"Wait",
"for",
"all",
"canvases",
"to",
"be",
"closed",
"or",
"CTRL",
"-",
"c",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L161-L226
|
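A sketch of blocking on open canvases in an interactive script; Canvas and Hist from rootpy.plotting are assumed, and wait_for_zero_canvases is imported from the module path shown in the record:

from rootpy.plotting import Canvas, Hist
from rootpy.interactive.rootwait import wait_for_zero_canvases

canvas = Canvas()
h = Hist(50, -3, 3)
h.FillRandom('gaus', 10000)
h.Draw()

# blocks until every visible canvas is closed (or CTRL-c);
# middle-clicking a canvas closes it when middle_mouse_close=True
wait_for_zero_canvases(middle_mouse_close=True)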
14,091
|
rootpy/rootpy
|
rootpy/interactive/rootwait.py
|
wait_for_frame
|
def wait_for_frame(frame):
"""
wait until a TGMainFrame is closed or ctrl-c
"""
if not frame:
# It's already closed or maybe we're in batch mode
return
@dispatcher
def close():
ROOT.gSystem.ExitLoop()
if not getattr(frame, "_py_close_dispatcher_attached", False):
frame._py_close_dispatcher_attached = True
frame.Connect("CloseWindow()", "TPyDispatcher", close, "Dispatch()")
@dispatcher
def exit_application_loop():
"""
Signal handler for CTRL-c to cause gApplication.Run() to finish.
incpy.ignore
"""
ROOT.gSystem.ExitLoop()
# Handle CTRL-c
sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
sh.Add()
sh.Connect("Notified()", "TPyDispatcher",
exit_application_loop, "Dispatch()")
if not ROOT.gROOT.IsBatch():
run_application_until_done()
# Need to disconnect to prevent close handler from running when python
# teardown has already commenced.
frame.Disconnect("CloseWindow()", close, "Dispatch()")
|
python
|
def wait_for_frame(frame):
"""
wait until a TGMainFrame is closed or ctrl-c
"""
if not frame:
# It's already closed or maybe we're in batch mode
return
@dispatcher
def close():
ROOT.gSystem.ExitLoop()
if not getattr(frame, "_py_close_dispatcher_attached", False):
frame._py_close_dispatcher_attached = True
frame.Connect("CloseWindow()", "TPyDispatcher", close, "Dispatch()")
@dispatcher
def exit_application_loop():
"""
Signal handler for CTRL-c to cause gApplication.Run() to finish.
incpy.ignore
"""
ROOT.gSystem.ExitLoop()
# Handle CTRL-c
sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
sh.Add()
sh.Connect("Notified()", "TPyDispatcher",
exit_application_loop, "Dispatch()")
if not ROOT.gROOT.IsBatch():
run_application_until_done()
# Need to disconnect to prevent close handler from running when python
# teardown has already commenced.
frame.Disconnect("CloseWindow()", close, "Dispatch()")
|
[
"def",
"wait_for_frame",
"(",
"frame",
")",
":",
"if",
"not",
"frame",
":",
"# It's already closed or maybe we're in batch mode",
"return",
"@",
"dispatcher",
"def",
"close",
"(",
")",
":",
"ROOT",
".",
"gSystem",
".",
"ExitLoop",
"(",
")",
"if",
"not",
"getattr",
"(",
"frame",
",",
"\"_py_close_dispatcher_attached\"",
",",
"False",
")",
":",
"frame",
".",
"_py_close_dispatcher_attached",
"=",
"True",
"frame",
".",
"Connect",
"(",
"\"CloseWindow()\"",
",",
"\"TPyDispatcher\"",
",",
"close",
",",
"\"Dispatch()\"",
")",
"@",
"dispatcher",
"def",
"exit_application_loop",
"(",
")",
":",
"\"\"\"\n Signal handler for CTRL-c to cause gApplication.Run() to finish.\n\n incpy.ignore\n \"\"\"",
"ROOT",
".",
"gSystem",
".",
"ExitLoop",
"(",
")",
"# Handle CTRL-c",
"sh",
"=",
"ROOT",
".",
"TSignalHandler",
"(",
"ROOT",
".",
"kSigInterrupt",
",",
"True",
")",
"sh",
".",
"Add",
"(",
")",
"sh",
".",
"Connect",
"(",
"\"Notified()\"",
",",
"\"TPyDispatcher\"",
",",
"exit_application_loop",
",",
"\"Dispatch()\"",
")",
"if",
"not",
"ROOT",
".",
"gROOT",
".",
"IsBatch",
"(",
")",
":",
"run_application_until_done",
"(",
")",
"# Need to disconnect to prevent close handler from running when python",
"# teardown has already commenced.",
"frame",
".",
"Disconnect",
"(",
"\"CloseWindow()\"",
",",
"close",
",",
"\"Dispatch()\"",
")"
] |
wait until a TGMainFrame is closed or ctrl-c
|
[
"wait",
"until",
"a",
"TGMainFrame",
"is",
"closed",
"or",
"ctrl",
"-",
"c"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L231-L266
|
14,092
|
rootpy/rootpy
|
rootpy/interactive/rootwait.py
|
wait_for_browser_close
|
def wait_for_browser_close(b):
"""
Can be used to wait until a TBrowser is closed
"""
if b:
if not __ACTIVE:
wait_failover(wait_for_browser_close)
return
wait_for_frame(b.GetBrowserImp().GetMainFrame())
|
python
|
def wait_for_browser_close(b):
"""
Can be used to wait until a TBrowser is closed
"""
if b:
if not __ACTIVE:
wait_failover(wait_for_browser_close)
return
wait_for_frame(b.GetBrowserImp().GetMainFrame())
|
[
"def",
"wait_for_browser_close",
"(",
"b",
")",
":",
"if",
"b",
":",
"if",
"not",
"__ACTIVE",
":",
"wait_failover",
"(",
"wait_for_browser_close",
")",
"return",
"wait_for_frame",
"(",
"b",
".",
"GetBrowserImp",
"(",
")",
".",
"GetMainFrame",
"(",
")",
")"
] |
Can be used to wait until a TBrowser is closed
|
[
"Can",
"be",
"used",
"to",
"wait",
"until",
"a",
"TBrowser",
"is",
"closed"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/interactive/rootwait.py#L269-L277
|
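A sketch pairing wait_for_browser_close with a plain PyROOT TBrowser; this assumes an interactive (non-batch) session:

import ROOT
from rootpy.interactive.rootwait import wait_for_browser_close

browser = ROOT.TBrowser()
# blocks until the browser window is closed or CTRL-c is pressed
wait_for_browser_close(browser)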
14,093
|
rootpy/rootpy
|
rootpy/logger/__init__.py
|
log_trace
|
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True):
"""
log a statement on function entry and exit
"""
def wrap(function):
l = logger.getChild(function.__name__).log
@wraps(function)
def thunk(*args, **kwargs):
global trace_depth
trace_depth.value += 1
try:
start = time()
if show_enter:
l(level, "{0}> {1} {2}".format(" "*trace_depth.value,
args, kwargs))
try:
result = function(*args, **kwargs)
except:
_, result, _ = sys.exc_info()
raise
finally:
if show_exit:
l(level, "{0}< return {1} [{2:.2f} sec]".format(
" "*trace_depth.value, result, time() - start))
finally:
trace_depth.value -= 1
return result
return thunk
return wrap
|
python
|
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True):
"""
log a statement on function entry and exit
"""
def wrap(function):
l = logger.getChild(function.__name__).log
@wraps(function)
def thunk(*args, **kwargs):
global trace_depth
trace_depth.value += 1
try:
start = time()
if show_enter:
l(level, "{0}> {1} {2}".format(" "*trace_depth.value,
args, kwargs))
try:
result = function(*args, **kwargs)
except:
_, result, _ = sys.exc_info()
raise
finally:
if show_exit:
l(level, "{0}< return {1} [{2:.2f} sec]".format(
" "*trace_depth.value, result, time() - start))
finally:
trace_depth.value -= 1
return result
return thunk
return wrap
|
[
"def",
"log_trace",
"(",
"logger",
",",
"level",
"=",
"logging",
".",
"DEBUG",
",",
"show_enter",
"=",
"True",
",",
"show_exit",
"=",
"True",
")",
":",
"def",
"wrap",
"(",
"function",
")",
":",
"l",
"=",
"logger",
".",
"getChild",
"(",
"function",
".",
"__name__",
")",
".",
"log",
"@",
"wraps",
"(",
"function",
")",
"def",
"thunk",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"global",
"trace_depth",
"trace_depth",
".",
"value",
"+=",
"1",
"try",
":",
"start",
"=",
"time",
"(",
")",
"if",
"show_enter",
":",
"l",
"(",
"level",
",",
"\"{0}> {1} {2}\"",
".",
"format",
"(",
"\" \"",
"*",
"trace_depth",
".",
"value",
",",
"args",
",",
"kwargs",
")",
")",
"try",
":",
"result",
"=",
"function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
":",
"_",
",",
"result",
",",
"_",
"=",
"sys",
".",
"exc_info",
"(",
")",
"raise",
"finally",
":",
"if",
"show_exit",
":",
"l",
"(",
"level",
",",
"\"{0}< return {1} [{2:.2f} sec]\"",
".",
"format",
"(",
"\" \"",
"*",
"trace_depth",
".",
"value",
",",
"result",
",",
"time",
"(",
")",
"-",
"start",
")",
")",
"finally",
":",
"trace_depth",
".",
"value",
"-=",
"1",
"return",
"result",
"return",
"thunk",
"return",
"wrap"
] |
log a statement on function entry and exit
|
[
"log",
"a",
"statement",
"on",
"function",
"entry",
"and",
"exit"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/__init__.py#L140-L168
|
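A sketch of log_trace used directly as a decorator factory (the ExtendedLogger.trace method later in this file wraps the same helper); 'myapp' is a hypothetical logger name:

import logging
from rootpy import log
from rootpy.logger import log_trace

mylog = log['/myapp']

@log_trace(mylog, level=logging.INFO)
def add(a, b):
    return a + b

add(1, 2)
# entry is logged as '> (1, 2) {}' and exit as '< return 3 [0.00 sec]'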
14,094
|
rootpy/rootpy
|
rootpy/logger/extended_logger.py
|
log_stack
|
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
"""
Display the current stack on ``logger``.
This function is designed to be used during emission of log messages, so it
won't call itself.
"""
if showing_stack.inside:
return
showing_stack.inside = True
try:
if frame is None:
frame = sys._getframe(1)
stack = "".join(traceback.format_stack(frame, limit))
for line in (l[2:] for l in stack.split("\n") if l.strip()):
logger.log(level, line)
finally:
showing_stack.inside = False
|
python
|
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
"""
Display the current stack on ``logger``.
This function is designed to be used during emission of log messages, so it
won't call itself.
"""
if showing_stack.inside:
return
showing_stack.inside = True
try:
if frame is None:
frame = sys._getframe(1)
stack = "".join(traceback.format_stack(frame, limit))
for line in (l[2:] for l in stack.split("\n") if l.strip()):
logger.log(level, line)
finally:
showing_stack.inside = False
|
[
"def",
"log_stack",
"(",
"logger",
",",
"level",
"=",
"logging",
".",
"INFO",
",",
"limit",
"=",
"None",
",",
"frame",
"=",
"None",
")",
":",
"if",
"showing_stack",
".",
"inside",
":",
"return",
"showing_stack",
".",
"inside",
"=",
"True",
"try",
":",
"if",
"frame",
"is",
"None",
":",
"frame",
"=",
"sys",
".",
"_getframe",
"(",
"1",
")",
"stack",
"=",
"\"\"",
".",
"join",
"(",
"traceback",
".",
"format_stack",
"(",
"frame",
",",
"limit",
")",
")",
"for",
"line",
"in",
"(",
"l",
"[",
"2",
":",
"]",
"for",
"l",
"in",
"stack",
".",
"split",
"(",
"\"\\n\"",
")",
"if",
"l",
".",
"strip",
"(",
")",
")",
":",
"logger",
".",
"log",
"(",
"level",
",",
"line",
")",
"finally",
":",
"showing_stack",
".",
"inside",
"=",
"False"
] |
Display the current stack on ``logger``.
This function is designed to be used during emission of log messages, so it
won't call itself.
|
[
"Display",
"the",
"current",
"stack",
"on",
"logger",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L25-L42
|
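A sketch of log_stack emitting the current call stack, one frame per line, on a chosen logger; 'myapp' is a hypothetical logger name:

import logging
from rootpy import log
from rootpy.logger.extended_logger import log_stack

def inner():
    # write up to 5 stack frames at WARNING level
    log_stack(log['/myapp'], level=logging.WARNING, limit=5)

def outer():
    inner()

outer()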
14,095
|
rootpy/rootpy
|
rootpy/logger/extended_logger.py
|
ExtendedLogger.showdeletion
|
def showdeletion(self, *objects):
"""
Record a stack trace at the point when an ROOT TObject is deleted
"""
from ..memory import showdeletion as S
for o in objects:
S.monitor_object_cleanup(o)
|
python
|
def showdeletion(self, *objects):
"""
Record a stack trace at the point when an ROOT TObject is deleted
"""
from ..memory import showdeletion as S
for o in objects:
S.monitor_object_cleanup(o)
|
[
"def",
"showdeletion",
"(",
"self",
",",
"*",
"objects",
")",
":",
"from",
".",
".",
"memory",
"import",
"showdeletion",
"as",
"S",
"for",
"o",
"in",
"objects",
":",
"S",
".",
"monitor_object_cleanup",
"(",
"o",
")"
] |
Record a stack trace at the point when an ROOT TObject is deleted
|
[
"Record",
"a",
"stack",
"trace",
"at",
"the",
"point",
"when",
"an",
"ROOT",
"TObject",
"is",
"deleted"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L67-L73
|
14,096
|
rootpy/rootpy
|
rootpy/logger/extended_logger.py
|
ExtendedLogger.trace
|
def trace(self, level=logging.DEBUG, show_enter=True, show_exit=True):
"""
Functions decorated with this function show function entry and exit with
values, defaults to debug log level.
:param level: log severity to use for function tracing
:param show_enter: log function entry
:param show_enter: log function exit
Example use:
.. sourcecode:: python
log = rootpy.log["/myapp"]
@log.trace()
def salut():
return
@log.trace()
def hello(what):
salut()
return "42"
hello("world")
# Result:
# DEBUG:myapp.trace.hello] > ('world',) {}
# DEBUG:myapp.trace.salut] > () {}
# DEBUG:myapp.trace.salut] < return None [0.00 sec]
# DEBUG:myapp.trace.hello] < return 42 [0.00 sec]
Output:
.. sourcecode:: none
"""
from . import log_trace
return log_trace(self, level, show_enter, show_exit)
|
python
|
def trace(self, level=logging.DEBUG, show_enter=True, show_exit=True):
"""
Functions decorated with this function show function entry and exit with
values, defaults to debug log level.
:param level: log severity to use for function tracing
:param show_enter: log function entry
:param show_enter: log function exit
Example use:
.. sourcecode:: python
log = rootpy.log["/myapp"]
@log.trace()
def salut():
return
@log.trace()
def hello(what):
salut()
return "42"
hello("world")
# Result:
# DEBUG:myapp.trace.hello] > ('world',) {}
# DEBUG:myapp.trace.salut] > () {}
# DEBUG:myapp.trace.salut] < return None [0.00 sec]
# DEBUG:myapp.trace.hello] < return 42 [0.00 sec]
Output:
.. sourcecode:: none
"""
from . import log_trace
return log_trace(self, level, show_enter, show_exit)
|
[
"def",
"trace",
"(",
"self",
",",
"level",
"=",
"logging",
".",
"DEBUG",
",",
"show_enter",
"=",
"True",
",",
"show_exit",
"=",
"True",
")",
":",
"from",
".",
"import",
"log_trace",
"return",
"log_trace",
"(",
"self",
",",
"level",
",",
"show_enter",
",",
"show_exit",
")"
] |
Functions decorated with this function show function entry and exit with
values, defaults to debug log level.
:param level: log severity to use for function tracing
:param show_enter: log function entry
:param show_enter: log function exit
Example use:
.. sourcecode:: python
log = rootpy.log["/myapp"]
@log.trace()
def salut():
return
@log.trace()
def hello(what):
salut()
return "42"
hello("world")
# Result:
# DEBUG:myapp.trace.hello] > ('world',) {}
# DEBUG:myapp.trace.salut] > () {}
# DEBUG:myapp.trace.salut] < return None [0.00 sec]
# DEBUG:myapp.trace.hello] < return 42 [0.00 sec]
Output:
.. sourcecode:: none
|
[
"Functions",
"decorated",
"with",
"this",
"function",
"show",
"function",
"entry",
"and",
"exit",
"with",
"values",
"defaults",
"to",
"debug",
"log",
"level",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L91-L127
|
14,097
|
rootpy/rootpy
|
rootpy/logger/extended_logger.py
|
ExtendedLogger.frame_unique
|
def frame_unique(f):
"""
A tuple representing a value which is unique to a given frame's line of
execution
"""
return f.f_code.co_filename, f.f_code.co_name, f.f_lineno
|
python
|
def frame_unique(f):
"""
A tuple representing a value which is unique to a given frame's line of
execution
"""
return f.f_code.co_filename, f.f_code.co_name, f.f_lineno
|
[
"def",
"frame_unique",
"(",
"f",
")",
":",
"return",
"f",
".",
"f_code",
".",
"co_filename",
",",
"f",
".",
"f_code",
".",
"co_name",
",",
"f",
".",
"f_lineno"
] |
A tuple representing a value which is unique to a given frame's line of
execution
|
[
"A",
"tuple",
"representing",
"a",
"value",
"which",
"is",
"unique",
"to",
"a",
"given",
"frame",
"s",
"line",
"of",
"execution"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L157-L162
|
14,098
|
rootpy/rootpy
|
rootpy/logger/extended_logger.py
|
ExtendedLogger.show_stack_depth
|
def show_stack_depth(self, record, frame):
"""
Compute the maximum stack depth to show requested by any hooks,
returning -1 if there are none matching, or if we've already emitted
one for the line of code referred to.
"""
logger = self
depths = [-1]
msg = record.getMessage()
# For each logger in the hierarchy
while logger:
to_match = getattr(logger, "show_stack_regexes", ())
for regex, depth, once, min_level in to_match:
if record.levelno < min_level:
continue
if not regex.match(record.msg):
continue
# Only for a given regex, line number and logger
unique = regex, self.frame_unique(frame), record.name
if once:
if unique in logger.shown_stack_frames:
# We've shown this one already.
continue
# Prevent this stack frame from being shown again
logger.shown_stack_frames.add(unique)
depths.append(depth)
logger = logger.parent
return max(depths)
|
python
|
def show_stack_depth(self, record, frame):
"""
Compute the maximum stack depth to show requested by any hooks,
returning -1 if there are none matching, or if we've already emitted
one for the line of code referred to.
"""
logger = self
depths = [-1]
msg = record.getMessage()
# For each logger in the hierarchy
while logger:
to_match = getattr(logger, "show_stack_regexes", ())
for regex, depth, once, min_level in to_match:
if record.levelno < min_level:
continue
if not regex.match(record.msg):
continue
# Only for a given regex, line number and logger
unique = regex, self.frame_unique(frame), record.name
if once:
if unique in logger.shown_stack_frames:
# We've shown this one already.
continue
# Prevent this stack frame from being shown again
logger.shown_stack_frames.add(unique)
depths.append(depth)
logger = logger.parent
return max(depths)
|
[
"def",
"show_stack_depth",
"(",
"self",
",",
"record",
",",
"frame",
")",
":",
"logger",
"=",
"self",
"depths",
"=",
"[",
"-",
"1",
"]",
"msg",
"=",
"record",
".",
"getMessage",
"(",
")",
"# For each logger in the hierarchy",
"while",
"logger",
":",
"to_match",
"=",
"getattr",
"(",
"logger",
",",
"\"show_stack_regexes\"",
",",
"(",
")",
")",
"for",
"regex",
",",
"depth",
",",
"once",
",",
"min_level",
"in",
"to_match",
":",
"if",
"record",
".",
"levelno",
"<",
"min_level",
":",
"continue",
"if",
"not",
"regex",
".",
"match",
"(",
"record",
".",
"msg",
")",
":",
"continue",
"# Only for a given regex, line number and logger",
"unique",
"=",
"regex",
",",
"self",
".",
"frame_unique",
"(",
"frame",
")",
",",
"record",
".",
"name",
"if",
"once",
":",
"if",
"unique",
"in",
"logger",
".",
"shown_stack_frames",
":",
"# We've shown this one already.",
"continue",
"# Prevent this stack frame from being shown again",
"logger",
".",
"shown_stack_frames",
".",
"add",
"(",
"unique",
")",
"depths",
".",
"append",
"(",
"depth",
")",
"logger",
"=",
"logger",
".",
"parent",
"return",
"max",
"(",
"depths",
")"
] |
Compute the maximum stack depth to show requested by any hooks,
returning -1 if there are none matching, or if we've already emitted
one for the line of code referred to.
|
[
"Compute",
"the",
"maximum",
"stack",
"depth",
"to",
"show",
"requested",
"by",
"any",
"hooks",
"returning",
"-",
"1",
"if",
"there",
"are",
"none",
"matching",
"or",
"if",
"we",
"ve",
"already",
"emitted",
"one",
"for",
"the",
"line",
"of",
"code",
"referred",
"to",
"."
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L164-L193
|
14,099
|
rootpy/rootpy
|
rootpy/logger/extended_logger.py
|
ExtendedLogger.getChild
|
def getChild(self, suffix):
"""
Taken from CPython 2.7, modified to remove duplicate prefix and suffixes
"""
if suffix is None:
return self
if self.root is not self:
if suffix.startswith(self.name + "."):
# Remove duplicate prefix
suffix = suffix[len(self.name + "."):]
suf_parts = suffix.split(".")
if len(suf_parts) > 1 and suf_parts[-1] == suf_parts[-2]:
# If we have a submodule's name equal to the parent's name,
# omit it.
suffix = ".".join(suf_parts[:-1])
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
|
python
|
def getChild(self, suffix):
"""
Taken from CPython 2.7, modified to remove duplicate prefix and suffixes
"""
if suffix is None:
return self
if self.root is not self:
if suffix.startswith(self.name + "."):
# Remove duplicate prefix
suffix = suffix[len(self.name + "."):]
suf_parts = suffix.split(".")
if len(suf_parts) > 1 and suf_parts[-1] == suf_parts[-2]:
# If we have a submodule's name equal to the parent's name,
# omit it.
suffix = ".".join(suf_parts[:-1])
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
|
[
"def",
"getChild",
"(",
"self",
",",
"suffix",
")",
":",
"if",
"suffix",
"is",
"None",
":",
"return",
"self",
"if",
"self",
".",
"root",
"is",
"not",
"self",
":",
"if",
"suffix",
".",
"startswith",
"(",
"self",
".",
"name",
"+",
"\".\"",
")",
":",
"# Remove duplicate prefix",
"suffix",
"=",
"suffix",
"[",
"len",
"(",
"self",
".",
"name",
"+",
"\".\"",
")",
":",
"]",
"suf_parts",
"=",
"suffix",
".",
"split",
"(",
"\".\"",
")",
"if",
"len",
"(",
"suf_parts",
")",
">",
"1",
"and",
"suf_parts",
"[",
"-",
"1",
"]",
"==",
"suf_parts",
"[",
"-",
"2",
"]",
":",
"# If we have a submodule's name equal to the parent's name,",
"# omit it.",
"suffix",
"=",
"\".\"",
".",
"join",
"(",
"suf_parts",
"[",
":",
"-",
"1",
"]",
")",
"suffix",
"=",
"'.'",
".",
"join",
"(",
"(",
"self",
".",
"name",
",",
"suffix",
")",
")",
"return",
"self",
".",
"manager",
".",
"getLogger",
"(",
"suffix",
")"
] |
Taken from CPython 2.7, modified to remove duplicate prefix and suffixes
|
[
"Taken",
"from",
"CPython",
"2",
".",
"7",
"modified",
"to",
"remove",
"duplicate",
"prefix",
"and",
"suffixes"
] |
3926935e1f2100d8ba68070c2ab44055d4800f73
|
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/logger/extended_logger.py#L225-L241
|
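A sketch of the duplicate-prefix handling in getChild, using the rootpy module-level logger; 'myapp' is a hypothetical logger name:

from rootpy import log

base = log['/myapp']
# a child name that already carries the parent's prefix is not doubled:
# the result is 'myapp.io', not 'myapp.myapp.io'
child = base.getChild('myapp.io')
print(child.name)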