id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
list | docstring
stringlengths 3
17.3k
| docstring_tokens
list | sha
stringlengths 40
40
| url
stringlengths 87
242
|
|---|---|---|---|---|---|---|---|---|---|---|---|
237,100
|
angr/pyvex
|
pyvex/block.py
|
IRSB.operations
|
def operations(self):
    """
    A list of all operations done by the IRSB, as libVEX enum names.

    Expressions without an ``op`` attribute (e.g. constants) are skipped.
    """
    return [expression.op for expression in self.expressions if hasattr(expression, 'op')]
|
python
|
def operations(self):
"""
A list of all operations done by the IRSB, as libVEX enum names
"""
ops = []
for e in self.expressions:
if hasattr(e, 'op'):
ops.append(e.op)
return ops
|
[
"def",
"operations",
"(",
"self",
")",
":",
"ops",
"=",
"[",
"]",
"for",
"e",
"in",
"self",
".",
"expressions",
":",
"if",
"hasattr",
"(",
"e",
",",
"'op'",
")",
":",
"ops",
".",
"append",
"(",
"e",
".",
"op",
")",
"return",
"ops"
] |
A list of all operations done by the IRSB, as libVEX enum names
|
[
"A",
"list",
"of",
"all",
"operations",
"done",
"by",
"the",
"IRSB",
"as",
"libVEX",
"enum",
"names"
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/block.py#L381-L389
|
237,101
|
angr/pyvex
|
pyvex/block.py
|
IRSB.constant_jump_targets
|
def constant_jump_targets(self):
    """
    A set of the static jump targets of the basic block.

    Collects the destinations of all side exits plus the default exit,
    when the latter is a known constant.
    """
    targets = set()
    # iterating an empty tuple when exit_statements is falsy mirrors the
    # original truthiness guard
    for _, _, exit_stmt in (self.exit_statements or ()):
        targets.add(exit_stmt.dst.value)
    if self.default_exit_target is not None:
        targets.add(self.default_exit_target)
    return targets
|
python
|
def constant_jump_targets(self):
"""
A set of the static jump targets of the basic block.
"""
exits = set()
if self.exit_statements:
for _, _, stmt_ in self.exit_statements:
exits.add(stmt_.dst.value)
default_target = self.default_exit_target
if default_target is not None:
exits.add(default_target)
return exits
|
[
"def",
"constant_jump_targets",
"(",
"self",
")",
":",
"exits",
"=",
"set",
"(",
")",
"if",
"self",
".",
"exit_statements",
":",
"for",
"_",
",",
"_",
",",
"stmt_",
"in",
"self",
".",
"exit_statements",
":",
"exits",
".",
"add",
"(",
"stmt_",
".",
"dst",
".",
"value",
")",
"default_target",
"=",
"self",
".",
"default_exit_target",
"if",
"default_target",
"is",
"not",
"None",
":",
"exits",
".",
"add",
"(",
"default_target",
")",
"return",
"exits"
] |
A set of the static jump targets of the basic block.
|
[
"A",
"set",
"of",
"the",
"static",
"jump",
"targets",
"of",
"the",
"basic",
"block",
"."
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/block.py#L407-L421
|
237,102
|
angr/pyvex
|
pyvex/block.py
|
IRSB.constant_jump_targets_and_jumpkinds
|
def constant_jump_targets_and_jumpkinds(self):
    """
    A dict of the static jump targets of the basic block to their jumpkind.

    Side exits map to their own jumpkind; the default exit (when constant)
    maps to the block's jumpkind.
    """
    targets = {}
    for _, _, exit_stmt in (self.exit_statements or ()):
        targets[exit_stmt.dst.value] = exit_stmt.jumpkind
    if self.default_exit_target is not None:
        targets[self.default_exit_target] = self.jumpkind
    return targets
|
python
|
def constant_jump_targets_and_jumpkinds(self):
"""
A dict of the static jump targets of the basic block to their jumpkind.
"""
exits = dict()
if self.exit_statements:
for _, _, stmt_ in self.exit_statements:
exits[stmt_.dst.value] = stmt_.jumpkind
default_target = self.default_exit_target
if default_target is not None:
exits[default_target] = self.jumpkind
return exits
|
[
"def",
"constant_jump_targets_and_jumpkinds",
"(",
"self",
")",
":",
"exits",
"=",
"dict",
"(",
")",
"if",
"self",
".",
"exit_statements",
":",
"for",
"_",
",",
"_",
",",
"stmt_",
"in",
"self",
".",
"exit_statements",
":",
"exits",
"[",
"stmt_",
".",
"dst",
".",
"value",
"]",
"=",
"stmt_",
".",
"jumpkind",
"default_target",
"=",
"self",
".",
"default_exit_target",
"if",
"default_target",
"is",
"not",
"None",
":",
"exits",
"[",
"default_target",
"]",
"=",
"self",
".",
"jumpkind",
"return",
"exits"
] |
A dict of the static jump targets of the basic block to their jumpkind.
|
[
"A",
"dict",
"of",
"the",
"static",
"jump",
"targets",
"of",
"the",
"basic",
"block",
"to",
"their",
"jumpkind",
"."
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/block.py#L424-L438
|
237,103
|
angr/pyvex
|
pyvex/block.py
|
IRSB._pp_str
|
def _pp_str(self):
    """
    Return the pretty-printed IRSB.
    :rtype: str
    """
    sa = []
    sa.append("IRSB {")
    if self.statements is not None:
        # header: the block's type environment (temporary variable types)
        sa.append(" %s" % self.tyenv)
        sa.append("")
    if self.statements is not None:
        for i, s in enumerate(self.statements):
            # For statements that reference a register offset, resolve the
            # offset to a human-readable register name via the arch.
            if isinstance(s, stmt.Put):
                stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.offset, s.data.result_size(self.tyenv) // 8))
            elif isinstance(s, stmt.WrTmp) and isinstance(s.data, expr.Get):
                stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.data.offset, s.data.result_size(self.tyenv) // 8))
            elif isinstance(s, stmt.Exit):
                # Exit statements write the guest IP; width is the arch word size
                stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.offsIP, self.arch.bits // 8))
            else:
                stmt_str = s.__str__()
            sa.append(" %02d | %s" % (i, stmt_str))
    else:
        # statements were skipped at lift time (skip_stmts)
        sa.append(" Statements are omitted.")
    # default exit: where control goes when no side exit is taken
    sa.append(
        " NEXT: PUT(%s) = %s; %s" % (self.arch.translate_register_name(self.offsIP), self.next, self.jumpkind))
    sa.append("}")
    return '\n'.join(sa)
|
python
|
def _pp_str(self):
"""
Return the pretty-printed IRSB.
:rtype: str
"""
sa = []
sa.append("IRSB {")
if self.statements is not None:
sa.append(" %s" % self.tyenv)
sa.append("")
if self.statements is not None:
for i, s in enumerate(self.statements):
if isinstance(s, stmt.Put):
stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.offset, s.data.result_size(self.tyenv) // 8))
elif isinstance(s, stmt.WrTmp) and isinstance(s.data, expr.Get):
stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.data.offset, s.data.result_size(self.tyenv) // 8))
elif isinstance(s, stmt.Exit):
stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.offsIP, self.arch.bits // 8))
else:
stmt_str = s.__str__()
sa.append(" %02d | %s" % (i, stmt_str))
else:
sa.append(" Statements are omitted.")
sa.append(
" NEXT: PUT(%s) = %s; %s" % (self.arch.translate_register_name(self.offsIP), self.next, self.jumpkind))
sa.append("}")
return '\n'.join(sa)
|
[
"def",
"_pp_str",
"(",
"self",
")",
":",
"sa",
"=",
"[",
"]",
"sa",
".",
"append",
"(",
"\"IRSB {\"",
")",
"if",
"self",
".",
"statements",
"is",
"not",
"None",
":",
"sa",
".",
"append",
"(",
"\" %s\"",
"%",
"self",
".",
"tyenv",
")",
"sa",
".",
"append",
"(",
"\"\"",
")",
"if",
"self",
".",
"statements",
"is",
"not",
"None",
":",
"for",
"i",
",",
"s",
"in",
"enumerate",
"(",
"self",
".",
"statements",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"stmt",
".",
"Put",
")",
":",
"stmt_str",
"=",
"s",
".",
"__str__",
"(",
"reg_name",
"=",
"self",
".",
"arch",
".",
"translate_register_name",
"(",
"s",
".",
"offset",
",",
"s",
".",
"data",
".",
"result_size",
"(",
"self",
".",
"tyenv",
")",
"//",
"8",
")",
")",
"elif",
"isinstance",
"(",
"s",
",",
"stmt",
".",
"WrTmp",
")",
"and",
"isinstance",
"(",
"s",
".",
"data",
",",
"expr",
".",
"Get",
")",
":",
"stmt_str",
"=",
"s",
".",
"__str__",
"(",
"reg_name",
"=",
"self",
".",
"arch",
".",
"translate_register_name",
"(",
"s",
".",
"data",
".",
"offset",
",",
"s",
".",
"data",
".",
"result_size",
"(",
"self",
".",
"tyenv",
")",
"//",
"8",
")",
")",
"elif",
"isinstance",
"(",
"s",
",",
"stmt",
".",
"Exit",
")",
":",
"stmt_str",
"=",
"s",
".",
"__str__",
"(",
"reg_name",
"=",
"self",
".",
"arch",
".",
"translate_register_name",
"(",
"s",
".",
"offsIP",
",",
"self",
".",
"arch",
".",
"bits",
"//",
"8",
")",
")",
"else",
":",
"stmt_str",
"=",
"s",
".",
"__str__",
"(",
")",
"sa",
".",
"append",
"(",
"\" %02d | %s\"",
"%",
"(",
"i",
",",
"stmt_str",
")",
")",
"else",
":",
"sa",
".",
"append",
"(",
"\" Statements are omitted.\"",
")",
"sa",
".",
"append",
"(",
"\" NEXT: PUT(%s) = %s; %s\"",
"%",
"(",
"self",
".",
"arch",
".",
"translate_register_name",
"(",
"self",
".",
"offsIP",
")",
",",
"self",
".",
"next",
",",
"self",
".",
"jumpkind",
")",
")",
"sa",
".",
"append",
"(",
"\"}\"",
")",
"return",
"'\\n'",
".",
"join",
"(",
"sa",
")"
] |
Return the pretty-printed IRSB.
:rtype: str
|
[
"Return",
"the",
"pretty",
"-",
"printed",
"IRSB",
"."
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/block.py#L444-L471
|
237,104
|
angr/pyvex
|
pyvex/block.py
|
IRSB._is_defaultexit_direct_jump
|
def _is_defaultexit_direct_jump(self):
"""
Checks if the default of this IRSB a direct jump or not.
"""
if not (self.jumpkind == 'Ijk_InvalICache' or self.jumpkind == 'Ijk_Boring' or self.jumpkind == 'Ijk_Call'):
return False
target = self.default_exit_target
return target is not None
|
python
|
def _is_defaultexit_direct_jump(self):
"""
Checks if the default of this IRSB a direct jump or not.
"""
if not (self.jumpkind == 'Ijk_InvalICache' or self.jumpkind == 'Ijk_Boring' or self.jumpkind == 'Ijk_Call'):
return False
target = self.default_exit_target
return target is not None
|
[
"def",
"_is_defaultexit_direct_jump",
"(",
"self",
")",
":",
"if",
"not",
"(",
"self",
".",
"jumpkind",
"==",
"'Ijk_InvalICache'",
"or",
"self",
".",
"jumpkind",
"==",
"'Ijk_Boring'",
"or",
"self",
".",
"jumpkind",
"==",
"'Ijk_Call'",
")",
":",
"return",
"False",
"target",
"=",
"self",
".",
"default_exit_target",
"return",
"target",
"is",
"not",
"None"
] |
Checks if the default of this IRSB a direct jump or not.
|
[
"Checks",
"if",
"the",
"default",
"of",
"this",
"IRSB",
"a",
"direct",
"jump",
"or",
"not",
"."
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/block.py#L473-L481
|
237,105
|
angr/pyvex
|
pyvex/block.py
|
IRTypeEnv.lookup
|
def lookup(self, tmp):
    """
    Return the type of temporary variable `tmp` as an enum string.

    :param int tmp: the temporary variable number.
    :raises IndexError: if `tmp` is not a valid temporary number.
    """
    # Valid temporaries are 0 .. types_used - 1. The original check used
    # `tmp > self.types_used`, an off-by-one that let tmp == types_used
    # slip through to the list access below.
    if tmp < 0 or tmp >= self.types_used:
        l.debug("Invalid temporary number %d", tmp)
        raise IndexError(tmp)
    return self.types[tmp]
|
python
|
def lookup(self, tmp):
"""
Return the type of temporary variable `tmp` as an enum string
"""
if tmp < 0 or tmp > self.types_used:
l.debug("Invalid temporary number %d", tmp)
raise IndexError(tmp)
return self.types[tmp]
|
[
"def",
"lookup",
"(",
"self",
",",
"tmp",
")",
":",
"if",
"tmp",
"<",
"0",
"or",
"tmp",
">",
"self",
".",
"types_used",
":",
"l",
".",
"debug",
"(",
"\"Invalid temporary number %d\"",
",",
"tmp",
")",
"raise",
"IndexError",
"(",
"tmp",
")",
"return",
"self",
".",
"types",
"[",
"tmp",
"]"
] |
Return the type of temporary variable `tmp` as an enum string
|
[
"Return",
"the",
"type",
"of",
"temporary",
"variable",
"tmp",
"as",
"an",
"enum",
"string"
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/block.py#L568-L575
|
237,106
|
angr/pyvex
|
pyvex/lifting/lifter.py
|
Lifter._lift
|
def _lift(self,
          data,
          bytes_offset=None,
          max_bytes=None,
          max_inst=None,
          opt_level=1,
          traceflags=None,
          allow_arch_optimizations=None,
          strict_block_end=None,
          skip_stmts=False,
          collect_data_refs=False):
    """
    Wrapper around the `lift` method on Lifters. Should not be overridden in child classes.

    :param data: The bytes to lift, as either a python string of bytes or a cffi buffer object.
    :param bytes_offset: The offset into `data` to start lifting at.
    :param max_bytes: The maximum number of bytes to lift; None means no byte limit.
    :param max_inst: The maximum number of instructions to lift; None means no instruction limit.
    :param opt_level: The IR optimization level, 0-2. Likely ignored by lifters other than LibVEX.
    :param traceflags: The libVEX traceflags, controlling VEX debug prints. Likely ignored by
                       lifters other than LibVEX.
    :param allow_arch_optimizations: Whether the LibVEX lifter may perform lift-time preprocessing
                       optimizations (e.g., lookback ITSTATE optimization on THUMB). Likely
                       ignored by lifters other than LibVEX.
    :param strict_block_end: Whether the LibVEX arm-thumb lifter should split blocks at some
                       instructions, for example CB{N}Z.
    :param skip_stmts: Whether the lifter should skip transferring IRStmts from C to Python.
    :param collect_data_refs: Whether the LibVEX lifter should collect data references in C.
    """
    # record the lifting parameters on the instance for lift() to consume
    self.data = data
    self.bytes_offset = bytes_offset
    self.max_bytes = max_bytes
    self.max_inst = max_inst
    self.opt_level = opt_level
    self.traceflags = traceflags
    self.allow_arch_optimizations = allow_arch_optimizations
    self.strict_block_end = strict_block_end
    self.skip_stmts = skip_stmts
    self.collect_data_refs = collect_data_refs
    # start from an empty IRSB; lift() fills it in
    self.irsb = IRSB.empty_block(self.arch, self.addr)
    self.lift()
    return self.irsb
|
python
|
def _lift(self,
data,
bytes_offset=None,
max_bytes=None,
max_inst=None,
opt_level=1,
traceflags=None,
allow_arch_optimizations=None,
strict_block_end=None,
skip_stmts=False,
collect_data_refs=False):
"""
Wrapper around the `lift` method on Lifters. Should not be overridden in child classes.
:param data: The bytes to lift as either a python string of bytes or a cffi buffer object.
:param bytes_offset: The offset into `data` to start lifting at.
:param max_bytes: The maximum number of bytes to lift. If set to None, no byte limit is used.
:param max_inst: The maximum number of instructions to lift. If set to None, no instruction limit is used.
:param opt_level: The level of optimization to apply to the IR, 0-2. Most likely will be ignored in any lifter
other then LibVEX.
:param traceflags: The libVEX traceflags, controlling VEX debug prints. Most likely will be ignored in any
lifter other than LibVEX.
:param allow_arch_optimizations: Should the LibVEX lifter be allowed to perform lift-time preprocessing optimizations
(e.g., lookback ITSTATE optimization on THUMB)
Most likely will be ignored in any lifter other than LibVEX.
:param strict_block_end: Should the LibVEX arm-thumb split block at some instructions, for example CB{N}Z.
:param skip_stmts: Should the lifter skip transferring IRStmts from C to Python.
:param collect_data_refs: Should the LibVEX lifter collect data references in C.
"""
irsb = IRSB.empty_block(self.arch, self.addr)
self.data = data
self.bytes_offset = bytes_offset
self.opt_level = opt_level
self.traceflags = traceflags
self.allow_arch_optimizations = allow_arch_optimizations
self.strict_block_end = strict_block_end
self.collect_data_refs = collect_data_refs
self.max_inst = max_inst
self.max_bytes = max_bytes
self.skip_stmts = skip_stmts
self.irsb = irsb
self.lift()
return self.irsb
|
[
"def",
"_lift",
"(",
"self",
",",
"data",
",",
"bytes_offset",
"=",
"None",
",",
"max_bytes",
"=",
"None",
",",
"max_inst",
"=",
"None",
",",
"opt_level",
"=",
"1",
",",
"traceflags",
"=",
"None",
",",
"allow_arch_optimizations",
"=",
"None",
",",
"strict_block_end",
"=",
"None",
",",
"skip_stmts",
"=",
"False",
",",
"collect_data_refs",
"=",
"False",
")",
":",
"irsb",
"=",
"IRSB",
".",
"empty_block",
"(",
"self",
".",
"arch",
",",
"self",
".",
"addr",
")",
"self",
".",
"data",
"=",
"data",
"self",
".",
"bytes_offset",
"=",
"bytes_offset",
"self",
".",
"opt_level",
"=",
"opt_level",
"self",
".",
"traceflags",
"=",
"traceflags",
"self",
".",
"allow_arch_optimizations",
"=",
"allow_arch_optimizations",
"self",
".",
"strict_block_end",
"=",
"strict_block_end",
"self",
".",
"collect_data_refs",
"=",
"collect_data_refs",
"self",
".",
"max_inst",
"=",
"max_inst",
"self",
".",
"max_bytes",
"=",
"max_bytes",
"self",
".",
"skip_stmts",
"=",
"skip_stmts",
"self",
".",
"irsb",
"=",
"irsb",
"self",
".",
"lift",
"(",
")",
"return",
"self",
".",
"irsb"
] |
Wrapper around the `lift` method on Lifters. Should not be overridden in child classes.
:param data: The bytes to lift as either a python string of bytes or a cffi buffer object.
:param bytes_offset: The offset into `data` to start lifting at.
:param max_bytes: The maximum number of bytes to lift. If set to None, no byte limit is used.
:param max_inst: The maximum number of instructions to lift. If set to None, no instruction limit is used.
:param opt_level: The level of optimization to apply to the IR, 0-2. Most likely will be ignored in any lifter
other then LibVEX.
:param traceflags: The libVEX traceflags, controlling VEX debug prints. Most likely will be ignored in any
lifter other than LibVEX.
:param allow_arch_optimizations: Should the LibVEX lifter be allowed to perform lift-time preprocessing optimizations
(e.g., lookback ITSTATE optimization on THUMB)
Most likely will be ignored in any lifter other than LibVEX.
:param strict_block_end: Should the LibVEX arm-thumb split block at some instructions, for example CB{N}Z.
:param skip_stmts: Should the lifter skip transferring IRStmts from C to Python.
:param collect_data_refs: Should the LibVEX lifter collect data references in C.
|
[
"Wrapper",
"around",
"the",
"lift",
"method",
"on",
"Lifters",
".",
"Should",
"not",
"be",
"overridden",
"in",
"child",
"classes",
"."
] |
c418edc1146982b2a0579bf56e5993c1c7046b19
|
https://github.com/angr/pyvex/blob/c418edc1146982b2a0579bf56e5993c1c7046b19/pyvex/lifting/lifter.py#L36-L78
|
237,107
|
TeamHG-Memex/scrapy-rotating-proxies
|
rotating_proxies/expire.py
|
exp_backoff
|
def exp_backoff(attempt, cap=3600, base=300):
    """ Exponential backoff time """
    # Equivalent to min(cap, base * 2 ** attempt), but avoids computing a
    # huge power for large attempt values.
    threshold = math.log(cap / base, 2)
    if attempt > threshold:
        return cap
    return base * 2 ** attempt
|
python
|
def exp_backoff(attempt, cap=3600, base=300):
""" Exponential backoff time """
# this is a numerically stable version of
# min(cap, base * 2 ** attempt)
max_attempts = math.log(cap / base, 2)
if attempt <= max_attempts:
return base * 2 ** attempt
return cap
|
[
"def",
"exp_backoff",
"(",
"attempt",
",",
"cap",
"=",
"3600",
",",
"base",
"=",
"300",
")",
":",
"# this is a numerically stable version of",
"# min(cap, base * 2 ** attempt)",
"max_attempts",
"=",
"math",
".",
"log",
"(",
"cap",
"/",
"base",
",",
"2",
")",
"if",
"attempt",
"<=",
"max_attempts",
":",
"return",
"base",
"*",
"2",
"**",
"attempt",
"return",
"cap"
] |
Exponential backoff time
|
[
"Exponential",
"backoff",
"time"
] |
89bb677fea6285a5e02e0a5c7dfb1c40330b17f0
|
https://github.com/TeamHG-Memex/scrapy-rotating-proxies/blob/89bb677fea6285a5e02e0a5c7dfb1c40330b17f0/rotating_proxies/expire.py#L149-L156
|
237,108
|
TeamHG-Memex/scrapy-rotating-proxies
|
rotating_proxies/expire.py
|
Proxies.get_proxy
|
def get_proxy(self, proxy_address):
    """
    Return the complete proxy name associated with the hostport of the
    given ``proxy_address``. If ``proxy_address`` is unknown or empty,
    return None.
    """
    if not proxy_address:
        return None
    return self.proxies_by_hostport.get(extract_proxy_hostport(proxy_address))
|
python
|
def get_proxy(self, proxy_address):
"""
Return complete proxy name associated with a hostport of a given
    ``proxy_address``. If ``proxy_address`` is unknown or empty,
return None.
"""
if not proxy_address:
return None
hostport = extract_proxy_hostport(proxy_address)
return self.proxies_by_hostport.get(hostport, None)
|
[
"def",
"get_proxy",
"(",
"self",
",",
"proxy_address",
")",
":",
"if",
"not",
"proxy_address",
":",
"return",
"None",
"hostport",
"=",
"extract_proxy_hostport",
"(",
"proxy_address",
")",
"return",
"self",
".",
"proxies_by_hostport",
".",
"get",
"(",
"hostport",
",",
"None",
")"
] |
Return complete proxy name associated with a hostport of a given
``proxy_address``. If ``proxy_address`` is unknown or empty,
return None.
|
[
"Return",
"complete",
"proxy",
"name",
"associated",
"with",
"a",
"hostport",
"of",
"a",
"given",
"proxy_address",
".",
"If",
"proxy_address",
"is",
"unkonwn",
"or",
"empty",
"return",
"None",
"."
] |
89bb677fea6285a5e02e0a5c7dfb1c40330b17f0
|
https://github.com/TeamHG-Memex/scrapy-rotating-proxies/blob/89bb677fea6285a5e02e0a5c7dfb1c40330b17f0/rotating_proxies/expire.py#L56-L65
|
237,109
|
TeamHG-Memex/scrapy-rotating-proxies
|
rotating_proxies/expire.py
|
Proxies.mark_dead
|
def mark_dead(self, proxy, _time=None):
    """Mark a proxy as dead and schedule its next re-check.

    :param proxy: full proxy name, as stored in ``self.proxies``.
    :param _time: current-time override for testing; defaults to time.time().
    """
    if proxy not in self.proxies:
        # logger.warn() has been deprecated since Python 3.3; use warning()
        # with lazy %-style args instead of eager string formatting
        logger.warning("Proxy <%s> was not found in proxies list", proxy)
        return
    if proxy in self.good:
        logger.debug("GOOD proxy became DEAD: <%s>", proxy)
    else:
        logger.debug("Proxy <%s> is DEAD", proxy)
    self.unchecked.discard(proxy)
    self.good.discard(proxy)
    self.dead.add(proxy)
    now = _time or time.time()
    state = self.proxies[proxy]
    # exponential backoff: each consecutive failure pushes next_check further out
    state.backoff_time = self.backoff(state.failed_attempts)
    state.next_check = now + state.backoff_time
    state.failed_attempts += 1
|
python
|
def mark_dead(self, proxy, _time=None):
""" Mark a proxy as dead """
if proxy not in self.proxies:
logger.warn("Proxy <%s> was not found in proxies list" % proxy)
return
if proxy in self.good:
logger.debug("GOOD proxy became DEAD: <%s>" % proxy)
else:
logger.debug("Proxy <%s> is DEAD" % proxy)
self.unchecked.discard(proxy)
self.good.discard(proxy)
self.dead.add(proxy)
now = _time or time.time()
state = self.proxies[proxy]
state.backoff_time = self.backoff(state.failed_attempts)
state.next_check = now + state.backoff_time
state.failed_attempts += 1
|
[
"def",
"mark_dead",
"(",
"self",
",",
"proxy",
",",
"_time",
"=",
"None",
")",
":",
"if",
"proxy",
"not",
"in",
"self",
".",
"proxies",
":",
"logger",
".",
"warn",
"(",
"\"Proxy <%s> was not found in proxies list\"",
"%",
"proxy",
")",
"return",
"if",
"proxy",
"in",
"self",
".",
"good",
":",
"logger",
".",
"debug",
"(",
"\"GOOD proxy became DEAD: <%s>\"",
"%",
"proxy",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"\"Proxy <%s> is DEAD\"",
"%",
"proxy",
")",
"self",
".",
"unchecked",
".",
"discard",
"(",
"proxy",
")",
"self",
".",
"good",
".",
"discard",
"(",
"proxy",
")",
"self",
".",
"dead",
".",
"add",
"(",
"proxy",
")",
"now",
"=",
"_time",
"or",
"time",
".",
"time",
"(",
")",
"state",
"=",
"self",
".",
"proxies",
"[",
"proxy",
"]",
"state",
".",
"backoff_time",
"=",
"self",
".",
"backoff",
"(",
"state",
".",
"failed_attempts",
")",
"state",
".",
"next_check",
"=",
"now",
"+",
"state",
".",
"backoff_time",
"state",
".",
"failed_attempts",
"+=",
"1"
] |
Mark a proxy as dead
|
[
"Mark",
"a",
"proxy",
"as",
"dead"
] |
89bb677fea6285a5e02e0a5c7dfb1c40330b17f0
|
https://github.com/TeamHG-Memex/scrapy-rotating-proxies/blob/89bb677fea6285a5e02e0a5c7dfb1c40330b17f0/rotating_proxies/expire.py#L67-L86
|
237,110
|
TeamHG-Memex/scrapy-rotating-proxies
|
rotating_proxies/expire.py
|
Proxies.mark_good
|
def mark_good(self, proxy):
    """Mark a proxy as good and reset its failure counter.

    :param proxy: full proxy name, as stored in ``self.proxies``.
    """
    if proxy not in self.proxies:
        # logger.warn() has been deprecated since Python 3.3; use warning()
        # with lazy %-style args instead of eager string formatting
        logger.warning("Proxy <%s> was not found in proxies list", proxy)
        return
    if proxy not in self.good:
        logger.debug("Proxy <%s> is GOOD", proxy)
    self.unchecked.discard(proxy)
    self.dead.discard(proxy)
    self.good.add(proxy)
    self.proxies[proxy].failed_attempts = 0
|
python
|
def mark_good(self, proxy):
""" Mark a proxy as good """
if proxy not in self.proxies:
logger.warn("Proxy <%s> was not found in proxies list" % proxy)
return
if proxy not in self.good:
logger.debug("Proxy <%s> is GOOD" % proxy)
self.unchecked.discard(proxy)
self.dead.discard(proxy)
self.good.add(proxy)
self.proxies[proxy].failed_attempts = 0
|
[
"def",
"mark_good",
"(",
"self",
",",
"proxy",
")",
":",
"if",
"proxy",
"not",
"in",
"self",
".",
"proxies",
":",
"logger",
".",
"warn",
"(",
"\"Proxy <%s> was not found in proxies list\"",
"%",
"proxy",
")",
"return",
"if",
"proxy",
"not",
"in",
"self",
".",
"good",
":",
"logger",
".",
"debug",
"(",
"\"Proxy <%s> is GOOD\"",
"%",
"proxy",
")",
"self",
".",
"unchecked",
".",
"discard",
"(",
"proxy",
")",
"self",
".",
"dead",
".",
"discard",
"(",
"proxy",
")",
"self",
".",
"good",
".",
"add",
"(",
"proxy",
")",
"self",
".",
"proxies",
"[",
"proxy",
"]",
".",
"failed_attempts",
"=",
"0"
] |
Mark a proxy as good
|
[
"Mark",
"a",
"proxy",
"as",
"good"
] |
89bb677fea6285a5e02e0a5c7dfb1c40330b17f0
|
https://github.com/TeamHG-Memex/scrapy-rotating-proxies/blob/89bb677fea6285a5e02e0a5c7dfb1c40330b17f0/rotating_proxies/expire.py#L88-L100
|
237,111
|
TeamHG-Memex/scrapy-rotating-proxies
|
rotating_proxies/expire.py
|
Proxies.reanimate
|
def reanimate(self, _time=None):
    """Move dead proxies to unchecked if their backoff timeout has passed.

    :param _time: current-time override for testing; defaults to time.time().
    :return: number of proxies moved back to the unchecked pool.
    """
    now = _time or time.time()
    revived = 0
    # iterate a snapshot because the set is mutated inside the loop
    for proxy in list(self.dead):
        state = self.proxies[proxy]
        assert state.next_check is not None
        if state.next_check > now:
            continue
        self.dead.remove(proxy)
        self.unchecked.add(proxy)
        revived += 1
    return revived
|
python
|
def reanimate(self, _time=None):
""" Move dead proxies to unchecked if a backoff timeout passes """
n_reanimated = 0
now = _time or time.time()
for proxy in list(self.dead):
state = self.proxies[proxy]
assert state.next_check is not None
if state.next_check <= now:
self.dead.remove(proxy)
self.unchecked.add(proxy)
n_reanimated += 1
return n_reanimated
|
[
"def",
"reanimate",
"(",
"self",
",",
"_time",
"=",
"None",
")",
":",
"n_reanimated",
"=",
"0",
"now",
"=",
"_time",
"or",
"time",
".",
"time",
"(",
")",
"for",
"proxy",
"in",
"list",
"(",
"self",
".",
"dead",
")",
":",
"state",
"=",
"self",
".",
"proxies",
"[",
"proxy",
"]",
"assert",
"state",
".",
"next_check",
"is",
"not",
"None",
"if",
"state",
".",
"next_check",
"<=",
"now",
":",
"self",
".",
"dead",
".",
"remove",
"(",
"proxy",
")",
"self",
".",
"unchecked",
".",
"add",
"(",
"proxy",
")",
"n_reanimated",
"+=",
"1",
"return",
"n_reanimated"
] |
Move dead proxies to unchecked if a backoff timeout passes
|
[
"Move",
"dead",
"proxies",
"to",
"unchecked",
"if",
"a",
"backoff",
"timeout",
"passes"
] |
89bb677fea6285a5e02e0a5c7dfb1c40330b17f0
|
https://github.com/TeamHG-Memex/scrapy-rotating-proxies/blob/89bb677fea6285a5e02e0a5c7dfb1c40330b17f0/rotating_proxies/expire.py#L102-L113
|
237,112
|
TeamHG-Memex/scrapy-rotating-proxies
|
rotating_proxies/expire.py
|
Proxies.reset
|
def reset(self):
    """Return every dead proxy to the unchecked pool."""
    # iterate a snapshot because the set is mutated inside the loop
    dead_snapshot = list(self.dead)
    for proxy in dead_snapshot:
        self.dead.discard(proxy)
        self.unchecked.add(proxy)
|
python
|
def reset(self):
""" Mark all dead proxies as unchecked """
for proxy in list(self.dead):
self.dead.remove(proxy)
self.unchecked.add(proxy)
|
[
"def",
"reset",
"(",
"self",
")",
":",
"for",
"proxy",
"in",
"list",
"(",
"self",
".",
"dead",
")",
":",
"self",
".",
"dead",
".",
"remove",
"(",
"proxy",
")",
"self",
".",
"unchecked",
".",
"add",
"(",
"proxy",
")"
] |
Mark all dead proxies as unchecked
|
[
"Mark",
"all",
"dead",
"proxies",
"as",
"unchecked"
] |
89bb677fea6285a5e02e0a5c7dfb1c40330b17f0
|
https://github.com/TeamHG-Memex/scrapy-rotating-proxies/blob/89bb677fea6285a5e02e0a5c7dfb1c40330b17f0/rotating_proxies/expire.py#L115-L119
|
237,113
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.on_change
|
def on_change(self, path, event_type):
    """Respond to changes in the file system

    This method will be given the path to a file that
    has changed on disk. We need to reload the keywords
    from that file

    :param path: path of the file that changed on disk.
    :param event_type: type of the filesystem event; not used here —
        any change triggers a full reload of the file's keywords.
    """
    # I can do all this work in a sql statement, but
    # for debugging it's easier to do it in stages.
    sql = """SELECT collection_id
             FROM collection_table
             WHERE path == ?
          """
    cursor = self._execute(sql, (path,))
    results = cursor.fetchall()
    # there should always be exactly one result, but
    # there's no harm in using a loop to process the
    # single result
    for result in results:
        collection_id = result[0]
        # remove all keywords in this collection, then re-add them
        # from the current on-disk contents of the file
        sql = """DELETE from keyword_table
                 WHERE collection_id == ?
              """
        cursor = self._execute(sql, (collection_id,))
        self._load_keywords(collection_id, path=path)
|
python
|
def on_change(self, path, event_type):
"""Respond to changes in the file system
This method will be given the path to a file that
has changed on disk. We need to reload the keywords
from that file
"""
# I can do all this work in a sql statement, but
# for debugging it's easier to do it in stages.
sql = """SELECT collection_id
FROM collection_table
WHERE path == ?
"""
cursor = self._execute(sql, (path,))
results = cursor.fetchall()
# there should always be exactly one result, but
# there's no harm in using a loop to process the
# single result
for result in results:
collection_id = result[0]
# remove all keywords in this collection
sql = """DELETE from keyword_table
WHERE collection_id == ?
"""
cursor = self._execute(sql, (collection_id,))
self._load_keywords(collection_id, path=path)
|
[
"def",
"on_change",
"(",
"self",
",",
"path",
",",
"event_type",
")",
":",
"# I can do all this work in a sql statement, but",
"# for debugging it's easier to do it in stages.",
"sql",
"=",
"\"\"\"SELECT collection_id\n FROM collection_table\n WHERE path == ?\n \"\"\"",
"cursor",
"=",
"self",
".",
"_execute",
"(",
"sql",
",",
"(",
"path",
",",
")",
")",
"results",
"=",
"cursor",
".",
"fetchall",
"(",
")",
"# there should always be exactly one result, but",
"# there's no harm in using a loop to process the",
"# single result",
"for",
"result",
"in",
"results",
":",
"collection_id",
"=",
"result",
"[",
"0",
"]",
"# remove all keywords in this collection",
"sql",
"=",
"\"\"\"DELETE from keyword_table\n WHERE collection_id == ?\n \"\"\"",
"cursor",
"=",
"self",
".",
"_execute",
"(",
"sql",
",",
"(",
"collection_id",
",",
")",
")",
"self",
".",
"_load_keywords",
"(",
"collection_id",
",",
"path",
"=",
"path",
")"
] |
Respond to changes in the file system
This method will be given the path to a file that
has changed on disk. We need to reload the keywords
from that file
|
[
"Respond",
"to",
"changes",
"in",
"the",
"file",
"system"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L86-L111
|
237,114
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable._load_keywords
|
def _load_keywords(self, collection_id, path=None, libdoc=None):
"""Load a collection of keywords
One of path or libdoc needs to be passed in...
"""
if libdoc is None and path is None:
raise(Exception("You must provide either a path or libdoc argument"))
if libdoc is None:
libdoc = LibraryDocumentation(path)
if len(libdoc.keywords) > 0:
for keyword in libdoc.keywords:
self._add_keyword(collection_id, keyword.name, keyword.doc, keyword.args)
|
python
|
def _load_keywords(self, collection_id, path=None, libdoc=None):
"""Load a collection of keywords
One of path or libdoc needs to be passed in...
"""
if libdoc is None and path is None:
raise(Exception("You must provide either a path or libdoc argument"))
if libdoc is None:
libdoc = LibraryDocumentation(path)
if len(libdoc.keywords) > 0:
for keyword in libdoc.keywords:
self._add_keyword(collection_id, keyword.name, keyword.doc, keyword.args)
|
[
"def",
"_load_keywords",
"(",
"self",
",",
"collection_id",
",",
"path",
"=",
"None",
",",
"libdoc",
"=",
"None",
")",
":",
"if",
"libdoc",
"is",
"None",
"and",
"path",
"is",
"None",
":",
"raise",
"(",
"Exception",
"(",
"\"You must provide either a path or libdoc argument\"",
")",
")",
"if",
"libdoc",
"is",
"None",
":",
"libdoc",
"=",
"LibraryDocumentation",
"(",
"path",
")",
"if",
"len",
"(",
"libdoc",
".",
"keywords",
")",
">",
"0",
":",
"for",
"keyword",
"in",
"libdoc",
".",
"keywords",
":",
"self",
".",
"_add_keyword",
"(",
"collection_id",
",",
"keyword",
".",
"name",
",",
"keyword",
".",
"doc",
",",
"keyword",
".",
"args",
")"
] |
Load a collection of keywords
One of path or libdoc needs to be passed in...
|
[
"Load",
"a",
"collection",
"of",
"keywords"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L113-L126
|
237,115
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.add_file
|
def add_file(self, path):
"""Add a resource file or library file to the database"""
libdoc = LibraryDocumentation(path)
if len(libdoc.keywords) > 0:
if libdoc.doc.startswith("Documentation for resource file"):
# bah! The file doesn't have an file-level documentation
# and libdoc substitutes some placeholder text.
libdoc.doc = ""
collection_id = self.add_collection(path, libdoc.name, libdoc.type,
libdoc.doc, libdoc.version,
libdoc.scope, libdoc.named_args,
libdoc.doc_format)
self._load_keywords(collection_id, libdoc=libdoc)
|
python
|
def add_file(self, path):
"""Add a resource file or library file to the database"""
libdoc = LibraryDocumentation(path)
if len(libdoc.keywords) > 0:
if libdoc.doc.startswith("Documentation for resource file"):
# bah! The file doesn't have an file-level documentation
# and libdoc substitutes some placeholder text.
libdoc.doc = ""
collection_id = self.add_collection(path, libdoc.name, libdoc.type,
libdoc.doc, libdoc.version,
libdoc.scope, libdoc.named_args,
libdoc.doc_format)
self._load_keywords(collection_id, libdoc=libdoc)
|
[
"def",
"add_file",
"(",
"self",
",",
"path",
")",
":",
"libdoc",
"=",
"LibraryDocumentation",
"(",
"path",
")",
"if",
"len",
"(",
"libdoc",
".",
"keywords",
")",
">",
"0",
":",
"if",
"libdoc",
".",
"doc",
".",
"startswith",
"(",
"\"Documentation for resource file\"",
")",
":",
"# bah! The file doesn't have an file-level documentation",
"# and libdoc substitutes some placeholder text.",
"libdoc",
".",
"doc",
"=",
"\"\"",
"collection_id",
"=",
"self",
".",
"add_collection",
"(",
"path",
",",
"libdoc",
".",
"name",
",",
"libdoc",
".",
"type",
",",
"libdoc",
".",
"doc",
",",
"libdoc",
".",
"version",
",",
"libdoc",
".",
"scope",
",",
"libdoc",
".",
"named_args",
",",
"libdoc",
".",
"doc_format",
")",
"self",
".",
"_load_keywords",
"(",
"collection_id",
",",
"libdoc",
"=",
"libdoc",
")"
] |
Add a resource file or library file to the database
|
[
"Add",
"a",
"resource",
"file",
"or",
"library",
"file",
"to",
"the",
"database"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L128-L141
|
237,116
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.add_library
|
def add_library(self, name):
"""Add a library to the database
This method is for adding a library by name (eg: "BuiltIn")
rather than by a file.
"""
libdoc = LibraryDocumentation(name)
if len(libdoc.keywords) > 0:
# FIXME: figure out the path to the library file
collection_id = self.add_collection(None, libdoc.name, libdoc.type,
libdoc.doc, libdoc.version,
libdoc.scope, libdoc.named_args,
libdoc.doc_format)
self._load_keywords(collection_id, libdoc=libdoc)
|
python
|
def add_library(self, name):
"""Add a library to the database
This method is for adding a library by name (eg: "BuiltIn")
rather than by a file.
"""
libdoc = LibraryDocumentation(name)
if len(libdoc.keywords) > 0:
# FIXME: figure out the path to the library file
collection_id = self.add_collection(None, libdoc.name, libdoc.type,
libdoc.doc, libdoc.version,
libdoc.scope, libdoc.named_args,
libdoc.doc_format)
self._load_keywords(collection_id, libdoc=libdoc)
|
[
"def",
"add_library",
"(",
"self",
",",
"name",
")",
":",
"libdoc",
"=",
"LibraryDocumentation",
"(",
"name",
")",
"if",
"len",
"(",
"libdoc",
".",
"keywords",
")",
">",
"0",
":",
"# FIXME: figure out the path to the library file",
"collection_id",
"=",
"self",
".",
"add_collection",
"(",
"None",
",",
"libdoc",
".",
"name",
",",
"libdoc",
".",
"type",
",",
"libdoc",
".",
"doc",
",",
"libdoc",
".",
"version",
",",
"libdoc",
".",
"scope",
",",
"libdoc",
".",
"named_args",
",",
"libdoc",
".",
"doc_format",
")",
"self",
".",
"_load_keywords",
"(",
"collection_id",
",",
"libdoc",
"=",
"libdoc",
")"
] |
Add a library to the database
This method is for adding a library by name (eg: "BuiltIn")
rather than by a file.
|
[
"Add",
"a",
"library",
"to",
"the",
"database"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L143-L156
|
237,117
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.add_folder
|
def add_folder(self, dirname, watch=True):
"""Recursively add all files in a folder to the database
By "all files" I mean, "all files that are resource files
or library files". It will silently ignore files that don't
look like they belong in the database. Pity the fool who
uses non-standard suffixes.
N.B. folders with names that begin with '." will be skipped
"""
ignore_file = os.path.join(dirname, ".rfhubignore")
exclude_patterns = []
try:
with open(ignore_file, "r") as f:
exclude_patterns = []
for line in f.readlines():
line = line.strip()
if (re.match(r'^\s*#', line)): continue
if len(line.strip()) > 0:
exclude_patterns.append(line)
except:
# should probably warn the user?
pass
for filename in os.listdir(dirname):
path = os.path.join(dirname, filename)
(basename, ext) = os.path.splitext(filename.lower())
try:
if (os.path.isdir(path)):
if (not basename.startswith(".")):
if os.access(path, os.R_OK):
self.add_folder(path, watch=False)
else:
if (ext in (".xml", ".robot", ".txt", ".py", ".tsv")):
if os.access(path, os.R_OK):
self.add(path)
except Exception as e:
# I really need to get the logging situation figured out.
print("bummer:", str(e))
# FIXME:
# instead of passing a flag around, I should just keep track
# of which folders we're watching, and don't add wathers for
# any subfolders. That will work better in the case where
# the user accidentally starts up the hub giving the same
# folder, or a folder and it's children, on the command line...
if watch:
# add watcher on normalized path
dirname = os.path.abspath(dirname)
event_handler = WatchdogHandler(self, dirname)
self.observer.schedule(event_handler, dirname, recursive=True)
|
python
|
def add_folder(self, dirname, watch=True):
"""Recursively add all files in a folder to the database
By "all files" I mean, "all files that are resource files
or library files". It will silently ignore files that don't
look like they belong in the database. Pity the fool who
uses non-standard suffixes.
N.B. folders with names that begin with '." will be skipped
"""
ignore_file = os.path.join(dirname, ".rfhubignore")
exclude_patterns = []
try:
with open(ignore_file, "r") as f:
exclude_patterns = []
for line in f.readlines():
line = line.strip()
if (re.match(r'^\s*#', line)): continue
if len(line.strip()) > 0:
exclude_patterns.append(line)
except:
# should probably warn the user?
pass
for filename in os.listdir(dirname):
path = os.path.join(dirname, filename)
(basename, ext) = os.path.splitext(filename.lower())
try:
if (os.path.isdir(path)):
if (not basename.startswith(".")):
if os.access(path, os.R_OK):
self.add_folder(path, watch=False)
else:
if (ext in (".xml", ".robot", ".txt", ".py", ".tsv")):
if os.access(path, os.R_OK):
self.add(path)
except Exception as e:
# I really need to get the logging situation figured out.
print("bummer:", str(e))
# FIXME:
# instead of passing a flag around, I should just keep track
# of which folders we're watching, and don't add wathers for
# any subfolders. That will work better in the case where
# the user accidentally starts up the hub giving the same
# folder, or a folder and it's children, on the command line...
if watch:
# add watcher on normalized path
dirname = os.path.abspath(dirname)
event_handler = WatchdogHandler(self, dirname)
self.observer.schedule(event_handler, dirname, recursive=True)
|
[
"def",
"add_folder",
"(",
"self",
",",
"dirname",
",",
"watch",
"=",
"True",
")",
":",
"ignore_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"\".rfhubignore\"",
")",
"exclude_patterns",
"=",
"[",
"]",
"try",
":",
"with",
"open",
"(",
"ignore_file",
",",
"\"r\"",
")",
"as",
"f",
":",
"exclude_patterns",
"=",
"[",
"]",
"for",
"line",
"in",
"f",
".",
"readlines",
"(",
")",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"(",
"re",
".",
"match",
"(",
"r'^\\s*#'",
",",
"line",
")",
")",
":",
"continue",
"if",
"len",
"(",
"line",
".",
"strip",
"(",
")",
")",
">",
"0",
":",
"exclude_patterns",
".",
"append",
"(",
"line",
")",
"except",
":",
"# should probably warn the user?",
"pass",
"for",
"filename",
"in",
"os",
".",
"listdir",
"(",
"dirname",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"filename",
")",
"(",
"basename",
",",
"ext",
")",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
".",
"lower",
"(",
")",
")",
"try",
":",
"if",
"(",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
")",
":",
"if",
"(",
"not",
"basename",
".",
"startswith",
"(",
"\".\"",
")",
")",
":",
"if",
"os",
".",
"access",
"(",
"path",
",",
"os",
".",
"R_OK",
")",
":",
"self",
".",
"add_folder",
"(",
"path",
",",
"watch",
"=",
"False",
")",
"else",
":",
"if",
"(",
"ext",
"in",
"(",
"\".xml\"",
",",
"\".robot\"",
",",
"\".txt\"",
",",
"\".py\"",
",",
"\".tsv\"",
")",
")",
":",
"if",
"os",
".",
"access",
"(",
"path",
",",
"os",
".",
"R_OK",
")",
":",
"self",
".",
"add",
"(",
"path",
")",
"except",
"Exception",
"as",
"e",
":",
"# I really need to get the logging situation figured out.",
"print",
"(",
"\"bummer:\"",
",",
"str",
"(",
"e",
")",
")",
"# FIXME:",
"# instead of passing a flag around, I should just keep track",
"# of which folders we're watching, and don't add wathers for",
"# any subfolders. That will work better in the case where",
"# the user accidentally starts up the hub giving the same",
"# folder, or a folder and it's children, on the command line...",
"if",
"watch",
":",
"# add watcher on normalized path",
"dirname",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"dirname",
")",
"event_handler",
"=",
"WatchdogHandler",
"(",
"self",
",",
"dirname",
")",
"self",
".",
"observer",
".",
"schedule",
"(",
"event_handler",
",",
"dirname",
",",
"recursive",
"=",
"True",
")"
] |
Recursively add all files in a folder to the database
By "all files" I mean, "all files that are resource files
or library files". It will silently ignore files that don't
look like they belong in the database. Pity the fool who
uses non-standard suffixes.
N.B. folders with names that begin with '." will be skipped
|
[
"Recursively",
"add",
"all",
"files",
"in",
"a",
"folder",
"to",
"the",
"database"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L158-L210
|
237,118
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.add_installed_libraries
|
def add_installed_libraries(self, extra_libs = ["Selenium2Library",
"SudsLibrary",
"RequestsLibrary"]):
"""Add any installed libraries that we can find
We do this by looking in the `libraries` folder where
robot is installed. If you have libraries installed
in a non-standard place, this won't pick them up.
"""
libdir = os.path.dirname(robot.libraries.__file__)
loaded = []
for filename in os.listdir(libdir):
if filename.endswith(".py") or filename.endswith(".pyc"):
libname, ext = os.path.splitext(filename)
if (libname.lower() not in loaded and
not self._should_ignore(libname)):
try:
self.add(libname)
loaded.append(libname.lower())
except Exception as e:
# need a better way to log this...
self.log.debug("unable to add library: " + str(e))
# I hate how I implemented this, but I don't think there's
# any way to find out which installed python packages are
# robot libraries.
for library in extra_libs:
if (library.lower() not in loaded and
not self._should_ignore(library)):
try:
self.add(library)
loaded.append(library.lower())
except Exception as e:
self.log.debug("unable to add external library %s: %s" % \
(library, str(e)))
|
python
|
def add_installed_libraries(self, extra_libs = ["Selenium2Library",
"SudsLibrary",
"RequestsLibrary"]):
"""Add any installed libraries that we can find
We do this by looking in the `libraries` folder where
robot is installed. If you have libraries installed
in a non-standard place, this won't pick them up.
"""
libdir = os.path.dirname(robot.libraries.__file__)
loaded = []
for filename in os.listdir(libdir):
if filename.endswith(".py") or filename.endswith(".pyc"):
libname, ext = os.path.splitext(filename)
if (libname.lower() not in loaded and
not self._should_ignore(libname)):
try:
self.add(libname)
loaded.append(libname.lower())
except Exception as e:
# need a better way to log this...
self.log.debug("unable to add library: " + str(e))
# I hate how I implemented this, but I don't think there's
# any way to find out which installed python packages are
# robot libraries.
for library in extra_libs:
if (library.lower() not in loaded and
not self._should_ignore(library)):
try:
self.add(library)
loaded.append(library.lower())
except Exception as e:
self.log.debug("unable to add external library %s: %s" % \
(library, str(e)))
|
[
"def",
"add_installed_libraries",
"(",
"self",
",",
"extra_libs",
"=",
"[",
"\"Selenium2Library\"",
",",
"\"SudsLibrary\"",
",",
"\"RequestsLibrary\"",
"]",
")",
":",
"libdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"robot",
".",
"libraries",
".",
"__file__",
")",
"loaded",
"=",
"[",
"]",
"for",
"filename",
"in",
"os",
".",
"listdir",
"(",
"libdir",
")",
":",
"if",
"filename",
".",
"endswith",
"(",
"\".py\"",
")",
"or",
"filename",
".",
"endswith",
"(",
"\".pyc\"",
")",
":",
"libname",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"if",
"(",
"libname",
".",
"lower",
"(",
")",
"not",
"in",
"loaded",
"and",
"not",
"self",
".",
"_should_ignore",
"(",
"libname",
")",
")",
":",
"try",
":",
"self",
".",
"add",
"(",
"libname",
")",
"loaded",
".",
"append",
"(",
"libname",
".",
"lower",
"(",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"# need a better way to log this...",
"self",
".",
"log",
".",
"debug",
"(",
"\"unable to add library: \"",
"+",
"str",
"(",
"e",
")",
")",
"# I hate how I implemented this, but I don't think there's",
"# any way to find out which installed python packages are",
"# robot libraries.",
"for",
"library",
"in",
"extra_libs",
":",
"if",
"(",
"library",
".",
"lower",
"(",
")",
"not",
"in",
"loaded",
"and",
"not",
"self",
".",
"_should_ignore",
"(",
"library",
")",
")",
":",
"try",
":",
"self",
".",
"add",
"(",
"library",
")",
"loaded",
".",
"append",
"(",
"library",
".",
"lower",
"(",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"unable to add external library %s: %s\"",
"%",
"(",
"library",
",",
"str",
"(",
"e",
")",
")",
")"
] |
Add any installed libraries that we can find
We do this by looking in the `libraries` folder where
robot is installed. If you have libraries installed
in a non-standard place, this won't pick them up.
|
[
"Add",
"any",
"installed",
"libraries",
"that",
"we",
"can",
"find"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L230-L266
|
237,119
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.get_collection
|
def get_collection(self, collection_id):
"""Get a specific collection"""
sql = """SELECT collection.collection_id, collection.type,
collection.name, collection.path,
collection.doc,
collection.version, collection.scope,
collection.namedargs,
collection.doc_format
FROM collection_table as collection
WHERE collection_id == ? OR collection.name like ?
"""
cursor = self._execute(sql, (collection_id, collection_id))
# need to handle the case where we get more than one result...
sql_result = cursor.fetchone()
return {
"collection_id": sql_result[0],
"type": sql_result[1],
"name": sql_result[2],
"path": sql_result[3],
"doc": sql_result[4],
"version": sql_result[5],
"scope": sql_result[6],
"namedargs": sql_result[7],
"doc_format": sql_result[8]
}
return sql_result
|
python
|
def get_collection(self, collection_id):
"""Get a specific collection"""
sql = """SELECT collection.collection_id, collection.type,
collection.name, collection.path,
collection.doc,
collection.version, collection.scope,
collection.namedargs,
collection.doc_format
FROM collection_table as collection
WHERE collection_id == ? OR collection.name like ?
"""
cursor = self._execute(sql, (collection_id, collection_id))
# need to handle the case where we get more than one result...
sql_result = cursor.fetchone()
return {
"collection_id": sql_result[0],
"type": sql_result[1],
"name": sql_result[2],
"path": sql_result[3],
"doc": sql_result[4],
"version": sql_result[5],
"scope": sql_result[6],
"namedargs": sql_result[7],
"doc_format": sql_result[8]
}
return sql_result
|
[
"def",
"get_collection",
"(",
"self",
",",
"collection_id",
")",
":",
"sql",
"=",
"\"\"\"SELECT collection.collection_id, collection.type,\n collection.name, collection.path,\n collection.doc,\n collection.version, collection.scope,\n collection.namedargs,\n collection.doc_format\n FROM collection_table as collection\n WHERE collection_id == ? OR collection.name like ?\n \"\"\"",
"cursor",
"=",
"self",
".",
"_execute",
"(",
"sql",
",",
"(",
"collection_id",
",",
"collection_id",
")",
")",
"# need to handle the case where we get more than one result...",
"sql_result",
"=",
"cursor",
".",
"fetchone",
"(",
")",
"return",
"{",
"\"collection_id\"",
":",
"sql_result",
"[",
"0",
"]",
",",
"\"type\"",
":",
"sql_result",
"[",
"1",
"]",
",",
"\"name\"",
":",
"sql_result",
"[",
"2",
"]",
",",
"\"path\"",
":",
"sql_result",
"[",
"3",
"]",
",",
"\"doc\"",
":",
"sql_result",
"[",
"4",
"]",
",",
"\"version\"",
":",
"sql_result",
"[",
"5",
"]",
",",
"\"scope\"",
":",
"sql_result",
"[",
"6",
"]",
",",
"\"namedargs\"",
":",
"sql_result",
"[",
"7",
"]",
",",
"\"doc_format\"",
":",
"sql_result",
"[",
"8",
"]",
"}",
"return",
"sql_result"
] |
Get a specific collection
|
[
"Get",
"a",
"specific",
"collection"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L268-L293
|
237,120
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable.get_keyword
|
def get_keyword(self, collection_id, name):
"""Get a specific keyword from a library"""
sql = """SELECT keyword.name, keyword.args, keyword.doc
FROM keyword_table as keyword
WHERE keyword.collection_id == ?
AND keyword.name like ?
"""
cursor = self._execute(sql, (collection_id,name))
# We're going to assume no library has duplicate keywords
# While that in theory _could_ happen, it never _should_,
# and you get what you deserve if it does.
row = cursor.fetchone()
if row is not None:
return {"name": row[0],
"args": json.loads(row[1]),
"doc": row[2],
"collection_id": collection_id
}
return {}
|
python
|
def get_keyword(self, collection_id, name):
"""Get a specific keyword from a library"""
sql = """SELECT keyword.name, keyword.args, keyword.doc
FROM keyword_table as keyword
WHERE keyword.collection_id == ?
AND keyword.name like ?
"""
cursor = self._execute(sql, (collection_id,name))
# We're going to assume no library has duplicate keywords
# While that in theory _could_ happen, it never _should_,
# and you get what you deserve if it does.
row = cursor.fetchone()
if row is not None:
return {"name": row[0],
"args": json.loads(row[1]),
"doc": row[2],
"collection_id": collection_id
}
return {}
|
[
"def",
"get_keyword",
"(",
"self",
",",
"collection_id",
",",
"name",
")",
":",
"sql",
"=",
"\"\"\"SELECT keyword.name, keyword.args, keyword.doc\n FROM keyword_table as keyword\n WHERE keyword.collection_id == ?\n AND keyword.name like ?\n \"\"\"",
"cursor",
"=",
"self",
".",
"_execute",
"(",
"sql",
",",
"(",
"collection_id",
",",
"name",
")",
")",
"# We're going to assume no library has duplicate keywords",
"# While that in theory _could_ happen, it never _should_,",
"# and you get what you deserve if it does.",
"row",
"=",
"cursor",
".",
"fetchone",
"(",
")",
"if",
"row",
"is",
"not",
"None",
":",
"return",
"{",
"\"name\"",
":",
"row",
"[",
"0",
"]",
",",
"\"args\"",
":",
"json",
".",
"loads",
"(",
"row",
"[",
"1",
"]",
")",
",",
"\"doc\"",
":",
"row",
"[",
"2",
"]",
",",
"\"collection_id\"",
":",
"collection_id",
"}",
"return",
"{",
"}"
] |
Get a specific keyword from a library
|
[
"Get",
"a",
"specific",
"keyword",
"from",
"a",
"library"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L326-L344
|
237,121
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable._looks_like_libdoc_file
|
def _looks_like_libdoc_file(self, name):
"""Return true if an xml file looks like a libdoc file"""
# inefficient since we end up reading the file twice,
# but it's fast enough for our purposes, and prevents
# us from doing a full parse of files that are obviously
# not libdoc files
if name.lower().endswith(".xml"):
with open(name, "r") as f:
# read the first few lines; if we don't see
# what looks like libdoc data, return false
data = f.read(200)
index = data.lower().find("<keywordspec ")
if index > 0:
return True
return False
|
python
|
def _looks_like_libdoc_file(self, name):
"""Return true if an xml file looks like a libdoc file"""
# inefficient since we end up reading the file twice,
# but it's fast enough for our purposes, and prevents
# us from doing a full parse of files that are obviously
# not libdoc files
if name.lower().endswith(".xml"):
with open(name, "r") as f:
# read the first few lines; if we don't see
# what looks like libdoc data, return false
data = f.read(200)
index = data.lower().find("<keywordspec ")
if index > 0:
return True
return False
|
[
"def",
"_looks_like_libdoc_file",
"(",
"self",
",",
"name",
")",
":",
"# inefficient since we end up reading the file twice,",
"# but it's fast enough for our purposes, and prevents",
"# us from doing a full parse of files that are obviously",
"# not libdoc files",
"if",
"name",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\".xml\"",
")",
":",
"with",
"open",
"(",
"name",
",",
"\"r\"",
")",
"as",
"f",
":",
"# read the first few lines; if we don't see",
"# what looks like libdoc data, return false",
"data",
"=",
"f",
".",
"read",
"(",
"200",
")",
"index",
"=",
"data",
".",
"lower",
"(",
")",
".",
"find",
"(",
"\"<keywordspec \"",
")",
"if",
"index",
">",
"0",
":",
"return",
"True",
"return",
"False"
] |
Return true if an xml file looks like a libdoc file
|
[
"Return",
"true",
"if",
"an",
"xml",
"file",
"looks",
"like",
"a",
"libdoc",
"file"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L447-L461
|
237,122
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable._looks_like_resource_file
|
def _looks_like_resource_file(self, name):
"""Return true if the file has a keyword table but not a testcase table"""
# inefficient since we end up reading the file twice,
# but it's fast enough for our purposes, and prevents
# us from doing a full parse of files that are obviously
# not robot files
if (re.search(r'__init__.(txt|robot|html|tsv)$', name)):
# These are initialize files, not resource files
return False
found_keyword_table = False
if (name.lower().endswith(".robot") or
name.lower().endswith(".txt") or
name.lower().endswith(".tsv")):
with open(name, "r") as f:
data = f.read()
for match in re.finditer(r'^\*+\s*(Test Cases?|(?:User )?Keywords?)',
data, re.MULTILINE|re.IGNORECASE):
if (re.match(r'Test Cases?', match.group(1), re.IGNORECASE)):
# if there's a test case table, it's not a keyword file
return False
if (not found_keyword_table and
re.match(r'(User )?Keywords?', match.group(1), re.IGNORECASE)):
found_keyword_table = True
return found_keyword_table
|
python
|
def _looks_like_resource_file(self, name):
"""Return true if the file has a keyword table but not a testcase table"""
# inefficient since we end up reading the file twice,
# but it's fast enough for our purposes, and prevents
# us from doing a full parse of files that are obviously
# not robot files
if (re.search(r'__init__.(txt|robot|html|tsv)$', name)):
# These are initialize files, not resource files
return False
found_keyword_table = False
if (name.lower().endswith(".robot") or
name.lower().endswith(".txt") or
name.lower().endswith(".tsv")):
with open(name, "r") as f:
data = f.read()
for match in re.finditer(r'^\*+\s*(Test Cases?|(?:User )?Keywords?)',
data, re.MULTILINE|re.IGNORECASE):
if (re.match(r'Test Cases?', match.group(1), re.IGNORECASE)):
# if there's a test case table, it's not a keyword file
return False
if (not found_keyword_table and
re.match(r'(User )?Keywords?', match.group(1), re.IGNORECASE)):
found_keyword_table = True
return found_keyword_table
|
[
"def",
"_looks_like_resource_file",
"(",
"self",
",",
"name",
")",
":",
"# inefficient since we end up reading the file twice,",
"# but it's fast enough for our purposes, and prevents",
"# us from doing a full parse of files that are obviously",
"# not robot files",
"if",
"(",
"re",
".",
"search",
"(",
"r'__init__.(txt|robot|html|tsv)$'",
",",
"name",
")",
")",
":",
"# These are initialize files, not resource files",
"return",
"False",
"found_keyword_table",
"=",
"False",
"if",
"(",
"name",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\".robot\"",
")",
"or",
"name",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\".txt\"",
")",
"or",
"name",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"\".tsv\"",
")",
")",
":",
"with",
"open",
"(",
"name",
",",
"\"r\"",
")",
"as",
"f",
":",
"data",
"=",
"f",
".",
"read",
"(",
")",
"for",
"match",
"in",
"re",
".",
"finditer",
"(",
"r'^\\*+\\s*(Test Cases?|(?:User )?Keywords?)'",
",",
"data",
",",
"re",
".",
"MULTILINE",
"|",
"re",
".",
"IGNORECASE",
")",
":",
"if",
"(",
"re",
".",
"match",
"(",
"r'Test Cases?'",
",",
"match",
".",
"group",
"(",
"1",
")",
",",
"re",
".",
"IGNORECASE",
")",
")",
":",
"# if there's a test case table, it's not a keyword file",
"return",
"False",
"if",
"(",
"not",
"found_keyword_table",
"and",
"re",
".",
"match",
"(",
"r'(User )?Keywords?'",
",",
"match",
".",
"group",
"(",
"1",
")",
",",
"re",
".",
"IGNORECASE",
")",
")",
":",
"found_keyword_table",
"=",
"True",
"return",
"found_keyword_table"
] |
Return true if the file has a keyword table but not a testcase table
|
[
"Return",
"true",
"if",
"the",
"file",
"has",
"a",
"keyword",
"table",
"but",
"not",
"a",
"testcase",
"table"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L463-L490
|
237,123
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable._should_ignore
|
def _should_ignore(self, name):
"""Return True if a given library name should be ignored
This is necessary because not all files we find in the library
folder are libraries. I wish there was a public robot API
for "give me a list of installed libraries"...
"""
_name = name.lower()
return (_name.startswith("deprecated") or
_name.startswith("_") or
_name in ("remote", "reserved",
"dialogs_py", "dialogs_ipy", "dialogs_jy"))
|
python
|
def _should_ignore(self, name):
"""Return True if a given library name should be ignored
This is necessary because not all files we find in the library
folder are libraries. I wish there was a public robot API
for "give me a list of installed libraries"...
"""
_name = name.lower()
return (_name.startswith("deprecated") or
_name.startswith("_") or
_name in ("remote", "reserved",
"dialogs_py", "dialogs_ipy", "dialogs_jy"))
|
[
"def",
"_should_ignore",
"(",
"self",
",",
"name",
")",
":",
"_name",
"=",
"name",
".",
"lower",
"(",
")",
"return",
"(",
"_name",
".",
"startswith",
"(",
"\"deprecated\"",
")",
"or",
"_name",
".",
"startswith",
"(",
"\"_\"",
")",
"or",
"_name",
"in",
"(",
"\"remote\"",
",",
"\"reserved\"",
",",
"\"dialogs_py\"",
",",
"\"dialogs_ipy\"",
",",
"\"dialogs_jy\"",
")",
")"
] |
Return True if a given library name should be ignored
This is necessary because not all files we find in the library
folder are libraries. I wish there was a public robot API
for "give me a list of installed libraries"...
|
[
"Return",
"True",
"if",
"a",
"given",
"library",
"name",
"should",
"be",
"ignored"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L492-L503
|
237,124
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable._execute
|
def _execute(self, *args):
"""Execute an SQL query
This exists because I think it's tedious to get a cursor and
then use a cursor.
"""
cursor = self.db.cursor()
cursor.execute(*args)
return cursor
|
python
|
def _execute(self, *args):
"""Execute an SQL query
This exists because I think it's tedious to get a cursor and
then use a cursor.
"""
cursor = self.db.cursor()
cursor.execute(*args)
return cursor
|
[
"def",
"_execute",
"(",
"self",
",",
"*",
"args",
")",
":",
"cursor",
"=",
"self",
".",
"db",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"*",
"args",
")",
"return",
"cursor"
] |
Execute an SQL query
This exists because I think it's tedious to get a cursor and
then use a cursor.
|
[
"Execute",
"an",
"SQL",
"query"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L505-L513
|
237,125
|
boakley/robotframework-hub
|
rfhub/kwdb.py
|
KeywordTable._glob_to_sql
|
def _glob_to_sql(self, string):
"""Convert glob-like wildcards to SQL wildcards
* becomes %
? becomes _
% becomes \%
\\ remains \\
\* remains \*
\? remains \?
This also adds a leading and trailing %, unless the pattern begins with
^ or ends with $
"""
# What's with the chr(1) and chr(2) nonsense? It's a trick to
# hide \* and \? from the * and ? substitutions. This trick
# depends on the substitutiones being done in order. chr(1)
# and chr(2) were picked because I know those characters
# almost certainly won't be in the input string
table = ((r'\\', chr(1)), (r'\*', chr(2)), (r'\?', chr(3)),
(r'%', r'\%'), (r'?', '_'), (r'*', '%'),
(chr(1), r'\\'), (chr(2), r'\*'), (chr(3), r'\?'))
for (a, b) in table:
string = string.replace(a,b)
string = string[1:] if string.startswith("^") else "%" + string
string = string[:-1] if string.endswith("$") else string + "%"
return string
|
python
|
def _glob_to_sql(self, string):
"""Convert glob-like wildcards to SQL wildcards
* becomes %
? becomes _
% becomes \%
\\ remains \\
\* remains \*
\? remains \?
This also adds a leading and trailing %, unless the pattern begins with
^ or ends with $
"""
# What's with the chr(1) and chr(2) nonsense? It's a trick to
# hide \* and \? from the * and ? substitutions. This trick
# depends on the substitutiones being done in order. chr(1)
# and chr(2) were picked because I know those characters
# almost certainly won't be in the input string
table = ((r'\\', chr(1)), (r'\*', chr(2)), (r'\?', chr(3)),
(r'%', r'\%'), (r'?', '_'), (r'*', '%'),
(chr(1), r'\\'), (chr(2), r'\*'), (chr(3), r'\?'))
for (a, b) in table:
string = string.replace(a,b)
string = string[1:] if string.startswith("^") else "%" + string
string = string[:-1] if string.endswith("$") else string + "%"
return string
|
[
"def",
"_glob_to_sql",
"(",
"self",
",",
"string",
")",
":",
"# What's with the chr(1) and chr(2) nonsense? It's a trick to",
"# hide \\* and \\? from the * and ? substitutions. This trick",
"# depends on the substitutiones being done in order. chr(1)",
"# and chr(2) were picked because I know those characters",
"# almost certainly won't be in the input string",
"table",
"=",
"(",
"(",
"r'\\\\'",
",",
"chr",
"(",
"1",
")",
")",
",",
"(",
"r'\\*'",
",",
"chr",
"(",
"2",
")",
")",
",",
"(",
"r'\\?'",
",",
"chr",
"(",
"3",
")",
")",
",",
"(",
"r'%'",
",",
"r'\\%'",
")",
",",
"(",
"r'?'",
",",
"'_'",
")",
",",
"(",
"r'*'",
",",
"'%'",
")",
",",
"(",
"chr",
"(",
"1",
")",
",",
"r'\\\\'",
")",
",",
"(",
"chr",
"(",
"2",
")",
",",
"r'\\*'",
")",
",",
"(",
"chr",
"(",
"3",
")",
",",
"r'\\?'",
")",
")",
"for",
"(",
"a",
",",
"b",
")",
"in",
"table",
":",
"string",
"=",
"string",
".",
"replace",
"(",
"a",
",",
"b",
")",
"string",
"=",
"string",
"[",
"1",
":",
"]",
"if",
"string",
".",
"startswith",
"(",
"\"^\"",
")",
"else",
"\"%\"",
"+",
"string",
"string",
"=",
"string",
"[",
":",
"-",
"1",
"]",
"if",
"string",
".",
"endswith",
"(",
"\"$\"",
")",
"else",
"string",
"+",
"\"%\"",
"return",
"string"
] |
Convert glob-like wildcards to SQL wildcards
* becomes %
? becomes _
% becomes \%
\\ remains \\
\* remains \*
\? remains \?
This also adds a leading and trailing %, unless the pattern begins with
^ or ends with $
|
[
"Convert",
"glob",
"-",
"like",
"wildcards",
"to",
"SQL",
"wildcards"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/kwdb.py#L565-L594
|
237,126
|
boakley/robotframework-hub
|
rfhub/blueprints/doc/__init__.py
|
doc
|
def doc():
"""Show a list of libraries, along with the nav panel on the left"""
kwdb = current_app.kwdb
libraries = get_collections(kwdb, libtype="library")
resource_files = get_collections(kwdb, libtype="resource")
hierarchy = get_navpanel_data(kwdb)
return flask.render_template("home.html",
data={"libraries": libraries,
"version": __version__,
"libdoc": None,
"hierarchy": hierarchy,
"resource_files": resource_files
})
|
python
|
def doc():
"""Show a list of libraries, along with the nav panel on the left"""
kwdb = current_app.kwdb
libraries = get_collections(kwdb, libtype="library")
resource_files = get_collections(kwdb, libtype="resource")
hierarchy = get_navpanel_data(kwdb)
return flask.render_template("home.html",
data={"libraries": libraries,
"version": __version__,
"libdoc": None,
"hierarchy": hierarchy,
"resource_files": resource_files
})
|
[
"def",
"doc",
"(",
")",
":",
"kwdb",
"=",
"current_app",
".",
"kwdb",
"libraries",
"=",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"library\"",
")",
"resource_files",
"=",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"resource\"",
")",
"hierarchy",
"=",
"get_navpanel_data",
"(",
"kwdb",
")",
"return",
"flask",
".",
"render_template",
"(",
"\"home.html\"",
",",
"data",
"=",
"{",
"\"libraries\"",
":",
"libraries",
",",
"\"version\"",
":",
"__version__",
",",
"\"libdoc\"",
":",
"None",
",",
"\"hierarchy\"",
":",
"hierarchy",
",",
"\"resource_files\"",
":",
"resource_files",
"}",
")"
] |
Show a list of libraries, along with the nav panel on the left
|
[
"Show",
"a",
"list",
"of",
"libraries",
"along",
"with",
"the",
"nav",
"panel",
"on",
"the",
"left"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/blueprints/doc/__init__.py#L14-L28
|
237,127
|
boakley/robotframework-hub
|
rfhub/blueprints/doc/__init__.py
|
index
|
def index():
"""Show a list of available libraries, and resource files"""
kwdb = current_app.kwdb
libraries = get_collections(kwdb, libtype="library")
resource_files = get_collections(kwdb, libtype="resource")
return flask.render_template("libraryNames.html",
data={"libraries": libraries,
"version": __version__,
"resource_files": resource_files
})
|
python
|
def index():
"""Show a list of available libraries, and resource files"""
kwdb = current_app.kwdb
libraries = get_collections(kwdb, libtype="library")
resource_files = get_collections(kwdb, libtype="resource")
return flask.render_template("libraryNames.html",
data={"libraries": libraries,
"version": __version__,
"resource_files": resource_files
})
|
[
"def",
"index",
"(",
")",
":",
"kwdb",
"=",
"current_app",
".",
"kwdb",
"libraries",
"=",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"library\"",
")",
"resource_files",
"=",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"resource\"",
")",
"return",
"flask",
".",
"render_template",
"(",
"\"libraryNames.html\"",
",",
"data",
"=",
"{",
"\"libraries\"",
":",
"libraries",
",",
"\"version\"",
":",
"__version__",
",",
"\"resource_files\"",
":",
"resource_files",
"}",
")"
] |
Show a list of available libraries, and resource files
|
[
"Show",
"a",
"list",
"of",
"available",
"libraries",
"and",
"resource",
"files"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/blueprints/doc/__init__.py#L32-L43
|
237,128
|
boakley/robotframework-hub
|
rfhub/blueprints/doc/__init__.py
|
search
|
def search():
"""Show all keywords that match a pattern"""
pattern = flask.request.args.get('pattern', "*").strip().lower()
# if the pattern contains "in:<collection>" (eg: in:builtin),
# filter results to only that (or those) collections
# This was kind-of hacked together, but seems to work well enough
collections = [c["name"].lower() for c in current_app.kwdb.get_collections()]
words = []
filters = []
if pattern.startswith("name:"):
pattern = pattern[5:].strip()
mode = "name"
else:
mode="both"
for word in pattern.split(" "):
if word.lower().startswith("in:"):
filters.extend([name for name in collections if name.startswith(word[3:])])
else:
words.append(word)
pattern = " ".join(words)
keywords = []
for keyword in current_app.kwdb.search(pattern, mode):
kw = list(keyword)
collection_id = kw[0]
collection_name = kw[1].lower()
if len(filters) == 0 or collection_name in filters:
url = flask.url_for(".doc_for_library", collection_id=kw[0], keyword=kw[2])
row_id = "row-%s.%s" % (keyword[1].lower(), keyword[2].lower().replace(" ","-"))
keywords.append({"collection_id": keyword[0],
"collection_name": keyword[1],
"name": keyword[2],
"synopsis": keyword[3],
"version": __version__,
"url": url,
"row_id": row_id
})
keywords.sort(key=lambda kw: kw["name"])
return flask.render_template("search.html",
data={"keywords": keywords,
"version": __version__,
"pattern": pattern
})
|
python
|
def search():
"""Show all keywords that match a pattern"""
pattern = flask.request.args.get('pattern', "*").strip().lower()
# if the pattern contains "in:<collection>" (eg: in:builtin),
# filter results to only that (or those) collections
# This was kind-of hacked together, but seems to work well enough
collections = [c["name"].lower() for c in current_app.kwdb.get_collections()]
words = []
filters = []
if pattern.startswith("name:"):
pattern = pattern[5:].strip()
mode = "name"
else:
mode="both"
for word in pattern.split(" "):
if word.lower().startswith("in:"):
filters.extend([name for name in collections if name.startswith(word[3:])])
else:
words.append(word)
pattern = " ".join(words)
keywords = []
for keyword in current_app.kwdb.search(pattern, mode):
kw = list(keyword)
collection_id = kw[0]
collection_name = kw[1].lower()
if len(filters) == 0 or collection_name in filters:
url = flask.url_for(".doc_for_library", collection_id=kw[0], keyword=kw[2])
row_id = "row-%s.%s" % (keyword[1].lower(), keyword[2].lower().replace(" ","-"))
keywords.append({"collection_id": keyword[0],
"collection_name": keyword[1],
"name": keyword[2],
"synopsis": keyword[3],
"version": __version__,
"url": url,
"row_id": row_id
})
keywords.sort(key=lambda kw: kw["name"])
return flask.render_template("search.html",
data={"keywords": keywords,
"version": __version__,
"pattern": pattern
})
|
[
"def",
"search",
"(",
")",
":",
"pattern",
"=",
"flask",
".",
"request",
".",
"args",
".",
"get",
"(",
"'pattern'",
",",
"\"*\"",
")",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
"# if the pattern contains \"in:<collection>\" (eg: in:builtin),",
"# filter results to only that (or those) collections",
"# This was kind-of hacked together, but seems to work well enough",
"collections",
"=",
"[",
"c",
"[",
"\"name\"",
"]",
".",
"lower",
"(",
")",
"for",
"c",
"in",
"current_app",
".",
"kwdb",
".",
"get_collections",
"(",
")",
"]",
"words",
"=",
"[",
"]",
"filters",
"=",
"[",
"]",
"if",
"pattern",
".",
"startswith",
"(",
"\"name:\"",
")",
":",
"pattern",
"=",
"pattern",
"[",
"5",
":",
"]",
".",
"strip",
"(",
")",
"mode",
"=",
"\"name\"",
"else",
":",
"mode",
"=",
"\"both\"",
"for",
"word",
"in",
"pattern",
".",
"split",
"(",
"\" \"",
")",
":",
"if",
"word",
".",
"lower",
"(",
")",
".",
"startswith",
"(",
"\"in:\"",
")",
":",
"filters",
".",
"extend",
"(",
"[",
"name",
"for",
"name",
"in",
"collections",
"if",
"name",
".",
"startswith",
"(",
"word",
"[",
"3",
":",
"]",
")",
"]",
")",
"else",
":",
"words",
".",
"append",
"(",
"word",
")",
"pattern",
"=",
"\" \"",
".",
"join",
"(",
"words",
")",
"keywords",
"=",
"[",
"]",
"for",
"keyword",
"in",
"current_app",
".",
"kwdb",
".",
"search",
"(",
"pattern",
",",
"mode",
")",
":",
"kw",
"=",
"list",
"(",
"keyword",
")",
"collection_id",
"=",
"kw",
"[",
"0",
"]",
"collection_name",
"=",
"kw",
"[",
"1",
"]",
".",
"lower",
"(",
")",
"if",
"len",
"(",
"filters",
")",
"==",
"0",
"or",
"collection_name",
"in",
"filters",
":",
"url",
"=",
"flask",
".",
"url_for",
"(",
"\".doc_for_library\"",
",",
"collection_id",
"=",
"kw",
"[",
"0",
"]",
",",
"keyword",
"=",
"kw",
"[",
"2",
"]",
")",
"row_id",
"=",
"\"row-%s.%s\"",
"%",
"(",
"keyword",
"[",
"1",
"]",
".",
"lower",
"(",
")",
",",
"keyword",
"[",
"2",
"]",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"\" \"",
",",
"\"-\"",
")",
")",
"keywords",
".",
"append",
"(",
"{",
"\"collection_id\"",
":",
"keyword",
"[",
"0",
"]",
",",
"\"collection_name\"",
":",
"keyword",
"[",
"1",
"]",
",",
"\"name\"",
":",
"keyword",
"[",
"2",
"]",
",",
"\"synopsis\"",
":",
"keyword",
"[",
"3",
"]",
",",
"\"version\"",
":",
"__version__",
",",
"\"url\"",
":",
"url",
",",
"\"row_id\"",
":",
"row_id",
"}",
")",
"keywords",
".",
"sort",
"(",
"key",
"=",
"lambda",
"kw",
":",
"kw",
"[",
"\"name\"",
"]",
")",
"return",
"flask",
".",
"render_template",
"(",
"\"search.html\"",
",",
"data",
"=",
"{",
"\"keywords\"",
":",
"keywords",
",",
"\"version\"",
":",
"__version__",
",",
"\"pattern\"",
":",
"pattern",
"}",
")"
] |
Show all keywords that match a pattern
|
[
"Show",
"all",
"keywords",
"that",
"match",
"a",
"pattern"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/blueprints/doc/__init__.py#L47-L92
|
237,129
|
boakley/robotframework-hub
|
rfhub/blueprints/doc/__init__.py
|
get_collections
|
def get_collections(kwdb, libtype="*"):
"""Get list of collections from kwdb, then add urls necessary for hyperlinks"""
collections = kwdb.get_collections(libtype=libtype)
for result in collections:
url = flask.url_for(".doc_for_library", collection_id=result["collection_id"])
result["url"] = url
return collections
|
python
|
def get_collections(kwdb, libtype="*"):
"""Get list of collections from kwdb, then add urls necessary for hyperlinks"""
collections = kwdb.get_collections(libtype=libtype)
for result in collections:
url = flask.url_for(".doc_for_library", collection_id=result["collection_id"])
result["url"] = url
return collections
|
[
"def",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"*\"",
")",
":",
"collections",
"=",
"kwdb",
".",
"get_collections",
"(",
"libtype",
"=",
"libtype",
")",
"for",
"result",
"in",
"collections",
":",
"url",
"=",
"flask",
".",
"url_for",
"(",
"\".doc_for_library\"",
",",
"collection_id",
"=",
"result",
"[",
"\"collection_id\"",
"]",
")",
"result",
"[",
"\"url\"",
"]",
"=",
"url",
"return",
"collections"
] |
Get list of collections from kwdb, then add urls necessary for hyperlinks
|
[
"Get",
"list",
"of",
"collections",
"from",
"kwdb",
"then",
"add",
"urls",
"necessary",
"for",
"hyperlinks"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/blueprints/doc/__init__.py#L129-L136
|
237,130
|
boakley/robotframework-hub
|
rfhub/blueprints/doc/__init__.py
|
get_navpanel_data
|
def get_navpanel_data(kwdb):
"""Get navpanel data from kwdb, and add urls necessary for hyperlinks"""
data = kwdb.get_keyword_hierarchy()
for library in data:
library["url"] = flask.url_for(".doc_for_library", collection_id=library["collection_id"])
for keyword in library["keywords"]:
url = flask.url_for(".doc_for_library",
collection_id=library["collection_id"],
keyword=keyword["name"])
keyword["url"] = url
return data
|
python
|
def get_navpanel_data(kwdb):
"""Get navpanel data from kwdb, and add urls necessary for hyperlinks"""
data = kwdb.get_keyword_hierarchy()
for library in data:
library["url"] = flask.url_for(".doc_for_library", collection_id=library["collection_id"])
for keyword in library["keywords"]:
url = flask.url_for(".doc_for_library",
collection_id=library["collection_id"],
keyword=keyword["name"])
keyword["url"] = url
return data
|
[
"def",
"get_navpanel_data",
"(",
"kwdb",
")",
":",
"data",
"=",
"kwdb",
".",
"get_keyword_hierarchy",
"(",
")",
"for",
"library",
"in",
"data",
":",
"library",
"[",
"\"url\"",
"]",
"=",
"flask",
".",
"url_for",
"(",
"\".doc_for_library\"",
",",
"collection_id",
"=",
"library",
"[",
"\"collection_id\"",
"]",
")",
"for",
"keyword",
"in",
"library",
"[",
"\"keywords\"",
"]",
":",
"url",
"=",
"flask",
".",
"url_for",
"(",
"\".doc_for_library\"",
",",
"collection_id",
"=",
"library",
"[",
"\"collection_id\"",
"]",
",",
"keyword",
"=",
"keyword",
"[",
"\"name\"",
"]",
")",
"keyword",
"[",
"\"url\"",
"]",
"=",
"url",
"return",
"data"
] |
Get navpanel data from kwdb, and add urls necessary for hyperlinks
|
[
"Get",
"navpanel",
"data",
"from",
"kwdb",
"and",
"add",
"urls",
"necessary",
"for",
"hyperlinks"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/blueprints/doc/__init__.py#L138-L149
|
237,131
|
boakley/robotframework-hub
|
rfhub/blueprints/doc/__init__.py
|
doc_to_html
|
def doc_to_html(doc, doc_format="ROBOT"):
"""Convert documentation to HTML"""
from robot.libdocpkg.htmlwriter import DocToHtml
return DocToHtml(doc_format)(doc)
|
python
|
def doc_to_html(doc, doc_format="ROBOT"):
"""Convert documentation to HTML"""
from robot.libdocpkg.htmlwriter import DocToHtml
return DocToHtml(doc_format)(doc)
|
[
"def",
"doc_to_html",
"(",
"doc",
",",
"doc_format",
"=",
"\"ROBOT\"",
")",
":",
"from",
"robot",
".",
"libdocpkg",
".",
"htmlwriter",
"import",
"DocToHtml",
"return",
"DocToHtml",
"(",
"doc_format",
")",
"(",
"doc",
")"
] |
Convert documentation to HTML
|
[
"Convert",
"documentation",
"to",
"HTML"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/blueprints/doc/__init__.py#L152-L155
|
237,132
|
boakley/robotframework-hub
|
rfhub/app.py
|
RobotHub.start
|
def start(self):
"""Start the app"""
if self.args.debug:
self.app.run(port=self.args.port, debug=self.args.debug, host=self.args.interface)
else:
root = "http://%s:%s" % (self.args.interface, self.args.port)
print("tornado web server running on " + root)
self.shutdown_requested = False
http_server = HTTPServer(WSGIContainer(self.app))
http_server.listen(port=self.args.port, address=self.args.interface)
signal.signal(signal.SIGINT, self.signal_handler)
tornado.ioloop.PeriodicCallback(self.check_shutdown_flag, 500).start()
tornado.ioloop.IOLoop.instance().start()
|
python
|
def start(self):
"""Start the app"""
if self.args.debug:
self.app.run(port=self.args.port, debug=self.args.debug, host=self.args.interface)
else:
root = "http://%s:%s" % (self.args.interface, self.args.port)
print("tornado web server running on " + root)
self.shutdown_requested = False
http_server = HTTPServer(WSGIContainer(self.app))
http_server.listen(port=self.args.port, address=self.args.interface)
signal.signal(signal.SIGINT, self.signal_handler)
tornado.ioloop.PeriodicCallback(self.check_shutdown_flag, 500).start()
tornado.ioloop.IOLoop.instance().start()
|
[
"def",
"start",
"(",
"self",
")",
":",
"if",
"self",
".",
"args",
".",
"debug",
":",
"self",
".",
"app",
".",
"run",
"(",
"port",
"=",
"self",
".",
"args",
".",
"port",
",",
"debug",
"=",
"self",
".",
"args",
".",
"debug",
",",
"host",
"=",
"self",
".",
"args",
".",
"interface",
")",
"else",
":",
"root",
"=",
"\"http://%s:%s\"",
"%",
"(",
"self",
".",
"args",
".",
"interface",
",",
"self",
".",
"args",
".",
"port",
")",
"print",
"(",
"\"tornado web server running on \"",
"+",
"root",
")",
"self",
".",
"shutdown_requested",
"=",
"False",
"http_server",
"=",
"HTTPServer",
"(",
"WSGIContainer",
"(",
"self",
".",
"app",
")",
")",
"http_server",
".",
"listen",
"(",
"port",
"=",
"self",
".",
"args",
".",
"port",
",",
"address",
"=",
"self",
".",
"args",
".",
"interface",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"self",
".",
"signal_handler",
")",
"tornado",
".",
"ioloop",
".",
"PeriodicCallback",
"(",
"self",
".",
"check_shutdown_flag",
",",
"500",
")",
".",
"start",
"(",
")",
"tornado",
".",
"ioloop",
".",
"IOLoop",
".",
"instance",
"(",
")",
".",
"start",
"(",
")"
] |
Start the app
|
[
"Start",
"the",
"app"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/app.py#L50-L63
|
237,133
|
boakley/robotframework-hub
|
rfhub/app.py
|
RobotHub.check_shutdown_flag
|
def check_shutdown_flag(self):
"""Shutdown the server if the flag has been set"""
if self.shutdown_requested:
tornado.ioloop.IOLoop.instance().stop()
print("web server stopped.")
|
python
|
def check_shutdown_flag(self):
"""Shutdown the server if the flag has been set"""
if self.shutdown_requested:
tornado.ioloop.IOLoop.instance().stop()
print("web server stopped.")
|
[
"def",
"check_shutdown_flag",
"(",
"self",
")",
":",
"if",
"self",
".",
"shutdown_requested",
":",
"tornado",
".",
"ioloop",
".",
"IOLoop",
".",
"instance",
"(",
")",
".",
"stop",
"(",
")",
"print",
"(",
"\"web server stopped.\"",
")"
] |
Shutdown the server if the flag has been set
|
[
"Shutdown",
"the",
"server",
"if",
"the",
"flag",
"has",
"been",
"set"
] |
f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c
|
https://github.com/boakley/robotframework-hub/blob/f3dc7562fe6218a7b8d7aac7b9ef234e1a573f7c/rfhub/app.py#L69-L73
|
237,134
|
jazzband/python-geojson
|
geojson/utils.py
|
coords
|
def coords(obj):
"""
Yields the coordinates from a Feature or Geometry.
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Feature, Geometry
:return: A generator with coordinate tuples from the geometry or feature.
:rtype: generator
"""
# Handle recursive case first
if 'features' in obj:
for f in obj['features']:
# For Python 2 compatibility
# See https://www.reddit.com/r/learnpython/comments/4rc15s/yield_from_and_python_27/ # noqa: E501
for c in coords(f):
yield c
else:
if isinstance(obj, (tuple, list)):
coordinates = obj
elif 'geometry' in obj:
coordinates = obj['geometry']['coordinates']
else:
coordinates = obj.get('coordinates', obj)
for e in coordinates:
if isinstance(e, (float, int)):
yield tuple(coordinates)
break
for f in coords(e):
yield f
|
python
|
def coords(obj):
"""
Yields the coordinates from a Feature or Geometry.
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Feature, Geometry
:return: A generator with coordinate tuples from the geometry or feature.
:rtype: generator
"""
# Handle recursive case first
if 'features' in obj:
for f in obj['features']:
# For Python 2 compatibility
# See https://www.reddit.com/r/learnpython/comments/4rc15s/yield_from_and_python_27/ # noqa: E501
for c in coords(f):
yield c
else:
if isinstance(obj, (tuple, list)):
coordinates = obj
elif 'geometry' in obj:
coordinates = obj['geometry']['coordinates']
else:
coordinates = obj.get('coordinates', obj)
for e in coordinates:
if isinstance(e, (float, int)):
yield tuple(coordinates)
break
for f in coords(e):
yield f
|
[
"def",
"coords",
"(",
"obj",
")",
":",
"# Handle recursive case first",
"if",
"'features'",
"in",
"obj",
":",
"for",
"f",
"in",
"obj",
"[",
"'features'",
"]",
":",
"# For Python 2 compatibility",
"# See https://www.reddit.com/r/learnpython/comments/4rc15s/yield_from_and_python_27/ # noqa: E501",
"for",
"c",
"in",
"coords",
"(",
"f",
")",
":",
"yield",
"c",
"else",
":",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"coordinates",
"=",
"obj",
"elif",
"'geometry'",
"in",
"obj",
":",
"coordinates",
"=",
"obj",
"[",
"'geometry'",
"]",
"[",
"'coordinates'",
"]",
"else",
":",
"coordinates",
"=",
"obj",
".",
"get",
"(",
"'coordinates'",
",",
"obj",
")",
"for",
"e",
"in",
"coordinates",
":",
"if",
"isinstance",
"(",
"e",
",",
"(",
"float",
",",
"int",
")",
")",
":",
"yield",
"tuple",
"(",
"coordinates",
")",
"break",
"for",
"f",
"in",
"coords",
"(",
"e",
")",
":",
"yield",
"f"
] |
Yields the coordinates from a Feature or Geometry.
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Feature, Geometry
:return: A generator with coordinate tuples from the geometry or feature.
:rtype: generator
|
[
"Yields",
"the",
"coordinates",
"from",
"a",
"Feature",
"or",
"Geometry",
"."
] |
14abb31ba73a9f3cdb81d81c56656ea736f3c865
|
https://github.com/jazzband/python-geojson/blob/14abb31ba73a9f3cdb81d81c56656ea736f3c865/geojson/utils.py#L4-L32
|
237,135
|
jazzband/python-geojson
|
geojson/utils.py
|
map_tuples
|
def map_tuples(func, obj):
"""
Returns the mapped coordinates from a Geometry after applying the provided
function to each coordinate.
:param func: Function to apply to tuples
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
MultiPolygon
:return: The result of applying the function to each dimension in the
array.
:rtype: list
:raises ValueError: if the provided object is not GeoJSON.
"""
if obj['type'] == 'Point':
coordinates = tuple(func(obj['coordinates']))
elif obj['type'] in ['LineString', 'MultiPoint']:
coordinates = [tuple(func(c)) for c in obj['coordinates']]
elif obj['type'] in ['MultiLineString', 'Polygon']:
coordinates = [[
tuple(func(c)) for c in curve]
for curve in obj['coordinates']]
elif obj['type'] == 'MultiPolygon':
coordinates = [[[
tuple(func(c)) for c in curve]
for curve in part]
for part in obj['coordinates']]
elif obj['type'] in ['Feature', 'FeatureCollection', 'GeometryCollection']:
return map_geometries(lambda g: map_tuples(func, g), obj)
else:
raise ValueError("Invalid geometry object %s" % repr(obj))
return {'type': obj['type'], 'coordinates': coordinates}
|
python
|
def map_tuples(func, obj):
"""
Returns the mapped coordinates from a Geometry after applying the provided
function to each coordinate.
:param func: Function to apply to tuples
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
MultiPolygon
:return: The result of applying the function to each dimension in the
array.
:rtype: list
:raises ValueError: if the provided object is not GeoJSON.
"""
if obj['type'] == 'Point':
coordinates = tuple(func(obj['coordinates']))
elif obj['type'] in ['LineString', 'MultiPoint']:
coordinates = [tuple(func(c)) for c in obj['coordinates']]
elif obj['type'] in ['MultiLineString', 'Polygon']:
coordinates = [[
tuple(func(c)) for c in curve]
for curve in obj['coordinates']]
elif obj['type'] == 'MultiPolygon':
coordinates = [[[
tuple(func(c)) for c in curve]
for curve in part]
for part in obj['coordinates']]
elif obj['type'] in ['Feature', 'FeatureCollection', 'GeometryCollection']:
return map_geometries(lambda g: map_tuples(func, g), obj)
else:
raise ValueError("Invalid geometry object %s" % repr(obj))
return {'type': obj['type'], 'coordinates': coordinates}
|
[
"def",
"map_tuples",
"(",
"func",
",",
"obj",
")",
":",
"if",
"obj",
"[",
"'type'",
"]",
"==",
"'Point'",
":",
"coordinates",
"=",
"tuple",
"(",
"func",
"(",
"obj",
"[",
"'coordinates'",
"]",
")",
")",
"elif",
"obj",
"[",
"'type'",
"]",
"in",
"[",
"'LineString'",
",",
"'MultiPoint'",
"]",
":",
"coordinates",
"=",
"[",
"tuple",
"(",
"func",
"(",
"c",
")",
")",
"for",
"c",
"in",
"obj",
"[",
"'coordinates'",
"]",
"]",
"elif",
"obj",
"[",
"'type'",
"]",
"in",
"[",
"'MultiLineString'",
",",
"'Polygon'",
"]",
":",
"coordinates",
"=",
"[",
"[",
"tuple",
"(",
"func",
"(",
"c",
")",
")",
"for",
"c",
"in",
"curve",
"]",
"for",
"curve",
"in",
"obj",
"[",
"'coordinates'",
"]",
"]",
"elif",
"obj",
"[",
"'type'",
"]",
"==",
"'MultiPolygon'",
":",
"coordinates",
"=",
"[",
"[",
"[",
"tuple",
"(",
"func",
"(",
"c",
")",
")",
"for",
"c",
"in",
"curve",
"]",
"for",
"curve",
"in",
"part",
"]",
"for",
"part",
"in",
"obj",
"[",
"'coordinates'",
"]",
"]",
"elif",
"obj",
"[",
"'type'",
"]",
"in",
"[",
"'Feature'",
",",
"'FeatureCollection'",
",",
"'GeometryCollection'",
"]",
":",
"return",
"map_geometries",
"(",
"lambda",
"g",
":",
"map_tuples",
"(",
"func",
",",
"g",
")",
",",
"obj",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Invalid geometry object %s\"",
"%",
"repr",
"(",
"obj",
")",
")",
"return",
"{",
"'type'",
":",
"obj",
"[",
"'type'",
"]",
",",
"'coordinates'",
":",
"coordinates",
"}"
] |
Returns the mapped coordinates from a Geometry after applying the provided
function to each coordinate.
:param func: Function to apply to tuples
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
MultiPolygon
:return: The result of applying the function to each dimension in the
array.
:rtype: list
:raises ValueError: if the provided object is not GeoJSON.
|
[
"Returns",
"the",
"mapped",
"coordinates",
"from",
"a",
"Geometry",
"after",
"applying",
"the",
"provided",
"function",
"to",
"each",
"coordinate",
"."
] |
14abb31ba73a9f3cdb81d81c56656ea736f3c865
|
https://github.com/jazzband/python-geojson/blob/14abb31ba73a9f3cdb81d81c56656ea736f3c865/geojson/utils.py#L58-L91
|
237,136
|
jazzband/python-geojson
|
geojson/utils.py
|
map_geometries
|
def map_geometries(func, obj):
"""
Returns the result of passing every geometry in the given geojson object
through func.
:param func: Function to apply to tuples
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: GeoJSON
:return: The result of applying the function to each geometry
:rtype: list
:raises ValueError: if the provided object is not geojson.
"""
simple_types = [
'Point',
'LineString',
'MultiPoint',
'MultiLineString',
'Polygon',
'MultiPolygon',
]
if obj['type'] in simple_types:
return func(obj)
elif obj['type'] == 'GeometryCollection':
geoms = [func(geom) if geom else None for geom in obj['geometries']]
return {'type': obj['type'], 'geometries': geoms}
elif obj['type'] == 'Feature':
geom = func(obj['geometry']) if obj['geometry'] else None
return {
'type': obj['type'],
'geometry': geom,
'properties': obj['properties'],
}
elif obj['type'] == 'FeatureCollection':
feats = [map_geometries(func, feat) for feat in obj['features']]
return {'type': obj['type'], 'features': feats}
else:
raise ValueError("Invalid GeoJSON object %s" % repr(obj))
|
python
|
def map_geometries(func, obj):
"""
Returns the result of passing every geometry in the given geojson object
through func.
:param func: Function to apply to tuples
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: GeoJSON
:return: The result of applying the function to each geometry
:rtype: list
:raises ValueError: if the provided object is not geojson.
"""
simple_types = [
'Point',
'LineString',
'MultiPoint',
'MultiLineString',
'Polygon',
'MultiPolygon',
]
if obj['type'] in simple_types:
return func(obj)
elif obj['type'] == 'GeometryCollection':
geoms = [func(geom) if geom else None for geom in obj['geometries']]
return {'type': obj['type'], 'geometries': geoms}
elif obj['type'] == 'Feature':
geom = func(obj['geometry']) if obj['geometry'] else None
return {
'type': obj['type'],
'geometry': geom,
'properties': obj['properties'],
}
elif obj['type'] == 'FeatureCollection':
feats = [map_geometries(func, feat) for feat in obj['features']]
return {'type': obj['type'], 'features': feats}
else:
raise ValueError("Invalid GeoJSON object %s" % repr(obj))
|
[
"def",
"map_geometries",
"(",
"func",
",",
"obj",
")",
":",
"simple_types",
"=",
"[",
"'Point'",
",",
"'LineString'",
",",
"'MultiPoint'",
",",
"'MultiLineString'",
",",
"'Polygon'",
",",
"'MultiPolygon'",
",",
"]",
"if",
"obj",
"[",
"'type'",
"]",
"in",
"simple_types",
":",
"return",
"func",
"(",
"obj",
")",
"elif",
"obj",
"[",
"'type'",
"]",
"==",
"'GeometryCollection'",
":",
"geoms",
"=",
"[",
"func",
"(",
"geom",
")",
"if",
"geom",
"else",
"None",
"for",
"geom",
"in",
"obj",
"[",
"'geometries'",
"]",
"]",
"return",
"{",
"'type'",
":",
"obj",
"[",
"'type'",
"]",
",",
"'geometries'",
":",
"geoms",
"}",
"elif",
"obj",
"[",
"'type'",
"]",
"==",
"'Feature'",
":",
"geom",
"=",
"func",
"(",
"obj",
"[",
"'geometry'",
"]",
")",
"if",
"obj",
"[",
"'geometry'",
"]",
"else",
"None",
"return",
"{",
"'type'",
":",
"obj",
"[",
"'type'",
"]",
",",
"'geometry'",
":",
"geom",
",",
"'properties'",
":",
"obj",
"[",
"'properties'",
"]",
",",
"}",
"elif",
"obj",
"[",
"'type'",
"]",
"==",
"'FeatureCollection'",
":",
"feats",
"=",
"[",
"map_geometries",
"(",
"func",
",",
"feat",
")",
"for",
"feat",
"in",
"obj",
"[",
"'features'",
"]",
"]",
"return",
"{",
"'type'",
":",
"obj",
"[",
"'type'",
"]",
",",
"'features'",
":",
"feats",
"}",
"else",
":",
"raise",
"ValueError",
"(",
"\"Invalid GeoJSON object %s\"",
"%",
"repr",
"(",
"obj",
")",
")"
] |
Returns the result of passing every geometry in the given geojson object
through func.
:param func: Function to apply to tuples
:type func: function
:param obj: A geometry or feature to extract the coordinates from.
:type obj: GeoJSON
:return: The result of applying the function to each geometry
:rtype: list
:raises ValueError: if the provided object is not geojson.
|
[
"Returns",
"the",
"result",
"of",
"passing",
"every",
"geometry",
"in",
"the",
"given",
"geojson",
"object",
"through",
"func",
"."
] |
14abb31ba73a9f3cdb81d81c56656ea736f3c865
|
https://github.com/jazzband/python-geojson/blob/14abb31ba73a9f3cdb81d81c56656ea736f3c865/geojson/utils.py#L94-L132
|
237,137
|
jazzband/python-geojson
|
geojson/base.py
|
GeoJSON.to_instance
|
def to_instance(cls, ob, default=None, strict=False):
"""Encode a GeoJSON dict into an GeoJSON object.
Assumes the caller knows that the dict should satisfy a GeoJSON type.
:param cls: Dict containing the elements to be encoded into a GeoJSON
object.
:type cls: dict
:param ob: GeoJSON object into which to encode the dict provided in
`cls`.
:type ob: GeoJSON
:param default: A default instance to append the content of the dict
to if none is provided.
:type default: GeoJSON
:param strict: Raise error if unable to coerce particular keys or
attributes to a valid GeoJSON structure.
:type strict: bool
:return: A GeoJSON object with the dict's elements as its constituents.
:rtype: GeoJSON
:raises TypeError: If the input dict contains items that are not valid
GeoJSON types.
:raises UnicodeEncodeError: If the input dict contains items of a type
that contain non-ASCII characters.
:raises AttributeError: If the input dict contains items that are not
valid GeoJSON types.
"""
if ob is None and default is not None:
instance = default()
elif isinstance(ob, GeoJSON):
instance = ob
else:
mapping = to_mapping(ob)
d = {}
for k in mapping:
d[k] = mapping[k]
try:
type_ = d.pop("type")
try:
type_ = str(type_)
except UnicodeEncodeError:
# If the type contains non-ascii characters, we can assume
# it's not a valid GeoJSON type
raise AttributeError(
"{0} is not a GeoJSON type").format(type_)
geojson_factory = getattr(geojson.factory, type_)
instance = geojson_factory(**d)
except (AttributeError, KeyError) as invalid:
if strict:
msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
msg %= (ob, invalid)
raise ValueError(msg)
instance = ob
return instance
|
python
|
def to_instance(cls, ob, default=None, strict=False):
"""Encode a GeoJSON dict into an GeoJSON object.
Assumes the caller knows that the dict should satisfy a GeoJSON type.
:param cls: Dict containing the elements to be encoded into a GeoJSON
object.
:type cls: dict
:param ob: GeoJSON object into which to encode the dict provided in
`cls`.
:type ob: GeoJSON
:param default: A default instance to append the content of the dict
to if none is provided.
:type default: GeoJSON
:param strict: Raise error if unable to coerce particular keys or
attributes to a valid GeoJSON structure.
:type strict: bool
:return: A GeoJSON object with the dict's elements as its constituents.
:rtype: GeoJSON
:raises TypeError: If the input dict contains items that are not valid
GeoJSON types.
:raises UnicodeEncodeError: If the input dict contains items of a type
that contain non-ASCII characters.
:raises AttributeError: If the input dict contains items that are not
valid GeoJSON types.
"""
if ob is None and default is not None:
instance = default()
elif isinstance(ob, GeoJSON):
instance = ob
else:
mapping = to_mapping(ob)
d = {}
for k in mapping:
d[k] = mapping[k]
try:
type_ = d.pop("type")
try:
type_ = str(type_)
except UnicodeEncodeError:
# If the type contains non-ascii characters, we can assume
# it's not a valid GeoJSON type
raise AttributeError(
"{0} is not a GeoJSON type").format(type_)
geojson_factory = getattr(geojson.factory, type_)
instance = geojson_factory(**d)
except (AttributeError, KeyError) as invalid:
if strict:
msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
msg %= (ob, invalid)
raise ValueError(msg)
instance = ob
return instance
|
[
"def",
"to_instance",
"(",
"cls",
",",
"ob",
",",
"default",
"=",
"None",
",",
"strict",
"=",
"False",
")",
":",
"if",
"ob",
"is",
"None",
"and",
"default",
"is",
"not",
"None",
":",
"instance",
"=",
"default",
"(",
")",
"elif",
"isinstance",
"(",
"ob",
",",
"GeoJSON",
")",
":",
"instance",
"=",
"ob",
"else",
":",
"mapping",
"=",
"to_mapping",
"(",
"ob",
")",
"d",
"=",
"{",
"}",
"for",
"k",
"in",
"mapping",
":",
"d",
"[",
"k",
"]",
"=",
"mapping",
"[",
"k",
"]",
"try",
":",
"type_",
"=",
"d",
".",
"pop",
"(",
"\"type\"",
")",
"try",
":",
"type_",
"=",
"str",
"(",
"type_",
")",
"except",
"UnicodeEncodeError",
":",
"# If the type contains non-ascii characters, we can assume",
"# it's not a valid GeoJSON type",
"raise",
"AttributeError",
"(",
"\"{0} is not a GeoJSON type\"",
")",
".",
"format",
"(",
"type_",
")",
"geojson_factory",
"=",
"getattr",
"(",
"geojson",
".",
"factory",
",",
"type_",
")",
"instance",
"=",
"geojson_factory",
"(",
"*",
"*",
"d",
")",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
"as",
"invalid",
":",
"if",
"strict",
":",
"msg",
"=",
"\"Cannot coerce %r into a valid GeoJSON structure: %s\"",
"msg",
"%=",
"(",
"ob",
",",
"invalid",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"instance",
"=",
"ob",
"return",
"instance"
] |
Encode a GeoJSON dict into an GeoJSON object.
Assumes the caller knows that the dict should satisfy a GeoJSON type.
:param cls: Dict containing the elements to be encoded into a GeoJSON
object.
:type cls: dict
:param ob: GeoJSON object into which to encode the dict provided in
`cls`.
:type ob: GeoJSON
:param default: A default instance to append the content of the dict
to if none is provided.
:type default: GeoJSON
:param strict: Raise error if unable to coerce particular keys or
attributes to a valid GeoJSON structure.
:type strict: bool
:return: A GeoJSON object with the dict's elements as its constituents.
:rtype: GeoJSON
:raises TypeError: If the input dict contains items that are not valid
GeoJSON types.
:raises UnicodeEncodeError: If the input dict contains items of a type
that contain non-ASCII characters.
:raises AttributeError: If the input dict contains items that are not
valid GeoJSON types.
|
[
"Encode",
"a",
"GeoJSON",
"dict",
"into",
"an",
"GeoJSON",
"object",
".",
"Assumes",
"the",
"caller",
"knows",
"that",
"the",
"dict",
"should",
"satisfy",
"a",
"GeoJSON",
"type",
"."
] |
14abb31ba73a9f3cdb81d81c56656ea736f3c865
|
https://github.com/jazzband/python-geojson/blob/14abb31ba73a9f3cdb81d81c56656ea736f3c865/geojson/base.py#L71-L122
|
237,138
|
jazzband/python-geojson
|
geojson/base.py
|
GeoJSON.check_list_errors
|
def check_list_errors(self, checkFunc, lst):
"""Validation helper function."""
# check for errors on each subitem, filter only subitems with errors
results = (checkFunc(i) for i in lst)
return [err for err in results if err]
|
python
|
def check_list_errors(self, checkFunc, lst):
"""Validation helper function."""
# check for errors on each subitem, filter only subitems with errors
results = (checkFunc(i) for i in lst)
return [err for err in results if err]
|
[
"def",
"check_list_errors",
"(",
"self",
",",
"checkFunc",
",",
"lst",
")",
":",
"# check for errors on each subitem, filter only subitems with errors",
"results",
"=",
"(",
"checkFunc",
"(",
"i",
")",
"for",
"i",
"in",
"lst",
")",
"return",
"[",
"err",
"for",
"err",
"in",
"results",
"if",
"err",
"]"
] |
Validation helper function.
|
[
"Validation",
"helper",
"function",
"."
] |
14abb31ba73a9f3cdb81d81c56656ea736f3c865
|
https://github.com/jazzband/python-geojson/blob/14abb31ba73a9f3cdb81d81c56656ea736f3c865/geojson/base.py#L128-L132
|
237,139
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.run_only_once
|
def run_only_once(self, keyword):
"""
Runs a keyword only once in one of the parallel processes.
As the keyword will be called
only in one process and the return value could basically be anything.
The "Run Only Once" can't return the actual return value.
If the keyword fails, "Run Only Once" fails.
Others executing "Run Only Once" wait before going through this
keyword before the actual command has been executed.
NOTE! This is a potential "Shoot yourself in to knee" keyword
Especially note that all the namespace changes are only visible
in the process that actually executed the keyword.
Also note that this might lead to odd situations if used inside
of other keywords.
Also at this point the keyword will be identified to be same
if it has the same name.
"""
lock_name = 'pabot_run_only_once_%s' % keyword
try:
self.acquire_lock(lock_name)
passed = self.get_parallel_value_for_key(lock_name)
if passed != '':
if passed == 'FAILED':
raise AssertionError('Keyword failed in other process')
return
BuiltIn().run_keyword(keyword)
self.set_parallel_value_for_key(lock_name, 'PASSED')
except:
self.set_parallel_value_for_key(lock_name, 'FAILED')
raise
finally:
self.release_lock(lock_name)
|
python
|
def run_only_once(self, keyword):
"""
Runs a keyword only once in one of the parallel processes.
As the keyword will be called
only in one process and the return value could basically be anything.
The "Run Only Once" can't return the actual return value.
If the keyword fails, "Run Only Once" fails.
Others executing "Run Only Once" wait before going through this
keyword before the actual command has been executed.
NOTE! This is a potential "Shoot yourself in to knee" keyword
Especially note that all the namespace changes are only visible
in the process that actually executed the keyword.
Also note that this might lead to odd situations if used inside
of other keywords.
Also at this point the keyword will be identified to be same
if it has the same name.
"""
lock_name = 'pabot_run_only_once_%s' % keyword
try:
self.acquire_lock(lock_name)
passed = self.get_parallel_value_for_key(lock_name)
if passed != '':
if passed == 'FAILED':
raise AssertionError('Keyword failed in other process')
return
BuiltIn().run_keyword(keyword)
self.set_parallel_value_for_key(lock_name, 'PASSED')
except:
self.set_parallel_value_for_key(lock_name, 'FAILED')
raise
finally:
self.release_lock(lock_name)
|
[
"def",
"run_only_once",
"(",
"self",
",",
"keyword",
")",
":",
"lock_name",
"=",
"'pabot_run_only_once_%s'",
"%",
"keyword",
"try",
":",
"self",
".",
"acquire_lock",
"(",
"lock_name",
")",
"passed",
"=",
"self",
".",
"get_parallel_value_for_key",
"(",
"lock_name",
")",
"if",
"passed",
"!=",
"''",
":",
"if",
"passed",
"==",
"'FAILED'",
":",
"raise",
"AssertionError",
"(",
"'Keyword failed in other process'",
")",
"return",
"BuiltIn",
"(",
")",
".",
"run_keyword",
"(",
"keyword",
")",
"self",
".",
"set_parallel_value_for_key",
"(",
"lock_name",
",",
"'PASSED'",
")",
"except",
":",
"self",
".",
"set_parallel_value_for_key",
"(",
"lock_name",
",",
"'FAILED'",
")",
"raise",
"finally",
":",
"self",
".",
"release_lock",
"(",
"lock_name",
")"
] |
Runs a keyword only once in one of the parallel processes.
As the keyword will be called
only in one process and the return value could basically be anything.
The "Run Only Once" can't return the actual return value.
If the keyword fails, "Run Only Once" fails.
Others executing "Run Only Once" wait before going through this
keyword before the actual command has been executed.
NOTE! This is a potential "Shoot yourself in to knee" keyword
Especially note that all the namespace changes are only visible
in the process that actually executed the keyword.
Also note that this might lead to odd situations if used inside
of other keywords.
Also at this point the keyword will be identified to be same
if it has the same name.
|
[
"Runs",
"a",
"keyword",
"only",
"once",
"in",
"one",
"of",
"the",
"parallel",
"processes",
".",
"As",
"the",
"keyword",
"will",
"be",
"called",
"only",
"in",
"one",
"process",
"and",
"the",
"return",
"value",
"could",
"basically",
"be",
"anything",
".",
"The",
"Run",
"Only",
"Once",
"can",
"t",
"return",
"the",
"actual",
"return",
"value",
".",
"If",
"the",
"keyword",
"fails",
"Run",
"Only",
"Once",
"fails",
".",
"Others",
"executing",
"Run",
"Only",
"Once",
"wait",
"before",
"going",
"through",
"this",
"keyword",
"before",
"the",
"actual",
"command",
"has",
"been",
"executed",
".",
"NOTE!",
"This",
"is",
"a",
"potential",
"Shoot",
"yourself",
"in",
"to",
"knee",
"keyword",
"Especially",
"note",
"that",
"all",
"the",
"namespace",
"changes",
"are",
"only",
"visible",
"in",
"the",
"process",
"that",
"actually",
"executed",
"the",
"keyword",
".",
"Also",
"note",
"that",
"this",
"might",
"lead",
"to",
"odd",
"situations",
"if",
"used",
"inside",
"of",
"other",
"keywords",
".",
"Also",
"at",
"this",
"point",
"the",
"keyword",
"will",
"be",
"identified",
"to",
"be",
"same",
"if",
"it",
"has",
"the",
"same",
"name",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L136-L167
|
237,140
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.set_parallel_value_for_key
|
def set_parallel_value_for_key(self, key, value):
"""
Set a globally available key and value that can be accessed
from all the pabot processes.
"""
if self._remotelib:
self._remotelib.run_keyword('set_parallel_value_for_key',
[key, value], {})
else:
_PabotLib.set_parallel_value_for_key(self, key, value)
|
python
|
def set_parallel_value_for_key(self, key, value):
"""
Set a globally available key and value that can be accessed
from all the pabot processes.
"""
if self._remotelib:
self._remotelib.run_keyword('set_parallel_value_for_key',
[key, value], {})
else:
_PabotLib.set_parallel_value_for_key(self, key, value)
|
[
"def",
"set_parallel_value_for_key",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"self",
".",
"_remotelib",
":",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'set_parallel_value_for_key'",
",",
"[",
"key",
",",
"value",
"]",
",",
"{",
"}",
")",
"else",
":",
"_PabotLib",
".",
"set_parallel_value_for_key",
"(",
"self",
",",
"key",
",",
"value",
")"
] |
Set a globally available key and value that can be accessed
from all the pabot processes.
|
[
"Set",
"a",
"globally",
"available",
"key",
"and",
"value",
"that",
"can",
"be",
"accessed",
"from",
"all",
"the",
"pabot",
"processes",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L169-L178
|
237,141
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.get_parallel_value_for_key
|
def get_parallel_value_for_key(self, key):
"""
Get the value for a key. If there is no value for the key then empty
string is returned.
"""
if self._remotelib:
return self._remotelib.run_keyword('get_parallel_value_for_key',
[key], {})
return _PabotLib.get_parallel_value_for_key(self, key)
|
python
|
def get_parallel_value_for_key(self, key):
"""
Get the value for a key. If there is no value for the key then empty
string is returned.
"""
if self._remotelib:
return self._remotelib.run_keyword('get_parallel_value_for_key',
[key], {})
return _PabotLib.get_parallel_value_for_key(self, key)
|
[
"def",
"get_parallel_value_for_key",
"(",
"self",
",",
"key",
")",
":",
"if",
"self",
".",
"_remotelib",
":",
"return",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'get_parallel_value_for_key'",
",",
"[",
"key",
"]",
",",
"{",
"}",
")",
"return",
"_PabotLib",
".",
"get_parallel_value_for_key",
"(",
"self",
",",
"key",
")"
] |
Get the value for a key. If there is no value for the key then empty
string is returned.
|
[
"Get",
"the",
"value",
"for",
"a",
"key",
".",
"If",
"there",
"is",
"no",
"value",
"for",
"the",
"key",
"then",
"empty",
"string",
"is",
"returned",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L180-L188
|
237,142
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.acquire_lock
|
def acquire_lock(self, name):
"""
Wait for a lock with name.
This will prevent other processes from acquiring the lock with
the name while it is held. Thus they will wait in the position
where they are acquiring the lock until the process that has it
releases it.
"""
if self._remotelib:
try:
while not self._remotelib.run_keyword('acquire_lock',
[name, self._my_id], {}):
time.sleep(0.1)
logger.debug('waiting for lock to release')
return True
except RuntimeError:
logger.warn('no connection')
self.__remotelib = None
return _PabotLib.acquire_lock(self, name, self._my_id)
|
python
|
def acquire_lock(self, name):
"""
Wait for a lock with name.
This will prevent other processes from acquiring the lock with
the name while it is held. Thus they will wait in the position
where they are acquiring the lock until the process that has it
releases it.
"""
if self._remotelib:
try:
while not self._remotelib.run_keyword('acquire_lock',
[name, self._my_id], {}):
time.sleep(0.1)
logger.debug('waiting for lock to release')
return True
except RuntimeError:
logger.warn('no connection')
self.__remotelib = None
return _PabotLib.acquire_lock(self, name, self._my_id)
|
[
"def",
"acquire_lock",
"(",
"self",
",",
"name",
")",
":",
"if",
"self",
".",
"_remotelib",
":",
"try",
":",
"while",
"not",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'acquire_lock'",
",",
"[",
"name",
",",
"self",
".",
"_my_id",
"]",
",",
"{",
"}",
")",
":",
"time",
".",
"sleep",
"(",
"0.1",
")",
"logger",
".",
"debug",
"(",
"'waiting for lock to release'",
")",
"return",
"True",
"except",
"RuntimeError",
":",
"logger",
".",
"warn",
"(",
"'no connection'",
")",
"self",
".",
"__remotelib",
"=",
"None",
"return",
"_PabotLib",
".",
"acquire_lock",
"(",
"self",
",",
"name",
",",
"self",
".",
"_my_id",
")"
] |
Wait for a lock with name.
This will prevent other processes from acquiring the lock with
the name while it is held. Thus they will wait in the position
where they are acquiring the lock until the process that has it
releases it.
|
[
"Wait",
"for",
"a",
"lock",
"with",
"name",
".",
"This",
"will",
"prevent",
"other",
"processes",
"from",
"acquiring",
"the",
"lock",
"with",
"the",
"name",
"while",
"it",
"is",
"held",
".",
"Thus",
"they",
"will",
"wait",
"in",
"the",
"position",
"where",
"they",
"are",
"acquiring",
"the",
"lock",
"until",
"the",
"process",
"that",
"has",
"it",
"releases",
"it",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L190-L208
|
237,143
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.release_lock
|
def release_lock(self, name):
"""
Release a lock with name.
This will enable others to acquire the lock.
"""
if self._remotelib:
self._remotelib.run_keyword('release_lock',
[name, self._my_id], {})
else:
_PabotLib.release_lock(self, name, self._my_id)
|
python
|
def release_lock(self, name):
"""
Release a lock with name.
This will enable others to acquire the lock.
"""
if self._remotelib:
self._remotelib.run_keyword('release_lock',
[name, self._my_id], {})
else:
_PabotLib.release_lock(self, name, self._my_id)
|
[
"def",
"release_lock",
"(",
"self",
",",
"name",
")",
":",
"if",
"self",
".",
"_remotelib",
":",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'release_lock'",
",",
"[",
"name",
",",
"self",
".",
"_my_id",
"]",
",",
"{",
"}",
")",
"else",
":",
"_PabotLib",
".",
"release_lock",
"(",
"self",
",",
"name",
",",
"self",
".",
"_my_id",
")"
] |
Release a lock with name.
This will enable others to acquire the lock.
|
[
"Release",
"a",
"lock",
"with",
"name",
".",
"This",
"will",
"enable",
"others",
"to",
"acquire",
"the",
"lock",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L210-L219
|
237,144
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.release_locks
|
def release_locks(self):
"""
Release all locks called by instance.
"""
if self._remotelib:
self._remotelib.run_keyword('release_locks',
[self._my_id], {})
else:
_PabotLib.release_locks(self, self._my_id)
|
python
|
def release_locks(self):
"""
Release all locks called by instance.
"""
if self._remotelib:
self._remotelib.run_keyword('release_locks',
[self._my_id], {})
else:
_PabotLib.release_locks(self, self._my_id)
|
[
"def",
"release_locks",
"(",
"self",
")",
":",
"if",
"self",
".",
"_remotelib",
":",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'release_locks'",
",",
"[",
"self",
".",
"_my_id",
"]",
",",
"{",
"}",
")",
"else",
":",
"_PabotLib",
".",
"release_locks",
"(",
"self",
",",
"self",
".",
"_my_id",
")"
] |
Release all locks called by instance.
|
[
"Release",
"all",
"locks",
"called",
"by",
"instance",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L221-L229
|
237,145
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.acquire_value_set
|
def acquire_value_set(self, *tags):
"""
Reserve a set of values for this execution.
No other process can reserve the same set of values while the set is
reserved. Acquired value set needs to be released after use to allow
other processes to access it.
Add tags to limit the possible value sets that this returns.
"""
setname = self._acquire_value_set(*tags)
if setname is None:
raise ValueError("Could not aquire a value set")
return setname
|
python
|
def acquire_value_set(self, *tags):
"""
Reserve a set of values for this execution.
No other process can reserve the same set of values while the set is
reserved. Acquired value set needs to be released after use to allow
other processes to access it.
Add tags to limit the possible value sets that this returns.
"""
setname = self._acquire_value_set(*tags)
if setname is None:
raise ValueError("Could not aquire a value set")
return setname
|
[
"def",
"acquire_value_set",
"(",
"self",
",",
"*",
"tags",
")",
":",
"setname",
"=",
"self",
".",
"_acquire_value_set",
"(",
"*",
"tags",
")",
"if",
"setname",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Could not aquire a value set\"",
")",
"return",
"setname"
] |
Reserve a set of values for this execution.
No other process can reserve the same set of values while the set is
reserved. Acquired value set needs to be released after use to allow
other processes to access it.
Add tags to limit the possible value sets that this returns.
|
[
"Reserve",
"a",
"set",
"of",
"values",
"for",
"this",
"execution",
".",
"No",
"other",
"process",
"can",
"reserve",
"the",
"same",
"set",
"of",
"values",
"while",
"the",
"set",
"is",
"reserved",
".",
"Acquired",
"value",
"set",
"needs",
"to",
"be",
"released",
"after",
"use",
"to",
"allow",
"other",
"processes",
"to",
"access",
"it",
".",
"Add",
"tags",
"to",
"limit",
"the",
"possible",
"value",
"sets",
"that",
"this",
"returns",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L231-L242
|
237,146
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.get_value_from_set
|
def get_value_from_set(self, key):
"""
Get a value from previously reserved value set.
"""
#TODO: This should be done locally.
# We do not really need to call centralised server if the set is already
# reserved as the data there is immutable during execution
key = key.lower()
if self._remotelib:
while True:
value = self._remotelib.run_keyword('get_value_from_set',
[key, self._my_id], {})
if value:
return value
time.sleep(0.1)
logger.debug('waiting for a value')
else:
return _PabotLib.get_value_from_set(self, key, self._my_id)
|
python
|
def get_value_from_set(self, key):
"""
Get a value from previously reserved value set.
"""
#TODO: This should be done locally.
# We do not really need to call centralised server if the set is already
# reserved as the data there is immutable during execution
key = key.lower()
if self._remotelib:
while True:
value = self._remotelib.run_keyword('get_value_from_set',
[key, self._my_id], {})
if value:
return value
time.sleep(0.1)
logger.debug('waiting for a value')
else:
return _PabotLib.get_value_from_set(self, key, self._my_id)
|
[
"def",
"get_value_from_set",
"(",
"self",
",",
"key",
")",
":",
"#TODO: This should be done locally. ",
"# We do not really need to call centralised server if the set is already",
"# reserved as the data there is immutable during execution",
"key",
"=",
"key",
".",
"lower",
"(",
")",
"if",
"self",
".",
"_remotelib",
":",
"while",
"True",
":",
"value",
"=",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'get_value_from_set'",
",",
"[",
"key",
",",
"self",
".",
"_my_id",
"]",
",",
"{",
"}",
")",
"if",
"value",
":",
"return",
"value",
"time",
".",
"sleep",
"(",
"0.1",
")",
"logger",
".",
"debug",
"(",
"'waiting for a value'",
")",
"else",
":",
"return",
"_PabotLib",
".",
"get_value_from_set",
"(",
"self",
",",
"key",
",",
"self",
".",
"_my_id",
")"
] |
Get a value from previously reserved value set.
|
[
"Get",
"a",
"value",
"from",
"previously",
"reserved",
"value",
"set",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L260-L277
|
237,147
|
mkorpela/pabot
|
pabot/pabotlib.py
|
PabotLib.release_value_set
|
def release_value_set(self):
"""
Release a reserved value set so that other executions can use it also.
"""
if self._remotelib:
self._remotelib.run_keyword('release_value_set', [self._my_id], {})
else:
_PabotLib.release_value_set(self, self._my_id)
|
python
|
def release_value_set(self):
"""
Release a reserved value set so that other executions can use it also.
"""
if self._remotelib:
self._remotelib.run_keyword('release_value_set', [self._my_id], {})
else:
_PabotLib.release_value_set(self, self._my_id)
|
[
"def",
"release_value_set",
"(",
"self",
")",
":",
"if",
"self",
".",
"_remotelib",
":",
"self",
".",
"_remotelib",
".",
"run_keyword",
"(",
"'release_value_set'",
",",
"[",
"self",
".",
"_my_id",
"]",
",",
"{",
"}",
")",
"else",
":",
"_PabotLib",
".",
"release_value_set",
"(",
"self",
",",
"self",
".",
"_my_id",
")"
] |
Release a reserved value set so that other executions can use it also.
|
[
"Release",
"a",
"reserved",
"value",
"set",
"so",
"that",
"other",
"executions",
"can",
"use",
"it",
"also",
"."
] |
b7d85546a58e398d579bb14fd9135858ec08a031
|
https://github.com/mkorpela/pabot/blob/b7d85546a58e398d579bb14fd9135858ec08a031/pabot/pabotlib.py#L279-L286
|
237,148
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
install_all_patches
|
def install_all_patches():
"""
A convenience method that installs all available hooks.
If a specific module is not available on the path, it is ignored.
"""
from . import mysqldb
from . import psycopg2
from . import strict_redis
from . import sqlalchemy
from . import tornado_http
from . import urllib
from . import urllib2
from . import requests
mysqldb.install_patches()
psycopg2.install_patches()
strict_redis.install_patches()
sqlalchemy.install_patches()
tornado_http.install_patches()
urllib.install_patches()
urllib2.install_patches()
requests.install_patches()
|
python
|
def install_all_patches():
"""
A convenience method that installs all available hooks.
If a specific module is not available on the path, it is ignored.
"""
from . import mysqldb
from . import psycopg2
from . import strict_redis
from . import sqlalchemy
from . import tornado_http
from . import urllib
from . import urllib2
from . import requests
mysqldb.install_patches()
psycopg2.install_patches()
strict_redis.install_patches()
sqlalchemy.install_patches()
tornado_http.install_patches()
urllib.install_patches()
urllib2.install_patches()
requests.install_patches()
|
[
"def",
"install_all_patches",
"(",
")",
":",
"from",
".",
"import",
"mysqldb",
"from",
".",
"import",
"psycopg2",
"from",
".",
"import",
"strict_redis",
"from",
".",
"import",
"sqlalchemy",
"from",
".",
"import",
"tornado_http",
"from",
".",
"import",
"urllib",
"from",
".",
"import",
"urllib2",
"from",
".",
"import",
"requests",
"mysqldb",
".",
"install_patches",
"(",
")",
"psycopg2",
".",
"install_patches",
"(",
")",
"strict_redis",
".",
"install_patches",
"(",
")",
"sqlalchemy",
".",
"install_patches",
"(",
")",
"tornado_http",
".",
"install_patches",
"(",
")",
"urllib",
".",
"install_patches",
"(",
")",
"urllib2",
".",
"install_patches",
"(",
")",
"requests",
".",
"install_patches",
"(",
")"
] |
A convenience method that installs all available hooks.
If a specific module is not available on the path, it is ignored.
|
[
"A",
"convenience",
"method",
"that",
"installs",
"all",
"available",
"hooks",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L33-L55
|
237,149
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
install_patches
|
def install_patches(patchers='all'):
"""
Usually called from middleware to install client hooks
specified in the client_hooks section of the configuration.
:param patchers: a list of patchers to run. Acceptable values include:
* None - installs all client patches
* 'all' - installs all client patches
* empty list - does not install any patches
* list of function names - executes the functions
"""
if patchers is None or patchers == 'all':
install_all_patches()
return
if not _valid_args(patchers):
raise ValueError('patchers argument must be None, "all", or a list')
for patch_func_name in patchers:
logging.info('Loading client hook %s', patch_func_name)
patch_func = _load_symbol(patch_func_name)
logging.info('Applying client hook %s', patch_func_name)
patch_func()
|
python
|
def install_patches(patchers='all'):
"""
Usually called from middleware to install client hooks
specified in the client_hooks section of the configuration.
:param patchers: a list of patchers to run. Acceptable values include:
* None - installs all client patches
* 'all' - installs all client patches
* empty list - does not install any patches
* list of function names - executes the functions
"""
if patchers is None or patchers == 'all':
install_all_patches()
return
if not _valid_args(patchers):
raise ValueError('patchers argument must be None, "all", or a list')
for patch_func_name in patchers:
logging.info('Loading client hook %s', patch_func_name)
patch_func = _load_symbol(patch_func_name)
logging.info('Applying client hook %s', patch_func_name)
patch_func()
|
[
"def",
"install_patches",
"(",
"patchers",
"=",
"'all'",
")",
":",
"if",
"patchers",
"is",
"None",
"or",
"patchers",
"==",
"'all'",
":",
"install_all_patches",
"(",
")",
"return",
"if",
"not",
"_valid_args",
"(",
"patchers",
")",
":",
"raise",
"ValueError",
"(",
"'patchers argument must be None, \"all\", or a list'",
")",
"for",
"patch_func_name",
"in",
"patchers",
":",
"logging",
".",
"info",
"(",
"'Loading client hook %s'",
",",
"patch_func_name",
")",
"patch_func",
"=",
"_load_symbol",
"(",
"patch_func_name",
")",
"logging",
".",
"info",
"(",
"'Applying client hook %s'",
",",
"patch_func_name",
")",
"patch_func",
"(",
")"
] |
Usually called from middleware to install client hooks
specified in the client_hooks section of the configuration.
:param patchers: a list of patchers to run. Acceptable values include:
* None - installs all client patches
* 'all' - installs all client patches
* empty list - does not install any patches
* list of function names - executes the functions
|
[
"Usually",
"called",
"from",
"middleware",
"to",
"install",
"client",
"hooks",
"specified",
"in",
"the",
"client_hooks",
"section",
"of",
"the",
"configuration",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L58-L79
|
237,150
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
install_client_interceptors
|
def install_client_interceptors(client_interceptors=()):
"""
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
"""
if not _valid_args(client_interceptors):
raise ValueError('client_interceptors argument must be a list')
from ..http_client import ClientInterceptors
for client_interceptor in client_interceptors:
logging.info('Loading client interceptor %s', client_interceptor)
interceptor_class = _load_symbol(client_interceptor)
logging.info('Adding client interceptor %s', client_interceptor)
ClientInterceptors.append(interceptor_class())
|
python
|
def install_client_interceptors(client_interceptors=()):
"""
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
"""
if not _valid_args(client_interceptors):
raise ValueError('client_interceptors argument must be a list')
from ..http_client import ClientInterceptors
for client_interceptor in client_interceptors:
logging.info('Loading client interceptor %s', client_interceptor)
interceptor_class = _load_symbol(client_interceptor)
logging.info('Adding client interceptor %s', client_interceptor)
ClientInterceptors.append(interceptor_class())
|
[
"def",
"install_client_interceptors",
"(",
"client_interceptors",
"=",
"(",
")",
")",
":",
"if",
"not",
"_valid_args",
"(",
"client_interceptors",
")",
":",
"raise",
"ValueError",
"(",
"'client_interceptors argument must be a list'",
")",
"from",
".",
".",
"http_client",
"import",
"ClientInterceptors",
"for",
"client_interceptor",
"in",
"client_interceptors",
":",
"logging",
".",
"info",
"(",
"'Loading client interceptor %s'",
",",
"client_interceptor",
")",
"interceptor_class",
"=",
"_load_symbol",
"(",
"client_interceptor",
")",
"logging",
".",
"info",
"(",
"'Adding client interceptor %s'",
",",
"client_interceptor",
")",
"ClientInterceptors",
".",
"append",
"(",
"interceptor_class",
"(",
")",
")"
] |
Install client interceptors for the patchers.
:param client_interceptors: a list of client interceptors to install.
Should be a list of classes
|
[
"Install",
"client",
"interceptors",
"for",
"the",
"patchers",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L82-L98
|
237,151
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/__init__.py
|
_load_symbol
|
def _load_symbol(name):
"""Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
"""
module_name, key = name.rsplit('.', 1)
try:
module = importlib.import_module(module_name)
except ImportError as err:
# it's possible the symbol is a class method
module_name, class_name = module_name.rsplit('.', 1)
module = importlib.import_module(module_name)
cls = getattr(module, class_name, None)
if cls:
attr = getattr(cls, key, None)
else:
raise err
else:
attr = getattr(module, key, None)
if not callable(attr):
raise ValueError('%s is not callable (was %r)' % (name, attr))
return attr
|
python
|
def _load_symbol(name):
"""Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
"""
module_name, key = name.rsplit('.', 1)
try:
module = importlib.import_module(module_name)
except ImportError as err:
# it's possible the symbol is a class method
module_name, class_name = module_name.rsplit('.', 1)
module = importlib.import_module(module_name)
cls = getattr(module, class_name, None)
if cls:
attr = getattr(cls, key, None)
else:
raise err
else:
attr = getattr(module, key, None)
if not callable(attr):
raise ValueError('%s is not callable (was %r)' % (name, attr))
return attr
|
[
"def",
"_load_symbol",
"(",
"name",
")",
":",
"module_name",
",",
"key",
"=",
"name",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"try",
":",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"except",
"ImportError",
"as",
"err",
":",
"# it's possible the symbol is a class method",
"module_name",
",",
"class_name",
"=",
"module_name",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"cls",
"=",
"getattr",
"(",
"module",
",",
"class_name",
",",
"None",
")",
"if",
"cls",
":",
"attr",
"=",
"getattr",
"(",
"cls",
",",
"key",
",",
"None",
")",
"else",
":",
"raise",
"err",
"else",
":",
"attr",
"=",
"getattr",
"(",
"module",
",",
"key",
",",
"None",
")",
"if",
"not",
"callable",
"(",
"attr",
")",
":",
"raise",
"ValueError",
"(",
"'%s is not callable (was %r)'",
"%",
"(",
"name",
",",
"attr",
")",
")",
"return",
"attr"
] |
Load a symbol by name.
:param str name: The name to load, specified by `module.attr`.
:returns: The attribute value. If the specified module does not contain
the requested attribute then `None` is returned.
|
[
"Load",
"a",
"symbol",
"by",
"name",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/__init__.py#L106-L129
|
237,152
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/request_context.py
|
span_in_stack_context
|
def span_in_stack_context(span):
"""
Create Tornado's StackContext that stores the given span in the
thread-local request context. This function is intended for use
in Tornado applications based on IOLoop, although will work fine
in single-threaded apps like Flask, albeit with more overhead.
## Usage example in Tornado application
Suppose you have a method `handle_request(request)` in the http server.
Instead of calling it directly, use a wrapper:
.. code-block:: python
from opentracing_instrumentation import request_context
@tornado.gen.coroutine
def handle_request_wrapper(request, actual_handler, *args, **kwargs)
request_wrapper = TornadoRequestWrapper(request=request)
span = http_server.before_request(request=request_wrapper)
with request_context.span_in_stack_context(span):
return actual_handler(*args, **kwargs)
:param span:
:return:
Return StackContext that wraps the request context.
"""
if not isinstance(opentracing.tracer.scope_manager, TornadoScopeManager):
raise RuntimeError('scope_manager is not TornadoScopeManager')
# Enter the newly created stack context so we have
# storage available for Span activation.
context = tracer_stack_context()
entered_context = _TracerEnteredStackContext(context)
if span is None:
return entered_context
opentracing.tracer.scope_manager.activate(span, False)
assert opentracing.tracer.active_span is not None
assert opentracing.tracer.active_span is span
return entered_context
|
python
|
def span_in_stack_context(span):
"""
Create Tornado's StackContext that stores the given span in the
thread-local request context. This function is intended for use
in Tornado applications based on IOLoop, although will work fine
in single-threaded apps like Flask, albeit with more overhead.
## Usage example in Tornado application
Suppose you have a method `handle_request(request)` in the http server.
Instead of calling it directly, use a wrapper:
.. code-block:: python
from opentracing_instrumentation import request_context
@tornado.gen.coroutine
def handle_request_wrapper(request, actual_handler, *args, **kwargs)
request_wrapper = TornadoRequestWrapper(request=request)
span = http_server.before_request(request=request_wrapper)
with request_context.span_in_stack_context(span):
return actual_handler(*args, **kwargs)
:param span:
:return:
Return StackContext that wraps the request context.
"""
if not isinstance(opentracing.tracer.scope_manager, TornadoScopeManager):
raise RuntimeError('scope_manager is not TornadoScopeManager')
# Enter the newly created stack context so we have
# storage available for Span activation.
context = tracer_stack_context()
entered_context = _TracerEnteredStackContext(context)
if span is None:
return entered_context
opentracing.tracer.scope_manager.activate(span, False)
assert opentracing.tracer.active_span is not None
assert opentracing.tracer.active_span is span
return entered_context
|
[
"def",
"span_in_stack_context",
"(",
"span",
")",
":",
"if",
"not",
"isinstance",
"(",
"opentracing",
".",
"tracer",
".",
"scope_manager",
",",
"TornadoScopeManager",
")",
":",
"raise",
"RuntimeError",
"(",
"'scope_manager is not TornadoScopeManager'",
")",
"# Enter the newly created stack context so we have",
"# storage available for Span activation.",
"context",
"=",
"tracer_stack_context",
"(",
")",
"entered_context",
"=",
"_TracerEnteredStackContext",
"(",
"context",
")",
"if",
"span",
"is",
"None",
":",
"return",
"entered_context",
"opentracing",
".",
"tracer",
".",
"scope_manager",
".",
"activate",
"(",
"span",
",",
"False",
")",
"assert",
"opentracing",
".",
"tracer",
".",
"active_span",
"is",
"not",
"None",
"assert",
"opentracing",
".",
"tracer",
".",
"active_span",
"is",
"span",
"return",
"entered_context"
] |
Create Tornado's StackContext that stores the given span in the
thread-local request context. This function is intended for use
in Tornado applications based on IOLoop, although will work fine
in single-threaded apps like Flask, albeit with more overhead.
## Usage example in Tornado application
Suppose you have a method `handle_request(request)` in the http server.
Instead of calling it directly, use a wrapper:
.. code-block:: python
from opentracing_instrumentation import request_context
@tornado.gen.coroutine
def handle_request_wrapper(request, actual_handler, *args, **kwargs)
request_wrapper = TornadoRequestWrapper(request=request)
span = http_server.before_request(request=request_wrapper)
with request_context.span_in_stack_context(span):
return actual_handler(*args, **kwargs)
:param span:
:return:
Return StackContext that wraps the request context.
|
[
"Create",
"Tornado",
"s",
"StackContext",
"that",
"stores",
"the",
"given",
"span",
"in",
"the",
"thread",
"-",
"local",
"request",
"context",
".",
"This",
"function",
"is",
"intended",
"for",
"use",
"in",
"Tornado",
"applications",
"based",
"on",
"IOLoop",
"although",
"will",
"work",
"fine",
"in",
"single",
"-",
"threaded",
"apps",
"like",
"Flask",
"albeit",
"with",
"more",
"overhead",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/request_context.py#L181-L226
|
237,153
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/local_span.py
|
traced_function
|
def traced_function(func=None, name=None, on_start=None,
require_active_trace=False):
"""
A decorator that enables tracing of the wrapped function or
Tornado co-routine provided there is a parent span already established.
.. code-block:: python
@traced_function
def my_function1(arg1, arg2=None)
...
:param func: decorated function or Tornado co-routine
:param name: optional name to use as the Span.operation_name.
If not provided, func.__name__ will be used.
:param on_start: an optional callback to be executed once the child span
is started, but before the decorated function is called. It can be
used to set any additional tags on the span, perhaps by inspecting
the decorated function arguments. The callback must have a signature
`(span, *args, *kwargs)`, where the last two collections are the
arguments passed to the actual decorated function.
.. code-block:: python
def extract_call_site_tag(span, *args, *kwargs)
if 'call_site_tag' in kwargs:
span.set_tag('call_site_tag', kwargs['call_site_tag'])
@traced_function(on_start=extract_call_site_tag)
@tornado.get.coroutine
def my_function(arg1, arg2=None, call_site_tag=None)
...
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: returns a tracing decorator
"""
if func is None:
return functools.partial(traced_function, name=name,
on_start=on_start,
require_active_trace=require_active_trace)
if name:
operation_name = name
else:
operation_name = func.__name__
@functools.wraps(func)
def decorator(*args, **kwargs):
parent_span = get_current_span()
if parent_span is None and require_active_trace:
return func(*args, **kwargs)
span = utils.start_child_span(
operation_name=operation_name, parent=parent_span)
if callable(on_start):
on_start(span, *args, **kwargs)
# We explicitly invoke deactivation callback for the StackContext,
# because there are scenarios when it gets retained forever, for
# example when a Periodic Callback is scheduled lazily while in the
# scope of a tracing StackContext.
with span_in_stack_context(span) as deactivate_cb:
try:
res = func(*args, **kwargs)
# Tornado co-routines usually return futures, so we must wait
# until the future is completed, in order to accurately
# capture the function's execution time.
if tornado.concurrent.is_future(res):
def done_callback(future):
deactivate_cb()
exception = future.exception()
if exception is not None:
span.log(event='exception', payload=exception)
span.set_tag('error', 'true')
span.finish()
res.add_done_callback(done_callback)
else:
deactivate_cb()
span.finish()
return res
except Exception as e:
deactivate_cb()
span.log(event='exception', payload=e)
span.set_tag('error', 'true')
span.finish()
raise
return decorator
|
python
|
def traced_function(func=None, name=None, on_start=None,
require_active_trace=False):
"""
A decorator that enables tracing of the wrapped function or
Tornado co-routine provided there is a parent span already established.
.. code-block:: python
@traced_function
def my_function1(arg1, arg2=None)
...
:param func: decorated function or Tornado co-routine
:param name: optional name to use as the Span.operation_name.
If not provided, func.__name__ will be used.
:param on_start: an optional callback to be executed once the child span
is started, but before the decorated function is called. It can be
used to set any additional tags on the span, perhaps by inspecting
the decorated function arguments. The callback must have a signature
`(span, *args, *kwargs)`, where the last two collections are the
arguments passed to the actual decorated function.
.. code-block:: python
def extract_call_site_tag(span, *args, *kwargs)
if 'call_site_tag' in kwargs:
span.set_tag('call_site_tag', kwargs['call_site_tag'])
@traced_function(on_start=extract_call_site_tag)
@tornado.get.coroutine
def my_function(arg1, arg2=None, call_site_tag=None)
...
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: returns a tracing decorator
"""
if func is None:
return functools.partial(traced_function, name=name,
on_start=on_start,
require_active_trace=require_active_trace)
if name:
operation_name = name
else:
operation_name = func.__name__
@functools.wraps(func)
def decorator(*args, **kwargs):
parent_span = get_current_span()
if parent_span is None and require_active_trace:
return func(*args, **kwargs)
span = utils.start_child_span(
operation_name=operation_name, parent=parent_span)
if callable(on_start):
on_start(span, *args, **kwargs)
# We explicitly invoke deactivation callback for the StackContext,
# because there are scenarios when it gets retained forever, for
# example when a Periodic Callback is scheduled lazily while in the
# scope of a tracing StackContext.
with span_in_stack_context(span) as deactivate_cb:
try:
res = func(*args, **kwargs)
# Tornado co-routines usually return futures, so we must wait
# until the future is completed, in order to accurately
# capture the function's execution time.
if tornado.concurrent.is_future(res):
def done_callback(future):
deactivate_cb()
exception = future.exception()
if exception is not None:
span.log(event='exception', payload=exception)
span.set_tag('error', 'true')
span.finish()
res.add_done_callback(done_callback)
else:
deactivate_cb()
span.finish()
return res
except Exception as e:
deactivate_cb()
span.log(event='exception', payload=e)
span.set_tag('error', 'true')
span.finish()
raise
return decorator
|
[
"def",
"traced_function",
"(",
"func",
"=",
"None",
",",
"name",
"=",
"None",
",",
"on_start",
"=",
"None",
",",
"require_active_trace",
"=",
"False",
")",
":",
"if",
"func",
"is",
"None",
":",
"return",
"functools",
".",
"partial",
"(",
"traced_function",
",",
"name",
"=",
"name",
",",
"on_start",
"=",
"on_start",
",",
"require_active_trace",
"=",
"require_active_trace",
")",
"if",
"name",
":",
"operation_name",
"=",
"name",
"else",
":",
"operation_name",
"=",
"func",
".",
"__name__",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"decorator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"parent_span",
"=",
"get_current_span",
"(",
")",
"if",
"parent_span",
"is",
"None",
"and",
"require_active_trace",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"span",
"=",
"utils",
".",
"start_child_span",
"(",
"operation_name",
"=",
"operation_name",
",",
"parent",
"=",
"parent_span",
")",
"if",
"callable",
"(",
"on_start",
")",
":",
"on_start",
"(",
"span",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# We explicitly invoke deactivation callback for the StackContext,",
"# because there are scenarios when it gets retained forever, for",
"# example when a Periodic Callback is scheduled lazily while in the",
"# scope of a tracing StackContext.",
"with",
"span_in_stack_context",
"(",
"span",
")",
"as",
"deactivate_cb",
":",
"try",
":",
"res",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# Tornado co-routines usually return futures, so we must wait",
"# until the future is completed, in order to accurately",
"# capture the function's execution time.",
"if",
"tornado",
".",
"concurrent",
".",
"is_future",
"(",
"res",
")",
":",
"def",
"done_callback",
"(",
"future",
")",
":",
"deactivate_cb",
"(",
")",
"exception",
"=",
"future",
".",
"exception",
"(",
")",
"if",
"exception",
"is",
"not",
"None",
":",
"span",
".",
"log",
"(",
"event",
"=",
"'exception'",
",",
"payload",
"=",
"exception",
")",
"span",
".",
"set_tag",
"(",
"'error'",
",",
"'true'",
")",
"span",
".",
"finish",
"(",
")",
"res",
".",
"add_done_callback",
"(",
"done_callback",
")",
"else",
":",
"deactivate_cb",
"(",
")",
"span",
".",
"finish",
"(",
")",
"return",
"res",
"except",
"Exception",
"as",
"e",
":",
"deactivate_cb",
"(",
")",
"span",
".",
"log",
"(",
"event",
"=",
"'exception'",
",",
"payload",
"=",
"e",
")",
"span",
".",
"set_tag",
"(",
"'error'",
",",
"'true'",
")",
"span",
".",
"finish",
"(",
")",
"raise",
"return",
"decorator"
] |
A decorator that enables tracing of the wrapped function or
Tornado co-routine provided there is a parent span already established.
.. code-block:: python
@traced_function
def my_function1(arg1, arg2=None)
...
:param func: decorated function or Tornado co-routine
:param name: optional name to use as the Span.operation_name.
If not provided, func.__name__ will be used.
:param on_start: an optional callback to be executed once the child span
is started, but before the decorated function is called. It can be
used to set any additional tags on the span, perhaps by inspecting
the decorated function arguments. The callback must have a signature
`(span, *args, *kwargs)`, where the last two collections are the
arguments passed to the actual decorated function.
.. code-block:: python
def extract_call_site_tag(span, *args, *kwargs)
if 'call_site_tag' in kwargs:
span.set_tag('call_site_tag', kwargs['call_site_tag'])
@traced_function(on_start=extract_call_site_tag)
@tornado.get.coroutine
def my_function(arg1, arg2=None, call_site_tag=None)
...
:param require_active_trace: controls what to do when there is no active
trace. If require_active_trace=True, then no span is created.
If require_active_trace=False, a new trace is started.
:return: returns a tracing decorator
|
[
"A",
"decorator",
"that",
"enables",
"tracing",
"of",
"the",
"wrapped",
"function",
"or",
"Tornado",
"co",
"-",
"routine",
"provided",
"there",
"is",
"a",
"parent",
"span",
"already",
"established",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/local_span.py#L64-L153
|
237,154
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/utils.py
|
start_child_span
|
def start_child_span(operation_name, tracer=None, parent=None, tags=None):
"""
Start a new span as a child of parent_span. If parent_span is None,
start a new root span.
:param operation_name: operation name
:param tracer: Tracer or None (defaults to opentracing.tracer)
:param parent: parent Span or None
:param tags: optional tags
:return: new span
"""
tracer = tracer or opentracing.tracer
return tracer.start_span(
operation_name=operation_name,
child_of=parent.context if parent else None,
tags=tags
)
|
python
|
def start_child_span(operation_name, tracer=None, parent=None, tags=None):
"""
Start a new span as a child of parent_span. If parent_span is None,
start a new root span.
:param operation_name: operation name
:param tracer: Tracer or None (defaults to opentracing.tracer)
:param parent: parent Span or None
:param tags: optional tags
:return: new span
"""
tracer = tracer or opentracing.tracer
return tracer.start_span(
operation_name=operation_name,
child_of=parent.context if parent else None,
tags=tags
)
|
[
"def",
"start_child_span",
"(",
"operation_name",
",",
"tracer",
"=",
"None",
",",
"parent",
"=",
"None",
",",
"tags",
"=",
"None",
")",
":",
"tracer",
"=",
"tracer",
"or",
"opentracing",
".",
"tracer",
"return",
"tracer",
".",
"start_span",
"(",
"operation_name",
"=",
"operation_name",
",",
"child_of",
"=",
"parent",
".",
"context",
"if",
"parent",
"else",
"None",
",",
"tags",
"=",
"tags",
")"
] |
Start a new span as a child of parent_span. If parent_span is None,
start a new root span.
:param operation_name: operation name
:param tracer: Tracer or None (defaults to opentracing.tracer)
:param parent: parent Span or None
:param tags: optional tags
:return: new span
|
[
"Start",
"a",
"new",
"span",
"as",
"a",
"child",
"of",
"parent_span",
".",
"If",
"parent_span",
"is",
"None",
"start",
"a",
"new",
"root",
"span",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/utils.py#L25-L41
|
237,155
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/http_server.py
|
before_request
|
def before_request(request, tracer=None):
"""
Attempts to extract a tracing span from incoming request.
If no tracing context is passed in the headers, or the data
cannot be parsed, a new root span is started.
:param request: HTTP request with `.headers` property exposed
that satisfies a regular dictionary interface
:param tracer: optional tracer instance to use. If not specified
the global opentracing.tracer will be used.
:return: returns a new, already started span.
"""
if tracer is None: # pragma: no cover
tracer = opentracing.tracer
# we need to prepare tags upfront, mainly because RPC_SERVER tag must be
# set when starting the span, to support Zipkin's one-span-per-RPC model
tags_dict = {
tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER,
tags.HTTP_URL: request.full_url,
}
remote_ip = request.remote_ip
if remote_ip:
tags_dict[tags.PEER_HOST_IPV4] = remote_ip
caller_name = request.caller_name
if caller_name:
tags_dict[tags.PEER_SERVICE] = caller_name
remote_port = request.remote_port
if remote_port:
tags_dict[tags.PEER_PORT] = remote_port
operation = request.operation
try:
carrier = {}
for key, value in six.iteritems(request.headers):
carrier[key] = value
parent_ctx = tracer.extract(
format=Format.HTTP_HEADERS, carrier=carrier
)
except Exception as e:
logging.exception('trace extract failed: %s' % e)
parent_ctx = None
span = tracer.start_span(
operation_name=operation,
child_of=parent_ctx,
tags=tags_dict)
return span
|
python
|
def before_request(request, tracer=None):
"""
Attempts to extract a tracing span from incoming request.
If no tracing context is passed in the headers, or the data
cannot be parsed, a new root span is started.
:param request: HTTP request with `.headers` property exposed
that satisfies a regular dictionary interface
:param tracer: optional tracer instance to use. If not specified
the global opentracing.tracer will be used.
:return: returns a new, already started span.
"""
if tracer is None: # pragma: no cover
tracer = opentracing.tracer
# we need to prepare tags upfront, mainly because RPC_SERVER tag must be
# set when starting the span, to support Zipkin's one-span-per-RPC model
tags_dict = {
tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER,
tags.HTTP_URL: request.full_url,
}
remote_ip = request.remote_ip
if remote_ip:
tags_dict[tags.PEER_HOST_IPV4] = remote_ip
caller_name = request.caller_name
if caller_name:
tags_dict[tags.PEER_SERVICE] = caller_name
remote_port = request.remote_port
if remote_port:
tags_dict[tags.PEER_PORT] = remote_port
operation = request.operation
try:
carrier = {}
for key, value in six.iteritems(request.headers):
carrier[key] = value
parent_ctx = tracer.extract(
format=Format.HTTP_HEADERS, carrier=carrier
)
except Exception as e:
logging.exception('trace extract failed: %s' % e)
parent_ctx = None
span = tracer.start_span(
operation_name=operation,
child_of=parent_ctx,
tags=tags_dict)
return span
|
[
"def",
"before_request",
"(",
"request",
",",
"tracer",
"=",
"None",
")",
":",
"if",
"tracer",
"is",
"None",
":",
"# pragma: no cover",
"tracer",
"=",
"opentracing",
".",
"tracer",
"# we need to prepare tags upfront, mainly because RPC_SERVER tag must be",
"# set when starting the span, to support Zipkin's one-span-per-RPC model",
"tags_dict",
"=",
"{",
"tags",
".",
"SPAN_KIND",
":",
"tags",
".",
"SPAN_KIND_RPC_SERVER",
",",
"tags",
".",
"HTTP_URL",
":",
"request",
".",
"full_url",
",",
"}",
"remote_ip",
"=",
"request",
".",
"remote_ip",
"if",
"remote_ip",
":",
"tags_dict",
"[",
"tags",
".",
"PEER_HOST_IPV4",
"]",
"=",
"remote_ip",
"caller_name",
"=",
"request",
".",
"caller_name",
"if",
"caller_name",
":",
"tags_dict",
"[",
"tags",
".",
"PEER_SERVICE",
"]",
"=",
"caller_name",
"remote_port",
"=",
"request",
".",
"remote_port",
"if",
"remote_port",
":",
"tags_dict",
"[",
"tags",
".",
"PEER_PORT",
"]",
"=",
"remote_port",
"operation",
"=",
"request",
".",
"operation",
"try",
":",
"carrier",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"request",
".",
"headers",
")",
":",
"carrier",
"[",
"key",
"]",
"=",
"value",
"parent_ctx",
"=",
"tracer",
".",
"extract",
"(",
"format",
"=",
"Format",
".",
"HTTP_HEADERS",
",",
"carrier",
"=",
"carrier",
")",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"exception",
"(",
"'trace extract failed: %s'",
"%",
"e",
")",
"parent_ctx",
"=",
"None",
"span",
"=",
"tracer",
".",
"start_span",
"(",
"operation_name",
"=",
"operation",
",",
"child_of",
"=",
"parent_ctx",
",",
"tags",
"=",
"tags_dict",
")",
"return",
"span"
] |
Attempts to extract a tracing span from incoming request.
If no tracing context is passed in the headers, or the data
cannot be parsed, a new root span is started.
:param request: HTTP request with `.headers` property exposed
that satisfies a regular dictionary interface
:param tracer: optional tracer instance to use. If not specified
the global opentracing.tracer will be used.
:return: returns a new, already started span.
|
[
"Attempts",
"to",
"extract",
"a",
"tracing",
"span",
"from",
"incoming",
"request",
".",
"If",
"no",
"tracing",
"context",
"is",
"passed",
"in",
"the",
"headers",
"or",
"the",
"data",
"cannot",
"be",
"parsed",
"a",
"new",
"root",
"span",
"is",
"started",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/http_server.py#L35-L86
|
237,156
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/http_server.py
|
WSGIRequestWrapper._parse_wsgi_headers
|
def _parse_wsgi_headers(wsgi_environ):
"""
HTTP headers are presented in WSGI environment with 'HTTP_' prefix.
This method finds those headers, removes the prefix, converts
underscores to dashes, and converts to lower case.
:param wsgi_environ:
:return: returns a dictionary of headers
"""
prefix = 'HTTP_'
p_len = len(prefix)
# use .items() despite suspected memory pressure bc GC occasionally
# collects wsgi_environ.iteritems() during iteration.
headers = {
key[p_len:].replace('_', '-').lower():
val for (key, val) in wsgi_environ.items()
if key.startswith(prefix)}
return headers
|
python
|
def _parse_wsgi_headers(wsgi_environ):
"""
HTTP headers are presented in WSGI environment with 'HTTP_' prefix.
This method finds those headers, removes the prefix, converts
underscores to dashes, and converts to lower case.
:param wsgi_environ:
:return: returns a dictionary of headers
"""
prefix = 'HTTP_'
p_len = len(prefix)
# use .items() despite suspected memory pressure bc GC occasionally
# collects wsgi_environ.iteritems() during iteration.
headers = {
key[p_len:].replace('_', '-').lower():
val for (key, val) in wsgi_environ.items()
if key.startswith(prefix)}
return headers
|
[
"def",
"_parse_wsgi_headers",
"(",
"wsgi_environ",
")",
":",
"prefix",
"=",
"'HTTP_'",
"p_len",
"=",
"len",
"(",
"prefix",
")",
"# use .items() despite suspected memory pressure bc GC occasionally",
"# collects wsgi_environ.iteritems() during iteration.",
"headers",
"=",
"{",
"key",
"[",
"p_len",
":",
"]",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
".",
"lower",
"(",
")",
":",
"val",
"for",
"(",
"key",
",",
"val",
")",
"in",
"wsgi_environ",
".",
"items",
"(",
")",
"if",
"key",
".",
"startswith",
"(",
"prefix",
")",
"}",
"return",
"headers"
] |
HTTP headers are presented in WSGI environment with 'HTTP_' prefix.
This method finds those headers, removes the prefix, converts
underscores to dashes, and converts to lower case.
:param wsgi_environ:
:return: returns a dictionary of headers
|
[
"HTTP",
"headers",
"are",
"presented",
"in",
"WSGI",
"environment",
"with",
"HTTP_",
"prefix",
".",
"This",
"method",
"finds",
"those",
"headers",
"removes",
"the",
"prefix",
"converts",
"underscores",
"to",
"dashes",
"and",
"converts",
"to",
"lower",
"case",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/http_server.py#L174-L191
|
237,157
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/interceptors.py
|
ClientInterceptors.append
|
def append(cls, interceptor):
"""
Add interceptor to the end of the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
"""
cls._check(interceptor)
cls._interceptors.append(interceptor)
|
python
|
def append(cls, interceptor):
"""
Add interceptor to the end of the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
"""
cls._check(interceptor)
cls._interceptors.append(interceptor)
|
[
"def",
"append",
"(",
"cls",
",",
"interceptor",
")",
":",
"cls",
".",
"_check",
"(",
"interceptor",
")",
"cls",
".",
"_interceptors",
".",
"append",
"(",
"interceptor",
")"
] |
Add interceptor to the end of the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
|
[
"Add",
"interceptor",
"to",
"the",
"end",
"of",
"the",
"internal",
"list",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/interceptors.py#L80-L88
|
237,158
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/interceptors.py
|
ClientInterceptors.insert
|
def insert(cls, index, interceptor):
"""
Add interceptor to the given index in the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
"""
cls._check(interceptor)
cls._interceptors.insert(index, interceptor)
|
python
|
def insert(cls, index, interceptor):
"""
Add interceptor to the given index in the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
"""
cls._check(interceptor)
cls._interceptors.insert(index, interceptor)
|
[
"def",
"insert",
"(",
"cls",
",",
"index",
",",
"interceptor",
")",
":",
"cls",
".",
"_check",
"(",
"interceptor",
")",
"cls",
".",
"_interceptors",
".",
"insert",
"(",
"index",
",",
"interceptor",
")"
] |
Add interceptor to the given index in the internal list.
Note: Raises ``ValueError`` if interceptor
does not extend ``OpenTracingInterceptor``
|
[
"Add",
"interceptor",
"to",
"the",
"given",
"index",
"in",
"the",
"internal",
"list",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/interceptors.py#L91-L99
|
237,159
|
uber-common/opentracing-python-instrumentation
|
opentracing_instrumentation/client_hooks/_singleton.py
|
singleton
|
def singleton(func):
"""
This decorator allows you to make sure that a function is called once and
only once. Note that recursive functions will still work.
WARNING: Not thread-safe!!!
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if wrapper.__call_state__ == CALLED:
return
ret = func(*args, **kwargs)
wrapper.__call_state__ = CALLED
return ret
def reset():
wrapper.__call_state__ = NOT_CALLED
wrapper.reset = reset
reset()
# save original func to be able to patch and restore multiple times from
# unit tests
wrapper.__original_func = func
return wrapper
|
python
|
def singleton(func):
"""
This decorator allows you to make sure that a function is called once and
only once. Note that recursive functions will still work.
WARNING: Not thread-safe!!!
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if wrapper.__call_state__ == CALLED:
return
ret = func(*args, **kwargs)
wrapper.__call_state__ = CALLED
return ret
def reset():
wrapper.__call_state__ = NOT_CALLED
wrapper.reset = reset
reset()
# save original func to be able to patch and restore multiple times from
# unit tests
wrapper.__original_func = func
return wrapper
|
[
"def",
"singleton",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"wrapper",
".",
"__call_state__",
"==",
"CALLED",
":",
"return",
"ret",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"wrapper",
".",
"__call_state__",
"=",
"CALLED",
"return",
"ret",
"def",
"reset",
"(",
")",
":",
"wrapper",
".",
"__call_state__",
"=",
"NOT_CALLED",
"wrapper",
".",
"reset",
"=",
"reset",
"reset",
"(",
")",
"# save original func to be able to patch and restore multiple times from",
"# unit tests",
"wrapper",
".",
"__original_func",
"=",
"func",
"return",
"wrapper"
] |
This decorator allows you to make sure that a function is called once and
only once. Note that recursive functions will still work.
WARNING: Not thread-safe!!!
|
[
"This",
"decorator",
"allows",
"you",
"to",
"make",
"sure",
"that",
"a",
"function",
"is",
"called",
"once",
"and",
"only",
"once",
".",
"Note",
"that",
"recursive",
"functions",
"will",
"still",
"work",
"."
] |
57b29fb9f647e073cde8c75155f4708cb5661d20
|
https://github.com/uber-common/opentracing-python-instrumentation/blob/57b29fb9f647e073cde8c75155f4708cb5661d20/opentracing_instrumentation/client_hooks/_singleton.py#L30-L55
|
237,160
|
ANTsX/ANTsPy
|
ants/utils/smooth_image.py
|
smooth_image
|
def smooth_image(image, sigma, sigma_in_physical_coordinates=True, FWHM=False, max_kernel_width=32):
"""
Smooth an image
ANTsR function: `smoothImage`
Arguments
---------
image
Image to smooth
sigma
Smoothing factor. Can be scalar, in which case the same sigma is applied to each dimension, or a vector of length dim(inimage) to specify a unique smoothness for each dimension.
sigma_in_physical_coordinates : boolean
If true, the smoothing factor is in millimeters; if false, it is in pixels.
FWHM : boolean
If true, sigma is interpreted as the full-width-half-max (FWHM) of the filter, not the sigma of a Gaussian kernel.
max_kernel_width : scalar
Maximum kernel width
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16'))
>>> simage = ants.smooth_image(image, (1.2,1.5))
"""
if image.components == 1:
return _smooth_image_helper(image, sigma, sigma_in_physical_coordinates, FWHM, max_kernel_width)
else:
imagelist = utils.split_channels(image)
newimages = []
for image in imagelist:
newimage = _smooth_image_helper(image, sigma, sigma_in_physical_coordinates, FWHM, max_kernel_width)
newimages.append(newimage)
return utils.merge_channels(newimages)
|
python
|
def smooth_image(image, sigma, sigma_in_physical_coordinates=True, FWHM=False, max_kernel_width=32):
"""
Smooth an image
ANTsR function: `smoothImage`
Arguments
---------
image
Image to smooth
sigma
Smoothing factor. Can be scalar, in which case the same sigma is applied to each dimension, or a vector of length dim(inimage) to specify a unique smoothness for each dimension.
sigma_in_physical_coordinates : boolean
If true, the smoothing factor is in millimeters; if false, it is in pixels.
FWHM : boolean
If true, sigma is interpreted as the full-width-half-max (FWHM) of the filter, not the sigma of a Gaussian kernel.
max_kernel_width : scalar
Maximum kernel width
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16'))
>>> simage = ants.smooth_image(image, (1.2,1.5))
"""
if image.components == 1:
return _smooth_image_helper(image, sigma, sigma_in_physical_coordinates, FWHM, max_kernel_width)
else:
imagelist = utils.split_channels(image)
newimages = []
for image in imagelist:
newimage = _smooth_image_helper(image, sigma, sigma_in_physical_coordinates, FWHM, max_kernel_width)
newimages.append(newimage)
return utils.merge_channels(newimages)
|
[
"def",
"smooth_image",
"(",
"image",
",",
"sigma",
",",
"sigma_in_physical_coordinates",
"=",
"True",
",",
"FWHM",
"=",
"False",
",",
"max_kernel_width",
"=",
"32",
")",
":",
"if",
"image",
".",
"components",
"==",
"1",
":",
"return",
"_smooth_image_helper",
"(",
"image",
",",
"sigma",
",",
"sigma_in_physical_coordinates",
",",
"FWHM",
",",
"max_kernel_width",
")",
"else",
":",
"imagelist",
"=",
"utils",
".",
"split_channels",
"(",
"image",
")",
"newimages",
"=",
"[",
"]",
"for",
"image",
"in",
"imagelist",
":",
"newimage",
"=",
"_smooth_image_helper",
"(",
"image",
",",
"sigma",
",",
"sigma_in_physical_coordinates",
",",
"FWHM",
",",
"max_kernel_width",
")",
"newimages",
".",
"append",
"(",
"newimage",
")",
"return",
"utils",
".",
"merge_channels",
"(",
"newimages",
")"
] |
Smooth an image
ANTsR function: `smoothImage`
Arguments
---------
image
Image to smooth
sigma
Smoothing factor. Can be scalar, in which case the same sigma is applied to each dimension, or a vector of length dim(inimage) to specify a unique smoothness for each dimension.
sigma_in_physical_coordinates : boolean
If true, the smoothing factor is in millimeters; if false, it is in pixels.
FWHM : boolean
If true, sigma is interpreted as the full-width-half-max (FWHM) of the filter, not the sigma of a Gaussian kernel.
max_kernel_width : scalar
Maximum kernel width
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16'))
>>> simage = ants.smooth_image(image, (1.2,1.5))
|
[
"Smooth",
"an",
"image"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/smooth_image.py#L34-L75
|
237,161
|
ANTsX/ANTsPy
|
ants/registration/build_template.py
|
build_template
|
def build_template(
initial_template=None,
image_list=None,
iterations = 3,
gradient_step = 0.2,
**kwargs ):
"""
Estimate an optimal template from an input image_list
ANTsR function: N/A
Arguments
---------
initial_template : ANTsImage
initialization for the template building
image_list : ANTsImages
images from which to estimate template
iterations : integer
number of template building iterations
gradient_step : scalar
for shape update gradient
kwargs : keyword args
extra arguments passed to ants registration
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') , 'float')
>>> image2 = ants.image_read( ants.get_ants_data('r27') , 'float')
>>> image3 = ants.image_read( ants.get_ants_data('r85') , 'float')
>>> timage = ants.build_template( image_list = ( image, image2, image3 ) )
"""
wt = 1.0 / len( image_list )
if initial_template is None:
initial_template = image_list[ 0 ] * 0
for i in range( len( image_list ) ):
initial_template = initial_template + image_list[ i ] * wt
xavg = initial_template.clone()
for i in range( iterations ):
for k in range( len( image_list ) ):
w1 = registration( xavg, image_list[k],
type_of_transform='SyN', **kwargs )
if k == 0:
wavg = iio.image_read( w1['fwdtransforms'][0] ) * wt
xavgNew = w1['warpedmovout'] * wt
else:
wavg = wavg + iio.image_read( w1['fwdtransforms'][0] ) * wt
xavgNew = xavgNew + w1['warpedmovout'] * wt
print( wavg.abs().mean() )
wscl = (-1.0) * gradient_step
wavg = wavg * wscl
wavgfn = mktemp(suffix='.nii.gz')
iio.image_write(wavg, wavgfn)
xavg = apply_transforms( xavg, xavg, wavgfn )
return xavg
|
python
|
def build_template(
initial_template=None,
image_list=None,
iterations = 3,
gradient_step = 0.2,
**kwargs ):
"""
Estimate an optimal template from an input image_list
ANTsR function: N/A
Arguments
---------
initial_template : ANTsImage
initialization for the template building
image_list : ANTsImages
images from which to estimate template
iterations : integer
number of template building iterations
gradient_step : scalar
for shape update gradient
kwargs : keyword args
extra arguments passed to ants registration
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') , 'float')
>>> image2 = ants.image_read( ants.get_ants_data('r27') , 'float')
>>> image3 = ants.image_read( ants.get_ants_data('r85') , 'float')
>>> timage = ants.build_template( image_list = ( image, image2, image3 ) )
"""
wt = 1.0 / len( image_list )
if initial_template is None:
initial_template = image_list[ 0 ] * 0
for i in range( len( image_list ) ):
initial_template = initial_template + image_list[ i ] * wt
xavg = initial_template.clone()
for i in range( iterations ):
for k in range( len( image_list ) ):
w1 = registration( xavg, image_list[k],
type_of_transform='SyN', **kwargs )
if k == 0:
wavg = iio.image_read( w1['fwdtransforms'][0] ) * wt
xavgNew = w1['warpedmovout'] * wt
else:
wavg = wavg + iio.image_read( w1['fwdtransforms'][0] ) * wt
xavgNew = xavgNew + w1['warpedmovout'] * wt
print( wavg.abs().mean() )
wscl = (-1.0) * gradient_step
wavg = wavg * wscl
wavgfn = mktemp(suffix='.nii.gz')
iio.image_write(wavg, wavgfn)
xavg = apply_transforms( xavg, xavg, wavgfn )
return xavg
|
[
"def",
"build_template",
"(",
"initial_template",
"=",
"None",
",",
"image_list",
"=",
"None",
",",
"iterations",
"=",
"3",
",",
"gradient_step",
"=",
"0.2",
",",
"*",
"*",
"kwargs",
")",
":",
"wt",
"=",
"1.0",
"/",
"len",
"(",
"image_list",
")",
"if",
"initial_template",
"is",
"None",
":",
"initial_template",
"=",
"image_list",
"[",
"0",
"]",
"*",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"image_list",
")",
")",
":",
"initial_template",
"=",
"initial_template",
"+",
"image_list",
"[",
"i",
"]",
"*",
"wt",
"xavg",
"=",
"initial_template",
".",
"clone",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"iterations",
")",
":",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"image_list",
")",
")",
":",
"w1",
"=",
"registration",
"(",
"xavg",
",",
"image_list",
"[",
"k",
"]",
",",
"type_of_transform",
"=",
"'SyN'",
",",
"*",
"*",
"kwargs",
")",
"if",
"k",
"==",
"0",
":",
"wavg",
"=",
"iio",
".",
"image_read",
"(",
"w1",
"[",
"'fwdtransforms'",
"]",
"[",
"0",
"]",
")",
"*",
"wt",
"xavgNew",
"=",
"w1",
"[",
"'warpedmovout'",
"]",
"*",
"wt",
"else",
":",
"wavg",
"=",
"wavg",
"+",
"iio",
".",
"image_read",
"(",
"w1",
"[",
"'fwdtransforms'",
"]",
"[",
"0",
"]",
")",
"*",
"wt",
"xavgNew",
"=",
"xavgNew",
"+",
"w1",
"[",
"'warpedmovout'",
"]",
"*",
"wt",
"print",
"(",
"wavg",
".",
"abs",
"(",
")",
".",
"mean",
"(",
")",
")",
"wscl",
"=",
"(",
"-",
"1.0",
")",
"*",
"gradient_step",
"wavg",
"=",
"wavg",
"*",
"wscl",
"wavgfn",
"=",
"mktemp",
"(",
"suffix",
"=",
"'.nii.gz'",
")",
"iio",
".",
"image_write",
"(",
"wavg",
",",
"wavgfn",
")",
"xavg",
"=",
"apply_transforms",
"(",
"xavg",
",",
"xavg",
",",
"wavgfn",
")",
"return",
"xavg"
] |
Estimate an optimal template from an input image_list
ANTsR function: N/A
Arguments
---------
initial_template : ANTsImage
initialization for the template building
image_list : ANTsImages
images from which to estimate template
iterations : integer
number of template building iterations
gradient_step : scalar
for shape update gradient
kwargs : keyword args
extra arguments passed to ants registration
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') , 'float')
>>> image2 = ants.image_read( ants.get_ants_data('r27') , 'float')
>>> image3 = ants.image_read( ants.get_ants_data('r85') , 'float')
>>> timage = ants.build_template( image_list = ( image, image2, image3 ) )
|
[
"Estimate",
"an",
"optimal",
"template",
"from",
"an",
"input",
"image_list"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/registration/build_template.py#L13-L77
|
237,162
|
ANTsX/ANTsPy
|
ants/registration/resample_image.py
|
resample_image
|
def resample_image(image, resample_params, use_voxels=False, interp_type=1):
"""
Resample image by spacing or number of voxels with
various interpolators. Works with multi-channel images.
ANTsR function: `resampleImage`
Arguments
---------
image : ANTsImage
input image
resample_params : tuple/list
vector of size dimension with numeric values
use_voxels : boolean
True means interpret resample params as voxel counts
interp_type : integer
one of 0 (linear), 1 (nearest neighbor), 2 (gaussian), 3 (windowed sinc), 4 (bspline)
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data("r16"))
>>> finn = ants.resample_image(fi,(50,60),True,0)
>>> filin = ants.resample_image(fi,(1.5,1.5),False,1)
"""
if image.components == 1:
inimage = image.clone('float')
outimage = image.clone('float')
rsampar = 'x'.join([str(rp) for rp in resample_params])
args = [image.dimension, inimage, outimage, rsampar, int(use_voxels), interp_type]
processed_args = utils._int_antsProcessArguments(args)
libfn = utils.get_lib_fn('ResampleImage')
libfn(processed_args)
outimage = outimage.clone(image.pixeltype)
return outimage
else:
raise ValueError('images with more than 1 component not currently supported')
|
python
|
def resample_image(image, resample_params, use_voxels=False, interp_type=1):
"""
Resample image by spacing or number of voxels with
various interpolators. Works with multi-channel images.
ANTsR function: `resampleImage`
Arguments
---------
image : ANTsImage
input image
resample_params : tuple/list
vector of size dimension with numeric values
use_voxels : boolean
True means interpret resample params as voxel counts
interp_type : integer
one of 0 (linear), 1 (nearest neighbor), 2 (gaussian), 3 (windowed sinc), 4 (bspline)
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data("r16"))
>>> finn = ants.resample_image(fi,(50,60),True,0)
>>> filin = ants.resample_image(fi,(1.5,1.5),False,1)
"""
if image.components == 1:
inimage = image.clone('float')
outimage = image.clone('float')
rsampar = 'x'.join([str(rp) for rp in resample_params])
args = [image.dimension, inimage, outimage, rsampar, int(use_voxels), interp_type]
processed_args = utils._int_antsProcessArguments(args)
libfn = utils.get_lib_fn('ResampleImage')
libfn(processed_args)
outimage = outimage.clone(image.pixeltype)
return outimage
else:
raise ValueError('images with more than 1 component not currently supported')
|
[
"def",
"resample_image",
"(",
"image",
",",
"resample_params",
",",
"use_voxels",
"=",
"False",
",",
"interp_type",
"=",
"1",
")",
":",
"if",
"image",
".",
"components",
"==",
"1",
":",
"inimage",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"outimage",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"rsampar",
"=",
"'x'",
".",
"join",
"(",
"[",
"str",
"(",
"rp",
")",
"for",
"rp",
"in",
"resample_params",
"]",
")",
"args",
"=",
"[",
"image",
".",
"dimension",
",",
"inimage",
",",
"outimage",
",",
"rsampar",
",",
"int",
"(",
"use_voxels",
")",
",",
"interp_type",
"]",
"processed_args",
"=",
"utils",
".",
"_int_antsProcessArguments",
"(",
"args",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'ResampleImage'",
")",
"libfn",
"(",
"processed_args",
")",
"outimage",
"=",
"outimage",
".",
"clone",
"(",
"image",
".",
"pixeltype",
")",
"return",
"outimage",
"else",
":",
"raise",
"ValueError",
"(",
"'images with more than 1 component not currently supported'",
")"
] |
Resample image by spacing or number of voxels with
various interpolators. Works with multi-channel images.
ANTsR function: `resampleImage`
Arguments
---------
image : ANTsImage
input image
resample_params : tuple/list
vector of size dimension with numeric values
use_voxels : boolean
True means interpret resample params as voxel counts
interp_type : integer
one of 0 (linear), 1 (nearest neighbor), 2 (gaussian), 3 (windowed sinc), 4 (bspline)
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data("r16"))
>>> finn = ants.resample_image(fi,(50,60),True,0)
>>> filin = ants.resample_image(fi,(1.5,1.5),False,1)
|
[
"Resample",
"image",
"by",
"spacing",
"or",
"number",
"of",
"voxels",
"with",
"various",
"interpolators",
".",
"Works",
"with",
"multi",
"-",
"channel",
"images",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/registration/resample_image.py#L12-L56
|
237,163
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
apply_ants_transform
|
def apply_ants_transform(transform, data, data_type="point", reference=None, **kwargs):
"""
Apply ANTsTransform to data
ANTsR function: `applyAntsrTransform`
Arguments
---------
transform : ANTsTransform
transform to apply to image
data : ndarray/list/tuple
data to which transform will be applied
data_type : string
type of data
Options :
'point'
'vector'
'image'
reference : ANTsImage
target space for transforming image
kwargs : kwargs
additional options passed to `apply_ants_transform_to_image`
Returns
-------
ANTsImage if data_type == 'point'
OR
tuple if data_type == 'point' or data_type == 'vector'
"""
return transform.apply(data, data_type, reference, **kwargs)
|
python
|
def apply_ants_transform(transform, data, data_type="point", reference=None, **kwargs):
"""
Apply ANTsTransform to data
ANTsR function: `applyAntsrTransform`
Arguments
---------
transform : ANTsTransform
transform to apply to image
data : ndarray/list/tuple
data to which transform will be applied
data_type : string
type of data
Options :
'point'
'vector'
'image'
reference : ANTsImage
target space for transforming image
kwargs : kwargs
additional options passed to `apply_ants_transform_to_image`
Returns
-------
ANTsImage if data_type == 'point'
OR
tuple if data_type == 'point' or data_type == 'vector'
"""
return transform.apply(data, data_type, reference, **kwargs)
|
[
"def",
"apply_ants_transform",
"(",
"transform",
",",
"data",
",",
"data_type",
"=",
"\"point\"",
",",
"reference",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"transform",
".",
"apply",
"(",
"data",
",",
"data_type",
",",
"reference",
",",
"*",
"*",
"kwargs",
")"
] |
Apply ANTsTransform to data
ANTsR function: `applyAntsrTransform`
Arguments
---------
transform : ANTsTransform
transform to apply to image
data : ndarray/list/tuple
data to which transform will be applied
data_type : string
type of data
Options :
'point'
'vector'
'image'
reference : ANTsImage
target space for transforming image
kwargs : kwargs
additional options passed to `apply_ants_transform_to_image`
Returns
-------
ANTsImage if data_type == 'point'
OR
tuple if data_type == 'point' or data_type == 'vector'
|
[
"Apply",
"ANTsTransform",
"to",
"data"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L225-L258
|
237,164
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
compose_ants_transforms
|
def compose_ants_transforms(transform_list):
"""
Compose multiple ANTsTransform's together
ANTsR function: `composeAntsrTransforms`
Arguments
---------
transform_list : list/tuple of ANTsTransform object
list of transforms to compose together
Returns
-------
ANTsTransform
one transform that contains all given transforms
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_ants_data("r16")).clone('float')
>>> tx = ants.new_ants_transform(dimension=2)
>>> tx.set_parameters((0.9,0,0,1.1,10,11))
>>> inv_tx = tx.invert()
>>> single_tx = ants.compose_ants_transforms([tx, inv_tx])
>>> img_orig = single_tx.apply_to_image(img, img)
>>> rRotGenerator = ants.contrib.RandomRotate2D( ( 0, 40 ), reference=img )
>>> rShearGenerator=ants.contrib.RandomShear2D( (0,50), reference=img )
>>> tx1 = rRotGenerator.transform()
>>> tx2 = rShearGenerator.transform()
>>> rSrR = ants.compose_ants_transforms([tx1, tx2])
>>> rSrR.apply_to_image( img )
"""
precision = transform_list[0].precision
dimension = transform_list[0].dimension
for tx in transform_list:
if precision != tx.precision:
raise ValueError('All transforms must have the same precision')
if dimension != tx.dimension:
raise ValueError('All transforms must have the same dimension')
tx_ptr_list = list(reversed([tf.pointer for tf in transform_list]))
libfn = utils.get_lib_fn('composeTransforms%s' % (transform_list[0]._libsuffix))
itk_composed_tx = libfn(tx_ptr_list, precision, dimension)
return ANTsTransform(precision=precision, dimension=dimension,
transform_type='CompositeTransform', pointer=itk_composed_tx)
|
python
|
def compose_ants_transforms(transform_list):
"""
Compose multiple ANTsTransform's together
ANTsR function: `composeAntsrTransforms`
Arguments
---------
transform_list : list/tuple of ANTsTransform object
list of transforms to compose together
Returns
-------
ANTsTransform
one transform that contains all given transforms
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_ants_data("r16")).clone('float')
>>> tx = ants.new_ants_transform(dimension=2)
>>> tx.set_parameters((0.9,0,0,1.1,10,11))
>>> inv_tx = tx.invert()
>>> single_tx = ants.compose_ants_transforms([tx, inv_tx])
>>> img_orig = single_tx.apply_to_image(img, img)
>>> rRotGenerator = ants.contrib.RandomRotate2D( ( 0, 40 ), reference=img )
>>> rShearGenerator=ants.contrib.RandomShear2D( (0,50), reference=img )
>>> tx1 = rRotGenerator.transform()
>>> tx2 = rShearGenerator.transform()
>>> rSrR = ants.compose_ants_transforms([tx1, tx2])
>>> rSrR.apply_to_image( img )
"""
precision = transform_list[0].precision
dimension = transform_list[0].dimension
for tx in transform_list:
if precision != tx.precision:
raise ValueError('All transforms must have the same precision')
if dimension != tx.dimension:
raise ValueError('All transforms must have the same dimension')
tx_ptr_list = list(reversed([tf.pointer for tf in transform_list]))
libfn = utils.get_lib_fn('composeTransforms%s' % (transform_list[0]._libsuffix))
itk_composed_tx = libfn(tx_ptr_list, precision, dimension)
return ANTsTransform(precision=precision, dimension=dimension,
transform_type='CompositeTransform', pointer=itk_composed_tx)
|
[
"def",
"compose_ants_transforms",
"(",
"transform_list",
")",
":",
"precision",
"=",
"transform_list",
"[",
"0",
"]",
".",
"precision",
"dimension",
"=",
"transform_list",
"[",
"0",
"]",
".",
"dimension",
"for",
"tx",
"in",
"transform_list",
":",
"if",
"precision",
"!=",
"tx",
".",
"precision",
":",
"raise",
"ValueError",
"(",
"'All transforms must have the same precision'",
")",
"if",
"dimension",
"!=",
"tx",
".",
"dimension",
":",
"raise",
"ValueError",
"(",
"'All transforms must have the same dimension'",
")",
"tx_ptr_list",
"=",
"list",
"(",
"reversed",
"(",
"[",
"tf",
".",
"pointer",
"for",
"tf",
"in",
"transform_list",
"]",
")",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'composeTransforms%s'",
"%",
"(",
"transform_list",
"[",
"0",
"]",
".",
"_libsuffix",
")",
")",
"itk_composed_tx",
"=",
"libfn",
"(",
"tx_ptr_list",
",",
"precision",
",",
"dimension",
")",
"return",
"ANTsTransform",
"(",
"precision",
"=",
"precision",
",",
"dimension",
"=",
"dimension",
",",
"transform_type",
"=",
"'CompositeTransform'",
",",
"pointer",
"=",
"itk_composed_tx",
")"
] |
Compose multiple ANTsTransform's together
ANTsR function: `composeAntsrTransforms`
Arguments
---------
transform_list : list/tuple of ANTsTransform object
list of transforms to compose together
Returns
-------
ANTsTransform
one transform that contains all given transforms
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_ants_data("r16")).clone('float')
>>> tx = ants.new_ants_transform(dimension=2)
>>> tx.set_parameters((0.9,0,0,1.1,10,11))
>>> inv_tx = tx.invert()
>>> single_tx = ants.compose_ants_transforms([tx, inv_tx])
>>> img_orig = single_tx.apply_to_image(img, img)
>>> rRotGenerator = ants.contrib.RandomRotate2D( ( 0, 40 ), reference=img )
>>> rShearGenerator=ants.contrib.RandomShear2D( (0,50), reference=img )
>>> tx1 = rRotGenerator.transform()
>>> tx2 = rShearGenerator.transform()
>>> rSrR = ants.compose_ants_transforms([tx1, tx2])
>>> rSrR.apply_to_image( img )
|
[
"Compose",
"multiple",
"ANTsTransform",
"s",
"together"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L355-L400
|
237,165
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
transform_index_to_physical_point
|
def transform_index_to_physical_point(image, index):
"""
Get spatial point from index of an image.
ANTsR function: `antsTransformIndexToPhysicalPoint`
Arguments
---------
img : ANTsImage
image to get values from
index : list or tuple or numpy.ndarray
location in image
Returns
-------
tuple
Example
-------
>>> import ants
>>> import numpy as np
>>> img = ants.make_image((10,10),np.random.randn(100))
>>> pt = ants.transform_index_to_physical_point(img, (2,2))
"""
if not isinstance(image, iio.ANTsImage):
raise ValueError('image must be ANTsImage type')
if isinstance(index, np.ndarray):
index = index.tolist()
if not isinstance(index, (tuple,list)):
raise ValueError('index must be tuple or list')
if len(index) != image.dimension:
raise ValueError('len(index) != image.dimension')
index = [i+1 for i in index]
ndim = image.dimension
ptype = image.pixeltype
libfn = utils.get_lib_fn('TransformIndexToPhysicalPoint%s%i' % (utils.short_ptype(ptype), ndim))
point = libfn(image.pointer, [list(index)])
return np.array(point[0])
|
python
|
def transform_index_to_physical_point(image, index):
"""
Get spatial point from index of an image.
ANTsR function: `antsTransformIndexToPhysicalPoint`
Arguments
---------
img : ANTsImage
image to get values from
index : list or tuple or numpy.ndarray
location in image
Returns
-------
tuple
Example
-------
>>> import ants
>>> import numpy as np
>>> img = ants.make_image((10,10),np.random.randn(100))
>>> pt = ants.transform_index_to_physical_point(img, (2,2))
"""
if not isinstance(image, iio.ANTsImage):
raise ValueError('image must be ANTsImage type')
if isinstance(index, np.ndarray):
index = index.tolist()
if not isinstance(index, (tuple,list)):
raise ValueError('index must be tuple or list')
if len(index) != image.dimension:
raise ValueError('len(index) != image.dimension')
index = [i+1 for i in index]
ndim = image.dimension
ptype = image.pixeltype
libfn = utils.get_lib_fn('TransformIndexToPhysicalPoint%s%i' % (utils.short_ptype(ptype), ndim))
point = libfn(image.pointer, [list(index)])
return np.array(point[0])
|
[
"def",
"transform_index_to_physical_point",
"(",
"image",
",",
"index",
")",
":",
"if",
"not",
"isinstance",
"(",
"image",
",",
"iio",
".",
"ANTsImage",
")",
":",
"raise",
"ValueError",
"(",
"'image must be ANTsImage type'",
")",
"if",
"isinstance",
"(",
"index",
",",
"np",
".",
"ndarray",
")",
":",
"index",
"=",
"index",
".",
"tolist",
"(",
")",
"if",
"not",
"isinstance",
"(",
"index",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"raise",
"ValueError",
"(",
"'index must be tuple or list'",
")",
"if",
"len",
"(",
"index",
")",
"!=",
"image",
".",
"dimension",
":",
"raise",
"ValueError",
"(",
"'len(index) != image.dimension'",
")",
"index",
"=",
"[",
"i",
"+",
"1",
"for",
"i",
"in",
"index",
"]",
"ndim",
"=",
"image",
".",
"dimension",
"ptype",
"=",
"image",
".",
"pixeltype",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'TransformIndexToPhysicalPoint%s%i'",
"%",
"(",
"utils",
".",
"short_ptype",
"(",
"ptype",
")",
",",
"ndim",
")",
")",
"point",
"=",
"libfn",
"(",
"image",
".",
"pointer",
",",
"[",
"list",
"(",
"index",
")",
"]",
")",
"return",
"np",
".",
"array",
"(",
"point",
"[",
"0",
"]",
")"
] |
Get spatial point from index of an image.
ANTsR function: `antsTransformIndexToPhysicalPoint`
Arguments
---------
img : ANTsImage
image to get values from
index : list or tuple or numpy.ndarray
location in image
Returns
-------
tuple
Example
-------
>>> import ants
>>> import numpy as np
>>> img = ants.make_image((10,10),np.random.randn(100))
>>> pt = ants.transform_index_to_physical_point(img, (2,2))
|
[
"Get",
"spatial",
"point",
"from",
"index",
"of",
"an",
"image",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L403-L446
|
237,166
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
ANTsTransform.invert
|
def invert(self):
""" Invert the transform """
libfn = utils.get_lib_fn('inverseTransform%s' % (self._libsuffix))
inv_tx_ptr = libfn(self.pointer)
new_tx = ANTsTransform(precision=self.precision, dimension=self.dimension,
transform_type=self.transform_type, pointer=inv_tx_ptr)
return new_tx
|
python
|
def invert(self):
""" Invert the transform """
libfn = utils.get_lib_fn('inverseTransform%s' % (self._libsuffix))
inv_tx_ptr = libfn(self.pointer)
new_tx = ANTsTransform(precision=self.precision, dimension=self.dimension,
transform_type=self.transform_type, pointer=inv_tx_ptr)
return new_tx
|
[
"def",
"invert",
"(",
"self",
")",
":",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'inverseTransform%s'",
"%",
"(",
"self",
".",
"_libsuffix",
")",
")",
"inv_tx_ptr",
"=",
"libfn",
"(",
"self",
".",
"pointer",
")",
"new_tx",
"=",
"ANTsTransform",
"(",
"precision",
"=",
"self",
".",
"precision",
",",
"dimension",
"=",
"self",
".",
"dimension",
",",
"transform_type",
"=",
"self",
".",
"transform_type",
",",
"pointer",
"=",
"inv_tx_ptr",
")",
"return",
"new_tx"
] |
Invert the transform
|
[
"Invert",
"the",
"transform"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L87-L95
|
237,167
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
ANTsTransform.apply
|
def apply(self, data, data_type='point', reference=None, **kwargs):
"""
Apply transform to data
"""
if data_type == 'point':
return self.apply_to_point(data)
elif data_type == 'vector':
return self.apply_to_vector(data)
elif data_type == 'image':
return self.apply_to_image(data, reference, **kwargs)
|
python
|
def apply(self, data, data_type='point', reference=None, **kwargs):
"""
Apply transform to data
"""
if data_type == 'point':
return self.apply_to_point(data)
elif data_type == 'vector':
return self.apply_to_vector(data)
elif data_type == 'image':
return self.apply_to_image(data, reference, **kwargs)
|
[
"def",
"apply",
"(",
"self",
",",
"data",
",",
"data_type",
"=",
"'point'",
",",
"reference",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"data_type",
"==",
"'point'",
":",
"return",
"self",
".",
"apply_to_point",
"(",
"data",
")",
"elif",
"data_type",
"==",
"'vector'",
":",
"return",
"self",
".",
"apply_to_vector",
"(",
"data",
")",
"elif",
"data_type",
"==",
"'image'",
":",
"return",
"self",
".",
"apply_to_image",
"(",
"data",
",",
"reference",
",",
"*",
"*",
"kwargs",
")"
] |
Apply transform to data
|
[
"Apply",
"transform",
"to",
"data"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L97-L107
|
237,168
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
ANTsTransform.apply_to_point
|
def apply_to_point(self, point):
"""
Apply transform to a point
Arguments
---------
point : list/tuple
point to which the transform will be applied
Returns
-------
list : transformed point
Example
-------
>>> import ants
>>> tx = ants.new_ants_transform()
>>> params = tx.parameters
>>> tx.set_parameters(params*2)
>>> pt2 = tx.apply_to_point((1,2,3)) # should be (2,4,6)
"""
libfn = utils.get_lib_fn('transformPoint%s' % (self._libsuffix))
return tuple(libfn(self.pointer, point))
|
python
|
def apply_to_point(self, point):
"""
Apply transform to a point
Arguments
---------
point : list/tuple
point to which the transform will be applied
Returns
-------
list : transformed point
Example
-------
>>> import ants
>>> tx = ants.new_ants_transform()
>>> params = tx.parameters
>>> tx.set_parameters(params*2)
>>> pt2 = tx.apply_to_point((1,2,3)) # should be (2,4,6)
"""
libfn = utils.get_lib_fn('transformPoint%s' % (self._libsuffix))
return tuple(libfn(self.pointer, point))
|
[
"def",
"apply_to_point",
"(",
"self",
",",
"point",
")",
":",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'transformPoint%s'",
"%",
"(",
"self",
".",
"_libsuffix",
")",
")",
"return",
"tuple",
"(",
"libfn",
"(",
"self",
".",
"pointer",
",",
"point",
")",
")"
] |
Apply transform to a point
Arguments
---------
point : list/tuple
point to which the transform will be applied
Returns
-------
list : transformed point
Example
-------
>>> import ants
>>> tx = ants.new_ants_transform()
>>> params = tx.parameters
>>> tx.set_parameters(params*2)
>>> pt2 = tx.apply_to_point((1,2,3)) # should be (2,4,6)
|
[
"Apply",
"transform",
"to",
"a",
"point"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L109-L131
|
237,169
|
ANTsX/ANTsPy
|
ants/core/ants_transform.py
|
ANTsTransform.apply_to_vector
|
def apply_to_vector(self, vector):
"""
Apply transform to a vector
Arguments
---------
vector : list/tuple
vector to which the transform will be applied
Returns
-------
list : transformed vector
"""
if isinstance(vector, np.ndarray):
vector = vector.tolist()
libfn = utils.get_lib_fn('transformVector%s' % (self._libsuffix))
return np.asarray(libfn(self.pointer, vector))
|
python
|
def apply_to_vector(self, vector):
"""
Apply transform to a vector
Arguments
---------
vector : list/tuple
vector to which the transform will be applied
Returns
-------
list : transformed vector
"""
if isinstance(vector, np.ndarray):
vector = vector.tolist()
libfn = utils.get_lib_fn('transformVector%s' % (self._libsuffix))
return np.asarray(libfn(self.pointer, vector))
|
[
"def",
"apply_to_vector",
"(",
"self",
",",
"vector",
")",
":",
"if",
"isinstance",
"(",
"vector",
",",
"np",
".",
"ndarray",
")",
":",
"vector",
"=",
"vector",
".",
"tolist",
"(",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'transformVector%s'",
"%",
"(",
"self",
".",
"_libsuffix",
")",
")",
"return",
"np",
".",
"asarray",
"(",
"libfn",
"(",
"self",
".",
"pointer",
",",
"vector",
")",
")"
] |
Apply transform to a vector
Arguments
---------
vector : list/tuple
vector to which the transform will be applied
Returns
-------
list : transformed vector
|
[
"Apply",
"transform",
"to",
"a",
"vector"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_transform.py#L133-L150
|
237,170
|
ANTsX/ANTsPy
|
ants/viz/plot.py
|
plot_hist
|
def plot_hist(image, threshold=0., fit_line=False, normfreq=True,
## plot label arguments
title=None, grid=True, xlabel=None, ylabel=None,
## other plot arguments
facecolor='green', alpha=0.75):
"""
Plot a histogram from an ANTsImage
Arguments
---------
image : ANTsImage
image from which histogram will be created
"""
img_arr = image.numpy().flatten()
img_arr = img_arr[np.abs(img_arr) > threshold]
if normfreq != False:
normfreq = 1. if normfreq == True else normfreq
n, bins, patches = plt.hist(img_arr, 50, normed=normfreq, facecolor=facecolor, alpha=alpha)
if fit_line:
# add a 'best fit' line
y = mlab.normpdf( bins, img_arr.mean(), img_arr.std())
l = plt.plot(bins, y, 'r--', linewidth=1)
if xlabel is not None:
plt.xlabel(xlabel)
if ylabel is not None:
plt.ylabel(ylabel)
if title is not None:
plt.title(title)
plt.grid(grid)
plt.show()
|
python
|
def plot_hist(image, threshold=0., fit_line=False, normfreq=True,
## plot label arguments
title=None, grid=True, xlabel=None, ylabel=None,
## other plot arguments
facecolor='green', alpha=0.75):
"""
Plot a histogram from an ANTsImage
Arguments
---------
image : ANTsImage
image from which histogram will be created
"""
img_arr = image.numpy().flatten()
img_arr = img_arr[np.abs(img_arr) > threshold]
if normfreq != False:
normfreq = 1. if normfreq == True else normfreq
n, bins, patches = plt.hist(img_arr, 50, normed=normfreq, facecolor=facecolor, alpha=alpha)
if fit_line:
# add a 'best fit' line
y = mlab.normpdf( bins, img_arr.mean(), img_arr.std())
l = plt.plot(bins, y, 'r--', linewidth=1)
if xlabel is not None:
plt.xlabel(xlabel)
if ylabel is not None:
plt.ylabel(ylabel)
if title is not None:
plt.title(title)
plt.grid(grid)
plt.show()
|
[
"def",
"plot_hist",
"(",
"image",
",",
"threshold",
"=",
"0.",
",",
"fit_line",
"=",
"False",
",",
"normfreq",
"=",
"True",
",",
"## plot label arguments",
"title",
"=",
"None",
",",
"grid",
"=",
"True",
",",
"xlabel",
"=",
"None",
",",
"ylabel",
"=",
"None",
",",
"## other plot arguments",
"facecolor",
"=",
"'green'",
",",
"alpha",
"=",
"0.75",
")",
":",
"img_arr",
"=",
"image",
".",
"numpy",
"(",
")",
".",
"flatten",
"(",
")",
"img_arr",
"=",
"img_arr",
"[",
"np",
".",
"abs",
"(",
"img_arr",
")",
">",
"threshold",
"]",
"if",
"normfreq",
"!=",
"False",
":",
"normfreq",
"=",
"1.",
"if",
"normfreq",
"==",
"True",
"else",
"normfreq",
"n",
",",
"bins",
",",
"patches",
"=",
"plt",
".",
"hist",
"(",
"img_arr",
",",
"50",
",",
"normed",
"=",
"normfreq",
",",
"facecolor",
"=",
"facecolor",
",",
"alpha",
"=",
"alpha",
")",
"if",
"fit_line",
":",
"# add a 'best fit' line",
"y",
"=",
"mlab",
".",
"normpdf",
"(",
"bins",
",",
"img_arr",
".",
"mean",
"(",
")",
",",
"img_arr",
".",
"std",
"(",
")",
")",
"l",
"=",
"plt",
".",
"plot",
"(",
"bins",
",",
"y",
",",
"'r--'",
",",
"linewidth",
"=",
"1",
")",
"if",
"xlabel",
"is",
"not",
"None",
":",
"plt",
".",
"xlabel",
"(",
"xlabel",
")",
"if",
"ylabel",
"is",
"not",
"None",
":",
"plt",
".",
"ylabel",
"(",
"ylabel",
")",
"if",
"title",
"is",
"not",
"None",
":",
"plt",
".",
"title",
"(",
"title",
")",
"plt",
".",
"grid",
"(",
"grid",
")",
"plt",
".",
"show",
"(",
")"
] |
Plot a histogram from an ANTsImage
Arguments
---------
image : ANTsImage
image from which histogram will be created
|
[
"Plot",
"a",
"histogram",
"from",
"an",
"ANTsImage"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/viz/plot.py#L91-L124
|
237,171
|
ANTsX/ANTsPy
|
ants/utils/morphology.py
|
morphology
|
def morphology(image, operation, radius, mtype='binary', value=1,
shape='ball', radius_is_parametric=False, thickness=1,
lines=3, include_center=False):
"""
Apply morphological operations to an image
ANTsR function: `morphology`
Arguments
---------
input : ANTsImage
input image
operation : string
operation to apply
"close" Morpholgical closing
"dilate" Morpholgical dilation
"erode" Morpholgical erosion
"open" Morpholgical opening
radius : scalar
radius of structuring element
mtype : string
type of morphology
"binary" Binary operation on a single value
"grayscale" Grayscale operations
value : scalar
value to operation on (type='binary' only)
shape : string
shape of the structuring element ( type='binary' only )
"ball" spherical structuring element
"box" box shaped structuring element
"cross" cross shaped structuring element
"annulus" annulus shaped structuring element
"polygon" polygon structuring element
radius_is_parametric : boolean
used parametric radius boolean (shape='ball' and shape='annulus' only)
thickness : scalar
thickness (shape='annulus' only)
lines : integer
number of lines in polygon (shape='polygon' only)
include_center : boolean
include center of annulus boolean (shape='annulus' only)
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data('r16') , 2 )
>>> mask = ants.get_mask( fi )
>>> dilated_ball = ants.morphology( mask, operation='dilate', radius=3, mtype='binary', shape='ball')
>>> eroded_box = ants.morphology( mask, operation='erode', radius=3, mtype='binary', shape='box')
>>> opened_annulus = ants.morphology( mask, operation='open', radius=5, mtype='binary', shape='annulus', thickness=2)
"""
if image.components > 1:
raise ValueError('multichannel images not yet supported')
_sflag_dict = {'ball': 1, 'box': 2, 'cross': 3, 'annulus': 4, 'polygon': 5}
sFlag = _sflag_dict.get(shape, 0)
if sFlag == 0:
raise ValueError('invalid element shape')
radius_is_parametric = radius_is_parametric * 1
include_center = include_center * 1
if (mtype == 'binary'):
if (operation == 'dilate'):
if (sFlag == 5):
ret = iMath(image, 'MD', radius, value, sFlag, lines)
else:
ret = iMath(image, 'MD', radius, value, sFlag, radius_is_parametric, thickness, include_center)
elif (operation == 'erode'):
if (sFlag == 5):
ret = iMath(image, 'ME', radius, value, sFlag, lines)
else:
ret = iMath(image, 'ME', radius, value, sFlag, radius_is_parametric, thickness, include_center)
elif (operation == 'open'):
if (sFlag == 5):
ret = iMath(image, 'MO', radius, value, sFlag, lines)
else:
ret = iMath(image, 'MO', radius, value, sFlag, radius_is_parametric, thickness, include_center)
elif (operation == 'close'):
if (sFlag == 5):
ret = iMath(image, 'MC', radius, value, sFlag, lines)
else:
ret = iMath(image, 'MC', radius, value, sFlag, radius_is_parametric, thickness, include_center)
else:
raise ValueError('Invalid morphology operation')
elif (mtype == 'grayscale'):
if (operation == 'dilate'):
ret = iMath(image, 'GD', radius)
elif (operation == 'erode'):
ret = iMath(image, 'GE', radius)
elif (operation == 'open'):
ret = iMath(image, 'GO', radius)
elif (operation == 'close'):
ret = iMath(image, 'GC', radius)
else:
raise ValueError('Invalid morphology operation')
else:
raise ValueError('Invalid morphology type')
return ret
|
python
|
def morphology(image, operation, radius, mtype='binary', value=1,
shape='ball', radius_is_parametric=False, thickness=1,
lines=3, include_center=False):
"""
Apply morphological operations to an image
ANTsR function: `morphology`
Arguments
---------
input : ANTsImage
input image
operation : string
operation to apply
"close" Morpholgical closing
"dilate" Morpholgical dilation
"erode" Morpholgical erosion
"open" Morpholgical opening
radius : scalar
radius of structuring element
mtype : string
type of morphology
"binary" Binary operation on a single value
"grayscale" Grayscale operations
value : scalar
value to operation on (type='binary' only)
shape : string
shape of the structuring element ( type='binary' only )
"ball" spherical structuring element
"box" box shaped structuring element
"cross" cross shaped structuring element
"annulus" annulus shaped structuring element
"polygon" polygon structuring element
radius_is_parametric : boolean
used parametric radius boolean (shape='ball' and shape='annulus' only)
thickness : scalar
thickness (shape='annulus' only)
lines : integer
number of lines in polygon (shape='polygon' only)
include_center : boolean
include center of annulus boolean (shape='annulus' only)
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data('r16') , 2 )
>>> mask = ants.get_mask( fi )
>>> dilated_ball = ants.morphology( mask, operation='dilate', radius=3, mtype='binary', shape='ball')
>>> eroded_box = ants.morphology( mask, operation='erode', radius=3, mtype='binary', shape='box')
>>> opened_annulus = ants.morphology( mask, operation='open', radius=5, mtype='binary', shape='annulus', thickness=2)
"""
if image.components > 1:
raise ValueError('multichannel images not yet supported')
_sflag_dict = {'ball': 1, 'box': 2, 'cross': 3, 'annulus': 4, 'polygon': 5}
sFlag = _sflag_dict.get(shape, 0)
if sFlag == 0:
raise ValueError('invalid element shape')
radius_is_parametric = radius_is_parametric * 1
include_center = include_center * 1
if (mtype == 'binary'):
if (operation == 'dilate'):
if (sFlag == 5):
ret = iMath(image, 'MD', radius, value, sFlag, lines)
else:
ret = iMath(image, 'MD', radius, value, sFlag, radius_is_parametric, thickness, include_center)
elif (operation == 'erode'):
if (sFlag == 5):
ret = iMath(image, 'ME', radius, value, sFlag, lines)
else:
ret = iMath(image, 'ME', radius, value, sFlag, radius_is_parametric, thickness, include_center)
elif (operation == 'open'):
if (sFlag == 5):
ret = iMath(image, 'MO', radius, value, sFlag, lines)
else:
ret = iMath(image, 'MO', radius, value, sFlag, radius_is_parametric, thickness, include_center)
elif (operation == 'close'):
if (sFlag == 5):
ret = iMath(image, 'MC', radius, value, sFlag, lines)
else:
ret = iMath(image, 'MC', radius, value, sFlag, radius_is_parametric, thickness, include_center)
else:
raise ValueError('Invalid morphology operation')
elif (mtype == 'grayscale'):
if (operation == 'dilate'):
ret = iMath(image, 'GD', radius)
elif (operation == 'erode'):
ret = iMath(image, 'GE', radius)
elif (operation == 'open'):
ret = iMath(image, 'GO', radius)
elif (operation == 'close'):
ret = iMath(image, 'GC', radius)
else:
raise ValueError('Invalid morphology operation')
else:
raise ValueError('Invalid morphology type')
return ret
|
[
"def",
"morphology",
"(",
"image",
",",
"operation",
",",
"radius",
",",
"mtype",
"=",
"'binary'",
",",
"value",
"=",
"1",
",",
"shape",
"=",
"'ball'",
",",
"radius_is_parametric",
"=",
"False",
",",
"thickness",
"=",
"1",
",",
"lines",
"=",
"3",
",",
"include_center",
"=",
"False",
")",
":",
"if",
"image",
".",
"components",
">",
"1",
":",
"raise",
"ValueError",
"(",
"'multichannel images not yet supported'",
")",
"_sflag_dict",
"=",
"{",
"'ball'",
":",
"1",
",",
"'box'",
":",
"2",
",",
"'cross'",
":",
"3",
",",
"'annulus'",
":",
"4",
",",
"'polygon'",
":",
"5",
"}",
"sFlag",
"=",
"_sflag_dict",
".",
"get",
"(",
"shape",
",",
"0",
")",
"if",
"sFlag",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"'invalid element shape'",
")",
"radius_is_parametric",
"=",
"radius_is_parametric",
"*",
"1",
"include_center",
"=",
"include_center",
"*",
"1",
"if",
"(",
"mtype",
"==",
"'binary'",
")",
":",
"if",
"(",
"operation",
"==",
"'dilate'",
")",
":",
"if",
"(",
"sFlag",
"==",
"5",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'MD'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"lines",
")",
"else",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'MD'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"radius_is_parametric",
",",
"thickness",
",",
"include_center",
")",
"elif",
"(",
"operation",
"==",
"'erode'",
")",
":",
"if",
"(",
"sFlag",
"==",
"5",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'ME'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"lines",
")",
"else",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'ME'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"radius_is_parametric",
",",
"thickness",
",",
"include_center",
")",
"elif",
"(",
"operation",
"==",
"'open'",
")",
":",
"if",
"(",
"sFlag",
"==",
"5",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'MO'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"lines",
")",
"else",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'MO'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"radius_is_parametric",
",",
"thickness",
",",
"include_center",
")",
"elif",
"(",
"operation",
"==",
"'close'",
")",
":",
"if",
"(",
"sFlag",
"==",
"5",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'MC'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"lines",
")",
"else",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'MC'",
",",
"radius",
",",
"value",
",",
"sFlag",
",",
"radius_is_parametric",
",",
"thickness",
",",
"include_center",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Invalid morphology operation'",
")",
"elif",
"(",
"mtype",
"==",
"'grayscale'",
")",
":",
"if",
"(",
"operation",
"==",
"'dilate'",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'GD'",
",",
"radius",
")",
"elif",
"(",
"operation",
"==",
"'erode'",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'GE'",
",",
"radius",
")",
"elif",
"(",
"operation",
"==",
"'open'",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'GO'",
",",
"radius",
")",
"elif",
"(",
"operation",
"==",
"'close'",
")",
":",
"ret",
"=",
"iMath",
"(",
"image",
",",
"'GC'",
",",
"radius",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Invalid morphology operation'",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Invalid morphology type'",
")",
"return",
"ret"
] |
Apply morphological operations to an image
ANTsR function: `morphology`
Arguments
---------
input : ANTsImage
input image
operation : string
operation to apply
"close" Morpholgical closing
"dilate" Morpholgical dilation
"erode" Morpholgical erosion
"open" Morpholgical opening
radius : scalar
radius of structuring element
mtype : string
type of morphology
"binary" Binary operation on a single value
"grayscale" Grayscale operations
value : scalar
value to operation on (type='binary' only)
shape : string
shape of the structuring element ( type='binary' only )
"ball" spherical structuring element
"box" box shaped structuring element
"cross" cross shaped structuring element
"annulus" annulus shaped structuring element
"polygon" polygon structuring element
radius_is_parametric : boolean
used parametric radius boolean (shape='ball' and shape='annulus' only)
thickness : scalar
thickness (shape='annulus' only)
lines : integer
number of lines in polygon (shape='polygon' only)
include_center : boolean
include center of annulus boolean (shape='annulus' only)
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data('r16') , 2 )
>>> mask = ants.get_mask( fi )
>>> dilated_ball = ants.morphology( mask, operation='dilate', radius=3, mtype='binary', shape='ball')
>>> eroded_box = ants.morphology( mask, operation='erode', radius=3, mtype='binary', shape='box')
>>> opened_annulus = ants.morphology( mask, operation='open', radius=5, mtype='binary', shape='annulus', thickness=2)
|
[
"Apply",
"morphological",
"operations",
"to",
"an",
"image"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/morphology.py#L8-L120
|
237,172
|
ANTsX/ANTsPy
|
ants/utils/scalar_rgb_vector.py
|
rgb_to_vector
|
def rgb_to_vector(image):
"""
Convert an RGB ANTsImage to a Vector ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni_rgb = mni.scalar_to_rgb()
>>> mni_vector = mni.rgb_to_vector()
>>> mni_rgb2 = mni.vector_to_rgb()
"""
if image.pixeltype != 'unsigned char':
image = image.clone('unsigned char')
idim = image.dimension
libfn = utils.get_lib_fn('RgbToVector%i' % idim)
new_ptr = libfn(image.pointer)
new_img = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
components=3, pointer=new_ptr, is_rgb=False)
return new_img
|
python
|
def rgb_to_vector(image):
"""
Convert an RGB ANTsImage to a Vector ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni_rgb = mni.scalar_to_rgb()
>>> mni_vector = mni.rgb_to_vector()
>>> mni_rgb2 = mni.vector_to_rgb()
"""
if image.pixeltype != 'unsigned char':
image = image.clone('unsigned char')
idim = image.dimension
libfn = utils.get_lib_fn('RgbToVector%i' % idim)
new_ptr = libfn(image.pointer)
new_img = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
components=3, pointer=new_ptr, is_rgb=False)
return new_img
|
[
"def",
"rgb_to_vector",
"(",
"image",
")",
":",
"if",
"image",
".",
"pixeltype",
"!=",
"'unsigned char'",
":",
"image",
"=",
"image",
".",
"clone",
"(",
"'unsigned char'",
")",
"idim",
"=",
"image",
".",
"dimension",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'RgbToVector%i'",
"%",
"idim",
")",
"new_ptr",
"=",
"libfn",
"(",
"image",
".",
"pointer",
")",
"new_img",
"=",
"iio",
".",
"ANTsImage",
"(",
"pixeltype",
"=",
"image",
".",
"pixeltype",
",",
"dimension",
"=",
"image",
".",
"dimension",
",",
"components",
"=",
"3",
",",
"pointer",
"=",
"new_ptr",
",",
"is_rgb",
"=",
"False",
")",
"return",
"new_img"
] |
Convert an RGB ANTsImage to a Vector ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mni = ants.image_read(ants.get_data('mni'))
>>> mni_rgb = mni.scalar_to_rgb()
>>> mni_vector = mni.rgb_to_vector()
>>> mni_rgb2 = mni.vector_to_rgb()
|
[
"Convert",
"an",
"RGB",
"ANTsImage",
"to",
"a",
"Vector",
"ANTsImage"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/scalar_rgb_vector.py#L78-L106
|
237,173
|
ANTsX/ANTsPy
|
ants/utils/scalar_rgb_vector.py
|
vector_to_rgb
|
def vector_to_rgb(image):
"""
Convert an Vector ANTsImage to a RGB ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_data('r16'), pixeltype='unsigned char')
>>> img_rgb = img.clone().scalar_to_rgb()
>>> img_vec = img_rgb.rgb_to_vector()
>>> img_rgb2 = img_vec.vector_to_rgb()
"""
if image.pixeltype != 'unsigned char':
image = image.clone('unsigned char')
idim = image.dimension
libfn = utils.get_lib_fn('VectorToRgb%i' % idim)
new_ptr = libfn(image.pointer)
new_img = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
components=3, pointer=new_ptr, is_rgb=True)
return new_img
|
python
|
def vector_to_rgb(image):
"""
Convert an Vector ANTsImage to a RGB ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_data('r16'), pixeltype='unsigned char')
>>> img_rgb = img.clone().scalar_to_rgb()
>>> img_vec = img_rgb.rgb_to_vector()
>>> img_rgb2 = img_vec.vector_to_rgb()
"""
if image.pixeltype != 'unsigned char':
image = image.clone('unsigned char')
idim = image.dimension
libfn = utils.get_lib_fn('VectorToRgb%i' % idim)
new_ptr = libfn(image.pointer)
new_img = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
components=3, pointer=new_ptr, is_rgb=True)
return new_img
|
[
"def",
"vector_to_rgb",
"(",
"image",
")",
":",
"if",
"image",
".",
"pixeltype",
"!=",
"'unsigned char'",
":",
"image",
"=",
"image",
".",
"clone",
"(",
"'unsigned char'",
")",
"idim",
"=",
"image",
".",
"dimension",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'VectorToRgb%i'",
"%",
"idim",
")",
"new_ptr",
"=",
"libfn",
"(",
"image",
".",
"pointer",
")",
"new_img",
"=",
"iio",
".",
"ANTsImage",
"(",
"pixeltype",
"=",
"image",
".",
"pixeltype",
",",
"dimension",
"=",
"image",
".",
"dimension",
",",
"components",
"=",
"3",
",",
"pointer",
"=",
"new_ptr",
",",
"is_rgb",
"=",
"True",
")",
"return",
"new_img"
] |
Convert an Vector ANTsImage to a RGB ANTsImage
Arguments
---------
image : ANTsImage
RGB image to be converted
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_data('r16'), pixeltype='unsigned char')
>>> img_rgb = img.clone().scalar_to_rgb()
>>> img_vec = img_rgb.rgb_to_vector()
>>> img_rgb2 = img_vec.vector_to_rgb()
|
[
"Convert",
"an",
"Vector",
"ANTsImage",
"to",
"a",
"RGB",
"ANTsImage"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/scalar_rgb_vector.py#L109-L137
|
237,174
|
ANTsX/ANTsPy
|
ants/utils/quantile.py
|
quantile
|
def quantile(image, q, nonzero=True):
"""
Get the quantile values from an ANTsImage
"""
img_arr = image.numpy()
if isinstance(q, (list,tuple)):
q = [qq*100. if qq <= 1. else qq for qq in q]
if nonzero:
img_arr = img_arr[img_arr>0]
vals = [np.percentile(img_arr, qq) for qq in q]
return tuple(vals)
elif isinstance(q, (float,int)):
if q <= 1.:
q = q*100.
if nonzero:
img_arr = img_arr[img_arr>0]
return np.percentile(img_arr[img_arr>0], q)
else:
raise ValueError('q argument must be list/tuple or float/int')
|
python
|
def quantile(image, q, nonzero=True):
"""
Get the quantile values from an ANTsImage
"""
img_arr = image.numpy()
if isinstance(q, (list,tuple)):
q = [qq*100. if qq <= 1. else qq for qq in q]
if nonzero:
img_arr = img_arr[img_arr>0]
vals = [np.percentile(img_arr, qq) for qq in q]
return tuple(vals)
elif isinstance(q, (float,int)):
if q <= 1.:
q = q*100.
if nonzero:
img_arr = img_arr[img_arr>0]
return np.percentile(img_arr[img_arr>0], q)
else:
raise ValueError('q argument must be list/tuple or float/int')
|
[
"def",
"quantile",
"(",
"image",
",",
"q",
",",
"nonzero",
"=",
"True",
")",
":",
"img_arr",
"=",
"image",
".",
"numpy",
"(",
")",
"if",
"isinstance",
"(",
"q",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"q",
"=",
"[",
"qq",
"*",
"100.",
"if",
"qq",
"<=",
"1.",
"else",
"qq",
"for",
"qq",
"in",
"q",
"]",
"if",
"nonzero",
":",
"img_arr",
"=",
"img_arr",
"[",
"img_arr",
">",
"0",
"]",
"vals",
"=",
"[",
"np",
".",
"percentile",
"(",
"img_arr",
",",
"qq",
")",
"for",
"qq",
"in",
"q",
"]",
"return",
"tuple",
"(",
"vals",
")",
"elif",
"isinstance",
"(",
"q",
",",
"(",
"float",
",",
"int",
")",
")",
":",
"if",
"q",
"<=",
"1.",
":",
"q",
"=",
"q",
"*",
"100.",
"if",
"nonzero",
":",
"img_arr",
"=",
"img_arr",
"[",
"img_arr",
">",
"0",
"]",
"return",
"np",
".",
"percentile",
"(",
"img_arr",
"[",
"img_arr",
">",
"0",
"]",
",",
"q",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'q argument must be list/tuple or float/int'",
")"
] |
Get the quantile values from an ANTsImage
|
[
"Get",
"the",
"quantile",
"values",
"from",
"an",
"ANTsImage"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/quantile.py#L153-L171
|
237,175
|
ANTsX/ANTsPy
|
ants/utils/quantile.py
|
bandpass_filter_matrix
|
def bandpass_filter_matrix( matrix,
tr=1, lowf=0.01, highf=0.1, order = 3):
"""
Bandpass filter the input time series image
ANTsR function: `frequencyFilterfMRI`
Arguments
---------
image: input time series image
tr: sampling time interval (inverse of sampling rate)
lowf: low frequency cutoff
highf: high frequency cutoff
order: order of the butterworth filter run using `filtfilt`
Returns
-------
filtered matrix
Example
-------
>>> import numpy as np
>>> import ants
>>> import matplotlib.pyplot as plt
>>> brainSignal = np.random.randn( 400, 1000 )
>>> tr = 1
>>> filtered = ants.bandpass_filter_matrix( brainSignal, tr = tr )
>>> nsamples = brainSignal.shape[0]
>>> t = np.linspace(0, tr*nsamples, nsamples, endpoint=False)
>>> k = 20
>>> plt.plot(t, brainSignal[:,k], label='Noisy signal')
>>> plt.plot(t, filtered[:,k], label='Filtered signal')
>>> plt.xlabel('time (seconds)')
>>> plt.grid(True)
>>> plt.axis('tight')
>>> plt.legend(loc='upper left')
>>> plt.show()
"""
from scipy.signal import butter, filtfilt
def butter_bandpass(lowcut, highcut, fs, order ):
nyq = 0.5 * fs
low = lowcut / nyq
high = highcut / nyq
b, a = butter(order, [low, high], btype='band')
return b, a
def butter_bandpass_filter(data, lowcut, highcut, fs, order ):
b, a = butter_bandpass(lowcut, highcut, fs, order=order)
y = filtfilt(b, a, data)
return y
fs = 1/tr # sampling rate based on tr
nsamples = matrix.shape[0]
ncolumns = matrix.shape[1]
matrixOut = matrix.copy()
for k in range( ncolumns ):
matrixOut[:,k] = butter_bandpass_filter(
matrix[:,k], lowf, highf, fs, order=order )
return matrixOut
|
python
|
def bandpass_filter_matrix( matrix,
tr=1, lowf=0.01, highf=0.1, order = 3):
"""
Bandpass filter the input time series image
ANTsR function: `frequencyFilterfMRI`
Arguments
---------
image: input time series image
tr: sampling time interval (inverse of sampling rate)
lowf: low frequency cutoff
highf: high frequency cutoff
order: order of the butterworth filter run using `filtfilt`
Returns
-------
filtered matrix
Example
-------
>>> import numpy as np
>>> import ants
>>> import matplotlib.pyplot as plt
>>> brainSignal = np.random.randn( 400, 1000 )
>>> tr = 1
>>> filtered = ants.bandpass_filter_matrix( brainSignal, tr = tr )
>>> nsamples = brainSignal.shape[0]
>>> t = np.linspace(0, tr*nsamples, nsamples, endpoint=False)
>>> k = 20
>>> plt.plot(t, brainSignal[:,k], label='Noisy signal')
>>> plt.plot(t, filtered[:,k], label='Filtered signal')
>>> plt.xlabel('time (seconds)')
>>> plt.grid(True)
>>> plt.axis('tight')
>>> plt.legend(loc='upper left')
>>> plt.show()
"""
from scipy.signal import butter, filtfilt
def butter_bandpass(lowcut, highcut, fs, order ):
nyq = 0.5 * fs
low = lowcut / nyq
high = highcut / nyq
b, a = butter(order, [low, high], btype='band')
return b, a
def butter_bandpass_filter(data, lowcut, highcut, fs, order ):
b, a = butter_bandpass(lowcut, highcut, fs, order=order)
y = filtfilt(b, a, data)
return y
fs = 1/tr # sampling rate based on tr
nsamples = matrix.shape[0]
ncolumns = matrix.shape[1]
matrixOut = matrix.copy()
for k in range( ncolumns ):
matrixOut[:,k] = butter_bandpass_filter(
matrix[:,k], lowf, highf, fs, order=order )
return matrixOut
|
[
"def",
"bandpass_filter_matrix",
"(",
"matrix",
",",
"tr",
"=",
"1",
",",
"lowf",
"=",
"0.01",
",",
"highf",
"=",
"0.1",
",",
"order",
"=",
"3",
")",
":",
"from",
"scipy",
".",
"signal",
"import",
"butter",
",",
"filtfilt",
"def",
"butter_bandpass",
"(",
"lowcut",
",",
"highcut",
",",
"fs",
",",
"order",
")",
":",
"nyq",
"=",
"0.5",
"*",
"fs",
"low",
"=",
"lowcut",
"/",
"nyq",
"high",
"=",
"highcut",
"/",
"nyq",
"b",
",",
"a",
"=",
"butter",
"(",
"order",
",",
"[",
"low",
",",
"high",
"]",
",",
"btype",
"=",
"'band'",
")",
"return",
"b",
",",
"a",
"def",
"butter_bandpass_filter",
"(",
"data",
",",
"lowcut",
",",
"highcut",
",",
"fs",
",",
"order",
")",
":",
"b",
",",
"a",
"=",
"butter_bandpass",
"(",
"lowcut",
",",
"highcut",
",",
"fs",
",",
"order",
"=",
"order",
")",
"y",
"=",
"filtfilt",
"(",
"b",
",",
"a",
",",
"data",
")",
"return",
"y",
"fs",
"=",
"1",
"/",
"tr",
"# sampling rate based on tr",
"nsamples",
"=",
"matrix",
".",
"shape",
"[",
"0",
"]",
"ncolumns",
"=",
"matrix",
".",
"shape",
"[",
"1",
"]",
"matrixOut",
"=",
"matrix",
".",
"copy",
"(",
")",
"for",
"k",
"in",
"range",
"(",
"ncolumns",
")",
":",
"matrixOut",
"[",
":",
",",
"k",
"]",
"=",
"butter_bandpass_filter",
"(",
"matrix",
"[",
":",
",",
"k",
"]",
",",
"lowf",
",",
"highf",
",",
"fs",
",",
"order",
"=",
"order",
")",
"return",
"matrixOut"
] |
Bandpass filter the input time series image
ANTsR function: `frequencyFilterfMRI`
Arguments
---------
image: input time series image
tr: sampling time interval (inverse of sampling rate)
lowf: low frequency cutoff
highf: high frequency cutoff
order: order of the butterworth filter run using `filtfilt`
Returns
-------
filtered matrix
Example
-------
>>> import numpy as np
>>> import ants
>>> import matplotlib.pyplot as plt
>>> brainSignal = np.random.randn( 400, 1000 )
>>> tr = 1
>>> filtered = ants.bandpass_filter_matrix( brainSignal, tr = tr )
>>> nsamples = brainSignal.shape[0]
>>> t = np.linspace(0, tr*nsamples, nsamples, endpoint=False)
>>> k = 20
>>> plt.plot(t, brainSignal[:,k], label='Noisy signal')
>>> plt.plot(t, filtered[:,k], label='Filtered signal')
>>> plt.xlabel('time (seconds)')
>>> plt.grid(True)
>>> plt.axis('tight')
>>> plt.legend(loc='upper left')
>>> plt.show()
|
[
"Bandpass",
"filter",
"the",
"input",
"time",
"series",
"image"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/quantile.py#L227-L292
|
237,176
|
ANTsX/ANTsPy
|
ants/utils/quantile.py
|
compcor
|
def compcor( boldImage, ncompcor=4, quantile=0.975, mask=None, filter_type=False, degree=2 ):
"""
Compute noise components from the input image
ANTsR function: `compcor`
this is adapted from nipy code https://github.com/nipy/nipype/blob/e29ac95fc0fc00fedbcaa0adaf29d5878408ca7c/nipype/algorithms/confounds.py
Arguments
---------
boldImage: input time series image
ncompcor: number of noise components to return
quantile: quantile defining high-variance
mask: mask defining brain or specific tissues
filter_type: type off filter to apply to time series before computing
noise components.
'polynomial' - Legendre polynomial basis
False - None (mean-removal only)
degree: order of polynomial used to remove trends from the timeseries
Returns
-------
dictionary containing:
components: a numpy array
basis: a numpy array containing the (non-constant) filter regressors
Example
-------
>>> cc = ants.compcor( ants.image_read(ants.get_ants_data("ch2")) )
"""
def compute_tSTD(M, quantile, x=0, axis=0):
stdM = np.std(M, axis=axis)
# set bad values to x
stdM[stdM == 0] = x
stdM[np.isnan(stdM)] = x
tt = round( quantile*100 )
threshold_std = np.percentile( stdM, tt )
# threshold_std = quantile( stdM, quantile )
return { 'tSTD': stdM, 'threshold_std': threshold_std}
if mask is None:
temp = utils.slice_image( boldImage, axis=boldImage.dimension-1, idx=0 )
mask = utils.get_mask( temp )
imagematrix = core.timeseries_to_matrix( boldImage, mask )
temp = compute_tSTD( imagematrix, quantile, 0 )
tsnrmask = core.make_image( mask, temp['tSTD'] )
tsnrmask = utils.threshold_image( tsnrmask, temp['threshold_std'], temp['tSTD'].max() )
M = core.timeseries_to_matrix( boldImage, tsnrmask )
components = None
basis = np.array([])
if filter_type in ('polynomial', False):
M, basis = regress_poly(degree, M)
# M = M / compute_tSTD(M, 1.)['tSTD']
# "The covariance matrix C = MMT was constructed and decomposed into its
# principal components using a singular value decomposition."
u, _, _ = linalg.svd(M, full_matrices=False)
if components is None:
components = u[:, :ncompcor]
else:
components = np.hstack((components, u[:, :ncompcor]))
if components is None and ncompcor > 0:
raise ValueError('No components found')
return { 'components': components, 'basis': basis }
|
python
|
def compcor( boldImage, ncompcor=4, quantile=0.975, mask=None, filter_type=False, degree=2 ):
"""
Compute noise components from the input image
ANTsR function: `compcor`
this is adapted from nipy code https://github.com/nipy/nipype/blob/e29ac95fc0fc00fedbcaa0adaf29d5878408ca7c/nipype/algorithms/confounds.py
Arguments
---------
boldImage: input time series image
ncompcor: number of noise components to return
quantile: quantile defining high-variance
mask: mask defining brain or specific tissues
filter_type: type off filter to apply to time series before computing
noise components.
'polynomial' - Legendre polynomial basis
False - None (mean-removal only)
degree: order of polynomial used to remove trends from the timeseries
Returns
-------
dictionary containing:
components: a numpy array
basis: a numpy array containing the (non-constant) filter regressors
Example
-------
>>> cc = ants.compcor( ants.image_read(ants.get_ants_data("ch2")) )
"""
def compute_tSTD(M, quantile, x=0, axis=0):
stdM = np.std(M, axis=axis)
# set bad values to x
stdM[stdM == 0] = x
stdM[np.isnan(stdM)] = x
tt = round( quantile*100 )
threshold_std = np.percentile( stdM, tt )
# threshold_std = quantile( stdM, quantile )
return { 'tSTD': stdM, 'threshold_std': threshold_std}
if mask is None:
temp = utils.slice_image( boldImage, axis=boldImage.dimension-1, idx=0 )
mask = utils.get_mask( temp )
imagematrix = core.timeseries_to_matrix( boldImage, mask )
temp = compute_tSTD( imagematrix, quantile, 0 )
tsnrmask = core.make_image( mask, temp['tSTD'] )
tsnrmask = utils.threshold_image( tsnrmask, temp['threshold_std'], temp['tSTD'].max() )
M = core.timeseries_to_matrix( boldImage, tsnrmask )
components = None
basis = np.array([])
if filter_type in ('polynomial', False):
M, basis = regress_poly(degree, M)
# M = M / compute_tSTD(M, 1.)['tSTD']
# "The covariance matrix C = MMT was constructed and decomposed into its
# principal components using a singular value decomposition."
u, _, _ = linalg.svd(M, full_matrices=False)
if components is None:
components = u[:, :ncompcor]
else:
components = np.hstack((components, u[:, :ncompcor]))
if components is None and ncompcor > 0:
raise ValueError('No components found')
return { 'components': components, 'basis': basis }
|
[
"def",
"compcor",
"(",
"boldImage",
",",
"ncompcor",
"=",
"4",
",",
"quantile",
"=",
"0.975",
",",
"mask",
"=",
"None",
",",
"filter_type",
"=",
"False",
",",
"degree",
"=",
"2",
")",
":",
"def",
"compute_tSTD",
"(",
"M",
",",
"quantile",
",",
"x",
"=",
"0",
",",
"axis",
"=",
"0",
")",
":",
"stdM",
"=",
"np",
".",
"std",
"(",
"M",
",",
"axis",
"=",
"axis",
")",
"# set bad values to x",
"stdM",
"[",
"stdM",
"==",
"0",
"]",
"=",
"x",
"stdM",
"[",
"np",
".",
"isnan",
"(",
"stdM",
")",
"]",
"=",
"x",
"tt",
"=",
"round",
"(",
"quantile",
"*",
"100",
")",
"threshold_std",
"=",
"np",
".",
"percentile",
"(",
"stdM",
",",
"tt",
")",
"# threshold_std = quantile( stdM, quantile )",
"return",
"{",
"'tSTD'",
":",
"stdM",
",",
"'threshold_std'",
":",
"threshold_std",
"}",
"if",
"mask",
"is",
"None",
":",
"temp",
"=",
"utils",
".",
"slice_image",
"(",
"boldImage",
",",
"axis",
"=",
"boldImage",
".",
"dimension",
"-",
"1",
",",
"idx",
"=",
"0",
")",
"mask",
"=",
"utils",
".",
"get_mask",
"(",
"temp",
")",
"imagematrix",
"=",
"core",
".",
"timeseries_to_matrix",
"(",
"boldImage",
",",
"mask",
")",
"temp",
"=",
"compute_tSTD",
"(",
"imagematrix",
",",
"quantile",
",",
"0",
")",
"tsnrmask",
"=",
"core",
".",
"make_image",
"(",
"mask",
",",
"temp",
"[",
"'tSTD'",
"]",
")",
"tsnrmask",
"=",
"utils",
".",
"threshold_image",
"(",
"tsnrmask",
",",
"temp",
"[",
"'threshold_std'",
"]",
",",
"temp",
"[",
"'tSTD'",
"]",
".",
"max",
"(",
")",
")",
"M",
"=",
"core",
".",
"timeseries_to_matrix",
"(",
"boldImage",
",",
"tsnrmask",
")",
"components",
"=",
"None",
"basis",
"=",
"np",
".",
"array",
"(",
"[",
"]",
")",
"if",
"filter_type",
"in",
"(",
"'polynomial'",
",",
"False",
")",
":",
"M",
",",
"basis",
"=",
"regress_poly",
"(",
"degree",
",",
"M",
")",
"# M = M / compute_tSTD(M, 1.)['tSTD']",
"# \"The covariance matrix C = MMT was constructed and decomposed into its",
"# principal components using a singular value decomposition.\"",
"u",
",",
"_",
",",
"_",
"=",
"linalg",
".",
"svd",
"(",
"M",
",",
"full_matrices",
"=",
"False",
")",
"if",
"components",
"is",
"None",
":",
"components",
"=",
"u",
"[",
":",
",",
":",
"ncompcor",
"]",
"else",
":",
"components",
"=",
"np",
".",
"hstack",
"(",
"(",
"components",
",",
"u",
"[",
":",
",",
":",
"ncompcor",
"]",
")",
")",
"if",
"components",
"is",
"None",
"and",
"ncompcor",
">",
"0",
":",
"raise",
"ValueError",
"(",
"'No components found'",
")",
"return",
"{",
"'components'",
":",
"components",
",",
"'basis'",
":",
"basis",
"}"
] |
Compute noise components from the input image
ANTsR function: `compcor`
this is adapted from nipy code https://github.com/nipy/nipype/blob/e29ac95fc0fc00fedbcaa0adaf29d5878408ca7c/nipype/algorithms/confounds.py
Arguments
---------
boldImage: input time series image
ncompcor: number of noise components to return
quantile: quantile defining high-variance
mask: mask defining brain or specific tissues
filter_type: type off filter to apply to time series before computing
noise components.
'polynomial' - Legendre polynomial basis
False - None (mean-removal only)
degree: order of polynomial used to remove trends from the timeseries
Returns
-------
dictionary containing:
components: a numpy array
basis: a numpy array containing the (non-constant) filter regressors
Example
-------
>>> cc = ants.compcor( ants.image_read(ants.get_ants_data("ch2")) )
|
[
"Compute",
"noise",
"components",
"from",
"the",
"input",
"image"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/quantile.py#L295-L367
|
237,177
|
ANTsX/ANTsPy
|
ants/utils/bias_correction.py
|
n3_bias_field_correction
|
def n3_bias_field_correction(image, downsample_factor=3):
"""
N3 Bias Field Correction
ANTsR function: `n3BiasFieldCorrection`
Arguments
---------
image : ANTsImage
image to be bias corrected
downsample_factor : scalar
how much to downsample image before performing bias correction
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> image_n3 = ants.n3_bias_field_correction(image)
"""
outimage = image.clone()
args = [image.dimension, image, outimage, downsample_factor]
processed_args = pargs._int_antsProcessArguments(args)
libfn = utils.get_lib_fn('N3BiasFieldCorrection')
libfn(processed_args)
return outimage
|
python
|
def n3_bias_field_correction(image, downsample_factor=3):
"""
N3 Bias Field Correction
ANTsR function: `n3BiasFieldCorrection`
Arguments
---------
image : ANTsImage
image to be bias corrected
downsample_factor : scalar
how much to downsample image before performing bias correction
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> image_n3 = ants.n3_bias_field_correction(image)
"""
outimage = image.clone()
args = [image.dimension, image, outimage, downsample_factor]
processed_args = pargs._int_antsProcessArguments(args)
libfn = utils.get_lib_fn('N3BiasFieldCorrection')
libfn(processed_args)
return outimage
|
[
"def",
"n3_bias_field_correction",
"(",
"image",
",",
"downsample_factor",
"=",
"3",
")",
":",
"outimage",
"=",
"image",
".",
"clone",
"(",
")",
"args",
"=",
"[",
"image",
".",
"dimension",
",",
"image",
",",
"outimage",
",",
"downsample_factor",
"]",
"processed_args",
"=",
"pargs",
".",
"_int_antsProcessArguments",
"(",
"args",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'N3BiasFieldCorrection'",
")",
"libfn",
"(",
"processed_args",
")",
"return",
"outimage"
] |
N3 Bias Field Correction
ANTsR function: `n3BiasFieldCorrection`
Arguments
---------
image : ANTsImage
image to be bias corrected
downsample_factor : scalar
how much to downsample image before performing bias correction
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> image_n3 = ants.n3_bias_field_correction(image)
|
[
"N3",
"Bias",
"Field",
"Correction"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/bias_correction.py#L17-L46
|
237,178
|
ANTsX/ANTsPy
|
ants/utils/bias_correction.py
|
n4_bias_field_correction
|
def n4_bias_field_correction(image, mask=None, shrink_factor=4,
convergence={'iters':[50,50,50,50], 'tol':1e-07},
spline_param=200, verbose=False, weight_mask=None):
"""
N4 Bias Field Correction
ANTsR function: `n4BiasFieldCorrection`
Arguments
---------
image : ANTsImage
image to bias correct
mask : ANTsImage
input mask, if one is not passed one will be made
shrink_factor : scalar
Shrink factor for multi-resolution correction, typically integer less than 4
convergence : dict w/ keys `iters` and `tol`
iters : vector of maximum number of iterations for each shrinkage factor
tol : the convergence tolerance.
spline_param : integer
Parameter controlling number of control points in spline. Either single value, indicating how many control points, or vector with one entry per dimension of image, indicating the spacing in each direction.
verbose : boolean
enables verbose output.
weight_mask : ANTsImage (optional)
antsImage of weight mask
Returns
-------
ANTsImage
Example
-------
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> image_n4 = ants.n4_bias_field_correction(image)
"""
if image.pixeltype != 'float':
image = image.clone('float')
iters = convergence['iters']
tol = convergence['tol']
if mask is None:
mask = get_mask(image)
N4_CONVERGENCE_1 = '[%s, %.10f]' % ('x'.join([str(it) for it in iters]), tol)
N4_SHRINK_FACTOR_1 = str(shrink_factor)
if (not isinstance(spline_param, (list,tuple))) or (len(spline_param) == 1):
N4_BSPLINE_PARAMS = '[%i]' % spline_param
elif (isinstance(spline_param, (list,tuple))) and (len(spline_param) == image.dimension):
N4_BSPLINE_PARAMS = '[%s]' % ('x'.join([str(sp) for sp in spline_param]))
else:
raise ValueError('Length of splineParam must either be 1 or dimensionality of image')
if weight_mask is not None:
if not isinstance(weight_mask, iio.ANTsImage):
raise ValueError('Weight Image must be an antsImage')
outimage = image.clone()
kwargs = {
'd': outimage.dimension,
'i': image,
'w': weight_mask,
's': N4_SHRINK_FACTOR_1,
'c': N4_CONVERGENCE_1,
'b': N4_BSPLINE_PARAMS,
'x': mask,
'o': outimage,
'v': int(verbose)
}
processed_args = pargs._int_antsProcessArguments(kwargs)
libfn = utils.get_lib_fn('N4BiasFieldCorrection')
libfn(processed_args)
return outimage
|
python
|
def n4_bias_field_correction(image, mask=None, shrink_factor=4,
convergence={'iters':[50,50,50,50], 'tol':1e-07},
spline_param=200, verbose=False, weight_mask=None):
"""
N4 Bias Field Correction
ANTsR function: `n4BiasFieldCorrection`
Arguments
---------
image : ANTsImage
image to bias correct
mask : ANTsImage
input mask, if one is not passed one will be made
shrink_factor : scalar
Shrink factor for multi-resolution correction, typically integer less than 4
convergence : dict w/ keys `iters` and `tol`
iters : vector of maximum number of iterations for each shrinkage factor
tol : the convergence tolerance.
spline_param : integer
Parameter controlling number of control points in spline. Either single value, indicating how many control points, or vector with one entry per dimension of image, indicating the spacing in each direction.
verbose : boolean
enables verbose output.
weight_mask : ANTsImage (optional)
antsImage of weight mask
Returns
-------
ANTsImage
Example
-------
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> image_n4 = ants.n4_bias_field_correction(image)
"""
if image.pixeltype != 'float':
image = image.clone('float')
iters = convergence['iters']
tol = convergence['tol']
if mask is None:
mask = get_mask(image)
N4_CONVERGENCE_1 = '[%s, %.10f]' % ('x'.join([str(it) for it in iters]), tol)
N4_SHRINK_FACTOR_1 = str(shrink_factor)
if (not isinstance(spline_param, (list,tuple))) or (len(spline_param) == 1):
N4_BSPLINE_PARAMS = '[%i]' % spline_param
elif (isinstance(spline_param, (list,tuple))) and (len(spline_param) == image.dimension):
N4_BSPLINE_PARAMS = '[%s]' % ('x'.join([str(sp) for sp in spline_param]))
else:
raise ValueError('Length of splineParam must either be 1 or dimensionality of image')
if weight_mask is not None:
if not isinstance(weight_mask, iio.ANTsImage):
raise ValueError('Weight Image must be an antsImage')
outimage = image.clone()
kwargs = {
'd': outimage.dimension,
'i': image,
'w': weight_mask,
's': N4_SHRINK_FACTOR_1,
'c': N4_CONVERGENCE_1,
'b': N4_BSPLINE_PARAMS,
'x': mask,
'o': outimage,
'v': int(verbose)
}
processed_args = pargs._int_antsProcessArguments(kwargs)
libfn = utils.get_lib_fn('N4BiasFieldCorrection')
libfn(processed_args)
return outimage
|
[
"def",
"n4_bias_field_correction",
"(",
"image",
",",
"mask",
"=",
"None",
",",
"shrink_factor",
"=",
"4",
",",
"convergence",
"=",
"{",
"'iters'",
":",
"[",
"50",
",",
"50",
",",
"50",
",",
"50",
"]",
",",
"'tol'",
":",
"1e-07",
"}",
",",
"spline_param",
"=",
"200",
",",
"verbose",
"=",
"False",
",",
"weight_mask",
"=",
"None",
")",
":",
"if",
"image",
".",
"pixeltype",
"!=",
"'float'",
":",
"image",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"iters",
"=",
"convergence",
"[",
"'iters'",
"]",
"tol",
"=",
"convergence",
"[",
"'tol'",
"]",
"if",
"mask",
"is",
"None",
":",
"mask",
"=",
"get_mask",
"(",
"image",
")",
"N4_CONVERGENCE_1",
"=",
"'[%s, %.10f]'",
"%",
"(",
"'x'",
".",
"join",
"(",
"[",
"str",
"(",
"it",
")",
"for",
"it",
"in",
"iters",
"]",
")",
",",
"tol",
")",
"N4_SHRINK_FACTOR_1",
"=",
"str",
"(",
"shrink_factor",
")",
"if",
"(",
"not",
"isinstance",
"(",
"spline_param",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
"or",
"(",
"len",
"(",
"spline_param",
")",
"==",
"1",
")",
":",
"N4_BSPLINE_PARAMS",
"=",
"'[%i]'",
"%",
"spline_param",
"elif",
"(",
"isinstance",
"(",
"spline_param",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
"and",
"(",
"len",
"(",
"spline_param",
")",
"==",
"image",
".",
"dimension",
")",
":",
"N4_BSPLINE_PARAMS",
"=",
"'[%s]'",
"%",
"(",
"'x'",
".",
"join",
"(",
"[",
"str",
"(",
"sp",
")",
"for",
"sp",
"in",
"spline_param",
"]",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Length of splineParam must either be 1 or dimensionality of image'",
")",
"if",
"weight_mask",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"weight_mask",
",",
"iio",
".",
"ANTsImage",
")",
":",
"raise",
"ValueError",
"(",
"'Weight Image must be an antsImage'",
")",
"outimage",
"=",
"image",
".",
"clone",
"(",
")",
"kwargs",
"=",
"{",
"'d'",
":",
"outimage",
".",
"dimension",
",",
"'i'",
":",
"image",
",",
"'w'",
":",
"weight_mask",
",",
"'s'",
":",
"N4_SHRINK_FACTOR_1",
",",
"'c'",
":",
"N4_CONVERGENCE_1",
",",
"'b'",
":",
"N4_BSPLINE_PARAMS",
",",
"'x'",
":",
"mask",
",",
"'o'",
":",
"outimage",
",",
"'v'",
":",
"int",
"(",
"verbose",
")",
"}",
"processed_args",
"=",
"pargs",
".",
"_int_antsProcessArguments",
"(",
"kwargs",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'N4BiasFieldCorrection'",
")",
"libfn",
"(",
"processed_args",
")",
"return",
"outimage"
] |
N4 Bias Field Correction
ANTsR function: `n4BiasFieldCorrection`
Arguments
---------
image : ANTsImage
image to bias correct
mask : ANTsImage
input mask, if one is not passed one will be made
shrink_factor : scalar
Shrink factor for multi-resolution correction, typically integer less than 4
convergence : dict w/ keys `iters` and `tol`
iters : vector of maximum number of iterations for each shrinkage factor
tol : the convergence tolerance.
spline_param : integer
Parameter controlling number of control points in spline. Either single value, indicating how many control points, or vector with one entry per dimension of image, indicating the spacing in each direction.
verbose : boolean
enables verbose output.
weight_mask : ANTsImage (optional)
antsImage of weight mask
Returns
-------
ANTsImage
Example
-------
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> image_n4 = ants.n4_bias_field_correction(image)
|
[
"N4",
"Bias",
"Field",
"Correction"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/bias_correction.py#L49-L126
|
237,179
|
ANTsX/ANTsPy
|
ants/utils/bias_correction.py
|
abp_n4
|
def abp_n4(image, intensity_truncation=(0.025,0.975,256), mask=None, usen3=False):
"""
Truncate outlier intensities and bias correct with the N4 algorithm.
ANTsR function: `abpN4`
Arguments
---------
image : ANTsImage
image to correct and truncate
intensity_truncation : 3-tuple
quantiles for intensity truncation
mask : ANTsImage (optional)
mask for bias correction
usen3 : boolean
if True, use N3 bias correction instead of N4
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read(ants.get_ants_data('r16'))
>>> image2 = ants.abp_n4(image)
"""
if (not isinstance(intensity_truncation, (list,tuple))) or (len(intensity_truncation) != 3):
raise ValueError('intensity_truncation must be list/tuple with 3 values')
outimage = iMath(image, 'TruncateIntensity',
intensity_truncation[0], intensity_truncation[1], intensity_truncation[2])
if usen3 == True:
outimage = n3_bias_field_correction(outimage, 4)
outimage = n3_bias_field_correction(outimage, 2)
return outimage
else:
outimage = n4_bias_field_correction(outimage, mask)
return outimage
|
python
|
def abp_n4(image, intensity_truncation=(0.025,0.975,256), mask=None, usen3=False):
"""
Truncate outlier intensities and bias correct with the N4 algorithm.
ANTsR function: `abpN4`
Arguments
---------
image : ANTsImage
image to correct and truncate
intensity_truncation : 3-tuple
quantiles for intensity truncation
mask : ANTsImage (optional)
mask for bias correction
usen3 : boolean
if True, use N3 bias correction instead of N4
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read(ants.get_ants_data('r16'))
>>> image2 = ants.abp_n4(image)
"""
if (not isinstance(intensity_truncation, (list,tuple))) or (len(intensity_truncation) != 3):
raise ValueError('intensity_truncation must be list/tuple with 3 values')
outimage = iMath(image, 'TruncateIntensity',
intensity_truncation[0], intensity_truncation[1], intensity_truncation[2])
if usen3 == True:
outimage = n3_bias_field_correction(outimage, 4)
outimage = n3_bias_field_correction(outimage, 2)
return outimage
else:
outimage = n4_bias_field_correction(outimage, mask)
return outimage
|
[
"def",
"abp_n4",
"(",
"image",
",",
"intensity_truncation",
"=",
"(",
"0.025",
",",
"0.975",
",",
"256",
")",
",",
"mask",
"=",
"None",
",",
"usen3",
"=",
"False",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"intensity_truncation",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
"or",
"(",
"len",
"(",
"intensity_truncation",
")",
"!=",
"3",
")",
":",
"raise",
"ValueError",
"(",
"'intensity_truncation must be list/tuple with 3 values'",
")",
"outimage",
"=",
"iMath",
"(",
"image",
",",
"'TruncateIntensity'",
",",
"intensity_truncation",
"[",
"0",
"]",
",",
"intensity_truncation",
"[",
"1",
"]",
",",
"intensity_truncation",
"[",
"2",
"]",
")",
"if",
"usen3",
"==",
"True",
":",
"outimage",
"=",
"n3_bias_field_correction",
"(",
"outimage",
",",
"4",
")",
"outimage",
"=",
"n3_bias_field_correction",
"(",
"outimage",
",",
"2",
")",
"return",
"outimage",
"else",
":",
"outimage",
"=",
"n4_bias_field_correction",
"(",
"outimage",
",",
"mask",
")",
"return",
"outimage"
] |
Truncate outlier intensities and bias correct with the N4 algorithm.
ANTsR function: `abpN4`
Arguments
---------
image : ANTsImage
image to correct and truncate
intensity_truncation : 3-tuple
quantiles for intensity truncation
mask : ANTsImage (optional)
mask for bias correction
usen3 : boolean
if True, use N3 bias correction instead of N4
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read(ants.get_ants_data('r16'))
>>> image2 = ants.abp_n4(image)
|
[
"Truncate",
"outlier",
"intensities",
"and",
"bias",
"correct",
"with",
"the",
"N4",
"algorithm",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/bias_correction.py#L129-L169
|
237,180
|
ANTsX/ANTsPy
|
ants/registration/metrics.py
|
image_mutual_information
|
def image_mutual_information(image1, image2):
"""
Compute mutual information between two ANTsImage types
ANTsR function: `antsImageMutualInformation`
Arguments
---------
image1 : ANTsImage
image 1
image2 : ANTsImage
image 2
Returns
-------
scalar
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data('r16') ).clone('float')
>>> mi = ants.image_read( ants.get_ants_data('r64') ).clone('float')
>>> mival = ants.image_mutual_information(fi, mi) # -0.1796141
"""
if (image1.pixeltype != 'float') or (image2.pixeltype != 'float'):
raise ValueError('Both images must have float pixeltype')
if image1.dimension != image2.dimension:
raise ValueError('Both images must have same dimension')
libfn = utils.get_lib_fn('antsImageMutualInformation%iD' % image1.dimension)
return libfn(image1.pointer, image2.pointer)
|
python
|
def image_mutual_information(image1, image2):
"""
Compute mutual information between two ANTsImage types
ANTsR function: `antsImageMutualInformation`
Arguments
---------
image1 : ANTsImage
image 1
image2 : ANTsImage
image 2
Returns
-------
scalar
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data('r16') ).clone('float')
>>> mi = ants.image_read( ants.get_ants_data('r64') ).clone('float')
>>> mival = ants.image_mutual_information(fi, mi) # -0.1796141
"""
if (image1.pixeltype != 'float') or (image2.pixeltype != 'float'):
raise ValueError('Both images must have float pixeltype')
if image1.dimension != image2.dimension:
raise ValueError('Both images must have same dimension')
libfn = utils.get_lib_fn('antsImageMutualInformation%iD' % image1.dimension)
return libfn(image1.pointer, image2.pointer)
|
[
"def",
"image_mutual_information",
"(",
"image1",
",",
"image2",
")",
":",
"if",
"(",
"image1",
".",
"pixeltype",
"!=",
"'float'",
")",
"or",
"(",
"image2",
".",
"pixeltype",
"!=",
"'float'",
")",
":",
"raise",
"ValueError",
"(",
"'Both images must have float pixeltype'",
")",
"if",
"image1",
".",
"dimension",
"!=",
"image2",
".",
"dimension",
":",
"raise",
"ValueError",
"(",
"'Both images must have same dimension'",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'antsImageMutualInformation%iD'",
"%",
"image1",
".",
"dimension",
")",
"return",
"libfn",
"(",
"image1",
".",
"pointer",
",",
"image2",
".",
"pointer",
")"
] |
Compute mutual information between two ANTsImage types
ANTsR function: `antsImageMutualInformation`
Arguments
---------
image1 : ANTsImage
image 1
image2 : ANTsImage
image 2
Returns
-------
scalar
Example
-------
>>> import ants
>>> fi = ants.image_read( ants.get_ants_data('r16') ).clone('float')
>>> mi = ants.image_read( ants.get_ants_data('r64') ).clone('float')
>>> mival = ants.image_mutual_information(fi, mi) # -0.1796141
|
[
"Compute",
"mutual",
"information",
"between",
"two",
"ANTsImage",
"types"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/registration/metrics.py#L10-L42
|
237,181
|
ANTsX/ANTsPy
|
ants/utils/get_mask.py
|
get_mask
|
def get_mask(image, low_thresh=None, high_thresh=None, cleanup=2):
"""
Get a binary mask image from the given image after thresholding
ANTsR function: `getMask`
Arguments
---------
image : ANTsImage
image from which mask will be computed. Can be an antsImage of 2, 3 or 4 dimensions.
low_thresh : scalar (optional)
An inclusive lower threshold for voxels to be included in the mask.
If not given, defaults to image mean.
high_thresh : scalar (optional)
An inclusive upper threshold for voxels to be included in the mask.
If not given, defaults to image max
cleanup : integer
If > 0, morphological operations will be applied to clean up the mask by eroding away small or weakly-connected areas, and closing holes.
If cleanup is >0, the following steps are applied
1. Erosion with radius 2 voxels
2. Retain largest component
3. Dilation with radius 1 voxel
4. Morphological closing
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> mask = ants.get_mask(image)
"""
cleanup = int(cleanup)
if isinstance(image, iio.ANTsImage):
if image.pixeltype != 'float':
image = image.clone('float')
if low_thresh is None:
low_thresh = image.mean()
if high_thresh is None:
high_thresh = image.max()
mask_image = threshold_image(image, low_thresh, high_thresh)
if cleanup > 0:
mask_image = iMath(mask_image, 'ME', cleanup)
mask_image = iMath(mask_image, 'GetLargestComponent')
mask_image = iMath(mask_image, 'MD', cleanup)
mask_image = iMath(mask_image, 'FillHoles').threshold_image( 1, 2 )
while ((mask_image.min() == mask_image.max()) and (cleanup > 0)):
cleanup = cleanup - 1
mask_image = threshold_image(image, low_thresh, high_thresh)
if cleanup > 0:
mask_image = iMath(mask_image, 'ME', cleanup)
mask_image = iMath(mask_image, 'MD', cleanup)
mask_image = iMath(mask_image, 'FillHoles').threshold_image( 1, 2 )
#if cleanup == 0:
# clustlab = label_clusters(mask_image, 1)
# mask_image = threshold_image(clustlab, 1, 1)
return mask_image
|
python
|
def get_mask(image, low_thresh=None, high_thresh=None, cleanup=2):
"""
Get a binary mask image from the given image after thresholding
ANTsR function: `getMask`
Arguments
---------
image : ANTsImage
image from which mask will be computed. Can be an antsImage of 2, 3 or 4 dimensions.
low_thresh : scalar (optional)
An inclusive lower threshold for voxels to be included in the mask.
If not given, defaults to image mean.
high_thresh : scalar (optional)
An inclusive upper threshold for voxels to be included in the mask.
If not given, defaults to image max
cleanup : integer
If > 0, morphological operations will be applied to clean up the mask by eroding away small or weakly-connected areas, and closing holes.
If cleanup is >0, the following steps are applied
1. Erosion with radius 2 voxels
2. Retain largest component
3. Dilation with radius 1 voxel
4. Morphological closing
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> mask = ants.get_mask(image)
"""
cleanup = int(cleanup)
if isinstance(image, iio.ANTsImage):
if image.pixeltype != 'float':
image = image.clone('float')
if low_thresh is None:
low_thresh = image.mean()
if high_thresh is None:
high_thresh = image.max()
mask_image = threshold_image(image, low_thresh, high_thresh)
if cleanup > 0:
mask_image = iMath(mask_image, 'ME', cleanup)
mask_image = iMath(mask_image, 'GetLargestComponent')
mask_image = iMath(mask_image, 'MD', cleanup)
mask_image = iMath(mask_image, 'FillHoles').threshold_image( 1, 2 )
while ((mask_image.min() == mask_image.max()) and (cleanup > 0)):
cleanup = cleanup - 1
mask_image = threshold_image(image, low_thresh, high_thresh)
if cleanup > 0:
mask_image = iMath(mask_image, 'ME', cleanup)
mask_image = iMath(mask_image, 'MD', cleanup)
mask_image = iMath(mask_image, 'FillHoles').threshold_image( 1, 2 )
#if cleanup == 0:
# clustlab = label_clusters(mask_image, 1)
# mask_image = threshold_image(clustlab, 1, 1)
return mask_image
|
[
"def",
"get_mask",
"(",
"image",
",",
"low_thresh",
"=",
"None",
",",
"high_thresh",
"=",
"None",
",",
"cleanup",
"=",
"2",
")",
":",
"cleanup",
"=",
"int",
"(",
"cleanup",
")",
"if",
"isinstance",
"(",
"image",
",",
"iio",
".",
"ANTsImage",
")",
":",
"if",
"image",
".",
"pixeltype",
"!=",
"'float'",
":",
"image",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"if",
"low_thresh",
"is",
"None",
":",
"low_thresh",
"=",
"image",
".",
"mean",
"(",
")",
"if",
"high_thresh",
"is",
"None",
":",
"high_thresh",
"=",
"image",
".",
"max",
"(",
")",
"mask_image",
"=",
"threshold_image",
"(",
"image",
",",
"low_thresh",
",",
"high_thresh",
")",
"if",
"cleanup",
">",
"0",
":",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'ME'",
",",
"cleanup",
")",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'GetLargestComponent'",
")",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'MD'",
",",
"cleanup",
")",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'FillHoles'",
")",
".",
"threshold_image",
"(",
"1",
",",
"2",
")",
"while",
"(",
"(",
"mask_image",
".",
"min",
"(",
")",
"==",
"mask_image",
".",
"max",
"(",
")",
")",
"and",
"(",
"cleanup",
">",
"0",
")",
")",
":",
"cleanup",
"=",
"cleanup",
"-",
"1",
"mask_image",
"=",
"threshold_image",
"(",
"image",
",",
"low_thresh",
",",
"high_thresh",
")",
"if",
"cleanup",
">",
"0",
":",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'ME'",
",",
"cleanup",
")",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'MD'",
",",
"cleanup",
")",
"mask_image",
"=",
"iMath",
"(",
"mask_image",
",",
"'FillHoles'",
")",
".",
"threshold_image",
"(",
"1",
",",
"2",
")",
"#if cleanup == 0:",
"# clustlab = label_clusters(mask_image, 1)",
"# mask_image = threshold_image(clustlab, 1, 1)",
"return",
"mask_image"
] |
Get a binary mask image from the given image after thresholding
ANTsR function: `getMask`
Arguments
---------
image : ANTsImage
image from which mask will be computed. Can be an antsImage of 2, 3 or 4 dimensions.
low_thresh : scalar (optional)
An inclusive lower threshold for voxels to be included in the mask.
If not given, defaults to image mean.
high_thresh : scalar (optional)
An inclusive upper threshold for voxels to be included in the mask.
If not given, defaults to image max
cleanup : integer
If > 0, morphological operations will be applied to clean up the mask by eroding away small or weakly-connected areas, and closing holes.
If cleanup is >0, the following steps are applied
1. Erosion with radius 2 voxels
2. Retain largest component
3. Dilation with radius 1 voxel
4. Morphological closing
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> mask = ants.get_mask(image)
|
[
"Get",
"a",
"binary",
"mask",
"image",
"from",
"the",
"given",
"image",
"after",
"thresholding"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/get_mask.py#L13-L78
|
237,182
|
ANTsX/ANTsPy
|
ants/utils/label_image_centroids.py
|
label_image_centroids
|
def label_image_centroids(image, physical=False, convex=True, verbose=False):
"""
Converts a label image to coordinates summarizing their positions
ANTsR function: `labelImageCentroids`
Arguments
---------
image : ANTsImage
image of integer labels
physical : boolean
whether you want physical space coordinates or not
convex : boolean
if True, return centroid
if False return point with min average distance to other points with same label
Returns
-------
dictionary w/ following key-value pairs:
`labels` : 1D-ndarray
array of label values
`vertices` : pd.DataFrame
coordinates of label centroids
Example
-------
>>> import ants
>>> import numpy as np
>>> image = ants.from_numpy(np.asarray([[[0,2],[1,3]],[[4,6],[5,7]]]).astype('float32'))
>>> labels = ants.label_image_centroids(image)
"""
d = image.shape
if len(d) != 3:
raise ValueError('image must be 3 dimensions')
xcoords = np.asarray(np.arange(d[0]).tolist()*(d[1]*d[2]))
ycoords = np.asarray(np.repeat(np.arange(d[1]),d[0]).tolist()*d[2])
zcoords = np.asarray(np.repeat(np.arange(d[1]), d[0]*d[2]))
labels = image.numpy()
mylabels = np.sort(np.unique(labels[labels > 0])).astype('int')
n_labels = len(mylabels)
xc = np.zeros(n_labels)
yc = np.zeros(n_labels)
zc = np.zeros(n_labels)
if convex:
for i in mylabels:
idx = (labels == i).flatten()
xc[i-1] = np.mean(xcoords[idx])
yc[i-1] = np.mean(ycoords[idx])
zc[i-1] = np.mean(zcoords[idx])
else:
for i in mylabels:
idx = (labels == i).flatten()
xci = xcoords[idx]
yci = ycoords[idx]
zci = zcoords[idx]
dist = np.zeros(len(xci))
for j in range(len(xci)):
dist[j] = np.mean(np.sqrt((xci[j] - xci)**2 + (yci[j] - yci)**2 + (zci[j] - zci)**2))
mid = np.where(dist==np.min(dist))
xc[i-1] = xci[mid]
yc[i-1] = yci[mid]
zc[i-1] = zci[mid]
centroids = np.vstack([xc,yc,zc]).T
#if physical:
# centroids = tio.transform_index_to_physical_point(image, centroids)
return {
'labels': mylabels,
'vertices': centroids
}
|
python
|
def label_image_centroids(image, physical=False, convex=True, verbose=False):
"""
Converts a label image to coordinates summarizing their positions
ANTsR function: `labelImageCentroids`
Arguments
---------
image : ANTsImage
image of integer labels
physical : boolean
whether you want physical space coordinates or not
convex : boolean
if True, return centroid
if False return point with min average distance to other points with same label
Returns
-------
dictionary w/ following key-value pairs:
`labels` : 1D-ndarray
array of label values
`vertices` : pd.DataFrame
coordinates of label centroids
Example
-------
>>> import ants
>>> import numpy as np
>>> image = ants.from_numpy(np.asarray([[[0,2],[1,3]],[[4,6],[5,7]]]).astype('float32'))
>>> labels = ants.label_image_centroids(image)
"""
d = image.shape
if len(d) != 3:
raise ValueError('image must be 3 dimensions')
xcoords = np.asarray(np.arange(d[0]).tolist()*(d[1]*d[2]))
ycoords = np.asarray(np.repeat(np.arange(d[1]),d[0]).tolist()*d[2])
zcoords = np.asarray(np.repeat(np.arange(d[1]), d[0]*d[2]))
labels = image.numpy()
mylabels = np.sort(np.unique(labels[labels > 0])).astype('int')
n_labels = len(mylabels)
xc = np.zeros(n_labels)
yc = np.zeros(n_labels)
zc = np.zeros(n_labels)
if convex:
for i in mylabels:
idx = (labels == i).flatten()
xc[i-1] = np.mean(xcoords[idx])
yc[i-1] = np.mean(ycoords[idx])
zc[i-1] = np.mean(zcoords[idx])
else:
for i in mylabels:
idx = (labels == i).flatten()
xci = xcoords[idx]
yci = ycoords[idx]
zci = zcoords[idx]
dist = np.zeros(len(xci))
for j in range(len(xci)):
dist[j] = np.mean(np.sqrt((xci[j] - xci)**2 + (yci[j] - yci)**2 + (zci[j] - zci)**2))
mid = np.where(dist==np.min(dist))
xc[i-1] = xci[mid]
yc[i-1] = yci[mid]
zc[i-1] = zci[mid]
centroids = np.vstack([xc,yc,zc]).T
#if physical:
# centroids = tio.transform_index_to_physical_point(image, centroids)
return {
'labels': mylabels,
'vertices': centroids
}
|
[
"def",
"label_image_centroids",
"(",
"image",
",",
"physical",
"=",
"False",
",",
"convex",
"=",
"True",
",",
"verbose",
"=",
"False",
")",
":",
"d",
"=",
"image",
".",
"shape",
"if",
"len",
"(",
"d",
")",
"!=",
"3",
":",
"raise",
"ValueError",
"(",
"'image must be 3 dimensions'",
")",
"xcoords",
"=",
"np",
".",
"asarray",
"(",
"np",
".",
"arange",
"(",
"d",
"[",
"0",
"]",
")",
".",
"tolist",
"(",
")",
"*",
"(",
"d",
"[",
"1",
"]",
"*",
"d",
"[",
"2",
"]",
")",
")",
"ycoords",
"=",
"np",
".",
"asarray",
"(",
"np",
".",
"repeat",
"(",
"np",
".",
"arange",
"(",
"d",
"[",
"1",
"]",
")",
",",
"d",
"[",
"0",
"]",
")",
".",
"tolist",
"(",
")",
"*",
"d",
"[",
"2",
"]",
")",
"zcoords",
"=",
"np",
".",
"asarray",
"(",
"np",
".",
"repeat",
"(",
"np",
".",
"arange",
"(",
"d",
"[",
"1",
"]",
")",
",",
"d",
"[",
"0",
"]",
"*",
"d",
"[",
"2",
"]",
")",
")",
"labels",
"=",
"image",
".",
"numpy",
"(",
")",
"mylabels",
"=",
"np",
".",
"sort",
"(",
"np",
".",
"unique",
"(",
"labels",
"[",
"labels",
">",
"0",
"]",
")",
")",
".",
"astype",
"(",
"'int'",
")",
"n_labels",
"=",
"len",
"(",
"mylabels",
")",
"xc",
"=",
"np",
".",
"zeros",
"(",
"n_labels",
")",
"yc",
"=",
"np",
".",
"zeros",
"(",
"n_labels",
")",
"zc",
"=",
"np",
".",
"zeros",
"(",
"n_labels",
")",
"if",
"convex",
":",
"for",
"i",
"in",
"mylabels",
":",
"idx",
"=",
"(",
"labels",
"==",
"i",
")",
".",
"flatten",
"(",
")",
"xc",
"[",
"i",
"-",
"1",
"]",
"=",
"np",
".",
"mean",
"(",
"xcoords",
"[",
"idx",
"]",
")",
"yc",
"[",
"i",
"-",
"1",
"]",
"=",
"np",
".",
"mean",
"(",
"ycoords",
"[",
"idx",
"]",
")",
"zc",
"[",
"i",
"-",
"1",
"]",
"=",
"np",
".",
"mean",
"(",
"zcoords",
"[",
"idx",
"]",
")",
"else",
":",
"for",
"i",
"in",
"mylabels",
":",
"idx",
"=",
"(",
"labels",
"==",
"i",
")",
".",
"flatten",
"(",
")",
"xci",
"=",
"xcoords",
"[",
"idx",
"]",
"yci",
"=",
"ycoords",
"[",
"idx",
"]",
"zci",
"=",
"zcoords",
"[",
"idx",
"]",
"dist",
"=",
"np",
".",
"zeros",
"(",
"len",
"(",
"xci",
")",
")",
"for",
"j",
"in",
"range",
"(",
"len",
"(",
"xci",
")",
")",
":",
"dist",
"[",
"j",
"]",
"=",
"np",
".",
"mean",
"(",
"np",
".",
"sqrt",
"(",
"(",
"xci",
"[",
"j",
"]",
"-",
"xci",
")",
"**",
"2",
"+",
"(",
"yci",
"[",
"j",
"]",
"-",
"yci",
")",
"**",
"2",
"+",
"(",
"zci",
"[",
"j",
"]",
"-",
"zci",
")",
"**",
"2",
")",
")",
"mid",
"=",
"np",
".",
"where",
"(",
"dist",
"==",
"np",
".",
"min",
"(",
"dist",
")",
")",
"xc",
"[",
"i",
"-",
"1",
"]",
"=",
"xci",
"[",
"mid",
"]",
"yc",
"[",
"i",
"-",
"1",
"]",
"=",
"yci",
"[",
"mid",
"]",
"zc",
"[",
"i",
"-",
"1",
"]",
"=",
"zci",
"[",
"mid",
"]",
"centroids",
"=",
"np",
".",
"vstack",
"(",
"[",
"xc",
",",
"yc",
",",
"zc",
"]",
")",
".",
"T",
"#if physical:",
"# centroids = tio.transform_index_to_physical_point(image, centroids)",
"return",
"{",
"'labels'",
":",
"mylabels",
",",
"'vertices'",
":",
"centroids",
"}"
] |
Converts a label image to coordinates summarizing their positions
ANTsR function: `labelImageCentroids`
Arguments
---------
image : ANTsImage
image of integer labels
physical : boolean
whether you want physical space coordinates or not
convex : boolean
if True, return centroid
if False return point with min average distance to other points with same label
Returns
-------
dictionary w/ following key-value pairs:
`labels` : 1D-ndarray
array of label values
`vertices` : pd.DataFrame
coordinates of label centroids
Example
-------
>>> import ants
>>> import numpy as np
>>> image = ants.from_numpy(np.asarray([[[0,2],[1,3]],[[4,6],[5,7]]]).astype('float32'))
>>> labels = ants.label_image_centroids(image)
|
[
"Converts",
"a",
"label",
"image",
"to",
"coordinates",
"summarizing",
"their",
"positions"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/label_image_centroids.py#L10-L89
|
237,183
|
ANTsX/ANTsPy
|
ants/contrib/sampling/transforms.py
|
MultiResolutionImage.transform
|
def transform(self, X, y=None):
"""
Generate a set of multi-resolution ANTsImage types
Arguments
---------
X : ANTsImage
image to transform
y : ANTsImage (optional)
another image to transform
Example
-------
>>> import ants
>>> multires = ants.contrib.MultiResolutionImage(levels=4)
>>> img = ants.image_read(ants.get_data('r16'))
>>> imgs = multires.transform(img)
"""
insuffix = X._libsuffix
multires_fn = utils.get_lib_fn('multiResolutionAntsImage%s' % (insuffix))
casted_ptrs = multires_fn(X.pointer, self.levels)
imgs = []
for casted_ptr in casted_ptrs:
img = iio.ANTsImage(pixeltype=X.pixeltype, dimension=X.dimension,
components=X.components, pointer=casted_ptr)
if self.keep_shape:
img = img.resample_image_to_target(X)
imgs.append(img)
return imgs
|
python
|
def transform(self, X, y=None):
"""
Generate a set of multi-resolution ANTsImage types
Arguments
---------
X : ANTsImage
image to transform
y : ANTsImage (optional)
another image to transform
Example
-------
>>> import ants
>>> multires = ants.contrib.MultiResolutionImage(levels=4)
>>> img = ants.image_read(ants.get_data('r16'))
>>> imgs = multires.transform(img)
"""
insuffix = X._libsuffix
multires_fn = utils.get_lib_fn('multiResolutionAntsImage%s' % (insuffix))
casted_ptrs = multires_fn(X.pointer, self.levels)
imgs = []
for casted_ptr in casted_ptrs:
img = iio.ANTsImage(pixeltype=X.pixeltype, dimension=X.dimension,
components=X.components, pointer=casted_ptr)
if self.keep_shape:
img = img.resample_image_to_target(X)
imgs.append(img)
return imgs
|
[
"def",
"transform",
"(",
"self",
",",
"X",
",",
"y",
"=",
"None",
")",
":",
"insuffix",
"=",
"X",
".",
"_libsuffix",
"multires_fn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'multiResolutionAntsImage%s'",
"%",
"(",
"insuffix",
")",
")",
"casted_ptrs",
"=",
"multires_fn",
"(",
"X",
".",
"pointer",
",",
"self",
".",
"levels",
")",
"imgs",
"=",
"[",
"]",
"for",
"casted_ptr",
"in",
"casted_ptrs",
":",
"img",
"=",
"iio",
".",
"ANTsImage",
"(",
"pixeltype",
"=",
"X",
".",
"pixeltype",
",",
"dimension",
"=",
"X",
".",
"dimension",
",",
"components",
"=",
"X",
".",
"components",
",",
"pointer",
"=",
"casted_ptr",
")",
"if",
"self",
".",
"keep_shape",
":",
"img",
"=",
"img",
".",
"resample_image_to_target",
"(",
"X",
")",
"imgs",
".",
"append",
"(",
"img",
")",
"return",
"imgs"
] |
Generate a set of multi-resolution ANTsImage types
Arguments
---------
X : ANTsImage
image to transform
y : ANTsImage (optional)
another image to transform
Example
-------
>>> import ants
>>> multires = ants.contrib.MultiResolutionImage(levels=4)
>>> img = ants.image_read(ants.get_data('r16'))
>>> imgs = multires.transform(img)
|
[
"Generate",
"a",
"set",
"of",
"multi",
"-",
"resolution",
"ANTsImage",
"types"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/contrib/sampling/transforms.py#L60-L91
|
237,184
|
ANTsX/ANTsPy
|
ants/contrib/sampling/transforms.py
|
LocallyBlurIntensity.transform
|
def transform(self, X, y=None):
"""
Locally blur an image by applying a gradient anisotropic diffusion filter.
Arguments
---------
X : ANTsImage
image to transform
y : ANTsImage (optional)
another image to transform.
Example
-------
>>> import ants
>>> blur = ants.contrib.LocallyBlurIntensity(1,5)
>>> img2d = ants.image_read(ants.get_data('r16'))
>>> img2d_b = blur.transform(img2d)
>>> ants.plot(img2d)
>>> ants.plot(img2d_b)
>>> img3d = ants.image_read(ants.get_data('mni'))
>>> img3d_b = blur.transform(img3d)
>>> ants.plot(img3d)
>>> ants.plot(img3d_b)
"""
#if X.pixeltype != 'float':
# raise ValueError('image.pixeltype must be float ... use TypeCast transform or clone to float')
insuffix = X._libsuffix
cast_fn = utils.get_lib_fn('locallyBlurAntsImage%s' % (insuffix))
casted_ptr = cast_fn(X.pointer, self.iters, self.conductance)
return iio.ANTsImage(pixeltype=X.pixeltype, dimension=X.dimension,
components=X.components, pointer=casted_ptr)
|
python
|
def transform(self, X, y=None):
"""
Locally blur an image by applying a gradient anisotropic diffusion filter.
Arguments
---------
X : ANTsImage
image to transform
y : ANTsImage (optional)
another image to transform.
Example
-------
>>> import ants
>>> blur = ants.contrib.LocallyBlurIntensity(1,5)
>>> img2d = ants.image_read(ants.get_data('r16'))
>>> img2d_b = blur.transform(img2d)
>>> ants.plot(img2d)
>>> ants.plot(img2d_b)
>>> img3d = ants.image_read(ants.get_data('mni'))
>>> img3d_b = blur.transform(img3d)
>>> ants.plot(img3d)
>>> ants.plot(img3d_b)
"""
#if X.pixeltype != 'float':
# raise ValueError('image.pixeltype must be float ... use TypeCast transform or clone to float')
insuffix = X._libsuffix
cast_fn = utils.get_lib_fn('locallyBlurAntsImage%s' % (insuffix))
casted_ptr = cast_fn(X.pointer, self.iters, self.conductance)
return iio.ANTsImage(pixeltype=X.pixeltype, dimension=X.dimension,
components=X.components, pointer=casted_ptr)
|
[
"def",
"transform",
"(",
"self",
",",
"X",
",",
"y",
"=",
"None",
")",
":",
"#if X.pixeltype != 'float':",
"# raise ValueError('image.pixeltype must be float ... use TypeCast transform or clone to float')",
"insuffix",
"=",
"X",
".",
"_libsuffix",
"cast_fn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'locallyBlurAntsImage%s'",
"%",
"(",
"insuffix",
")",
")",
"casted_ptr",
"=",
"cast_fn",
"(",
"X",
".",
"pointer",
",",
"self",
".",
"iters",
",",
"self",
".",
"conductance",
")",
"return",
"iio",
".",
"ANTsImage",
"(",
"pixeltype",
"=",
"X",
".",
"pixeltype",
",",
"dimension",
"=",
"X",
".",
"dimension",
",",
"components",
"=",
"X",
".",
"components",
",",
"pointer",
"=",
"casted_ptr",
")"
] |
Locally blur an image by applying a gradient anisotropic diffusion filter.
Arguments
---------
X : ANTsImage
image to transform
y : ANTsImage (optional)
another image to transform.
Example
-------
>>> import ants
>>> blur = ants.contrib.LocallyBlurIntensity(1,5)
>>> img2d = ants.image_read(ants.get_data('r16'))
>>> img2d_b = blur.transform(img2d)
>>> ants.plot(img2d)
>>> ants.plot(img2d_b)
>>> img3d = ants.image_read(ants.get_data('mni'))
>>> img3d_b = blur.transform(img3d)
>>> ants.plot(img3d)
>>> ants.plot(img3d_b)
|
[
"Locally",
"blur",
"an",
"image",
"by",
"applying",
"a",
"gradient",
"anisotropic",
"diffusion",
"filter",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/contrib/sampling/transforms.py#L242-L273
|
237,185
|
ANTsX/ANTsPy
|
ants/utils/get_ants_data.py
|
get_data
|
def get_data(name=None):
"""
Get ANTsPy test data filename
ANTsR function: `getANTsRData`
Arguments
---------
name : string
name of test image tag to retrieve
Options:
- 'r16'
- 'r27'
- 'r64'
- 'r85'
- 'ch2'
- 'mni'
- 'surf'
Returns
-------
string
filepath of test image
Example
-------
>>> import ants
>>> mnipath = ants.get_ants_data('mni')
"""
if name is None:
files = []
for fname in os.listdir(data_path):
if (fname.endswith('.nii.gz')) or (fname.endswith('.jpg') or (fname.endswith('.csv'))):
fname = os.path.join(data_path, fname)
files.append(fname)
return files
else:
datapath = None
for fname in os.listdir(data_path):
if (name == fname.split('.')[0]) or ((name+'slice') == fname.split('.')[0]):
datapath = os.path.join(data_path, fname)
if datapath is None:
raise ValueError('File doesnt exist. Options: ' , os.listdir(data_path))
return datapath
|
python
|
def get_data(name=None):
"""
Get ANTsPy test data filename
ANTsR function: `getANTsRData`
Arguments
---------
name : string
name of test image tag to retrieve
Options:
- 'r16'
- 'r27'
- 'r64'
- 'r85'
- 'ch2'
- 'mni'
- 'surf'
Returns
-------
string
filepath of test image
Example
-------
>>> import ants
>>> mnipath = ants.get_ants_data('mni')
"""
if name is None:
files = []
for fname in os.listdir(data_path):
if (fname.endswith('.nii.gz')) or (fname.endswith('.jpg') or (fname.endswith('.csv'))):
fname = os.path.join(data_path, fname)
files.append(fname)
return files
else:
datapath = None
for fname in os.listdir(data_path):
if (name == fname.split('.')[0]) or ((name+'slice') == fname.split('.')[0]):
datapath = os.path.join(data_path, fname)
if datapath is None:
raise ValueError('File doesnt exist. Options: ' , os.listdir(data_path))
return datapath
|
[
"def",
"get_data",
"(",
"name",
"=",
"None",
")",
":",
"if",
"name",
"is",
"None",
":",
"files",
"=",
"[",
"]",
"for",
"fname",
"in",
"os",
".",
"listdir",
"(",
"data_path",
")",
":",
"if",
"(",
"fname",
".",
"endswith",
"(",
"'.nii.gz'",
")",
")",
"or",
"(",
"fname",
".",
"endswith",
"(",
"'.jpg'",
")",
"or",
"(",
"fname",
".",
"endswith",
"(",
"'.csv'",
")",
")",
")",
":",
"fname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"data_path",
",",
"fname",
")",
"files",
".",
"append",
"(",
"fname",
")",
"return",
"files",
"else",
":",
"datapath",
"=",
"None",
"for",
"fname",
"in",
"os",
".",
"listdir",
"(",
"data_path",
")",
":",
"if",
"(",
"name",
"==",
"fname",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
"or",
"(",
"(",
"name",
"+",
"'slice'",
")",
"==",
"fname",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
":",
"datapath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"data_path",
",",
"fname",
")",
"if",
"datapath",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'File doesnt exist. Options: '",
",",
"os",
".",
"listdir",
"(",
"data_path",
")",
")",
"return",
"datapath"
] |
Get ANTsPy test data filename
ANTsR function: `getANTsRData`
Arguments
---------
name : string
name of test image tag to retrieve
Options:
- 'r16'
- 'r27'
- 'r64'
- 'r85'
- 'ch2'
- 'mni'
- 'surf'
Returns
-------
string
filepath of test image
Example
-------
>>> import ants
>>> mnipath = ants.get_ants_data('mni')
|
[
"Get",
"ANTsPy",
"test",
"data",
"filename"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/get_ants_data.py#L11-L54
|
237,186
|
ANTsX/ANTsPy
|
ants/utils/invariant_image_similarity.py
|
convolve_image
|
def convolve_image(image, kernel_image, crop=True):
"""
Convolve one image with another
ANTsR function: `convolveImage`
Arguments
---------
image : ANTsImage
image to convolve
kernel_image : ANTsImage
image acting as kernel
crop : boolean
whether to automatically crop kernel_image
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read(ants.get_ants_data('r16'))
>>> convimg = ants.make_image( (3,3), (1,0,1,0,-4,0,1,0,1) )
>>> convout = ants.convolve_image( fi, convimg )
>>> convimg2 = ants.make_image( (3,3), (0,1,0,1,0,-1,0,-1,0) )
>>> convout2 = ants.convolve_image( fi, convimg2 )
"""
if not isinstance(image, iio.ANTsImage):
raise ValueError('image must be ANTsImage type')
if not isinstance(kernel_image, iio.ANTsImage):
raise ValueError('kernel must be ANTsImage type')
orig_ptype = image.pixeltype
if image.pixeltype != 'float':
image = image.clone('float')
if kernel_image.pixeltype != 'float':
kernel_image = kernel_image.clone('float')
if crop:
kernel_image_mask = utils.get_mask(kernel_image)
kernel_image = utils.crop_image(kernel_image, kernel_image_mask)
kernel_image_mask = utils.crop_image(kernel_image_mask, kernel_image_mask)
kernel_image[kernel_image_mask==0] = kernel_image[kernel_image_mask==1].mean()
libfn = utils.get_lib_fn('convolveImageF%i' % image.dimension)
conv_itk_image = libfn(image.pointer, kernel_image.pointer)
conv_ants_image = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
components=image.components, pointer=conv_itk_image)
if orig_ptype != 'float':
conv_ants_image = conv_ants_image.clone(orig_ptype)
return conv_ants_image
|
python
|
def convolve_image(image, kernel_image, crop=True):
"""
Convolve one image with another
ANTsR function: `convolveImage`
Arguments
---------
image : ANTsImage
image to convolve
kernel_image : ANTsImage
image acting as kernel
crop : boolean
whether to automatically crop kernel_image
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read(ants.get_ants_data('r16'))
>>> convimg = ants.make_image( (3,3), (1,0,1,0,-4,0,1,0,1) )
>>> convout = ants.convolve_image( fi, convimg )
>>> convimg2 = ants.make_image( (3,3), (0,1,0,1,0,-1,0,-1,0) )
>>> convout2 = ants.convolve_image( fi, convimg2 )
"""
if not isinstance(image, iio.ANTsImage):
raise ValueError('image must be ANTsImage type')
if not isinstance(kernel_image, iio.ANTsImage):
raise ValueError('kernel must be ANTsImage type')
orig_ptype = image.pixeltype
if image.pixeltype != 'float':
image = image.clone('float')
if kernel_image.pixeltype != 'float':
kernel_image = kernel_image.clone('float')
if crop:
kernel_image_mask = utils.get_mask(kernel_image)
kernel_image = utils.crop_image(kernel_image, kernel_image_mask)
kernel_image_mask = utils.crop_image(kernel_image_mask, kernel_image_mask)
kernel_image[kernel_image_mask==0] = kernel_image[kernel_image_mask==1].mean()
libfn = utils.get_lib_fn('convolveImageF%i' % image.dimension)
conv_itk_image = libfn(image.pointer, kernel_image.pointer)
conv_ants_image = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
components=image.components, pointer=conv_itk_image)
if orig_ptype != 'float':
conv_ants_image = conv_ants_image.clone(orig_ptype)
return conv_ants_image
|
[
"def",
"convolve_image",
"(",
"image",
",",
"kernel_image",
",",
"crop",
"=",
"True",
")",
":",
"if",
"not",
"isinstance",
"(",
"image",
",",
"iio",
".",
"ANTsImage",
")",
":",
"raise",
"ValueError",
"(",
"'image must be ANTsImage type'",
")",
"if",
"not",
"isinstance",
"(",
"kernel_image",
",",
"iio",
".",
"ANTsImage",
")",
":",
"raise",
"ValueError",
"(",
"'kernel must be ANTsImage type'",
")",
"orig_ptype",
"=",
"image",
".",
"pixeltype",
"if",
"image",
".",
"pixeltype",
"!=",
"'float'",
":",
"image",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"if",
"kernel_image",
".",
"pixeltype",
"!=",
"'float'",
":",
"kernel_image",
"=",
"kernel_image",
".",
"clone",
"(",
"'float'",
")",
"if",
"crop",
":",
"kernel_image_mask",
"=",
"utils",
".",
"get_mask",
"(",
"kernel_image",
")",
"kernel_image",
"=",
"utils",
".",
"crop_image",
"(",
"kernel_image",
",",
"kernel_image_mask",
")",
"kernel_image_mask",
"=",
"utils",
".",
"crop_image",
"(",
"kernel_image_mask",
",",
"kernel_image_mask",
")",
"kernel_image",
"[",
"kernel_image_mask",
"==",
"0",
"]",
"=",
"kernel_image",
"[",
"kernel_image_mask",
"==",
"1",
"]",
".",
"mean",
"(",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'convolveImageF%i'",
"%",
"image",
".",
"dimension",
")",
"conv_itk_image",
"=",
"libfn",
"(",
"image",
".",
"pointer",
",",
"kernel_image",
".",
"pointer",
")",
"conv_ants_image",
"=",
"iio",
".",
"ANTsImage",
"(",
"pixeltype",
"=",
"image",
".",
"pixeltype",
",",
"dimension",
"=",
"image",
".",
"dimension",
",",
"components",
"=",
"image",
".",
"components",
",",
"pointer",
"=",
"conv_itk_image",
")",
"if",
"orig_ptype",
"!=",
"'float'",
":",
"conv_ants_image",
"=",
"conv_ants_image",
".",
"clone",
"(",
"orig_ptype",
")",
"return",
"conv_ants_image"
] |
Convolve one image with another
ANTsR function: `convolveImage`
Arguments
---------
image : ANTsImage
image to convolve
kernel_image : ANTsImage
image acting as kernel
crop : boolean
whether to automatically crop kernel_image
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read(ants.get_ants_data('r16'))
>>> convimg = ants.make_image( (3,3), (1,0,1,0,-4,0,1,0,1) )
>>> convout = ants.convolve_image( fi, convimg )
>>> convimg2 = ants.make_image( (3,3), (0,1,0,1,0,-1,0,-1,0) )
>>> convout2 = ants.convolve_image( fi, convimg2 )
|
[
"Convolve",
"one",
"image",
"with",
"another"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/invariant_image_similarity.py#L200-L255
|
237,187
|
ANTsX/ANTsPy
|
ants/utils/ndimage_to_list.py
|
ndimage_to_list
|
def ndimage_to_list(image):
"""
Split a n dimensional ANTsImage into a list
of n-1 dimensional ANTsImages
Arguments
---------
image : ANTsImage
n-dimensional image to split
Returns
-------
list of ANTsImage types
Example
-------
>>> import ants
>>> image = ants.image_read(ants.get_ants_data('r16'))
>>> image2 = ants.image_read(ants.get_ants_data('r16'))
>>> imageTar = ants.make_image( ( *image2.shape, 2 ) )
>>> image3 = ants.list_to_ndimage( imageTar, [image,image2])
>>> image3.dimension == 3
>>> images_unmerged = ants.ndimage_to_list( image3 )
>>> len(images_unmerged) == 2
>>> images_unmerged[0].dimension == 2
"""
inpixeltype = image.pixeltype
dimension = image.dimension
components = 1
imageShape = image.shape
nSections = imageShape[ dimension - 1 ]
subdimension = dimension - 1
suborigin = iio.get_origin( image )[0:subdimension]
subspacing = iio.get_spacing( image )[0:subdimension]
subdirection = np.eye( subdimension )
for i in range( subdimension ):
subdirection[i,:] = iio.get_direction( image )[i,0:subdimension]
subdim = image.shape[ 0:subdimension ]
imagelist = []
for i in range( nSections ):
img = utils.slice_image( image, axis = subdimension, idx = i )
iio.set_spacing( img, subspacing )
iio.set_origin( img, suborigin )
iio.set_direction( img, subdirection )
imagelist.append( img )
return imagelist
|
python
|
def ndimage_to_list(image):
"""
Split a n dimensional ANTsImage into a list
of n-1 dimensional ANTsImages
Arguments
---------
image : ANTsImage
n-dimensional image to split
Returns
-------
list of ANTsImage types
Example
-------
>>> import ants
>>> image = ants.image_read(ants.get_ants_data('r16'))
>>> image2 = ants.image_read(ants.get_ants_data('r16'))
>>> imageTar = ants.make_image( ( *image2.shape, 2 ) )
>>> image3 = ants.list_to_ndimage( imageTar, [image,image2])
>>> image3.dimension == 3
>>> images_unmerged = ants.ndimage_to_list( image3 )
>>> len(images_unmerged) == 2
>>> images_unmerged[0].dimension == 2
"""
inpixeltype = image.pixeltype
dimension = image.dimension
components = 1
imageShape = image.shape
nSections = imageShape[ dimension - 1 ]
subdimension = dimension - 1
suborigin = iio.get_origin( image )[0:subdimension]
subspacing = iio.get_spacing( image )[0:subdimension]
subdirection = np.eye( subdimension )
for i in range( subdimension ):
subdirection[i,:] = iio.get_direction( image )[i,0:subdimension]
subdim = image.shape[ 0:subdimension ]
imagelist = []
for i in range( nSections ):
img = utils.slice_image( image, axis = subdimension, idx = i )
iio.set_spacing( img, subspacing )
iio.set_origin( img, suborigin )
iio.set_direction( img, subdirection )
imagelist.append( img )
return imagelist
|
[
"def",
"ndimage_to_list",
"(",
"image",
")",
":",
"inpixeltype",
"=",
"image",
".",
"pixeltype",
"dimension",
"=",
"image",
".",
"dimension",
"components",
"=",
"1",
"imageShape",
"=",
"image",
".",
"shape",
"nSections",
"=",
"imageShape",
"[",
"dimension",
"-",
"1",
"]",
"subdimension",
"=",
"dimension",
"-",
"1",
"suborigin",
"=",
"iio",
".",
"get_origin",
"(",
"image",
")",
"[",
"0",
":",
"subdimension",
"]",
"subspacing",
"=",
"iio",
".",
"get_spacing",
"(",
"image",
")",
"[",
"0",
":",
"subdimension",
"]",
"subdirection",
"=",
"np",
".",
"eye",
"(",
"subdimension",
")",
"for",
"i",
"in",
"range",
"(",
"subdimension",
")",
":",
"subdirection",
"[",
"i",
",",
":",
"]",
"=",
"iio",
".",
"get_direction",
"(",
"image",
")",
"[",
"i",
",",
"0",
":",
"subdimension",
"]",
"subdim",
"=",
"image",
".",
"shape",
"[",
"0",
":",
"subdimension",
"]",
"imagelist",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"nSections",
")",
":",
"img",
"=",
"utils",
".",
"slice_image",
"(",
"image",
",",
"axis",
"=",
"subdimension",
",",
"idx",
"=",
"i",
")",
"iio",
".",
"set_spacing",
"(",
"img",
",",
"subspacing",
")",
"iio",
".",
"set_origin",
"(",
"img",
",",
"suborigin",
")",
"iio",
".",
"set_direction",
"(",
"img",
",",
"subdirection",
")",
"imagelist",
".",
"append",
"(",
"img",
")",
"return",
"imagelist"
] |
Split a n dimensional ANTsImage into a list
of n-1 dimensional ANTsImages
Arguments
---------
image : ANTsImage
n-dimensional image to split
Returns
-------
list of ANTsImage types
Example
-------
>>> import ants
>>> image = ants.image_read(ants.get_ants_data('r16'))
>>> image2 = ants.image_read(ants.get_ants_data('r16'))
>>> imageTar = ants.make_image( ( *image2.shape, 2 ) )
>>> image3 = ants.list_to_ndimage( imageTar, [image,image2])
>>> image3.dimension == 3
>>> images_unmerged = ants.ndimage_to_list( image3 )
>>> len(images_unmerged) == 2
>>> images_unmerged[0].dimension == 2
|
[
"Split",
"a",
"n",
"dimensional",
"ANTsImage",
"into",
"a",
"list",
"of",
"n",
"-",
"1",
"dimensional",
"ANTsImages"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/ndimage_to_list.py#L67-L113
|
237,188
|
ANTsX/ANTsPy
|
ants/utils/process_args.py
|
_int_antsProcessArguments
|
def _int_antsProcessArguments(args):
"""
Needs to be better validated.
"""
p_args = []
if isinstance(args, dict):
for argname, argval in args.items():
if '-MULTINAME-' in argname:
# have this little hack because python doesnt support
# multiple dict entries w/ the same key like R lists
argname = argname[:argname.find('-MULTINAME-')]
if argval is not None:
if len(argname) > 1:
p_args.append('--%s' % argname)
else:
p_args.append('-%s' % argname)
if isinstance(argval, iio.ANTsImage):
p_args.append(_ptrstr(argval.pointer))
elif isinstance(argval, list):
for av in argval:
if isinstance(av, iio.ANTsImage):
av = _ptrstr(av.pointer)
p_args.append(av)
else:
p_args.append(str(argval))
elif isinstance(args, list):
for arg in args:
if isinstance(arg, iio.ANTsImage):
pointer_string = _ptrstr(arg.pointer)
p_arg = pointer_string
elif arg is None:
pass
else:
p_arg = str(arg)
p_args.append(p_arg)
return p_args
|
python
|
def _int_antsProcessArguments(args):
"""
Needs to be better validated.
"""
p_args = []
if isinstance(args, dict):
for argname, argval in args.items():
if '-MULTINAME-' in argname:
# have this little hack because python doesnt support
# multiple dict entries w/ the same key like R lists
argname = argname[:argname.find('-MULTINAME-')]
if argval is not None:
if len(argname) > 1:
p_args.append('--%s' % argname)
else:
p_args.append('-%s' % argname)
if isinstance(argval, iio.ANTsImage):
p_args.append(_ptrstr(argval.pointer))
elif isinstance(argval, list):
for av in argval:
if isinstance(av, iio.ANTsImage):
av = _ptrstr(av.pointer)
p_args.append(av)
else:
p_args.append(str(argval))
elif isinstance(args, list):
for arg in args:
if isinstance(arg, iio.ANTsImage):
pointer_string = _ptrstr(arg.pointer)
p_arg = pointer_string
elif arg is None:
pass
else:
p_arg = str(arg)
p_args.append(p_arg)
return p_args
|
[
"def",
"_int_antsProcessArguments",
"(",
"args",
")",
":",
"p_args",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"args",
",",
"dict",
")",
":",
"for",
"argname",
",",
"argval",
"in",
"args",
".",
"items",
"(",
")",
":",
"if",
"'-MULTINAME-'",
"in",
"argname",
":",
"# have this little hack because python doesnt support",
"# multiple dict entries w/ the same key like R lists",
"argname",
"=",
"argname",
"[",
":",
"argname",
".",
"find",
"(",
"'-MULTINAME-'",
")",
"]",
"if",
"argval",
"is",
"not",
"None",
":",
"if",
"len",
"(",
"argname",
")",
">",
"1",
":",
"p_args",
".",
"append",
"(",
"'--%s'",
"%",
"argname",
")",
"else",
":",
"p_args",
".",
"append",
"(",
"'-%s'",
"%",
"argname",
")",
"if",
"isinstance",
"(",
"argval",
",",
"iio",
".",
"ANTsImage",
")",
":",
"p_args",
".",
"append",
"(",
"_ptrstr",
"(",
"argval",
".",
"pointer",
")",
")",
"elif",
"isinstance",
"(",
"argval",
",",
"list",
")",
":",
"for",
"av",
"in",
"argval",
":",
"if",
"isinstance",
"(",
"av",
",",
"iio",
".",
"ANTsImage",
")",
":",
"av",
"=",
"_ptrstr",
"(",
"av",
".",
"pointer",
")",
"p_args",
".",
"append",
"(",
"av",
")",
"else",
":",
"p_args",
".",
"append",
"(",
"str",
"(",
"argval",
")",
")",
"elif",
"isinstance",
"(",
"args",
",",
"list",
")",
":",
"for",
"arg",
"in",
"args",
":",
"if",
"isinstance",
"(",
"arg",
",",
"iio",
".",
"ANTsImage",
")",
":",
"pointer_string",
"=",
"_ptrstr",
"(",
"arg",
".",
"pointer",
")",
"p_arg",
"=",
"pointer_string",
"elif",
"arg",
"is",
"None",
":",
"pass",
"else",
":",
"p_arg",
"=",
"str",
"(",
"arg",
")",
"p_args",
".",
"append",
"(",
"p_arg",
")",
"return",
"p_args"
] |
Needs to be better validated.
|
[
"Needs",
"to",
"be",
"better",
"validated",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/process_args.py#L34-L71
|
237,189
|
ANTsX/ANTsPy
|
ants/learn/decomposition.py
|
initialize_eigenanatomy
|
def initialize_eigenanatomy(initmat, mask=None, initlabels=None, nreps=1, smoothing=0):
"""
InitializeEigenanatomy is a helper function to initialize sparseDecom
and sparseDecom2. Can be used to estimate sparseness parameters per
eigenvector. The user then only chooses nvecs and optional
regularization parameters.
Arguments
---------
initmat : np.ndarray or ANTsImage
input matrix where rows provide initial vector values.
alternatively, this can be an antsImage which contains labeled regions.
mask : ANTsImage
mask if available
initlabels : list/tuple of integers
which labels in initmat to use as initial components
nreps : integer
nrepetitions to use
smoothing : float
if using an initial label image, optionally smooth each roi
Returns
-------
dict w/ the following key/value pairs:
`initlist` : list of ANTsImage types
initialization list(s) for sparseDecom(2)
`mask` : ANTsImage
mask(s) for sparseDecom(2)
`enames` : list of strings
string names of components for sparseDecom(2)
Example
-------
>>> import ants
>>> import numpy as np
>>> mat = np.random.randn(4,100).astype('float32')
>>> init = ants.initialize_eigenanatomy(mat)
"""
if isinstance(initmat, iio.ANTsImage):
# create initmat from each of the unique labels
if mask is not None:
selectvec = mask > 0
else:
selectvec = initmat > 0
initmatvec = initmat[selectvec]
if initlabels is None:
ulabs = np.sort(np.unique(initmatvec))
ulabs = ulabs[ulabs > 0]
else:
ulabs = initlabels
nvox = len(initmatvec)
temp = np.zeros((len(ulabs), nvox))
for x in range(len(ulabs)):
timg = utils.threshold_image(initmat, ulabs[x]-1e-4, ulabs[x]+1e-4)
if smoothing > 0:
timg = utils.smooth_image(timg, smoothing)
temp[x,:] = timg[selectvec]
initmat = temp
nclasses = initmat.shape[0]
classlabels = ['init%i'%i for i in range(nclasses)]
initlist = []
if mask is None:
maskmat = np.zeros(initmat.shape)
maskmat[0,:] = 1
mask = core.from_numpy(maskmat.astype('float32'))
eanatnames = ['A'] * (nclasses*nreps)
ct = 0
for i in range(nclasses):
vecimg = mask.clone('float')
initf = initmat[i,:]
vecimg[mask==1] = initf
for nr in range(nreps):
initlist.append(vecimg)
eanatnames[ct+nr-1] = str(classlabels[i])
ct = ct + 1
return {'initlist': initlist, 'mask': mask, 'enames': eanatnames}
|
python
|
def initialize_eigenanatomy(initmat, mask=None, initlabels=None, nreps=1, smoothing=0):
"""
InitializeEigenanatomy is a helper function to initialize sparseDecom
and sparseDecom2. Can be used to estimate sparseness parameters per
eigenvector. The user then only chooses nvecs and optional
regularization parameters.
Arguments
---------
initmat : np.ndarray or ANTsImage
input matrix where rows provide initial vector values.
alternatively, this can be an antsImage which contains labeled regions.
mask : ANTsImage
mask if available
initlabels : list/tuple of integers
which labels in initmat to use as initial components
nreps : integer
nrepetitions to use
smoothing : float
if using an initial label image, optionally smooth each roi
Returns
-------
dict w/ the following key/value pairs:
`initlist` : list of ANTsImage types
initialization list(s) for sparseDecom(2)
`mask` : ANTsImage
mask(s) for sparseDecom(2)
`enames` : list of strings
string names of components for sparseDecom(2)
Example
-------
>>> import ants
>>> import numpy as np
>>> mat = np.random.randn(4,100).astype('float32')
>>> init = ants.initialize_eigenanatomy(mat)
"""
if isinstance(initmat, iio.ANTsImage):
# create initmat from each of the unique labels
if mask is not None:
selectvec = mask > 0
else:
selectvec = initmat > 0
initmatvec = initmat[selectvec]
if initlabels is None:
ulabs = np.sort(np.unique(initmatvec))
ulabs = ulabs[ulabs > 0]
else:
ulabs = initlabels
nvox = len(initmatvec)
temp = np.zeros((len(ulabs), nvox))
for x in range(len(ulabs)):
timg = utils.threshold_image(initmat, ulabs[x]-1e-4, ulabs[x]+1e-4)
if smoothing > 0:
timg = utils.smooth_image(timg, smoothing)
temp[x,:] = timg[selectvec]
initmat = temp
nclasses = initmat.shape[0]
classlabels = ['init%i'%i for i in range(nclasses)]
initlist = []
if mask is None:
maskmat = np.zeros(initmat.shape)
maskmat[0,:] = 1
mask = core.from_numpy(maskmat.astype('float32'))
eanatnames = ['A'] * (nclasses*nreps)
ct = 0
for i in range(nclasses):
vecimg = mask.clone('float')
initf = initmat[i,:]
vecimg[mask==1] = initf
for nr in range(nreps):
initlist.append(vecimg)
eanatnames[ct+nr-1] = str(classlabels[i])
ct = ct + 1
return {'initlist': initlist, 'mask': mask, 'enames': eanatnames}
|
[
"def",
"initialize_eigenanatomy",
"(",
"initmat",
",",
"mask",
"=",
"None",
",",
"initlabels",
"=",
"None",
",",
"nreps",
"=",
"1",
",",
"smoothing",
"=",
"0",
")",
":",
"if",
"isinstance",
"(",
"initmat",
",",
"iio",
".",
"ANTsImage",
")",
":",
"# create initmat from each of the unique labels",
"if",
"mask",
"is",
"not",
"None",
":",
"selectvec",
"=",
"mask",
">",
"0",
"else",
":",
"selectvec",
"=",
"initmat",
">",
"0",
"initmatvec",
"=",
"initmat",
"[",
"selectvec",
"]",
"if",
"initlabels",
"is",
"None",
":",
"ulabs",
"=",
"np",
".",
"sort",
"(",
"np",
".",
"unique",
"(",
"initmatvec",
")",
")",
"ulabs",
"=",
"ulabs",
"[",
"ulabs",
">",
"0",
"]",
"else",
":",
"ulabs",
"=",
"initlabels",
"nvox",
"=",
"len",
"(",
"initmatvec",
")",
"temp",
"=",
"np",
".",
"zeros",
"(",
"(",
"len",
"(",
"ulabs",
")",
",",
"nvox",
")",
")",
"for",
"x",
"in",
"range",
"(",
"len",
"(",
"ulabs",
")",
")",
":",
"timg",
"=",
"utils",
".",
"threshold_image",
"(",
"initmat",
",",
"ulabs",
"[",
"x",
"]",
"-",
"1e-4",
",",
"ulabs",
"[",
"x",
"]",
"+",
"1e-4",
")",
"if",
"smoothing",
">",
"0",
":",
"timg",
"=",
"utils",
".",
"smooth_image",
"(",
"timg",
",",
"smoothing",
")",
"temp",
"[",
"x",
",",
":",
"]",
"=",
"timg",
"[",
"selectvec",
"]",
"initmat",
"=",
"temp",
"nclasses",
"=",
"initmat",
".",
"shape",
"[",
"0",
"]",
"classlabels",
"=",
"[",
"'init%i'",
"%",
"i",
"for",
"i",
"in",
"range",
"(",
"nclasses",
")",
"]",
"initlist",
"=",
"[",
"]",
"if",
"mask",
"is",
"None",
":",
"maskmat",
"=",
"np",
".",
"zeros",
"(",
"initmat",
".",
"shape",
")",
"maskmat",
"[",
"0",
",",
":",
"]",
"=",
"1",
"mask",
"=",
"core",
".",
"from_numpy",
"(",
"maskmat",
".",
"astype",
"(",
"'float32'",
")",
")",
"eanatnames",
"=",
"[",
"'A'",
"]",
"*",
"(",
"nclasses",
"*",
"nreps",
")",
"ct",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"nclasses",
")",
":",
"vecimg",
"=",
"mask",
".",
"clone",
"(",
"'float'",
")",
"initf",
"=",
"initmat",
"[",
"i",
",",
":",
"]",
"vecimg",
"[",
"mask",
"==",
"1",
"]",
"=",
"initf",
"for",
"nr",
"in",
"range",
"(",
"nreps",
")",
":",
"initlist",
".",
"append",
"(",
"vecimg",
")",
"eanatnames",
"[",
"ct",
"+",
"nr",
"-",
"1",
"]",
"=",
"str",
"(",
"classlabels",
"[",
"i",
"]",
")",
"ct",
"=",
"ct",
"+",
"1",
"return",
"{",
"'initlist'",
":",
"initlist",
",",
"'mask'",
":",
"mask",
",",
"'enames'",
":",
"eanatnames",
"}"
] |
InitializeEigenanatomy is a helper function to initialize sparseDecom
and sparseDecom2. Can be used to estimate sparseness parameters per
eigenvector. The user then only chooses nvecs and optional
regularization parameters.
Arguments
---------
initmat : np.ndarray or ANTsImage
input matrix where rows provide initial vector values.
alternatively, this can be an antsImage which contains labeled regions.
mask : ANTsImage
mask if available
initlabels : list/tuple of integers
which labels in initmat to use as initial components
nreps : integer
nrepetitions to use
smoothing : float
if using an initial label image, optionally smooth each roi
Returns
-------
dict w/ the following key/value pairs:
`initlist` : list of ANTsImage types
initialization list(s) for sparseDecom(2)
`mask` : ANTsImage
mask(s) for sparseDecom(2)
`enames` : list of strings
string names of components for sparseDecom(2)
Example
-------
>>> import ants
>>> import numpy as np
>>> mat = np.random.randn(4,100).astype('float32')
>>> init = ants.initialize_eigenanatomy(mat)
|
[
"InitializeEigenanatomy",
"is",
"a",
"helper",
"function",
"to",
"initialize",
"sparseDecom",
"and",
"sparseDecom2",
".",
"Can",
"be",
"used",
"to",
"estimate",
"sparseness",
"parameters",
"per",
"eigenvector",
".",
"The",
"user",
"then",
"only",
"chooses",
"nvecs",
"and",
"optional",
"regularization",
"parameters",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/learn/decomposition.py#L251-L338
|
237,190
|
ANTsX/ANTsPy
|
ants/learn/decomposition.py
|
eig_seg
|
def eig_seg(mask, img_list, apply_segmentation_to_images=False, cthresh=0, smooth=1):
"""
Segment a mask into regions based on the max value in an image list.
At a given voxel the segmentation label will contain the index to the image
that has the largest value. If the 3rd image has the greatest value,
the segmentation label will be 3 at that voxel.
Arguments
---------
mask : ANTsImage
D-dimensional mask > 0 defining segmentation region.
img_list : collection of ANTsImage or np.ndarray
images to use
apply_segmentation_to_images : boolean
determines if original image list is modified by the segmentation.
cthresh : integer
throw away isolated clusters smaller than this value
smooth : float
smooth the input data first by this value
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mylist = [ants.image_read(ants.get_ants_data('r16')),
ants.image_read(ants.get_ants_data('r27')),
ants.image_read(ants.get_ants_data('r85'))]
>>> myseg = ants.eig_seg(ants.get_mask(mylist[0]), mylist)
"""
maskvox = mask > 0
maskseg = mask.clone()
maskseg[maskvox] = 0
if isinstance(img_list, np.ndarray):
mydata = img_list
elif isinstance(img_list, (tuple, list)):
mydata = core.image_list_to_matrix(img_list, mask)
if (smooth > 0):
for i in range(mydata.shape[0]):
temp_img = core.make_image(mask, mydata[i,:], pixeltype='float')
temp_img = utils.smooth_image(temp_img, smooth, sigma_in_physical_coordinates=True)
mydata[i,:] = temp_img[mask >= 0.5]
segids = np.argmax(np.abs(mydata), axis=0)+1
segmax = np.max(np.abs(mydata), axis=0)
maskseg[maskvox] = (segids * (segmax > 1e-09))
if cthresh > 0:
for kk in range(int(maskseg.max())):
timg = utils.threshold_image(maskseg, kk, kk)
timg = utils.label_clusters(timg, cthresh)
timg = utils.threshold_image(timg, 1, 1e15) * float(kk)
maskseg[maskseg == kk] = timg[maskseg == kk]
if (apply_segmentation_to_images) and (not isinstance(img_list, np.ndarray)):
for i in range(len(img_list)):
img = img_list[i]
img[maskseg != float(i)] = 0
img_list[i] = img
return maskseg
|
python
|
def eig_seg(mask, img_list, apply_segmentation_to_images=False, cthresh=0, smooth=1):
"""
Segment a mask into regions based on the max value in an image list.
At a given voxel the segmentation label will contain the index to the image
that has the largest value. If the 3rd image has the greatest value,
the segmentation label will be 3 at that voxel.
Arguments
---------
mask : ANTsImage
D-dimensional mask > 0 defining segmentation region.
img_list : collection of ANTsImage or np.ndarray
images to use
apply_segmentation_to_images : boolean
determines if original image list is modified by the segmentation.
cthresh : integer
throw away isolated clusters smaller than this value
smooth : float
smooth the input data first by this value
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mylist = [ants.image_read(ants.get_ants_data('r16')),
ants.image_read(ants.get_ants_data('r27')),
ants.image_read(ants.get_ants_data('r85'))]
>>> myseg = ants.eig_seg(ants.get_mask(mylist[0]), mylist)
"""
maskvox = mask > 0
maskseg = mask.clone()
maskseg[maskvox] = 0
if isinstance(img_list, np.ndarray):
mydata = img_list
elif isinstance(img_list, (tuple, list)):
mydata = core.image_list_to_matrix(img_list, mask)
if (smooth > 0):
for i in range(mydata.shape[0]):
temp_img = core.make_image(mask, mydata[i,:], pixeltype='float')
temp_img = utils.smooth_image(temp_img, smooth, sigma_in_physical_coordinates=True)
mydata[i,:] = temp_img[mask >= 0.5]
segids = np.argmax(np.abs(mydata), axis=0)+1
segmax = np.max(np.abs(mydata), axis=0)
maskseg[maskvox] = (segids * (segmax > 1e-09))
if cthresh > 0:
for kk in range(int(maskseg.max())):
timg = utils.threshold_image(maskseg, kk, kk)
timg = utils.label_clusters(timg, cthresh)
timg = utils.threshold_image(timg, 1, 1e15) * float(kk)
maskseg[maskseg == kk] = timg[maskseg == kk]
if (apply_segmentation_to_images) and (not isinstance(img_list, np.ndarray)):
for i in range(len(img_list)):
img = img_list[i]
img[maskseg != float(i)] = 0
img_list[i] = img
return maskseg
|
[
"def",
"eig_seg",
"(",
"mask",
",",
"img_list",
",",
"apply_segmentation_to_images",
"=",
"False",
",",
"cthresh",
"=",
"0",
",",
"smooth",
"=",
"1",
")",
":",
"maskvox",
"=",
"mask",
">",
"0",
"maskseg",
"=",
"mask",
".",
"clone",
"(",
")",
"maskseg",
"[",
"maskvox",
"]",
"=",
"0",
"if",
"isinstance",
"(",
"img_list",
",",
"np",
".",
"ndarray",
")",
":",
"mydata",
"=",
"img_list",
"elif",
"isinstance",
"(",
"img_list",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"mydata",
"=",
"core",
".",
"image_list_to_matrix",
"(",
"img_list",
",",
"mask",
")",
"if",
"(",
"smooth",
">",
"0",
")",
":",
"for",
"i",
"in",
"range",
"(",
"mydata",
".",
"shape",
"[",
"0",
"]",
")",
":",
"temp_img",
"=",
"core",
".",
"make_image",
"(",
"mask",
",",
"mydata",
"[",
"i",
",",
":",
"]",
",",
"pixeltype",
"=",
"'float'",
")",
"temp_img",
"=",
"utils",
".",
"smooth_image",
"(",
"temp_img",
",",
"smooth",
",",
"sigma_in_physical_coordinates",
"=",
"True",
")",
"mydata",
"[",
"i",
",",
":",
"]",
"=",
"temp_img",
"[",
"mask",
">=",
"0.5",
"]",
"segids",
"=",
"np",
".",
"argmax",
"(",
"np",
".",
"abs",
"(",
"mydata",
")",
",",
"axis",
"=",
"0",
")",
"+",
"1",
"segmax",
"=",
"np",
".",
"max",
"(",
"np",
".",
"abs",
"(",
"mydata",
")",
",",
"axis",
"=",
"0",
")",
"maskseg",
"[",
"maskvox",
"]",
"=",
"(",
"segids",
"*",
"(",
"segmax",
">",
"1e-09",
")",
")",
"if",
"cthresh",
">",
"0",
":",
"for",
"kk",
"in",
"range",
"(",
"int",
"(",
"maskseg",
".",
"max",
"(",
")",
")",
")",
":",
"timg",
"=",
"utils",
".",
"threshold_image",
"(",
"maskseg",
",",
"kk",
",",
"kk",
")",
"timg",
"=",
"utils",
".",
"label_clusters",
"(",
"timg",
",",
"cthresh",
")",
"timg",
"=",
"utils",
".",
"threshold_image",
"(",
"timg",
",",
"1",
",",
"1e15",
")",
"*",
"float",
"(",
"kk",
")",
"maskseg",
"[",
"maskseg",
"==",
"kk",
"]",
"=",
"timg",
"[",
"maskseg",
"==",
"kk",
"]",
"if",
"(",
"apply_segmentation_to_images",
")",
"and",
"(",
"not",
"isinstance",
"(",
"img_list",
",",
"np",
".",
"ndarray",
")",
")",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"img_list",
")",
")",
":",
"img",
"=",
"img_list",
"[",
"i",
"]",
"img",
"[",
"maskseg",
"!=",
"float",
"(",
"i",
")",
"]",
"=",
"0",
"img_list",
"[",
"i",
"]",
"=",
"img",
"return",
"maskseg"
] |
Segment a mask into regions based on the max value in an image list.
At a given voxel the segmentation label will contain the index to the image
that has the largest value. If the 3rd image has the greatest value,
the segmentation label will be 3 at that voxel.
Arguments
---------
mask : ANTsImage
D-dimensional mask > 0 defining segmentation region.
img_list : collection of ANTsImage or np.ndarray
images to use
apply_segmentation_to_images : boolean
determines if original image list is modified by the segmentation.
cthresh : integer
throw away isolated clusters smaller than this value
smooth : float
smooth the input data first by this value
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> mylist = [ants.image_read(ants.get_ants_data('r16')),
ants.image_read(ants.get_ants_data('r27')),
ants.image_read(ants.get_ants_data('r85'))]
>>> myseg = ants.eig_seg(ants.get_mask(mylist[0]), mylist)
|
[
"Segment",
"a",
"mask",
"into",
"regions",
"based",
"on",
"the",
"max",
"value",
"in",
"an",
"image",
"list",
".",
"At",
"a",
"given",
"voxel",
"the",
"segmentation",
"label",
"will",
"contain",
"the",
"index",
"to",
"the",
"image",
"that",
"has",
"the",
"largest",
"value",
".",
"If",
"the",
"3rd",
"image",
"has",
"the",
"greatest",
"value",
"the",
"segmentation",
"label",
"will",
"be",
"3",
"at",
"that",
"voxel",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/learn/decomposition.py#L342-L409
|
237,191
|
ANTsX/ANTsPy
|
ants/utils/label_stats.py
|
label_stats
|
def label_stats(image, label_image):
"""
Get label statistics from image
ANTsR function: `labelStats`
Arguments
---------
image : ANTsImage
Image from which statistics will be calculated
label_image : ANTsImage
Label image
Returns
-------
ndarray ?
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') , 2 )
>>> image = ants.resample_image( image, (64,64), 1, 0 )
>>> mask = ants.get_mask(image)
>>> segs1 = ants.kmeans_segmentation( image, 3 )
>>> stats = ants.label_stats(image, segs1['segmentation'])
"""
image_float = image.clone('float')
label_image_int = label_image.clone('unsigned int')
libfn = utils.get_lib_fn('labelStats%iD' % image.dimension)
df = libfn(image_float.pointer, label_image_int.pointer)
#df = df[order(df$LabelValue), ]
return pd.DataFrame(df)
|
python
|
def label_stats(image, label_image):
"""
Get label statistics from image
ANTsR function: `labelStats`
Arguments
---------
image : ANTsImage
Image from which statistics will be calculated
label_image : ANTsImage
Label image
Returns
-------
ndarray ?
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') , 2 )
>>> image = ants.resample_image( image, (64,64), 1, 0 )
>>> mask = ants.get_mask(image)
>>> segs1 = ants.kmeans_segmentation( image, 3 )
>>> stats = ants.label_stats(image, segs1['segmentation'])
"""
image_float = image.clone('float')
label_image_int = label_image.clone('unsigned int')
libfn = utils.get_lib_fn('labelStats%iD' % image.dimension)
df = libfn(image_float.pointer, label_image_int.pointer)
#df = df[order(df$LabelValue), ]
return pd.DataFrame(df)
|
[
"def",
"label_stats",
"(",
"image",
",",
"label_image",
")",
":",
"image_float",
"=",
"image",
".",
"clone",
"(",
"'float'",
")",
"label_image_int",
"=",
"label_image",
".",
"clone",
"(",
"'unsigned int'",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'labelStats%iD'",
"%",
"image",
".",
"dimension",
")",
"df",
"=",
"libfn",
"(",
"image_float",
".",
"pointer",
",",
"label_image_int",
".",
"pointer",
")",
"#df = df[order(df$LabelValue), ]",
"return",
"pd",
".",
"DataFrame",
"(",
"df",
")"
] |
Get label statistics from image
ANTsR function: `labelStats`
Arguments
---------
image : ANTsImage
Image from which statistics will be calculated
label_image : ANTsImage
Label image
Returns
-------
ndarray ?
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') , 2 )
>>> image = ants.resample_image( image, (64,64), 1, 0 )
>>> mask = ants.get_mask(image)
>>> segs1 = ants.kmeans_segmentation( image, 3 )
>>> stats = ants.label_stats(image, segs1['segmentation'])
|
[
"Get",
"label",
"statistics",
"from",
"image"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/utils/label_stats.py#L8-L41
|
237,192
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.spacing
|
def spacing(self):
"""
Get image spacing
Returns
-------
tuple
"""
libfn = utils.get_lib_fn('getSpacing%s'%self._libsuffix)
return libfn(self.pointer)
|
python
|
def spacing(self):
"""
Get image spacing
Returns
-------
tuple
"""
libfn = utils.get_lib_fn('getSpacing%s'%self._libsuffix)
return libfn(self.pointer)
|
[
"def",
"spacing",
"(",
"self",
")",
":",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'getSpacing%s'",
"%",
"self",
".",
"_libsuffix",
")",
"return",
"libfn",
"(",
"self",
".",
"pointer",
")"
] |
Get image spacing
Returns
-------
tuple
|
[
"Get",
"image",
"spacing"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L95-L104
|
237,193
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.set_spacing
|
def set_spacing(self, new_spacing):
"""
Set image spacing
Arguments
---------
new_spacing : tuple or list
updated spacing for the image.
should have one value for each dimension
Returns
-------
None
"""
if not isinstance(new_spacing, (tuple, list)):
raise ValueError('arg must be tuple or list')
if len(new_spacing) != self.dimension:
raise ValueError('must give a spacing value for each dimension (%i)' % self.dimension)
libfn = utils.get_lib_fn('setSpacing%s'%self._libsuffix)
libfn(self.pointer, new_spacing)
|
python
|
def set_spacing(self, new_spacing):
"""
Set image spacing
Arguments
---------
new_spacing : tuple or list
updated spacing for the image.
should have one value for each dimension
Returns
-------
None
"""
if not isinstance(new_spacing, (tuple, list)):
raise ValueError('arg must be tuple or list')
if len(new_spacing) != self.dimension:
raise ValueError('must give a spacing value for each dimension (%i)' % self.dimension)
libfn = utils.get_lib_fn('setSpacing%s'%self._libsuffix)
libfn(self.pointer, new_spacing)
|
[
"def",
"set_spacing",
"(",
"self",
",",
"new_spacing",
")",
":",
"if",
"not",
"isinstance",
"(",
"new_spacing",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"raise",
"ValueError",
"(",
"'arg must be tuple or list'",
")",
"if",
"len",
"(",
"new_spacing",
")",
"!=",
"self",
".",
"dimension",
":",
"raise",
"ValueError",
"(",
"'must give a spacing value for each dimension (%i)'",
"%",
"self",
".",
"dimension",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'setSpacing%s'",
"%",
"self",
".",
"_libsuffix",
")",
"libfn",
"(",
"self",
".",
"pointer",
",",
"new_spacing",
")"
] |
Set image spacing
Arguments
---------
new_spacing : tuple or list
updated spacing for the image.
should have one value for each dimension
Returns
-------
None
|
[
"Set",
"image",
"spacing"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L106-L126
|
237,194
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.origin
|
def origin(self):
"""
Get image origin
Returns
-------
tuple
"""
libfn = utils.get_lib_fn('getOrigin%s'%self._libsuffix)
return libfn(self.pointer)
|
python
|
def origin(self):
"""
Get image origin
Returns
-------
tuple
"""
libfn = utils.get_lib_fn('getOrigin%s'%self._libsuffix)
return libfn(self.pointer)
|
[
"def",
"origin",
"(",
"self",
")",
":",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'getOrigin%s'",
"%",
"self",
".",
"_libsuffix",
")",
"return",
"libfn",
"(",
"self",
".",
"pointer",
")"
] |
Get image origin
Returns
-------
tuple
|
[
"Get",
"image",
"origin"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L129-L138
|
237,195
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.set_origin
|
def set_origin(self, new_origin):
"""
Set image origin
Arguments
---------
new_origin : tuple or list
updated origin for the image.
should have one value for each dimension
Returns
-------
None
"""
if not isinstance(new_origin, (tuple, list)):
raise ValueError('arg must be tuple or list')
if len(new_origin) != self.dimension:
raise ValueError('must give a origin value for each dimension (%i)' % self.dimension)
libfn = utils.get_lib_fn('setOrigin%s'%self._libsuffix)
libfn(self.pointer, new_origin)
|
python
|
def set_origin(self, new_origin):
"""
Set image origin
Arguments
---------
new_origin : tuple or list
updated origin for the image.
should have one value for each dimension
Returns
-------
None
"""
if not isinstance(new_origin, (tuple, list)):
raise ValueError('arg must be tuple or list')
if len(new_origin) != self.dimension:
raise ValueError('must give a origin value for each dimension (%i)' % self.dimension)
libfn = utils.get_lib_fn('setOrigin%s'%self._libsuffix)
libfn(self.pointer, new_origin)
|
[
"def",
"set_origin",
"(",
"self",
",",
"new_origin",
")",
":",
"if",
"not",
"isinstance",
"(",
"new_origin",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"raise",
"ValueError",
"(",
"'arg must be tuple or list'",
")",
"if",
"len",
"(",
"new_origin",
")",
"!=",
"self",
".",
"dimension",
":",
"raise",
"ValueError",
"(",
"'must give a origin value for each dimension (%i)'",
"%",
"self",
".",
"dimension",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'setOrigin%s'",
"%",
"self",
".",
"_libsuffix",
")",
"libfn",
"(",
"self",
".",
"pointer",
",",
"new_origin",
")"
] |
Set image origin
Arguments
---------
new_origin : tuple or list
updated origin for the image.
should have one value for each dimension
Returns
-------
None
|
[
"Set",
"image",
"origin"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L140-L160
|
237,196
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.direction
|
def direction(self):
"""
Get image direction
Returns
-------
tuple
"""
libfn = utils.get_lib_fn('getDirection%s'%self._libsuffix)
return libfn(self.pointer)
|
python
|
def direction(self):
"""
Get image direction
Returns
-------
tuple
"""
libfn = utils.get_lib_fn('getDirection%s'%self._libsuffix)
return libfn(self.pointer)
|
[
"def",
"direction",
"(",
"self",
")",
":",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'getDirection%s'",
"%",
"self",
".",
"_libsuffix",
")",
"return",
"libfn",
"(",
"self",
".",
"pointer",
")"
] |
Get image direction
Returns
-------
tuple
|
[
"Get",
"image",
"direction"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L163-L172
|
237,197
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.set_direction
|
def set_direction(self, new_direction):
"""
Set image direction
Arguments
---------
new_direction : numpy.ndarray or tuple or list
updated direction for the image.
should have one value for each dimension
Returns
-------
None
"""
if isinstance(new_direction, (tuple,list)):
new_direction = np.asarray(new_direction)
if not isinstance(new_direction, np.ndarray):
raise ValueError('arg must be np.ndarray or tuple or list')
if len(new_direction) != self.dimension:
raise ValueError('must give a origin value for each dimension (%i)' % self.dimension)
libfn = utils.get_lib_fn('setDirection%s'%self._libsuffix)
libfn(self.pointer, new_direction)
|
python
|
def set_direction(self, new_direction):
"""
Set image direction
Arguments
---------
new_direction : numpy.ndarray or tuple or list
updated direction for the image.
should have one value for each dimension
Returns
-------
None
"""
if isinstance(new_direction, (tuple,list)):
new_direction = np.asarray(new_direction)
if not isinstance(new_direction, np.ndarray):
raise ValueError('arg must be np.ndarray or tuple or list')
if len(new_direction) != self.dimension:
raise ValueError('must give a origin value for each dimension (%i)' % self.dimension)
libfn = utils.get_lib_fn('setDirection%s'%self._libsuffix)
libfn(self.pointer, new_direction)
|
[
"def",
"set_direction",
"(",
"self",
",",
"new_direction",
")",
":",
"if",
"isinstance",
"(",
"new_direction",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"new_direction",
"=",
"np",
".",
"asarray",
"(",
"new_direction",
")",
"if",
"not",
"isinstance",
"(",
"new_direction",
",",
"np",
".",
"ndarray",
")",
":",
"raise",
"ValueError",
"(",
"'arg must be np.ndarray or tuple or list'",
")",
"if",
"len",
"(",
"new_direction",
")",
"!=",
"self",
".",
"dimension",
":",
"raise",
"ValueError",
"(",
"'must give a origin value for each dimension (%i)'",
"%",
"self",
".",
"dimension",
")",
"libfn",
"=",
"utils",
".",
"get_lib_fn",
"(",
"'setDirection%s'",
"%",
"self",
".",
"_libsuffix",
")",
"libfn",
"(",
"self",
".",
"pointer",
",",
"new_direction",
")"
] |
Set image direction
Arguments
---------
new_direction : numpy.ndarray or tuple or list
updated direction for the image.
should have one value for each dimension
Returns
-------
None
|
[
"Set",
"image",
"direction"
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L174-L197
|
237,198
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.astype
|
def astype(self, dtype):
"""
Cast & clone an ANTsImage to a given numpy datatype.
Map:
uint8 : unsigned char
uint32 : unsigned int
float32 : float
float64 : double
"""
if dtype not in _supported_dtypes:
raise ValueError('Datatype %s not supported. Supported types are %s' % (dtype, _supported_dtypes))
pixeltype = _npy_to_itk_map[dtype]
return self.clone(pixeltype)
|
python
|
def astype(self, dtype):
"""
Cast & clone an ANTsImage to a given numpy datatype.
Map:
uint8 : unsigned char
uint32 : unsigned int
float32 : float
float64 : double
"""
if dtype not in _supported_dtypes:
raise ValueError('Datatype %s not supported. Supported types are %s' % (dtype, _supported_dtypes))
pixeltype = _npy_to_itk_map[dtype]
return self.clone(pixeltype)
|
[
"def",
"astype",
"(",
"self",
",",
"dtype",
")",
":",
"if",
"dtype",
"not",
"in",
"_supported_dtypes",
":",
"raise",
"ValueError",
"(",
"'Datatype %s not supported. Supported types are %s'",
"%",
"(",
"dtype",
",",
"_supported_dtypes",
")",
")",
"pixeltype",
"=",
"_npy_to_itk_map",
"[",
"dtype",
"]",
"return",
"self",
".",
"clone",
"(",
"pixeltype",
")"
] |
Cast & clone an ANTsImage to a given numpy datatype.
Map:
uint8 : unsigned char
uint32 : unsigned int
float32 : float
float64 : double
|
[
"Cast",
"&",
"clone",
"an",
"ANTsImage",
"to",
"a",
"given",
"numpy",
"datatype",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L302-L316
|
237,199
|
ANTsX/ANTsPy
|
ants/core/ants_image.py
|
ANTsImage.new_image_like
|
def new_image_like(self, data):
"""
Create a new ANTsImage with the same header information, but with
a new image array.
Arguments
---------
data : ndarray or py::capsule
New array or pointer for the image.
It must have the same shape as the current
image data.
Returns
-------
ANTsImage
"""
if not isinstance(data, np.ndarray):
raise ValueError('data must be a numpy array')
if not self.has_components:
if data.shape != self.shape:
raise ValueError('given array shape (%s) and image array shape (%s) do not match' % (data.shape, self.shape))
else:
if (data.shape[-1] != self.components) or (data.shape[:-1] != self.shape):
raise ValueError('given array shape (%s) and image array shape (%s) do not match' % (data.shape[1:], self.shape))
return iio2.from_numpy(data, origin=self.origin,
spacing=self.spacing, direction=self.direction,
has_components=self.has_components)
|
python
|
def new_image_like(self, data):
"""
Create a new ANTsImage with the same header information, but with
a new image array.
Arguments
---------
data : ndarray or py::capsule
New array or pointer for the image.
It must have the same shape as the current
image data.
Returns
-------
ANTsImage
"""
if not isinstance(data, np.ndarray):
raise ValueError('data must be a numpy array')
if not self.has_components:
if data.shape != self.shape:
raise ValueError('given array shape (%s) and image array shape (%s) do not match' % (data.shape, self.shape))
else:
if (data.shape[-1] != self.components) or (data.shape[:-1] != self.shape):
raise ValueError('given array shape (%s) and image array shape (%s) do not match' % (data.shape[1:], self.shape))
return iio2.from_numpy(data, origin=self.origin,
spacing=self.spacing, direction=self.direction,
has_components=self.has_components)
|
[
"def",
"new_image_like",
"(",
"self",
",",
"data",
")",
":",
"if",
"not",
"isinstance",
"(",
"data",
",",
"np",
".",
"ndarray",
")",
":",
"raise",
"ValueError",
"(",
"'data must be a numpy array'",
")",
"if",
"not",
"self",
".",
"has_components",
":",
"if",
"data",
".",
"shape",
"!=",
"self",
".",
"shape",
":",
"raise",
"ValueError",
"(",
"'given array shape (%s) and image array shape (%s) do not match'",
"%",
"(",
"data",
".",
"shape",
",",
"self",
".",
"shape",
")",
")",
"else",
":",
"if",
"(",
"data",
".",
"shape",
"[",
"-",
"1",
"]",
"!=",
"self",
".",
"components",
")",
"or",
"(",
"data",
".",
"shape",
"[",
":",
"-",
"1",
"]",
"!=",
"self",
".",
"shape",
")",
":",
"raise",
"ValueError",
"(",
"'given array shape (%s) and image array shape (%s) do not match'",
"%",
"(",
"data",
".",
"shape",
"[",
"1",
":",
"]",
",",
"self",
".",
"shape",
")",
")",
"return",
"iio2",
".",
"from_numpy",
"(",
"data",
",",
"origin",
"=",
"self",
".",
"origin",
",",
"spacing",
"=",
"self",
".",
"spacing",
",",
"direction",
"=",
"self",
".",
"direction",
",",
"has_components",
"=",
"self",
".",
"has_components",
")"
] |
Create a new ANTsImage with the same header information, but with
a new image array.
Arguments
---------
data : ndarray or py::capsule
New array or pointer for the image.
It must have the same shape as the current
image data.
Returns
-------
ANTsImage
|
[
"Create",
"a",
"new",
"ANTsImage",
"with",
"the",
"same",
"header",
"information",
"but",
"with",
"a",
"new",
"image",
"array",
"."
] |
638020af2cdfc5ff4bdb9809ffe67aa505727a3b
|
https://github.com/ANTsX/ANTsPy/blob/638020af2cdfc5ff4bdb9809ffe67aa505727a3b/ants/core/ants_image.py#L318-L345
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.