Column schema (12 columns per record):
    id                int32   values 0 to 252k
    repo              string  lengths 7 to 55
    path              string  lengths 4 to 127
    func_name         string  lengths 1 to 88
    original_string   string  lengths 75 to 19.8k
    language          string  1 distinct value
    code              string  lengths 75 to 19.8k
    code_tokens       list
    docstring         string  lengths 3 to 17.3k
    docstring_tokens  list
    sha               string  length 40
    url               string  lengths 87 to 242
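Each record below fills these twelve columns in order, one value per line, for Python functions extracted from the serge-sans-paille/pythran repository at sha 7e1b5af2. A minimal sketch of how a CodeSearchNet-style split with this schema could be consumed through the Hugging Face datasets library; the dataset id "user/codesearchnet-python" is a placeholder assumption, not something named in this dump:

from datasets import load_dataset

# Placeholder dataset id; substitute the hub path this preview was rendered from.
ds = load_dataset("user/codesearchnet-python", split="train")

row = ds[0]
print(row["repo"], row["path"], row["func_name"])  # provenance columns
print(row["code"])       # full function source, newlines intact
print(row["docstring"])  # extracted documentation string
print(row["url"])        # deep link to the exact blob and line range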
237,800
serge-sans-paille/pythran
pythran/analyses/range_values.py
RangeValues.generic_visit
def generic_visit(self, node):
    """ Other nodes are not known and range value neither. """
    super(RangeValues, self).generic_visit(node)
    return self.add(node, UNKNOWN_RANGE)
python
def generic_visit(self, node):
    """ Other nodes are not known and range value neither. """
    super(RangeValues, self).generic_visit(node)
    return self.add(node, UNKNOWN_RANGE)
[ "def", "generic_visit", "(", "self", ",", "node", ")", ":", "super", "(", "RangeValues", ",", "self", ")", ".", "generic_visit", "(", "node", ")", "return", "self", ".", "add", "(", "node", ",", "UNKNOWN_RANGE", ")" ]
Other nodes are not known and range value neither.
[ "Other", "nodes", "are", "not", "known", "and", "range", "value", "neither", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/range_values.py#L448-L451
237,801
serge-sans-paille/pythran
pythran/run.py
compile_flags
def compile_flags(args):
    """
    Build a dictionnary with an entry for cppflags, ldflags, and cxxflags.

    These options are filled according to the command line defined options
    """
    compiler_options = {
        'define_macros': args.defines,
        'undef_macros': args.undefs,
        'include_dirs': args.include_dirs,
        'extra_compile_args': args.extra_flags,
        'library_dirs': args.libraries_dir,
        'extra_link_args': args.extra_flags,
    }
    for param in ('opts', ):
        val = getattr(args, param, None)
        if val:
            compiler_options[param] = val
    return compiler_options
python
def compile_flags(args):
    """
    Build a dictionnary with an entry for cppflags, ldflags, and cxxflags.

    These options are filled according to the command line defined options
    """
    compiler_options = {
        'define_macros': args.defines,
        'undef_macros': args.undefs,
        'include_dirs': args.include_dirs,
        'extra_compile_args': args.extra_flags,
        'library_dirs': args.libraries_dir,
        'extra_link_args': args.extra_flags,
    }
    for param in ('opts', ):
        val = getattr(args, param, None)
        if val:
            compiler_options[param] = val
    return compiler_options
[ "def", "compile_flags", "(", "args", ")", ":", "compiler_options", "=", "{", "'define_macros'", ":", "args", ".", "defines", ",", "'undef_macros'", ":", "args", ".", "undefs", ",", "'include_dirs'", ":", "args", ".", "include_dirs", ",", "'extra_compile_args'", ":", "args", ".", "extra_flags", ",", "'library_dirs'", ":", "args", ".", "libraries_dir", ",", "'extra_link_args'", ":", "args", ".", "extra_flags", ",", "}", "for", "param", "in", "(", "'opts'", ",", ")", ":", "val", "=", "getattr", "(", "args", ",", "param", ",", "None", ")", "if", "val", ":", "compiler_options", "[", "param", "]", "=", "val", "return", "compiler_options" ]
Build a dictionnary with an entry for cppflags, ldflags, and cxxflags. These options are filled according to the command line defined options
[ "Build", "a", "dictionnary", "with", "an", "entry", "for", "cppflags", "ldflags", "and", "cxxflags", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/run.py#L25-L46
237,802
serge-sans-paille/pythran
pythran/optimizations/iter_transformation.py
IterTransformation.find_matching_builtin
def find_matching_builtin(self, node):
    """
    Return matched keyword.

    If the node alias on a correct keyword (and only it), it matches.
    """
    for path in EQUIVALENT_ITERATORS.keys():
        correct_alias = {path_to_node(path)}
        if self.aliases[node.func] == correct_alias:
            return path
python
def find_matching_builtin(self, node):
    """
    Return matched keyword.

    If the node alias on a correct keyword (and only it), it matches.
    """
    for path in EQUIVALENT_ITERATORS.keys():
        correct_alias = {path_to_node(path)}
        if self.aliases[node.func] == correct_alias:
            return path
[ "def", "find_matching_builtin", "(", "self", ",", "node", ")", ":", "for", "path", "in", "EQUIVALENT_ITERATORS", ".", "keys", "(", ")", ":", "correct_alias", "=", "{", "path_to_node", "(", "path", ")", "}", "if", "self", ".", "aliases", "[", "node", ".", "func", "]", "==", "correct_alias", ":", "return", "path" ]
Return matched keyword. If the node alias on a correct keyword (and only it), it matches.
[ "Return", "matched", "keyword", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/iter_transformation.py#L58-L67
237,803
serge-sans-paille/pythran
pythran/optimizations/iter_transformation.py
IterTransformation.visit_Module
def visit_Module(self, node):
    """Add itertools import for imap, izip or ifilter iterator."""
    self.generic_visit(node)
    import_alias = ast.alias(name='itertools', asname=mangle('itertools'))
    if self.use_itertools:
        importIt = ast.Import(names=[import_alias])
        node.body.insert(0, importIt)
    return node
python
def visit_Module(self, node):
    """Add itertools import for imap, izip or ifilter iterator."""
    self.generic_visit(node)
    import_alias = ast.alias(name='itertools', asname=mangle('itertools'))
    if self.use_itertools:
        importIt = ast.Import(names=[import_alias])
        node.body.insert(0, importIt)
    return node
[ "def", "visit_Module", "(", "self", ",", "node", ")", ":", "self", ".", "generic_visit", "(", "node", ")", "import_alias", "=", "ast", ".", "alias", "(", "name", "=", "'itertools'", ",", "asname", "=", "mangle", "(", "'itertools'", ")", ")", "if", "self", ".", "use_itertools", ":", "importIt", "=", "ast", ".", "Import", "(", "names", "=", "[", "import_alias", "]", ")", "node", ".", "body", ".", "insert", "(", "0", ",", "importIt", ")", "return", "node" ]
Add itertools import for imap, izip or ifilter iterator.
[ "Add", "itertools", "import", "for", "imap", "izip", "or", "ifilter", "iterator", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/iter_transformation.py#L69-L76
237,804
serge-sans-paille/pythran
pythran/optimizations/iter_transformation.py
IterTransformation.visit_Call
def visit_Call(self, node):
    """Replace function call by its correct iterator if it is possible."""
    if node in self.potential_iterator:
        matched_path = self.find_matching_builtin(node)
        if matched_path is None:
            return self.generic_visit(node)
        # Special handling for map which can't be turn to imap with None as
        # a parameter as map(None, [1, 2]) == [1, 2] while
        # list(imap(None, [1, 2])) == [(1,), (2,)]
        if (matched_path[1] == "map" and
                MODULES["__builtin__"]["None"] in self.aliases[node.args[0]]):
            return self.generic_visit(node)
        # if a dtype conversion is implied
        if matched_path[1] in ('array', 'asarray') and len(node.args) != 1:
            return self.generic_visit(node)
        path = EQUIVALENT_ITERATORS[matched_path]
        if path:
            node.func = path_to_attr(path)
            self.use_itertools |= path[0] == 'itertools'
        else:
            node = node.args[0]
        self.update = True
    return self.generic_visit(node)
python
def visit_Call(self, node):
    """Replace function call by its correct iterator if it is possible."""
    if node in self.potential_iterator:
        matched_path = self.find_matching_builtin(node)
        if matched_path is None:
            return self.generic_visit(node)
        # Special handling for map which can't be turn to imap with None as
        # a parameter as map(None, [1, 2]) == [1, 2] while
        # list(imap(None, [1, 2])) == [(1,), (2,)]
        if (matched_path[1] == "map" and
                MODULES["__builtin__"]["None"] in self.aliases[node.args[0]]):
            return self.generic_visit(node)
        # if a dtype conversion is implied
        if matched_path[1] in ('array', 'asarray') and len(node.args) != 1:
            return self.generic_visit(node)
        path = EQUIVALENT_ITERATORS[matched_path]
        if path:
            node.func = path_to_attr(path)
            self.use_itertools |= path[0] == 'itertools'
        else:
            node = node.args[0]
        self.update = True
    return self.generic_visit(node)
[ "def", "visit_Call", "(", "self", ",", "node", ")", ":", "if", "node", "in", "self", ".", "potential_iterator", ":", "matched_path", "=", "self", ".", "find_matching_builtin", "(", "node", ")", "if", "matched_path", "is", "None", ":", "return", "self", ".", "generic_visit", "(", "node", ")", "# Special handling for map which can't be turn to imap with None as", "# a parameter as map(None, [1, 2]) == [1, 2] while", "# list(imap(None, [1, 2])) == [(1,), (2,)]", "if", "(", "matched_path", "[", "1", "]", "==", "\"map\"", "and", "MODULES", "[", "\"__builtin__\"", "]", "[", "\"None\"", "]", "in", "self", ".", "aliases", "[", "node", ".", "args", "[", "0", "]", "]", ")", ":", "return", "self", ".", "generic_visit", "(", "node", ")", "# if a dtype conversion is implied", "if", "matched_path", "[", "1", "]", "in", "(", "'array'", ",", "'asarray'", ")", "and", "len", "(", "node", ".", "args", ")", "!=", "1", ":", "return", "self", ".", "generic_visit", "(", "node", ")", "path", "=", "EQUIVALENT_ITERATORS", "[", "matched_path", "]", "if", "path", ":", "node", ".", "func", "=", "path_to_attr", "(", "path", ")", "self", ".", "use_itertools", "|=", "path", "[", "0", "]", "==", "'itertools'", "else", ":", "node", "=", "node", ".", "args", "[", "0", "]", "self", ".", "update", "=", "True", "return", "self", ".", "generic_visit", "(", "node", ")" ]
Replace function call by its correct iterator if it is possible.
[ "Replace", "function", "call", "by", "its", "correct", "iterator", "if", "it", "is", "possible", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/iter_transformation.py#L78-L105
237,805
serge-sans-paille/pythran
docs/papers/iop2014/xp/numba/hyantes.py
run
def run(xmin, ymin, xmax, ymax, step, range_, range_x, range_y, t):
    X, Y = t.shape
    pt = np.zeros((X, Y))
    "omp parallel for"
    for i in range(X):
        for j in range(Y):
            for k in t:
                tmp = 6368. * np.arccos(
                    np.cos(xmin + step * i) * np.cos(k[0]) *
                    np.cos((ymin + step * j) - k[1]) +
                    np.sin(xmin + step * i) * np.sin(k[0]))
                if tmp < range_:
                    pt[i][j] += k[2] / (1 + tmp)
    return pt
python
def run(xmin, ymin, xmax, ymax, step, range_, range_x, range_y, t):
    X, Y = t.shape
    pt = np.zeros((X, Y))
    "omp parallel for"
    for i in range(X):
        for j in range(Y):
            for k in t:
                tmp = 6368. * np.arccos(
                    np.cos(xmin + step * i) * np.cos(k[0]) *
                    np.cos((ymin + step * j) - k[1]) +
                    np.sin(xmin + step * i) * np.sin(k[0]))
                if tmp < range_:
                    pt[i][j] += k[2] / (1 + tmp)
    return pt
[ "def", "run", "(", "xmin", ",", "ymin", ",", "xmax", ",", "ymax", ",", "step", ",", "range_", ",", "range_x", ",", "range_y", ",", "t", ")", ":", "X", ",", "Y", "=", "t", ".", "shape", "pt", "=", "np", ".", "zeros", "(", "(", "X", ",", "Y", ")", ")", "for", "i", "in", "range", "(", "X", ")", ":", "for", "j", "in", "range", "(", "Y", ")", ":", "for", "k", "in", "t", ":", "tmp", "=", "6368.", "*", "np", ".", "arccos", "(", "np", ".", "cos", "(", "xmin", "+", "step", "*", "i", ")", "*", "np", ".", "cos", "(", "k", "[", "0", "]", ")", "*", "np", ".", "cos", "(", "(", "ymin", "+", "step", "*", "j", ")", "-", "k", "[", "1", "]", ")", "+", "np", ".", "sin", "(", "xmin", "+", "step", "*", "i", ")", "*", "np", ".", "sin", "(", "k", "[", "0", "]", ")", ")", "if", "tmp", "<", "range_", ":", "pt", "[", "i", "]", "[", "j", "]", "+=", "k", "[", "2", "]", "/", "(", "1", "+", "tmp", ")", "return", "pt" ]
omp parallel for
[ "omp", "parallel", "for" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/docs/papers/iop2014/xp/numba/hyantes.py#L4-L14
237,806
serge-sans-paille/pythran
pythran/interval.py
max_values
def max_values(args):
    """ Return possible range for max function. """
    return Interval(max(x.low for x in args), max(x.high for x in args))
python
def max_values(args):
    """ Return possible range for max function. """
    return Interval(max(x.low for x in args), max(x.high for x in args))
[ "def", "max_values", "(", "args", ")", ":", "return", "Interval", "(", "max", "(", "x", ".", "low", "for", "x", "in", "args", ")", ",", "max", "(", "x", ".", "high", "for", "x", "in", "args", ")", ")" ]
Return possible range for max function.
[ "Return", "possible", "range", "for", "max", "function", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/interval.py#L405-L407
237,807
serge-sans-paille/pythran
pythran/interval.py
min_values
def min_values(args):
    """ Return possible range for min function. """
    return Interval(min(x.low for x in args), min(x.high for x in args))
python
def min_values(args):
    """ Return possible range for min function. """
    return Interval(min(x.low for x in args), min(x.high for x in args))
[ "def", "min_values", "(", "args", ")", ":", "return", "Interval", "(", "min", "(", "x", ".", "low", "for", "x", "in", "args", ")", ",", "min", "(", "x", ".", "high", "for", "x", "in", "args", ")", ")" ]
Return possible range for min function.
[ "Return", "possible", "range", "for", "min", "function", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/interval.py#L410-L412
237,808
serge-sans-paille/pythran
pythran/interval.py
Interval.union
def union(self, other):
    """ Intersect current range with other."""
    return Interval(min(self.low, other.low), max(self.high, other.high))
python
def union(self, other):
    """ Intersect current range with other."""
    return Interval(min(self.low, other.low), max(self.high, other.high))
[ "def", "union", "(", "self", ",", "other", ")", ":", "return", "Interval", "(", "min", "(", "self", ".", "low", ",", "other", ".", "low", ")", ",", "max", "(", "self", ".", "high", ",", "other", ".", "high", ")", ")" ]
Intersect current range with other.
[ "Intersect", "current", "range", "with", "other", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/interval.py#L38-L40
237,809
serge-sans-paille/pythran
pythran/interval.py
Interval.widen
def widen(self, other):
    """ Widen current range. """
    if self.low < other.low:
        low = -float("inf")
    else:
        low = self.low
    if self.high > other.high:
        high = float("inf")
    else:
        high = self.high
    return Interval(low, high)
python
def widen(self, other):
    """ Widen current range. """
    if self.low < other.low:
        low = -float("inf")
    else:
        low = self.low
    if self.high > other.high:
        high = float("inf")
    else:
        high = self.high
    return Interval(low, high)
[ "def", "widen", "(", "self", ",", "other", ")", ":", "if", "self", ".", "low", "<", "other", ".", "low", ":", "low", "=", "-", "float", "(", "\"inf\"", ")", "else", ":", "low", "=", "self", ".", "low", "if", "self", ".", "high", ">", "other", ".", "high", ":", "high", "=", "float", "(", "\"inf\"", ")", "else", ":", "high", "=", "self", ".", "high", "return", "Interval", "(", "low", ",", "high", ")" ]
Widen current range.
[ "Widen", "current", "range", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/interval.py#L45-L55
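The two Interval records above are easy to conflate, not least because union's docstring in the source reads "Intersect": union returns the tightest range covering both operands, while widen pushes any bound of self that lies outside other out to infinity. A minimal standalone sketch of the same logic, assuming a bare Interval(low, high) value type rather than pythran's full class:

# Minimal sketch, not pythran's actual Interval class.
class Interval:
    def __init__(self, low, high):
        self.low, self.high = low, high

    def union(self, other):
        # convex hull of both ranges, as in the record above
        return Interval(min(self.low, other.low), max(self.high, other.high))

    def widen(self, other):
        # a bound of self lying outside other jumps to +/- infinity
        low = -float("inf") if self.low < other.low else self.low
        high = float("inf") if self.high > other.high else self.high
        return Interval(low, high)

print(vars(Interval(0, 5).union(Interval(3, 10))))    # {'low': 0, 'high': 10}
print(vars(Interval(-2, 12).widen(Interval(0, 10))))  # {'low': -inf, 'high': inf}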
237,810
serge-sans-paille/pythran
pythran/transformations/remove_named_arguments.py
RemoveNamedArguments.handle_keywords
def handle_keywords(self, func, node, offset=0):
    '''
    Gather keywords to positional argument information

    Assumes the named parameter exist, raises a KeyError otherwise
    '''
    func_argument_names = {}
    for i, arg in enumerate(func.args.args[offset:]):
        assert isinstance(arg, ast.Name)
        func_argument_names[arg.id] = i

    nargs = len(func.args.args) - offset
    defaults = func.args.defaults

    keywords = {func_argument_names[kw.arg]: kw.value
                for kw in node.keywords}
    node.args.extend([None] * (1 + max(keywords.keys()) - len(node.args)))

    replacements = {}
    for index, arg in enumerate(node.args):
        if arg is None:
            if index in keywords:
                replacements[index] = deepcopy(keywords[index])
            else:
                # must be a default value
                replacements[index] = deepcopy(defaults[index - nargs])
    return replacements
python
def handle_keywords(self, func, node, offset=0):
    '''
    Gather keywords to positional argument information

    Assumes the named parameter exist, raises a KeyError otherwise
    '''
    func_argument_names = {}
    for i, arg in enumerate(func.args.args[offset:]):
        assert isinstance(arg, ast.Name)
        func_argument_names[arg.id] = i

    nargs = len(func.args.args) - offset
    defaults = func.args.defaults

    keywords = {func_argument_names[kw.arg]: kw.value
                for kw in node.keywords}
    node.args.extend([None] * (1 + max(keywords.keys()) - len(node.args)))

    replacements = {}
    for index, arg in enumerate(node.args):
        if arg is None:
            if index in keywords:
                replacements[index] = deepcopy(keywords[index])
            else:
                # must be a default value
                replacements[index] = deepcopy(defaults[index - nargs])
    return replacements
[ "def", "handle_keywords", "(", "self", ",", "func", ",", "node", ",", "offset", "=", "0", ")", ":", "func_argument_names", "=", "{", "}", "for", "i", ",", "arg", "in", "enumerate", "(", "func", ".", "args", ".", "args", "[", "offset", ":", "]", ")", ":", "assert", "isinstance", "(", "arg", ",", "ast", ".", "Name", ")", "func_argument_names", "[", "arg", ".", "id", "]", "=", "i", "nargs", "=", "len", "(", "func", ".", "args", ".", "args", ")", "-", "offset", "defaults", "=", "func", ".", "args", ".", "defaults", "keywords", "=", "{", "func_argument_names", "[", "kw", ".", "arg", "]", ":", "kw", ".", "value", "for", "kw", "in", "node", ".", "keywords", "}", "node", ".", "args", ".", "extend", "(", "[", "None", "]", "*", "(", "1", "+", "max", "(", "keywords", ".", "keys", "(", ")", ")", "-", "len", "(", "node", ".", "args", ")", ")", ")", "replacements", "=", "{", "}", "for", "index", ",", "arg", "in", "enumerate", "(", "node", ".", "args", ")", ":", "if", "arg", "is", "None", ":", "if", "index", "in", "keywords", ":", "replacements", "[", "index", "]", "=", "deepcopy", "(", "keywords", "[", "index", "]", ")", "else", ":", "# must be a default value", "replacements", "[", "index", "]", "=", "deepcopy", "(", "defaults", "[", "index", "-", "nargs", "]", ")", "return", "replacements" ]
Gather keywords to positional argument information Assumes the named parameter exist, raises a KeyError otherwise
[ "Gather", "keywords", "to", "positional", "argument", "information" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/transformations/remove_named_arguments.py#L38-L62
237,811
serge-sans-paille/pythran
pythran/tables.py
update_effects
def update_effects(self, node):
    """
    Combiner when we update the first argument of a function.

    It turn type of first parameter in combination of all others
    parameters types.
    """
    return [self.combine(node.args[0], node_args_k, register=True,
                         aliasing_type=True)
            for node_args_k in node.args[1:]]
python
def update_effects(self, node):
    """
    Combiner when we update the first argument of a function.

    It turn type of first parameter in combination of all others
    parameters types.
    """
    return [self.combine(node.args[0], node_args_k, register=True,
                         aliasing_type=True)
            for node_args_k in node.args[1:]]
[ "def", "update_effects", "(", "self", ",", "node", ")", ":", "return", "[", "self", ".", "combine", "(", "node", ".", "args", "[", "0", "]", ",", "node_args_k", ",", "register", "=", "True", ",", "aliasing_type", "=", "True", ")", "for", "node_args_k", "in", "node", ".", "args", "[", "1", ":", "]", "]" ]
Combiner when we update the first argument of a function. It turn type of first parameter in combination of all others parameters types.
[ "Combiner", "when", "we", "update", "the", "first", "argument", "of", "a", "function", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/tables.py#L161-L170
237,812
serge-sans-paille/pythran
pythran/tables.py
save_method
def save_method(elements, module_path):
    """ Recursively save methods with module name and signature. """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_method(signature, module_path + (elem,))
        elif isinstance(signature, Class):
            save_method(signature.fields, module_path + (elem,))
        elif signature.ismethod():
            # in case of duplicates, there must be a __dispatch__ record
            # and it is the only recorded one
            if elem in methods and module_path[0] != '__dispatch__':
                assert elem in MODULES['__dispatch__']
                path = ('__dispatch__',)
                methods[elem] = (path, MODULES['__dispatch__'][elem])
            else:
                methods[elem] = (module_path, signature)
python
def save_method(elements, module_path):
    """ Recursively save methods with module name and signature. """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_method(signature, module_path + (elem,))
        elif isinstance(signature, Class):
            save_method(signature.fields, module_path + (elem,))
        elif signature.ismethod():
            # in case of duplicates, there must be a __dispatch__ record
            # and it is the only recorded one
            if elem in methods and module_path[0] != '__dispatch__':
                assert elem in MODULES['__dispatch__']
                path = ('__dispatch__',)
                methods[elem] = (path, MODULES['__dispatch__'][elem])
            else:
                methods[elem] = (module_path, signature)
[ "def", "save_method", "(", "elements", ",", "module_path", ")", ":", "for", "elem", ",", "signature", "in", "elements", ".", "items", "(", ")", ":", "if", "isinstance", "(", "signature", ",", "dict", ")", ":", "# Submodule case", "save_method", "(", "signature", ",", "module_path", "+", "(", "elem", ",", ")", ")", "elif", "isinstance", "(", "signature", ",", "Class", ")", ":", "save_method", "(", "signature", ".", "fields", ",", "module_path", "+", "(", "elem", ",", ")", ")", "elif", "signature", ".", "ismethod", "(", ")", ":", "# in case of duplicates, there must be a __dispatch__ record", "# and it is the only recorded one", "if", "elem", "in", "methods", "and", "module_path", "[", "0", "]", "!=", "'__dispatch__'", ":", "assert", "elem", "in", "MODULES", "[", "'__dispatch__'", "]", "path", "=", "(", "'__dispatch__'", ",", ")", "methods", "[", "elem", "]", "=", "(", "path", ",", "MODULES", "[", "'__dispatch__'", "]", "[", "elem", "]", ")", "else", ":", "methods", "[", "elem", "]", "=", "(", "module_path", ",", "signature", ")" ]
Recursively save methods with module name and signature.
[ "Recursively", "save", "methods", "with", "module", "name", "and", "signature", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/tables.py#L4609-L4624
237,813
serge-sans-paille/pythran
pythran/tables.py
save_function
def save_function(elements, module_path):
    """ Recursively save functions with module name and signature. """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_function(signature, module_path + (elem,))
        elif signature.isstaticfunction():
            functions.setdefault(elem, []).append((module_path, signature,))
        elif isinstance(signature, Class):
            save_function(signature.fields, module_path + (elem,))
python
def save_function(elements, module_path):
    """ Recursively save functions with module name and signature. """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_function(signature, module_path + (elem,))
        elif signature.isstaticfunction():
            functions.setdefault(elem, []).append((module_path, signature,))
        elif isinstance(signature, Class):
            save_function(signature.fields, module_path + (elem,))
[ "def", "save_function", "(", "elements", ",", "module_path", ")", ":", "for", "elem", ",", "signature", "in", "elements", ".", "items", "(", ")", ":", "if", "isinstance", "(", "signature", ",", "dict", ")", ":", "# Submodule case", "save_function", "(", "signature", ",", "module_path", "+", "(", "elem", ",", ")", ")", "elif", "signature", ".", "isstaticfunction", "(", ")", ":", "functions", ".", "setdefault", "(", "elem", ",", "[", "]", ")", ".", "append", "(", "(", "module_path", ",", "signature", ",", ")", ")", "elif", "isinstance", "(", "signature", ",", "Class", ")", ":", "save_function", "(", "signature", ".", "fields", ",", "module_path", "+", "(", "elem", ",", ")", ")" ]
Recursively save functions with module name and signature.
[ "Recursively", "save", "functions", "with", "module", "name", "and", "signature", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/tables.py#L4634-L4642
237,814
serge-sans-paille/pythran
pythran/tables.py
save_attribute
def save_attribute(elements, module_path):
    """ Recursively save attributes with module name and signature. """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_attribute(signature, module_path + (elem,))
        elif signature.isattribute():
            assert elem not in attributes  # we need unicity
            attributes[elem] = (module_path, signature,)
        elif isinstance(signature, Class):
            save_attribute(signature.fields, module_path + (elem,))
python
def save_attribute(elements, module_path):
    """ Recursively save attributes with module name and signature. """
    for elem, signature in elements.items():
        if isinstance(signature, dict):  # Submodule case
            save_attribute(signature, module_path + (elem,))
        elif signature.isattribute():
            assert elem not in attributes  # we need unicity
            attributes[elem] = (module_path, signature,)
        elif isinstance(signature, Class):
            save_attribute(signature.fields, module_path + (elem,))
[ "def", "save_attribute", "(", "elements", ",", "module_path", ")", ":", "for", "elem", ",", "signature", "in", "elements", ".", "items", "(", ")", ":", "if", "isinstance", "(", "signature", ",", "dict", ")", ":", "# Submodule case", "save_attribute", "(", "signature", ",", "module_path", "+", "(", "elem", ",", ")", ")", "elif", "signature", ".", "isattribute", "(", ")", ":", "assert", "elem", "not", "in", "attributes", "# we need unicity", "attributes", "[", "elem", "]", "=", "(", "module_path", ",", "signature", ",", ")", "elif", "isinstance", "(", "signature", ",", "Class", ")", ":", "save_attribute", "(", "signature", ".", "fields", ",", "module_path", "+", "(", "elem", ",", ")", ")" ]
Recursively save attributes with module name and signature.
[ "Recursively", "save", "attributes", "with", "module", "name", "and", "signature", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/tables.py#L4653-L4662
237,815
serge-sans-paille/pythran
pythran/optimizations/list_to_tuple.py
ListToTuple.visit_Assign
def visit_Assign(self, node):
    """
    Replace list calls by static_list calls when possible

    >>> import gast as ast
    >>> from pythran import passmanager, backend
    >>> node = ast.parse("def foo(n): x = __builtin__.list(n); x[0] = 0; return __builtin__.tuple(x)")
    >>> pm = passmanager.PassManager("test")
    >>> _, node = pm.apply(ListToTuple, node)
    >>> print(pm.dump(backend.Python, node))
    def foo(n):
        x = __builtin__.pythran.static_list(n)
        x[0] = 0
        return __builtin__.tuple(x)

    >>> node = ast.parse("def foo(n): x = __builtin__.list(n); x[0] = 0; return x")
    >>> pm = passmanager.PassManager("test")
    >>> _, node = pm.apply(ListToTuple, node)
    >>> print(pm.dump(backend.Python, node))
    def foo(n):
        x = __builtin__.list(n)
        x[0] = 0
        return x
    """
    self.generic_visit(node)
    if node.value not in self.fixed_size_list:
        return node
    node.value = self.convert(node.value)
    return node
python
def visit_Assign(self, node):
    """
    Replace list calls by static_list calls when possible

    >>> import gast as ast
    >>> from pythran import passmanager, backend
    >>> node = ast.parse("def foo(n): x = __builtin__.list(n); x[0] = 0; return __builtin__.tuple(x)")
    >>> pm = passmanager.PassManager("test")
    >>> _, node = pm.apply(ListToTuple, node)
    >>> print(pm.dump(backend.Python, node))
    def foo(n):
        x = __builtin__.pythran.static_list(n)
        x[0] = 0
        return __builtin__.tuple(x)

    >>> node = ast.parse("def foo(n): x = __builtin__.list(n); x[0] = 0; return x")
    >>> pm = passmanager.PassManager("test")
    >>> _, node = pm.apply(ListToTuple, node)
    >>> print(pm.dump(backend.Python, node))
    def foo(n):
        x = __builtin__.list(n)
        x[0] = 0
        return x
    """
    self.generic_visit(node)
    if node.value not in self.fixed_size_list:
        return node
    node.value = self.convert(node.value)
    return node
[ "def", "visit_Assign", "(", "self", ",", "node", ")", ":", "self", ".", "generic_visit", "(", "node", ")", "if", "node", ".", "value", "not", "in", "self", ".", "fixed_size_list", ":", "return", "node", "node", ".", "value", "=", "self", ".", "convert", "(", "node", ".", "value", ")", "return", "node" ]
Replace list calls by static_list calls when possible

>>> import gast as ast
>>> from pythran import passmanager, backend
>>> node = ast.parse("def foo(n): x = __builtin__.list(n); x[0] = 0; return __builtin__.tuple(x)")
>>> pm = passmanager.PassManager("test")
>>> _, node = pm.apply(ListToTuple, node)
>>> print(pm.dump(backend.Python, node))
def foo(n):
    x = __builtin__.pythran.static_list(n)
    x[0] = 0
    return __builtin__.tuple(x)

>>> node = ast.parse("def foo(n): x = __builtin__.list(n); x[0] = 0; return x")
>>> pm = passmanager.PassManager("test")
>>> _, node = pm.apply(ListToTuple, node)
>>> print(pm.dump(backend.Python, node))
def foo(n):
    x = __builtin__.list(n)
    x[0] = 0
    return x
[ "Replace", "list", "calls", "by", "static_list", "calls", "when", "possible" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/list_to_tuple.py#L66-L95
237,816
serge-sans-paille/pythran
setup.py
BuildWithThirdParty.copy_pkg
def copy_pkg(self, pkg, src_only=False):
    "Install boost deps from the third_party directory"
    if getattr(self, 'no_' + pkg) is None:
        print('Copying boost dependencies')
        to_copy = pkg,
    else:
        return

    src = os.path.join('third_party', *to_copy)

    # copy to the build tree
    if not src_only:
        target = os.path.join(self.build_lib, 'pythran', *to_copy)
        shutil.rmtree(target, True)
        shutil.copytree(src, target)

    # copy them to the source tree too, needed for sdist
    target = os.path.join('pythran', *to_copy)
    shutil.rmtree(target, True)
    shutil.copytree(src, target)
python
def copy_pkg(self, pkg, src_only=False):
    "Install boost deps from the third_party directory"
    if getattr(self, 'no_' + pkg) is None:
        print('Copying boost dependencies')
        to_copy = pkg,
    else:
        return

    src = os.path.join('third_party', *to_copy)

    # copy to the build tree
    if not src_only:
        target = os.path.join(self.build_lib, 'pythran', *to_copy)
        shutil.rmtree(target, True)
        shutil.copytree(src, target)

    # copy them to the source tree too, needed for sdist
    target = os.path.join('pythran', *to_copy)
    shutil.rmtree(target, True)
    shutil.copytree(src, target)
[ "def", "copy_pkg", "(", "self", ",", "pkg", ",", "src_only", "=", "False", ")", ":", "if", "getattr", "(", "self", ",", "'no_'", "+", "pkg", ")", "is", "None", ":", "print", "(", "'Copying boost dependencies'", ")", "to_copy", "=", "pkg", ",", "else", ":", "return", "src", "=", "os", ".", "path", ".", "join", "(", "'third_party'", ",", "*", "to_copy", ")", "# copy to the build tree", "if", "not", "src_only", ":", "target", "=", "os", ".", "path", ".", "join", "(", "self", ".", "build_lib", ",", "'pythran'", ",", "*", "to_copy", ")", "shutil", ".", "rmtree", "(", "target", ",", "True", ")", "shutil", ".", "copytree", "(", "src", ",", "target", ")", "# copy them to the source tree too, needed for sdist", "target", "=", "os", ".", "path", ".", "join", "(", "'pythran'", ",", "*", "to_copy", ")", "shutil", ".", "rmtree", "(", "target", ",", "True", ")", "shutil", ".", "copytree", "(", "src", ",", "target", ")" ]
Install boost deps from the third_party directory
[ "Install", "boost", "deps", "from", "the", "third_party", "directory" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/setup.py#L75-L95
237,817
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.check_list
def check_list(self, node_list, pattern_list):
    """ Check if list of node are equal. """
    if len(node_list) != len(pattern_list):
        return False
    else:
        return all(Check(node_elt, self.placeholders).visit(pattern_list[i])
                   for i, node_elt in enumerate(node_list))
python
def check_list(self, node_list, pattern_list):
    """ Check if list of node are equal. """
    if len(node_list) != len(pattern_list):
        return False
    else:
        return all(Check(node_elt, self.placeholders).visit(pattern_list[i])
                   for i, node_elt in enumerate(node_list))
[ "def", "check_list", "(", "self", ",", "node_list", ",", "pattern_list", ")", ":", "if", "len", "(", "node_list", ")", "!=", "len", "(", "pattern_list", ")", ":", "return", "False", "else", ":", "return", "all", "(", "Check", "(", "node_elt", ",", "self", ".", "placeholders", ")", ".", "visit", "(", "pattern_list", "[", "i", "]", ")", "for", "i", ",", "node_elt", "in", "enumerate", "(", "node_list", ")", ")" ]
Check if list of node are equal.
[ "Check", "if", "list", "of", "node", "are", "equal", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L67-L74
237,818
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.visit_Placeholder
def visit_Placeholder(self, pattern):
    """
    Save matching node or compare it with the existing one.

    FIXME : What if the new placeholder is a better choice?
    """
    if (pattern.id in self.placeholders and
            not Check(self.node, self.placeholders).visit(
                self.placeholders[pattern.id])):
        return False
    else:
        self.placeholders[pattern.id] = self.node
        return True
python
def visit_Placeholder(self, pattern):
    """
    Save matching node or compare it with the existing one.

    FIXME : What if the new placeholder is a better choice?
    """
    if (pattern.id in self.placeholders and
            not Check(self.node, self.placeholders).visit(
                self.placeholders[pattern.id])):
        return False
    else:
        self.placeholders[pattern.id] = self.node
        return True
[ "def", "visit_Placeholder", "(", "self", ",", "pattern", ")", ":", "if", "(", "pattern", ".", "id", "in", "self", ".", "placeholders", "and", "not", "Check", "(", "self", ".", "node", ",", "self", ".", "placeholders", ")", ".", "visit", "(", "self", ".", "placeholders", "[", "pattern", ".", "id", "]", ")", ")", ":", "return", "False", "else", ":", "self", ".", "placeholders", "[", "pattern", ".", "id", "]", "=", "self", ".", "node", "return", "True" ]
Save matching node or compare it with the existing one. FIXME : What if the new placeholder is a better choice?
[ "Save", "matching", "node", "or", "compare", "it", "with", "the", "existing", "one", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L76-L88
237,819
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.visit_AST_or
def visit_AST_or(self, pattern):
    """ Match if any of the or content match with the other node. """
    return any(self.field_match(self.node, value_or)
               for value_or in pattern.args)
python
def visit_AST_or(self, pattern):
    """ Match if any of the or content match with the other node. """
    return any(self.field_match(self.node, value_or)
               for value_or in pattern.args)
[ "def", "visit_AST_or", "(", "self", ",", "pattern", ")", ":", "return", "any", "(", "self", ".", "field_match", "(", "self", ".", "node", ",", "value_or", ")", "for", "value_or", "in", "pattern", ".", "args", ")" ]
Match if any of the or content match with the other node.
[ "Match", "if", "any", "of", "the", "or", "content", "match", "with", "the", "other", "node", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L95-L98
237,820
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.visit_Set
def visit_Set(self, pattern):
    """ Set have unordered values. """
    if len(pattern.elts) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Set is too long")
    return (isinstance(self.node, Set) and
            any(self.check_list(self.node.elts, pattern_elts)
                for pattern_elts in permutations(pattern.elts)))
python
def visit_Set(self, pattern):
    """ Set have unordered values. """
    if len(pattern.elts) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Set is too long")
    return (isinstance(self.node, Set) and
            any(self.check_list(self.node.elts, pattern_elts)
                for pattern_elts in permutations(pattern.elts)))
[ "def", "visit_Set", "(", "self", ",", "pattern", ")", ":", "if", "len", "(", "pattern", ".", "elts", ")", ">", "MAX_UNORDERED_LENGTH", ":", "raise", "DamnTooLongPattern", "(", "\"Pattern for Set is too long\"", ")", "return", "(", "isinstance", "(", "self", ".", "node", ",", "Set", ")", "and", "any", "(", "self", ".", "check_list", "(", "self", ".", "node", ".", "elts", ",", "pattern_elts", ")", "for", "pattern_elts", "in", "permutations", "(", "pattern", ".", "elts", ")", ")", ")" ]
Set have unordered values.
[ "Set", "have", "unordered", "values", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L100-L106
237,821
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.visit_Dict
def visit_Dict(self, pattern):
    """ Dict can match with unordered values. """
    if not isinstance(self.node, Dict):
        return False
    if len(pattern.keys) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Dict is too long")
    for permutation in permutations(range(len(self.node.keys))):
        for i, value in enumerate(permutation):
            if not self.field_match(self.node.keys[i], pattern.keys[value]):
                break
        else:
            pattern_values = [pattern.values[i] for i in permutation]
            return self.check_list(self.node.values, pattern_values)
    return False
python
def visit_Dict(self, pattern):
    """ Dict can match with unordered values. """
    if not isinstance(self.node, Dict):
        return False
    if len(pattern.keys) > MAX_UNORDERED_LENGTH:
        raise DamnTooLongPattern("Pattern for Dict is too long")
    for permutation in permutations(range(len(self.node.keys))):
        for i, value in enumerate(permutation):
            if not self.field_match(self.node.keys[i], pattern.keys[value]):
                break
        else:
            pattern_values = [pattern.values[i] for i in permutation]
            return self.check_list(self.node.values, pattern_values)
    return False
[ "def", "visit_Dict", "(", "self", ",", "pattern", ")", ":", "if", "not", "isinstance", "(", "self", ".", "node", ",", "Dict", ")", ":", "return", "False", "if", "len", "(", "pattern", ".", "keys", ")", ">", "MAX_UNORDERED_LENGTH", ":", "raise", "DamnTooLongPattern", "(", "\"Pattern for Dict is too long\"", ")", "for", "permutation", "in", "permutations", "(", "range", "(", "len", "(", "self", ".", "node", ".", "keys", ")", ")", ")", ":", "for", "i", ",", "value", "in", "enumerate", "(", "permutation", ")", ":", "if", "not", "self", ".", "field_match", "(", "self", ".", "node", ".", "keys", "[", "i", "]", ",", "pattern", ".", "keys", "[", "value", "]", ")", ":", "break", "else", ":", "pattern_values", "=", "[", "pattern", ".", "values", "[", "i", "]", "for", "i", "in", "permutation", "]", "return", "self", ".", "check_list", "(", "self", ".", "node", ".", "values", ",", "pattern_values", ")", "return", "False" ]
Dict can match with unordered values.
[ "Dict", "can", "match", "with", "unordered", "values", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L108-L122
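visit_Set and visit_Dict above accept a match under any ordering of the pattern's elements, which is why they loop over permutations and refuse patterns longer than MAX_UNORDERED_LENGTH. A small self-contained sketch of that idea, order-insensitive matching of two key lists via permutations, independent of pythran's Check class; the cap of 6 is an assumption, not pythran's actual constant:

from itertools import permutations

MAX_UNORDERED_LENGTH = 6  # assumed cap; pythran's real value is not shown in this dump

def unordered_match(node_keys, pattern_keys):
    # Try every ordering of the pattern; succeed if one ordering lines up pairwise.
    if len(node_keys) != len(pattern_keys):
        return False
    if len(pattern_keys) > MAX_UNORDERED_LENGTH:
        raise ValueError("pattern too long for permutation-based matching")
    return any(all(n == p for n, p in zip(node_keys, perm))
               for perm in permutations(pattern_keys))

print(unordered_match(["a", "b", "c"], ["c", "a", "b"]))  # True
print(unordered_match(["a", "b"], ["a", "x"]))            # False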
237,822
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.field_match
def field_match(self, node_field, pattern_field):
    """
    Check if two fields match.

    Field match if:
        - If it is a list, all values have to match.
        - If if is a node, recursively check it.
        - Otherwise, check values are equal.
    """
    is_good_list = (isinstance(pattern_field, list) and
                    self.check_list(node_field, pattern_field))
    is_good_node = (isinstance(pattern_field, AST) and
                    Check(node_field, self.placeholders).visit(pattern_field))

    def strict_eq(f0, f1):
        try:
            return f0 == f1 or (isnan(f0) and isnan(f1))
        except TypeError:
            return f0 == f1

    is_same = strict_eq(pattern_field, node_field)

    return is_good_list or is_good_node or is_same
python
def field_match(self, node_field, pattern_field):
    """
    Check if two fields match.

    Field match if:
        - If it is a list, all values have to match.
        - If if is a node, recursively check it.
        - Otherwise, check values are equal.
    """
    is_good_list = (isinstance(pattern_field, list) and
                    self.check_list(node_field, pattern_field))
    is_good_node = (isinstance(pattern_field, AST) and
                    Check(node_field, self.placeholders).visit(pattern_field))

    def strict_eq(f0, f1):
        try:
            return f0 == f1 or (isnan(f0) and isnan(f1))
        except TypeError:
            return f0 == f1

    is_same = strict_eq(pattern_field, node_field)

    return is_good_list or is_good_node or is_same
[ "def", "field_match", "(", "self", ",", "node_field", ",", "pattern_field", ")", ":", "is_good_list", "=", "(", "isinstance", "(", "pattern_field", ",", "list", ")", "and", "self", ".", "check_list", "(", "node_field", ",", "pattern_field", ")", ")", "is_good_node", "=", "(", "isinstance", "(", "pattern_field", ",", "AST", ")", "and", "Check", "(", "node_field", ",", "self", ".", "placeholders", ")", ".", "visit", "(", "pattern_field", ")", ")", "def", "strict_eq", "(", "f0", ",", "f1", ")", ":", "try", ":", "return", "f0", "==", "f1", "or", "(", "isnan", "(", "f0", ")", "and", "isnan", "(", "f1", ")", ")", "except", "TypeError", ":", "return", "f0", "==", "f1", "is_same", "=", "strict_eq", "(", "pattern_field", ",", "node_field", ")", "return", "is_good_list", "or", "is_good_node", "or", "is_same" ]
Check if two fields match. Field match if: - If it is a list, all values have to match. - If if is a node, recursively check it. - Otherwise, check values are equal.
[ "Check", "if", "two", "fields", "match", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L124-L146
237,823
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
Check.generic_visit
def generic_visit(self, pattern):
    """
    Check if the pattern match with the checked node.

    a node match if:
        - type match
        - all field match
    """
    return (isinstance(pattern, type(self.node)) and
            all(self.field_match(value, getattr(pattern, field))
                for field, value in iter_fields(self.node)))
python
def generic_visit(self, pattern):
    """
    Check if the pattern match with the checked node.

    a node match if:
        - type match
        - all field match
    """
    return (isinstance(pattern, type(self.node)) and
            all(self.field_match(value, getattr(pattern, field))
                for field, value in iter_fields(self.node)))
[ "def", "generic_visit", "(", "self", ",", "pattern", ")", ":", "return", "(", "isinstance", "(", "pattern", ",", "type", "(", "self", ".", "node", ")", ")", "and", "all", "(", "self", ".", "field_match", "(", "value", ",", "getattr", "(", "pattern", ",", "field", ")", ")", "for", "field", ",", "value", "in", "iter_fields", "(", "self", ".", "node", ")", ")", ")" ]
Check if the pattern match with the checked node. a node match if: - type match - all field match
[ "Check", "if", "the", "pattern", "match", "with", "the", "checked", "node", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L148-L158
237,824
serge-sans-paille/pythran
pythran/analyses/ast_matcher.py
ASTMatcher.visit
def visit(self, node):
    """
    Visitor looking for matching between current node and pattern.

    If it match, save it but whatever happen, keep going.
    """
    if Check(node, dict()).visit(self.pattern):
        self.result.add(node)
    self.generic_visit(node)
python
def visit(self, node):
    """
    Visitor looking for matching between current node and pattern.

    If it match, save it but whatever happen, keep going.
    """
    if Check(node, dict()).visit(self.pattern):
        self.result.add(node)
    self.generic_visit(node)
[ "def", "visit", "(", "self", ",", "node", ")", ":", "if", "Check", "(", "node", ",", "dict", "(", ")", ")", ".", "visit", "(", "self", ".", "pattern", ")", ":", "self", ".", "result", ".", "add", "(", "node", ")", "self", ".", "generic_visit", "(", "node", ")" ]
Visitor looking for matching between current node and pattern. If it match, save it but whatever happen, keep going.
[ "Visitor", "looking", "for", "matching", "between", "current", "node", "and", "pattern", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/ast_matcher.py#L199-L207
237,825
serge-sans-paille/pythran
pythran/analyses/lazyness_analysis.py
LazynessAnalysis.visit_Call
def visit_Call(self, node):
    """
    Compute use of variables in a function call.

    Each arg is use once and function name too. Information about
    modified arguments is forwarded to func_args_lazyness.
    """
    md.visit(self, node)
    for arg in node.args:
        self.visit(arg)
    self.func_args_lazyness(node.func, node.args, node)
    self.visit(node.func)
python
def visit_Call(self, node):
    """
    Compute use of variables in a function call.

    Each arg is use once and function name too. Information about
    modified arguments is forwarded to func_args_lazyness.
    """
    md.visit(self, node)
    for arg in node.args:
        self.visit(arg)
    self.func_args_lazyness(node.func, node.args, node)
    self.visit(node.func)
[ "def", "visit_Call", "(", "self", ",", "node", ")", ":", "md", ".", "visit", "(", "self", ",", "node", ")", "for", "arg", "in", "node", ".", "args", ":", "self", ".", "visit", "(", "arg", ")", "self", ".", "func_args_lazyness", "(", "node", ".", "func", ",", "node", ".", "args", ",", "node", ")", "self", ".", "visit", "(", "node", ".", "func", ")" ]
Compute use of variables in a function call. Each arg is use once and function name too. Information about modified arguments is forwarded to func_args_lazyness.
[ "Compute", "use", "of", "variables", "in", "a", "function", "call", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/lazyness_analysis.py#L359-L371
237,826
serge-sans-paille/pythran
docs/papers/iop2014/xp/numba/nqueens.py
n_queens
def n_queens(queen_count):
    """N-Queens solver.

    Args:
        queen_count: the number of queens to solve for. This is also the
            board size.

    Yields:
        Solutions to the problem. Each yielded value is looks like
        (3, 8, 2, 1, 4, ..., 6) where each number is the column position
        for the queen, and the index into the tuple indicates the row.
    """
    out = list()
    cols = range(queen_count)
    #for vec in permutations(cols):
    for vec in permutations(cols, None):
        if (queen_count == len(set(vec[i]+i for i in cols))
                        == len(set(vec[i]-i for i in cols))):
            #yield vec
            out.append(vec)
    return out
python
def n_queens(queen_count):
    """N-Queens solver.

    Args:
        queen_count: the number of queens to solve for. This is also the
            board size.

    Yields:
        Solutions to the problem. Each yielded value is looks like
        (3, 8, 2, 1, 4, ..., 6) where each number is the column position
        for the queen, and the index into the tuple indicates the row.
    """
    out = list()
    cols = range(queen_count)
    #for vec in permutations(cols):
    for vec in permutations(cols, None):
        if (queen_count == len(set(vec[i]+i for i in cols))
                        == len(set(vec[i]-i for i in cols))):
            #yield vec
            out.append(vec)
    return out
[ "def", "n_queens", "(", "queen_count", ")", ":", "out", "=", "list", "(", ")", "cols", "=", "range", "(", "queen_count", ")", "#for vec in permutations(cols):", "for", "vec", "in", "permutations", "(", "cols", ",", "None", ")", ":", "if", "(", "queen_count", "==", "len", "(", "set", "(", "vec", "[", "i", "]", "+", "i", "for", "i", "in", "cols", ")", ")", "==", "len", "(", "set", "(", "vec", "[", "i", "]", "-", "i", "for", "i", "in", "cols", ")", ")", ")", ":", "#yield vec", "out", ".", "append", "(", "vec", ")", "return", "out" ]
N-Queens solver.

Args:
    queen_count: the number of queens to solve for. This is also the
        board size.

Yields:
    Solutions to the problem. Each yielded value is looks like
    (3, 8, 2, 1, 4, ..., 6) where each number is the column position
    for the queen, and the index into the tuple indicates the row.
[ "N", "-", "Queens", "solver", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/docs/papers/iop2014/xp/numba/nqueens.py#L30-L50
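The n_queens record above has been rewritten from a generator into a list-returning function (the docstring still says Yields), so it can be exercised directly once permutations is in scope. A quick usage check of the same logic as shown in the record:

from itertools import permutations

def n_queens(queen_count):
    # same logic as the record above: a permutation of columns is a solution
    # when both diagonal sets (col+row and col-row) are collision-free
    out = list()
    cols = range(queen_count)
    for vec in permutations(cols, None):
        if (queen_count == len(set(vec[i] + i for i in cols))
                        == len(set(vec[i] - i for i in cols))):
            out.append(vec)
    return out

print(n_queens(4))  # [(1, 3, 0, 2), (2, 0, 3, 1)]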
237,827
serge-sans-paille/pythran
pythran/optimizations/inlining.py
Inlining.visit_Stmt
def visit_Stmt(self, node):
    """ Add new variable definition before the Statement. """
    save_defs, self.defs = self.defs or list(), list()
    self.generic_visit(node)
    new_defs, self.defs = self.defs, save_defs
    return new_defs + [node]
python
def visit_Stmt(self, node):
    """ Add new variable definition before the Statement. """
    save_defs, self.defs = self.defs or list(), list()
    self.generic_visit(node)
    new_defs, self.defs = self.defs, save_defs
    return new_defs + [node]
[ "def", "visit_Stmt", "(", "self", ",", "node", ")", ":", "save_defs", ",", "self", ".", "defs", "=", "self", ".", "defs", "or", "list", "(", ")", ",", "list", "(", ")", "self", ".", "generic_visit", "(", "node", ")", "new_defs", ",", "self", ".", "defs", "=", "self", ".", "defs", ",", "save_defs", "return", "new_defs", "+", "[", "node", "]" ]
Add new variable definition before the Statement.
[ "Add", "new", "variable", "definition", "before", "the", "Statement", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/inlining.py#L44-L49
237,828
serge-sans-paille/pythran
pythran/optimizations/inlining.py
Inlining.visit_Call
def visit_Call(self, node):
    """
    Replace function call by inlined function's body.

    We can inline if it aliases on only one function.
    """
    func_aliases = self.aliases[node.func]
    if len(func_aliases) == 1:
        function_def = next(iter(func_aliases))
        if (isinstance(function_def, ast.FunctionDef) and
                function_def.name in self.inlinable):
            self.update = True
            to_inline = copy.deepcopy(self.inlinable[function_def.name])
            arg_to_value = dict()
            values = node.args
            values += to_inline.args.defaults[len(node.args) -
                                              len(to_inline.args.args):]
            for arg_fun, arg_call in zip(to_inline.args.args, values):
                v_name = "__pythran_inline{}{}{}".format(function_def.name,
                                                         arg_fun.id,
                                                         self.call_count)
                new_var = ast.Name(id=v_name,
                                   ctx=ast.Store(),
                                   annotation=None)
                self.defs.append(ast.Assign(targets=[new_var],
                                            value=arg_call))
                arg_to_value[arg_fun.id] = ast.Name(id=v_name,
                                                    ctx=ast.Load(),
                                                    annotation=None)
            self.call_count += 1
            return Inliner(arg_to_value).visit(to_inline.body[0])
    return node
python
def visit_Call(self, node):
    """
    Replace function call by inlined function's body.

    We can inline if it aliases on only one function.
    """
    func_aliases = self.aliases[node.func]
    if len(func_aliases) == 1:
        function_def = next(iter(func_aliases))
        if (isinstance(function_def, ast.FunctionDef) and
                function_def.name in self.inlinable):
            self.update = True
            to_inline = copy.deepcopy(self.inlinable[function_def.name])
            arg_to_value = dict()
            values = node.args
            values += to_inline.args.defaults[len(node.args) -
                                              len(to_inline.args.args):]
            for arg_fun, arg_call in zip(to_inline.args.args, values):
                v_name = "__pythran_inline{}{}{}".format(function_def.name,
                                                         arg_fun.id,
                                                         self.call_count)
                new_var = ast.Name(id=v_name,
                                   ctx=ast.Store(),
                                   annotation=None)
                self.defs.append(ast.Assign(targets=[new_var],
                                            value=arg_call))
                arg_to_value[arg_fun.id] = ast.Name(id=v_name,
                                                    ctx=ast.Load(),
                                                    annotation=None)
            self.call_count += 1
            return Inliner(arg_to_value).visit(to_inline.body[0])
    return node
[ "def", "visit_Call", "(", "self", ",", "node", ")", ":", "func_aliases", "=", "self", ".", "aliases", "[", "node", ".", "func", "]", "if", "len", "(", "func_aliases", ")", "==", "1", ":", "function_def", "=", "next", "(", "iter", "(", "func_aliases", ")", ")", "if", "(", "isinstance", "(", "function_def", ",", "ast", ".", "FunctionDef", ")", "and", "function_def", ".", "name", "in", "self", ".", "inlinable", ")", ":", "self", ".", "update", "=", "True", "to_inline", "=", "copy", ".", "deepcopy", "(", "self", ".", "inlinable", "[", "function_def", ".", "name", "]", ")", "arg_to_value", "=", "dict", "(", ")", "values", "=", "node", ".", "args", "values", "+=", "to_inline", ".", "args", ".", "defaults", "[", "len", "(", "node", ".", "args", ")", "-", "len", "(", "to_inline", ".", "args", ".", "args", ")", ":", "]", "for", "arg_fun", ",", "arg_call", "in", "zip", "(", "to_inline", ".", "args", ".", "args", ",", "values", ")", ":", "v_name", "=", "\"__pythran_inline{}{}{}\"", ".", "format", "(", "function_def", ".", "name", ",", "arg_fun", ".", "id", ",", "self", ".", "call_count", ")", "new_var", "=", "ast", ".", "Name", "(", "id", "=", "v_name", ",", "ctx", "=", "ast", ".", "Store", "(", ")", ",", "annotation", "=", "None", ")", "self", ".", "defs", ".", "append", "(", "ast", ".", "Assign", "(", "targets", "=", "[", "new_var", "]", ",", "value", "=", "arg_call", ")", ")", "arg_to_value", "[", "arg_fun", ".", "id", "]", "=", "ast", ".", "Name", "(", "id", "=", "v_name", ",", "ctx", "=", "ast", ".", "Load", "(", ")", ",", "annotation", "=", "None", ")", "self", ".", "call_count", "+=", "1", "return", "Inliner", "(", "arg_to_value", ")", ".", "visit", "(", "to_inline", ".", "body", "[", "0", "]", ")", "return", "node" ]
Replace function call by inlined function's body. We can inline if it aliases on only one function.
[ "Replace", "function", "call", "by", "inlined", "function", "s", "body", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/inlining.py#L62-L93
237,829
serge-sans-paille/pythran
pythran/conversion.py
size_container_folding
def size_container_folding(value):
    """
    Convert value to ast expression if size is not too big.

    Converter for sized container.
    """
    if len(value) < MAX_LEN:
        if isinstance(value, list):
            return ast.List([to_ast(elt) for elt in value], ast.Load())
        elif isinstance(value, tuple):
            return ast.Tuple([to_ast(elt) for elt in value], ast.Load())
        elif isinstance(value, set):
            return ast.Set([to_ast(elt) for elt in value])
        elif isinstance(value, dict):
            keys = [to_ast(elt) for elt in value.keys()]
            values = [to_ast(elt) for elt in value.values()]
            return ast.Dict(keys, values)
        elif isinstance(value, np.ndarray):
            return ast.Call(func=ast.Attribute(
                ast.Name(mangle('numpy'), ast.Load(), None),
                'array',
                ast.Load()),
                args=[to_ast(totuple(value.tolist())),
                      ast.Attribute(
                          ast.Name(mangle('numpy'), ast.Load(), None),
                          value.dtype.name,
                          ast.Load())],
                keywords=[])
        else:
            raise ConversionError()
    else:
        raise ToNotEval()
python
def size_container_folding(value):
    """
    Convert value to ast expression if size is not too big.

    Converter for sized container.
    """
    if len(value) < MAX_LEN:
        if isinstance(value, list):
            return ast.List([to_ast(elt) for elt in value], ast.Load())
        elif isinstance(value, tuple):
            return ast.Tuple([to_ast(elt) for elt in value], ast.Load())
        elif isinstance(value, set):
            return ast.Set([to_ast(elt) for elt in value])
        elif isinstance(value, dict):
            keys = [to_ast(elt) for elt in value.keys()]
            values = [to_ast(elt) for elt in value.values()]
            return ast.Dict(keys, values)
        elif isinstance(value, np.ndarray):
            return ast.Call(func=ast.Attribute(
                ast.Name(mangle('numpy'), ast.Load(), None),
                'array',
                ast.Load()),
                args=[to_ast(totuple(value.tolist())),
                      ast.Attribute(
                          ast.Name(mangle('numpy'), ast.Load(), None),
                          value.dtype.name,
                          ast.Load())],
                keywords=[])
        else:
            raise ConversionError()
    else:
        raise ToNotEval()
[ "def", "size_container_folding", "(", "value", ")", ":", "if", "len", "(", "value", ")", "<", "MAX_LEN", ":", "if", "isinstance", "(", "value", ",", "list", ")", ":", "return", "ast", ".", "List", "(", "[", "to_ast", "(", "elt", ")", "for", "elt", "in", "value", "]", ",", "ast", ".", "Load", "(", ")", ")", "elif", "isinstance", "(", "value", ",", "tuple", ")", ":", "return", "ast", ".", "Tuple", "(", "[", "to_ast", "(", "elt", ")", "for", "elt", "in", "value", "]", ",", "ast", ".", "Load", "(", ")", ")", "elif", "isinstance", "(", "value", ",", "set", ")", ":", "return", "ast", ".", "Set", "(", "[", "to_ast", "(", "elt", ")", "for", "elt", "in", "value", "]", ")", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "keys", "=", "[", "to_ast", "(", "elt", ")", "for", "elt", "in", "value", ".", "keys", "(", ")", "]", "values", "=", "[", "to_ast", "(", "elt", ")", "for", "elt", "in", "value", ".", "values", "(", ")", "]", "return", "ast", ".", "Dict", "(", "keys", ",", "values", ")", "elif", "isinstance", "(", "value", ",", "np", ".", "ndarray", ")", ":", "return", "ast", ".", "Call", "(", "func", "=", "ast", ".", "Attribute", "(", "ast", ".", "Name", "(", "mangle", "(", "'numpy'", ")", ",", "ast", ".", "Load", "(", ")", ",", "None", ")", ",", "'array'", ",", "ast", ".", "Load", "(", ")", ")", ",", "args", "=", "[", "to_ast", "(", "totuple", "(", "value", ".", "tolist", "(", ")", ")", ")", ",", "ast", ".", "Attribute", "(", "ast", ".", "Name", "(", "mangle", "(", "'numpy'", ")", ",", "ast", ".", "Load", "(", ")", ",", "None", ")", ",", "value", ".", "dtype", ".", "name", ",", "ast", ".", "Load", "(", ")", ")", "]", ",", "keywords", "=", "[", "]", ")", "else", ":", "raise", "ConversionError", "(", ")", "else", ":", "raise", "ToNotEval", "(", ")" ]
Convert value to ast expression if size is not too big. Converter for sized container.
[ "Convert", "value", "to", "ast", "expression", "if", "size", "is", "not", "too", "big", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/conversion.py#L34-L65
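A minimal usage sketch for the converter in the record above, assuming a pythran checkout is importable. Small containers are folded through the public to_ast entry point, which dispatches to size_container_folding; the expected dumps are taken from the record's own doctests and may vary slightly with the gast version.

import gast as ast
from pythran.conversion import to_ast  # dispatches to size_container_folding for containers

# Containers below MAX_LEN become literal AST nodes (sketch; exact dump format may vary).
print(ast.dump(to_ast([1, 2])))    # List(elts=[Num(n=1), Num(n=2)], ctx=Load())
print(ast.dump(to_ast({0: 'a'})))  # Dict(keys=[Num(n=0)], values=[Str(s='a')])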
237,830
serge-sans-paille/pythran
pythran/conversion.py
builtin_folding
def builtin_folding(value): """ Convert builtin function to ast expression. """ if isinstance(value, (type(None), bool)): name = str(value) elif value.__name__ in ("bool", "float", "int"): name = value.__name__ + "_" else: name = value.__name__ return ast.Attribute(ast.Name('__builtin__', ast.Load(), None), name, ast.Load())
python
def builtin_folding(value): """ Convert builtin function to ast expression. """ if isinstance(value, (type(None), bool)): name = str(value) elif value.__name__ in ("bool", "float", "int"): name = value.__name__ + "_" else: name = value.__name__ return ast.Attribute(ast.Name('__builtin__', ast.Load(), None), name, ast.Load())
[ "def", "builtin_folding", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "type", "(", "None", ")", ",", "bool", ")", ")", ":", "name", "=", "str", "(", "value", ")", "elif", "value", ".", "__name__", "in", "(", "\"bool\"", ",", "\"float\"", ",", "\"int\"", ")", ":", "name", "=", "value", ".", "__name__", "+", "\"_\"", "else", ":", "name", "=", "value", ".", "__name__", "return", "ast", ".", "Attribute", "(", "ast", ".", "Name", "(", "'__builtin__'", ",", "ast", ".", "Load", "(", ")", ",", "None", ")", ",", "name", ",", "ast", ".", "Load", "(", ")", ")" ]
Convert builtin function to ast expression.
[ "Convert", "builtin", "function", "to", "ast", "expression", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/conversion.py#L68-L77
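A short illustration of the name mapping above, assuming a pythran checkout is importable; the exact dump text depends on the installed gast version, so the comments are indicative only.

import gast as ast
from pythran.conversion import builtin_folding

# `len` keeps its name, while `int`/`float`/`bool` get a trailing underscore
# so they map onto pythran's functor names rather than the bare builtins.
print(ast.dump(builtin_folding(len)))   # ... attr='len' ...
print(ast.dump(builtin_folding(int)))   # ... attr='int_' ...
print(ast.dump(builtin_folding(None)))  # ... attr='None' ...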
237,831
serge-sans-paille/pythran
pythran/conversion.py
to_ast
def to_ast(value): """ Turn a value into ast expression. >>> a = 1 >>> print(ast.dump(to_ast(a))) Num(n=1) >>> a = [1, 2, 3] >>> print(ast.dump(to_ast(a))) List(elts=[Num(n=1), Num(n=2), Num(n=3)], ctx=Load()) """ if isinstance(value, (type(None), bool)): return builtin_folding(value) if sys.version_info[0] == 2 and isinstance(value, long): from pythran.syntax import PythranSyntaxError raise PythranSyntaxError("constant folding results in big int") if any(value is t for t in (bool, int, float)): iinfo = np.iinfo(int) if isinstance(value, int) and not (iinfo.min <= value <= iinfo.max): from pythran.syntax import PythranSyntaxError raise PythranSyntaxError("constant folding results in big int") return builtin_folding(value) elif isinstance(value, np.generic): return to_ast(np.asscalar(value)) elif isinstance(value, numbers.Number): return ast.Num(value) elif isinstance(value, str): return ast.Str(value) elif isinstance(value, (list, tuple, set, dict, np.ndarray)): return size_container_folding(value) elif hasattr(value, "__module__") and value.__module__ == "__builtin__": # TODO Can be done the same way for others modules return builtin_folding(value) # only meaningful for python3 elif sys.version_info.major == 3: if isinstance(value, (filter, map, zip)): return to_ast(list(value)) raise ToNotEval()
python
def to_ast(value): """ Turn a value into ast expression. >>> a = 1 >>> print(ast.dump(to_ast(a))) Num(n=1) >>> a = [1, 2, 3] >>> print(ast.dump(to_ast(a))) List(elts=[Num(n=1), Num(n=2), Num(n=3)], ctx=Load()) """ if isinstance(value, (type(None), bool)): return builtin_folding(value) if sys.version_info[0] == 2 and isinstance(value, long): from pythran.syntax import PythranSyntaxError raise PythranSyntaxError("constant folding results in big int") if any(value is t for t in (bool, int, float)): iinfo = np.iinfo(int) if isinstance(value, int) and not (iinfo.min <= value <= iinfo.max): from pythran.syntax import PythranSyntaxError raise PythranSyntaxError("constant folding results in big int") return builtin_folding(value) elif isinstance(value, np.generic): return to_ast(np.asscalar(value)) elif isinstance(value, numbers.Number): return ast.Num(value) elif isinstance(value, str): return ast.Str(value) elif isinstance(value, (list, tuple, set, dict, np.ndarray)): return size_container_folding(value) elif hasattr(value, "__module__") and value.__module__ == "__builtin__": # TODO Can be done the same way for others modules return builtin_folding(value) # only meaningful for python3 elif sys.version_info.major == 3: if isinstance(value, (filter, map, zip)): return to_ast(list(value)) raise ToNotEval()
[ "def", "to_ast", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "type", "(", "None", ")", ",", "bool", ")", ")", ":", "return", "builtin_folding", "(", "value", ")", "if", "sys", ".", "version_info", "[", "0", "]", "==", "2", "and", "isinstance", "(", "value", ",", "long", ")", ":", "from", "pythran", ".", "syntax", "import", "PythranSyntaxError", "raise", "PythranSyntaxError", "(", "\"constant folding results in big int\"", ")", "if", "any", "(", "value", "is", "t", "for", "t", "in", "(", "bool", ",", "int", ",", "float", ")", ")", ":", "iinfo", "=", "np", ".", "iinfo", "(", "int", ")", "if", "isinstance", "(", "value", ",", "int", ")", "and", "not", "(", "iinfo", ".", "min", "<=", "value", "<=", "iinfo", ".", "max", ")", ":", "from", "pythran", ".", "syntax", "import", "PythranSyntaxError", "raise", "PythranSyntaxError", "(", "\"constant folding results in big int\"", ")", "return", "builtin_folding", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "np", ".", "generic", ")", ":", "return", "to_ast", "(", "np", ".", "asscalar", "(", "value", ")", ")", "elif", "isinstance", "(", "value", ",", "numbers", ".", "Number", ")", ":", "return", "ast", ".", "Num", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "str", ")", ":", "return", "ast", ".", "Str", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ",", "set", ",", "dict", ",", "np", ".", "ndarray", ")", ")", ":", "return", "size_container_folding", "(", "value", ")", "elif", "hasattr", "(", "value", ",", "\"__module__\"", ")", "and", "value", ".", "__module__", "==", "\"__builtin__\"", ":", "# TODO Can be done the same way for others modules", "return", "builtin_folding", "(", "value", ")", "# only meaningful for python3", "elif", "sys", ".", "version_info", ".", "major", "==", "3", ":", "if", "isinstance", "(", "value", ",", "(", "filter", ",", "map", ",", "zip", ")", ")", ":", "return", "to_ast", "(", "list", "(", "value", ")", ")", "raise", "ToNotEval", "(", ")" ]
Turn a value into ast expression. >>> a = 1 >>> print(ast.dump(to_ast(a))) Num(n=1) >>> a = [1, 2, 3] >>> print(ast.dump(to_ast(a))) List(elts=[Num(n=1), Num(n=2), Num(n=3)], ctx=Load())
[ "Turn", "a", "value", "into", "ast", "expression", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/conversion.py#L80-L118
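Beyond the doctests already embedded in the record, the numpy branches can be exercised the same way. This is a sketch under the assumption that numpy and a pythran checkout are available; note that the np.asscalar call inside to_ast has been removed from recent numpy releases, so the scalar branch may need an older numpy.

import numpy as np
import gast as ast
from pythran.conversion import to_ast

# A numpy scalar is unwrapped to a plain Python number before conversion.
print(ast.dump(to_ast(np.int32(3))))

# Small arrays are rebuilt as a call to numpy.array(...) with an explicit dtype attribute.
print(ast.dump(to_ast(np.ones(2))))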
237,832
serge-sans-paille/pythran
pythran/analyses/global_declarations.py
GlobalDeclarations.visit_Module
def visit_Module(self, node): """ Import module define a new variable name. """ duc = SilentDefUseChains() duc.visit(node) for d in duc.locals[node]: self.result[d.name()] = d.node
python
def visit_Module(self, node): """ Import module define a new variable name. """ duc = SilentDefUseChains() duc.visit(node) for d in duc.locals[node]: self.result[d.name()] = d.node
[ "def", "visit_Module", "(", "self", ",", "node", ")", ":", "duc", "=", "SilentDefUseChains", "(", ")", "duc", ".", "visit", "(", "node", ")", "for", "d", "in", "duc", ".", "locals", "[", "node", "]", ":", "self", ".", "result", "[", "d", ".", "name", "(", ")", "]", "=", "d", ".", "node" ]
Importing a module defines a new variable name.
[ "Import", "module", "define", "a", "new", "variable", "name", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/global_declarations.py#L39-L44
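The analysis above is normally driven through pythran's pass manager; the sketch below assumes GlobalDeclarations is importable from pythran.analyses (its module path in the record) and shows the expected shape of the result, a mapping from global names to their defining AST nodes.

import gast as ast
from pythran.passmanager import PassManager
from pythran.analyses import GlobalDeclarations

module = ast.parse("def foo(x):\n    return bar(x)\ndef bar(x):\n    return x")
decls = PassManager("demo").gather(GlobalDeclarations, module)
print(sorted(decls))  # expected: ['bar', 'foo'], each mapped to its FunctionDef node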
237,833
serge-sans-paille/pythran
pythran/utils.py
attr_to_path
def attr_to_path(node): """ Compute path and final object for an attribute node """ def get_intrinsic_path(modules, attr): """ Get function path and intrinsic from an ast.Attribute. """ if isinstance(attr, ast.Name): return modules[demangle(attr.id)], (demangle(attr.id),) elif isinstance(attr, ast.Attribute): module, path = get_intrinsic_path(modules, attr.value) return module[attr.attr], path + (attr.attr,) obj, path = get_intrinsic_path(MODULES, node) if not obj.isliteral(): path = path[:-1] + ('functor', path[-1]) return obj, ('pythonic', ) + path
python
def attr_to_path(node): """ Compute path and final object for an attribute node """ def get_intrinsic_path(modules, attr): """ Get function path and intrinsic from an ast.Attribute. """ if isinstance(attr, ast.Name): return modules[demangle(attr.id)], (demangle(attr.id),) elif isinstance(attr, ast.Attribute): module, path = get_intrinsic_path(modules, attr.value) return module[attr.attr], path + (attr.attr,) obj, path = get_intrinsic_path(MODULES, node) if not obj.isliteral(): path = path[:-1] + ('functor', path[-1]) return obj, ('pythonic', ) + path
[ "def", "attr_to_path", "(", "node", ")", ":", "def", "get_intrinsic_path", "(", "modules", ",", "attr", ")", ":", "\"\"\" Get function path and intrinsic from an ast.Attribute. \"\"\"", "if", "isinstance", "(", "attr", ",", "ast", ".", "Name", ")", ":", "return", "modules", "[", "demangle", "(", "attr", ".", "id", ")", "]", ",", "(", "demangle", "(", "attr", ".", "id", ")", ",", ")", "elif", "isinstance", "(", "attr", ",", "ast", ".", "Attribute", ")", ":", "module", ",", "path", "=", "get_intrinsic_path", "(", "modules", ",", "attr", ".", "value", ")", "return", "module", "[", "attr", ".", "attr", "]", ",", "path", "+", "(", "attr", ".", "attr", ",", ")", "obj", ",", "path", "=", "get_intrinsic_path", "(", "MODULES", ",", "node", ")", "if", "not", "obj", ".", "isliteral", "(", ")", ":", "path", "=", "path", "[", ":", "-", "1", "]", "+", "(", "'functor'", ",", "path", "[", "-", "1", "]", ")", "return", "obj", ",", "(", "'pythonic'", ",", ")", "+", "path" ]
Compute path and final object for an attribute node
[ "Compute", "path", "and", "final", "object", "for", "an", "attribute", "node" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/utils.py#L10-L23
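A small round-trip sketch combining the helper above with path_to_attr from the next record, assuming a pythran checkout is importable; the ('pythonic', ..., 'functor', ...) shape is read off the code and should be treated as indicative rather than guaranteed.

from pythran.utils import attr_to_path, path_to_attr

# Build the AST for `numpy.cos`, then map it back to its pythonic C++ path.
node = path_to_attr(('numpy', 'cos'))
intrinsic, cpp_path = attr_to_path(node)
print(cpp_path)  # expected to look like ('pythonic', 'numpy', 'functor', 'cos')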
237,834
serge-sans-paille/pythran
pythran/utils.py
path_to_attr
def path_to_attr(path): """ Transform path to ast.Attribute. >>> import gast as ast >>> path = ('__builtin__', 'my', 'constant') >>> value = path_to_attr(path) >>> ref = ast.Attribute( ... value=ast.Attribute(value=ast.Name(id="__builtin__", ... ctx=ast.Load(), ... annotation=None), ... attr="my", ctx=ast.Load()), ... attr="constant", ctx=ast.Load()) >>> ast.dump(ref) == ast.dump(value) True """ return reduce(lambda hpath, last: ast.Attribute(hpath, last, ast.Load()), path[1:], ast.Name(mangle(path[0]), ast.Load(), None))
python
def path_to_attr(path): """ Transform path to ast.Attribute. >>> import gast as ast >>> path = ('__builtin__', 'my', 'constant') >>> value = path_to_attr(path) >>> ref = ast.Attribute( ... value=ast.Attribute(value=ast.Name(id="__builtin__", ... ctx=ast.Load(), ... annotation=None), ... attr="my", ctx=ast.Load()), ... attr="constant", ctx=ast.Load()) >>> ast.dump(ref) == ast.dump(value) True """ return reduce(lambda hpath, last: ast.Attribute(hpath, last, ast.Load()), path[1:], ast.Name(mangle(path[0]), ast.Load(), None))
[ "def", "path_to_attr", "(", "path", ")", ":", "return", "reduce", "(", "lambda", "hpath", ",", "last", ":", "ast", ".", "Attribute", "(", "hpath", ",", "last", ",", "ast", ".", "Load", "(", ")", ")", ",", "path", "[", "1", ":", "]", ",", "ast", ".", "Name", "(", "mangle", "(", "path", "[", "0", "]", ")", ",", "ast", ".", "Load", "(", ")", ",", "None", ")", ")" ]
Transform path to ast.Attribute. >>> import gast as ast >>> path = ('__builtin__', 'my', 'constant') >>> value = path_to_attr(path) >>> ref = ast.Attribute( ... value=ast.Attribute(value=ast.Name(id="__builtin__", ... ctx=ast.Load(), ... annotation=None), ... attr="my", ctx=ast.Load()), ... attr="constant", ctx=ast.Load()) >>> ast.dump(ref) == ast.dump(value) True
[ "Transform", "path", "to", "ast", ".", "Attribute", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/utils.py#L26-L43
237,835
serge-sans-paille/pythran
pythran/utils.py
get_variable
def get_variable(assignable): """ Return modified variable name. >>> import gast as ast >>> ref = ast.Subscript( ... value=ast.Subscript( ... value=ast.Name(id='a', ctx=ast.Load(), annotation=None), ... slice=ast.Index(value=ast.Name('i', ast.Load(), None)), ... ctx=ast.Load()), ... slice=ast.Index(value=ast.Name(id='j', ... ctx=ast.Load(), annotation=None)), ... ctx=ast.Load()) >>> ast.dump(get_variable(ref)) "Name(id='a', ctx=Load(), annotation=None)" """ msg = "Only name and subscript can be assigned." assert isinstance(assignable, (ast.Name, ast.Subscript)), msg while isinstance(assignable, ast.Subscript) or isattr(assignable): if isattr(assignable): assignable = assignable.args[0] else: assignable = assignable.value return assignable
python
def get_variable(assignable): """ Return modified variable name. >>> import gast as ast >>> ref = ast.Subscript( ... value=ast.Subscript( ... value=ast.Name(id='a', ctx=ast.Load(), annotation=None), ... slice=ast.Index(value=ast.Name('i', ast.Load(), None)), ... ctx=ast.Load()), ... slice=ast.Index(value=ast.Name(id='j', ... ctx=ast.Load(), annotation=None)), ... ctx=ast.Load()) >>> ast.dump(get_variable(ref)) "Name(id='a', ctx=Load(), annotation=None)" """ msg = "Only name and subscript can be assigned." assert isinstance(assignable, (ast.Name, ast.Subscript)), msg while isinstance(assignable, ast.Subscript) or isattr(assignable): if isattr(assignable): assignable = assignable.args[0] else: assignable = assignable.value return assignable
[ "def", "get_variable", "(", "assignable", ")", ":", "msg", "=", "\"Only name and subscript can be assigned.\"", "assert", "isinstance", "(", "assignable", ",", "(", "ast", ".", "Name", ",", "ast", ".", "Subscript", ")", ")", ",", "msg", "while", "isinstance", "(", "assignable", ",", "ast", ".", "Subscript", ")", "or", "isattr", "(", "assignable", ")", ":", "if", "isattr", "(", "assignable", ")", ":", "assignable", "=", "assignable", ".", "args", "[", "0", "]", "else", ":", "assignable", "=", "assignable", ".", "value", "return", "assignable" ]
Return modified variable name. >>> import gast as ast >>> ref = ast.Subscript( ... value=ast.Subscript( ... value=ast.Name(id='a', ctx=ast.Load(), annotation=None), ... slice=ast.Index(value=ast.Name('i', ast.Load(), None)), ... ctx=ast.Load()), ... slice=ast.Index(value=ast.Name(id='j', ... ctx=ast.Load(), annotation=None)), ... ctx=ast.Load()) >>> ast.dump(get_variable(ref)) "Name(id='a', ctx=Load(), annotation=None)"
[ "Return", "modified", "variable", "name", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/utils.py#L64-L87
237,836
serge-sans-paille/pythran
pythran/types/reorder.py
Reorder.prepare
def prepare(self, node): """ Format type dependencies information to use if for reordering. """ super(Reorder, self).prepare(node) candidates = self.type_dependencies.successors( TypeDependencies.NoDeps) # We first select function which may have a result without calling any # others functions. # Then we check if no loops type dependencies exists. If it exists, we # can safely remove the dependency as it could be compute without this # information. # As we can compute type for this function, successors can potentially # be computed # FIXME: This is false in some cases # # def bar(i): # if i > 0: # return foo(i) # else: # return [] # # def foo(i): # return [len(bar(i-1)) + len(bar(i - 2))] # # If we check for function without deps first, we will pick bar and say # it returns empty list while candidates: new_candidates = list() for n in candidates: # remove edges that imply a circular dependency for p in list(self.type_dependencies.predecessors(n)): if nx.has_path(self.type_dependencies, n, p): self.type_dependencies.remove_edge(p, n) if n not in self.type_dependencies.successors(n): new_candidates.extend(self.type_dependencies.successors(n)) candidates = new_candidates
python
def prepare(self, node): """ Format type dependencies information to use if for reordering. """ super(Reorder, self).prepare(node) candidates = self.type_dependencies.successors( TypeDependencies.NoDeps) # We first select function which may have a result without calling any # others functions. # Then we check if no loops type dependencies exists. If it exists, we # can safely remove the dependency as it could be compute without this # information. # As we can compute type for this function, successors can potentially # be computed # FIXME: This is false in some cases # # def bar(i): # if i > 0: # return foo(i) # else: # return [] # # def foo(i): # return [len(bar(i-1)) + len(bar(i - 2))] # # If we check for function without deps first, we will pick bar and say # it returns empty list while candidates: new_candidates = list() for n in candidates: # remove edges that imply a circular dependency for p in list(self.type_dependencies.predecessors(n)): if nx.has_path(self.type_dependencies, n, p): self.type_dependencies.remove_edge(p, n) if n not in self.type_dependencies.successors(n): new_candidates.extend(self.type_dependencies.successors(n)) candidates = new_candidates
[ "def", "prepare", "(", "self", ",", "node", ")", ":", "super", "(", "Reorder", ",", "self", ")", ".", "prepare", "(", "node", ")", "candidates", "=", "self", ".", "type_dependencies", ".", "successors", "(", "TypeDependencies", ".", "NoDeps", ")", "# We first select function which may have a result without calling any", "# others functions.", "# Then we check if no loops type dependencies exists. If it exists, we", "# can safely remove the dependency as it could be compute without this", "# information.", "# As we can compute type for this function, successors can potentially", "# be computed", "# FIXME: This is false in some cases", "#", "# def bar(i):", "# if i > 0:", "# return foo(i)", "# else:", "# return []", "#", "# def foo(i):", "# return [len(bar(i-1)) + len(bar(i - 2))]", "#", "# If we check for function without deps first, we will pick bar and say", "# it returns empty list", "while", "candidates", ":", "new_candidates", "=", "list", "(", ")", "for", "n", "in", "candidates", ":", "# remove edges that imply a circular dependency", "for", "p", "in", "list", "(", "self", ".", "type_dependencies", ".", "predecessors", "(", "n", ")", ")", ":", "if", "nx", ".", "has_path", "(", "self", ".", "type_dependencies", ",", "n", ",", "p", ")", ":", "self", ".", "type_dependencies", ".", "remove_edge", "(", "p", ",", "n", ")", "if", "n", "not", "in", "self", ".", "type_dependencies", ".", "successors", "(", "n", ")", ":", "new_candidates", ".", "extend", "(", "self", ".", "type_dependencies", ".", "successors", "(", "n", ")", ")", "candidates", "=", "new_candidates" ]
Format type dependencies information to use it for reordering.
[ "Format", "type", "dependencies", "information", "to", "use", "if", "for", "reordering", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/types/reorder.py#L57-L91
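The effect of this preparation step is easiest to see on a module whose callee is defined after its caller. The sketch below assumes the pass can be driven directly through the pass manager, mirroring how pythran's own pipeline invokes it; required analyses are resolved automatically.

import gast as ast
from pythran.passmanager import PassManager
from pythran.types.reorder import Reorder

code = "def caller(n):\n    return helper(n) + 1\ndef helper(n):\n    return n * 2"
module = ast.parse(code)
PassManager("demo").apply(Reorder, module)

# After reordering, `helper` should precede `caller`, so its return type
# can be inferred before `caller` is typed.
print([f.name for f in module.body])  # expected: ['helper', 'caller']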
237,837
serge-sans-paille/pythran
pythran/types/reorder.py
Reorder.visit_Module
def visit_Module(self, node): """ Keep everything but function definition then add sorted functions. Most of the time, many function sort work so we use function calldepth as a "sort hint" to simplify typing. """ newbody = list() olddef = list() for stmt in node.body: if isinstance(stmt, ast.FunctionDef): olddef.append(stmt) else: newbody.append(stmt) try: newdef = topological_sort( self.type_dependencies, self.ordered_global_declarations) newdef = [f for f in newdef if isinstance(f, ast.FunctionDef)] except nx.exception.NetworkXUnfeasible: raise PythranSyntaxError("Infinite function recursion") assert set(newdef) == set(olddef), "A function have been lost..." node.body = newbody + newdef self.update = True return node
python
def visit_Module(self, node): """ Keep everything but function definition then add sorted functions. Most of the time, many function sort work so we use function calldepth as a "sort hint" to simplify typing. """ newbody = list() olddef = list() for stmt in node.body: if isinstance(stmt, ast.FunctionDef): olddef.append(stmt) else: newbody.append(stmt) try: newdef = topological_sort( self.type_dependencies, self.ordered_global_declarations) newdef = [f for f in newdef if isinstance(f, ast.FunctionDef)] except nx.exception.NetworkXUnfeasible: raise PythranSyntaxError("Infinite function recursion") assert set(newdef) == set(olddef), "A function have been lost..." node.body = newbody + newdef self.update = True return node
[ "def", "visit_Module", "(", "self", ",", "node", ")", ":", "newbody", "=", "list", "(", ")", "olddef", "=", "list", "(", ")", "for", "stmt", "in", "node", ".", "body", ":", "if", "isinstance", "(", "stmt", ",", "ast", ".", "FunctionDef", ")", ":", "olddef", ".", "append", "(", "stmt", ")", "else", ":", "newbody", ".", "append", "(", "stmt", ")", "try", ":", "newdef", "=", "topological_sort", "(", "self", ".", "type_dependencies", ",", "self", ".", "ordered_global_declarations", ")", "newdef", "=", "[", "f", "for", "f", "in", "newdef", "if", "isinstance", "(", "f", ",", "ast", ".", "FunctionDef", ")", "]", "except", "nx", ".", "exception", ".", "NetworkXUnfeasible", ":", "raise", "PythranSyntaxError", "(", "\"Infinite function recursion\"", ")", "assert", "set", "(", "newdef", ")", "==", "set", "(", "olddef", ")", ",", "\"A function have been lost...\"", "node", ".", "body", "=", "newbody", "+", "newdef", "self", ".", "update", "=", "True", "return", "node" ]
Keep everything but function definition then add sorted functions. Most of the time, many function sort work so we use function calldepth as a "sort hint" to simplify typing.
[ "Keep", "everything", "but", "function", "definition", "then", "add", "sorted", "functions", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/types/reorder.py#L93-L118
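The reordering itself boils down to a topological sort of the type-dependency graph, with NetworkXUnfeasible signalling an unresolvable cycle. Below is a standalone illustration of that idea with plain networkx; it is not pythran's own topological_sort helper, which additionally takes a tie-breaking order.

import networkx as nx

# An edge caller -> helper reads as "caller's result type depends on helper's".
deps = nx.DiGraph([("caller", "helper")])
print(list(reversed(list(nx.topological_sort(deps)))))  # ['helper', 'caller']

# A dependency cycle has no such ordering; pythran reports it as
# "Infinite function recursion".
deps.add_edge("helper", "caller")
print(nx.is_directed_acyclic_graph(deps))  # False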
237,838
serge-sans-paille/pythran
pythran/optimizations/dead_code_elimination.py
DeadCodeElimination.visit
def visit(self, node): """ Add OMPDirective from the old node to the new one. """ old_omp = metadata.get(node, OMPDirective) node = super(DeadCodeElimination, self).visit(node) if not metadata.get(node, OMPDirective): for omp_directive in old_omp: metadata.add(node, omp_directive) return node
python
def visit(self, node): """ Add OMPDirective from the old node to the new one. """ old_omp = metadata.get(node, OMPDirective) node = super(DeadCodeElimination, self).visit(node) if not metadata.get(node, OMPDirective): for omp_directive in old_omp: metadata.add(node, omp_directive) return node
[ "def", "visit", "(", "self", ",", "node", ")", ":", "old_omp", "=", "metadata", ".", "get", "(", "node", ",", "OMPDirective", ")", "node", "=", "super", "(", "DeadCodeElimination", ",", "self", ")", ".", "visit", "(", "node", ")", "if", "not", "metadata", ".", "get", "(", "node", ",", "OMPDirective", ")", ":", "for", "omp_directive", "in", "old_omp", ":", "metadata", ".", "add", "(", "node", ",", "omp_directive", ")", "return", "node" ]
Add OMPDirective from the old node to the new one.
[ "Add", "OMPDirective", "from", "the", "old", "node", "to", "the", "new", "one", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/optimizations/dead_code_elimination.py#L133-L140
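For context on where the visit method above runs, the whole pass can be exercised on a toy function. This is a sketch under the assumption that the class is importable from the module path given in the record and that the multiplication is treated as a pure expression.

import gast as ast
from pythran.passmanager import PassManager
from pythran.optimizations.dead_code_elimination import DeadCodeElimination

code = "def foo(a):\n    b = 2 * a\n    return a"   # `b` is never used
module = ast.parse(code)
PassManager("demo").apply(DeadCodeElimination, module)
print(ast.dump(module.body[0]))  # the useless assignment is expected to be removed or reduced to a no-op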
237,839
serge-sans-paille/pythran
pythran/analyses/aliases.py
save_intrinsic_alias
def save_intrinsic_alias(module): """ Recursively save default aliases for pythonic functions. """ for v in module.values(): if isinstance(v, dict): # Submodules case save_intrinsic_alias(v) else: IntrinsicAliases[v] = frozenset((v,)) if isinstance(v, Class): save_intrinsic_alias(v.fields)
python
def save_intrinsic_alias(module): """ Recursively save default aliases for pythonic functions. """ for v in module.values(): if isinstance(v, dict): # Submodules case save_intrinsic_alias(v) else: IntrinsicAliases[v] = frozenset((v,)) if isinstance(v, Class): save_intrinsic_alias(v.fields)
[ "def", "save_intrinsic_alias", "(", "module", ")", ":", "for", "v", "in", "module", ".", "values", "(", ")", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "# Submodules case", "save_intrinsic_alias", "(", "v", ")", "else", ":", "IntrinsicAliases", "[", "v", "]", "=", "frozenset", "(", "(", "v", ",", ")", ")", "if", "isinstance", "(", "v", ",", "Class", ")", ":", "save_intrinsic_alias", "(", "v", ".", "fields", ")" ]
Recursively save default aliases for pythonic functions.
[ "Recursively", "save", "default", "aliases", "for", "pythonic", "functions", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L53-L61
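The shape of this initialisation (every leaf intrinsic aliases only itself, recursing through nested submodule dicts) can be reproduced in isolation. The snippet below is a self-contained sketch of that pattern using a hypothetical miniature table, not pythran's actual MODULES registry.

# Hypothetical miniature of pythran's MODULES layout: nested dicts of intrinsics.
fake_modules = {"math": {"cos": "cos_intrinsic", "sin": "sin_intrinsic"},
                "builtins": {"len": "len_intrinsic"}}

aliases = {}

def seed(module):
    # Mirror save_intrinsic_alias: recurse on submodules, leaves alias themselves.
    for value in module.values():
        if isinstance(value, dict):
            seed(value)
        else:
            aliases[value] = frozenset((value,))

seed(fake_modules)
print(aliases["cos_intrinsic"])  # frozenset({'cos_intrinsic'})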
237,840
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_IfExp
def visit_IfExp(self, node): ''' Resulting node alias to either branch >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b, c): return a if c else b') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.IfExp) (a if c else b) => ['a', 'b'] ''' self.visit(node.test) rec = [self.visit(n) for n in (node.body, node.orelse)] return self.add(node, set.union(*rec))
python
def visit_IfExp(self, node): ''' Resulting node alias to either branch >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b, c): return a if c else b') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.IfExp) (a if c else b) => ['a', 'b'] ''' self.visit(node.test) rec = [self.visit(n) for n in (node.body, node.orelse)] return self.add(node, set.union(*rec))
[ "def", "visit_IfExp", "(", "self", ",", "node", ")", ":", "self", ".", "visit", "(", "node", ".", "test", ")", "rec", "=", "[", "self", ".", "visit", "(", "n", ")", "for", "n", "in", "(", "node", ".", "body", ",", "node", ".", "orelse", ")", "]", "return", "self", ".", "add", "(", "node", ",", "set", ".", "union", "(", "*", "rec", ")", ")" ]
Resulting node alias to either branch >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b, c): return a if c else b') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.IfExp) (a if c else b) => ['a', 'b']
[ "Resulting", "node", "alias", "to", "either", "branch" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L164-L177
237,841
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_Dict
def visit_Dict(self, node): ''' A dict is abstracted as an unordered container of its values >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return {0: a, 1: b}') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Dict) {0: a, 1: b} => ['|a|', '|b|'] where the |id| notation means something that may contain ``id``. ''' if node.keys: elts_aliases = set() for key, val in zip(node.keys, node.values): self.visit(key) # res ignored, just to fill self.aliases elt_aliases = self.visit(val) elts_aliases.update(map(ContainerOf, elt_aliases)) else: elts_aliases = None return self.add(node, elts_aliases)
python
def visit_Dict(self, node): ''' A dict is abstracted as an unordered container of its values >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return {0: a, 1: b}') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Dict) {0: a, 1: b} => ['|a|', '|b|'] where the |id| notation means something that may contain ``id``. ''' if node.keys: elts_aliases = set() for key, val in zip(node.keys, node.values): self.visit(key) # res ignored, just to fill self.aliases elt_aliases = self.visit(val) elts_aliases.update(map(ContainerOf, elt_aliases)) else: elts_aliases = None return self.add(node, elts_aliases)
[ "def", "visit_Dict", "(", "self", ",", "node", ")", ":", "if", "node", ".", "keys", ":", "elts_aliases", "=", "set", "(", ")", "for", "key", ",", "val", "in", "zip", "(", "node", ".", "keys", ",", "node", ".", "values", ")", ":", "self", ".", "visit", "(", "key", ")", "# res ignored, just to fill self.aliases", "elt_aliases", "=", "self", ".", "visit", "(", "val", ")", "elts_aliases", ".", "update", "(", "map", "(", "ContainerOf", ",", "elt_aliases", ")", ")", "else", ":", "elts_aliases", "=", "None", "return", "self", ".", "add", "(", "node", ",", "elts_aliases", ")" ]
A dict is abstracted as an unordered container of its values >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return {0: a, 1: b}') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Dict) {0: a, 1: b} => ['|a|', '|b|'] where the |id| notation means something that may contain ``id``.
[ "A", "dict", "is", "abstracted", "as", "an", "unordered", "container", "of", "its", "values" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L179-L200
237,842
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_Set
def visit_Set(self, node): ''' A set is abstracted as an unordered container of its elements >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return {a, b}') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {a, b} => ['|a|', '|b|'] where the |id| notation means something that may contain ``id``. ''' if node.elts: elts_aliases = {ContainerOf(alias) for elt in node.elts for alias in self.visit(elt)} else: elts_aliases = None return self.add(node, elts_aliases)
python
def visit_Set(self, node): ''' A set is abstracted as an unordered container of its elements >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return {a, b}') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {a, b} => ['|a|', '|b|'] where the |id| notation means something that may contain ``id``. ''' if node.elts: elts_aliases = {ContainerOf(alias) for elt in node.elts for alias in self.visit(elt)} else: elts_aliases = None return self.add(node, elts_aliases)
[ "def", "visit_Set", "(", "self", ",", "node", ")", ":", "if", "node", ".", "elts", ":", "elts_aliases", "=", "{", "ContainerOf", "(", "alias", ")", "for", "elt", "in", "node", ".", "elts", "for", "alias", "in", "self", ".", "visit", "(", "elt", ")", "}", "else", ":", "elts_aliases", "=", "None", "return", "self", ".", "add", "(", "node", ",", "elts_aliases", ")" ]
A set is abstracted as an unordered container of its elements >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return {a, b}') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {a, b} => ['|a|', '|b|'] where the |id| notation means something that may contain ``id``.
[ "A", "set", "is", "abstracted", "as", "an", "unordered", "container", "of", "its", "elements" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L202-L221
237,843
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_Return
def visit_Return(self, node): ''' A side effect of computing aliases on a Return is that it updates the ``return_alias`` field of current function >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return a') >>> result = pm.gather(Aliases, module) >>> module.body[0].return_alias # doctest: +ELLIPSIS <function ...merge_return_aliases at...> This field is a function that takes as many nodes as the function argument count as input and returns an expression based on these arguments if the function happens to create aliasing between its input and output. In our case: >>> f = module.body[0].return_alias >>> Aliases.dump(f([ast.Name('A', ast.Load(), None), ast.Num(1)])) ['A'] This also works if the relationship between input and output is more complex: >>> module = ast.parse('def foo(a, b): return a or b[0]') >>> result = pm.gather(Aliases, module) >>> f = module.body[0].return_alias >>> List = ast.List([ast.Name('L0', ast.Load(), None)], ast.Load()) >>> Aliases.dump(f([ast.Name('B', ast.Load(), None), List])) ['B', '[L0][0]'] Which actually means that when called with two arguments ``B`` and the single-element list ``[L[0]]``, ``foo`` may returns either the first argument, or the first element of the second argument. ''' if not node.value: return ret_aliases = self.visit(node.value) if Aliases.RetId in self.aliases: ret_aliases = ret_aliases.union(self.aliases[Aliases.RetId]) self.aliases[Aliases.RetId] = ret_aliases
python
def visit_Return(self, node): ''' A side effect of computing aliases on a Return is that it updates the ``return_alias`` field of current function >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return a') >>> result = pm.gather(Aliases, module) >>> module.body[0].return_alias # doctest: +ELLIPSIS <function ...merge_return_aliases at...> This field is a function that takes as many nodes as the function argument count as input and returns an expression based on these arguments if the function happens to create aliasing between its input and output. In our case: >>> f = module.body[0].return_alias >>> Aliases.dump(f([ast.Name('A', ast.Load(), None), ast.Num(1)])) ['A'] This also works if the relationship between input and output is more complex: >>> module = ast.parse('def foo(a, b): return a or b[0]') >>> result = pm.gather(Aliases, module) >>> f = module.body[0].return_alias >>> List = ast.List([ast.Name('L0', ast.Load(), None)], ast.Load()) >>> Aliases.dump(f([ast.Name('B', ast.Load(), None), List])) ['B', '[L0][0]'] Which actually means that when called with two arguments ``B`` and the single-element list ``[L[0]]``, ``foo`` may returns either the first argument, or the first element of the second argument. ''' if not node.value: return ret_aliases = self.visit(node.value) if Aliases.RetId in self.aliases: ret_aliases = ret_aliases.union(self.aliases[Aliases.RetId]) self.aliases[Aliases.RetId] = ret_aliases
[ "def", "visit_Return", "(", "self", ",", "node", ")", ":", "if", "not", "node", ".", "value", ":", "return", "ret_aliases", "=", "self", ".", "visit", "(", "node", ".", "value", ")", "if", "Aliases", ".", "RetId", "in", "self", ".", "aliases", ":", "ret_aliases", "=", "ret_aliases", ".", "union", "(", "self", ".", "aliases", "[", "Aliases", ".", "RetId", "]", ")", "self", ".", "aliases", "[", "Aliases", ".", "RetId", "]", "=", "ret_aliases" ]
A side effect of computing aliases on a Return is that it updates the ``return_alias`` field of current function >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return a') >>> result = pm.gather(Aliases, module) >>> module.body[0].return_alias # doctest: +ELLIPSIS <function ...merge_return_aliases at...> This field is a function that takes as many nodes as the function argument count as input and returns an expression based on these arguments if the function happens to create aliasing between its input and output. In our case: >>> f = module.body[0].return_alias >>> Aliases.dump(f([ast.Name('A', ast.Load(), None), ast.Num(1)])) ['A'] This also works if the relationship between input and output is more complex: >>> module = ast.parse('def foo(a, b): return a or b[0]') >>> result = pm.gather(Aliases, module) >>> f = module.body[0].return_alias >>> List = ast.List([ast.Name('L0', ast.Load(), None)], ast.Load()) >>> Aliases.dump(f([ast.Name('B', ast.Load(), None), List])) ['B', '[L0][0]'] Which actually means that when called with two arguments ``B`` and the single-element list ``[L[0]]``, ``foo`` may returns either the first argument, or the first element of the second argument.
[ "A", "side", "effect", "of", "computing", "aliases", "on", "a", "Return", "is", "that", "it", "updates", "the", "return_alias", "field", "of", "current", "function" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L223-L263
237,844
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_Subscript
def visit_Subscript(self, node): ''' Resulting node alias stores the subscript relationship if we don't know anything about the subscripted node. >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a): return a[0]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) a[0] => ['a[0]'] If we know something about the container, e.g. in case of a list, we can use this information to get more accurate informations: >>> module = ast.parse('def foo(a, b, c): return [a, b][c]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) [a, b][c] => ['a', 'b'] Moreover, in case of a tuple indexed by a constant value, we can further refine the aliasing information: >>> fun = """ ... def f(a, b): return a, b ... def foo(a, b): return f(a, b)[0]""" >>> module = ast.parse(fun) >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) f(a, b)[0] => ['a'] Nothing is done for slices, even if the indices are known :-/ >>> module = ast.parse('def foo(a, b, c): return [a, b, c][1:]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) [a, b, c][1:] => ['<unbound-value>'] ''' if isinstance(node.slice, ast.Index): aliases = set() self.visit(node.slice) value_aliases = self.visit(node.value) for alias in value_aliases: if isinstance(alias, ContainerOf): if isinstance(node.slice.value, ast.Slice): continue if isinstance(node.slice.value, ast.Num): if node.slice.value.n != alias.index: continue # FIXME: what if the index is a slice variable... aliases.add(alias.containee) elif isinstance(getattr(alias, 'ctx', None), ast.Param): aliases.add(ast.Subscript(alias, node.slice, node.ctx)) else: # could be enhanced through better handling of containers aliases = None self.generic_visit(node) return self.add(node, aliases)
python
def visit_Subscript(self, node): ''' Resulting node alias stores the subscript relationship if we don't know anything about the subscripted node. >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a): return a[0]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) a[0] => ['a[0]'] If we know something about the container, e.g. in case of a list, we can use this information to get more accurate informations: >>> module = ast.parse('def foo(a, b, c): return [a, b][c]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) [a, b][c] => ['a', 'b'] Moreover, in case of a tuple indexed by a constant value, we can further refine the aliasing information: >>> fun = """ ... def f(a, b): return a, b ... def foo(a, b): return f(a, b)[0]""" >>> module = ast.parse(fun) >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) f(a, b)[0] => ['a'] Nothing is done for slices, even if the indices are known :-/ >>> module = ast.parse('def foo(a, b, c): return [a, b, c][1:]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) [a, b, c][1:] => ['<unbound-value>'] ''' if isinstance(node.slice, ast.Index): aliases = set() self.visit(node.slice) value_aliases = self.visit(node.value) for alias in value_aliases: if isinstance(alias, ContainerOf): if isinstance(node.slice.value, ast.Slice): continue if isinstance(node.slice.value, ast.Num): if node.slice.value.n != alias.index: continue # FIXME: what if the index is a slice variable... aliases.add(alias.containee) elif isinstance(getattr(alias, 'ctx', None), ast.Param): aliases.add(ast.Subscript(alias, node.slice, node.ctx)) else: # could be enhanced through better handling of containers aliases = None self.generic_visit(node) return self.add(node, aliases)
[ "def", "visit_Subscript", "(", "self", ",", "node", ")", ":", "if", "isinstance", "(", "node", ".", "slice", ",", "ast", ".", "Index", ")", ":", "aliases", "=", "set", "(", ")", "self", ".", "visit", "(", "node", ".", "slice", ")", "value_aliases", "=", "self", ".", "visit", "(", "node", ".", "value", ")", "for", "alias", "in", "value_aliases", ":", "if", "isinstance", "(", "alias", ",", "ContainerOf", ")", ":", "if", "isinstance", "(", "node", ".", "slice", ".", "value", ",", "ast", ".", "Slice", ")", ":", "continue", "if", "isinstance", "(", "node", ".", "slice", ".", "value", ",", "ast", ".", "Num", ")", ":", "if", "node", ".", "slice", ".", "value", ".", "n", "!=", "alias", ".", "index", ":", "continue", "# FIXME: what if the index is a slice variable...", "aliases", ".", "add", "(", "alias", ".", "containee", ")", "elif", "isinstance", "(", "getattr", "(", "alias", ",", "'ctx'", ",", "None", ")", ",", "ast", ".", "Param", ")", ":", "aliases", ".", "add", "(", "ast", ".", "Subscript", "(", "alias", ",", "node", ".", "slice", ",", "node", ".", "ctx", ")", ")", "else", ":", "# could be enhanced through better handling of containers", "aliases", "=", "None", "self", ".", "generic_visit", "(", "node", ")", "return", "self", ".", "add", "(", "node", ",", "aliases", ")" ]
Resulting node alias stores the subscript relationship if we don't know anything about the subscripted node. >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a): return a[0]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) a[0] => ['a[0]'] If we know something about the container, e.g. in case of a list, we can use this information to get more accurate informations: >>> module = ast.parse('def foo(a, b, c): return [a, b][c]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) [a, b][c] => ['a', 'b'] Moreover, in case of a tuple indexed by a constant value, we can further refine the aliasing information: >>> fun = """ ... def f(a, b): return a, b ... def foo(a, b): return f(a, b)[0]""" >>> module = ast.parse(fun) >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) f(a, b)[0] => ['a'] Nothing is done for slices, even if the indices are known :-/ >>> module = ast.parse('def foo(a, b, c): return [a, b, c][1:]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Subscript) [a, b, c][1:] => ['<unbound-value>']
[ "Resulting", "node", "alias", "stores", "the", "subscript", "relationship", "if", "we", "don", "t", "know", "anything", "about", "the", "subscripted", "node", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L388-L445
237,845
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_Tuple
def visit_Tuple(self, node): ''' A tuple is abstracted as an ordered container of its values >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return a, b') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Tuple) (a, b) => ['|[0]=a|', '|[1]=b|'] where the |[i]=id| notation means something that may contain ``id`` at index ``i``. ''' if node.elts: elts_aliases = set() for i, elt in enumerate(node.elts): elt_aliases = self.visit(elt) elts_aliases.update(ContainerOf(alias, i) for alias in elt_aliases) else: elts_aliases = None return self.add(node, elts_aliases)
python
def visit_Tuple(self, node): ''' A tuple is abstracted as an ordered container of its values >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return a, b') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Tuple) (a, b) => ['|[0]=a|', '|[1]=b|'] where the |[i]=id| notation means something that may contain ``id`` at index ``i``. ''' if node.elts: elts_aliases = set() for i, elt in enumerate(node.elts): elt_aliases = self.visit(elt) elts_aliases.update(ContainerOf(alias, i) for alias in elt_aliases) else: elts_aliases = None return self.add(node, elts_aliases)
[ "def", "visit_Tuple", "(", "self", ",", "node", ")", ":", "if", "node", ".", "elts", ":", "elts_aliases", "=", "set", "(", ")", "for", "i", ",", "elt", "in", "enumerate", "(", "node", ".", "elts", ")", ":", "elt_aliases", "=", "self", ".", "visit", "(", "elt", ")", "elts_aliases", ".", "update", "(", "ContainerOf", "(", "alias", ",", "i", ")", "for", "alias", "in", "elt_aliases", ")", "else", ":", "elts_aliases", "=", "None", "return", "self", ".", "add", "(", "node", ",", "elts_aliases", ")" ]
A tuple is abstracted as an ordered container of its values >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return a, b') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Tuple) (a, b) => ['|[0]=a|', '|[1]=b|'] where the |[i]=id| notation means something that may contain ``id`` at index ``i``.
[ "A", "tuple", "is", "abstracted", "as", "an", "ordered", "container", "of", "its", "values" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L463-L485
237,846
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_ListComp
def visit_ListComp(self, node): ''' A comprehension is not abstracted in any way >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return [a for i in b]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.ListComp) [a for i in b] => ['<unbound-value>'] ''' for generator in node.generators: self.visit_comprehension(generator) self.visit(node.elt) return self.add(node)
python
def visit_ListComp(self, node): ''' A comprehension is not abstracted in any way >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return [a for i in b]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.ListComp) [a for i in b] => ['<unbound-value>'] ''' for generator in node.generators: self.visit_comprehension(generator) self.visit(node.elt) return self.add(node)
[ "def", "visit_ListComp", "(", "self", ",", "node", ")", ":", "for", "generator", "in", "node", ".", "generators", ":", "self", ".", "visit_comprehension", "(", "generator", ")", "self", ".", "visit", "(", "node", ".", "elt", ")", "return", "self", ".", "add", "(", "node", ")" ]
A comprehension is not abstracted in any way >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse('def foo(a, b): return [a for i in b]') >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.ListComp) [a for i in b] => ['<unbound-value>']
[ "A", "comprehension", "is", "not", "abstracted", "in", "any", "way" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L493-L507
237,847
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_FunctionDef
def visit_FunctionDef(self, node): ''' Initialise aliasing default value before visiting. Add aliasing values for : - Pythonic - globals declarations - current function arguments ''' self.aliases = IntrinsicAliases.copy() self.aliases.update((f.name, {f}) for f in self.global_declarations.values()) self.aliases.update((arg.id, {arg}) for arg in node.args.args) self.generic_visit(node) if Aliases.RetId in self.aliases: # parametrize the expression def parametrize(exp): # constant(?) or global -> no change if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)): return lambda _: {exp} elif isinstance(exp, ContainerOf): pcontainee = parametrize(exp.containee) index = exp.index return lambda args: { ContainerOf(pc, index) for pc in pcontainee(args) } elif isinstance(exp, ast.Name): try: w = node.args.args.index(exp) def return_alias(args): if w < len(args): return {args[w]} else: return {node.args.defaults[w - len(args)]} return return_alias except ValueError: return lambda _: self.get_unbound_value_set() elif isinstance(exp, ast.Subscript): values = parametrize(exp.value) slices = parametrize(exp.slice) return lambda args: { ast.Subscript(value, slice, ast.Load()) for value in values(args) for slice in slices(args)} else: return lambda _: self.get_unbound_value_set() # this is a little tricky: for each returned alias, # parametrize builds a function that, given a list of args, # returns the alias # then as we may have multiple returned alias, we compute the union # of these returned aliases return_aliases = [parametrize(ret_alias) for ret_alias in self.aliases[Aliases.RetId]] def merge_return_aliases(args): merged_return_aliases = set() for return_alias in return_aliases: merged_return_aliases.update(return_alias(args)) return merged_return_aliases node.return_alias = merge_return_aliases
python
def visit_FunctionDef(self, node): ''' Initialise aliasing default value before visiting. Add aliasing values for : - Pythonic - globals declarations - current function arguments ''' self.aliases = IntrinsicAliases.copy() self.aliases.update((f.name, {f}) for f in self.global_declarations.values()) self.aliases.update((arg.id, {arg}) for arg in node.args.args) self.generic_visit(node) if Aliases.RetId in self.aliases: # parametrize the expression def parametrize(exp): # constant(?) or global -> no change if isinstance(exp, (ast.Index, Intrinsic, ast.FunctionDef)): return lambda _: {exp} elif isinstance(exp, ContainerOf): pcontainee = parametrize(exp.containee) index = exp.index return lambda args: { ContainerOf(pc, index) for pc in pcontainee(args) } elif isinstance(exp, ast.Name): try: w = node.args.args.index(exp) def return_alias(args): if w < len(args): return {args[w]} else: return {node.args.defaults[w - len(args)]} return return_alias except ValueError: return lambda _: self.get_unbound_value_set() elif isinstance(exp, ast.Subscript): values = parametrize(exp.value) slices = parametrize(exp.slice) return lambda args: { ast.Subscript(value, slice, ast.Load()) for value in values(args) for slice in slices(args)} else: return lambda _: self.get_unbound_value_set() # this is a little tricky: for each returned alias, # parametrize builds a function that, given a list of args, # returns the alias # then as we may have multiple returned alias, we compute the union # of these returned aliases return_aliases = [parametrize(ret_alias) for ret_alias in self.aliases[Aliases.RetId]] def merge_return_aliases(args): merged_return_aliases = set() for return_alias in return_aliases: merged_return_aliases.update(return_alias(args)) return merged_return_aliases node.return_alias = merge_return_aliases
[ "def", "visit_FunctionDef", "(", "self", ",", "node", ")", ":", "self", ".", "aliases", "=", "IntrinsicAliases", ".", "copy", "(", ")", "self", ".", "aliases", ".", "update", "(", "(", "f", ".", "name", ",", "{", "f", "}", ")", "for", "f", "in", "self", ".", "global_declarations", ".", "values", "(", ")", ")", "self", ".", "aliases", ".", "update", "(", "(", "arg", ".", "id", ",", "{", "arg", "}", ")", "for", "arg", "in", "node", ".", "args", ".", "args", ")", "self", ".", "generic_visit", "(", "node", ")", "if", "Aliases", ".", "RetId", "in", "self", ".", "aliases", ":", "# parametrize the expression", "def", "parametrize", "(", "exp", ")", ":", "# constant(?) or global -> no change", "if", "isinstance", "(", "exp", ",", "(", "ast", ".", "Index", ",", "Intrinsic", ",", "ast", ".", "FunctionDef", ")", ")", ":", "return", "lambda", "_", ":", "{", "exp", "}", "elif", "isinstance", "(", "exp", ",", "ContainerOf", ")", ":", "pcontainee", "=", "parametrize", "(", "exp", ".", "containee", ")", "index", "=", "exp", ".", "index", "return", "lambda", "args", ":", "{", "ContainerOf", "(", "pc", ",", "index", ")", "for", "pc", "in", "pcontainee", "(", "args", ")", "}", "elif", "isinstance", "(", "exp", ",", "ast", ".", "Name", ")", ":", "try", ":", "w", "=", "node", ".", "args", ".", "args", ".", "index", "(", "exp", ")", "def", "return_alias", "(", "args", ")", ":", "if", "w", "<", "len", "(", "args", ")", ":", "return", "{", "args", "[", "w", "]", "}", "else", ":", "return", "{", "node", ".", "args", ".", "defaults", "[", "w", "-", "len", "(", "args", ")", "]", "}", "return", "return_alias", "except", "ValueError", ":", "return", "lambda", "_", ":", "self", ".", "get_unbound_value_set", "(", ")", "elif", "isinstance", "(", "exp", ",", "ast", ".", "Subscript", ")", ":", "values", "=", "parametrize", "(", "exp", ".", "value", ")", "slices", "=", "parametrize", "(", "exp", ".", "slice", ")", "return", "lambda", "args", ":", "{", "ast", ".", "Subscript", "(", "value", ",", "slice", ",", "ast", ".", "Load", "(", ")", ")", "for", "value", "in", "values", "(", "args", ")", "for", "slice", "in", "slices", "(", "args", ")", "}", "else", ":", "return", "lambda", "_", ":", "self", ".", "get_unbound_value_set", "(", ")", "# this is a little tricky: for each returned alias,", "# parametrize builds a function that, given a list of args,", "# returns the alias", "# then as we may have multiple returned alias, we compute the union", "# of these returned aliases", "return_aliases", "=", "[", "parametrize", "(", "ret_alias", ")", "for", "ret_alias", "in", "self", ".", "aliases", "[", "Aliases", ".", "RetId", "]", "]", "def", "merge_return_aliases", "(", "args", ")", ":", "merged_return_aliases", "=", "set", "(", ")", "for", "return_alias", "in", "return_aliases", ":", "merged_return_aliases", ".", "update", "(", "return_alias", "(", "args", ")", ")", "return", "merged_return_aliases", "node", ".", "return_alias", "=", "merge_return_aliases" ]
Initialise aliasing default value before visiting. Add aliasing values for : - Pythonic - globals declarations - current function arguments
[ "Initialise", "aliasing", "default", "value", "before", "visiting", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L532-L600
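The merge_return_aliases closure built above is what the earlier visit_Return record exposes as node.return_alias. A hedged sketch of calling it directly, reusing the doctest conventions from those records (three-argument gast Name constructor, PassManager('demo')):

import gast as ast
from pythran.passmanager import PassManager
from pythran.analyses import Aliases

module = ast.parse("def foo(a, b): return b")
PassManager("demo").gather(Aliases, module)

# The FunctionDef now carries a callable mapping call arguments to returned aliases.
args = [ast.Name('X', ast.Load(), None), ast.Name('Y', ast.Load(), None)]
print(Aliases.dump(module.body[0].return_alias(args)))  # expected: ['Y']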
237,848
serge-sans-paille/pythran
pythran/analyses/aliases.py
Aliases.visit_For
def visit_For(self, node): ''' For loop creates aliasing between the target and the content of the iterator >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse(""" ... def foo(a): ... for i in a: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|i|'] Not very useful, unless we know something about the iterated container >>> module = ast.parse(""" ... def foo(a, b): ... for i in [a, b]: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|a|', '|b|'] ''' iter_aliases = self.visit(node.iter) if all(isinstance(x, ContainerOf) for x in iter_aliases): target_aliases = set() for iter_alias in iter_aliases: target_aliases.add(iter_alias.containee) else: target_aliases = {node.target} self.add(node.target, target_aliases) self.aliases[node.target.id] = self.result[node.target] self.generic_visit(node) self.generic_visit(node)
python
def visit_For(self, node): ''' For loop creates aliasing between the target and the content of the iterator >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse(""" ... def foo(a): ... for i in a: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|i|'] Not very useful, unless we know something about the iterated container >>> module = ast.parse(""" ... def foo(a, b): ... for i in [a, b]: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|a|', '|b|'] ''' iter_aliases = self.visit(node.iter) if all(isinstance(x, ContainerOf) for x in iter_aliases): target_aliases = set() for iter_alias in iter_aliases: target_aliases.add(iter_alias.containee) else: target_aliases = {node.target} self.add(node.target, target_aliases) self.aliases[node.target.id] = self.result[node.target] self.generic_visit(node) self.generic_visit(node)
[ "def", "visit_For", "(", "self", ",", "node", ")", ":", "iter_aliases", "=", "self", ".", "visit", "(", "node", ".", "iter", ")", "if", "all", "(", "isinstance", "(", "x", ",", "ContainerOf", ")", "for", "x", "in", "iter_aliases", ")", ":", "target_aliases", "=", "set", "(", ")", "for", "iter_alias", "in", "iter_aliases", ":", "target_aliases", ".", "add", "(", "iter_alias", ".", "containee", ")", "else", ":", "target_aliases", "=", "{", "node", ".", "target", "}", "self", ".", "add", "(", "node", ".", "target", ",", "target_aliases", ")", "self", ".", "aliases", "[", "node", ".", "target", ".", "id", "]", "=", "self", ".", "result", "[", "node", ".", "target", "]", "self", ".", "generic_visit", "(", "node", ")", "self", ".", "generic_visit", "(", "node", ")" ]
For loop creates aliasing between the target and the content of the iterator >>> from pythran import passmanager >>> pm = passmanager.PassManager('demo') >>> module = ast.parse(""" ... def foo(a): ... for i in a: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|i|'] Not very useful, unless we know something about the iterated container >>> module = ast.parse(""" ... def foo(a, b): ... for i in [a, b]: ... {i}""") >>> result = pm.gather(Aliases, module) >>> Aliases.dump(result, filter=ast.Set) {i} => ['|a|', '|b|']
[ "For", "loop", "creates", "aliasing", "between", "the", "target", "and", "the", "content", "of", "the", "iterator" ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/aliases.py#L628-L666
237,849
serge-sans-paille/pythran
pythran/analyses/argument_read_once.py
ArgumentReadOnce.prepare
def prepare(self, node): """ Initialise arguments effects as this analysis in inter-procedural. Initialisation done for Pythonic functions and default values set for user defined functions. """ super(ArgumentReadOnce, self).prepare(node) # global functions init for n in self.global_declarations.values(): fe = ArgumentReadOnce.FunctionEffects(n) self.node_to_functioneffect[n] = fe self.result.add(fe) # Pythonic functions init def save_effect(module): """ Recursively save read once effect for Pythonic functions. """ for intr in module.values(): if isinstance(intr, dict): # Submodule case save_effect(intr) else: fe = ArgumentReadOnce.FunctionEffects(intr) self.node_to_functioneffect[intr] = fe self.result.add(fe) if isinstance(intr, intrinsic.Class): # Class case save_effect(intr.fields) for module in MODULES.values(): save_effect(module)
python
def prepare(self, node): """ Initialise arguments effects as this analysis in inter-procedural. Initialisation done for Pythonic functions and default values set for user defined functions. """ super(ArgumentReadOnce, self).prepare(node) # global functions init for n in self.global_declarations.values(): fe = ArgumentReadOnce.FunctionEffects(n) self.node_to_functioneffect[n] = fe self.result.add(fe) # Pythonic functions init def save_effect(module): """ Recursively save read once effect for Pythonic functions. """ for intr in module.values(): if isinstance(intr, dict): # Submodule case save_effect(intr) else: fe = ArgumentReadOnce.FunctionEffects(intr) self.node_to_functioneffect[intr] = fe self.result.add(fe) if isinstance(intr, intrinsic.Class): # Class case save_effect(intr.fields) for module in MODULES.values(): save_effect(module)
[ "def", "prepare", "(", "self", ",", "node", ")", ":", "super", "(", "ArgumentReadOnce", ",", "self", ")", ".", "prepare", "(", "node", ")", "# global functions init", "for", "n", "in", "self", ".", "global_declarations", ".", "values", "(", ")", ":", "fe", "=", "ArgumentReadOnce", ".", "FunctionEffects", "(", "n", ")", "self", ".", "node_to_functioneffect", "[", "n", "]", "=", "fe", "self", ".", "result", ".", "add", "(", "fe", ")", "# Pythonic functions init", "def", "save_effect", "(", "module", ")", ":", "\"\"\" Recursively save read once effect for Pythonic functions. \"\"\"", "for", "intr", "in", "module", ".", "values", "(", ")", ":", "if", "isinstance", "(", "intr", ",", "dict", ")", ":", "# Submodule case", "save_effect", "(", "intr", ")", "else", ":", "fe", "=", "ArgumentReadOnce", ".", "FunctionEffects", "(", "intr", ")", "self", ".", "node_to_functioneffect", "[", "intr", "]", "=", "fe", "self", ".", "result", ".", "add", "(", "fe", ")", "if", "isinstance", "(", "intr", ",", "intrinsic", ".", "Class", ")", ":", "# Class case", "save_effect", "(", "intr", ".", "fields", ")", "for", "module", "in", "MODULES", ".", "values", "(", ")", ":", "save_effect", "(", "module", ")" ]
Initialise argument effects, as this analysis is inter-procedural. Initialisation is done for Pythonic functions, and default values are set for user-defined functions.
[ "Initialise", "arguments", "effects", "as", "this", "analysis", "in", "inter", "-", "procedural", "." ]
7e1b5af2dddfabc50bd2a977f0178be269b349b5
https://github.com/serge-sans-paille/pythran/blob/7e1b5af2dddfabc50bd2a977f0178be269b349b5/pythran/analyses/argument_read_once.py#L60-L88
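The save_effect closure above is a plain recursive walk over the nested MODULES table that registers one effect object per intrinsic leaf. A standalone sketch of that traversal, with an invented module table and a stand-in Effect class:

# Standalone sketch of the recursive registration done by save_effect above.
# The module table and the Effect class are made up for illustration.

class Effect:
    def __init__(self, func):
        self.func = func

def register(module, registry):
    for name, entry in module.items():
        if isinstance(entry, dict):      # submodule: recurse
            register(entry, registry)
        else:                            # intrinsic leaf: record an effect object
            registry[entry] = Effect(entry)

modules = {"math": {"cos": "math.cos", "sin": "math.sin"},
           "os": {"path": {"join": "os.path.join"}}}
registry = {}
register(modules, registry)
print(sorted(registry))   # ['math.cos', 'math.sin', 'os.path.join']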
237,850
astropy/regions
regions/io/ds9/write.py
ds9_objects_to_string
def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to DS9 region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional This overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is 'deg'(degrees) Returns ------- region_string : `str` DS9 region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, ds9_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(ds9_objects_to_string([reg_sky])) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000) """ shapelist = to_shape_list(regions, coordsys) return shapelist.to_ds9(coordsys, fmt, radunit)
python
def ds9_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to DS9 region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional This overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is 'deg'(degrees) Returns ------- region_string : `str` DS9 region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, ds9_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(ds9_objects_to_string([reg_sky])) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000) """ shapelist = to_shape_list(regions, coordsys) return shapelist.to_ds9(coordsys, fmt, radunit)
[ "def", "ds9_objects_to_string", "(", "regions", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "shapelist", "=", "to_shape_list", "(", "regions", ",", "coordsys", ")", "return", "shapelist", ".", "to_ds9", "(", "coordsys", ",", "fmt", ",", "radunit", ")" ]
Converts a `list` of `~regions.Region` to DS9 region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional This overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is 'deg'(degrees) Returns ------- region_string : `str` DS9 region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, ds9_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(ds9_objects_to_string([reg_sky])) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000)
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "DS9", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/write.py#L12-L46
237,851
astropy/regions
regions/io/ds9/write.py
write_ds9
def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to DS9 string and write to file. Parameters ---------- regions : `list` List of `regions.Region` objects filename : `str` Filename in which the string is to be written. coordsys : `str`, optional #TODO Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_ds9 >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_ds9([reg_sky], 'test_write.reg') >>> with open('test_write.reg') as f: ... print(f.read()) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000) """ output = ds9_objects_to_string(regions, coordsys, fmt, radunit) with open(filename, 'w') as fh: fh.write(output)
python
def write_ds9(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to DS9 string and write to file. Parameters ---------- regions : `list` List of `regions.Region` objects filename : `str` Filename in which the string is to be written. coordsys : `str`, optional #TODO Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_ds9 >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_ds9([reg_sky], 'test_write.reg') >>> with open('test_write.reg') as f: ... print(f.read()) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000) """ output = ds9_objects_to_string(regions, coordsys, fmt, radunit) with open(filename, 'w') as fh: fh.write(output)
[ "def", "write_ds9", "(", "regions", ",", "filename", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "output", "=", "ds9_objects_to_string", "(", "regions", ",", "coordsys", ",", "fmt", ",", "radunit", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fh", ":", "fh", ".", "write", "(", "output", ")" ]
Converts a `list` of `~regions.Region` to DS9 string and write to file. Parameters ---------- regions : `list` List of `regions.Region` objects filename : `str` Filename in which the string is to be written. coordsys : `str`, optional #TODO Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_ds9 >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_ds9([reg_sky], 'test_write.reg') >>> with open('test_write.reg') as f: ... print(f.read()) # Region file format: DS9 astropy/regions fk5 circle(1.000007,2.000002,5.000000)
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "DS9", "string", "and", "write", "to", "file", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/write.py#L49-L83
237,852
astropy/regions
regions/io/crtf/write.py
crtf_objects_to_string
def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to CRTF region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Returns ------- region_string : `str` CRTF region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, crtf_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(crtf_objects_to_string([reg_sky])) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg] """ shapelist = to_shape_list(regions, coordsys) return shapelist.to_crtf(coordsys, fmt, radunit)
python
def crtf_objects_to_string(regions, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to CRTF region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Returns ------- region_string : `str` CRTF region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, crtf_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(crtf_objects_to_string([reg_sky])) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg] """ shapelist = to_shape_list(regions, coordsys) return shapelist.to_crtf(coordsys, fmt, radunit)
[ "def", "crtf_objects_to_string", "(", "regions", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "shapelist", "=", "to_shape_list", "(", "regions", ",", "coordsys", ")", "return", "shapelist", ".", "to_crtf", "(", "coordsys", ",", "fmt", ",", "radunit", ")" ]
Converts a `list` of `~regions.Region` to CRTF region string. Parameters ---------- regions : `list` List of `~regions.Region` objects coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate system frame for all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Returns ------- region_string : `str` CRTF region string Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, crtf_objects_to_string >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> print(crtf_objects_to_string([reg_sky])) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg]
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "CRTF", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/write.py#L12-L48
237,853
astropy/regions
regions/io/crtf/write.py
write_crtf
def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to CRTF string and write to file. Parameters ---------- regions : `list` List of `~regions.Region` objects filename : `str` Filename in which the string is to be written. Default is 'new.crtf' coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_crtf >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_crtf([reg_sky], 'test_write.crtf') >>> with open('test_write.crtf') as f: ... print(f.read()) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg] """ output = crtf_objects_to_string(regions, coordsys, fmt, radunit) with open(filename, 'w') as fh: fh.write(output)
python
def write_crtf(regions, filename, coordsys='fk5', fmt='.6f', radunit='deg'): """ Converts a `list` of `~regions.Region` to CRTF string and write to file. Parameters ---------- regions : `list` List of `~regions.Region` objects filename : `str` Filename in which the string is to be written. Default is 'new.crtf' coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_crtf >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_crtf([reg_sky], 'test_write.crtf') >>> with open('test_write.crtf') as f: ... print(f.read()) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg] """ output = crtf_objects_to_string(regions, coordsys, fmt, radunit) with open(filename, 'w') as fh: fh.write(output)
[ "def", "write_crtf", "(", "regions", ",", "filename", ",", "coordsys", "=", "'fk5'", ",", "fmt", "=", "'.6f'", ",", "radunit", "=", "'deg'", ")", ":", "output", "=", "crtf_objects_to_string", "(", "regions", ",", "coordsys", ",", "fmt", ",", "radunit", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fh", ":", "fh", ".", "write", "(", "output", ")" ]
Converts a `list` of `~regions.Region` to CRTF string and write to file. Parameters ---------- regions : `list` List of `~regions.Region` objects filename : `str` Filename in which the string is to be written. Default is 'new.crtf' coordsys : `str`, optional Astropy Coordinate system that overrides the coordinate frames of all regions. Default is 'fk5'. fmt : `str`, optional A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : `str`, optional This denotes the unit of the radius. Default is deg (degrees) Examples -------- >>> from astropy import units as u >>> from astropy.coordinates import SkyCoord >>> from regions import CircleSkyRegion, write_crtf >>> reg_sky = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg) >>> write_crtf([reg_sky], 'test_write.crtf') >>> with open('test_write.crtf') as f: ... print(f.read()) #CRTF global coord=fk5 +circle[[1.000007deg, 2.000002deg], 5.000000deg]
[ "Converts", "a", "list", "of", "~regions", ".", "Region", "to", "CRTF", "string", "and", "write", "to", "file", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/write.py#L51-L86
237,854
astropy/regions
regions/shapes/rectangle.py
RectanglePixelRegion.corners
def corners(self): """ Return the x, y coordinate pairs that define the corners """ corners = [(-self.width/2, -self.height/2), ( self.width/2, -self.height/2), ( self.width/2, self.height/2), (-self.width/2, self.height/2), ] rotmat = [[np.cos(self.angle), np.sin(self.angle)], [-np.sin(self.angle), np.cos(self.angle)]] return np.dot(corners, rotmat) + np.array([self.center.x, self.center.y])
python
def corners(self): """ Return the x, y coordinate pairs that define the corners """ corners = [(-self.width/2, -self.height/2), ( self.width/2, -self.height/2), ( self.width/2, self.height/2), (-self.width/2, self.height/2), ] rotmat = [[np.cos(self.angle), np.sin(self.angle)], [-np.sin(self.angle), np.cos(self.angle)]] return np.dot(corners, rotmat) + np.array([self.center.x, self.center.y])
[ "def", "corners", "(", "self", ")", ":", "corners", "=", "[", "(", "-", "self", ".", "width", "/", "2", ",", "-", "self", ".", "height", "/", "2", ")", ",", "(", "self", ".", "width", "/", "2", ",", "-", "self", ".", "height", "/", "2", ")", ",", "(", "self", ".", "width", "/", "2", ",", "self", ".", "height", "/", "2", ")", ",", "(", "-", "self", ".", "width", "/", "2", ",", "self", ".", "height", "/", "2", ")", ",", "]", "rotmat", "=", "[", "[", "np", ".", "cos", "(", "self", ".", "angle", ")", ",", "np", ".", "sin", "(", "self", ".", "angle", ")", "]", ",", "[", "-", "np", ".", "sin", "(", "self", ".", "angle", ")", ",", "np", ".", "cos", "(", "self", ".", "angle", ")", "]", "]", "return", "np", ".", "dot", "(", "corners", ",", "rotmat", ")", "+", "np", ".", "array", "(", "[", "self", ".", "center", ".", "x", ",", "self", ".", "center", ".", "y", "]", ")" ]
Return the x, y coordinate pairs that define the corners
[ "Return", "the", "x", "y", "coordinate", "pairs", "that", "define", "the", "corners" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/shapes/rectangle.py#L202-L216
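The corner computation rotates the four half-width/half-height offsets and translates them to the centre. A standalone numpy version of the same arithmetic (angle in radians here, whereas the class above uses an astropy Quantity), with arbitrary numbers:

import numpy as np

# Rotate the four corner offsets of a width x height box by `angle`
# (radians) and translate them to (cx, cy), mirroring `corners` above.
def rectangle_corners(cx, cy, width, height, angle):
    offsets = np.array([(-width / 2, -height / 2),
                        ( width / 2, -height / 2),
                        ( width / 2,  height / 2),
                        (-width / 2,  height / 2)])
    rotmat = np.array([[ np.cos(angle), np.sin(angle)],
                       [-np.sin(angle), np.cos(angle)]])
    return offsets @ rotmat + np.array([cx, cy])

print(rectangle_corners(cx=10.0, cy=5.0, width=4.0, height=2.0, angle=0.0))
# [[ 8.  4.] [12.  4.] [12.  6.] [ 8.  6.]]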
237,855
astropy/regions
regions/shapes/rectangle.py
RectanglePixelRegion.to_polygon
def to_polygon(self): """ Return a 4-cornered polygon equivalent to this rectangle """ x,y = self.corners.T vertices = PixCoord(x=x, y=y) return PolygonPixelRegion(vertices=vertices, meta=self.meta, visual=self.visual)
python
def to_polygon(self): """ Return a 4-cornered polygon equivalent to this rectangle """ x,y = self.corners.T vertices = PixCoord(x=x, y=y) return PolygonPixelRegion(vertices=vertices, meta=self.meta, visual=self.visual)
[ "def", "to_polygon", "(", "self", ")", ":", "x", ",", "y", "=", "self", ".", "corners", ".", "T", "vertices", "=", "PixCoord", "(", "x", "=", "x", ",", "y", "=", "y", ")", "return", "PolygonPixelRegion", "(", "vertices", "=", "vertices", ",", "meta", "=", "self", ".", "meta", ",", "visual", "=", "self", ".", "visual", ")" ]
Return a 4-cornered polygon equivalent to this rectangle
[ "Return", "a", "4", "-", "cornered", "polygon", "equivalent", "to", "this", "rectangle" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/shapes/rectangle.py#L218-L225
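A hedged usage sketch of the corners-to-polygon conversion, assuming the regions and astropy packages are installed and that RectanglePixelRegion is constructed from the center/width/height/angle attributes referenced above:

# Hedged usage sketch; constructor keywords follow the attributes used above.
import astropy.units as u
from regions import PixCoord, RectanglePixelRegion

rect = RectanglePixelRegion(center=PixCoord(x=10, y=5),
                            width=4, height=2, angle=30 * u.deg)
poly = rect.to_polygon()          # PolygonPixelRegion with 4 vertices
print(poly.vertices.x, poly.vertices.y)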
237,856
astropy/regions
regions/shapes/rectangle.py
RectanglePixelRegion._lower_left_xy
def _lower_left_xy(self): """ Compute lower left `xy` position. This is used for the conversion to matplotlib in ``as_artist`` Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot """ hw = self.width / 2. hh = self.height / 2. sint = np.sin(self.angle) cost = np.cos(self.angle) dx = (hh * sint) - (hw * cost) dy = -(hh * cost) - (hw * sint) x = self.center.x + dx y = self.center.y + dy return x, y
python
def _lower_left_xy(self): """ Compute lower left `xy` position. This is used for the conversion to matplotlib in ``as_artist`` Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot """ hw = self.width / 2. hh = self.height / 2. sint = np.sin(self.angle) cost = np.cos(self.angle) dx = (hh * sint) - (hw * cost) dy = -(hh * cost) - (hw * sint) x = self.center.x + dx y = self.center.y + dy return x, y
[ "def", "_lower_left_xy", "(", "self", ")", ":", "hw", "=", "self", ".", "width", "/", "2.", "hh", "=", "self", ".", "height", "/", "2.", "sint", "=", "np", ".", "sin", "(", "self", ".", "angle", ")", "cost", "=", "np", ".", "cos", "(", "self", ".", "angle", ")", "dx", "=", "(", "hh", "*", "sint", ")", "-", "(", "hw", "*", "cost", ")", "dy", "=", "-", "(", "hh", "*", "cost", ")", "-", "(", "hw", "*", "sint", ")", "x", "=", "self", ".", "center", ".", "x", "+", "dx", "y", "=", "self", ".", "center", ".", "y", "+", "dy", "return", "x", ",", "y" ]
Compute lower left `xy` position. This is used for the conversion to matplotlib in ``as_artist`` Taken from http://photutils.readthedocs.io/en/latest/_modules/photutils/aperture/rectangle.html#RectangularAperture.plot
[ "Compute", "lower", "left", "xy", "position", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/shapes/rectangle.py#L228-L244
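The lower-left anchor is just the (-width/2, -height/2) corner offset rotated by the angle and translated to the centre, which is what matplotlib's Rectangle patch expects as its xy argument. A standalone check with the same arbitrary numbers as the corner sketch above:

import numpy as np

# The lower-left anchor is the (-w/2, -h/2) offset rotated by `angle`
# (radians) and translated to the centre -- the first row returned by
# rectangle_corners() in the sketch above.
def lower_left_xy(cx, cy, width, height, angle):
    hw, hh = width / 2.0, height / 2.0
    dx = hh * np.sin(angle) - hw * np.cos(angle)
    dy = -hh * np.cos(angle) - hw * np.sin(angle)
    return cx + dx, cy + dy

print(lower_left_xy(10.0, 5.0, 4.0, 2.0, angle=0.0))   # (8.0, 4.0)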
237,857
astropy/regions
regions/core/compound.py
CompoundPixelRegion._make_annulus_path
def _make_annulus_path(patch_inner, patch_outer): """ Defines a matplotlib annulus path from two patches. This preserves the cubic Bezier curves (CURVE4) of the aperture paths. # This is borrowed from photutils aperture. """ import matplotlib.path as mpath path_inner = patch_inner.get_path() transform_inner = patch_inner.get_transform() path_inner = transform_inner.transform_path(path_inner) path_outer = patch_outer.get_path() transform_outer = patch_outer.get_transform() path_outer = transform_outer.transform_path(path_outer) verts_inner = path_inner.vertices[:-1][::-1] verts_inner = np.concatenate((verts_inner, [verts_inner[-1]])) verts = np.vstack((path_outer.vertices, verts_inner)) codes = np.hstack((path_outer.codes, path_inner.codes)) return mpath.Path(verts, codes)
python
def _make_annulus_path(patch_inner, patch_outer): """ Defines a matplotlib annulus path from two patches. This preserves the cubic Bezier curves (CURVE4) of the aperture paths. # This is borrowed from photutils aperture. """ import matplotlib.path as mpath path_inner = patch_inner.get_path() transform_inner = patch_inner.get_transform() path_inner = transform_inner.transform_path(path_inner) path_outer = patch_outer.get_path() transform_outer = patch_outer.get_transform() path_outer = transform_outer.transform_path(path_outer) verts_inner = path_inner.vertices[:-1][::-1] verts_inner = np.concatenate((verts_inner, [verts_inner[-1]])) verts = np.vstack((path_outer.vertices, verts_inner)) codes = np.hstack((path_outer.codes, path_inner.codes)) return mpath.Path(verts, codes)
[ "def", "_make_annulus_path", "(", "patch_inner", ",", "patch_outer", ")", ":", "import", "matplotlib", ".", "path", "as", "mpath", "path_inner", "=", "patch_inner", ".", "get_path", "(", ")", "transform_inner", "=", "patch_inner", ".", "get_transform", "(", ")", "path_inner", "=", "transform_inner", ".", "transform_path", "(", "path_inner", ")", "path_outer", "=", "patch_outer", ".", "get_path", "(", ")", "transform_outer", "=", "patch_outer", ".", "get_transform", "(", ")", "path_outer", "=", "transform_outer", ".", "transform_path", "(", "path_outer", ")", "verts_inner", "=", "path_inner", ".", "vertices", "[", ":", "-", "1", "]", "[", ":", ":", "-", "1", "]", "verts_inner", "=", "np", ".", "concatenate", "(", "(", "verts_inner", ",", "[", "verts_inner", "[", "-", "1", "]", "]", ")", ")", "verts", "=", "np", ".", "vstack", "(", "(", "path_outer", ".", "vertices", ",", "verts_inner", ")", ")", "codes", "=", "np", ".", "hstack", "(", "(", "path_outer", ".", "codes", ",", "path_inner", ".", "codes", ")", ")", "return", "mpath", ".", "Path", "(", "verts", ",", "codes", ")" ]
Defines a matplotlib annulus path from two patches. This preserves the cubic Bezier curves (CURVE4) of the aperture paths. # This is borrowed from photutils aperture.
[ "Defines", "a", "matplotlib", "annulus", "path", "from", "two", "patches", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/compound.py#L104-L130
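The annulus path is the outer patch's path followed by the inner patch's vertices in reverse order, so matplotlib fills the ring between the two boundaries. A hedged matplotlib sketch that applies the same recipe to two Circle patches; only standard matplotlib calls are used and the radii are arbitrary:

import numpy as np
import matplotlib.path as mpath
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt

def annulus_path(patch_inner, patch_outer):
    # Same recipe as above: outer vertices followed by the reversed
    # inner vertices, with the code arrays concatenated.
    inner = patch_inner.get_transform().transform_path(patch_inner.get_path())
    outer = patch_outer.get_transform().transform_path(patch_outer.get_path())
    verts_inner = inner.vertices[:-1][::-1]
    verts_inner = np.concatenate((verts_inner, [verts_inner[-1]]))
    verts = np.vstack((outer.vertices, verts_inner))
    codes = np.hstack((outer.codes, inner.codes))
    return mpath.Path(verts, codes)

inner = mpatches.Circle((0, 0), radius=1.0)
outer = mpatches.Circle((0, 0), radius=2.0)
ring = mpatches.PathPatch(annulus_path(inner, outer), facecolor='lightgray')
fig, ax = plt.subplots()
ax.add_patch(ring)
ax.set_xlim(-3, 3)
ax.set_ylim(-3, 3)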
237,858
astropy/regions
regions/io/fits/read.py
read_fits_region
def read_fits_region(filename, errors='strict'): """ Reads a FITS region file and scans for any fits regions table and converts them into `Region` objects. Parameters ---------- filename : str The file path errors : ``warn``, ``ignore``, ``strict`` The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `FITSRegionParserError`. ``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : list Python list of `regions.Region` objects. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from regions import read_fits_region >>> file_read = get_pkg_data_filename('data/region.fits', ... package='regions.io.fits.tests') >>> regions = read_fits_region(file_read) """ regions = [] hdul = fits.open(filename) for hdu in hdul: if hdu.name == 'REGION': table = Table.read(hdu) wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel']) regions_list = FITSRegionParser(table, errors).shapes.to_regions() for reg in regions_list: regions.append(reg.to_sky(wcs)) return regions
python
def read_fits_region(filename, errors='strict'): """ Reads a FITS region file and scans for any fits regions table and converts them into `Region` objects. Parameters ---------- filename : str The file path errors : ``warn``, ``ignore``, ``strict`` The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `FITSRegionParserError`. ``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : list Python list of `regions.Region` objects. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from regions import read_fits_region >>> file_read = get_pkg_data_filename('data/region.fits', ... package='regions.io.fits.tests') >>> regions = read_fits_region(file_read) """ regions = [] hdul = fits.open(filename) for hdu in hdul: if hdu.name == 'REGION': table = Table.read(hdu) wcs = WCS(hdu.header, keysel=['image', 'binary', 'pixel']) regions_list = FITSRegionParser(table, errors).shapes.to_regions() for reg in regions_list: regions.append(reg.to_sky(wcs)) return regions
[ "def", "read_fits_region", "(", "filename", ",", "errors", "=", "'strict'", ")", ":", "regions", "=", "[", "]", "hdul", "=", "fits", ".", "open", "(", "filename", ")", "for", "hdu", "in", "hdul", ":", "if", "hdu", ".", "name", "==", "'REGION'", ":", "table", "=", "Table", ".", "read", "(", "hdu", ")", "wcs", "=", "WCS", "(", "hdu", ".", "header", ",", "keysel", "=", "[", "'image'", ",", "'binary'", ",", "'pixel'", "]", ")", "regions_list", "=", "FITSRegionParser", "(", "table", ",", "errors", ")", ".", "shapes", ".", "to_regions", "(", ")", "for", "reg", "in", "regions_list", ":", "regions", ".", "append", "(", "reg", ".", "to_sky", "(", "wcs", ")", ")", "return", "regions" ]
Reads a FITS region file and scans for any fits regions table and converts them into `Region` objects. Parameters ---------- filename : str The file path errors : ``warn``, ``ignore``, ``strict`` The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `FITSRegionParserError`. ``warn`` will raise a `FITSRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : list Python list of `regions.Region` objects. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from regions import read_fits_region >>> file_read = get_pkg_data_filename('data/region.fits', ... package='regions.io.fits.tests') >>> regions = read_fits_region(file_read)
[ "Reads", "a", "FITS", "region", "file", "and", "scans", "for", "any", "fits", "regions", "table", "and", "converts", "them", "into", "Region", "objects", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/fits/read.py#L228-L270
237,859
astropy/regions
regions/io/core.py
to_shape_list
def to_shape_list(region_list, coordinate_system='fk5'): """ Converts a list of regions into a `regions.ShapeList` object. Parameters ---------- region_list: python list Lists of `regions.Region` objects format_type: str ('DS9' or 'CRTF') The format type of the Shape object. Default is 'DS9'. coordinate_system: str The astropy coordinate system frame in which all the coordinates present in the `region_list` will be converted. Default is 'fk5'. Returns ------- shape_list: `regions.ShapeList` object list of `regions.Shape` objects. """ shape_list = ShapeList() for region in region_list: coord = [] if isinstance(region, SkyRegion): reg_type = region.__class__.__name__[:-9].lower() else: reg_type = region.__class__.__name__[:-11].lower() for val in regions_attributes[reg_type]: coord.append(getattr(region, val)) if reg_type == 'polygon': coord = [x for x in region.vertices] if coordinate_system: coordsys = coordinate_system else: if isinstance(region, SkyRegion): coordsys = coord[0].name else: coordsys = 'image' frame = coordinates.frame_transform_graph.lookup_name(coordsys) new_coord = [] for val in coord: if isinstance(val, Angle) or isinstance(val, u.Quantity) or isinstance(val, numbers.Number): new_coord.append(val) elif isinstance(val, PixCoord): new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled)) new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled)) else: new_coord.append(Angle(val.transform_to(frame).spherical.lon)) new_coord.append(Angle(val.transform_to(frame).spherical.lat)) meta = dict(region.meta) meta.update(region.visual) if reg_type == 'text': meta['text'] = meta.get('text', meta.pop('label', '')) include = region.meta.pop('include', True) shape_list.append(Shape(coordsys, reg_type, new_coord, meta, False, include)) return shape_list
python
def to_shape_list(region_list, coordinate_system='fk5'): """ Converts a list of regions into a `regions.ShapeList` object. Parameters ---------- region_list: python list Lists of `regions.Region` objects format_type: str ('DS9' or 'CRTF') The format type of the Shape object. Default is 'DS9'. coordinate_system: str The astropy coordinate system frame in which all the coordinates present in the `region_list` will be converted. Default is 'fk5'. Returns ------- shape_list: `regions.ShapeList` object list of `regions.Shape` objects. """ shape_list = ShapeList() for region in region_list: coord = [] if isinstance(region, SkyRegion): reg_type = region.__class__.__name__[:-9].lower() else: reg_type = region.__class__.__name__[:-11].lower() for val in regions_attributes[reg_type]: coord.append(getattr(region, val)) if reg_type == 'polygon': coord = [x for x in region.vertices] if coordinate_system: coordsys = coordinate_system else: if isinstance(region, SkyRegion): coordsys = coord[0].name else: coordsys = 'image' frame = coordinates.frame_transform_graph.lookup_name(coordsys) new_coord = [] for val in coord: if isinstance(val, Angle) or isinstance(val, u.Quantity) or isinstance(val, numbers.Number): new_coord.append(val) elif isinstance(val, PixCoord): new_coord.append(u.Quantity(val.x, u.dimensionless_unscaled)) new_coord.append(u.Quantity(val.y, u.dimensionless_unscaled)) else: new_coord.append(Angle(val.transform_to(frame).spherical.lon)) new_coord.append(Angle(val.transform_to(frame).spherical.lat)) meta = dict(region.meta) meta.update(region.visual) if reg_type == 'text': meta['text'] = meta.get('text', meta.pop('label', '')) include = region.meta.pop('include', True) shape_list.append(Shape(coordsys, reg_type, new_coord, meta, False, include)) return shape_list
[ "def", "to_shape_list", "(", "region_list", ",", "coordinate_system", "=", "'fk5'", ")", ":", "shape_list", "=", "ShapeList", "(", ")", "for", "region", "in", "region_list", ":", "coord", "=", "[", "]", "if", "isinstance", "(", "region", ",", "SkyRegion", ")", ":", "reg_type", "=", "region", ".", "__class__", ".", "__name__", "[", ":", "-", "9", "]", ".", "lower", "(", ")", "else", ":", "reg_type", "=", "region", ".", "__class__", ".", "__name__", "[", ":", "-", "11", "]", ".", "lower", "(", ")", "for", "val", "in", "regions_attributes", "[", "reg_type", "]", ":", "coord", ".", "append", "(", "getattr", "(", "region", ",", "val", ")", ")", "if", "reg_type", "==", "'polygon'", ":", "coord", "=", "[", "x", "for", "x", "in", "region", ".", "vertices", "]", "if", "coordinate_system", ":", "coordsys", "=", "coordinate_system", "else", ":", "if", "isinstance", "(", "region", ",", "SkyRegion", ")", ":", "coordsys", "=", "coord", "[", "0", "]", ".", "name", "else", ":", "coordsys", "=", "'image'", "frame", "=", "coordinates", ".", "frame_transform_graph", ".", "lookup_name", "(", "coordsys", ")", "new_coord", "=", "[", "]", "for", "val", "in", "coord", ":", "if", "isinstance", "(", "val", ",", "Angle", ")", "or", "isinstance", "(", "val", ",", "u", ".", "Quantity", ")", "or", "isinstance", "(", "val", ",", "numbers", ".", "Number", ")", ":", "new_coord", ".", "append", "(", "val", ")", "elif", "isinstance", "(", "val", ",", "PixCoord", ")", ":", "new_coord", ".", "append", "(", "u", ".", "Quantity", "(", "val", ".", "x", ",", "u", ".", "dimensionless_unscaled", ")", ")", "new_coord", ".", "append", "(", "u", ".", "Quantity", "(", "val", ".", "y", ",", "u", ".", "dimensionless_unscaled", ")", ")", "else", ":", "new_coord", ".", "append", "(", "Angle", "(", "val", ".", "transform_to", "(", "frame", ")", ".", "spherical", ".", "lon", ")", ")", "new_coord", ".", "append", "(", "Angle", "(", "val", ".", "transform_to", "(", "frame", ")", ".", "spherical", ".", "lat", ")", ")", "meta", "=", "dict", "(", "region", ".", "meta", ")", "meta", ".", "update", "(", "region", ".", "visual", ")", "if", "reg_type", "==", "'text'", ":", "meta", "[", "'text'", "]", "=", "meta", ".", "get", "(", "'text'", ",", "meta", ".", "pop", "(", "'label'", ",", "''", ")", ")", "include", "=", "region", ".", "meta", ".", "pop", "(", "'include'", ",", "True", ")", "shape_list", ".", "append", "(", "Shape", "(", "coordsys", ",", "reg_type", ",", "new_coord", ",", "meta", ",", "False", ",", "include", ")", ")", "return", "shape_list" ]
Converts a list of regions into a `regions.ShapeList` object. Parameters ---------- region_list: python list Lists of `regions.Region` objects format_type: str ('DS9' or 'CRTF') The format type of the Shape object. Default is 'DS9'. coordinate_system: str The astropy coordinate system frame in which all the coordinates present in the `region_list` will be converted. Default is 'fk5'. Returns ------- shape_list: `regions.ShapeList` object list of `regions.Shape` objects.
[ "Converts", "a", "list", "of", "regions", "into", "a", "regions", ".", "ShapeList", "object", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L670-L738
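A hedged usage sketch of to_shape_list(); the import path is taken from this record's path field (regions/io/core.py) and may differ between package versions, and the printed attributes are the ones checked by Shape.check_ds9/check_crtf further down:

import astropy.units as u
from astropy.coordinates import SkyCoord
from regions import CircleSkyRegion
from regions.io.core import to_shape_list   # path taken from this record; may vary

reg = CircleSkyRegion(SkyCoord(1 * u.deg, 2 * u.deg), 5 * u.deg)
shapes = to_shape_list([reg], coordinate_system='fk5')
print(shapes[0].region_type, shapes[0].coordsys)   # circle fk5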
237,860
astropy/regions
regions/io/core.py
to_ds9_meta
def to_ds9_meta(shape_meta): """ Makes the meta data DS9 compatible by filtering and mapping the valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Shape` object Returns ------- meta : dict DS9 compatible meta dictionary """ # meta keys allowed in DS9. valid_keys = ['symbol', 'include', 'tag', 'line', 'comment', 'name', 'select', 'highlite', 'fixed', 'label', 'text', 'edit', 'move', 'rotate', 'delete', 'source', 'background'] # visual keys allowed in DS9 valid_keys += ['color', 'dash', 'linewidth', 'font', 'dashlist', 'fill', 'textangle', 'symsize'] # mapped to actual names in DS9 key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'} meta = _to_io_meta(shape_meta, valid_keys, key_mappings) if 'font' in meta: meta['font'] += " {0} {1} {2}".format(shape_meta.get('fontsize', 12), shape_meta.get('fontstyle', 'normal'), shape_meta.get('fontweight', 'roman')) return meta
python
def to_ds9_meta(shape_meta): """ Makes the meta data DS9 compatible by filtering and mapping the valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Shape` object Returns ------- meta : dict DS9 compatible meta dictionary """ # meta keys allowed in DS9. valid_keys = ['symbol', 'include', 'tag', 'line', 'comment', 'name', 'select', 'highlite', 'fixed', 'label', 'text', 'edit', 'move', 'rotate', 'delete', 'source', 'background'] # visual keys allowed in DS9 valid_keys += ['color', 'dash', 'linewidth', 'font', 'dashlist', 'fill', 'textangle', 'symsize'] # mapped to actual names in DS9 key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'} meta = _to_io_meta(shape_meta, valid_keys, key_mappings) if 'font' in meta: meta['font'] += " {0} {1} {2}".format(shape_meta.get('fontsize', 12), shape_meta.get('fontstyle', 'normal'), shape_meta.get('fontweight', 'roman')) return meta
[ "def", "to_ds9_meta", "(", "shape_meta", ")", ":", "# meta keys allowed in DS9.", "valid_keys", "=", "[", "'symbol'", ",", "'include'", ",", "'tag'", ",", "'line'", ",", "'comment'", ",", "'name'", ",", "'select'", ",", "'highlite'", ",", "'fixed'", ",", "'label'", ",", "'text'", ",", "'edit'", ",", "'move'", ",", "'rotate'", ",", "'delete'", ",", "'source'", ",", "'background'", "]", "# visual keys allowed in DS9", "valid_keys", "+=", "[", "'color'", ",", "'dash'", ",", "'linewidth'", ",", "'font'", ",", "'dashlist'", ",", "'fill'", ",", "'textangle'", ",", "'symsize'", "]", "# mapped to actual names in DS9", "key_mappings", "=", "{", "'symbol'", ":", "'point'", ",", "'linewidth'", ":", "'width'", ",", "'label'", ":", "'text'", "}", "meta", "=", "_to_io_meta", "(", "shape_meta", ",", "valid_keys", ",", "key_mappings", ")", "if", "'font'", "in", "meta", ":", "meta", "[", "'font'", "]", "+=", "\" {0} {1} {2}\"", ".", "format", "(", "shape_meta", ".", "get", "(", "'fontsize'", ",", "12", ")", ",", "shape_meta", ".", "get", "(", "'fontstyle'", ",", "'normal'", ")", ",", "shape_meta", ".", "get", "(", "'fontweight'", ",", "'roman'", ")", ")", "return", "meta" ]
Makes the meta data DS9 compatible by filtering and mapping the valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Shape` object Returns ------- meta : dict DS9 compatible meta dictionary
[ "Makes", "the", "meta", "data", "DS9", "compatible", "by", "filtering", "and", "mapping", "the", "valid", "keys" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L741-L775
237,861
astropy/regions
regions/io/core.py
_to_io_meta
def _to_io_meta(shape_meta, valid_keys, key_mappings): """ This is used to make meta data compatible with a specific io by filtering and mapping to it's valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Region` object valid_keys : python list Contains all the valid keys of a particular file format. key_mappings : python dict Maps to the actual name of the key in the format. Returns ------- meta : dict io compatible meta dictionary according to valid_keys and key_mappings """ meta = dict() for key in shape_meta: if key in valid_keys: meta[key_mappings.get(key, key)] = shape_meta[key] return meta
python
def _to_io_meta(shape_meta, valid_keys, key_mappings): """ This is used to make meta data compatible with a specific io by filtering and mapping to it's valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Region` object valid_keys : python list Contains all the valid keys of a particular file format. key_mappings : python dict Maps to the actual name of the key in the format. Returns ------- meta : dict io compatible meta dictionary according to valid_keys and key_mappings """ meta = dict() for key in shape_meta: if key in valid_keys: meta[key_mappings.get(key, key)] = shape_meta[key] return meta
[ "def", "_to_io_meta", "(", "shape_meta", ",", "valid_keys", ",", "key_mappings", ")", ":", "meta", "=", "dict", "(", ")", "for", "key", "in", "shape_meta", ":", "if", "key", "in", "valid_keys", ":", "meta", "[", "key_mappings", ".", "get", "(", "key", ",", "key", ")", "]", "=", "shape_meta", "[", "key", "]", "return", "meta" ]
This is used to make meta data compatible with a specific io by filtering and mapping to it's valid keys Parameters ---------- shape_meta: dict meta attribute of a `regions.Region` object valid_keys : python list Contains all the valid keys of a particular file format. key_mappings : python dict Maps to the actual name of the key in the format. Returns ------- meta : dict io compatible meta dictionary according to valid_keys and key_mappings
[ "This", "is", "used", "to", "make", "meta", "data", "compatible", "with", "a", "specific", "io", "by", "filtering", "and", "mapping", "to", "it", "s", "valid", "keys" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L809-L835
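Both helpers above reduce to a filter-and-rename over the metadata dict. A standalone sketch using a subset of the DS9 key mapping from to_ds9_meta(); the input metadata values are invented:

# Standalone sketch of the filter-and-rename step shared by to_ds9_meta()
# and _to_io_meta() above; the input metadata values are invented.
def filter_meta(shape_meta, valid_keys, key_mappings):
    return {key_mappings.get(k, k): v for k, v in shape_meta.items()
            if k in valid_keys}

valid_keys = ['color', 'linewidth', 'label', 'symbol']
key_mappings = {'symbol': 'point', 'linewidth': 'width', 'label': 'text'}

meta = {'color': 'red', 'linewidth': 2, 'label': 'source A', 'zorder': 5}
print(filter_meta(meta, valid_keys, key_mappings))
# {'color': 'red', 'width': 2, 'text': 'source A'}   ('zorder' is dropped)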
237,862
astropy/regions
regions/io/core.py
Shape.convert_coords
def convert_coords(self): """ Process list of coordinates This mainly searches for tuple of coordinates in the coordinate list and creates a SkyCoord or PixCoord object from them if appropriate for a given region type. This involves again some coordinate transformation, so this step could be moved to the parsing process """ if self.coordsys in ['image', 'physical']: coords = self._convert_pix_coords() else: coords = self._convert_sky_coords() if self.region_type == 'line': coords = [coords[0][0], coords[0][1]] if self.region_type == 'text': coords.append(self.meta['text']) return coords
python
def convert_coords(self): """ Process list of coordinates This mainly searches for tuple of coordinates in the coordinate list and creates a SkyCoord or PixCoord object from them if appropriate for a given region type. This involves again some coordinate transformation, so this step could be moved to the parsing process """ if self.coordsys in ['image', 'physical']: coords = self._convert_pix_coords() else: coords = self._convert_sky_coords() if self.region_type == 'line': coords = [coords[0][0], coords[0][1]] if self.region_type == 'text': coords.append(self.meta['text']) return coords
[ "def", "convert_coords", "(", "self", ")", ":", "if", "self", ".", "coordsys", "in", "[", "'image'", ",", "'physical'", "]", ":", "coords", "=", "self", ".", "_convert_pix_coords", "(", ")", "else", ":", "coords", "=", "self", ".", "_convert_sky_coords", "(", ")", "if", "self", ".", "region_type", "==", "'line'", ":", "coords", "=", "[", "coords", "[", "0", "]", "[", "0", "]", ",", "coords", "[", "0", "]", "[", "1", "]", "]", "if", "self", ".", "region_type", "==", "'text'", ":", "coords", ".", "append", "(", "self", ".", "meta", "[", "'text'", "]", ")", "return", "coords" ]
Process list of coordinates This mainly searches for tuple of coordinates in the coordinate list and creates a SkyCoord or PixCoord object from them if appropriate for a given region type. This involves again some coordinate transformation, so this step could be moved to the parsing process
[ "Process", "list", "of", "coordinates" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L527-L547
237,863
astropy/regions
regions/io/core.py
Shape._convert_sky_coords
def _convert_sky_coords(self): """ Convert to sky coordinates """ parsed_angles = [(x, y) for x, y in zip(self.coord[:-1:2], self.coord[1::2]) if (isinstance(x, coordinates.Angle) and isinstance(y, coordinates.Angle)) ] frame = coordinates.frame_transform_graph.lookup_name(self.coordsys) lon, lat = zip(*parsed_angles) if hasattr(lon, '__len__') and hasattr(lat, '__len__') and len(lon) == 1 and len(lat) == 1: # force entries to be scalar if they are length-1 lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0]) else: # otherwise, they are vector quantities lon, lat = u.Quantity(lon), u.Quantity(lat) sphcoords = coordinates.UnitSphericalRepresentation(lon, lat) coords = [SkyCoord(frame(sphcoords))] if self.region_type != 'polygon': coords += self.coord[len(coords * 2):] return coords
python
def _convert_sky_coords(self): """ Convert to sky coordinates """ parsed_angles = [(x, y) for x, y in zip(self.coord[:-1:2], self.coord[1::2]) if (isinstance(x, coordinates.Angle) and isinstance(y, coordinates.Angle)) ] frame = coordinates.frame_transform_graph.lookup_name(self.coordsys) lon, lat = zip(*parsed_angles) if hasattr(lon, '__len__') and hasattr(lat, '__len__') and len(lon) == 1 and len(lat) == 1: # force entries to be scalar if they are length-1 lon, lat = u.Quantity(lon[0]), u.Quantity(lat[0]) else: # otherwise, they are vector quantities lon, lat = u.Quantity(lon), u.Quantity(lat) sphcoords = coordinates.UnitSphericalRepresentation(lon, lat) coords = [SkyCoord(frame(sphcoords))] if self.region_type != 'polygon': coords += self.coord[len(coords * 2):] return coords
[ "def", "_convert_sky_coords", "(", "self", ")", ":", "parsed_angles", "=", "[", "(", "x", ",", "y", ")", "for", "x", ",", "y", "in", "zip", "(", "self", ".", "coord", "[", ":", "-", "1", ":", "2", "]", ",", "self", ".", "coord", "[", "1", ":", ":", "2", "]", ")", "if", "(", "isinstance", "(", "x", ",", "coordinates", ".", "Angle", ")", "and", "isinstance", "(", "y", ",", "coordinates", ".", "Angle", ")", ")", "]", "frame", "=", "coordinates", ".", "frame_transform_graph", ".", "lookup_name", "(", "self", ".", "coordsys", ")", "lon", ",", "lat", "=", "zip", "(", "*", "parsed_angles", ")", "if", "hasattr", "(", "lon", ",", "'__len__'", ")", "and", "hasattr", "(", "lat", ",", "'__len__'", ")", "and", "len", "(", "lon", ")", "==", "1", "and", "len", "(", "lat", ")", "==", "1", ":", "# force entries to be scalar if they are length-1", "lon", ",", "lat", "=", "u", ".", "Quantity", "(", "lon", "[", "0", "]", ")", ",", "u", ".", "Quantity", "(", "lat", "[", "0", "]", ")", "else", ":", "# otherwise, they are vector quantities", "lon", ",", "lat", "=", "u", ".", "Quantity", "(", "lon", ")", ",", "u", ".", "Quantity", "(", "lat", ")", "sphcoords", "=", "coordinates", ".", "UnitSphericalRepresentation", "(", "lon", ",", "lat", ")", "coords", "=", "[", "SkyCoord", "(", "frame", "(", "sphcoords", ")", ")", "]", "if", "self", ".", "region_type", "!=", "'polygon'", ":", "coords", "+=", "self", ".", "coord", "[", "len", "(", "coords", "*", "2", ")", ":", "]", "return", "coords" ]
Convert to sky coordinates
[ "Convert", "to", "sky", "coordinates" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L549-L572
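The sky conversion pairs successive (lon, lat) Angle entries from the flat coordinate list into a single SkyCoord and leaves trailing scalars such as radii untouched. A hedged astropy sketch of that pairing for a circle's coordinate list; the values are invented:

from astropy import units as u
from astropy.coordinates import Angle, SkyCoord

# Flat coordinate list as parsed for a circle: lon, lat, then the radius.
coord = [Angle(1, 'deg'), Angle(2, 'deg'), 5 * u.deg]

# Pair up (lon, lat) entries exactly as the slicing above does.
angles = [(x, y) for x, y in zip(coord[:-1:2], coord[1::2])
          if isinstance(x, Angle) and isinstance(y, Angle)]
lon, lat = zip(*angles)
center = SkyCoord(u.Quantity(lon[0]), u.Quantity(lat[0]), frame='fk5')
radius = coord[2]
print(center, radius)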
237,864
astropy/regions
regions/io/core.py
Shape._convert_pix_coords
def _convert_pix_coords(self): """ Convert to pixel coordinates, `regions.PixCoord` """ if self.region_type in ['polygon', 'line']: # have to special-case polygon in the phys coord case # b/c can't typecheck when iterating as in sky coord case coords = [PixCoord(self.coord[0::2], self.coord[1::2])] else: temp = [_.value for _ in self.coord] coord = PixCoord(temp[0], temp[1]) coords = [coord] + temp[2:] # The angle remains as a quantity object. # Modulus check makes sure that it works for ellipse/rectangle annulus if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0: coords[-1] = self.coord[-1] return coords
python
def _convert_pix_coords(self): """ Convert to pixel coordinates, `regions.PixCoord` """ if self.region_type in ['polygon', 'line']: # have to special-case polygon in the phys coord case # b/c can't typecheck when iterating as in sky coord case coords = [PixCoord(self.coord[0::2], self.coord[1::2])] else: temp = [_.value for _ in self.coord] coord = PixCoord(temp[0], temp[1]) coords = [coord] + temp[2:] # The angle remains as a quantity object. # Modulus check makes sure that it works for ellipse/rectangle annulus if self.region_type in ['ellipse', 'rectangle'] and len(coords) % 2 == 0: coords[-1] = self.coord[-1] return coords
[ "def", "_convert_pix_coords", "(", "self", ")", ":", "if", "self", ".", "region_type", "in", "[", "'polygon'", ",", "'line'", "]", ":", "# have to special-case polygon in the phys coord case", "# b/c can't typecheck when iterating as in sky coord case", "coords", "=", "[", "PixCoord", "(", "self", ".", "coord", "[", "0", ":", ":", "2", "]", ",", "self", ".", "coord", "[", "1", ":", ":", "2", "]", ")", "]", "else", ":", "temp", "=", "[", "_", ".", "value", "for", "_", "in", "self", ".", "coord", "]", "coord", "=", "PixCoord", "(", "temp", "[", "0", "]", ",", "temp", "[", "1", "]", ")", "coords", "=", "[", "coord", "]", "+", "temp", "[", "2", ":", "]", "# The angle remains as a quantity object.", "# Modulus check makes sure that it works for ellipse/rectangle annulus", "if", "self", ".", "region_type", "in", "[", "'ellipse'", ",", "'rectangle'", "]", "and", "len", "(", "coords", ")", "%", "2", "==", "0", ":", "coords", "[", "-", "1", "]", "=", "self", ".", "coord", "[", "-", "1", "]", "return", "coords" ]
Convert to pixel coordinates, `regions.PixCoord`
[ "Convert", "to", "pixel", "coordinates", "regions", ".", "PixCoord" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L574-L592
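For polygons and lines the flat coordinate list alternates x and y values, so the even/odd slicing above recovers the two vertex arrays. A tiny standalone illustration with plain floats:

# The flat coordinate list for a polygon alternates x and y values;
# even/odd slicing recovers the two vertex arrays, as in the code above.
flat = [1.0, 2.0, 4.0, 2.5, 3.0, 6.0]    # x1, y1, x2, y2, x3, y3
xs, ys = flat[0::2], flat[1::2]
print(xs)   # [1.0, 4.0, 3.0]
print(ys)   # [2.0, 2.5, 6.0]
# With the `regions` package these arrays would feed PixCoord(xs, ys).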
237,865
astropy/regions
regions/io/core.py
Shape.to_region
def to_region(self): """ Converts to region, ``regions.Region`` object """ coords = self.convert_coords() log.debug(coords) viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize', 'symbol', 'symsize', 'fontsize', 'fontstyle', 'usetex', 'labelpos', 'labeloff', 'linewidth', 'linestyle', 'point', 'textangle', 'fontweight'] if isinstance(coords[0], SkyCoord): reg = self.shape_to_sky_region[self.region_type](*coords) elif isinstance(coords[0], PixCoord): reg = self.shape_to_pixel_region[self.region_type](*coords) else: self._raise_error("No central coordinate") reg.visual = RegionVisual() reg.meta = RegionMeta() # both 'text' and 'label' should be set to the same value, where we # default to the 'text' value since that is the one used by ds9 regions label = self.meta.get('text', self.meta.get('label', "")) if label != '': reg.meta['label'] = label for key in self.meta: if key in viz_keywords: reg.visual[key] = self.meta[key] else: reg.meta[key] = self.meta[key] reg.meta['include'] = self.include return reg
python
def to_region(self): """ Converts to region, ``regions.Region`` object """ coords = self.convert_coords() log.debug(coords) viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize', 'symbol', 'symsize', 'fontsize', 'fontstyle', 'usetex', 'labelpos', 'labeloff', 'linewidth', 'linestyle', 'point', 'textangle', 'fontweight'] if isinstance(coords[0], SkyCoord): reg = self.shape_to_sky_region[self.region_type](*coords) elif isinstance(coords[0], PixCoord): reg = self.shape_to_pixel_region[self.region_type](*coords) else: self._raise_error("No central coordinate") reg.visual = RegionVisual() reg.meta = RegionMeta() # both 'text' and 'label' should be set to the same value, where we # default to the 'text' value since that is the one used by ds9 regions label = self.meta.get('text', self.meta.get('label', "")) if label != '': reg.meta['label'] = label for key in self.meta: if key in viz_keywords: reg.visual[key] = self.meta[key] else: reg.meta[key] = self.meta[key] reg.meta['include'] = self.include return reg
[ "def", "to_region", "(", "self", ")", ":", "coords", "=", "self", ".", "convert_coords", "(", ")", "log", ".", "debug", "(", "coords", ")", "viz_keywords", "=", "[", "'color'", ",", "'dash'", ",", "'dashlist'", ",", "'width'", ",", "'font'", ",", "'symsize'", ",", "'symbol'", ",", "'symsize'", ",", "'fontsize'", ",", "'fontstyle'", ",", "'usetex'", ",", "'labelpos'", ",", "'labeloff'", ",", "'linewidth'", ",", "'linestyle'", ",", "'point'", ",", "'textangle'", ",", "'fontweight'", "]", "if", "isinstance", "(", "coords", "[", "0", "]", ",", "SkyCoord", ")", ":", "reg", "=", "self", ".", "shape_to_sky_region", "[", "self", ".", "region_type", "]", "(", "*", "coords", ")", "elif", "isinstance", "(", "coords", "[", "0", "]", ",", "PixCoord", ")", ":", "reg", "=", "self", ".", "shape_to_pixel_region", "[", "self", ".", "region_type", "]", "(", "*", "coords", ")", "else", ":", "self", ".", "_raise_error", "(", "\"No central coordinate\"", ")", "reg", ".", "visual", "=", "RegionVisual", "(", ")", "reg", ".", "meta", "=", "RegionMeta", "(", ")", "# both 'text' and 'label' should be set to the same value, where we", "# default to the 'text' value since that is the one used by ds9 regions", "label", "=", "self", ".", "meta", ".", "get", "(", "'text'", ",", "self", ".", "meta", ".", "get", "(", "'label'", ",", "\"\"", ")", ")", "if", "label", "!=", "''", ":", "reg", ".", "meta", "[", "'label'", "]", "=", "label", "for", "key", "in", "self", ".", "meta", ":", "if", "key", "in", "viz_keywords", ":", "reg", ".", "visual", "[", "key", "]", "=", "self", ".", "meta", "[", "key", "]", "else", ":", "reg", ".", "meta", "[", "key", "]", "=", "self", ".", "meta", "[", "key", "]", "reg", ".", "meta", "[", "'include'", "]", "=", "self", ".", "include", "return", "reg" ]
Converts to a ``regions.Region`` object
[ "Converts", "to", "a", "regions", ".", "Region", "object" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L594-L628
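A minimal usage sketch (not a dataset row), assuming the DS9 region string and import path below: the viz_keywords split in Shape.to_region above is what routes recognised display keys into reg.visual and everything else into reg.meta once a DS9 string is parsed.

from regions.io.ds9.read import DS9Parser

ds9_str = 'image\ncircle(10,10,3) # color=green text={demo}'
parser = DS9Parser(ds9_str)
reg = parser.shapes.to_regions()[0]   # to_regions() calls Shape.to_region for each shape
print(reg.visual)  # 'color' is in viz_keywords, so it ends up in reg.visual
print(reg.meta)    # the label/text, 'include' and any other keys stay in reg.meta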
237,866
astropy/regions
regions/io/core.py
Shape.check_crtf
def check_crtf(self): """ Checks for CRTF compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package " "supported by CRTF".format(self.region_type)) if self.coordsys not in valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame in " "astropy supported by CRTF".format(self.coordsys))
python
def check_crtf(self): """ Checks for CRTF compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package " "supported by CRTF".format(self.region_type)) if self.coordsys not in valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame in " "astropy supported by CRTF".format(self.coordsys))
[ "def", "check_crtf", "(", "self", ")", ":", "if", "self", ".", "region_type", "not", "in", "regions_attributes", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid region type in this package \"", "\"supported by CRTF\"", ".", "format", "(", "self", ".", "region_type", ")", ")", "if", "self", ".", "coordsys", "not", "in", "valid_coordsys", "[", "'CRTF'", "]", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid coordinate reference frame in \"", "\"astropy supported by CRTF\"", ".", "format", "(", "self", ".", "coordsys", ")", ")", "]" ]
Checks for CRTF compatibility.
[ "Checks", "for", "CRTF", "compatibility", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L633-L643
237,867
astropy/regions
regions/io/core.py
Shape.check_ds9
def check_ds9(self): """ Checks for DS9 compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package " "supported by DS9".format(self.region_type)) if self.coordsys not in valid_coordsys['DS9']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy supported by DS9".format(self.coordsys))
python
def check_ds9(self): """ Checks for DS9 compatibility. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package " "supported by DS9".format(self.region_type)) if self.coordsys not in valid_coordsys['DS9']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy supported by DS9".format(self.coordsys))
[ "def", "check_ds9", "(", "self", ")", ":", "if", "self", ".", "region_type", "not", "in", "regions_attributes", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid region type in this package \"", "\"supported by DS9\"", ".", "format", "(", "self", ".", "region_type", ")", ")", "if", "self", ".", "coordsys", "not", "in", "valid_coordsys", "[", "'DS9'", "]", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid coordinate reference frame \"", "\"in astropy supported by DS9\"", ".", "format", "(", "self", ".", "coordsys", ")", ")", "]" ]
Checks for DS9 compatibility.
[ "Checks", "for", "DS9", "compatibility", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L645-L655
237,868
astropy/regions
regions/io/core.py
Shape._validate
def _validate(self): """ Checks whether all the attributes of this object are valid. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" .format(self.region_type)) if self.coordsys not in valid_coordsys['DS9'] + valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy".format(self.coordsys))
python
def _validate(self): """ Checks whether all the attributes of this object are valid. """ if self.region_type not in regions_attributes: raise ValueError("'{0}' is not a valid region type in this package" .format(self.region_type)) if self.coordsys not in valid_coordsys['DS9'] + valid_coordsys['CRTF']: raise ValueError("'{0}' is not a valid coordinate reference frame " "in astropy".format(self.coordsys))
[ "def", "_validate", "(", "self", ")", ":", "if", "self", ".", "region_type", "not", "in", "regions_attributes", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid region type in this package\"", ".", "format", "(", "self", ".", "region_type", ")", ")", "if", "self", ".", "coordsys", "not", "in", "valid_coordsys", "[", "'DS9'", "]", "+", "valid_coordsys", "[", "'CRTF'", "]", ":", "raise", "ValueError", "(", "\"'{0}' is not a valid coordinate reference frame \"", "\"in astropy\"", ".", "format", "(", "self", ".", "coordsys", ")", ")" ]
Checks whether all the attributes of this object are valid.
[ "Checks", "whether", "all", "the", "attributes", "of", "this", "object", "are", "valid", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/core.py#L657-L667
237,869
astropy/regions
regions/io/crtf/read.py
read_crtf
def read_crtf(filename, errors='strict'): """ Reads a CRTF region file and returns a list of region objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.CRTFRegionParserError`. ``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python `list` of `~regions.Region` objects. Examples -------- >>> from regions import read_crtf >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests') >>> regs = read_crtf(file, errors='warn') >>> print(regs[0]) Region: CircleSkyRegion center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg (273.1, -23.18333333)> radius: 2.3 arcsec >>> print(regs[0].meta) {'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'} >>> print(regs[0].visual) {'color': 'blue'} """ with open(filename) as fh: if regex_begin.search(fh.readline()): region_string = fh.read() parser = CRTFParser(region_string, errors) return parser.shapes.to_regions() else: raise CRTFRegionParserError('Every CRTF Region must start with "#CRTF" ')
python
def read_crtf(filename, errors='strict'): """ Reads a CRTF region file and returns a list of region objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.CRTFRegionParserError`. ``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python `list` of `~regions.Region` objects. Examples -------- >>> from regions import read_crtf >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests') >>> regs = read_crtf(file, errors='warn') >>> print(regs[0]) Region: CircleSkyRegion center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg (273.1, -23.18333333)> radius: 2.3 arcsec >>> print(regs[0].meta) {'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'} >>> print(regs[0].visual) {'color': 'blue'} """ with open(filename) as fh: if regex_begin.search(fh.readline()): region_string = fh.read() parser = CRTFParser(region_string, errors) return parser.shapes.to_regions() else: raise CRTFRegionParserError('Every CRTF Region must start with "#CRTF" ')
[ "def", "read_crtf", "(", "filename", ",", "errors", "=", "'strict'", ")", ":", "with", "open", "(", "filename", ")", "as", "fh", ":", "if", "regex_begin", ".", "search", "(", "fh", ".", "readline", "(", ")", ")", ":", "region_string", "=", "fh", ".", "read", "(", ")", "parser", "=", "CRTFParser", "(", "region_string", ",", "errors", ")", "return", "parser", ".", "shapes", ".", "to_regions", "(", ")", "else", ":", "raise", "CRTFRegionParserError", "(", "'Every CRTF Region must start with \"#CRTF\" '", ")" ]
Reads a CRTF region file and returns a list of region objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.CRTFRegionParserError`. ``warn`` will raise a `~regions.CRTFRegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python `list` of `~regions.Region` objects. Examples -------- >>> from regions import read_crtf >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/CRTFgeneral.crtf', package='regions.io.crtf.tests') >>> regs = read_crtf(file, errors='warn') >>> print(regs[0]) Region: CircleSkyRegion center: <SkyCoord (FK4: equinox=B1950.000, obstime=B1950.000): (ra, dec) in deg (273.1, -23.18333333)> radius: 2.3 arcsec >>> print(regs[0].meta) {'frame': 'BARY', 'corr': ['I', 'Q'], 'include': True, 'type': 'ann'} >>> print(regs[0].visual) {'color': 'blue'}
[ "Reads", "a", "CRTF", "region", "file", "and", "returns", "a", "list", "of", "region", "objects", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L43-L85
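A minimal sketch (not a dataset row), assuming a throwaway temporary file: the header check in read_crtf above rejects any file whose first line does not carry the "#CRTF" marker.

import os
import tempfile

from regions import read_crtf, CRTFRegionParserError

with tempfile.NamedTemporaryFile('w', suffix='.crtf', delete=False) as fh:
    fh.write('circle[[273.1deg, -23.2deg], 2.3arcsec]\n')  # no "#CRTF" header line
    path = fh.name

try:
    read_crtf(path)
except CRTFRegionParserError as exc:
    print(exc)  # Every CRTF Region must start with "#CRTF"
finally:
    os.remove(path)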
237,870
astropy/regions
regions/io/crtf/read.py
CRTFParser.parse_line
def parse_line(self, line): """ Parses a single line. """ # Skip blanks if line == '': return # Skip comments if regex_comment.search(line): return # Special case / header: parse global parameters into metadata global_parameters = regex_global.search(line) if global_parameters: self.parse_global_meta(global_parameters.group('parameters')) return # Tries to check the validity of the line. crtf_line = regex_line.search(line) if crtf_line: # Tries to parse the line. # Finds info about the region. region = regex_region.search(crtf_line.group('region')) type_ = region.group('type') or 'reg' include = region.group('include') or '+' region_type = region.group('regiontype').lower() if region_type in self.valid_definition: helper = CRTFRegionParser(self.global_meta, include, type_, region_type, *crtf_line.group('region', 'parameters')) self.shapes.append(helper.shape) else: self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type)) else: self._raise_error("Not a valid CRTF line: '{0}'.".format(line)) return
python
def parse_line(self, line): """ Parses a single line. """ # Skip blanks if line == '': return # Skip comments if regex_comment.search(line): return # Special case / header: parse global parameters into metadata global_parameters = regex_global.search(line) if global_parameters: self.parse_global_meta(global_parameters.group('parameters')) return # Tries to check the validity of the line. crtf_line = regex_line.search(line) if crtf_line: # Tries to parse the line. # Finds info about the region. region = regex_region.search(crtf_line.group('region')) type_ = region.group('type') or 'reg' include = region.group('include') or '+' region_type = region.group('regiontype').lower() if region_type in self.valid_definition: helper = CRTFRegionParser(self.global_meta, include, type_, region_type, *crtf_line.group('region', 'parameters')) self.shapes.append(helper.shape) else: self._raise_error("Not a valid CRTF Region type: '{0}'.".format(region_type)) else: self._raise_error("Not a valid CRTF line: '{0}'.".format(line)) return
[ "def", "parse_line", "(", "self", ",", "line", ")", ":", "# Skip blanks", "if", "line", "==", "''", ":", "return", "# Skip comments", "if", "regex_comment", ".", "search", "(", "line", ")", ":", "return", "# Special case / header: parse global parameters into metadata", "global_parameters", "=", "regex_global", ".", "search", "(", "line", ")", "if", "global_parameters", ":", "self", ".", "parse_global_meta", "(", "global_parameters", ".", "group", "(", "'parameters'", ")", ")", "return", "# Tries to check the validity of the line.", "crtf_line", "=", "regex_line", ".", "search", "(", "line", ")", "if", "crtf_line", ":", "# Tries to parse the line.", "# Finds info about the region.", "region", "=", "regex_region", ".", "search", "(", "crtf_line", ".", "group", "(", "'region'", ")", ")", "type_", "=", "region", ".", "group", "(", "'type'", ")", "or", "'reg'", "include", "=", "region", ".", "group", "(", "'include'", ")", "or", "'+'", "region_type", "=", "region", ".", "group", "(", "'regiontype'", ")", ".", "lower", "(", ")", "if", "region_type", "in", "self", ".", "valid_definition", ":", "helper", "=", "CRTFRegionParser", "(", "self", ".", "global_meta", ",", "include", ",", "type_", ",", "region_type", ",", "*", "crtf_line", ".", "group", "(", "'region'", ",", "'parameters'", ")", ")", "self", ".", "shapes", ".", "append", "(", "helper", ".", "shape", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not a valid CRTF Region type: '{0}'.\"", ".", "format", "(", "region_type", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not a valid CRTF line: '{0}'.\"", ".", "format", "(", "line", ")", ")", "return" ]
Parses a single line.
[ "Parses", "a", "single", "line", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L161-L199
237,871
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.parse
def parse(self): """ Starting point to parse the CRTF region string. """ self.convert_meta() self.coordsys = self.meta.get('coord', 'image').lower() self.set_coordsys() self.convert_coordinates() self.make_shape()
python
def parse(self): """ Starting point to parse the CRTF region string. """ self.convert_meta() self.coordsys = self.meta.get('coord', 'image').lower() self.set_coordsys() self.convert_coordinates() self.make_shape()
[ "def", "parse", "(", "self", ")", ":", "self", ".", "convert_meta", "(", ")", "self", ".", "coordsys", "=", "self", ".", "meta", ".", "get", "(", "'coord'", ",", "'image'", ")", ".", "lower", "(", ")", "self", ".", "set_coordsys", "(", ")", "self", ".", "convert_coordinates", "(", ")", "self", ".", "make_shape", "(", ")" ]
Starting point to parse the CRTF region string.
[ "Starting", "point", "to", "parse", "the", "CRTF", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L320-L329
237,872
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.set_coordsys
def set_coordsys(self): """ Mapping to astropy's coordinate system name # TODO: needs expert attention (Most reference systems are not mapped) """ if self.coordsys.lower() in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[self.coordsys.lower()]
python
def set_coordsys(self): """ Mapping to astropy's coordinate system name # TODO: needs expert attention (Most reference systems are not mapped) """ if self.coordsys.lower() in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[self.coordsys.lower()]
[ "def", "set_coordsys", "(", "self", ")", ":", "if", "self", ".", "coordsys", ".", "lower", "(", ")", "in", "self", ".", "coordsys_mapping", ":", "self", ".", "coordsys", "=", "self", ".", "coordsys_mapping", "[", "self", ".", "coordsys", ".", "lower", "(", ")", "]" ]
Mapping to astropy's coordinate system name # TODO: needs expert attention (Most reference systems are not mapped)
[ "Mapping", "to", "astropy", "s", "coordinate", "system", "name" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L331-L338
237,873
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.convert_coordinates
def convert_coordinates(self): """ Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects """ coord_list_str = regex_coordinate.findall(self.reg_str) + regex_length.findall(self.reg_str) coord_list = [] if self.region_type == 'poly': if len(coord_list_str) < 4: self._raise_error('Not in proper format: {} polygon should have > 4 coordinates'.format(self.reg_str)) if coord_list_str[0] != coord_list_str[-1]: self._raise_error("Not in proper format: '{0}', " "In polygon, the last and first coordinates should be same".format(self.reg_str)) else: if len(coord_list_str) != len(self.language_spec[self.region_type]): self._raise_error("Not in proper format: '{0}', " "Does not contain expected number of parameters for the region '{1}'" .format(self.reg_str, self.region_type)) for attr_spec, val_str in zip(self.language_spec[self.region_type], coord_list_str): if attr_spec == 'c': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_coordinate(val_str[0])) coord_list.append(CoordinateParser.parse_coordinate(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a coordinate".format(val_str)) if attr_spec == 'pl': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[0])) coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a pair of length".format(val_str)) if attr_spec == 'l': if isinstance(val_str, six.string_types): coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str)) else: self._raise_error("Not in proper format: {0} should be a single length".format(val_str)) if attr_spec == 's': if self.region_type == 'symbol': if val_str in valid_symbols: self.meta['symbol'] = val_str else: self._raise_error("Not in proper format: '{0}' should be a symbol".format(val_str)) elif self.region_type == 'text': self.meta['text'] = val_str[1:-1] self.coord = coord_list
python
def convert_coordinates(self): """ Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects """ coord_list_str = regex_coordinate.findall(self.reg_str) + regex_length.findall(self.reg_str) coord_list = [] if self.region_type == 'poly': if len(coord_list_str) < 4: self._raise_error('Not in proper format: {} polygon should have > 4 coordinates'.format(self.reg_str)) if coord_list_str[0] != coord_list_str[-1]: self._raise_error("Not in proper format: '{0}', " "In polygon, the last and first coordinates should be same".format(self.reg_str)) else: if len(coord_list_str) != len(self.language_spec[self.region_type]): self._raise_error("Not in proper format: '{0}', " "Does not contain expected number of parameters for the region '{1}'" .format(self.reg_str, self.region_type)) for attr_spec, val_str in zip(self.language_spec[self.region_type], coord_list_str): if attr_spec == 'c': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_coordinate(val_str[0])) coord_list.append(CoordinateParser.parse_coordinate(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a coordinate".format(val_str)) if attr_spec == 'pl': if len(val_str) == 2 and val_str[1] != '': coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[0])) coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str[1])) else: self._raise_error("Not in proper format: {0} should be a pair of length".format(val_str)) if attr_spec == 'l': if isinstance(val_str, six.string_types): coord_list.append(CoordinateParser.parse_angular_length_quantity(val_str)) else: self._raise_error("Not in proper format: {0} should be a single length".format(val_str)) if attr_spec == 's': if self.region_type == 'symbol': if val_str in valid_symbols: self.meta['symbol'] = val_str else: self._raise_error("Not in proper format: '{0}' should be a symbol".format(val_str)) elif self.region_type == 'text': self.meta['text'] = val_str[1:-1] self.coord = coord_list
[ "def", "convert_coordinates", "(", "self", ")", ":", "coord_list_str", "=", "regex_coordinate", ".", "findall", "(", "self", ".", "reg_str", ")", "+", "regex_length", ".", "findall", "(", "self", ".", "reg_str", ")", "coord_list", "=", "[", "]", "if", "self", ".", "region_type", "==", "'poly'", ":", "if", "len", "(", "coord_list_str", ")", "<", "4", ":", "self", ".", "_raise_error", "(", "'Not in proper format: {} polygon should have > 4 coordinates'", ".", "format", "(", "self", ".", "reg_str", ")", ")", "if", "coord_list_str", "[", "0", "]", "!=", "coord_list_str", "[", "-", "1", "]", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: '{0}', \"", "\"In polygon, the last and first coordinates should be same\"", ".", "format", "(", "self", ".", "reg_str", ")", ")", "else", ":", "if", "len", "(", "coord_list_str", ")", "!=", "len", "(", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", ")", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: '{0}', \"", "\"Does not contain expected number of parameters for the region '{1}'\"", ".", "format", "(", "self", ".", "reg_str", ",", "self", ".", "region_type", ")", ")", "for", "attr_spec", ",", "val_str", "in", "zip", "(", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", ",", "coord_list_str", ")", ":", "if", "attr_spec", "==", "'c'", ":", "if", "len", "(", "val_str", ")", "==", "2", "and", "val_str", "[", "1", "]", "!=", "''", ":", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_coordinate", "(", "val_str", "[", "0", "]", ")", ")", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_coordinate", "(", "val_str", "[", "1", "]", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: {0} should be a coordinate\"", ".", "format", "(", "val_str", ")", ")", "if", "attr_spec", "==", "'pl'", ":", "if", "len", "(", "val_str", ")", "==", "2", "and", "val_str", "[", "1", "]", "!=", "''", ":", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "val_str", "[", "0", "]", ")", ")", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "val_str", "[", "1", "]", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: {0} should be a pair of length\"", ".", "format", "(", "val_str", ")", ")", "if", "attr_spec", "==", "'l'", ":", "if", "isinstance", "(", "val_str", ",", "six", ".", "string_types", ")", ":", "coord_list", ".", "append", "(", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "val_str", ")", ")", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: {0} should be a single length\"", ".", "format", "(", "val_str", ")", ")", "if", "attr_spec", "==", "'s'", ":", "if", "self", ".", "region_type", "==", "'symbol'", ":", "if", "val_str", "in", "valid_symbols", ":", "self", ".", "meta", "[", "'symbol'", "]", "=", "val_str", "else", ":", "self", ".", "_raise_error", "(", "\"Not in proper format: '{0}' should be a symbol\"", ".", "format", "(", "val_str", ")", ")", "elif", "self", ".", "region_type", "==", "'text'", ":", "self", ".", "meta", "[", "'text'", "]", "=", "val_str", "[", "1", ":", "-", "1", "]", "self", ".", "coord", "=", "coord_list" ]
Convert coordinate string to `~astropy.coordinates.Angle` or `~astropy.units.quantity.Quantity` objects
[ "Convert", "coordinate", "string", "to", "~astropy", ".", "coordinates", ".", "Angle", "or", "~astropy", ".", "units", ".", "quantity", ".", "Quantity", "objects" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L340-L387
237,874
astropy/regions
regions/io/crtf/read.py
CRTFRegionParser.convert_meta
def convert_meta(self): """ Parses the meta_str to python dictionary and stores in ``meta`` attribute. """ if self.meta_str: self.meta_str = regex_meta.findall(self.meta_str + ',') if self.meta_str: for par in self.meta_str: if par[0] is not '': val1 = par[0] val2 = par[1] else: val1 = par[2] val2 = par[3] val1 = val1.strip() val2 = val2.strip() if val1 in CRTFParser.valid_global_keys or val1 == 'label': if val1 in ('range', 'corr', 'labeloff'): val2 = val2.split(',') val2 = [x.strip() for x in val2] self.meta[val1] = val2 else: self._raise_error("'{0}' is not a valid meta key".format(val1)) self.meta['include'] = self.include != '-' self.include = self.meta['include'] if 'range' in self.meta: self.meta['range'] = [u.Quantity(x) for x in self.meta['range']] self.meta['type'] = self.type_
python
def convert_meta(self): """ Parses the meta_str to python dictionary and stores in ``meta`` attribute. """ if self.meta_str: self.meta_str = regex_meta.findall(self.meta_str + ',') if self.meta_str: for par in self.meta_str: if par[0] is not '': val1 = par[0] val2 = par[1] else: val1 = par[2] val2 = par[3] val1 = val1.strip() val2 = val2.strip() if val1 in CRTFParser.valid_global_keys or val1 == 'label': if val1 in ('range', 'corr', 'labeloff'): val2 = val2.split(',') val2 = [x.strip() for x in val2] self.meta[val1] = val2 else: self._raise_error("'{0}' is not a valid meta key".format(val1)) self.meta['include'] = self.include != '-' self.include = self.meta['include'] if 'range' in self.meta: self.meta['range'] = [u.Quantity(x) for x in self.meta['range']] self.meta['type'] = self.type_
[ "def", "convert_meta", "(", "self", ")", ":", "if", "self", ".", "meta_str", ":", "self", ".", "meta_str", "=", "regex_meta", ".", "findall", "(", "self", ".", "meta_str", "+", "','", ")", "if", "self", ".", "meta_str", ":", "for", "par", "in", "self", ".", "meta_str", ":", "if", "par", "[", "0", "]", "is", "not", "''", ":", "val1", "=", "par", "[", "0", "]", "val2", "=", "par", "[", "1", "]", "else", ":", "val1", "=", "par", "[", "2", "]", "val2", "=", "par", "[", "3", "]", "val1", "=", "val1", ".", "strip", "(", ")", "val2", "=", "val2", ".", "strip", "(", ")", "if", "val1", "in", "CRTFParser", ".", "valid_global_keys", "or", "val1", "==", "'label'", ":", "if", "val1", "in", "(", "'range'", ",", "'corr'", ",", "'labeloff'", ")", ":", "val2", "=", "val2", ".", "split", "(", "','", ")", "val2", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "val2", "]", "self", ".", "meta", "[", "val1", "]", "=", "val2", "else", ":", "self", ".", "_raise_error", "(", "\"'{0}' is not a valid meta key\"", ".", "format", "(", "val1", ")", ")", "self", ".", "meta", "[", "'include'", "]", "=", "self", ".", "include", "!=", "'-'", "self", ".", "include", "=", "self", ".", "meta", "[", "'include'", "]", "if", "'range'", "in", "self", ".", "meta", ":", "self", ".", "meta", "[", "'range'", "]", "=", "[", "u", ".", "Quantity", "(", "x", ")", "for", "x", "in", "self", ".", "meta", "[", "'range'", "]", "]", "self", ".", "meta", "[", "'type'", "]", "=", "self", ".", "type_" ]
Parses the meta_str into a Python dictionary and stores it in the ``meta`` attribute.
[ "Parses", "the", "meta_str", "into", "a", "Python", "dictionary", "and", "stores", "it", "in", "the", "meta", "attribute", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/crtf/read.py#L389-L419
237,875
astropy/regions
regions/io/fits/write.py
fits_region_objects_to_table
def fits_region_objects_to_table(regions): """ Converts list of regions to FITS region table. Parameters ---------- regions : list List of `regions.Region` objects Returns ------- region_string : `~astropy.table.Table` FITS region table Examples -------- >>> from regions import CirclePixelRegion, PixCoord >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> table = fits_region_objects_to_table([reg_pixel]) >>> print(table) X [1] Y [1] SHAPE R [4] ROTANG COMPONENT pix pix pix deg ----- ----- ------ ---------- ------ --------- 1.0 2.0 circle 5.0 .. 0.0 0 1 """ for reg in regions: if isinstance(reg, SkyRegion): raise TypeError('Every region must be a pixel region, not {}'.format(reg)) shape_list = to_shape_list(regions, coordinate_system='image') return shape_list.to_fits()
python
def fits_region_objects_to_table(regions): """ Converts list of regions to FITS region table. Parameters ---------- regions : list List of `regions.Region` objects Returns ------- region_string : `~astropy.table.Table` FITS region table Examples -------- >>> from regions import CirclePixelRegion, PixCoord >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> table = fits_region_objects_to_table([reg_pixel]) >>> print(table) X [1] Y [1] SHAPE R [4] ROTANG COMPONENT pix pix pix deg ----- ----- ------ ---------- ------ --------- 1.0 2.0 circle 5.0 .. 0.0 0 1 """ for reg in regions: if isinstance(reg, SkyRegion): raise TypeError('Every region must be a pixel region, not {}'.format(reg)) shape_list = to_shape_list(regions, coordinate_system='image') return shape_list.to_fits()
[ "def", "fits_region_objects_to_table", "(", "regions", ")", ":", "for", "reg", "in", "regions", ":", "if", "isinstance", "(", "reg", ",", "SkyRegion", ")", ":", "raise", "TypeError", "(", "'Every region must be a pixel region, not {}'", ".", "format", "(", "reg", ")", ")", "shape_list", "=", "to_shape_list", "(", "regions", ",", "coordinate_system", "=", "'image'", ")", "return", "shape_list", ".", "to_fits", "(", ")", "]" ]
Converts list of regions to FITS region table. Parameters ---------- regions : list List of `regions.Region` objects Returns ------- region_string : `~astropy.table.Table` FITS region table Examples -------- >>> from regions import CirclePixelRegion, PixCoord >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> table = fits_region_objects_to_table([reg_pixel]) >>> print(table) X [1] Y [1] SHAPE R [4] ROTANG COMPONENT pix pix pix deg ----- ----- ------ ---------- ------ --------- 1.0 2.0 circle 5.0 .. 0.0 0 1
[ "Converts", "list", "of", "regions", "to", "FITS", "region", "table", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/fits/write.py#L15-L47
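A minimal sketch (not a dataset row), assuming an arbitrary sky region: the isinstance(reg, SkyRegion) guard in fits_region_objects_to_table above means only pixel regions can be serialised, so a sky region triggers the TypeError branch.

import astropy.units as u
from astropy.coordinates import SkyCoord
from regions import CircleSkyRegion
from regions.io.fits.write import fits_region_objects_to_table

sky_reg = CircleSkyRegion(SkyCoord(42 * u.deg, 43 * u.deg), radius=3 * u.deg)
try:
    fits_region_objects_to_table([sky_reg])
except TypeError as exc:
    print(exc)  # raised by the pixel-only check above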
237,876
astropy/regions
regions/io/fits/write.py
write_fits_region
def write_fits_region(filename, regions, header=None): """ Converts list of regions to FITS region table and write to a file. Parameters ---------- filename: str Filename in which the table is to be written. Default is 'new.fits' regions: list List of `regions.Region` objects header: `~astropy.io.fits.header.Header` object The FITS header. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from astropy.io import fits >>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests') >>> from regions import CirclePixelRegion, PixCoord, write_fits_region >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> hdul = fits.open(file_sample) >>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header) """ output = fits_region_objects_to_table(regions) bin_table = fits.BinTableHDU(data=output, header=header) bin_table.writeto(filename)
python
def write_fits_region(filename, regions, header=None): """ Converts list of regions to FITS region table and write to a file. Parameters ---------- filename: str Filename in which the table is to be written. Default is 'new.fits' regions: list List of `regions.Region` objects header: `~astropy.io.fits.header.Header` object The FITS header. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from astropy.io import fits >>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests') >>> from regions import CirclePixelRegion, PixCoord, write_fits_region >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> hdul = fits.open(file_sample) >>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header) """ output = fits_region_objects_to_table(regions) bin_table = fits.BinTableHDU(data=output, header=header) bin_table.writeto(filename)
[ "def", "write_fits_region", "(", "filename", ",", "regions", ",", "header", "=", "None", ")", ":", "output", "=", "fits_region_objects_to_table", "(", "regions", ")", "bin_table", "=", "fits", ".", "BinTableHDU", "(", "data", "=", "output", ",", "header", "=", "header", ")", "bin_table", ".", "writeto", "(", "filename", ")" ]
Converts list of regions to FITS region table and write to a file. Parameters ---------- filename: str Filename in which the table is to be written. Default is 'new.fits' regions: list List of `regions.Region` objects header: `~astropy.io.fits.header.Header` object The FITS header. Examples -------- >>> from astropy.utils.data import get_pkg_data_filename >>> from astropy.io import fits >>> file_sample = get_pkg_data_filename('data/fits_region.fits', package='regions.io.fits.tests') >>> from regions import CirclePixelRegion, PixCoord, write_fits_region >>> reg_pixel = CirclePixelRegion(PixCoord(1, 2), 5) >>> hdul = fits.open(file_sample) >>> write_fits_region('region_output.fits', regions=[reg_pixel], header=hdul[1].header)
[ "Converts", "list", "of", "regions", "to", "FITS", "region", "table", "and", "write", "to", "a", "file", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/fits/write.py#L50-L78
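A minimal round-trip sketch (not a dataset row); the output filename is an arbitrary choice for the example: write_fits_region above writes a BinTableHDU, which astropy's Table.read can load back directly.

from astropy.table import Table
from regions import CirclePixelRegion, PixCoord, write_fits_region

reg = CirclePixelRegion(PixCoord(1, 2), 5)
write_fits_region('regions_demo.fits', regions=[reg])

table = Table.read('regions_demo.fits')
print(table.colnames)  # X, Y, SHAPE, R, ROTANG, COMPONENT, as in the example table above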
237,877
astropy/regions
regions/_utils/examples.py
make_example_dataset
def make_example_dataset(data='simulated', config=None): """Make example dataset. This is a factory function for ``ExampleDataset`` objects. The following config options are available (default values shown): * ``crval = 0, 0`` * ``crpix = 180, 90`` * ``cdelt = -1, 1`` * ``shape = 180, 360`` * ``ctype = 'GLON-AIT', 'GLAT-AIT'`` Parameters ---------- data : {'simulated', 'fermi'} Which dataset to use config : dict or None Configuration options Returns ------- dataset : ``ExampleDataset`` Example dataset object Examples -------- Make an example dataset: >>> from regions import make_example_dataset >>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36)) >>> dataset = make_example_dataset(data='simulated', config=config) Access properties of the ``dataset`` object: >>> dataset.source_table >>> dataset.event_table >>> ExampleDataset.wcs >>> ExampleDataset.image >>> ExampleDataset.hdu_list """ if data == 'simulated': return ExampleDatasetSimulated(config=config) elif data == 'fermi': return ExampleDatasetFermi(config=config) else: raise ValueError('Invalid selection data: {}'.format(data))
python
def make_example_dataset(data='simulated', config=None): """Make example dataset. This is a factory function for ``ExampleDataset`` objects. The following config options are available (default values shown): * ``crval = 0, 0`` * ``crpix = 180, 90`` * ``cdelt = -1, 1`` * ``shape = 180, 360`` * ``ctype = 'GLON-AIT', 'GLAT-AIT'`` Parameters ---------- data : {'simulated', 'fermi'} Which dataset to use config : dict or None Configuration options Returns ------- dataset : ``ExampleDataset`` Example dataset object Examples -------- Make an example dataset: >>> from regions import make_example_dataset >>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36)) >>> dataset = make_example_dataset(data='simulated', config=config) Access properties of the ``dataset`` object: >>> dataset.source_table >>> dataset.event_table >>> ExampleDataset.wcs >>> ExampleDataset.image >>> ExampleDataset.hdu_list """ if data == 'simulated': return ExampleDatasetSimulated(config=config) elif data == 'fermi': return ExampleDatasetFermi(config=config) else: raise ValueError('Invalid selection data: {}'.format(data))
[ "def", "make_example_dataset", "(", "data", "=", "'simulated'", ",", "config", "=", "None", ")", ":", "if", "data", "==", "'simulated'", ":", "return", "ExampleDatasetSimulated", "(", "config", "=", "config", ")", "elif", "data", "==", "'fermi'", ":", "return", "ExampleDatasetFermi", "(", "config", "=", "config", ")", "else", ":", "raise", "ValueError", "(", "'Invalid selection data: {}'", ".", "format", "(", "data", ")", ")" ]
Make example dataset. This is a factory function for ``ExampleDataset`` objects. The following config options are available (default values shown): * ``crval = 0, 0`` * ``crpix = 180, 90`` * ``cdelt = -1, 1`` * ``shape = 180, 360`` * ``ctype = 'GLON-AIT', 'GLAT-AIT'`` Parameters ---------- data : {'simulated', 'fermi'} Which dataset to use config : dict or None Configuration options Returns ------- dataset : ``ExampleDataset`` Example dataset object Examples -------- Make an example dataset: >>> from regions import make_example_dataset >>> config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36)) >>> dataset = make_example_dataset(data='simulated', config=config) Access properties of the ``dataset`` object: >>> dataset.source_table >>> dataset.event_table >>> ExampleDataset.wcs >>> ExampleDataset.image >>> ExampleDataset.hdu_list
[ "Make", "example", "dataset", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/examples.py#L17-L64
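A minimal sketch (not a dataset row): the properties named in the docstring above (source_table, event_table, wcs, image, hdu_list) are accessed on the returned dataset instance rather than on the ExampleDataset class.

from regions import make_example_dataset

config = dict(crpix=(18, 9), cdelt=(-10, 10), shape=(18, 36))
dataset = make_example_dataset(data='simulated', config=config)

print(dataset.source_table)  # simulated source catalogue
print(dataset.wcs)           # WCS built from the config values above
print(dataset.hdu_list)      # HDU list bundling the tables and image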
237,878
astropy/regions
regions/_utils/examples.py
_table_to_bintable
def _table_to_bintable(table): """Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`.""" data = table.as_array() header = fits.Header() header.update(table.meta) name = table.meta.pop('name', None) return fits.BinTableHDU(data, header, name=name)
python
def _table_to_bintable(table): """Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`.""" data = table.as_array() header = fits.Header() header.update(table.meta) name = table.meta.pop('name', None) return fits.BinTableHDU(data, header, name=name)
[ "def", "_table_to_bintable", "(", "table", ")", ":", "data", "=", "table", ".", "as_array", "(", ")", "header", "=", "fits", ".", "Header", "(", ")", "header", ".", "update", "(", "table", ".", "meta", ")", "name", "=", "table", ".", "meta", ".", "pop", "(", "'name'", ",", "None", ")", "return", "fits", ".", "BinTableHDU", "(", "data", ",", "header", ",", "name", "=", "name", ")" ]
Convert `~astropy.table.Table` to `astropy.io.fits.BinTable`.
[ "Convert", "~astropy", ".", "table", ".", "Table", "to", "astropy", ".", "io", ".", "fits", ".", "BinTable", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/examples.py#L223-L229
237,879
astropy/regions
regions/io/ds9/read.py
read_ds9
def read_ds9(filename, errors='strict'): """ Read a DS9 region file in as a `list` of `~regions.Region` objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.DS9RegionParserError`. ``warn`` will raise a `~regions.DS9RegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python list of `~regions.Region` objects. Examples -------- >>> from regions import read_ds9 >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests') >>> regs = read_ds9(file, errors='warn') >>> print(regs[0]) Region: CirclePixelRegion center: PixCoord(x=330.0, y=1090.0) radius: 40.0 >>> print(regs[0].meta) {'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True} >>> print(regs[0].visual) {'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'} """ with open(filename) as fh: region_string = fh.read() parser = DS9Parser(region_string, errors=errors) return parser.shapes.to_regions()
python
def read_ds9(filename, errors='strict'): """ Read a DS9 region file in as a `list` of `~regions.Region` objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.DS9RegionParserError`. ``warn`` will raise a `~regions.DS9RegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python list of `~regions.Region` objects. Examples -------- >>> from regions import read_ds9 >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests') >>> regs = read_ds9(file, errors='warn') >>> print(regs[0]) Region: CirclePixelRegion center: PixCoord(x=330.0, y=1090.0) radius: 40.0 >>> print(regs[0].meta) {'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True} >>> print(regs[0].visual) {'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'} """ with open(filename) as fh: region_string = fh.read() parser = DS9Parser(region_string, errors=errors) return parser.shapes.to_regions()
[ "def", "read_ds9", "(", "filename", ",", "errors", "=", "'strict'", ")", ":", "with", "open", "(", "filename", ")", "as", "fh", ":", "region_string", "=", "fh", ".", "read", "(", ")", "parser", "=", "DS9Parser", "(", "region_string", ",", "errors", "=", "errors", ")", "return", "parser", ".", "shapes", ".", "to_regions", "(", ")" ]
Read a DS9 region file in as a `list` of `~regions.Region` objects. Parameters ---------- filename : `str` The file path errors : ``warn``, ``ignore``, ``strict``, optional The error handling scheme to use for handling parsing errors. The default is 'strict', which will raise a `~regions.DS9RegionParserError`. ``warn`` will raise a `~regions.DS9RegionParserWarning`, and ``ignore`` will do nothing (i.e., be silent). Returns ------- regions : `list` Python list of `~regions.Region` objects. Examples -------- >>> from regions import read_ds9 >>> from astropy.utils.data import get_pkg_data_filename >>> file = get_pkg_data_filename('data/physical_reference.reg', package='regions.io.ds9.tests') >>> regs = read_ds9(file, errors='warn') >>> print(regs[0]) Region: CirclePixelRegion center: PixCoord(x=330.0, y=1090.0) radius: 40.0 >>> print(regs[0].meta) {'label': 'Circle', 'select': '1', 'highlite': '1', 'fixed': '0', 'edit': '1', 'move': '1', 'delete': '1', 'source': '1', 'tag': ['{foo}', '{foo bar}'], 'include': True} >>> print(regs[0].visual) {'dashlist': '8 3', 'dash': '0', 'color': 'pink', 'linewidth': '3', 'font': 'times', 'fontsize': '10', 'fontstyle': 'normal', 'fontweight': 'roman'}
[ "Read", "a", "DS9", "region", "file", "in", "as", "a", "list", "of", "~regions", ".", "Region", "objects", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L38-L77
237,880
astropy/regions
regions/io/ds9/read.py
DS9Parser.set_coordsys
def set_coordsys(self, coordsys): """ Transform coordinate system # TODO: needs expert attention """ if coordsys in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[coordsys] else: self.coordsys = coordsys
python
def set_coordsys(self, coordsys): """ Transform coordinate system # TODO: needs expert attention """ if coordsys in self.coordsys_mapping: self.coordsys = self.coordsys_mapping[coordsys] else: self.coordsys = coordsys
[ "def", "set_coordsys", "(", "self", ",", "coordsys", ")", ":", "if", "coordsys", "in", "self", ".", "coordsys_mapping", ":", "self", ".", "coordsys", "=", "self", ".", "coordsys_mapping", "[", "coordsys", "]", "else", ":", "self", ".", "coordsys", "=", "coordsys" ]
Transform coordinate system # TODO: needs expert attention
[ "Transform", "coordinate", "system" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L215-L224
237,881
astropy/regions
regions/io/ds9/read.py
DS9Parser.run
def run(self): """ Run all steps """ for line_ in self.region_string.split('\n'): for line in line_.split(";"): self.parse_line(line) log.debug('Global state: {}'.format(self))
python
def run(self): """ Run all steps """ for line_ in self.region_string.split('\n'): for line in line_.split(";"): self.parse_line(line) log.debug('Global state: {}'.format(self))
[ "def", "run", "(", "self", ")", ":", "for", "line_", "in", "self", ".", "region_string", ".", "split", "(", "'\\n'", ")", ":", "for", "line", "in", "line_", ".", "split", "(", "\";\"", ")", ":", "self", ".", "parse_line", "(", "line", ")", "log", ".", "debug", "(", "'Global state: {}'", ".", "format", "(", "self", ")", ")" ]
Run all steps
[ "Run", "all", "steps" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L226-L233
237,882
astropy/regions
regions/io/ds9/read.py
DS9Parser.parse_meta
def parse_meta(meta_str): """ Parse the metadata for a single ds9 region string. Parameters ---------- meta_str : `str` Meta string, the metadata is everything after the close-paren of the region coordinate specification. All metadata is specified as key=value pairs separated by whitespace, but sometimes the values can also be whitespace separated. Returns ------- meta : `~collections.OrderedDict` Dictionary containing the meta data """ keys_vals = [(x, y) for x, _, y in regex_meta.findall(meta_str.strip())] extra_text = regex_meta.split(meta_str.strip())[-1] result = OrderedDict() for key, val in keys_vals: # regex can include trailing whitespace or inverted commas # remove it val = val.strip().strip("'").strip('"') if key == 'text': val = val.lstrip("{").rstrip("}") if key in result: if key == 'tag': result[key].append(val) else: raise ValueError("Duplicate key {0} found".format(key)) else: if key == 'tag': result[key] = [val] else: result[key] = val if extra_text: result['comment'] = extra_text return result
python
def parse_meta(meta_str): """ Parse the metadata for a single ds9 region string. Parameters ---------- meta_str : `str` Meta string, the metadata is everything after the close-paren of the region coordinate specification. All metadata is specified as key=value pairs separated by whitespace, but sometimes the values can also be whitespace separated. Returns ------- meta : `~collections.OrderedDict` Dictionary containing the meta data """ keys_vals = [(x, y) for x, _, y in regex_meta.findall(meta_str.strip())] extra_text = regex_meta.split(meta_str.strip())[-1] result = OrderedDict() for key, val in keys_vals: # regex can include trailing whitespace or inverted commas # remove it val = val.strip().strip("'").strip('"') if key == 'text': val = val.lstrip("{").rstrip("}") if key in result: if key == 'tag': result[key].append(val) else: raise ValueError("Duplicate key {0} found".format(key)) else: if key == 'tag': result[key] = [val] else: result[key] = val if extra_text: result['comment'] = extra_text return result
[ "def", "parse_meta", "(", "meta_str", ")", ":", "keys_vals", "=", "[", "(", "x", ",", "y", ")", "for", "x", ",", "_", ",", "y", "in", "regex_meta", ".", "findall", "(", "meta_str", ".", "strip", "(", ")", ")", "]", "extra_text", "=", "regex_meta", ".", "split", "(", "meta_str", ".", "strip", "(", ")", ")", "[", "-", "1", "]", "result", "=", "OrderedDict", "(", ")", "for", "key", ",", "val", "in", "keys_vals", ":", "# regex can include trailing whitespace or inverted commas", "# remove it", "val", "=", "val", ".", "strip", "(", ")", ".", "strip", "(", "\"'\"", ")", ".", "strip", "(", "'\"'", ")", "if", "key", "==", "'text'", ":", "val", "=", "val", ".", "lstrip", "(", "\"{\"", ")", ".", "rstrip", "(", "\"}\"", ")", "if", "key", "in", "result", ":", "if", "key", "==", "'tag'", ":", "result", "[", "key", "]", ".", "append", "(", "val", ")", "else", ":", "raise", "ValueError", "(", "\"Duplicate key {0} found\"", ".", "format", "(", "key", ")", ")", "else", ":", "if", "key", "==", "'tag'", ":", "result", "[", "key", "]", "=", "[", "val", "]", "else", ":", "result", "[", "key", "]", "=", "val", "if", "extra_text", ":", "result", "[", "'comment'", "]", "=", "extra_text", "return", "result" ]
Parse the metadata for a single ds9 region string. Parameters ---------- meta_str : `str` Meta string, the metadata is everything after the close-paren of the region coordinate specification. All metadata is specified as key=value pairs separated by whitespace, but sometimes the values can also be whitespace separated. Returns ------- meta : `~collections.OrderedDict` Dictionary containing the meta data
[ "Parse", "the", "metadata", "for", "a", "single", "ds9", "region", "string", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L288-L327
237,883
astropy/regions
regions/io/ds9/read.py
DS9Parser.parse_region
def parse_region(self, include, region_type, region_end, line): """ Extract a Shape from a region string """ if self.coordsys is None: raise DS9RegionParserError("No coordinate system specified and a" " region has been found.") else: helper = DS9RegionParser(coordsys=self.coordsys, include=include, region_type=region_type, region_end=region_end, global_meta=self.global_meta, line=line) helper.parse() self.shapes.append(helper.shape)
python
def parse_region(self, include, region_type, region_end, line): """ Extract a Shape from a region string """ if self.coordsys is None: raise DS9RegionParserError("No coordinate system specified and a" " region has been found.") else: helper = DS9RegionParser(coordsys=self.coordsys, include=include, region_type=region_type, region_end=region_end, global_meta=self.global_meta, line=line) helper.parse() self.shapes.append(helper.shape)
[ "def", "parse_region", "(", "self", ",", "include", ",", "region_type", ",", "region_end", ",", "line", ")", ":", "if", "self", ".", "coordsys", "is", "None", ":", "raise", "DS9RegionParserError", "(", "\"No coordinate system specified and a\"", "\" region has been found.\"", ")", "else", ":", "helper", "=", "DS9RegionParser", "(", "coordsys", "=", "self", ".", "coordsys", ",", "include", "=", "include", ",", "region_type", "=", "region_type", ",", "region_end", "=", "region_end", ",", "global_meta", "=", "self", ".", "global_meta", ",", "line", "=", "line", ")", "helper", ".", "parse", "(", ")", "self", ".", "shapes", ".", "append", "(", "helper", ".", "shape", ")" ]
Extract a Shape from a region string
[ "Extract", "a", "Shape", "from", "a", "region", "string" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L329-L344
237,884
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.parse
def parse(self): """ Convert line to shape object """ log.debug(self) self.parse_composite() self.split_line() self.convert_coordinates() self.convert_meta() self.make_shape() log.debug(self)
python
def parse(self): """ Convert line to shape object """ log.debug(self) self.parse_composite() self.split_line() self.convert_coordinates() self.convert_meta() self.make_shape() log.debug(self)
[ "def", "parse", "(", "self", ")", ":", "log", ".", "debug", "(", "self", ")", "self", ".", "parse_composite", "(", ")", "self", ".", "split_line", "(", ")", "self", ".", "convert_coordinates", "(", ")", "self", ".", "convert_meta", "(", ")", "self", ".", "make_shape", "(", ")", "log", ".", "debug", "(", "self", ")" ]
Convert line to shape object
[ "Convert", "line", "to", "shape", "object" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L431-L442
237,885
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.split_line
def split_line(self): """ Split line into coordinates and meta string """ # coordinate of the # symbol or end of the line (-1) if not found hash_or_end = self.line.find("#") temp = self.line[self.region_end:hash_or_end].strip(" |") self.coord_str = regex_paren.sub("", temp) # don't want any meta_str if there is no metadata found if hash_or_end >= 0: self.meta_str = self.line[hash_or_end:] else: self.meta_str = ""
python
def split_line(self): """ Split line into coordinates and meta string """ # coordinate of the # symbol or end of the line (-1) if not found hash_or_end = self.line.find("#") temp = self.line[self.region_end:hash_or_end].strip(" |") self.coord_str = regex_paren.sub("", temp) # don't want any meta_str if there is no metadata found if hash_or_end >= 0: self.meta_str = self.line[hash_or_end:] else: self.meta_str = ""
[ "def", "split_line", "(", "self", ")", ":", "# coordinate of the # symbol or end of the line (-1) if not found", "hash_or_end", "=", "self", ".", "line", ".", "find", "(", "\"#\"", ")", "temp", "=", "self", ".", "line", "[", "self", ".", "region_end", ":", "hash_or_end", "]", ".", "strip", "(", "\" |\"", ")", "self", ".", "coord_str", "=", "regex_paren", ".", "sub", "(", "\"\"", ",", "temp", ")", "# don't want any meta_str if there is no metadata found", "if", "hash_or_end", ">=", "0", ":", "self", ".", "meta_str", "=", "self", ".", "line", "[", "hash_or_end", ":", "]", "else", ":", "self", ".", "meta_str", "=", "\"\"" ]
Split line into coordinates and meta string
[ "Split", "line", "into", "coordinates", "and", "meta", "string" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L450-L463
237,886
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.convert_coordinates
def convert_coordinates(self): """ Convert coordinate string to objects """ coord_list = [] # strip out "null" elements, i.e. ''. It might be possible to eliminate # these some other way, i.e. with regex directly, but I don't know how. # We need to copy in order not to burn up the iterators elements = [x for x in regex_splitter.split(self.coord_str) if x] element_parsers = self.language_spec[self.region_type] for ii, (element, element_parser) in enumerate(zip(elements, element_parsers)): if element_parser is coordinate: unit = self.coordinate_units[self.coordsys][ii % 2] coord_list.append(element_parser(element, unit)) elif self.coordinate_units[self.coordsys][0] is u.dimensionless_unscaled: coord_list.append(element_parser(element, unit=u.dimensionless_unscaled)) else: coord_list.append(element_parser(element)) if self.region_type in ['ellipse', 'box'] and len(coord_list) % 2 == 1: coord_list[-1] = CoordinateParser.parse_angular_length_quantity(elements[len(coord_list)-1]) # Reset iterator for ellipse and annulus # Note that this cannot be done with copy.deepcopy on python2 if self.region_type in ['ellipse', 'annulus']: self.language_spec[self.region_type] = itertools.chain( (coordinate, coordinate), itertools.cycle((radius,))) self.coord = coord_list
python
def convert_coordinates(self): """ Convert coordinate string to objects """ coord_list = [] # strip out "null" elements, i.e. ''. It might be possible to eliminate # these some other way, i.e. with regex directly, but I don't know how. # We need to copy in order not to burn up the iterators elements = [x for x in regex_splitter.split(self.coord_str) if x] element_parsers = self.language_spec[self.region_type] for ii, (element, element_parser) in enumerate(zip(elements, element_parsers)): if element_parser is coordinate: unit = self.coordinate_units[self.coordsys][ii % 2] coord_list.append(element_parser(element, unit)) elif self.coordinate_units[self.coordsys][0] is u.dimensionless_unscaled: coord_list.append(element_parser(element, unit=u.dimensionless_unscaled)) else: coord_list.append(element_parser(element)) if self.region_type in ['ellipse', 'box'] and len(coord_list) % 2 == 1: coord_list[-1] = CoordinateParser.parse_angular_length_quantity(elements[len(coord_list)-1]) # Reset iterator for ellipse and annulus # Note that this cannot be done with copy.deepcopy on python2 if self.region_type in ['ellipse', 'annulus']: self.language_spec[self.region_type] = itertools.chain( (coordinate, coordinate), itertools.cycle((radius,))) self.coord = coord_list
[ "def", "convert_coordinates", "(", "self", ")", ":", "coord_list", "=", "[", "]", "# strip out \"null\" elements, i.e. ''. It might be possible to eliminate", "# these some other way, i.e. with regex directly, but I don't know how.", "# We need to copy in order not to burn up the iterators", "elements", "=", "[", "x", "for", "x", "in", "regex_splitter", ".", "split", "(", "self", ".", "coord_str", ")", "if", "x", "]", "element_parsers", "=", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", "for", "ii", ",", "(", "element", ",", "element_parser", ")", "in", "enumerate", "(", "zip", "(", "elements", ",", "element_parsers", ")", ")", ":", "if", "element_parser", "is", "coordinate", ":", "unit", "=", "self", ".", "coordinate_units", "[", "self", ".", "coordsys", "]", "[", "ii", "%", "2", "]", "coord_list", ".", "append", "(", "element_parser", "(", "element", ",", "unit", ")", ")", "elif", "self", ".", "coordinate_units", "[", "self", ".", "coordsys", "]", "[", "0", "]", "is", "u", ".", "dimensionless_unscaled", ":", "coord_list", ".", "append", "(", "element_parser", "(", "element", ",", "unit", "=", "u", ".", "dimensionless_unscaled", ")", ")", "else", ":", "coord_list", ".", "append", "(", "element_parser", "(", "element", ")", ")", "if", "self", ".", "region_type", "in", "[", "'ellipse'", ",", "'box'", "]", "and", "len", "(", "coord_list", ")", "%", "2", "==", "1", ":", "coord_list", "[", "-", "1", "]", "=", "CoordinateParser", ".", "parse_angular_length_quantity", "(", "elements", "[", "len", "(", "coord_list", ")", "-", "1", "]", ")", "# Reset iterator for ellipse and annulus", "# Note that this cannot be done with copy.deepcopy on python2", "if", "self", ".", "region_type", "in", "[", "'ellipse'", ",", "'annulus'", "]", ":", "self", ".", "language_spec", "[", "self", ".", "region_type", "]", "=", "itertools", ".", "chain", "(", "(", "coordinate", ",", "coordinate", ")", ",", "itertools", ".", "cycle", "(", "(", "radius", ",", ")", ")", ")", "self", ".", "coord", "=", "coord_list" ]
Convert coordinate string to objects
[ "Convert", "coordinate", "string", "to", "objects" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L465-L494
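The iterator bookkeeping in convert_coordinates is easiest to see in isolation. A minimal sketch of the same parser-spec pattern, using hypothetical parse_coordinate/parse_radius stand-ins rather than the real regions parsers:

import itertools

# Hypothetical stand-ins for the real coordinate and radius parsers.
def parse_coordinate(text):
    return ('coord', float(text))

def parse_radius(text):
    return ('radius', float(text))

# Same shape as language_spec['annulus']: two coordinates, then radii forever.
spec = itertools.chain((parse_coordinate, parse_coordinate),
                       itertools.cycle((parse_radius,)))

elements = ['12.5', '-30.1', '3', '5', '7']
parsed = [parser(el) for el, parser in zip(elements, spec)]
# [('coord', 12.5), ('coord', -30.1), ('radius', 3.0), ('radius', 5.0), ('radius', 7.0)]

Because zip consumes the chain, it cannot be reused for the next region, which is why the method rebuilds language_spec[self.region_type] before returning.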
237,887
astropy/regions
regions/io/ds9/read.py
DS9RegionParser.convert_meta
def convert_meta(self): """ Convert meta string to dict """ meta_ = DS9Parser.parse_meta(self.meta_str) self.meta = copy.deepcopy(self.global_meta) self.meta.update(meta_) # the 'include' is not part of the metadata string; # it is pre-parsed as part of the shape type and should always # override the global one self.include = self.meta.get('include', True) if self.include == '' else self.include != '-' self.meta['include'] = self.include
python
def convert_meta(self): """ Convert meta string to dict """ meta_ = DS9Parser.parse_meta(self.meta_str) self.meta = copy.deepcopy(self.global_meta) self.meta.update(meta_) # the 'include' is not part of the metadata string; # it is pre-parsed as part of the shape type and should always # override the global one self.include = self.meta.get('include', True) if self.include == '' else self.include != '-' self.meta['include'] = self.include
[ "def", "convert_meta", "(", "self", ")", ":", "meta_", "=", "DS9Parser", ".", "parse_meta", "(", "self", ".", "meta_str", ")", "self", ".", "meta", "=", "copy", ".", "deepcopy", "(", "self", ".", "global_meta", ")", "self", ".", "meta", ".", "update", "(", "meta_", ")", "# the 'include' is not part of the metadata string;", "# it is pre-parsed as part of the shape type and should always", "# override the global one", "self", ".", "include", "=", "self", ".", "meta", ".", "get", "(", "'include'", ",", "True", ")", "if", "self", ".", "include", "==", "''", "else", "self", ".", "include", "!=", "'-'", "self", ".", "meta", "[", "'include'", "]", "=", "self", ".", "include" ]
Convert meta string to dict
[ "Convert", "meta", "string", "to", "dict" ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/io/ds9/read.py#L496-L507
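The include handling above packs three cases into one ternary. A standalone sketch of the same rule, assuming the pre-parsed prefix is '' (no prefix), '-' (excluded) or '+' (included) as in DS9 region syntax:

# The shape prefix wins over anything stored in the metadata dict.
def resolve_include(prefix, meta):
    if prefix == '':
        return meta.get('include', True)   # fall back to the global/meta value
    return prefix != '-'                   # explicit prefix on the shape itself

assert resolve_include('', {}) is True                    # nothing specified -> included
assert resolve_include('', {'include': False}) is False   # global metadata applies
assert resolve_include('-', {'include': True}) is False   # '-circle(...)' is always excluded
assert resolve_include('+', {}) is True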
237,888
astropy/regions
regions/core/pixcoord.py
PixCoord._validate
def _validate(val, name, expected='any'): """Validate that a given object is an appropriate `PixCoord`. This is used for input validation throughout the regions package, especially in the `__init__` method of pixel region classes. Parameters ---------- val : `PixCoord` The object to check name : str Parameter name (used for error messages) expected : {'any', 'scalar', 'not scalar'} What kind of PixCoord to check for Returns ------- val : `PixCoord` The input object (at the moment unmodified, might do fix-ups here later) """ if not isinstance(val, PixCoord): raise TypeError('{} must be a PixCoord'.format(name)) if expected == 'any': pass elif expected == 'scalar': if not val.isscalar: raise ValueError('{} must be a scalar PixCoord'.format(name)) elif expected == 'not scalar': if val.isscalar: raise ValueError('{} must be a non-scalar PixCoord'.format(name)) else: raise ValueError('Invalid argument for `expected`: {}'.format(expected)) return val
python
def _validate(val, name, expected='any'): """Validate that a given object is an appropriate `PixCoord`. This is used for input validation throughout the regions package, especially in the `__init__` method of pixel region classes. Parameters ---------- val : `PixCoord` The object to check name : str Parameter name (used for error messages) expected : {'any', 'scalar', 'not scalar'} What kind of PixCoord to check for Returns ------- val : `PixCoord` The input object (at the moment unmodified, might do fix-ups here later) """ if not isinstance(val, PixCoord): raise TypeError('{} must be a PixCoord'.format(name)) if expected == 'any': pass elif expected == 'scalar': if not val.isscalar: raise ValueError('{} must be a scalar PixCoord'.format(name)) elif expected == 'not scalar': if val.isscalar: raise ValueError('{} must be a non-scalar PixCoord'.format(name)) else: raise ValueError('Invalid argument for `expected`: {}'.format(expected)) return val
[ "def", "_validate", "(", "val", ",", "name", ",", "expected", "=", "'any'", ")", ":", "if", "not", "isinstance", "(", "val", ",", "PixCoord", ")", ":", "raise", "TypeError", "(", "'{} must be a PixCoord'", ".", "format", "(", "name", ")", ")", "if", "expected", "==", "'any'", ":", "pass", "elif", "expected", "==", "'scalar'", ":", "if", "not", "val", ".", "isscalar", ":", "raise", "ValueError", "(", "'{} must be a scalar PixCoord'", ".", "format", "(", "name", ")", ")", "elif", "expected", "==", "'not scalar'", ":", "if", "val", ".", "isscalar", ":", "raise", "ValueError", "(", "'{} must be a non-scalar PixCoord'", ".", "format", "(", "name", ")", ")", "else", ":", "raise", "ValueError", "(", "'Invalid argument for `expected`: {}'", ".", "format", "(", "expected", ")", ")", "return", "val" ]
Validate that a given object is an appropriate `PixCoord`. This is used for input validation throughout the regions package, especially in the `__init__` method of pixel region classes. Parameters ---------- val : `PixCoord` The object to check name : str Parameter name (used for error messages) expected : {'any', 'scalar', 'not scalar'} What kind of PixCoord to check for Returns ------- val : `PixCoord` The input object (at the moment unmodified, might do fix-ups here later)
[ "Validate", "that", "a", "given", "object", "is", "an", "appropriate", "PixCoord", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L45-L79
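A short usage sketch for the validator, assuming PixCoord is importable from the top-level regions namespace and that _validate is the staticmethod it appears to be (any decorator sits outside the extracted snippet):

import numpy as np
from regions import PixCoord

center = PixCoord(x=42.0, y=7.5)                      # scalar pixel coordinate
vertices = PixCoord(x=np.arange(4), y=np.arange(4))   # array-valued coordinate

PixCoord._validate(center, name='center', expected='scalar')          # passes
PixCoord._validate(vertices, name='vertices', expected='not scalar')  # passes

try:
    PixCoord._validate((42.0, 7.5), name='center')    # a plain tuple is rejected
except TypeError as err:
    print(err)                                        # center must be a PixCoord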
237,889
astropy/regions
regions/core/pixcoord.py
PixCoord.to_sky
def to_sky(self, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`. See parameter description there. """ return SkyCoord.from_pixel( xp=self.x, yp=self.y, wcs=wcs, origin=origin, mode=mode, )
python
def to_sky(self, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`. See parameter description there. """ return SkyCoord.from_pixel( xp=self.x, yp=self.y, wcs=wcs, origin=origin, mode=mode, )
[ "def", "to_sky", "(", "self", ",", "wcs", ",", "origin", "=", "_DEFAULT_WCS_ORIGIN", ",", "mode", "=", "_DEFAULT_WCS_MODE", ")", ":", "return", "SkyCoord", ".", "from_pixel", "(", "xp", "=", "self", ".", "x", ",", "yp", "=", "self", ".", "y", ",", "wcs", "=", "wcs", ",", "origin", "=", "origin", ",", "mode", "=", "mode", ",", ")" ]
Convert this `PixCoord` to `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.from_pixel`. See parameter description there.
[ "Convert", "this", "PixCoord", "to", "~astropy", ".", "coordinates", ".", "SkyCoord", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L123-L132
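A usage sketch for to_sky with a small hand-built TAN WCS; origin is passed explicitly so the example does not depend on the module default named in the signature:

from astropy.wcs import WCS
from regions import PixCoord

# Minimal celestial WCS: reference pixel (50, 50) at (RA, Dec) = (10, 20) deg,
# 0.1 deg pixels.
wcs = WCS(naxis=2)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN']
wcs.wcs.crpix = [50, 50]
wcs.wcs.crval = [10, 20]
wcs.wcs.cdelt = [-0.1, 0.1]

pix = PixCoord(x=49, y=49)            # the reference pixel in 0-based convention
sky = pix.to_sky(wcs, origin=0)       # SkyCoord close to (10 deg, 20 deg)
print(sky)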
237,890
astropy/regions
regions/core/pixcoord.py
PixCoord.from_sky
def from_sky(cls, skycoord, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Create `PixCoord` from `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`. See parameter description there. """ x, y = skycoord.to_pixel(wcs=wcs, origin=origin, mode=mode) return cls(x=x, y=y)
python
def from_sky(cls, skycoord, wcs, origin=_DEFAULT_WCS_ORIGIN, mode=_DEFAULT_WCS_MODE): """Create `PixCoord` from `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`. See parameter description there. """ x, y = skycoord.to_pixel(wcs=wcs, origin=origin, mode=mode) return cls(x=x, y=y)
[ "def", "from_sky", "(", "cls", ",", "skycoord", ",", "wcs", ",", "origin", "=", "_DEFAULT_WCS_ORIGIN", ",", "mode", "=", "_DEFAULT_WCS_MODE", ")", ":", "x", ",", "y", "=", "skycoord", ".", "to_pixel", "(", "wcs", "=", "wcs", ",", "origin", "=", "origin", ",", "mode", "=", "mode", ")", "return", "cls", "(", "x", "=", "x", ",", "y", "=", "y", ")" ]
Create `PixCoord` from `~astropy.coordinates.SkyCoord`. Calls :meth:`astropy.coordinates.SkyCoord.to_pixel`. See parameter description there.
[ "Create", "PixCoord", "from", "~astropy", ".", "coordinates", ".", "SkyCoord", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L135-L142
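from_sky is the inverse of to_sky; a round-trip sketch (the WCS is rebuilt inline so the snippet stands alone):

import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.wcs import WCS
from regions import PixCoord

wcs = WCS(naxis=2)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN']
wcs.wcs.crpix = [50, 50]
wcs.wcs.crval = [10, 20]
wcs.wcs.cdelt = [-0.1, 0.1]

sky = SkyCoord(10.05 * u.deg, 20.0 * u.deg)
pix = PixCoord.from_sky(sky, wcs, origin=0)     # SkyCoord -> PixCoord
back = pix.to_sky(wcs, origin=0)                # ... and back again
print(pix, back.separation(sky).to(u.arcsec))   # separation should be ~0 arcsec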
237,891
astropy/regions
regions/core/pixcoord.py
PixCoord.separation
def separation(self, other): r"""Separation to another pixel coordinate. This is the two-dimensional cartesian separation :math:`d` with .. math:: d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2} Parameters ---------- other : `PixCoord` Other pixel coordinate Returns ------- separation : `numpy.array` Separation in pixels """ dx = other.x - self.x dy = other.y - self.y return np.hypot(dx, dy)
python
def separation(self, other): r"""Separation to another pixel coordinate. This is the two-dimensional cartesian separation :math:`d` with .. math:: d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2} Parameters ---------- other : `PixCoord` Other pixel coordinate Returns ------- separation : `numpy.array` Separation in pixels """ dx = other.x - self.x dy = other.y - self.y return np.hypot(dx, dy)
[ "def", "separation", "(", "self", ",", "other", ")", ":", "dx", "=", "other", ".", "x", "-", "self", ".", "x", "dy", "=", "other", ".", "y", "-", "self", ".", "y", "return", "np", ".", "hypot", "(", "dx", ",", "dy", ")" ]
r"""Separation to another pixel coordinate. This is the two-dimensional cartesian separation :math:`d` with .. math:: d = \sqrt{(x_1 - x_2) ^ 2 + (y_1 - y_2) ^ 2} Parameters ---------- other : `PixCoord` Other pixel coordinate Returns ------- separation : `numpy.array` Separation in pixels
[ "r", "Separation", "to", "another", "pixel", "coordinate", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/core/pixcoord.py#L144-L164
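separation is plain Euclidean distance in pixel space; a quick check against the formula in the docstring, including the array case:

import numpy as np
from regions import PixCoord

a = PixCoord(x=0, y=0)
b = PixCoord(x=3, y=4)
print(a.separation(b))        # 5.0, i.e. sqrt(3**2 + 4**2)

# Broadcasting works when one side holds arrays of coordinates.
grid = PixCoord(x=np.array([0, 3, 6]), y=np.array([0, 4, 8]))
print(a.separation(grid))     # [ 0.  5. 10.]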
237,892
astropy/regions
regions/_utils/wcs_helpers.py
skycoord_to_pixel_scale_angle
def skycoord_to_pixel_scale_angle(skycoord, wcs, small_offset=1 * u.arcsec): """ Convert a set of SkyCoord coordinates into pixel coordinates, pixel scales, and position angles. Parameters ---------- skycoord : `~astropy.coordinates.SkyCoord` Sky coordinates wcs : `~astropy.wcs.WCS` The WCS transformation to use small_offset : `~astropy.units.Quantity` A small offset to use to compute the angle Returns ------- pixcoord : `~regions.PixCoord` Pixel coordinates scale : float The pixel scale at each location, in degrees/pixel angle : `~astropy.units.Quantity` The position angle of the celestial coordinate system in pixel space. """ # Convert to pixel coordinates x, y = skycoord_to_pixel(skycoord, wcs, mode=skycoord_to_pixel_mode) pixcoord = PixCoord(x=x, y=y) # We take a point directly 'above' (in latitude) the position requested # and convert it to pixel coordinates, then we use that to figure out the # scale and position angle of the coordinate system at the location of # the points. # Find the coordinates as a representation object r_old = skycoord.represent_as('unitspherical') # Add a a small perturbation in the latitude direction (since longitude # is more difficult because it is not directly an angle). dlat = small_offset r_new = UnitSphericalRepresentation(r_old.lon, r_old.lat + dlat) coords_offset = skycoord.realize_frame(r_new) # Find pixel coordinates of offset coordinates x_offset, y_offset = skycoord_to_pixel(coords_offset, wcs, mode=skycoord_to_pixel_mode) # Find vector dx = x_offset - x dy = y_offset - y # Find the length of the vector scale = np.hypot(dx, dy) / dlat.to('degree').value # Find the position angle angle = np.arctan2(dy, dx) * u.radian return pixcoord, scale, angle
python
def skycoord_to_pixel_scale_angle(skycoord, wcs, small_offset=1 * u.arcsec): """ Convert a set of SkyCoord coordinates into pixel coordinates, pixel scales, and position angles. Parameters ---------- skycoord : `~astropy.coordinates.SkyCoord` Sky coordinates wcs : `~astropy.wcs.WCS` The WCS transformation to use small_offset : `~astropy.units.Quantity` A small offset to use to compute the angle Returns ------- pixcoord : `~regions.PixCoord` Pixel coordinates scale : float The pixel scale at each location, in degrees/pixel angle : `~astropy.units.Quantity` The position angle of the celestial coordinate system in pixel space. """ # Convert to pixel coordinates x, y = skycoord_to_pixel(skycoord, wcs, mode=skycoord_to_pixel_mode) pixcoord = PixCoord(x=x, y=y) # We take a point directly 'above' (in latitude) the position requested # and convert it to pixel coordinates, then we use that to figure out the # scale and position angle of the coordinate system at the location of # the points. # Find the coordinates as a representation object r_old = skycoord.represent_as('unitspherical') # Add a a small perturbation in the latitude direction (since longitude # is more difficult because it is not directly an angle). dlat = small_offset r_new = UnitSphericalRepresentation(r_old.lon, r_old.lat + dlat) coords_offset = skycoord.realize_frame(r_new) # Find pixel coordinates of offset coordinates x_offset, y_offset = skycoord_to_pixel(coords_offset, wcs, mode=skycoord_to_pixel_mode) # Find vector dx = x_offset - x dy = y_offset - y # Find the length of the vector scale = np.hypot(dx, dy) / dlat.to('degree').value # Find the position angle angle = np.arctan2(dy, dx) * u.radian return pixcoord, scale, angle
[ "def", "skycoord_to_pixel_scale_angle", "(", "skycoord", ",", "wcs", ",", "small_offset", "=", "1", "*", "u", ".", "arcsec", ")", ":", "# Convert to pixel coordinates", "x", ",", "y", "=", "skycoord_to_pixel", "(", "skycoord", ",", "wcs", ",", "mode", "=", "skycoord_to_pixel_mode", ")", "pixcoord", "=", "PixCoord", "(", "x", "=", "x", ",", "y", "=", "y", ")", "# We take a point directly 'above' (in latitude) the position requested", "# and convert it to pixel coordinates, then we use that to figure out the", "# scale and position angle of the coordinate system at the location of", "# the points.", "# Find the coordinates as a representation object", "r_old", "=", "skycoord", ".", "represent_as", "(", "'unitspherical'", ")", "# Add a a small perturbation in the latitude direction (since longitude", "# is more difficult because it is not directly an angle).", "dlat", "=", "small_offset", "r_new", "=", "UnitSphericalRepresentation", "(", "r_old", ".", "lon", ",", "r_old", ".", "lat", "+", "dlat", ")", "coords_offset", "=", "skycoord", ".", "realize_frame", "(", "r_new", ")", "# Find pixel coordinates of offset coordinates", "x_offset", ",", "y_offset", "=", "skycoord_to_pixel", "(", "coords_offset", ",", "wcs", ",", "mode", "=", "skycoord_to_pixel_mode", ")", "# Find vector", "dx", "=", "x_offset", "-", "x", "dy", "=", "y_offset", "-", "y", "# Find the length of the vector", "scale", "=", "np", ".", "hypot", "(", "dx", ",", "dy", ")", "/", "dlat", ".", "to", "(", "'degree'", ")", ".", "value", "# Find the position angle", "angle", "=", "np", ".", "arctan2", "(", "dy", ",", "dx", ")", "*", "u", ".", "radian", "return", "pixcoord", ",", "scale", ",", "angle" ]
Convert a set of SkyCoord coordinates into pixel coordinates, pixel scales, and position angles. Parameters ---------- skycoord : `~astropy.coordinates.SkyCoord` Sky coordinates wcs : `~astropy.wcs.WCS` The WCS transformation to use small_offset : `~astropy.units.Quantity` A small offset to use to compute the angle Returns ------- pixcoord : `~regions.PixCoord` Pixel coordinates scale : float The pixel scale at each location, in degrees/pixel angle : `~astropy.units.Quantity` The position angle of the celestial coordinate system in pixel space.
[ "Convert", "a", "set", "of", "SkyCoord", "coordinates", "into", "pixel", "coordinates", "pixel", "scales", "and", "position", "angles", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/wcs_helpers.py#L13-L69
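A usage sketch for the helper, assuming it can be imported from the private module recorded in the path field (regions/_utils/wcs_helpers.py) and reusing the simple TAN WCS from the PixCoord examples:

import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.wcs import WCS
from regions._utils.wcs_helpers import skycoord_to_pixel_scale_angle

wcs = WCS(naxis=2)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN']
wcs.wcs.crpix = [50, 50]
wcs.wcs.crval = [10, 20]
wcs.wcs.cdelt = [-0.1, 0.1]

coord = SkyCoord(10 * u.deg, 20 * u.deg)
pixcoord, scale, angle = skycoord_to_pixel_scale_angle(coord, wcs)
print(scale)             # ratio of pixel offset to latitude offset in degrees (~10 here)
print(angle.to(u.deg))   # direction of +latitude in pixel space (~90 deg for this WCS)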
237,893
astropy/regions
regions/_utils/wcs_helpers.py
assert_angle
def assert_angle(name, q): """ Check that ``q`` is an angular `~astropy.units.Quantity`. """ if isinstance(q, u.Quantity): if q.unit.physical_type == 'angle': pass else: raise ValueError("{0} should have angular units".format(name)) else: raise TypeError("{0} should be a Quantity instance".format(name))
python
def assert_angle(name, q): """ Check that ``q`` is an angular `~astropy.units.Quantity`. """ if isinstance(q, u.Quantity): if q.unit.physical_type == 'angle': pass else: raise ValueError("{0} should have angular units".format(name)) else: raise TypeError("{0} should be a Quantity instance".format(name))
[ "def", "assert_angle", "(", "name", ",", "q", ")", ":", "if", "isinstance", "(", "q", ",", "u", ".", "Quantity", ")", ":", "if", "q", ".", "unit", ".", "physical_type", "==", "'angle'", ":", "pass", "else", ":", "raise", "ValueError", "(", "\"{0} should have angular units\"", ".", "format", "(", "name", ")", ")", "else", ":", "raise", "TypeError", "(", "\"{0} should be a Quantity instance\"", ".", "format", "(", "name", ")", ")" ]
Check that ``q`` is an angular `~astropy.units.Quantity`.
[ "Check", "that", "q", "is", "an", "angular", "~astropy", ".", "units", ".", "Quantity", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/regions/_utils/wcs_helpers.py#L86-L96
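assert_angle is a small guard used before accepting angular parameters; a sketch of its three outcomes, assuming the same private import path as above:

import astropy.units as u
from regions._utils.wcs_helpers import assert_angle

assert_angle('radius', 3 * u.arcsec)      # angular Quantity: passes silently

try:
    assert_angle('radius', 3 * u.meter)   # Quantity, but not angular
except ValueError as err:
    print(err)                            # radius should have angular units

try:
    assert_angle('radius', 3.0)           # not a Quantity at all
except TypeError as err:
    print(err)                            # radius should be a Quantity instance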
237,894
astropy/regions
ah_bootstrap.py
_silence
def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr
python
def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr
[ "def", "_silence", "(", ")", ":", "old_stdout", "=", "sys", ".", "stdout", "old_stderr", "=", "sys", ".", "stderr", "sys", ".", "stdout", "=", "_DummyFile", "(", ")", "sys", ".", "stderr", "=", "_DummyFile", "(", ")", "exception_occurred", "=", "False", "try", ":", "yield", "except", ":", "exception_occurred", "=", "True", "# Go ahead and clean up so that exception handling can work normally", "sys", ".", "stdout", "=", "old_stdout", "sys", ".", "stderr", "=", "old_stderr", "raise", "if", "not", "exception_occurred", ":", "sys", ".", "stdout", "=", "old_stdout", "sys", ".", "stderr", "=", "old_stderr" ]
A context manager that silences sys.stdout and sys.stderr.
[ "A", "context", "manager", "that", "silences", "sys", ".", "stdout", "and", "sys", ".", "stderr", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L914-L933
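_silence is written as a generator, so in ah_bootstrap it is presumably wrapped with contextlib.contextmanager (the decorator and the _DummyFile helper sit outside the extracted snippet). A self-contained sketch of the same pattern, using try/finally instead of the exception_occurred bookkeeping, which has the same effect:

import contextlib
import sys


class _DummyFile(object):
    """Discard everything written to it (stand-in for the real _DummyFile)."""
    def write(self, text):
        pass

    def flush(self):
        pass


@contextlib.contextmanager
def silence():
    """Temporarily swallow stdout/stderr, restoring them even on error."""
    old_stdout, old_stderr = sys.stdout, sys.stderr
    sys.stdout, sys.stderr = _DummyFile(), _DummyFile()
    try:
        yield
    finally:
        sys.stdout, sys.stderr = old_stdout, old_stderr


with silence():
    print('this line is swallowed')
print('this line is visible again')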
237,895
astropy/regions
ah_bootstrap.py
use_astropy_helpers
def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run()
python
def use_astropy_helpers(**kwargs): """ Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`. """ global BOOTSTRAPPER config = BOOTSTRAPPER.config config.update(**kwargs) # Create a new bootstrapper with the updated configuration and run it BOOTSTRAPPER = _Bootstrapper(**config) BOOTSTRAPPER.run()
[ "def", "use_astropy_helpers", "(", "*", "*", "kwargs", ")", ":", "global", "BOOTSTRAPPER", "config", "=", "BOOTSTRAPPER", ".", "config", "config", ".", "update", "(", "*", "*", "kwargs", ")", "# Create a new bootstrapper with the updated configuration and run it", "BOOTSTRAPPER", "=", "_Bootstrapper", "(", "*", "*", "config", ")", "BOOTSTRAPPER", ".", "run", "(", ")" ]
Ensure that the `astropy_helpers` module is available and is importable. This supports automatic submodule initialization if astropy_helpers is included in a project as a git submodule, or will download it from PyPI if necessary. Parameters ---------- path : str or None, optional A filesystem path relative to the root of the project's source code that should be added to `sys.path` so that `astropy_helpers` can be imported from that path. If the path is a git submodule it will automatically be initialized and/or updated. The path may also be to a ``.tar.gz`` archive of the astropy_helpers source distribution. In this case the archive is automatically unpacked and made temporarily available on `sys.path` as a ``.egg`` archive. If `None` skip straight to downloading. download_if_needed : bool, optional If the provided filesystem path is not found an attempt will be made to download astropy_helpers from PyPI. It will then be made temporarily available on `sys.path` as a ``.egg`` archive (using the ``setup_requires`` feature of setuptools. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. index_url : str, optional If provided, use a different URL for the Python package index than the main PyPI server. use_git : bool, optional If `False` no git commands will be used--this effectively disables support for git submodules. If the ``--no-git`` option is given at the command line the value of this argument is overridden to `False`. auto_upgrade : bool, optional By default, when installing a package from a non-development source distribution ah_boostrap will try to automatically check for patch releases to astropy-helpers on PyPI and use the patched version over any bundled versions. Setting this to `False` will disable that functionality. If the ``--offline`` option is given at the command line the value of this argument is overridden to `False`. offline : bool, optional If `False` disable all actions that require an internet connection, including downloading packages from the package index and fetching updates to any git submodule. Defaults to `True`.
[ "Ensure", "that", "the", "astropy_helpers", "module", "is", "available", "and", "is", "importable", ".", "This", "supports", "automatic", "submodule", "initialization", "if", "astropy_helpers", "is", "included", "in", "a", "project", "as", "a", "git", "submodule", "or", "will", "download", "it", "from", "PyPI", "if", "necessary", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L959-L1022
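In practice use_astropy_helpers is called from a project's setup.py before setup() runs; a sketch assuming the conventional astropy_helpers submodule layout described in the docstring:

# setup.py (sketch) -- bootstrap astropy_helpers before importing from it.
import ah_bootstrap   # ships alongside setup.py

ah_bootstrap.use_astropy_helpers(
    path='astropy_helpers',     # git submodule or unpacked source directory
    download_if_needed=True,    # fall back to PyPI if the path is missing
    auto_upgrade=False,         # do not look for newer patch releases
)

import astropy_helpers          # now importable (submodule, archive, or downloaded egg)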
237,896
astropy/regions
ah_bootstrap.py
_Bootstrapper.config
def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname))
python
def config(self): """ A `dict` containing the options this `_Bootstrapper` was configured with. """ return dict((optname, getattr(self, optname)) for optname, _ in CFG_OPTIONS if hasattr(self, optname))
[ "def", "config", "(", "self", ")", ":", "return", "dict", "(", "(", "optname", ",", "getattr", "(", "self", ",", "optname", ")", ")", "for", "optname", ",", "_", "in", "CFG_OPTIONS", "if", "hasattr", "(", "self", ",", "optname", ")", ")" ]
A `dict` containing the options this `_Bootstrapper` was configured with.
[ "A", "dict", "containing", "the", "options", "this", "_Bootstrapper", "was", "configured", "with", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L393-L400
237,897
astropy/regions
ah_bootstrap.py
_Bootstrapper.get_local_directory_dist
def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. """ if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
python
def get_local_directory_dist(self): """ Handle importing a vendored package from a subdirectory of the source distribution. """ if not os.path.isdir(self.path): return log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 'submodule' if self.is_submodule else 'directory', self.path)) dist = self._directory_import() if dist is None: log.warn( 'The requested path {0!r} for importing {1} does not ' 'exist, or does not contain a copy of the {1} ' 'package.'.format(self.path, PACKAGE_NAME)) elif self.auto_upgrade and not self.is_submodule: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
[ "def", "get_local_directory_dist", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "path", ")", ":", "return", "log", ".", "info", "(", "'Attempting to import astropy_helpers from {0} {1!r}'", ".", "format", "(", "'submodule'", "if", "self", ".", "is_submodule", "else", "'directory'", ",", "self", ".", "path", ")", ")", "dist", "=", "self", ".", "_directory_import", "(", ")", "if", "dist", "is", "None", ":", "log", ".", "warn", "(", "'The requested path {0!r} for importing {1} does not '", "'exist, or does not contain a copy of the {1} '", "'package.'", ".", "format", "(", "self", ".", "path", ",", "PACKAGE_NAME", ")", ")", "elif", "self", ".", "auto_upgrade", "and", "not", "self", ".", "is_submodule", ":", "# A version of astropy-helpers was found on the available path, but", "# check to see if a bugfix release is available on PyPI", "upgrade", "=", "self", ".", "_do_upgrade", "(", "dist", ")", "if", "upgrade", "is", "not", "None", ":", "dist", "=", "upgrade", "return", "dist" ]
Handle importing a vendored package from a subdirectory of the source distribution.
[ "Handle", "importing", "a", "vendored", "package", "from", "a", "subdirectory", "of", "the", "source", "distribution", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L402-L429
237,898
astropy/regions
ah_bootstrap.py
_Bootstrapper.get_local_file_dist
def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
python
def get_local_file_dist(self): """ Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive. """ if not os.path.isfile(self.path): return log.info('Attempting to unpack and import astropy_helpers from ' '{0!r}'.format(self.path)) try: dist = self._do_download(find_links=[self.path]) except Exception as e: if DEBUG: raise log.warn( 'Failed to import {0} from the specified archive {1!r}: ' '{2}'.format(PACKAGE_NAME, self.path, str(e))) dist = None if dist is not None and self.auto_upgrade: # A version of astropy-helpers was found on the available path, but # check to see if a bugfix release is available on PyPI upgrade = self._do_upgrade(dist) if upgrade is not None: dist = upgrade return dist
[ "def", "get_local_file_dist", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "path", ")", ":", "return", "log", ".", "info", "(", "'Attempting to unpack and import astropy_helpers from '", "'{0!r}'", ".", "format", "(", "self", ".", "path", ")", ")", "try", ":", "dist", "=", "self", ".", "_do_download", "(", "find_links", "=", "[", "self", ".", "path", "]", ")", "except", "Exception", "as", "e", ":", "if", "DEBUG", ":", "raise", "log", ".", "warn", "(", "'Failed to import {0} from the specified archive {1!r}: '", "'{2}'", ".", "format", "(", "PACKAGE_NAME", ",", "self", ".", "path", ",", "str", "(", "e", ")", ")", ")", "dist", "=", "None", "if", "dist", "is", "not", "None", "and", "self", ".", "auto_upgrade", ":", "# A version of astropy-helpers was found on the available path, but", "# check to see if a bugfix release is available on PyPI", "upgrade", "=", "self", ".", "_do_upgrade", "(", "dist", ")", "if", "upgrade", "is", "not", "None", ":", "dist", "=", "upgrade", "return", "dist" ]
Handle importing from a source archive; this also uses setup_requires but points easy_install directly to the source archive.
[ "Handle", "importing", "from", "a", "source", "archive", ";", "this", "also", "uses", "setup_requires", "but", "points", "easy_install", "directly", "to", "the", "source", "archive", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L431-L461
237,899
astropy/regions
ah_bootstrap.py
_Bootstrapper._directory_import
def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): # We use subprocess instead of run_setup from setuptools to # avoid segmentation faults - see the following for more details: # https://github.com/cython/cython/issues/2104 sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist
python
def _directory_import(self): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(self.path) # Use an empty WorkingSet rather than the man # pkg_resources.working_set, since on older versions of setuptools this # will invoke a VersionConflict when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): # We use subprocess instead of run_setup from setuptools to # avoid segmentation faults - see the following for more details: # https://github.com/cython/cython/issues/2104 sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist
[ "def", "_directory_import", "(", "self", ")", ":", "# Return True on success, False on failure but download is allowed, and", "# otherwise raise SystemExit", "path", "=", "os", ".", "path", ".", "abspath", "(", "self", ".", "path", ")", "# Use an empty WorkingSet rather than the man", "# pkg_resources.working_set, since on older versions of setuptools this", "# will invoke a VersionConflict when trying to install an upgrade", "ws", "=", "pkg_resources", ".", "WorkingSet", "(", "[", "]", ")", "ws", ".", "add_entry", "(", "path", ")", "dist", "=", "ws", ".", "by_key", ".", "get", "(", "DIST_NAME", ")", "if", "dist", "is", "None", ":", "# We didn't find an egg-info/dist-info in the given path, but if a", "# setup.py exists we can generate it", "setup_py", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'setup.py'", ")", "if", "os", ".", "path", ".", "isfile", "(", "setup_py", ")", ":", "# We use subprocess instead of run_setup from setuptools to", "# avoid segmentation faults - see the following for more details:", "# https://github.com/cython/cython/issues/2104", "sp", ".", "check_output", "(", "[", "sys", ".", "executable", ",", "'setup.py'", ",", "'egg_info'", "]", ",", "cwd", "=", "path", ")", "for", "dist", "in", "pkg_resources", ".", "find_distributions", "(", "path", ",", "True", ")", ":", "# There should be only one...", "return", "dist", "return", "dist" ]
Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise.
[ "Import", "astropy_helpers", "from", "the", "given", "path", "which", "will", "be", "added", "to", "sys", ".", "path", "." ]
452d962c417e4ff20d1268f99535c6ff89c83437
https://github.com/astropy/regions/blob/452d962c417e4ff20d1268f99535c6ff89c83437/ah_bootstrap.py#L486-L519
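The notable part of _directory_import is the throw-away WorkingSet used to look for a distribution in an arbitrary directory without touching the interpreter's global working set. A standalone sketch of that lookup, with the directory path and distribution name as placeholders:

import os
import pkg_resources

def find_dist_in_directory(path, dist_name):
    """Return the named distribution if ``path`` carries its egg-info/dist-info."""
    path = os.path.abspath(path)

    # A fresh, empty WorkingSet so the lookup cannot raise VersionConflict
    # against whatever is already importable in this interpreter.
    ws = pkg_resources.WorkingSet([])
    ws.add_entry(path)
    dist = ws.by_key.get(dist_name)

    if dist is None:
        # Fall back to scanning the directory directly; the method above does
        # this after generating metadata with 'setup.py egg_info'.
        for found in pkg_resources.find_distributions(path, only=True):
            return found
    return dist

print(find_dist_in_directory('astropy_helpers', 'astropy-helpers'))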